hexsha string | size int64 | ext string | lang string | max_stars_repo_path string | max_stars_repo_name string | max_stars_repo_head_hexsha string | max_stars_repo_licenses list | max_stars_count int64 | max_stars_repo_stars_event_min_datetime string | max_stars_repo_stars_event_max_datetime string | max_issues_repo_path string | max_issues_repo_name string | max_issues_repo_head_hexsha string | max_issues_repo_licenses list | max_issues_count int64 | max_issues_repo_issues_event_min_datetime string | max_issues_repo_issues_event_max_datetime string | max_forks_repo_path string | max_forks_repo_name string | max_forks_repo_head_hexsha string | max_forks_repo_licenses list | max_forks_count int64 | max_forks_repo_forks_event_min_datetime string | max_forks_repo_forks_event_max_datetime string | content string | avg_line_length float64 | max_line_length int64 | alphanum_fraction float64 | qsc_code_num_words_quality_signal int64 | qsc_code_num_chars_quality_signal float64 | qsc_code_mean_word_length_quality_signal float64 | qsc_code_frac_words_unique_quality_signal float64 | qsc_code_frac_chars_top_2grams_quality_signal float64 | qsc_code_frac_chars_top_3grams_quality_signal float64 | qsc_code_frac_chars_top_4grams_quality_signal float64 | qsc_code_frac_chars_dupe_5grams_quality_signal float64 | qsc_code_frac_chars_dupe_6grams_quality_signal float64 | qsc_code_frac_chars_dupe_7grams_quality_signal float64 | qsc_code_frac_chars_dupe_8grams_quality_signal float64 | qsc_code_frac_chars_dupe_9grams_quality_signal float64 | qsc_code_frac_chars_dupe_10grams_quality_signal float64 | qsc_code_frac_chars_replacement_symbols_quality_signal float64 | qsc_code_frac_chars_digital_quality_signal float64 | qsc_code_frac_chars_whitespace_quality_signal float64 | qsc_code_size_file_byte_quality_signal float64 | qsc_code_num_lines_quality_signal float64 | qsc_code_num_chars_line_max_quality_signal float64 | qsc_code_num_chars_line_mean_quality_signal float64 | 
qsc_code_frac_chars_alphabet_quality_signal float64 | qsc_code_frac_chars_comments_quality_signal float64 | qsc_code_cate_xml_start_quality_signal float64 | qsc_code_frac_lines_dupe_lines_quality_signal float64 | qsc_code_cate_autogen_quality_signal float64 | qsc_code_frac_lines_long_string_quality_signal float64 | qsc_code_frac_chars_string_length_quality_signal float64 | qsc_code_frac_chars_long_word_length_quality_signal float64 | qsc_code_frac_lines_string_concat_quality_signal float64 | qsc_code_cate_encoded_data_quality_signal float64 | qsc_code_frac_chars_hex_words_quality_signal float64 | qsc_code_frac_lines_prompt_comments_quality_signal float64 | qsc_code_frac_lines_assert_quality_signal float64 | qsc_codepython_cate_ast_quality_signal float64 | qsc_codepython_frac_lines_func_ratio_quality_signal float64 | qsc_codepython_cate_var_zero_quality_signal bool | qsc_codepython_frac_lines_pass_quality_signal float64 | qsc_codepython_frac_lines_import_quality_signal float64 | qsc_codepython_frac_lines_simplefunc_quality_signal float64 | qsc_codepython_score_lines_no_logic_quality_signal float64 | qsc_codepython_frac_lines_print_quality_signal float64 | qsc_code_num_words int64 | qsc_code_num_chars int64 | qsc_code_mean_word_length int64 | qsc_code_frac_words_unique null | qsc_code_frac_chars_top_2grams int64 | qsc_code_frac_chars_top_3grams int64 | qsc_code_frac_chars_top_4grams int64 | qsc_code_frac_chars_dupe_5grams int64 | qsc_code_frac_chars_dupe_6grams int64 | qsc_code_frac_chars_dupe_7grams int64 | qsc_code_frac_chars_dupe_8grams int64 | qsc_code_frac_chars_dupe_9grams int64 | qsc_code_frac_chars_dupe_10grams int64 | qsc_code_frac_chars_replacement_symbols int64 | qsc_code_frac_chars_digital int64 | qsc_code_frac_chars_whitespace int64 | qsc_code_size_file_byte int64 | qsc_code_num_lines int64 | qsc_code_num_chars_line_max int64 | qsc_code_num_chars_line_mean int64 | qsc_code_frac_chars_alphabet int64 | qsc_code_frac_chars_comments int64 | 
qsc_code_cate_xml_start int64 | qsc_code_frac_lines_dupe_lines int64 | qsc_code_cate_autogen int64 | qsc_code_frac_lines_long_string int64 | qsc_code_frac_chars_string_length int64 | qsc_code_frac_chars_long_word_length int64 | qsc_code_frac_lines_string_concat null | qsc_code_cate_encoded_data int64 | qsc_code_frac_chars_hex_words int64 | qsc_code_frac_lines_prompt_comments int64 | qsc_code_frac_lines_assert int64 | qsc_codepython_cate_ast int64 | qsc_codepython_frac_lines_func_ratio int64 | qsc_codepython_cate_var_zero int64 | qsc_codepython_frac_lines_pass int64 | qsc_codepython_frac_lines_import int64 | qsc_codepython_frac_lines_simplefunc int64 | qsc_codepython_score_lines_no_logic int64 | qsc_codepython_frac_lines_print int64 | effective string | hits int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
d5e3722f6a01e0b2c97a639bc1a8d5f858c7d947 | 2,727 | py | Python | app/upload/tools/uploads.py | gellerjulia/harmonization-website | c47b109d9110e34520ef43469b6b5ccac01cc178 | [
"MIT"
] | null | null | null | app/upload/tools/uploads.py | gellerjulia/harmonization-website | c47b109d9110e34520ef43469b6b5ccac01cc178 | [
"MIT"
] | null | null | null | app/upload/tools/uploads.py | gellerjulia/harmonization-website | c47b109d9110e34520ef43469b6b5ccac01cc178 | [
"MIT"
] | null | null | null | import requests
def upload_file(uploader_name, uploader_email, dataset_type, f):
url = "http://api:8887/query/dataset-upload/"
payload = {'uploader_name': uploader_name,
'uploader_email': uploader_email,
'dataset_type': dataset_type}
files = [('dataset_file', f.open(mode='rb'))]
headers = {}
response = requests.request(
"POST", url, headers=headers, data=payload, files=files)
return response
def handle_csv_only_file(uploader_name, uploader_email, dataset_type, f):
response = upload_file(uploader_name, uploader_email, dataset_type, f)
return response
def handle_flowers_file(uploader_name, uploader_email, dataset_type, f):
#default_storage.save('datasets/flowers/flowers.csv', f)
response = upload_file(uploader_name, uploader_email, dataset_type, f)
return response
def handle_unm_file(uploader_name, uploader_email, dataset_type, f):
#default_storage.save('datasets/unm/unm.csv', f)
# TODO - Validate csv header and then upload
# Upload to api
response = upload_file(uploader_name, uploader_email, dataset_type, f)
return response
def handle_neu_file(uploader_name, uploader_email, dataset_type, f):
# default_storage.save('datasets/neu/neu.csv', f)
# TODO - Validate csv header and then upload
# Upload to api
response = upload_file(uploader_name, uploader_email, dataset_type, f)
return response
def handle_dartmouth_file(uploader_name, uploader_email, dataset_type, f):
# default_storage.save('datasets/dartmouth/dartmouth.csv', f)
# TODO - Validate csv header and then upload
# Upload to api
response = upload_file(uploader_name, uploader_email, dataset_type, f)
return response
##JAG how do I change default storage.save ??
def handle_nhanes_bio_file(uploader_name, uploader_email, dataset_type, f):
# default_storage.save('datasets/dartmouth/dartmouth.csv', f)
# TODO - Validate csv header and then upload
# Upload to api
response = upload_file(uploader_name, uploader_email, dataset_type, f)
return response
def handle_nhanes_llod_file(uploader_name, uploader_email, dataset_type, f):
# default_storage.save('datasets/dartmouth/dartmouth.csv', f)
# TODO - Validate csv header and then upload
# Upload to api
response = upload_file(uploader_name, uploader_email, dataset_type, f)
return response
def handle_nhanes_dd_file(uploader_name, uploader_email, dataset_type, f):
# default_storage.save('datasets/dartmouth/dartmouth.csv', f)
# TODO - Validate csv header and then upload
# Upload to api
response = upload_file(uploader_name, uploader_email, dataset_type, f)
return response | 31.709302 | 76 | 0.733773 | 365 | 2,727 | 5.224658 | 0.145205 | 0.11956 | 0.199266 | 0.235973 | 0.8086 | 0.8086 | 0.8086 | 0.8086 | 0.787625 | 0.762979 | 0 | 0.001777 | 0.174551 | 2,727 | 86 | 77 | 31.709302 | 0.845402 | 0.284195 | 0 | 0.485714 | 0 | 0 | 0.048654 | 0 | 0 | 0 | 0 | 0.011628 | 0 | 1 | 0.257143 | false | 0 | 0.028571 | 0 | 0.542857 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 10 |
911b26d406357f3bdd35870f2b017f050ee1c13f | 19,979 | py | Python | fabric_cf/actor/test/core/kernel/kernel_test.py | fabric-testbed/ActorBase | 3c7dd040ee79fef0759e66996c93eeec57c790b2 | [
"MIT"
] | null | null | null | fabric_cf/actor/test/core/kernel/kernel_test.py | fabric-testbed/ActorBase | 3c7dd040ee79fef0759e66996c93eeec57c790b2 | [
"MIT"
] | null | null | null | fabric_cf/actor/test/core/kernel/kernel_test.py | fabric-testbed/ActorBase | 3c7dd040ee79fef0759e66996c93eeec57c790b2 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# MIT License
#
# Copyright (c) 2020 FABRIC Testbed
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
#
# Author: Komal Thareja (kthare10@renci.org)
import unittest
import time
from fabric_cf.actor.core.apis.abc_actor_mixin import ABCActorMixin
from fabric_cf.actor.core.apis.abc_database import ABCDatabase
from fabric_cf.actor.core.common.constants import Constants
from fabric_cf.actor.core.kernel.kernel_wrapper import KernelWrapper
from fabric_cf.actor.core.kernel.reservation_client import ClientReservationFactory
from fabric_cf.actor.core.kernel.slice import SliceFactory, SliceTypes
from fabric_cf.actor.core.util.id import ID
from fabric_cf.actor.test.base_test_case import BaseTestCase
class KernelTest(BaseTestCase, unittest.TestCase):
from fabric_cf.actor.core.container.globals import Globals
Globals.config_file = "./config/config.test.yaml"
Constants.SUPERBLOCK_LOCATION = './state_recovery.lock'
from fabric_cf.actor.core.container.globals import GlobalsSingleton
GlobalsSingleton.get().start(force_fresh=True)
while not GlobalsSingleton.get().start_completed:
time.sleep(0.001)
base_slices_count = 8
base_client_slices_count = 4
base_inventory_slices_count = 4
base_res_count = 8
def enforceReservationExistsInDatabase(self, *, db: ABCDatabase, rid: ID):
res = db.get_reservations(rid=rid)
self.assertIsNotNone(res)
self.assertEqual(len(res), 1)
def enforceReservationNotInDatabase(self, *, db: ABCDatabase, rid: ID):
res = db.get_reservations(rid=rid)
self.assertIsNone(res)
self.assertEqual(len(res), 0)
def enforceSliceExistsInDatabase(self, *, db: ABCDatabase, slice_id: ID):
slice_obj = db.get_slices(slice_id=slice_id)
self.assertIsNotNone(slice_obj)
self.assertEqual(len(slice_obj), 1)
def enforceSliceNotInDatabase(self, *, db: ABCDatabase, slice_id: ID):
slice_obj = db.get_slices(slice_id=slice_id)
self.assertIsNotNone(slice_obj)
self.assertEqual(len(slice_obj), 0)
def get_kernel_wrapper(self, *, actor: ABCActorMixin) -> KernelWrapper:
wrapper = KernelWrapper(actor=actor, plugin=actor.get_plugin(), policy=actor.get_policy())
return wrapper
def prepare_actor(self):
db = self.get_container_database()
db.reset_db()
actor = self.get_actor()
db.remove_actor_database(actor_name=actor.get_name())
db.add_actor(actor=actor)
actor.actor_added()
return actor
def test_a_register_reservation(self):
actor = self.prepare_actor()
kernel = self.get_kernel_wrapper(actor=actor)
db = actor.get_plugin().get_database()
slice_obj = SliceFactory.create(slice_id=ID(), name="test_slice")
kernel.register_slice(slice_object=slice_obj)
self.base_slices_count += 1
for i in range(10):
rid = ID()
reservation = ClientReservationFactory.create(rid=rid, slice_object=slice_obj)
kernel.register_reservation(reservation=reservation)
self.assertIsNotNone(kernel.get_reservation(rid=rid))
self.enforceReservationExistsInDatabase(db=db, rid=rid)
failed = False
try:
kernel.register_reservation(reservation=reservation)
except Exception:
failed = True
self.assertTrue(failed)
self.assertIsNotNone(kernel.get_reservation(rid=rid))
self.enforceReservationExistsInDatabase(db=db, rid=rid)
def test_b_register_slice_client(self):
actor = self.prepare_actor()
kernel = self.get_kernel_wrapper(actor=actor)
db = actor.get_plugin().get_database()
slices = kernel.get_slices()
self.assertIsNotNone(slices)
self.assertEqual(0, len(slices))
for i in range(10):
slice_obj = SliceFactory.create(slice_id=ID(), name="Slice:{}".format(i))
slice_obj.set_client()
kernel.register_slice(slice_object=slice_obj)
check = kernel.get_slice(slice_id=slice_obj.get_slice_id())
self.assertIsNotNone(check)
self.assertEqual(check, slice_obj)
slices = kernel.get_slices()
self.assertIsNotNone(slices)
self.assertEqual(i + 1, len(slices))
slices = kernel.get_client_slices()
self.assertIsNotNone(slices)
self.assertEqual(i + 1, len(slices))
self.enforceSliceExistsInDatabase(db=db, slice_id=slice_obj.get_slice_id())
slices = db.get_slices()
self.assertIsNotNone(slice_obj)
self.assertEqual(i + 1, len(slices))
slices = db.get_slices(slc_type=[SliceTypes.ClientSlice, SliceTypes.BrokerClientSlice])
self.assertIsNotNone(slice_obj)
self.assertEqual(i + 1, len(slices))
failed = False
try:
kernel.register_slice(slice_object=slice_obj)
except Exception:
failed = True
self.assertTrue(failed)
ss = kernel.get_slice(slice_id=slice_obj.get_slice_id())
self.assertIsNotNone(ss)
self.assertEqual(ss, slice_obj)
def test_c_register_slice_inventory(self):
actor = self.prepare_actor()
kernel = self.get_kernel_wrapper(actor=actor)
db = actor.get_plugin().get_database()
slices = kernel.get_slices()
self.assertIsNotNone(slices)
self.assertEqual(0, len(slices))
for i in range(10):
slice_obj = SliceFactory.create(slice_id=ID(), name="Slice:{}".format(i))
slice_obj.set_inventory(value=True)
kernel.register_slice(slice_object=slice_obj)
check = kernel.get_slice(slice_id=slice_obj.get_slice_id())
self.assertIsNotNone(check)
self.assertEqual(check, slice_obj)
slices = kernel.get_slices()
self.assertIsNotNone(slices)
self.assertEqual(i + 1, len(slices))
slices = kernel.get_inventory_slices()
self.assertIsNotNone(slices)
self.assertEqual(i + 1, len(slices))
self.enforceSliceExistsInDatabase(db=db, slice_id=slice_obj.get_slice_id())
slices = db.get_slices()
self.assertIsNotNone(slice_obj)
self.assertEqual(i + 1, len(slices))
slices = db.get_slices(slc_type=[SliceTypes.InventorySlice])
self.assertIsNotNone(slice_obj)
self.assertEqual(i + 1, len(slices))
failed = False
try:
kernel.register_slice(slice_object=slice_obj)
except Exception:
failed = True
self.assertTrue(failed)
ss = kernel.get_slice(slice_id=slice_obj.get_slice_id())
self.assertIsNotNone(ss)
self.assertEqual(ss, slice_obj)
def test_d_remove_slice_empty(self):
actor = self.prepare_actor()
kernel = self.get_kernel_wrapper(actor=actor)
db = actor.get_plugin().get_database()
slice_list = []
for i in range(10):
slice_obj = SliceFactory.create(slice_id=ID(), name="Slice:{}".format(i))
slice_obj.set_inventory(value=True)
kernel.register_slice(slice_object=slice_obj)
self.enforceSliceExistsInDatabase(db=db, slice_id=slice_obj.get_slice_id())
slice_list.append(slice_obj)
for s in slice_list:
kernel.remove_slice(slice_id=s.get_slice_id())
check = kernel.get_slice(slice_id=s.get_slice_id())
self.assertIsNone(check)
self.enforceSliceNotInDatabase(db=db, slice_id=s.get_slice_id())
kernel.remove_slice(slice_id=s.get_slice_id())
self.enforceSliceNotInDatabase(db=db, slice_id=s.get_slice_id())
kernel.register_slice(slice_object=s)
self.enforceSliceExistsInDatabase(db=db, slice_id=s.get_slice_id())
self.assertIsNotNone(kernel.get_slice(slice_id=s.get_slice_id()))
def test_e_re_register_reservation(self):
actor = self.prepare_actor()
kernel = self.get_kernel_wrapper(actor=actor)
db = actor.get_plugin().get_database()
slice_obj = SliceFactory.create(slice_id=ID(), name="test_slice")
res_list = []
for i in range(10):
res = ClientReservationFactory.create(rid=ID())
res.set_slice(slice_object=slice_obj)
res_list.append(res)
failed = False
try:
kernel.register_reservation(reservation=res)
except Exception:
failed = True
self.assertTrue(failed)
self.assertIsNone(kernel.get_reservation(rid=res.get_reservation_id()))
self.enforceReservationNotInDatabase(db=db, rid=res.get_reservation_id())
self.assertIsNone(kernel.get_slice(slice_id=slice_obj.get_slice_id()))
self.enforceSliceNotInDatabase(db=db, slice_id=slice_obj.get_slice_id())
kernel.register_slice(slice_object=slice_obj)
for res in res_list:
self.assertIsNone(kernel.get_reservation(rid=res.get_reservation_id()))
self.enforceReservationNotInDatabase(db=db, rid=res.get_reservation_id())
failed = False
try:
kernel.re_register_reservation(reservation=res)
except Exception:
failed = True
self.assertTrue(failed)
self.assertIsNone(kernel.get_reservation(rid=res.get_reservation_id()))
self.enforceReservationNotInDatabase(db=db, rid=res.get_reservation_id())
kernel.register_reservation(reservation=res)
self.assertIsNotNone(kernel.get_reservation(rid=res.get_reservation_id()))
self.enforceReservationExistsInDatabase(db=db, rid=res.get_reservation_id())
kernel2 = self.get_kernel_wrapper(actor=actor)
kernel2.re_register_slice(slice_object=slice_obj)
for res in res_list:
self.assertIsNone(kernel2.get_reservation(rid=res.get_reservation_id()))
self.enforceReservationExistsInDatabase(db=db, rid=res.get_reservation_id())
failed = False
try:
kernel.re_register_reservation(reservation=res)
except Exception:
failed = True
self.assertTrue(failed)
self.assertIsNone(kernel2.get_reservation(rid=res.get_reservation_id()))
self.enforceReservationExistsInDatabase(db=db, rid=res.get_reservation_id())
kernel2.register_reservation(reservation=res)
self.assertIsNotNone(kernel2.get_reservation(rid=res.get_reservation_id()))
self.enforceReservationExistsInDatabase(db=db, rid=res.get_reservation_id())
def test_f_re_register_slice(self):
actor = self.prepare_actor()
kernel = self.get_kernel_wrapper(actor=actor)
db = actor.get_plugin().get_database()
slice_list = []
for i in range(10):
slice_obj = SliceFactory.create(slice_id=ID(), name="Slice:{}".format(i))
slice_list.append(slice_obj)
failed = False
try:
kernel.re_register_slice(slice_object=slice_obj)
except Exception:
failed = True
self.assertTrue(failed)
self.enforceSliceNotInDatabase(db=db, slice_id=slice_obj.get_slice_id())
self.assertIsNone(kernel.get_slice(slice_id=slice_obj.get_slice_id()))
kernel.register_slice(slice_object=slice_obj)
self.enforceSliceExistsInDatabase(db=db, slice_id=slice_obj.get_slice_id())
kernel2 = self.get_kernel_wrapper(actor=actor)
for s in slice_list:
check = kernel2.get_slice(slice_id=s.get_slice_id())
self.assertIsNone(check)
self.enforceSliceExistsInDatabase(db=db, slice_id=s.get_slice_id())
failed = False
try:
kernel2.register_slice(slice_object=s)
except Exception:
failed = True
self.assertTrue(failed)
check = kernel2.get_slice(slice_id=s.get_slice_id())
self.assertIsNone(check)
kernel2.re_register_slice(slice_object=s)
check = kernel2.get_slice(slice_id=s.get_slice_id())
self.assertIsNotNone(check)
self.enforceSliceExistsInDatabase(db=db, slice_id=s.get_slice_id())
def test_g_unregister_reservation(self):
actor = self.prepare_actor()
kernel = self.get_kernel_wrapper(actor=actor)
db = actor.get_plugin().get_database()
slice_obj = SliceFactory.create(slice_id=ID(), name="testslice")
kernel.register_slice(slice_object=slice_obj)
res_list = []
for i in range(10):
res = ClientReservationFactory.create(rid=ID())
res.set_slice(slice_object=slice_obj)
res_list.append(res)
self.assertIsNone(kernel.get_reservation(rid=res.get_reservation_id()))
self.enforceReservationNotInDatabase(db=db, rid=res.get_reservation_id())
failed = False
try:
kernel.unregister_reservation(rid=res.get_reservation_id())
except Exception:
failed = True
self.assertTrue(failed)
self.assertIsNone(kernel.get_reservation(rid=res.get_reservation_id()))
self.enforceReservationNotInDatabase(db=db, rid=res.get_reservation_id())
kernel.register_reservation(reservation=res)
self.assertIsNotNone(kernel.get_reservation(rid=res.get_reservation_id()))
self.enforceReservationExistsInDatabase(db=db, rid=res.get_reservation_id())
for res in res_list:
self.assertIsNotNone(kernel.get_slice(slice_id=slice_obj.get_slice_id()))
self.enforceSliceExistsInDatabase(db=db, slice_id=slice_obj.get_slice_id())
res.fail(message="forced")
kernel.unregister_reservation(rid=res.get_reservation_id())
self.assertIsNone(kernel.get_reservation(rid=res.get_reservation_id()))
self.enforceReservationExistsInDatabase(db=db, rid=res.get_reservation_id())
check = kernel.get_reservations(slice_id=slice_obj.get_slice_id())
self.assertIsNotNone(check)
self.assertEqual(0, len(check))
def test_h_unregister_slice_empty(self):
actor = self.prepare_actor()
kernel = self.get_kernel_wrapper(actor=actor)
db = actor.get_plugin().get_database()
slices = []
for i in range(10):
slice_obj = SliceFactory.create(slice_id=ID(), name="Slice:{}".format(i))
slices.append(slice_obj)
kernel.register_slice(slice_object=slice_obj)
for s in slices:
kernel.unregister_slice(slice_id=s.get_slice_id())
check = kernel.get_slice(slice_id=s.get_slice_id())
self.assertIsNone(check)
self.enforceSliceExistsInDatabase(db=db, slice_id=s.get_slice_id())
failed = False
try:
kernel.unregister_slice(slice_id=s.get_slice_id())
except Exception:
failed = True
self.assertTrue(failed)
self.enforceSliceExistsInDatabase(db=db, slice_id=s.get_slice_id())
def test_i_unregister_slice_full(self):
actor = self.prepare_actor()
kernel = self.get_kernel_wrapper(actor=actor)
db = actor.get_plugin().get_database()
slice_obj = SliceFactory.create(slice_id=ID(), name="testslice")
kernel.register_slice(slice_object=slice_obj)
res_list = []
for i in range(10):
res = ClientReservationFactory.create(rid=ID())
res.set_slice(slice_object=slice_obj)
res_list.append(res)
kernel.register_reservation(reservation=res)
for r in res_list:
self.assertIsNotNone(kernel.get_slice(slice_id=slice_obj.get_slice_id()))
self.enforceSliceExistsInDatabase(db=db, slice_id=slice_obj.get_slice_id())
failed = False
try:
kernel.unregister_slice(slice_id=slice_obj.get_slice_id())
except Exception:
failed = True
self.assertTrue(failed)
self.assertIsNotNone(kernel.get_slice(slice_id=slice_obj.get_slice_id()))
self.enforceSliceExistsInDatabase(db=db, slice_id=slice_obj.get_slice_id())
r.fail(message="failed")
kernel.unregister_reservation(rid=r.get_reservation_id())
kernel.unregister_slice(slice_id=slice_obj.get_slice_id())
self.assertIsNone(kernel.get_slice(slice_id=slice_obj.get_slice_id()))
self.enforceSliceExistsInDatabase(db=db, slice_id=slice_obj.get_slice_id())
kernel.re_register_slice(slice_object=slice_obj)
self.assertIsNotNone(kernel.get_slice(slice_id=slice_obj.get_slice_id()))
self.enforceSliceExistsInDatabase(db=db, slice_id=slice_obj.get_slice_id())
def test_j_register_reservation_error(self):
actor = self.prepare_actor()
kernel = self.get_kernel_wrapper(actor=actor)
db = actor.get_plugin().get_database()
slice_obj = SliceFactory.create(slice_id=ID(), name="test_slice")
kernel.register_slice(slice_object=slice_obj)
# Set the database to null to cause errors while registering slices.
actor.get_plugin().set_database(db=None)
for i in range(10):
rid = ID()
reservation = ClientReservationFactory.create(rid=rid, slice_object=slice_obj)
failed = False
try:
kernel.register_reservation(reservation=reservation)
except Exception:
failed = True
self.assertTrue(failed)
self.assertIsNone(kernel.get_reservation(rid=rid))
self.enforceReservationNotInDatabase(db=db, rid=rid)
def test_k_register_slice_error(self):
actor = self.prepare_actor()
kernel = self.get_kernel_wrapper(actor=actor)
db = actor.get_plugin().get_database()
actor.get_plugin().set_database(db=None)
for i in range(10):
slice_obj = SliceFactory.create(slice_id=ID(), name="Slice:{}".format(i))
failed = False
try:
kernel.register_slice(slice_object=slice_obj)
except Exception:
failed = True
self.assertTrue(failed)
check = kernel.get_slice(slice_id=slice_obj.get_slice_id())
self.assertIsNone(check)
self.enforceSliceNotInDatabase(db=db, slice_id=slice_obj.get_slice_id())
| 38.347409 | 99 | 0.658191 | 2,373 | 19,979 | 5.298778 | 0.100716 | 0.058454 | 0.034993 | 0.032209 | 0.806108 | 0.781692 | 0.763242 | 0.757038 | 0.723795 | 0.714729 | 0 | 0.004319 | 0.246709 | 19,979 | 520 | 100 | 38.421154 | 0.831163 | 0.059963 | 0 | 0.788618 | 0 | 0 | 0.008211 | 0.002453 | 0 | 0 | 0 | 0 | 0.214092 | 1 | 0.04607 | false | 0 | 0.03252 | 0 | 0.097561 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
fc042531b925624fcdf4922c736bcf69e246d355 | 36,528 | py | Python | mayan/apps/linking/tests/test_views.py | CMU-313/fall-2021-hw2-451-unavailable-for-legal-reasons | 0e4e919fd2e1ded6711354a0330135283e87f8c7 | [
"Apache-2.0"
] | 2 | 2021-09-12T19:41:19.000Z | 2021-09-12T19:41:20.000Z | mayan/apps/linking/tests/test_views.py | CMU-313/fall-2021-hw2-451-unavailable-for-legal-reasons | 0e4e919fd2e1ded6711354a0330135283e87f8c7 | [
"Apache-2.0"
] | 37 | 2021-09-13T01:00:12.000Z | 2021-10-02T03:54:30.000Z | mayan/apps/linking/tests/test_views.py | CMU-313/fall-2021-hw2-451-unavailable-for-legal-reasons | 0e4e919fd2e1ded6711354a0330135283e87f8c7 | [
"Apache-2.0"
] | 1 | 2021-09-22T13:17:30.000Z | 2021-09-22T13:17:30.000Z | from mayan.apps.documents.permissions import (
permission_document_type_edit, permission_document_view
)
from mayan.apps.documents.tests.base import GenericDocumentViewTestCase
from mayan.apps.testing.tests.base import GenericViewTestCase
from ..events import event_smart_link_created, event_smart_link_edited
from ..models import SmartLink
from ..permissions import (
permission_smart_link_create, permission_smart_link_delete,
permission_smart_link_edit, permission_smart_link_view
)
from .literals import TEST_SMART_LINK_LABEL, TEST_SMART_LINK_LABEL_EDITED
from .mixins import (
DocumentTypeAddRemoveSmartLinkViewTestMixin,
SmartLinkConditionViewTestMixin, SmartLinkDocumentTypeViewTestMixin,
SmartLinkDocumentViewTestMixin, SmartLinkTestMixin,
SmartLinkViewTestMixin
)
class DocumentTypeAddRemoveSmartLinkViewTestCase(
DocumentTypeAddRemoveSmartLinkViewTestMixin, SmartLinkTestMixin,
GenericDocumentViewTestCase
):
auto_upload_test_document = False
def setUp(self):
super().setUp()
self._create_test_smart_link()
def test_document_type_smart_link_add_remove_get_view_no_permission(self):
self.test_document_type.smart_links.add(self.test_smart_link)
self._clear_events()
response = self._request_test_document_type_smart_link_add_remove_get_view()
self.assertNotContains(
response=response, text=str(self.test_document_type),
status_code=404
)
self.assertNotContains(
response=response, text=str(self.test_smart_link),
status_code=404
)
events = self._get_test_events()
self.assertEqual(events.count(), 0)
def test_document_type_smart_link_add_remove_get_view_with_document_type_access(self):
self.test_document_type.smart_links.add(self.test_smart_link)
self.grant_access(
obj=self.test_document_type,
permission=permission_document_type_edit
)
self._clear_events()
response = self._request_test_document_type_smart_link_add_remove_get_view()
self.assertContains(
response=response, text=str(self.test_document_type),
status_code=200
)
self.assertNotContains(
response=response, text=str(self.test_smart_link),
status_code=200
)
events = self._get_test_events()
self.assertEqual(events.count(), 0)
def test_document_type_smart_link_add_remove_get_view_with_smart_link_access(self):
self.test_document_type.smart_links.add(self.test_smart_link)
self.grant_access(
obj=self.test_smart_link,
permission=permission_smart_link_edit
)
self._clear_events()
response = self._request_test_document_type_smart_link_add_remove_get_view()
self.assertNotContains(
response=response, text=str(self.test_document_type),
status_code=404
)
self.assertNotContains(
response=response, text=str(self.test_smart_link),
status_code=404
)
events = self._get_test_events()
self.assertEqual(events.count(), 0)
def test_document_type_smart_link_add_remove_get_view_with_full_access(self):
self.test_document_type.smart_links.add(self.test_smart_link)
self.grant_access(
obj=self.test_document_type,
permission=permission_document_type_edit
)
self.grant_access(
obj=self.test_smart_link,
permission=permission_smart_link_edit
)
self._clear_events()
response = self._request_test_document_type_smart_link_add_remove_get_view()
self.assertContains(
response=response, text=str(self.test_document_type),
status_code=200
)
self.assertContains(
response=response, text=str(self.test_smart_link),
status_code=200
)
events = self._get_test_events()
self.assertEqual(events.count(), 0)
def test_document_type_smart_link_add_view_no_permission(self):
self._clear_events()
response = self._request_test_document_type_smart_link_add_view()
self.assertEqual(response.status_code, 404)
self.assertTrue(
self.test_smart_link not in self.test_document_type.smart_links.all()
)
events = self._get_test_events()
self.assertEqual(events.count(), 0)
def test_document_type_smart_link_add_view_with_document_type_access(self):
self.grant_access(
obj=self.test_document_type,
permission=permission_document_type_edit
)
self._clear_events()
response = self._request_test_document_type_smart_link_add_view()
self.assertEqual(response.status_code, 200)
self.assertTrue(
self.test_smart_link not in self.test_document_type.smart_links.all()
)
events = self._get_test_events()
self.assertEqual(events.count(), 0)
def test_document_type_smart_link_add_view_with_smart_link_access(self):
self.grant_access(
obj=self.test_smart_link,
permission=permission_smart_link_edit
)
self._clear_events()
response = self._request_test_document_type_smart_link_add_view()
self.assertEqual(response.status_code, 404)
self.assertTrue(
self.test_smart_link not in self.test_document_type.smart_links.all()
)
events = self._get_test_events()
self.assertEqual(events.count(), 0)
def test_document_type_smart_link_add_view_with_full_access(self):
self.grant_access(
obj=self.test_document_type,
permission=permission_document_type_edit
)
self.grant_access(
obj=self.test_smart_link,
permission=permission_smart_link_edit
)
self._clear_events()
response = self._request_test_document_type_smart_link_add_view()
self.assertEqual(response.status_code, 302)
self.assertTrue(
self.test_smart_link in self.test_document_type.smart_links.all()
)
events = self._get_test_events()
self.assertEqual(events.count(), 1)
self.assertEqual(events[0].action_object, self.test_document_type)
self.assertEqual(events[0].actor, self._test_case_user)
self.assertEqual(events[0].target, self.test_smart_link)
self.assertEqual(events[0].verb, event_smart_link_edited.id)
def test_document_type_smart_link_remove_view_no_permission(self):
    """Without any access the remove view must return 404 and the smart
    link must stay attached to the document type; no event is committed.
    """
    self.test_document_type.smart_links.add(self.test_smart_link)

    self._clear_events()

    response = self._request_test_document_type_smart_link_remove_view()
    self.assertEqual(response.status_code, 404)

    # assertIn gives a clearer failure message than
    # assertTrue(... in ...).
    self.assertIn(
        self.test_smart_link, self.test_document_type.smart_links.all()
    )

    events = self._get_test_events()
    self.assertEqual(events.count(), 0)
def test_document_type_smart_link_remove_view_with_document_type_access(self):
    """Document type edit access alone renders the view (200) but must
    not detach the smart link; no event is committed.
    """
    self.test_document_type.smart_links.add(self.test_smart_link)

    self.grant_access(
        obj=self.test_document_type,
        permission=permission_document_type_edit
    )

    self._clear_events()

    response = self._request_test_document_type_smart_link_remove_view()
    self.assertEqual(response.status_code, 200)

    # assertIn gives a clearer failure message than
    # assertTrue(... in ...).
    self.assertIn(
        self.test_smart_link, self.test_document_type.smart_links.all()
    )

    events = self._get_test_events()
    self.assertEqual(events.count(), 0)
def test_document_type_smart_link_remove_view_with_smart_link_access(self):
    """Smart link edit access alone must hide the view (404) and must
    not detach the smart link; no event is committed.
    """
    self.test_document_type.smart_links.add(self.test_smart_link)

    self.grant_access(
        obj=self.test_smart_link,
        permission=permission_smart_link_edit
    )

    self._clear_events()

    response = self._request_test_document_type_smart_link_remove_view()
    self.assertEqual(response.status_code, 404)

    # assertIn gives a clearer failure message than
    # assertTrue(... in ...).
    self.assertIn(
        self.test_smart_link, self.test_document_type.smart_links.all()
    )

    events = self._get_test_events()
    self.assertEqual(events.count(), 0)
def test_document_type_smart_link_remove_view_with_full_access(self):
    """With edit access to both objects the remove view redirects (302),
    detaches the smart link and commits exactly one edited event.
    """
    self.test_document_type.smart_links.add(self.test_smart_link)

    self.grant_access(
        obj=self.test_document_type,
        permission=permission_document_type_edit
    )
    self.grant_access(
        obj=self.test_smart_link,
        permission=permission_smart_link_edit
    )

    self._clear_events()

    response = self._request_test_document_type_smart_link_remove_view()
    self.assertEqual(response.status_code, 302)

    # assertNotIn gives a clearer failure message than
    # assertTrue(... not in ...).
    self.assertNotIn(
        self.test_smart_link, self.test_document_type.smart_links.all()
    )

    events = self._get_test_events()
    self.assertEqual(events.count(), 1)

    self.assertEqual(events[0].action_object, self.test_document_type)
    self.assertEqual(events[0].actor, self._test_case_user)
    self.assertEqual(events[0].target, self.test_smart_link)
    self.assertEqual(events[0].verb, event_smart_link_edited.id)
class SmartLinkViewTestCase(
    SmartLinkTestMixin, SmartLinkViewTestMixin, GenericViewTestCase
):
    """Access control tests for the smart link create, delete, edit and
    list views.
    """
    def test_smart_link_create_view_no_permission(self):
        """Creation is forbidden (403) without the create permission and
        no smart link or event is produced."""
        self._clear_events()

        resp = self._request_test_smart_link_create_view()
        self.assertEqual(resp.status_code, 403)

        self.assertEqual(SmartLink.objects.count(), 0)

        self.assertEqual(self._get_test_events().count(), 0)

    def test_smart_link_create_view_with_permission(self):
        """Holding the create permission allows creation and commits a
        single created event targeting the new smart link."""
        self.grant_permission(permission=permission_smart_link_create)

        self._clear_events()

        resp = self._request_test_smart_link_create_view()
        self.assertEqual(resp.status_code, 302)

        self.assertEqual(SmartLink.objects.count(), 1)
        self.assertEqual(
            SmartLink.objects.first().label, TEST_SMART_LINK_LABEL
        )

        events = self._get_test_events()
        self.assertEqual(events.count(), 1)

        event = events[0]
        self.assertEqual(event.action_object, None)
        self.assertEqual(event.actor, self._test_case_user)
        self.assertEqual(event.target, self.test_smart_link)
        self.assertEqual(event.verb, event_smart_link_created.id)

    def test_smart_link_delete_view_no_permission(self):
        """Deletion is hidden (404) without access; the object survives."""
        self._create_test_smart_link()

        self._clear_events()

        resp = self._request_test_smart_link_delete_view()
        self.assertEqual(resp.status_code, 404)

        self.assertEqual(SmartLink.objects.count(), 1)

        self.assertEqual(self._get_test_events().count(), 0)

    def test_smart_link_delete_view_with_access(self):
        """Delete access on the object allows deletion; no event remains
        for the removed instance."""
        self._create_test_smart_link()
        self.grant_access(
            obj=self.test_smart_link, permission=permission_smart_link_delete
        )

        self._clear_events()

        resp = self._request_test_smart_link_delete_view()
        self.assertEqual(resp.status_code, 302)

        self.assertEqual(SmartLink.objects.count(), 0)

        self.assertEqual(self._get_test_events().count(), 0)

    def test_smart_link_edit_view_no_permission(self):
        """Editing is hidden (404) without access; the label is kept."""
        self._create_test_smart_link()

        self._clear_events()

        resp = self._request_test_smart_link_edit_view()
        self.assertEqual(resp.status_code, 404)

        self.test_smart_link.refresh_from_db()
        self.assertEqual(self.test_smart_link.label, TEST_SMART_LINK_LABEL)

        self.assertEqual(self._get_test_events().count(), 0)

    def test_smart_link_edit_view_with_access(self):
        """Edit access allows updating the label and commits exactly one
        edited event targeting the smart link."""
        self._create_test_smart_link()
        self.grant_access(
            obj=self.test_smart_link, permission=permission_smart_link_edit
        )

        self._clear_events()

        resp = self._request_test_smart_link_edit_view()
        self.assertEqual(resp.status_code, 302)

        self.test_smart_link.refresh_from_db()
        self.assertEqual(
            self.test_smart_link.label, TEST_SMART_LINK_LABEL_EDITED
        )

        events = self._get_test_events()
        self.assertEqual(events.count(), 1)

        event = events[0]
        self.assertEqual(event.action_object, None)
        self.assertEqual(event.actor, self._test_case_user)
        self.assertEqual(event.target, self.test_smart_link)
        self.assertEqual(event.verb, event_smart_link_edited.id)

    def test_smart_link_list_view_no_permission(self):
        """The list view renders (200) but omits smart links the user
        cannot view."""
        self._create_test_smart_link()

        self._clear_events()

        resp = self._request_test_smart_link_list_view()
        self.assertNotContains(
            response=resp, text=str(self.test_smart_link), status_code=200
        )

        self.assertEqual(self._get_test_events().count(), 0)

    def test_smart_link_list_view_with_access(self):
        """View access makes the smart link appear in the list."""
        self._create_test_smart_link()
        self.grant_access(
            obj=self.test_smart_link, permission=permission_smart_link_view
        )

        self._clear_events()

        resp = self._request_test_smart_link_list_view()
        self.assertContains(
            response=resp, text=str(self.test_smart_link), status_code=200
        )

        self.assertEqual(self._get_test_events().count(), 0)
class SmartLinkConditionViewTestCase(
    SmartLinkConditionViewTestMixin, SmartLinkTestMixin,
    SmartLinkViewTestMixin, GenericViewTestCase
):
    """Access control tests for the smart link condition create, delete,
    edit and list views.
    """
    def setUp(self):
        super().setUp()
        self._create_test_smart_link()

    def test_smart_link_condition_create_view_no_permission(self):
        """Condition creation is hidden (404) without edit access to the
        smart link; the condition count does not change."""
        count_before = self.test_smart_link.conditions.count()

        self._clear_events()

        resp = self._request_test_smart_link_condition_create_view()
        self.assertEqual(resp.status_code, 404)

        self.assertEqual(
            self.test_smart_link.conditions.count(), count_before
        )

        self.assertEqual(self._get_test_events().count(), 0)

    def test_smart_link_condition_create_view_with_access(self):
        """Edit access on the smart link allows adding a condition and
        commits one edited event referencing the new condition."""
        self.grant_access(
            obj=self.test_smart_link, permission=permission_smart_link_edit
        )
        count_before = self.test_smart_link.conditions.count()

        self._clear_events()

        resp = self._request_test_smart_link_condition_create_view()
        self.assertEqual(resp.status_code, 302)

        self.assertEqual(
            self.test_smart_link.conditions.count(), count_before + 1
        )

        events = self._get_test_events()
        self.assertEqual(events.count(), 1)

        event = events[0]
        self.assertEqual(event.action_object, self.test_smart_link_condition)
        self.assertEqual(event.actor, self._test_case_user)
        self.assertEqual(event.target, self.test_smart_link)
        self.assertEqual(event.verb, event_smart_link_edited.id)

    def test_smart_link_condition_delete_view_no_permission(self):
        """Condition deletion is hidden (404) without edit access and the
        condition survives."""
        self._create_test_smart_link_condition()
        count_before = self.test_smart_link.conditions.count()

        self._clear_events()

        resp = self._request_test_smart_link_condition_delete_view()
        self.assertEqual(resp.status_code, 404)

        self.assertEqual(
            self.test_smart_link.conditions.count(), count_before
        )

        self.assertEqual(self._get_test_events().count(), 0)

    def test_smart_link_condition_delete_view_with_access(self):
        """Edit access allows deleting a condition; one edited event is
        committed against the smart link."""
        self._create_test_smart_link_condition()
        self.grant_access(
            obj=self.test_smart_link, permission=permission_smart_link_edit
        )
        count_before = self.test_smart_link.conditions.count()

        self._clear_events()

        resp = self._request_test_smart_link_condition_delete_view()
        self.assertEqual(resp.status_code, 302)

        self.assertEqual(
            self.test_smart_link.conditions.count(), count_before - 1
        )

        events = self._get_test_events()
        self.assertEqual(events.count(), 1)

        event = events[0]
        self.assertEqual(event.action_object, None)
        self.assertEqual(event.actor, self._test_case_user)
        self.assertEqual(event.target, self.test_smart_link)
        self.assertEqual(event.verb, event_smart_link_edited.id)

    def test_smart_link_condition_edit_view_no_permission(self):
        """Condition editing is hidden (404) without edit access; every
        field value stays unchanged."""
        self._create_test_smart_link_condition()

        snapshot = self._model_instance_to_dictionary(
            instance=self.test_smart_link_condition
        )

        self._clear_events()

        resp = self._request_test_smart_link_condition_edit_view()
        self.assertEqual(resp.status_code, 404)

        self.test_smart_link_condition.refresh_from_db()
        self.assertEqual(
            self._model_instance_to_dictionary(
                instance=self.test_smart_link_condition
            ), snapshot
        )

        self.assertEqual(self._get_test_events().count(), 0)

    def test_smart_link_condition_edit_view_with_access(self):
        """Edit access allows updating a condition and commits one edited
        event referencing it."""
        self._create_test_smart_link_condition()
        self.grant_access(
            obj=self.test_smart_link, permission=permission_smart_link_edit
        )

        snapshot = self._model_instance_to_dictionary(
            instance=self.test_smart_link_condition
        )

        self._clear_events()

        resp = self._request_test_smart_link_condition_edit_view()
        self.assertEqual(resp.status_code, 302)

        self.test_smart_link_condition.refresh_from_db()
        self.assertNotEqual(
            self._model_instance_to_dictionary(
                instance=self.test_smart_link_condition
            ), snapshot
        )

        events = self._get_test_events()
        self.assertEqual(events.count(), 1)

        event = events[0]
        self.assertEqual(event.action_object, self.test_smart_link_condition)
        self.assertEqual(event.actor, self._test_case_user)
        self.assertEqual(event.target, self.test_smart_link)
        self.assertEqual(event.verb, event_smart_link_edited.id)

    def test_smart_link_condition_list_view_no_permission(self):
        """The condition list is hidden (404) without edit access."""
        self._create_test_smart_link_condition()

        self._clear_events()

        resp = self._request_test_smart_link_condition_list_view()
        self.assertNotContains(
            response=resp, status_code=404,
            text=self.test_smart_link_condition.smart_link.label
        )

        self.assertEqual(self._get_test_events().count(), 0)

    def test_smart_link_condition_list_view_with_access(self):
        """Edit access renders the condition list showing the smart link
        label."""
        self._create_test_smart_link_condition()
        self.grant_access(
            obj=self.test_smart_link, permission=permission_smart_link_edit
        )

        self._clear_events()

        resp = self._request_test_smart_link_condition_list_view()
        self.assertContains(
            response=resp, status_code=200,
            text=self.test_smart_link_condition.smart_link.label
        )

        self.assertEqual(self._get_test_events().count(), 0)
class SmartLinkDocumentTypeViewTestCase(
    SmartLinkDocumentTypeViewTestMixin, SmartLinkTestMixin,
    GenericDocumentViewTestCase
):
    """Access control tests for the smart link document type setup views
    (add/remove GET page plus the add and remove POST actions).

    Fix: the ``*_no_permission`` POST tests previously granted
    ``permission_document_type_edit``, which made them exact duplicates
    of the ``*_with_document_type_access`` tests and left the true
    no-permission path untested. They now grant nothing, matching every
    other ``no_permission`` test in this module.
    """
    auto_upload_test_document = False

    def setUp(self):
        super().setUp()
        self._create_test_smart_link()

    def test_smart_link_document_type_add_remove_get_view_no_permission(self):
        """Without any access the GET view must 404 and show neither
        object; the relationship count is unchanged."""
        self.test_smart_link.document_types.add(self.test_document_type)

        test_smart_link_document_type_count = self.test_smart_link.document_types.count()

        self._clear_events()

        response = self._request_test_smart_link_document_type_add_remove_get_view()
        self.assertNotContains(
            response=response, text=str(self.test_document_type),
            status_code=404
        )
        self.assertNotContains(
            response=response, text=str(self.test_smart_link),
            status_code=404
        )

        self.assertEqual(
            self.test_smart_link.document_types.count(),
            test_smart_link_document_type_count
        )

        events = self._get_test_events()
        self.assertEqual(events.count(), 0)

    def test_smart_link_document_type_add_remove_get_view_with_document_type_access(self):
        """Document type access alone must still hide the view (404)."""
        self.test_smart_link.document_types.add(self.test_document_type)

        test_smart_link_document_type_count = self.test_smart_link.document_types.count()

        self.grant_access(
            obj=self.test_document_type,
            permission=permission_document_type_edit
        )

        self._clear_events()

        response = self._request_test_smart_link_document_type_add_remove_get_view()
        self.assertNotContains(
            response=response, text=str(self.test_document_type),
            status_code=404
        )
        self.assertNotContains(
            response=response, text=str(self.test_smart_link),
            status_code=404
        )

        self.assertEqual(
            self.test_smart_link.document_types.count(),
            test_smart_link_document_type_count
        )

        events = self._get_test_events()
        self.assertEqual(events.count(), 0)

    def test_smart_link_document_type_add_remove_get_view_with_smart_link_access(self):
        """Smart link access renders the view (200) showing the smart
        link but not the inaccessible document type."""
        self.test_smart_link.document_types.add(self.test_document_type)

        test_smart_link_document_type_count = self.test_smart_link.document_types.count()

        self.grant_access(
            obj=self.test_smart_link,
            permission=permission_smart_link_edit
        )

        self._clear_events()

        response = self._request_test_smart_link_document_type_add_remove_get_view()
        self.assertNotContains(
            response=response, text=str(self.test_document_type),
            status_code=200
        )
        self.assertContains(
            response=response, text=str(self.test_smart_link),
            status_code=200
        )

        self.assertEqual(
            self.test_smart_link.document_types.count(),
            test_smart_link_document_type_count
        )

        events = self._get_test_events()
        self.assertEqual(events.count(), 0)

    def test_smart_link_document_type_add_remove_get_view_with_full_access(self):
        """Access to both objects renders the view showing both."""
        self.test_smart_link.document_types.add(self.test_document_type)

        test_smart_link_document_type_count = self.test_smart_link.document_types.count()

        self.grant_access(
            obj=self.test_document_type,
            permission=permission_document_type_edit
        )
        self.grant_access(
            obj=self.test_smart_link,
            permission=permission_smart_link_edit
        )

        self._clear_events()

        response = self._request_test_smart_link_document_type_add_remove_get_view()
        self.assertContains(
            response=response, text=str(self.test_document_type),
            status_code=200
        )
        self.assertContains(
            response=response, text=str(self.test_smart_link),
            status_code=200
        )

        self.assertEqual(
            self.test_smart_link.document_types.count(),
            test_smart_link_document_type_count
        )

        events = self._get_test_events()
        self.assertEqual(events.count(), 0)

    def test_smart_link_document_type_add_view_no_permission(self):
        """Without any access the add view must 404 and must not attach
        the document type. (Previously this test wrongly granted document
        type edit access; no access is granted now.)
        """
        test_smart_link_document_type_count = self.test_smart_link.document_types.count()

        self._clear_events()

        response = self._request_test_smart_link_document_type_add_view()
        self.assertEqual(response.status_code, 404)

        self.assertEqual(
            self.test_smart_link.document_types.count(),
            test_smart_link_document_type_count
        )

        events = self._get_test_events()
        self.assertEqual(events.count(), 0)

    def test_smart_link_document_type_add_view_with_document_type_access(self):
        """Document type access alone must still hide the view (404)."""
        test_smart_link_document_type_count = self.test_smart_link.document_types.count()

        self.grant_access(
            obj=self.test_document_type, permission=permission_document_type_edit
        )

        self._clear_events()

        response = self._request_test_smart_link_document_type_add_view()
        self.assertEqual(response.status_code, 404)

        self.assertEqual(
            self.test_smart_link.document_types.count(),
            test_smart_link_document_type_count
        )

        events = self._get_test_events()
        self.assertEqual(events.count(), 0)

    def test_smart_link_document_type_add_view_with_smart_link_access(self):
        """Smart link access renders the view (200) but must not attach
        the inaccessible document type."""
        test_smart_link_document_type_count = self.test_smart_link.document_types.count()

        self.grant_access(
            obj=self.test_smart_link, permission=permission_smart_link_edit
        )

        self._clear_events()

        response = self._request_test_smart_link_document_type_add_view()
        self.assertEqual(response.status_code, 200)

        self.assertEqual(
            self.test_smart_link.document_types.count(),
            test_smart_link_document_type_count
        )

        events = self._get_test_events()
        self.assertEqual(events.count(), 0)

    def test_smart_link_document_type_add_view_with_full_access(self):
        """Access to both objects attaches the document type (302) and
        commits one edited event."""
        test_smart_link_document_type_count = self.test_smart_link.document_types.count()

        self.grant_access(
            obj=self.test_document_type,
            permission=permission_document_type_edit
        )
        self.grant_access(
            obj=self.test_smart_link, permission=permission_smart_link_edit
        )

        self._clear_events()

        response = self._request_test_smart_link_document_type_add_view()
        self.assertEqual(response.status_code, 302)

        self.assertEqual(
            self.test_smart_link.document_types.count(),
            test_smart_link_document_type_count + 1
        )

        events = self._get_test_events()
        self.assertEqual(events.count(), 1)

        self.assertEqual(events[0].action_object, self.test_document_type)
        self.assertEqual(events[0].actor, self._test_case_user)
        self.assertEqual(events[0].target, self.test_smart_link)
        self.assertEqual(events[0].verb, event_smart_link_edited.id)

    def test_smart_link_document_type_remove_view_no_permission(self):
        """Without any access the remove view must 404 and must not
        detach the document type. (Previously this test wrongly granted
        document type edit access; no access is granted now.)
        """
        self.test_smart_link.document_types.add(self.test_document_type)

        test_smart_link_document_type_count = self.test_smart_link.document_types.count()

        self._clear_events()

        response = self._request_test_smart_link_document_type_remove_view()
        self.assertEqual(response.status_code, 404)

        self.assertEqual(
            self.test_smart_link.document_types.count(),
            test_smart_link_document_type_count
        )

        events = self._get_test_events()
        self.assertEqual(events.count(), 0)

    def test_smart_link_document_type_remove_view_with_document_type_access(self):
        """Document type access alone must still hide the view (404)."""
        self.test_smart_link.document_types.add(self.test_document_type)

        test_smart_link_document_type_count = self.test_smart_link.document_types.count()

        self.grant_access(
            obj=self.test_document_type, permission=permission_document_type_edit
        )

        self._clear_events()

        response = self._request_test_smart_link_document_type_remove_view()
        self.assertEqual(response.status_code, 404)

        self.assertEqual(
            self.test_smart_link.document_types.count(),
            test_smart_link_document_type_count
        )

        events = self._get_test_events()
        self.assertEqual(events.count(), 0)

    def test_smart_link_document_type_remove_view_with_smart_link_access(self):
        """Smart link access renders the view (200) but must not detach
        the inaccessible document type."""
        self.test_smart_link.document_types.add(self.test_document_type)

        test_smart_link_document_type_count = self.test_smart_link.document_types.count()

        self.grant_access(
            obj=self.test_smart_link, permission=permission_smart_link_edit
        )

        self._clear_events()

        response = self._request_test_smart_link_document_type_remove_view()
        self.assertEqual(response.status_code, 200)

        self.assertEqual(
            self.test_smart_link.document_types.count(),
            test_smart_link_document_type_count
        )

        events = self._get_test_events()
        self.assertEqual(events.count(), 0)

    def test_smart_link_document_type_remove_view_with_full_access(self):
        """Access to both objects detaches the document type (302) and
        commits one edited event."""
        self.test_smart_link.document_types.add(self.test_document_type)

        test_smart_link_document_type_count = self.test_smart_link.document_types.count()

        self.grant_access(
            obj=self.test_document_type,
            permission=permission_document_type_edit
        )
        self.grant_access(
            obj=self.test_smart_link, permission=permission_smart_link_edit
        )

        self._clear_events()

        response = self._request_test_smart_link_document_type_remove_view()
        self.assertEqual(response.status_code, 302)

        self.assertEqual(
            self.test_smart_link.document_types.count(),
            test_smart_link_document_type_count - 1
        )

        events = self._get_test_events()
        self.assertEqual(events.count(), 1)

        self.assertEqual(events[0].action_object, self.test_document_type)
        self.assertEqual(events[0].actor, self._test_case_user)
        self.assertEqual(events[0].target, self.test_smart_link)
        self.assertEqual(events[0].verb, event_smart_link_edited.id)
class SmartLinkDocumentViewTestCase(
    SmartLinkTestMixin, SmartLinkDocumentViewTestMixin,
    GenericDocumentViewTestCase
):
    """Access control tests for the per document smart link instance and
    resolved smart link views.
    """
    auto_upload_test_document = False

    def setUp(self):
        super().setUp()
        self._create_test_document_stub()
        self._create_test_smart_links(add_test_document_type=True)

    def _grant_test_smart_link_view_access(self):
        # Grant the view permission on both test smart links.
        for test_smart_link in self.test_smart_links[:2]:
            self.grant_access(
                obj=test_smart_link, permission=permission_smart_link_view
            )

    def test_document_smart_link_list_view_no_permission(self):
        """Without any access the view is hidden (404)."""
        resp = self._request_test_smart_link_document_instances_view()
        self.assertNotContains(
            response=resp, status_code=404, text=self.test_document.label
        )

    def test_document_smart_link_list_view_with_document_access(self):
        """Document access renders the page but no smart links: the
        label appears only twice in the title and once in the template
        heading (3 total)."""
        self.grant_access(
            obj=self.test_document, permission=permission_document_view
        )

        resp = self._request_test_smart_link_document_instances_view()
        self.assertContains(
            count=3, response=resp, status_code=200,
            text=self.test_document.label
        )

    def test_document_smart_link_list_view_with_smart_link_access(self):
        """Smart link access alone keeps the view hidden (404)."""
        self._grant_test_smart_link_view_access()

        resp = self._request_test_smart_link_document_instances_view()
        self.assertNotContains(
            response=resp, status_code=404, text=self.test_document.label
        )

    def test_document_smart_link_list_view_with_full_access(self):
        """Full access lists both smart links: 3 label occurrences from
        the window title and heading plus one per smart link (5 total)."""
        self._grant_test_smart_link_view_access()
        self.grant_access(
            obj=self.test_document, permission=permission_document_view
        )

        resp = self._request_test_smart_link_document_instances_view()
        self.assertContains(
            count=5, response=resp, status_code=200,
            text=self.test_document.label
        )

    def test_trashed_document_smart_link_list_view_with_full_access(self):
        """Trashing the document hides the view (404) even with full
        access."""
        self._grant_test_smart_link_view_access()
        self.grant_access(
            obj=self.test_document, permission=permission_document_view
        )

        self.test_document.delete()

        resp = self._request_test_smart_link_document_instances_view()
        self.assertEqual(resp.status_code, 404)

    def test_document_resolved_smart_list_with_no_permission(self):
        """Without any access the resolved view is hidden (404)."""
        resp = self._request_test_document_resolved_smart_link_view()
        self.assertEqual(resp.status_code, 404)

    def test_document_resolved_smart_list_with_document_access(self):
        """Document access alone keeps the resolved view hidden (404)."""
        self.grant_access(
            obj=self.test_document, permission=permission_document_view
        )

        resp = self._request_test_document_resolved_smart_link_view()
        self.assertEqual(resp.status_code, 404)

    def test_document_resolved_smart_list_with_smart_link_access(self):
        """Smart link access alone keeps the resolved view hidden (404)."""
        self._grant_test_smart_link_view_access()

        resp = self._request_test_document_resolved_smart_link_view()
        self.assertEqual(resp.status_code, 404)

    def test_document_resolved_smart_list_with_full_access(self):
        """Full access renders the resolved smart link view (200)."""
        self._grant_test_smart_link_view_access()
        self.grant_access(
            obj=self.test_document, permission=permission_document_view
        )

        resp = self._request_test_document_resolved_smart_link_view()
        self.assertEqual(resp.status_code, 200)

    def test_trashed_document_resolved_smart_list_with_full_access(self):
        """Trashing the document hides the resolved view (404) even with
        full access."""
        self._grant_test_smart_link_view_access()
        self.grant_access(
            obj=self.test_document, permission=permission_document_view
        )

        self.test_document.delete()

        resp = self._request_test_document_resolved_smart_link_view()
        self.assertEqual(resp.status_code, 404)
| 34.755471 | 91 | 0.674305 | 4,236 | 36,528 | 5.338055 | 0.029745 | 0.120998 | 0.122457 | 0.084203 | 0.935963 | 0.93446 | 0.930126 | 0.924421 | 0.921148 | 0.916726 | 0 | 0.010101 | 0.251998 | 36,528 | 1,050 | 92 | 34.788571 | 0.81748 | 0.005612 | 0 | 0.73852 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.21301 | 1 | 0.068878 | false | 0 | 0.010204 | 0 | 0.089286 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
fc16ea2dd69308c31e6298f4f01240465f5cd0ff | 85 | py | Python | src/crucyble/types.py | ttymck/crucyble | 6620dfe06d7b1d84bd3ad4392aed9f52f2754435 | [
"MIT"
] | null | null | null | src/crucyble/types.py | ttymck/crucyble | 6620dfe06d7b1d84bd3ad4392aed9f52f2754435 | [
"MIT"
] | null | null | null | src/crucyble/types.py | ttymck/crucyble | 6620dfe06d7b1d84bd3ad4392aed9f52f2754435 | [
"MIT"
] | null | null | null | from typing import Union
def EnumUnion(enum_type):
    """Build the ``Union[int, enum_type]`` alias for values that may be
    given either as raw integers or as members of *enum_type*.
    """
    union_alias = Union[int, enum_type]
    return union_alias
| 17 | 32 | 0.764706 | 13 | 85 | 4.846154 | 0.769231 | 0.253968 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.164706 | 85 | 4 | 33 | 21.25 | 0.887324 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | false | 0 | 0.333333 | 0.333333 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 7 |
fc1f9c1ccc01c697fb0a45667692f6b7744b739a | 133,795 | py | Python | conf/tests/test_redirects.py | uktrade/invest-ui | cd74b16c8db21af30a6365399fe6fbf6442660c1 | [
"MIT"
] | null | null | null | conf/tests/test_redirects.py | uktrade/invest-ui | cd74b16c8db21af30a6365399fe6fbf6442660c1 | [
"MIT"
] | 183 | 2018-06-26T09:23:59.000Z | 2019-08-01T11:22:42.000Z | conf/tests/test_redirects.py | uktrade/invest-ui | cd74b16c8db21af30a6365399fe6fbf6442660c1 | [
"MIT"
] | 1 | 2019-03-09T11:21:28.000Z | 2019-03-09T11:21:28.000Z | import pytest
@pytest.mark.parametrize(
'incoming_url,expected_url',
[
# ar
('/int/ar/index.html', '/ar'),
('/int/ar/setup-guide/index.html', '/ar/setup-guide'),
('/int/ar/setup-guide/establish-address/index.html',
'/ar/setup-guide/establish-address'),
('/int/ar/setup-guide/apply-for-visa/index.html',
'/ar/setup-guide/apply-for-visa'),
('/int/ar/setup-guide/open-a-business-account/index.html',
'/ar/setup-guide/open-a-business-account'),
('/int/ar/setup-guide/how-to-setup/index.html',
'/ar/setup-guide/how-to-setup'),
('/int/ar/setup-guide/understand-tax/index.html',
'/ar/setup-guide/understand-tax'),
('/int/ar/setup-guide/access-talent/index.html',
'/ar/setup-guide/access-talent'),
('/int/ar/setup-guide/understand-legal/index.html',
'/ar/setup-guide/understand-legal'),
('/int/ar/industries/index.html', '/ar/industries'),
('/int/ar/industries/aerospace/index.html',
'/ar/industries/aerospace'),
('/int/ar/industries/advanced-manufacturing/index.html',
'/ar/industries/advanced-manufacturing'),
('/int/ar/industries/food-and-drink-manufacturing/index.html',
'/ar/industries/food-and-drink-manufacturing'),
('/int/ar/industries/food-and-drink-manufacturing/freefrom/index.html',
'/ar/industries/food-and-drink-manufacturing/freefrom'),
('/int/ar/industries/retail/index.html', '/ar/industries/retail'),
('/int/ar/industries/automotive/index.html',
'/ar/industries/automotive'),
('/int/ar/industries/automotive/motorsport/index.html',
'/ar/industries/automotive/motorsport'),
('/int/ar/industries/automotive/research-and-development/index.html',
'/ar/industries/automotive/research-and-development'),
('/int/ar/industries/automotive/supply-chain/index.html',
'/ar/industries/automotive-supply-chain'),
('/int/ar/industries/energy/index.html', '/ar/industries/energy'),
('/int/ar/industries/energy/offshore-wind/index.html',
'/ar/industries/energy/offshore-wind'),
('/int/ar/industries/energy/electrical-networks/index.html',
'/ar/industries/energy/electrical-networks'),
('/int/ar/industries/energy/energy-from-waste/index.html',
'/ar/industries/energy/energy-from-waste'),
('/int/ar/industries/energy/oil-and-gas/index.html',
'/ar/industries/energy/oil-and-gas'),
('/int/ar/industries/energy/nuclear/index.html',
'/ar/industries/energy/nuclear'),
('/int/ar/industries/health-and-life/index.html',
'/ar/industries/health-and-life'),
('/int/ar/industries/health-and-life/medical-technology/index.html',
'/ar/industries/health-and-life/medical-technology'),
(
'/int/ar/industries/health-and-life/pharmaceutical-manufacturing/index.html', # noqa
'/ar/industries/health-and-life/pharmaceutical-manufacturing'
),
('/int/ar/industries/creative/index.html',
'/ar/industries/creative-industries'),
('/int/ar/industries/creative/content-and-production/index.html',
'/ar/industries/creative/content-and-production'),
('/int/ar/industries/creative/digital-media/index.html',
'/ar/industries/creative/digital-media'),
('/int/ar/industries/financial-services/index.html',
'/ar/industries/financial-services'),
('/int/ar/industries/financial-services/asset-management/index.html',
'/ar/industries/financial-services/asset-management'),
(
'/int/ar/industries/financial-services/financial-technology/index.html', # noqa
'/ar/industries/financial-services/financial-technology'),
('/int/ar/industries/technology/index.html',
'/ar/industries/technology'),
('/int/ar/industries/technology/data-analytics/index.html',
'/ar/industries/technology/data-analytics'),
('/int/ar/terms-and-conditions/index.html',
'/ar/terms-and-conditions'),
('/int/ar/privacy-policy/index.html', '/ar/privacy-policy'),
('/int/ar/feedback/index.html', '/ar/feedback'),
('/int/ar/enquiries/confirmation/index.html',
'/ar/enquiries/confirmation'),
('/int/ar/enquiries/error/index.html', '/ar/enquiries/error'),
('/int/ar/contact/index.html', '/ar/contact'),
('/int/ar/location-guide/index.html', '/ar/location-guide'),
('/int/ar/location-guide/confirmation/index.html',
'/ar/location-guide/confirmation'),
# de
('/int/de/index.html', '/de'),
('/int/de/setup-guide/index.html', '/de/setup-guide'),
('/int/de/setup-guide/establish-address/index.html',
'/de/setup-guide/establish-address'),
('/int/de/setup-guide/apply-for-visa/index.html',
'/de/setup-guide/apply-for-visa'),
('/int/de/setup-guide/open-a-business-account/index.html',
'/de/setup-guide/open-a-business-account'),
('/int/de/setup-guide/how-to-setup/index.html',
'/de/setup-guide/how-to-setup'),
('/int/de/setup-guide/understand-tax/index.html',
'/de/setup-guide/understand-tax'),
('/int/de/setup-guide/access-talent/index.html',
'/de/setup-guide/access-talent'),
('/int/de/setup-guide/understand-legal/index.html',
'/de/setup-guide/understand-legal'),
('/int/de/industries/index.html', '/de/industries'),
('/int/de/industries/aerospace/index.html',
'/de/industries/aerospace'),
('/int/de/industries/advanced-manufacturing/index.html',
'/de/industries/advanced-manufacturing'),
('/int/de/industries/food-and-drink-manufacturing/index.html',
'/de/industries/food-and-drink-manufacturing'),
('/int/de/industries/food-and-drink-manufacturing/freefrom/index.html',
'/de/industries/food-and-drink-manufacturing/freefrom'),
('/int/de/industries/retail/index.html', '/de/industries/retail'),
('/int/de/industries/automotive/index.html',
'/de/industries/automotive'),
('/int/de/industries/automotive/motorsport/index.html',
'/de/industries/automotive/motorsport'),
('/int/de/industries/automotive/research-and-development/index.html',
'/de/industries/automotive/research-and-development'),
('/int/de/industries/automotive/supply-chain/index.html',
'/de/industries/automotive-supply-chain'),
('/int/de/industries/energy/index.html', '/de/industries/energy'),
('/int/de/industries/energy/offshore-wind/index.html',
'/de/industries/energy/offshore-wind'),
('/int/de/industries/energy/electrical-networks/index.html',
'/de/industries/energy/electrical-networks'),
('/int/de/industries/energy/energy-from-waste/index.html',
'/de/industries/energy/energy-from-waste'),
('/int/de/industries/energy/oil-and-gas/index.html',
'/de/industries/energy/oil-and-gas'),
('/int/de/industries/energy/nuclear/index.html',
'/de/industries/energy/nuclear'),
('/int/de/industries/health-and-life/index.html',
'/de/industries/health-and-life'),
('/int/de/industries/health-and-life/medical-technology/index.html',
'/de/industries/health-and-life/medical-technology'),
(
'/int/de/industries/health-and-life/pharmaceutical-manufacturing/index.html', # noqa
'/de/industries/health-and-life/pharmaceutical-manufacturing'
),
('/int/de/industries/creative/index.html',
'/de/industries/creative-industries'),
('/int/de/industries/creative/content-and-production/index.html',
'/de/industries/creative/content-and-production'),
('/int/de/industries/creative/digital-media/index.html',
'/de/industries/creative/digital-media'),
('/int/de/industries/financial-services/index.html',
'/de/industries/financial-services'),
('/int/de/industries/financial-services/asset-management/index.html',
'/de/industries/financial-services/asset-management'),
(
'/int/de/industries/financial-services/financial-technology/index.html', # noqa
'/de/industries/financial-services/financial-technology'
),
('/int/de/industries/technology/index.html',
'/de/industries/technology'),
('/int/de/industries/technology/data-analytics/index.html',
'/de/industries/technology/data-analytics'),
('/int/de/terms-and-conditions/index.html',
'/de/terms-and-conditions'),
('/int/de/privacy-policy/index.html', '/de/privacy-policy'),
('/int/de/feedback/index.html', '/de/feedback'),
('/int/de/enquiries/confirmation/index.html',
'/de/enquiries/confirmation'),
('/int/de/enquiries/error/index.html', '/de/enquiries/error'),
('/int/de/contact/index.html', '/de/contact'),
('/int/de/location-guide/index.html', '/de/location-guide'),
('/int/de/location-guide/confirmation/index.html',
'/de/location-guide/confirmation'),
# es
('/int/es/index.html', '/es'),
('/int/es/setup-guide/index.html', '/es/setup-guide'),
('/int/es/setup-guide/establish-address/index.html',
'/es/setup-guide/establish-address'),
('/int/es/setup-guide/apply-for-visa/index.html',
'/es/setup-guide/apply-for-visa'),
('/int/es/setup-guide/open-a-business-account/index.html',
'/es/setup-guide/open-a-business-account'),
('/int/es/setup-guide/how-to-setup/index.html',
'/es/setup-guide/how-to-setup'),
('/int/es/setup-guide/understand-tax/index.html',
'/es/setup-guide/understand-tax'),
('/int/es/setup-guide/access-talent/index.html',
'/es/setup-guide/access-talent'),
('/int/es/setup-guide/understand-legal/index.html',
'/es/setup-guide/understand-legal'),
('/int/es/industries/index.html', '/es/industries'),
(
'/int/es/industries/aerospace/index.html',
'/es/industries/aerospace'
),
('/int/es/industries/advanced-manufacturing/index.html',
'/es/industries/advanced-manufacturing'),
('/int/es/industries/food-and-drink-manufacturing/index.html',
'/es/industries/food-and-drink-manufacturing'),
('/int/es/industries/food-and-drink-manufacturing/freefrom/index.html',
'/es/industries/food-and-drink-manufacturing/freefrom'),
('/int/es/industries/retail/index.html', '/es/industries/retail'),
('/int/es/industries/automotive/index.html',
'/es/industries/automotive'),
('/int/es/industries/automotive/motorsport/index.html',
'/es/industries/automotive/motorsport'),
('/int/es/industries/automotive/research-and-development/index.html',
'/es/industries/automotive/research-and-development'),
('/int/es/industries/automotive/supply-chain/index.html',
'/es/industries/automotive-supply-chain'),
('/int/es/industries/energy/index.html', '/es/industries/energy'),
('/int/es/industries/energy/offshore-wind/index.html',
'/es/industries/energy/offshore-wind'),
('/int/es/industries/energy/electrical-networks/index.html',
'/es/industries/energy/electrical-networks'),
('/int/es/industries/energy/energy-from-waste/index.html',
'/es/industries/energy/energy-from-waste'),
('/int/es/industries/energy/oil-and-gas/index.html',
'/es/industries/energy/oil-and-gas'),
('/int/es/industries/energy/nuclear/index.html',
'/es/industries/energy/nuclear'),
('/int/es/industries/health-and-life/index.html',
'/es/industries/health-and-life'),
('/int/es/industries/health-and-life/medical-technology/index.html',
'/es/industries/health-and-life/medical-technology'),
(
'/int/es/industries/health-and-life/pharmaceutical-manufacturing/index.html', # noqa
'/es/industries/health-and-life/pharmaceutical-manufacturing'
),
('/int/es/industries/creative/index.html',
'/es/industries/creative-industries'),
('/int/es/industries/creative/content-and-production/index.html',
'/es/industries/creative/content-and-production'),
('/int/es/industries/creative/digital-media/index.html',
'/es/industries/creative/digital-media'),
('/int/es/industries/financial-services/index.html',
'/es/industries/financial-services'),
('/int/es/industries/financial-services/asset-management/index.html',
'/es/industries/financial-services/asset-management'),
(
'/int/es/industries/financial-services/financial-technology/index.html', # noqa
'/es/industries/financial-services/financial-technology'
),
('/int/es/industries/technology/index.html',
'/es/industries/technology'),
('/int/es/industries/technology/data-analytics/index.html',
'/es/industries/technology/data-analytics'),
('/int/es/terms-and-conditions/index.html',
'/es/terms-and-conditions'),
('/int/es/privacy-policy/index.html', '/es/privacy-policy'),
('/int/es/feedback/index.html', '/es/feedback'),
('/int/es/enquiries/confirmation/index.html',
'/es/enquiries/confirmation'),
('/int/es/enquiries/error/index.html', '/es/enquiries/error'),
('/int/es/contact/index.html', '/es/contact'),
('/int/es/location-guide/index.html', '/es/location-guide'),
('/int/es/location-guide/confirmation/index.html',
'/es/location-guide/confirmation'),
# fr
('/int/fr/index.html', '/fr'),
('/int/fr/setup-guide/index.html', '/fr/setup-guide'),
('/int/fr/setup-guide/establish-address/index.html',
'/fr/setup-guide/establish-address'),
('/int/fr/setup-guide/apply-for-visa/index.html',
'/fr/setup-guide/apply-for-visa'),
('/int/fr/setup-guide/open-a-business-account/index.html',
'/fr/setup-guide/open-a-business-account'),
('/int/fr/setup-guide/how-to-setup/index.html',
'/fr/setup-guide/how-to-setup'),
('/int/fr/setup-guide/understand-tax/index.html',
'/fr/setup-guide/understand-tax'),
('/int/fr/setup-guide/access-talent/index.html',
'/fr/setup-guide/access-talent'),
('/int/fr/setup-guide/understand-legal/index.html',
'/fr/setup-guide/understand-legal'),
('/int/fr/industries/index.html', '/fr/industries'),
('/int/fr/industries/aerospace/index.html',
'/fr/industries/aerospace'),
('/int/fr/industries/advanced-manufacturing/index.html',
'/fr/industries/advanced-manufacturing'),
('/int/fr/industries/food-and-drink-manufacturing/index.html',
'/fr/industries/food-and-drink-manufacturing'),
('/int/fr/industries/food-and-drink-manufacturing/freefrom/index.html',
'/fr/industries/food-and-drink-manufacturing/freefrom'),
('/int/fr/industries/retail/index.html', '/fr/industries/retail'),
('/int/fr/industries/automotive/index.html',
'/fr/industries/automotive'),
('/int/fr/industries/automotive/motorsport/index.html',
'/fr/industries/automotive/motorsport'),
('/int/fr/industries/automotive/research-and-development/index.html',
'/fr/industries/automotive/research-and-development'),
('/int/fr/industries/automotive/supply-chain/index.html',
'/fr/industries/automotive-supply-chain'),
('/int/fr/industries/energy/index.html', '/fr/industries/energy'),
('/int/fr/industries/energy/offshore-wind/index.html',
'/fr/industries/energy/offshore-wind'),
('/int/fr/industries/energy/electrical-networks/index.html',
'/fr/industries/energy/electrical-networks'),
('/int/fr/industries/energy/energy-from-waste/index.html',
'/fr/industries/energy/energy-from-waste'),
('/int/fr/industries/energy/oil-and-gas/index.html',
'/fr/industries/energy/oil-and-gas'),
('/int/fr/industries/energy/nuclear/index.html',
'/fr/industries/energy/nuclear'),
('/int/fr/industries/health-and-life/index.html',
'/fr/industries/health-and-life'),
('/int/fr/industries/health-and-life/medical-technology/index.html',
'/fr/industries/health-and-life/medical-technology'),
(
'/int/fr/industries/health-and-life/pharmaceutical-manufacturing/index.html', # noqa
'/fr/industries/health-and-life/pharmaceutical-manufacturing'
),
('/int/fr/industries/creative/index.html',
'/fr/industries/creative-industries'),
('/int/fr/industries/creative/content-and-production/index.html',
'/fr/industries/creative/content-and-production'),
('/int/fr/industries/creative/digital-media/index.html',
'/fr/industries/creative/digital-media'),
('/int/fr/industries/financial-services/index.html',
'/fr/industries/financial-services'),
('/int/fr/industries/financial-services/asset-management/index.html',
'/fr/industries/financial-services/asset-management'),
(
'/int/fr/industries/financial-services/financial-technology/index.html', # noqa
'/fr/industries/financial-services/financial-technology'
),
('/int/fr/industries/technology/index.html',
'/fr/industries/technology'),
('/int/fr/industries/technology/data-analytics/index.html',
'/fr/industries/technology/data-analytics'),
('/int/fr/terms-and-conditions/index.html',
'/fr/terms-and-conditions'),
('/int/fr/privacy-policy/index.html', '/fr/privacy-policy'),
('/int/fr/feedback/index.html', '/fr/feedback'),
('/int/fr/enquiries/confirmation/index.html',
'/fr/enquiries/confirmation'),
('/int/fr/enquiries/error/index.html', '/fr/enquiries/error'),
('/int/fr/contact/index.html', '/fr/contact'),
('/int/fr/location-guide/index.html', '/fr/location-guide'),
('/int/fr/location-guide/confirmation/index.html',
'/fr/location-guide/confirmation'),
# ja
('/int/ja/index.html', '/ja'),
('/int/ja/setup-guide/index.html', '/ja/setup-guide'),
('/int/ja/setup-guide/establish-address/index.html',
'/ja/setup-guide/establish-address'),
('/int/ja/setup-guide/apply-for-visa/index.html',
'/ja/setup-guide/apply-for-visa'),
('/int/ja/setup-guide/open-a-business-account/index.html',
'/ja/setup-guide/open-a-business-account'),
('/int/ja/setup-guide/how-to-setup/index.html',
'/ja/setup-guide/how-to-setup'),
('/int/ja/setup-guide/understand-tax/index.html',
'/ja/setup-guide/understand-tax'),
('/int/ja/setup-guide/access-talent/index.html',
'/ja/setup-guide/access-talent'),
('/int/ja/setup-guide/understand-legal/index.html',
'/ja/setup-guide/understand-legal'),
('/int/ja/industries/index.html', '/ja/industries'),
('/int/ja/industries/aerospace/index.html',
'/ja/industries/aerospace'),
('/int/ja/industries/advanced-manufacturing/index.html',
'/ja/industries/advanced-manufacturing'),
('/int/ja/industries/food-and-drink-manufacturing/index.html',
'/ja/industries/food-and-drink-manufacturing'),
('/int/ja/industries/food-and-drink-manufacturing/freefrom/index.html',
'/ja/industries/food-and-drink-manufacturing/freefrom'),
('/int/ja/industries/retail/index.html', '/ja/industries/retail'),
('/int/ja/industries/automotive/index.html',
'/ja/industries/automotive'),
('/int/ja/industries/automotive/motorsport/index.html',
'/ja/industries/automotive/motorsport'),
('/int/ja/industries/automotive/research-and-development/index.html',
'/ja/industries/automotive/research-and-development'),
('/int/ja/industries/automotive/supply-chain/index.html',
'/ja/industries/automotive-supply-chain'),
('/int/ja/industries/energy/index.html', '/ja/industries/energy'),
('/int/ja/industries/energy/offshore-wind/index.html',
'/ja/industries/energy/offshore-wind'),
('/int/ja/industries/energy/electrical-networks/index.html',
'/ja/industries/energy/electrical-networks'),
('/int/ja/industries/energy/energy-from-waste/index.html',
'/ja/industries/energy/energy-from-waste'),
('/int/ja/industries/energy/oil-and-gas/index.html',
'/ja/industries/energy/oil-and-gas'),
('/int/ja/industries/energy/nuclear/index.html',
'/ja/industries/energy/nuclear'),
('/int/ja/industries/health-and-life/index.html',
'/ja/industries/health-and-life'),
('/int/ja/industries/health-and-life/medical-technology/index.html',
'/ja/industries/health-and-life/medical-technology'),
(
'/int/ja/industries/health-and-life/pharmaceutical-manufacturing/index.html', # noqa
'/ja/industries/health-and-life/pharmaceutical-manufacturing'
),
('/int/ja/industries/creative/index.html',
'/ja/industries/creative-industries'),
('/int/ja/industries/creative/content-and-production/index.html',
'/ja/industries/creative/content-and-production'),
('/int/ja/industries/creative/digital-media/index.html',
'/ja/industries/creative/digital-media'),
('/int/ja/industries/financial-services/index.html',
'/ja/industries/financial-services'),
('/int/ja/industries/financial-services/asset-management/index.html',
'/ja/industries/financial-services/asset-management'),
(
'/int/ja/industries/financial-services/financial-technology/index.html', # noqa
'/ja/industries/financial-services/financial-technology'
),
('/int/ja/industries/technology/index.html',
'/ja/industries/technology'),
('/int/ja/industries/technology/data-analytics/index.html',
'/ja/industries/technology/data-analytics'),
('/int/ja/terms-and-conditions/index.html',
'/ja/terms-and-conditions'),
('/int/ja/privacy-policy/index.html', '/ja/privacy-policy'),
('/int/ja/feedback/index.html', '/ja/feedback'),
('/int/ja/enquiries/confirmation/index.html',
'/ja/enquiries/confirmation'),
('/int/ja/enquiries/error/index.html', '/ja/enquiries/error'),
('/int/ja/contact/index.html', '/ja/contact'),
('/int/ja/location-guide/index.html', '/ja/location-guide'),
('/int/ja/location-guide/confirmation/index.html',
'/ja/location-guide/confirmation'),
# pt
('/int/pt/index.html', '/pt'),
('/int/pt/setup-guide/index.html', '/pt/setup-guide'),
('/int/pt/setup-guide/establish-address/index.html',
'/pt/setup-guide/establish-address'),
('/int/pt/setup-guide/apply-for-visa/index.html',
'/pt/setup-guide/apply-for-visa'),
('/int/pt/setup-guide/open-a-business-account/index.html',
'/pt/setup-guide/open-a-business-account'),
('/int/pt/setup-guide/how-to-setup/index.html',
'/pt/setup-guide/how-to-setup'),
('/int/pt/setup-guide/understand-tax/index.html',
'/pt/setup-guide/understand-tax'),
('/int/pt/setup-guide/access-talent/index.html',
'/pt/setup-guide/access-talent'),
('/int/pt/setup-guide/understand-legal/index.html',
'/pt/setup-guide/understand-legal'),
('/int/pt/industries/index.html', '/pt/industries'),
(
'/int/pt/industries/aerospace/index.html', '/pt/industries/aerospace'),
('/int/pt/industries/advanced-manufacturing/index.html',
'/pt/industries/advanced-manufacturing'),
('/int/pt/industries/food-and-drink-manufacturing/index.html',
'/pt/industries/food-and-drink-manufacturing'),
('/int/pt/industries/food-and-drink-manufacturing/freefrom/index.html',
'/pt/industries/food-and-drink-manufacturing/freefrom'),
('/int/pt/industries/retail/index.html', '/pt/industries/retail'),
('/int/pt/industries/automotive/index.html',
'/pt/industries/automotive'),
('/int/pt/industries/automotive/motorsport/index.html',
'/pt/industries/automotive/motorsport'),
('/int/pt/industries/automotive/research-and-development/index.html',
'/pt/industries/automotive/research-and-development'),
('/int/pt/industries/automotive/supply-chain/index.html',
'/pt/industries/automotive-supply-chain'),
('/int/pt/industries/energy/index.html', '/pt/industries/energy'),
('/int/pt/industries/energy/offshore-wind/index.html',
'/pt/industries/energy/offshore-wind'),
('/int/pt/industries/energy/electrical-networks/index.html',
'/pt/industries/energy/electrical-networks'),
('/int/pt/industries/energy/energy-from-waste/index.html',
'/pt/industries/energy/energy-from-waste'),
('/int/pt/industries/energy/oil-and-gas/index.html',
'/pt/industries/energy/oil-and-gas'),
('/int/pt/industries/energy/nuclear/index.html',
'/pt/industries/energy/nuclear'),
('/int/pt/industries/health-and-life/index.html',
'/pt/industries/health-and-life'),
('/int/pt/industries/health-and-life/medical-technology/index.html',
'/pt/industries/health-and-life/medical-technology'),
(
'/int/pt/industries/health-and-life/pharmaceutical-manufacturing/index.html', # noqa
'/pt/industries/health-and-life/pharmaceutical-manufacturing'
),
('/int/pt/industries/creative/index.html',
'/pt/industries/creative-industries'),
('/int/pt/industries/creative/content-and-production/index.html',
'/pt/industries/creative/content-and-production'),
('/int/pt/industries/creative/digital-media/index.html',
'/pt/industries/creative/digital-media'),
('/int/pt/industries/financial-services/index.html',
'/pt/industries/financial-services'),
('/int/pt/industries/financial-services/asset-management/index.html',
'/pt/industries/financial-services/asset-management'),
(
'/int/pt/industries/financial-services/financial-technology/index.html', # noqa
'/pt/industries/financial-services/financial-technology'
),
('/int/pt/industries/technology/index.html',
'/pt/industries/technology'),
('/int/pt/industries/technology/data-analytics/index.html',
'/pt/industries/technology/data-analytics'),
(
'/int/pt/terms-and-conditions/index.html', '/pt/terms-and-conditions'),
('/int/pt/privacy-policy/index.html', '/pt/privacy-policy'),
('/int/pt/feedback/index.html', '/pt/feedback'),
('/int/pt/enquiries/confirmation/index.html',
'/pt/enquiries/confirmation'),
('/int/pt/enquiries/error/index.html', '/pt/enquiries/error'),
('/int/pt/contact/index.html', '/pt/contact'),
('/int/pt/location-guide/index.html', '/pt/location-guide'),
('/int/pt/location-guide/confirmation/index.html',
'/pt/location-guide/confirmation'),
# zh
('/int/zh/index.html', '/zh-hans'),
('/int/zh/setup-guide/index.html', '/zh-hans/setup-guide'),
('/int/zh/setup-guide/establish-address/index.html',
'/zh-hans/setup-guide/establish-address'),
('/int/zh/setup-guide/apply-for-visa/index.html',
'/zh-hans/setup-guide/apply-for-visa'),
('/int/zh/setup-guide/open-a-business-account/index.html',
'/zh-hans/setup-guide/open-a-business-account'),
('/int/zh/setup-guide/how-to-setup/index.html',
'/zh-hans/setup-guide/how-to-setup'),
('/int/zh/setup-guide/understand-tax/index.html',
'/zh-hans/setup-guide/understand-tax'),
('/int/zh/setup-guide/access-talent/index.html',
'/zh-hans/setup-guide/access-talent'),
('/int/zh/setup-guide/understand-legal/index.html',
'/zh-hans/setup-guide/understand-legal'),
('/int/zh/industries/index.html', '/zh-hans/industries'),
('/int/zh/industries/aerospace/index.html',
'/zh-hans/industries/aerospace'),
('/int/zh/industries/advanced-manufacturing/index.html',
'/zh-hans/industries/advanced-manufacturing'),
('/int/zh/industries/food-and-drink-manufacturing/index.html',
'/zh-hans/industries/food-and-drink-manufacturing'),
('/int/zh/industries/food-and-drink-manufacturing/freefrom/index.html',
'/zh-hans/industries/food-and-drink-manufacturing/freefrom'),
('/int/zh/industries/retail/index.html', '/zh-hans/industries/retail'),
('/int/zh/industries/automotive/index.html',
'/zh-hans/industries/automotive'),
('/int/zh/industries/automotive/motorsport/index.html',
'/zh-hans/industries/automotive/motorsport'),
('/int/zh/industries/automotive/research-and-development/index.html',
'/zh-hans/industries/automotive/research-and-development'),
('/int/zh/industries/automotive/supply-chain/index.html',
'/zh-hans/industries/automotive-supply-chain'),
('/int/zh/industries/energy/index.html', '/zh-hans/industries/energy'),
('/int/zh/industries/energy/offshore-wind/index.html',
'/zh-hans/industries/energy/offshore-wind'),
('/int/zh/industries/energy/energy-from-waste/index.html',
'/zh-hans/industries/energy/energy-from-waste'),
('/int/zh/industries/energy/nuclear/index.html',
'/zh-hans/industries/energy/nuclear'),
('/int/zh/industries/energy/oil-and-gas/index.html',
'/zh-hans/industries/energy/oil-and-gas'),
('/int/zh/industries/energy/electrical-networks/index.html',
'/zh-hans/industries/energy/electrical-networks'),
('/int/zh/industries/health-and-life/index.html',
'/zh-hans/industries/health-and-life'),
('/int/zh/industries/health-and-life/medical-technology/index.html',
'/zh-hans/industries/health-and-life/medical-technology'),
(
'/int/zh/industries/health-and-life/pharmaceutical-manufacturing/index.html', # noqa
'/zh-hans/industries/health-and-life/pharmaceutical-manufacturing'
),
('/int/zh/industries/creative/index.html',
'/zh-hans/industries/creative-industries'),
('/int/zh/industries/creative/content-and-production/index.html',
'/zh-hans/industries/creative/content-and-production'),
('/int/zh/industries/creative/digital-media/index.html',
'/zh-hans/industries/creative/digital-media'),
('/int/zh/industries/financial-services/index.html',
'/zh-hans/industries/financial-services'),
('/int/zh/industries/financial-services/asset-management/index.html',
'/zh-hans/industries/financial-services/asset-management'),
(
'/int/zh/industries/financial-services/financial-technology/index.html', # noqa
'/zh-hans/industries/financial-services/financial-technology'
),
('/int/zh/industries/technology/index.html',
'/zh-hans/industries/technology'),
('/int/zh/industries/technology/data-analytics/index.html',
'/zh-hans/industries/technology/data-analytics'),
('/int/zh/terms-and-conditions/index.html',
'/zh-hans/terms-and-conditions'),
('/int/zh/privacy-policy/index.html', '/zh-hans/privacy-policy'),
('/int/zh/feedback/index.html', '/zh-hans/feedback'),
('/int/zh/enquiries/confirmation/index.html',
'/zh-hans/enquiries/confirmation'),
('/int/zh/enquiries/error/index.html', '/zh-hans/enquiries/error'),
('/int/zh/contact/index.html', '/zh-hans/contact'),
('/int/zh/location-guide/index.html', '/zh-hans/location-guide'),
('/int/zh/location-guide/confirmation/index.html',
'/zh-hans/location-guide/confirmation'),
# cn
('/cn/index.html', '/zh-hans'),
('/cn/setup-guide/index.html', '/zh-hans/setup-guide'),
('/cn/setup-guide/establish-address/index.html',
'/zh-hans/setup-guide/establish-address'),
('/cn/setup-guide/apply-for-visa/index.html',
'/zh-hans/setup-guide/apply-for-visa'),
('/cn/setup-guide/open-a-business-account/index.html',
'/zh-hans/setup-guide/open-a-business-account'),
('/cn/setup-guide/how-to-setup/index.html',
'/zh-hans/setup-guide/how-to-setup'),
('/cn/setup-guide/understand-tax/index.html',
'/zh-hans/setup-guide/understand-tax'),
('/cn/setup-guide/access-talent/index.html',
'/zh-hans/setup-guide/access-talent'),
('/cn/setup-guide/understand-legal/index.html',
'/zh-hans/setup-guide/understand-legal'),
('/cn/industries/index.html', '/zh-hans/industries'),
(
'/cn/industries/aerospace/index.html',
'/zh-hans/industries/aerospace'
),
('/cn/industries/advanced-manufacturing/index.html',
'/zh-hans/industries/advanced-manufacturing'),
('/cn/industries/food-and-drink-manufacturing/index.html',
'/zh-hans/industries/food-and-drink-manufacturing'),
('/cn/industries/food-and-drink-manufacturing/freefrom/index.html',
'/zh-hans/industries/food-and-drink-manufacturing/freefrom'),
('/cn/industries/retail/index.html', '/zh-hans/industries/retail'),
('/cn/industries/automotive/index.html',
'/zh-hans/industries/automotive'),
('/cn/industries/automotive/motorsport/index.html',
'/zh-hans/industries/automotive/motorsport'),
('/cn/industries/automotive/research-and-development/index.html',
'/zh-hans/industries/automotive/research-and-development'),
('/cn/industries/automotive/supply-chain/index.html',
'/zh-hans/industries/automotive-supply-chain'),
('/cn/industries/energy/index.html', '/zh-hans/industries/energy'),
('/cn/industries/energy/offshore-wind/index.html',
'/zh-hans/industries/energy/offshore-wind'),
('/cn/industries/energy/energy-from-waste/index.html',
'/zh-hans/industries/energy/energy-from-waste'),
('/cn/industries/energy/nuclear/index.html',
'/zh-hans/industries/energy/nuclear'),
('/cn/industries/energy/oil-and-gas/index.html',
'/zh-hans/industries/energy/oil-and-gas'),
('/cn/industries/energy/electrical-networks/index.html',
'/zh-hans/industries/energy/electrical-networks'),
('/cn/industries/health-and-life/index.html',
'/zh-hans/industries/health-and-life'),
('/cn/industries/health-and-life/medical-technology/index.html',
'/zh-hans/industries/health-and-life/medical-technology'),
(
'/cn/industries/health-and-life/pharmaceutical-manufacturing/index.html', # noqa
'/zh-hans/industries/health-and-life/pharmaceutical-manufacturing'
),
('/cn/industries/creative/index.html',
'/zh-hans/industries/creative-industries'),
('/cn/industries/creative/content-and-production/index.html',
'/zh-hans/industries/creative/content-and-production'),
('/cn/industries/creative/digital-media/index.html',
'/zh-hans/industries/creative/digital-media'),
('/cn/industries/financial-services/index.html',
'/zh-hans/industries/financial-services'),
('/cn/industries/financial-services/asset-management/index.html',
'/zh-hans/industries/financial-services/asset-management'),
('/cn/industries/financial-services/financial-technology/index.html',
'/zh-hans/industries/financial-services/financial-technology'),
('/cn/industries/technology/index.html',
'/zh-hans/industries/technology'),
('/cn/industries/technology/data-analytics/index.html',
'/zh-hans/industries/technology/data-analytics'),
(
'/cn/terms-and-conditions/index.html',
'/zh-hans/terms-and-conditions'
),
('/cn/privacy-policy/index.html', '/zh-hans/privacy-policy'),
('/cn/feedback/index.html', '/zh-hans/feedback'),
('/cn/enquiries/confirmation/index.html',
'/zh-hans/enquiries/confirmation'),
('/cn/enquiries/error/index.html', '/zh-hans/enquiries/error'),
('/cn/contact/index.html', '/zh-hans/contact'),
('/cn/location-guide/index.html', '/zh-hans/location-guide'),
('/cn/location-guide/confirmation/index.html',
'/zh-hans/location-guide/confirmation'),
# zh-cn
('/zh-cn/index.html', '/zh-hans'),
('/zh-cn/setup-guide/index.html', '/zh-hans/setup-guide'),
('/zh-cn/setup-guide/establish-address/index.html',
'/zh-hans/setup-guide/establish-address'),
('/zh-cn/setup-guide/apply-for-visa/index.html',
'/zh-hans/setup-guide/apply-for-visa'),
('/zh-cn/setup-guide/open-a-business-account/index.html',
'/zh-hans/setup-guide/open-a-business-account'),
('/zh-cn/setup-guide/how-to-setup/index.html',
'/zh-hans/setup-guide/how-to-setup'),
('/zh-cn/setup-guide/understand-tax/index.html',
'/zh-hans/setup-guide/understand-tax'),
('/zh-cn/setup-guide/access-talent/index.html',
'/zh-hans/setup-guide/access-talent'),
('/zh-cn/setup-guide/understand-legal/index.html',
'/zh-hans/setup-guide/understand-legal'),
('/zh-cn/industries/index.html', '/zh-hans/industries'),
(
'/zh-cn/industries/aerospace/index.html',
'/zh-hans/industries/aerospace'
),
('/zh-cn/industries/advanced-manufacturing/index.html',
'/zh-hans/industries/advanced-manufacturing'),
('/zh-cn/industries/food-and-drink-manufacturing/index.html',
'/zh-hans/industries/food-and-drink-manufacturing'),
('/zh-cn/industries/food-and-drink-manufacturing/freefrom/index.html',
'/zh-hans/industries/food-and-drink-manufacturing/freefrom'),
('/zh-cn/industries/retail/index.html', '/zh-hans/industries/retail'),
('/zh-cn/industries/automotive/index.html',
'/zh-hans/industries/automotive'),
('/zh-cn/industries/automotive/motorsport/index.html',
'/zh-hans/industries/automotive/motorsport'),
('/zh-cn/industries/automotive/research-and-development/index.html',
'/zh-hans/industries/automotive/research-and-development'),
('/zh-cn/industries/automotive/supply-chain/index.html',
'/zh-hans/industries/automotive-supply-chain'),
('/zh-cn/industries/energy/index.html', '/zh-hans/industries/energy'),
('/zh-cn/industries/energy/offshore-wind/index.html',
'/zh-hans/industries/energy/offshore-wind'),
('/zh-cn/industries/energy/energy-from-waste/index.html',
'/zh-hans/industries/energy/energy-from-waste'),
('/zh-cn/industries/energy/nuclear/index.html',
'/zh-hans/industries/energy/nuclear'),
('/zh-cn/industries/energy/oil-and-gas/index.html',
'/zh-hans/industries/energy/oil-and-gas'),
('/zh-cn/industries/energy/electrical-networks/index.html',
'/zh-hans/industries/energy/electrical-networks'),
('/zh-cn/industries/health-and-life/index.html',
'/zh-hans/industries/health-and-life'),
('/zh-cn/industries/health-and-life/medical-technology/index.html',
'/zh-hans/industries/health-and-life/medical-technology'),
(
'/zh-cn/industries/health-and-life/pharmaceutical-manufacturing/index.html', # noqa
'/zh-hans/industries/health-and-life/pharmaceutical-manufacturing'
),
('/zh-cn/industries/creative/index.html',
'/zh-hans/industries/creative-industries'),
('/zh-cn/industries/creative/content-and-production/index.html',
'/zh-hans/industries/creative/content-and-production'),
('/zh-cn/industries/creative/digital-media/index.html',
'/zh-hans/industries/creative/digital-media'),
('/zh-cn/industries/financial-services/index.html',
'/zh-hans/industries/financial-services'),
('/zh-cn/industries/financial-services/asset-management/index.html',
'/zh-hans/industries/financial-services/asset-management'),
('/zh-cn/industries/financial-services/financial-technology/index.html', # noqa
'/zh-hans/industries/financial-services/financial-technology'),
('/zh-cn/industries/technology/index.html',
'/zh-hans/industries/technology'),
('/zh-cn/industries/technology/data-analytics/index.html',
'/zh-hans/industries/technology/data-analytics'),
(
'/zh-cn/terms-and-conditions/index.html',
'/zh-hans/terms-and-conditions'
),
('/zh-cn/privacy-policy/index.html', '/zh-hans/privacy-policy'),
('/zh-cn/feedback/index.html', '/zh-hans/feedback'),
('/zh-cn/enquiries/confirmation/index.html',
'/zh-hans/enquiries/confirmation'),
('/zh-cn/enquiries/error/index.html', '/zh-hans/enquiries/error'),
('/zh-cn/contact/index.html', '/zh-hans/contact'),
('/zh-cn/location-guide/index.html', '/zh-hans/location-guide'),
('/zh-cn/location-guide/confirmation/index.html',
'/zh-hans/location-guide/confirmation'),
# br
('/br/index.html', '/pt'),
('/br/setup-guide/index.html', '/pt/setup-guide'),
('/br/setup-guide/establish-address/index.html',
'/pt/setup-guide/establish-address'),
('/br/setup-guide/apply-for-visa/index.html',
'/pt/setup-guide/apply-for-visa'),
('/br/setup-guide/open-a-business-account/index.html',
'/pt/setup-guide/open-a-business-account'),
('/br/setup-guide/how-to-setup/index.html',
'/pt/setup-guide/how-to-setup'),
('/br/setup-guide/understand-tax/index.html',
'/pt/setup-guide/understand-tax'),
('/br/setup-guide/access-talent/index.html',
'/pt/setup-guide/access-talent'),
('/br/setup-guide/understand-legal/index.html',
'/pt/setup-guide/understand-legal'),
('/br/industries/index.html', '/pt/industries'),
('/br/industries/aerospace/index.html', '/pt/industries/aerospace'),
('/br/industries/advanced-manufacturing/index.html',
'/pt/industries/advanced-manufacturing'),
('/br/industries/food-and-drink-manufacturing/index.html',
'/pt/industries/food-and-drink-manufacturing'),
('/br/industries/food-and-drink-manufacturing/freefrom/index.html',
'/pt/industries/food-and-drink-manufacturing/freefrom'),
('/br/industries/retail/index.html', '/pt/industries/retail'),
('/br/industries/automotive/index.html', '/pt/industries/automotive'),
('/br/industries/automotive/motorsport/index.html',
'/pt/industries/automotive/motorsport'),
('/br/industries/automotive/research-and-development/index.html',
'/pt/industries/automotive/research-and-development'),
('/br/industries/automotive/supply-chain/index.html',
'/pt/industries/automotive-supply-chain'),
('/br/industries/energy/index.html', '/pt/industries/energy'),
('/br/industries/energy/offshore-wind/index.html',
'/pt/industries/energy/offshore-wind'),
('/br/industries/energy/electrical-networks/index.html',
'/pt/industries/energy/electrical-networks'),
('/br/industries/energy/energy-from-waste/index.html',
'/pt/industries/energy/energy-from-waste'),
('/br/industries/energy/oil-and-gas/index.html',
'/pt/industries/energy/oil-and-gas'),
('/br/industries/energy/nuclear/index.html',
'/pt/industries/energy/nuclear'),
('/br/industries/health-and-life/index.html',
'/pt/industries/health-and-life'),
('/br/industries/health-and-life/medical-technology/index.html',
'/pt/industries/health-and-life/medical-technology'),
(
'/br/industries/health-and-life/pharmaceutical-manufacturing/index.html', # noqa
'/pt/industries/health-and-life/pharmaceutical-manufacturing'
),
('/br/industries/creative/index.html',
'/pt/industries/creative-industries'),
('/br/industries/creative/content-and-production/index.html',
'/pt/industries/creative/content-and-production'),
('/br/industries/creative/digital-media/index.html',
'/pt/industries/creative/digital-media'),
('/br/industries/financial-services/index.html',
'/pt/industries/financial-services'),
('/br/industries/financial-services/asset-management/index.html',
'/pt/industries/financial-services/asset-management'),
('/br/industries/financial-services/financial-technology/index.html',
'/pt/industries/financial-services/financial-technology'),
('/br/industries/technology/index.html', '/pt/industries/technology'),
('/br/industries/technology/data-analytics/index.html',
'/pt/industries/technology/data-analytics'),
('/br/terms-and-conditions/index.html', '/pt/terms-and-conditions'),
('/br/privacy-policy/index.html', '/pt/privacy-policy'),
('/br/feedback/index.html', '/pt/feedback'),
('/br/enquiries/confirmation/index.html',
'/pt/enquiries/confirmation'),
('/br/enquiries/error/index.html', '/pt/enquiries/error'),
('/br/contact/index.html', '/pt/contact'),
('/br/location-guide/index.html', '/pt/location-guide'),
('/br/location-guide/confirmation/index.html',
'/pt/location-guide/confirmation'),
# in
('/in/index.html', '/'),
('/in/setup-guide/index.html', '/setup-guide'),
('/in/setup-guide/establish-address/index.html',
'/setup-guide/establish-address'),
('/in/setup-guide/apply-for-visa/index.html',
'/setup-guide/apply-for-visa'),
('/in/setup-guide/open-a-business-account/index.html',
'/setup-guide/open-a-business-account'),
('/in/setup-guide/how-to-setup/index.html',
'/setup-guide/how-to-setup'),
('/in/setup-guide/understand-tax/index.html',
'/setup-guide/understand-tax'),
('/in/setup-guide/access-talent/index.html',
'/setup-guide/access-talent'),
('/in/setup-guide/understand-legal/index.html',
'/setup-guide/understand-legal'),
('/in/industries/index.html', '/industries'),
('/in/industries/aerospace/index.html', '/industries/aerospace'),
('/in/industries/advanced-manufacturing/index.html',
'/industries/advanced-manufacturing'),
('/in/industries/food-and-drink-manufacturing/index.html',
'/industries/food-and-drink-manufacturing'),
('/in/industries/food-and-drink-manufacturing/freefrom/index.html',
'/industries/food-and-drink-manufacturing/freefrom'),
('/in/industries/retail/index.html', '/industries/retail'),
('/in/industries/automotive/index.html', '/industries/automotive'),
('/in/industries/automotive/motorsport/index.html',
'/industries/automotive/motorsport'),
('/in/industries/automotive/research-and-development/index.html',
'/industries/automotive/research-and-development'),
('/in/industries/automotive/supply-chain/index.html',
'/industries/automotive-supply-chain'),
('/in/industries/energy/index.html', '/industries/energy'),
('/in/industries/energy/offshore-wind/index.html',
'/industries/energy/offshore-wind'),
('/in/industries/energy/electrical-networks/index.html',
'/industries/energy/electrical-networks'),
('/in/industries/energy/energy-from-waste/index.html',
'/industries/energy/energy-from-waste'),
('/in/industries/energy/oil-and-gas/index.html',
'/industries/energy/oil-and-gas'),
('/in/industries/energy/nuclear/index.html',
'/industries/energy/nuclear'),
('/in/industries/health-and-life/index.html',
'/industries/health-and-life'),
('/in/industries/health-and-life/medical-technology/index.html',
'/industries/health-and-life/medical-technology'),
(
'/in/industries/health-and-life/pharmaceutical-manufacturing/index.html', # noqa
'/industries/health-and-life/pharmaceutical-manufacturing'
),
('/in/industries/creative/index.html',
'/industries/creative-industries'),
('/in/industries/creative/content-and-production/index.html',
'/industries/creative/content-and-production'),
('/in/industries/creative/digital-media/index.html',
'/industries/creative/digital-media'),
('/in/industries/financial-services/index.html',
'/industries/financial-services'),
('/in/industries/financial-services/asset-management/index.html',
'/industries/financial-services/asset-management'),
('/in/industries/financial-services/financial-technology/index.html',
'/industries/financial-services/financial-technology'),
('/in/industries/technology/index.html', '/industries/technology'),
('/in/industries/technology/data-analytics/index.html',
'/industries/technology/data-analytics'),
('/in/terms-and-conditions/index.html', '/terms-and-conditions'),
('/in/privacy-policy/index.html', '/privacy-policy'),
('/in/feedback/index.html', '/feedback'),
('/in/enquiries/confirmation/index.html', '/enquiries/confirmation'),
('/in/enquiries/error/index.html', '/enquiries/error'),
('/in/contact/index.html', '/contact'),
('/in/location-guide/index.html', '/location-guide'),
('/in/location-guide/confirmation/index.html',
'/location-guide/confirmation'),
# us
('/us/index.html', '/'),
('/us/setup-guide/index.html', '/setup-guide'),
('/us/setup-guide/establish-address/index.html',
'/setup-guide/establish-address'),
('/us/setup-guide/apply-for-visa/index.html',
'/setup-guide/apply-for-visa'),
('/us/setup-guide/open-a-business-account/index.html',
'/setup-guide/open-a-business-account'),
('/us/setup-guide/how-to-setup/index.html',
'/setup-guide/how-to-setup'),
('/us/setup-guide/understand-tax/index.html',
'/setup-guide/understand-tax'),
('/us/setup-guide/access-talent/index.html',
'/setup-guide/access-talent'),
('/us/setup-guide/understand-legal/index.html',
'/setup-guide/understand-legal'),
('/us/industries/index.html', '/industries'),
('/us/industries/aerospace/index.html', '/industries/aerospace'),
('/us/industries/advanced-manufacturing/index.html',
'/industries/advanced-manufacturing'),
('/us/industries/food-and-drink-manufacturing/index.html',
'/industries/food-and-drink-manufacturing'),
('/us/industries/food-and-drink-manufacturing/freefrom/index.html',
'/industries/food-and-drink-manufacturing/freefrom'),
('/us/industries/retail/index.html', '/industries/retail'),
('/us/industries/automotive/index.html', '/industries/automotive'),
('/us/industries/automotive/motorsport/index.html',
'/industries/automotive/motorsport'),
('/us/industries/automotive/research-and-development/index.html',
'/industries/automotive/research-and-development'),
('/us/industries/automotive/supply-chain/index.html',
'/industries/automotive-supply-chain'),
('/us/industries/energy/index.html', '/industries/energy'),
('/us/industries/energy/offshore-wind/index.html',
'/industries/energy/offshore-wind'),
('/us/industries/energy/electrical-networks/index.html',
'/industries/energy/electrical-networks'),
('/us/industries/energy/energy-from-waste/index.html',
'/industries/energy/energy-from-waste'),
('/us/industries/energy/oil-and-gas/index.html',
'/industries/energy/oil-and-gas'),
('/us/industries/energy/nuclear/index.html',
'/industries/energy/nuclear'),
('/us/industries/health-and-life/index.html',
'/industries/health-and-life'),
('/us/industries/health-and-life/medical-technology/index.html',
'/industries/health-and-life/medical-technology'),
(
'/us/industries/health-and-life/pharmaceutical-manufacturing/index.html', # noqa
'/industries/health-and-life/pharmaceutical-manufacturing'
),
('/us/industries/creative/index.html',
'/industries/creative-industries'),
('/us/industries/creative/content-and-production/index.html',
'/industries/creative/content-and-production'),
('/us/industries/creative/digital-media/index.html',
'/industries/creative/digital-media'),
('/us/industries/financial-services/index.html',
'/industries/financial-services'),
('/us/industries/financial-services/asset-management/index.html',
'/industries/financial-services/asset-management'),
('/us/industries/financial-services/financial-technology/index.html',
'/industries/financial-services/financial-technology'),
('/us/industries/technology/index.html', '/industries/technology'),
('/us/industries/technology/data-analytics/index.html',
'/industries/technology/data-analytics'),
('/us/terms-and-conditions/index.html', '/terms-and-conditions'),
('/us/privacy-policy/index.html', '/privacy-policy'),
('/us/feedback/index.html', '/feedback'),
('/us/enquiries/confirmation/index.html', '/enquiries/confirmation'),
('/us/enquiries/error/index.html', '/enquiries/error'),
('/us/contact/index.html', '/contact'),
('/us/location-guide/index.html', '/location-guide'),
('/us/location-guide/confirmation/index.html',
'/location-guide/confirmation'),
        # jp
('/jp/index.html', '/ja'),
('/jp/setup-guide/index.html', '/ja/setup-guide'),
('/jp/setup-guide/establish-address/index.html',
'/ja/setup-guide/establish-address'),
('/jp/setup-guide/apply-for-visa/index.html',
'/ja/setup-guide/apply-for-visa'),
('/jp/setup-guide/open-a-business-account/index.html',
'/ja/setup-guide/open-a-business-account'),
('/jp/setup-guide/how-to-setup/index.html',
'/ja/setup-guide/how-to-setup'),
('/jp/setup-guide/understand-tax/index.html',
'/ja/setup-guide/understand-tax'),
('/jp/setup-guide/access-talent/index.html',
'/ja/setup-guide/access-talent'),
('/jp/setup-guide/understand-legal/index.html',
'/ja/setup-guide/understand-legal'),
('/jp/industries/index.html', '/ja/industries'),
('/jp/industries/aerospace/index.html', '/ja/industries/aerospace'),
('/jp/industries/advanced-manufacturing/index.html',
'/ja/industries/advanced-manufacturing'),
('/jp/industries/food-and-drink-manufacturing/index.html',
'/ja/industries/food-and-drink-manufacturing'),
('/jp/industries/food-and-drink-manufacturing/freefrom/index.html',
'/ja/industries/food-and-drink-manufacturing/freefrom'),
('/jp/industries/retail/index.html', '/ja/industries/retail'),
('/jp/industries/automotive/index.html', '/ja/industries/automotive'),
('/jp/industries/automotive/motorsport/index.html',
'/ja/industries/automotive/motorsport'),
('/jp/industries/automotive/research-and-development/index.html',
'/ja/industries/automotive/research-and-development'),
('/jp/industries/automotive/supply-chain/index.html',
'/ja/industries/automotive-supply-chain'),
('/jp/industries/energy/index.html', '/ja/industries/energy'),
('/jp/industries/energy/offshore-wind/index.html',
'/ja/industries/energy/offshore-wind'),
('/jp/industries/energy/electrical-networks/index.html',
'/ja/industries/energy/electrical-networks'),
('/jp/industries/energy/energy-from-waste/index.html',
'/ja/industries/energy/energy-from-waste'),
('/jp/industries/energy/oil-and-gas/index.html',
'/ja/industries/energy/oil-and-gas'),
('/jp/industries/energy/nuclear/index.html',
'/ja/industries/energy/nuclear'),
('/jp/industries/health-and-life/index.html',
'/ja/industries/health-and-life'),
('/jp/industries/health-and-life/medical-technology/index.html',
'/ja/industries/health-and-life/medical-technology'),
(
'/jp/industries/health-and-life/pharmaceutical-manufacturing/index.html', # noqa
'/ja/industries/health-and-life/pharmaceutical-manufacturing'
),
('/jp/industries/creative/index.html',
'/ja/industries/creative-industries'),
('/jp/industries/creative/content-and-production/index.html',
'/ja/industries/creative/content-and-production'),
('/jp/industries/creative/digital-media/index.html',
'/ja/industries/creative/digital-media'),
('/jp/industries/financial-services/index.html',
'/ja/industries/financial-services'),
('/jp/industries/financial-services/asset-management/index.html',
'/ja/industries/financial-services/asset-management'),
('/jp/industries/financial-services/financial-technology/index.html',
'/ja/industries/financial-services/financial-technology'),
('/jp/industries/technology/index.html', '/ja/industries/technology'),
('/jp/industries/technology/data-analytics/index.html',
'/ja/industries/technology/data-analytics'),
('/jp/terms-and-conditions/index.html', '/ja/terms-and-conditions'),
('/jp/privacy-policy/index.html', '/ja/privacy-policy'),
('/jp/feedback/index.html', '/ja/feedback'),
('/jp/enquiries/confirmation/index.html',
'/ja/enquiries/confirmation'),
('/jp/enquiries/error/index.html', '/ja/enquiries/error'),
('/jp/contact/index.html', '/ja/contact'),
('/jp/location-guide/index.html', '/ja/location-guide'),
('/jp/location-guide/confirmation/index.html',
'/ja/location-guide/confirmation'),
# int
('/int/index.html', '/'),
('/int/setup-guide/index.html', '/setup-guide'),
('/int/setup-guide/establish-address/index.html',
'/setup-guide/establish-address'),
('/int/setup-guide/apply-for-visa/index.html',
'/setup-guide/apply-for-visa'),
('/int/setup-guide/open-a-business-account/index.html',
'/setup-guide/open-a-business-account'),
('/int/setup-guide/how-to-setup/index.html',
'/setup-guide/how-to-setup'),
('/int/setup-guide/understand-tax/index.html',
'/setup-guide/understand-tax'),
('/int/setup-guide/access-talent/index.html',
'/setup-guide/access-talent'),
('/int/setup-guide/understand-legal/index.html',
'/setup-guide/understand-legal'),
('/int/industries/index.html', '/industries'),
('/int/industries/aerospace/index.html', '/industries/aerospace'),
('/int/industries/advanced-manufacturing/index.html',
'/industries/advanced-manufacturing'),
('/int/industries/food-and-drink-manufacturing/index.html',
'/industries/food-and-drink-manufacturing'),
('/int/industries/food-and-drink-manufacturing/freefrom/index.html',
'/industries/food-and-drink-manufacturing/freefrom'),
('/int/industries/retail/index.html', '/industries/retail'),
('/int/industries/automotive/index.html', '/industries/automotive'),
('/int/industries/automotive/motorsport/index.html',
'/industries/automotive/motorsport'),
('/int/industries/automotive/research-and-development/index.html',
'/industries/automotive/research-and-development'),
('/int/industries/automotive/supply-chain/index.html',
'/industries/automotive-supply-chain'),
('/int/industries/energy/index.html', '/industries/energy'),
('/int/industries/energy/offshore-wind/index.html',
'/industries/energy/offshore-wind'),
('/int/industries/energy/electrical-networks/index.html',
'/industries/energy/electrical-networks'),
('/int/industries/energy/energy-from-waste/index.html',
'/industries/energy/energy-from-waste'),
('/int/industries/energy/oil-and-gas/index.html',
'/industries/energy/oil-and-gas'),
('/int/industries/energy/nuclear/index.html',
'/industries/energy/nuclear'),
('/int/industries/health-and-life/index.html',
'/industries/health-and-life'),
('/int/industries/health-and-life/medical-technology/index.html',
'/industries/health-and-life/medical-technology'),
(
'/int/industries/health-and-life/pharmaceutical-manufacturing/index.html', # noqa
'/industries/health-and-life/pharmaceutical-manufacturing'
),
('/int/industries/creative/index.html',
'/industries/creative-industries'),
('/int/industries/creative/content-and-production/index.html',
'/industries/creative/content-and-production'),
('/int/industries/creative/digital-media/index.html',
'/industries/creative/digital-media'),
('/int/industries/financial-services/index.html',
'/industries/financial-services'),
('/int/industries/financial-services/asset-management/index.html',
'/industries/financial-services/asset-management'),
('/int/industries/financial-services/financial-technology/index.html',
'/industries/financial-services/financial-technology'),
('/int/industries/technology/index.html', '/industries/technology'),
('/int/industries/technology/data-analytics/index.html',
'/industries/technology/data-analytics'),
('/int/terms-and-conditions/index.html', '/terms-and-conditions'),
('/int/privacy-policy/index.html', '/privacy-policy'),
('/int/feedback/index.html', '/feedback'),
('/int/enquiries/confirmation/index.html', '/enquiries/confirmation'),
('/int/enquiries/error/index.html', '/enquiries/error'),
('/int/contact/index.html', '/contact'),
('/int/location-guide/index.html', '/location-guide'),
('/int/location-guide/confirmation/index.html',
'/location-guide/confirmation'),
# no index.html
# ar
('/int/ar/', '/ar'),
('/int/ar/setup-guide/', '/ar/setup-guide'),
('/int/ar/setup-guide/establish-address/',
'/ar/setup-guide/establish-address'),
('/int/ar/setup-guide/apply-for-visa/',
'/ar/setup-guide/apply-for-visa'),
('/int/ar/setup-guide/open-a-business-account/',
'/ar/setup-guide/open-a-business-account'),
('/int/ar/setup-guide/how-to-setup/',
'/ar/setup-guide/how-to-setup'),
('/int/ar/setup-guide/understand-tax/',
'/ar/setup-guide/understand-tax'),
('/int/ar/setup-guide/access-talent/',
'/ar/setup-guide/access-talent'),
('/int/ar/setup-guide/understand-legal/',
'/ar/setup-guide/understand-legal'),
('/int/ar/industries/', '/ar/industries'),
('/int/ar/industries/aerospace/',
'/ar/industries/aerospace'),
('/int/ar/industries/advanced-manufacturing/',
'/ar/industries/advanced-manufacturing'),
('/int/ar/industries/food-and-drink-manufacturing/',
'/ar/industries/food-and-drink-manufacturing'),
('/int/ar/industries/food-and-drink-manufacturing/freefrom/',
'/ar/industries/food-and-drink-manufacturing/freefrom'),
('/int/ar/industries/retail/', '/ar/industries/retail'),
('/int/ar/industries/automotive/',
'/ar/industries/automotive'),
('/int/ar/industries/automotive/motorsport/',
'/ar/industries/automotive/motorsport'),
('/int/ar/industries/automotive/research-and-development/',
'/ar/industries/automotive/research-and-development'),
('/int/ar/industries/automotive/supply-chain/',
'/ar/industries/automotive-supply-chain'),
('/int/ar/industries/energy/', '/ar/industries/energy'),
('/int/ar/industries/energy/offshore-wind/',
'/ar/industries/energy/offshore-wind'),
('/int/ar/industries/energy/electrical-networks/',
'/ar/industries/energy/electrical-networks'),
('/int/ar/industries/energy/energy-from-waste/',
'/ar/industries/energy/energy-from-waste'),
('/int/ar/industries/energy/oil-and-gas/',
'/ar/industries/energy/oil-and-gas'),
('/int/ar/industries/energy/nuclear/',
'/ar/industries/energy/nuclear'),
('/int/ar/industries/health-and-life/',
'/ar/industries/health-and-life'),
('/int/ar/industries/health-and-life/medical-technology/',
'/ar/industries/health-and-life/medical-technology'),
(
'/int/ar/industries/health-and-life/pharmaceutical-manufacturing/', # noqa
'/ar/industries/health-and-life/pharmaceutical-manufacturing'
),
('/int/ar/industries/creative/', '/ar/industries/creative-industries'),
('/int/ar/industries/creative/content-and-production/',
'/ar/industries/creative/content-and-production'),
('/int/ar/industries/creative/digital-media/',
'/ar/industries/creative/digital-media'),
('/int/ar/industries/financial-services/',
'/ar/industries/financial-services'),
('/int/ar/industries/financial-services/asset-management/',
'/ar/industries/financial-services/asset-management'),
(
'/int/ar/industries/financial-services/financial-technology/', # noqa
'/ar/industries/financial-services/financial-technology'),
('/int/ar/industries/technology/',
'/ar/industries/technology'),
('/int/ar/industries/technology/data-analytics/',
'/ar/industries/technology/data-analytics'),
('/int/ar/terms-and-conditions/',
'/ar/terms-and-conditions'),
('/int/ar/privacy-policy/', '/ar/privacy-policy'),
('/int/ar/feedback/', '/ar/feedback'),
('/int/ar/enquiries/confirmation/',
'/ar/enquiries/confirmation'),
('/int/ar/enquiries/error/', '/ar/enquiries/error'),
('/int/ar/contact/', '/ar/contact'),
('/int/ar/location-guide/', '/ar/location-guide'),
('/int/ar/location-guide/confirmation/',
'/ar/location-guide/confirmation'),
# de
('/int/de/', '/de'),
('/int/de/setup-guide/', '/de/setup-guide'),
('/int/de/setup-guide/establish-address/',
'/de/setup-guide/establish-address'),
('/int/de/setup-guide/apply-for-visa/',
'/de/setup-guide/apply-for-visa'),
('/int/de/setup-guide/open-a-business-account/',
'/de/setup-guide/open-a-business-account'),
('/int/de/setup-guide/how-to-setup/',
'/de/setup-guide/how-to-setup'),
('/int/de/setup-guide/understand-tax/',
'/de/setup-guide/understand-tax'),
('/int/de/setup-guide/access-talent/',
'/de/setup-guide/access-talent'),
('/int/de/setup-guide/understand-legal/',
'/de/setup-guide/understand-legal'),
('/int/de/industries/', '/de/industries'),
('/int/de/industries/aerospace/',
'/de/industries/aerospace'),
('/int/de/industries/advanced-manufacturing/',
'/de/industries/advanced-manufacturing'),
('/int/de/industries/food-and-drink-manufacturing/',
'/de/industries/food-and-drink-manufacturing'),
('/int/de/industries/food-and-drink-manufacturing/freefrom/',
'/de/industries/food-and-drink-manufacturing/freefrom'),
('/int/de/industries/retail/', '/de/industries/retail'),
('/int/de/industries/automotive/',
'/de/industries/automotive'),
('/int/de/industries/automotive/motorsport/',
'/de/industries/automotive/motorsport'),
('/int/de/industries/automotive/research-and-development/',
'/de/industries/automotive/research-and-development'),
('/int/de/industries/automotive/supply-chain/',
'/de/industries/automotive-supply-chain'),
('/int/de/industries/energy/', '/de/industries/energy'),
('/int/de/industries/energy/offshore-wind/',
'/de/industries/energy/offshore-wind'),
('/int/de/industries/energy/electrical-networks/',
'/de/industries/energy/electrical-networks'),
('/int/de/industries/energy/energy-from-waste/',
'/de/industries/energy/energy-from-waste'),
('/int/de/industries/energy/oil-and-gas/',
'/de/industries/energy/oil-and-gas'),
('/int/de/industries/energy/nuclear/',
'/de/industries/energy/nuclear'),
('/int/de/industries/health-and-life/',
'/de/industries/health-and-life'),
('/int/de/industries/health-and-life/medical-technology/',
'/de/industries/health-and-life/medical-technology'),
(
'/int/de/industries/health-and-life/pharmaceutical-manufacturing/', # noqa
'/de/industries/health-and-life/pharmaceutical-manufacturing'
),
('/int/de/industries/creative/', '/de/industries/creative-industries'),
('/int/de/industries/creative/content-and-production/',
'/de/industries/creative/content-and-production'),
('/int/de/industries/creative/digital-media/',
'/de/industries/creative/digital-media'),
('/int/de/industries/financial-services/',
'/de/industries/financial-services'),
('/int/de/industries/financial-services/asset-management/',
'/de/industries/financial-services/asset-management'),
(
'/int/de/industries/financial-services/financial-technology/', # noqa
'/de/industries/financial-services/financial-technology'
),
('/int/de/industries/technology/',
'/de/industries/technology'),
('/int/de/industries/technology/data-analytics/',
'/de/industries/technology/data-analytics'),
('/int/de/terms-and-conditions/',
'/de/terms-and-conditions'),
('/int/de/privacy-policy/', '/de/privacy-policy'),
('/int/de/feedback/', '/de/feedback'),
('/int/de/enquiries/confirmation/',
'/de/enquiries/confirmation'),
('/int/de/enquiries/error/', '/de/enquiries/error'),
('/int/de/contact/', '/de/contact'),
('/int/de/location-guide/', '/de/location-guide'),
('/int/de/location-guide/confirmation/',
'/de/location-guide/confirmation'),
# es
('/int/es/', '/es'),
('/int/es/setup-guide/', '/es/setup-guide'),
('/int/es/setup-guide/establish-address/',
'/es/setup-guide/establish-address'),
('/int/es/setup-guide/apply-for-visa/',
'/es/setup-guide/apply-for-visa'),
('/int/es/setup-guide/open-a-business-account/',
'/es/setup-guide/open-a-business-account'),
('/int/es/setup-guide/how-to-setup/',
'/es/setup-guide/how-to-setup'),
('/int/es/setup-guide/understand-tax/',
'/es/setup-guide/understand-tax'),
('/int/es/setup-guide/access-talent/',
'/es/setup-guide/access-talent'),
('/int/es/setup-guide/understand-legal/',
'/es/setup-guide/understand-legal'),
('/int/es/industries/', '/es/industries'),
(
'/int/es/industries/aerospace/',
'/es/industries/aerospace'
),
('/int/es/industries/advanced-manufacturing/',
'/es/industries/advanced-manufacturing'),
('/int/es/industries/food-and-drink-manufacturing/',
'/es/industries/food-and-drink-manufacturing'),
('/int/es/industries/food-and-drink-manufacturing/freefrom/',
'/es/industries/food-and-drink-manufacturing/freefrom'),
('/int/es/industries/retail/', '/es/industries/retail'),
('/int/es/industries/automotive/',
'/es/industries/automotive'),
('/int/es/industries/automotive/motorsport/',
'/es/industries/automotive/motorsport'),
('/int/es/industries/automotive/research-and-development/',
'/es/industries/automotive/research-and-development'),
('/int/es/industries/automotive/supply-chain/',
'/es/industries/automotive-supply-chain'),
('/int/es/industries/energy/', '/es/industries/energy'),
('/int/es/industries/energy/offshore-wind/',
'/es/industries/energy/offshore-wind'),
('/int/es/industries/energy/electrical-networks/',
'/es/industries/energy/electrical-networks'),
('/int/es/industries/energy/energy-from-waste/',
'/es/industries/energy/energy-from-waste'),
('/int/es/industries/energy/oil-and-gas/',
'/es/industries/energy/oil-and-gas'),
('/int/es/industries/energy/nuclear/',
'/es/industries/energy/nuclear'),
('/int/es/industries/health-and-life/',
'/es/industries/health-and-life'),
('/int/es/industries/health-and-life/medical-technology/',
'/es/industries/health-and-life/medical-technology'),
(
'/int/es/industries/health-and-life/pharmaceutical-manufacturing/', # noqa
'/es/industries/health-and-life/pharmaceutical-manufacturing'
),
('/int/es/industries/creative/', '/es/industries/creative-industries'),
('/int/es/industries/creative/content-and-production/',
'/es/industries/creative/content-and-production'),
('/int/es/industries/creative/digital-media/',
'/es/industries/creative/digital-media'),
('/int/es/industries/financial-services/',
'/es/industries/financial-services'),
('/int/es/industries/financial-services/asset-management/',
'/es/industries/financial-services/asset-management'),
(
'/int/es/industries/financial-services/financial-technology/', # noqa
'/es/industries/financial-services/financial-technology'
),
('/int/es/industries/technology/',
'/es/industries/technology'),
('/int/es/industries/technology/data-analytics/',
'/es/industries/technology/data-analytics'),
('/int/es/terms-and-conditions/',
'/es/terms-and-conditions'),
('/int/es/privacy-policy/', '/es/privacy-policy'),
('/int/es/feedback/', '/es/feedback'),
('/int/es/enquiries/confirmation/',
'/es/enquiries/confirmation'),
('/int/es/enquiries/error/', '/es/enquiries/error'),
('/int/es/contact/', '/es/contact'),
('/int/es/location-guide/', '/es/location-guide'),
('/int/es/location-guide/confirmation/',
'/es/location-guide/confirmation'),
# fr
('/int/fr/', '/fr'),
('/int/fr/setup-guide/', '/fr/setup-guide'),
('/int/fr/setup-guide/establish-address/',
'/fr/setup-guide/establish-address'),
('/int/fr/setup-guide/apply-for-visa/',
'/fr/setup-guide/apply-for-visa'),
('/int/fr/setup-guide/open-a-business-account/',
'/fr/setup-guide/open-a-business-account'),
('/int/fr/setup-guide/how-to-setup/',
'/fr/setup-guide/how-to-setup'),
('/int/fr/setup-guide/understand-tax/',
'/fr/setup-guide/understand-tax'),
('/int/fr/setup-guide/access-talent/',
'/fr/setup-guide/access-talent'),
('/int/fr/setup-guide/understand-legal/',
'/fr/setup-guide/understand-legal'),
('/int/fr/industries/', '/fr/industries'),
('/int/fr/industries/aerospace/',
'/fr/industries/aerospace'),
('/int/fr/industries/advanced-manufacturing/',
'/fr/industries/advanced-manufacturing'),
('/int/fr/industries/food-and-drink-manufacturing/',
'/fr/industries/food-and-drink-manufacturing'),
('/int/fr/industries/food-and-drink-manufacturing/freefrom/',
'/fr/industries/food-and-drink-manufacturing/freefrom'),
('/int/fr/industries/retail/', '/fr/industries/retail'),
('/int/fr/industries/automotive/',
'/fr/industries/automotive'),
('/int/fr/industries/automotive/motorsport/',
'/fr/industries/automotive/motorsport'),
('/int/fr/industries/automotive/research-and-development/',
'/fr/industries/automotive/research-and-development'),
('/int/fr/industries/automotive/supply-chain/',
'/fr/industries/automotive-supply-chain'),
('/int/fr/industries/energy/', '/fr/industries/energy'),
('/int/fr/industries/energy/offshore-wind/',
'/fr/industries/energy/offshore-wind'),
('/int/fr/industries/energy/electrical-networks/',
'/fr/industries/energy/electrical-networks'),
('/int/fr/industries/energy/energy-from-waste/',
'/fr/industries/energy/energy-from-waste'),
('/int/fr/industries/energy/oil-and-gas/',
'/fr/industries/energy/oil-and-gas'),
('/int/fr/industries/energy/nuclear/',
'/fr/industries/energy/nuclear'),
('/int/fr/industries/health-and-life/',
'/fr/industries/health-and-life'),
('/int/fr/industries/health-and-life/medical-technology/',
'/fr/industries/health-and-life/medical-technology'),
(
'/int/fr/industries/health-and-life/pharmaceutical-manufacturing/', # noqa
'/fr/industries/health-and-life/pharmaceutical-manufacturing'
),
('/int/fr/industries/creative/', '/fr/industries/creative-industries'),
('/int/fr/industries/creative/content-and-production/',
'/fr/industries/creative/content-and-production'),
('/int/fr/industries/creative/digital-media/',
'/fr/industries/creative/digital-media'),
('/int/fr/industries/financial-services/',
'/fr/industries/financial-services'),
('/int/fr/industries/financial-services/asset-management/',
'/fr/industries/financial-services/asset-management'),
(
'/int/fr/industries/financial-services/financial-technology/', # noqa
'/fr/industries/financial-services/financial-technology'
),
('/int/fr/industries/technology/',
'/fr/industries/technology'),
('/int/fr/industries/technology/data-analytics/',
'/fr/industries/technology/data-analytics'),
('/int/fr/terms-and-conditions/',
'/fr/terms-and-conditions'),
('/int/fr/privacy-policy/', '/fr/privacy-policy'),
('/int/fr/feedback/', '/fr/feedback'),
('/int/fr/enquiries/confirmation/',
'/fr/enquiries/confirmation'),
('/int/fr/enquiries/error/', '/fr/enquiries/error'),
('/int/fr/contact/', '/fr/contact'),
('/int/fr/location-guide/', '/fr/location-guide'),
('/int/fr/location-guide/confirmation/',
'/fr/location-guide/confirmation'),
# ja
('/int/ja/', '/ja'),
('/int/ja/setup-guide/', '/ja/setup-guide'),
('/int/ja/setup-guide/establish-address/',
'/ja/setup-guide/establish-address'),
('/int/ja/setup-guide/apply-for-visa/',
'/ja/setup-guide/apply-for-visa'),
('/int/ja/setup-guide/open-a-business-account/',
'/ja/setup-guide/open-a-business-account'),
('/int/ja/setup-guide/how-to-setup/',
'/ja/setup-guide/how-to-setup'),
('/int/ja/setup-guide/understand-tax/',
'/ja/setup-guide/understand-tax'),
('/int/ja/setup-guide/access-talent/',
'/ja/setup-guide/access-talent'),
('/int/ja/setup-guide/understand-legal/',
'/ja/setup-guide/understand-legal'),
('/int/ja/industries/', '/ja/industries'),
('/int/ja/industries/aerospace/',
'/ja/industries/aerospace'),
('/int/ja/industries/advanced-manufacturing/',
'/ja/industries/advanced-manufacturing'),
('/int/ja/industries/food-and-drink-manufacturing/',
'/ja/industries/food-and-drink-manufacturing'),
('/int/ja/industries/food-and-drink-manufacturing/freefrom/',
'/ja/industries/food-and-drink-manufacturing/freefrom'),
('/int/ja/industries/retail/', '/ja/industries/retail'),
('/int/ja/industries/automotive/',
'/ja/industries/automotive'),
('/int/ja/industries/automotive/motorsport/',
'/ja/industries/automotive/motorsport'),
('/int/ja/industries/automotive/research-and-development/',
'/ja/industries/automotive/research-and-development'),
('/int/ja/industries/automotive/supply-chain/',
'/ja/industries/automotive-supply-chain'),
('/int/ja/industries/energy/', '/ja/industries/energy'),
('/int/ja/industries/energy/offshore-wind/',
'/ja/industries/energy/offshore-wind'),
('/int/ja/industries/energy/electrical-networks/',
'/ja/industries/energy/electrical-networks'),
('/int/ja/industries/energy/energy-from-waste/',
'/ja/industries/energy/energy-from-waste'),
('/int/ja/industries/energy/oil-and-gas/',
'/ja/industries/energy/oil-and-gas'),
('/int/ja/industries/energy/nuclear/',
'/ja/industries/energy/nuclear'),
('/int/ja/industries/health-and-life/',
'/ja/industries/health-and-life'),
('/int/ja/industries/health-and-life/medical-technology/',
'/ja/industries/health-and-life/medical-technology'),
(
'/int/ja/industries/health-and-life/pharmaceutical-manufacturing/', # noqa
'/ja/industries/health-and-life/pharmaceutical-manufacturing'
),
('/int/ja/industries/creative/', '/ja/industries/creative-industries'),
('/int/ja/industries/creative/content-and-production/',
'/ja/industries/creative/content-and-production'),
('/int/ja/industries/creative/digital-media/',
'/ja/industries/creative/digital-media'),
('/int/ja/industries/financial-services/',
'/ja/industries/financial-services'),
('/int/ja/industries/financial-services/asset-management/',
'/ja/industries/financial-services/asset-management'),
(
'/int/ja/industries/financial-services/financial-technology/', # noqa
'/ja/industries/financial-services/financial-technology'
),
('/int/ja/industries/technology/',
'/ja/industries/technology'),
('/int/ja/industries/technology/data-analytics/',
'/ja/industries/technology/data-analytics'),
('/int/ja/terms-and-conditions/',
'/ja/terms-and-conditions'),
('/int/ja/privacy-policy/', '/ja/privacy-policy'),
('/int/ja/feedback/', '/ja/feedback'),
('/int/ja/enquiries/confirmation/',
'/ja/enquiries/confirmation'),
('/int/ja/enquiries/error/', '/ja/enquiries/error'),
('/int/ja/contact/', '/ja/contact'),
('/int/ja/location-guide/', '/ja/location-guide'),
('/int/ja/location-guide/confirmation/',
'/ja/location-guide/confirmation'),
# pt
('/int/pt/', '/pt'),
('/int/pt/setup-guide/', '/pt/setup-guide'),
('/int/pt/setup-guide/establish-address/',
'/pt/setup-guide/establish-address'),
('/int/pt/setup-guide/apply-for-visa/',
'/pt/setup-guide/apply-for-visa'),
('/int/pt/setup-guide/open-a-business-account/',
'/pt/setup-guide/open-a-business-account'),
('/int/pt/setup-guide/how-to-setup/',
'/pt/setup-guide/how-to-setup'),
('/int/pt/setup-guide/understand-tax/',
'/pt/setup-guide/understand-tax'),
('/int/pt/setup-guide/access-talent/',
'/pt/setup-guide/access-talent'),
('/int/pt/setup-guide/understand-legal/',
'/pt/setup-guide/understand-legal'),
('/int/pt/industries/', '/pt/industries'),
(
'/int/pt/industries/aerospace/', '/pt/industries/aerospace'),
('/int/pt/industries/advanced-manufacturing/',
'/pt/industries/advanced-manufacturing'),
('/int/pt/industries/food-and-drink-manufacturing/',
'/pt/industries/food-and-drink-manufacturing'),
('/int/pt/industries/food-and-drink-manufacturing/freefrom/',
'/pt/industries/food-and-drink-manufacturing/freefrom'),
('/int/pt/industries/retail/', '/pt/industries/retail'),
('/int/pt/industries/automotive/',
'/pt/industries/automotive'),
('/int/pt/industries/automotive/motorsport/',
'/pt/industries/automotive/motorsport'),
('/int/pt/industries/automotive/research-and-development/',
'/pt/industries/automotive/research-and-development'),
('/int/pt/industries/automotive/supply-chain/',
'/pt/industries/automotive-supply-chain'),
('/int/pt/industries/energy/', '/pt/industries/energy'),
('/int/pt/industries/energy/offshore-wind/',
'/pt/industries/energy/offshore-wind'),
('/int/pt/industries/energy/electrical-networks/',
'/pt/industries/energy/electrical-networks'),
('/int/pt/industries/energy/energy-from-waste/',
'/pt/industries/energy/energy-from-waste'),
('/int/pt/industries/energy/oil-and-gas/',
'/pt/industries/energy/oil-and-gas'),
('/int/pt/industries/energy/nuclear/',
'/pt/industries/energy/nuclear'),
('/int/pt/industries/health-and-life/',
'/pt/industries/health-and-life'),
('/int/pt/industries/health-and-life/medical-technology/',
'/pt/industries/health-and-life/medical-technology'),
(
'/int/pt/industries/health-and-life/pharmaceutical-manufacturing/', # noqa
'/pt/industries/health-and-life/pharmaceutical-manufacturing'
),
('/int/pt/industries/creative/', '/pt/industries/creative-industries'),
('/int/pt/industries/creative/content-and-production/',
'/pt/industries/creative/content-and-production'),
('/int/pt/industries/creative/digital-media/',
'/pt/industries/creative/digital-media'),
('/int/pt/industries/financial-services/',
'/pt/industries/financial-services'),
('/int/pt/industries/financial-services/asset-management/',
'/pt/industries/financial-services/asset-management'),
(
'/int/pt/industries/financial-services/financial-technology/', # noqa
'/pt/industries/financial-services/financial-technology'
),
('/int/pt/industries/technology/',
'/pt/industries/technology'),
('/int/pt/industries/technology/data-analytics/',
'/pt/industries/technology/data-analytics'),
(
'/int/pt/terms-and-conditions/', '/pt/terms-and-conditions'),
('/int/pt/privacy-policy/', '/pt/privacy-policy'),
('/int/pt/feedback/', '/pt/feedback'),
('/int/pt/enquiries/confirmation/',
'/pt/enquiries/confirmation'),
('/int/pt/enquiries/error/', '/pt/enquiries/error'),
('/int/pt/contact/', '/pt/contact'),
('/int/pt/location-guide/', '/pt/location-guide'),
('/int/pt/location-guide/confirmation/',
'/pt/location-guide/confirmation'),
# zh
('/int/zh/', '/zh-hans'),
('/int/zh/setup-guide/', '/zh-hans/setup-guide'),
('/int/zh/setup-guide/establish-address/',
'/zh-hans/setup-guide/establish-address'),
('/int/zh/setup-guide/apply-for-visa/',
'/zh-hans/setup-guide/apply-for-visa'),
('/int/zh/setup-guide/open-a-business-account/',
'/zh-hans/setup-guide/open-a-business-account'),
('/int/zh/setup-guide/how-to-setup/',
'/zh-hans/setup-guide/how-to-setup'),
('/int/zh/setup-guide/understand-tax/',
'/zh-hans/setup-guide/understand-tax'),
('/int/zh/setup-guide/access-talent/',
'/zh-hans/setup-guide/access-talent'),
('/int/zh/setup-guide/understand-legal/',
'/zh-hans/setup-guide/understand-legal'),
('/int/zh/industries/', '/zh-hans/industries'),
('/int/zh/industries/aerospace/',
'/zh-hans/industries/aerospace'),
('/int/zh/industries/advanced-manufacturing/',
'/zh-hans/industries/advanced-manufacturing'),
('/int/zh/industries/food-and-drink-manufacturing/',
'/zh-hans/industries/food-and-drink-manufacturing'),
('/int/zh/industries/food-and-drink-manufacturing/freefrom/',
'/zh-hans/industries/food-and-drink-manufacturing/freefrom'),
('/int/zh/industries/retail/', '/zh-hans/industries/retail'),
('/int/zh/industries/automotive/',
'/zh-hans/industries/automotive'),
('/int/zh/industries/automotive/motorsport/',
'/zh-hans/industries/automotive/motorsport'),
('/int/zh/industries/automotive/research-and-development/',
'/zh-hans/industries/automotive/research-and-development'),
('/int/zh/industries/automotive/supply-chain/',
'/zh-hans/industries/automotive-supply-chain'),
('/int/zh/industries/energy/', '/zh-hans/industries/energy'),
('/int/zh/industries/energy/offshore-wind/',
'/zh-hans/industries/energy/offshore-wind'),
('/int/zh/industries/energy/energy-from-waste/',
'/zh-hans/industries/energy/energy-from-waste'),
('/int/zh/industries/energy/nuclear/',
'/zh-hans/industries/energy/nuclear'),
('/int/zh/industries/energy/oil-and-gas/',
'/zh-hans/industries/energy/oil-and-gas'),
('/int/zh/industries/energy/electrical-networks/',
'/zh-hans/industries/energy/electrical-networks'),
('/int/zh/industries/health-and-life/',
'/zh-hans/industries/health-and-life'),
('/int/zh/industries/health-and-life/medical-technology/',
'/zh-hans/industries/health-and-life/medical-technology'),
(
'/int/zh/industries/health-and-life/pharmaceutical-manufacturing/', # noqa
'/zh-hans/industries/health-and-life/pharmaceutical-manufacturing'
),
('/int/zh/industries/creative/',
'/zh-hans/industries/creative-industries'),
('/int/zh/industries/creative/content-and-production/',
'/zh-hans/industries/creative/content-and-production'),
('/int/zh/industries/creative/digital-media/',
'/zh-hans/industries/creative/digital-media'),
('/int/zh/industries/financial-services/',
'/zh-hans/industries/financial-services'),
('/int/zh/industries/financial-services/asset-management/',
'/zh-hans/industries/financial-services/asset-management'),
(
'/int/zh/industries/financial-services/financial-technology/', # noqa
'/zh-hans/industries/financial-services/financial-technology'
),
('/int/zh/industries/technology/',
'/zh-hans/industries/technology'),
('/int/zh/industries/technology/data-analytics/',
'/zh-hans/industries/technology/data-analytics'),
('/int/zh/terms-and-conditions/',
'/zh-hans/terms-and-conditions'),
('/int/zh/privacy-policy/', '/zh-hans/privacy-policy'),
('/int/zh/feedback/', '/zh-hans/feedback'),
('/int/zh/enquiries/confirmation/',
'/zh-hans/enquiries/confirmation'),
('/int/zh/enquiries/error/', '/zh-hans/enquiries/error'),
('/int/zh/contact/', '/zh-hans/contact'),
('/int/zh/location-guide/', '/zh-hans/location-guide'),
('/int/zh/location-guide/confirmation/',
'/zh-hans/location-guide/confirmation'),
# cn
('/cn/', '/zh-hans'),
('/cn/setup-guide/', '/zh-hans/setup-guide'),
('/cn/setup-guide/establish-address/',
'/zh-hans/setup-guide/establish-address'),
('/cn/setup-guide/apply-for-visa/',
'/zh-hans/setup-guide/apply-for-visa'),
('/cn/setup-guide/open-a-business-account/',
'/zh-hans/setup-guide/open-a-business-account'),
('/cn/setup-guide/how-to-setup/',
'/zh-hans/setup-guide/how-to-setup'),
('/cn/setup-guide/understand-tax/',
'/zh-hans/setup-guide/understand-tax'),
('/cn/setup-guide/access-talent/',
'/zh-hans/setup-guide/access-talent'),
('/cn/setup-guide/understand-legal/',
'/zh-hans/setup-guide/understand-legal'),
('/cn/industries/', '/zh-hans/industries'),
('/cn/industries/aerospace/', '/zh-hans/industries/aerospace'),
('/cn/industries/advanced-manufacturing/',
'/zh-hans/industries/advanced-manufacturing'),
('/cn/industries/food-and-drink-manufacturing/',
'/zh-hans/industries/food-and-drink-manufacturing'),
('/cn/industries/food-and-drink-manufacturing/freefrom/',
'/zh-hans/industries/food-and-drink-manufacturing/freefrom'),
('/cn/industries/retail/', '/zh-hans/industries/retail'),
('/cn/industries/automotive/',
'/zh-hans/industries/automotive'),
('/cn/industries/automotive/motorsport/',
'/zh-hans/industries/automotive/motorsport'),
('/cn/industries/automotive/research-and-development/',
'/zh-hans/industries/automotive/research-and-development'),
('/cn/industries/automotive/supply-chain/',
'/zh-hans/industries/automotive-supply-chain'),
('/cn/industries/energy/', '/zh-hans/industries/energy'),
('/cn/industries/energy/offshore-wind/',
'/zh-hans/industries/energy/offshore-wind'),
('/cn/industries/energy/energy-from-waste/',
'/zh-hans/industries/energy/energy-from-waste'),
('/cn/industries/energy/nuclear/',
'/zh-hans/industries/energy/nuclear'),
('/cn/industries/energy/oil-and-gas/',
'/zh-hans/industries/energy/oil-and-gas'),
('/cn/industries/energy/electrical-networks/',
'/zh-hans/industries/energy/electrical-networks'),
('/cn/industries/health-and-life/',
'/zh-hans/industries/health-and-life'),
('/cn/industries/health-and-life/medical-technology/',
'/zh-hans/industries/health-and-life/medical-technology'),
(
'/cn/industries/health-and-life/pharmaceutical-manufacturing/', # noqa
'/zh-hans/industries/health-and-life/pharmaceutical-manufacturing'
),
('/cn/industries/creative/',
'/zh-hans/industries/creative-industries'),
('/cn/industries/creative/content-and-production/',
'/zh-hans/industries/creative/content-and-production'),
('/cn/industries/creative/digital-media/',
'/zh-hans/industries/creative/digital-media'),
('/cn/industries/financial-services/',
'/zh-hans/industries/financial-services'),
('/cn/industries/financial-services/asset-management/',
'/zh-hans/industries/financial-services/asset-management'),
('/cn/industries/financial-services/financial-technology/',
'/zh-hans/industries/financial-services/financial-technology'),
('/cn/industries/technology/',
'/zh-hans/industries/technology'),
('/cn/industries/technology/data-analytics/',
'/zh-hans/industries/technology/data-analytics'),
('/cn/terms-and-conditions/', '/zh-hans/terms-and-conditions'),
('/cn/privacy-policy/', '/zh-hans/privacy-policy'),
('/cn/feedback/', '/zh-hans/feedback'),
('/cn/enquiries/confirmation/',
'/zh-hans/enquiries/confirmation'),
('/cn/enquiries/error/', '/zh-hans/enquiries/error'),
('/cn/contact/', '/zh-hans/contact'),
('/cn/location-guide/', '/zh-hans/location-guide'),
('/cn/location-guide/confirmation/',
'/zh-hans/location-guide/confirmation'),
# br
('/br/', '/pt'),
('/br/setup-guide/', '/pt/setup-guide'),
('/br/setup-guide/establish-address/',
'/pt/setup-guide/establish-address'),
('/br/setup-guide/apply-for-visa/',
'/pt/setup-guide/apply-for-visa'),
('/br/setup-guide/open-a-business-account/',
'/pt/setup-guide/open-a-business-account'),
('/br/setup-guide/how-to-setup/',
'/pt/setup-guide/how-to-setup'),
('/br/setup-guide/understand-tax/',
'/pt/setup-guide/understand-tax'),
('/br/setup-guide/access-talent/',
'/pt/setup-guide/access-talent'),
('/br/setup-guide/understand-legal/',
'/pt/setup-guide/understand-legal'),
('/br/industries/', '/pt/industries'),
('/br/industries/aerospace/', '/pt/industries/aerospace'),
('/br/industries/advanced-manufacturing/',
'/pt/industries/advanced-manufacturing'),
('/br/industries/food-and-drink-manufacturing/',
'/pt/industries/food-and-drink-manufacturing'),
('/br/industries/food-and-drink-manufacturing/freefrom/',
'/pt/industries/food-and-drink-manufacturing/freefrom'),
('/br/industries/retail/', '/pt/industries/retail'),
('/br/industries/automotive/', '/pt/industries/automotive'),
('/br/industries/automotive/motorsport/',
'/pt/industries/automotive/motorsport'),
('/br/industries/automotive/research-and-development/',
'/pt/industries/automotive/research-and-development'),
('/br/industries/automotive/supply-chain/',
'/pt/industries/automotive-supply-chain'),
('/br/industries/energy/', '/pt/industries/energy'),
('/br/industries/energy/offshore-wind/',
'/pt/industries/energy/offshore-wind'),
('/br/industries/energy/electrical-networks/',
'/pt/industries/energy/electrical-networks'),
('/br/industries/energy/energy-from-waste/',
'/pt/industries/energy/energy-from-waste'),
('/br/industries/energy/oil-and-gas/',
'/pt/industries/energy/oil-and-gas'),
('/br/industries/energy/nuclear/',
'/pt/industries/energy/nuclear'),
('/br/industries/health-and-life/',
'/pt/industries/health-and-life'),
('/br/industries/health-and-life/medical-technology/',
'/pt/industries/health-and-life/medical-technology'),
(
'/br/industries/health-and-life/pharmaceutical-manufacturing/', # noqa
'/pt/industries/health-and-life/pharmaceutical-manufacturing'
),
('/br/industries/creative/', '/pt/industries/creative-industries'),
('/br/industries/creative/content-and-production/',
'/pt/industries/creative/content-and-production'),
('/br/industries/creative/digital-media/',
'/pt/industries/creative/digital-media'),
('/br/industries/financial-services/',
'/pt/industries/financial-services'),
('/br/industries/financial-services/asset-management/',
'/pt/industries/financial-services/asset-management'),
('/br/industries/financial-services/financial-technology/',
'/pt/industries/financial-services/financial-technology'),
('/br/industries/technology/', '/pt/industries/technology'),
('/br/industries/technology/data-analytics/',
'/pt/industries/technology/data-analytics'),
('/br/terms-and-conditions/', '/pt/terms-and-conditions'),
('/br/privacy-policy/', '/pt/privacy-policy'),
('/br/feedback/', '/pt/feedback'),
('/br/enquiries/confirmation/',
'/pt/enquiries/confirmation'),
('/br/enquiries/error/', '/pt/enquiries/error'),
('/br/contact/', '/pt/contact'),
('/br/location-guide/', '/pt/location-guide'),
('/br/location-guide/confirmation/',
'/pt/location-guide/confirmation'),
# in
('/in/', '/'),
('/in/setup-guide/', '/setup-guide'),
('/in/setup-guide/establish-address/',
'/setup-guide/establish-address'),
('/in/setup-guide/apply-for-visa/',
'/setup-guide/apply-for-visa'),
('/in/setup-guide/open-a-business-account/',
'/setup-guide/open-a-business-account'),
('/in/setup-guide/how-to-setup/',
'/setup-guide/how-to-setup'),
('/in/setup-guide/understand-tax/',
'/setup-guide/understand-tax'),
('/in/setup-guide/access-talent/',
'/setup-guide/access-talent'),
('/in/setup-guide/understand-legal/',
'/setup-guide/understand-legal'),
('/in/industries/', '/industries'),
('/in/industries/aerospace/', '/industries/aerospace'),
('/in/industries/advanced-manufacturing/',
'/industries/advanced-manufacturing'),
('/in/industries/food-and-drink-manufacturing/',
'/industries/food-and-drink-manufacturing'),
('/in/industries/food-and-drink-manufacturing/freefrom/',
'/industries/food-and-drink-manufacturing/freefrom'),
('/in/industries/retail/', '/industries/retail'),
('/in/industries/automotive/', '/industries/automotive'),
('/in/industries/automotive/motorsport/',
'/industries/automotive/motorsport'),
('/in/industries/automotive/research-and-development/',
'/industries/automotive/research-and-development'),
('/in/industries/automotive/supply-chain/',
'/industries/automotive-supply-chain'),
('/in/industries/energy/', '/industries/energy'),
('/in/industries/energy/offshore-wind/',
'/industries/energy/offshore-wind'),
('/in/industries/energy/electrical-networks/',
'/industries/energy/electrical-networks'),
('/in/industries/energy/energy-from-waste/',
'/industries/energy/energy-from-waste'),
('/in/industries/energy/oil-and-gas/',
'/industries/energy/oil-and-gas'),
('/in/industries/energy/nuclear/',
'/industries/energy/nuclear'),
('/in/industries/health-and-life/',
'/industries/health-and-life'),
('/in/industries/health-and-life/medical-technology/',
'/industries/health-and-life/medical-technology'),
(
'/in/industries/health-and-life/pharmaceutical-manufacturing/', # noqa
'/industries/health-and-life/pharmaceutical-manufacturing'
),
('/in/industries/creative/', '/industries/creative-industries'),
('/in/industries/creative/content-and-production/',
'/industries/creative/content-and-production'),
('/in/industries/creative/digital-media/',
'/industries/creative/digital-media'),
('/in/industries/financial-services/',
'/industries/financial-services'),
('/in/industries/financial-services/asset-management/',
'/industries/financial-services/asset-management'),
('/in/industries/financial-services/financial-technology/',
'/industries/financial-services/financial-technology'),
('/in/industries/technology/', '/industries/technology'),
('/in/industries/technology/data-analytics/',
'/industries/technology/data-analytics'),
('/in/terms-and-conditions/', '/terms-and-conditions'),
('/in/privacy-policy/', '/privacy-policy'),
('/in/feedback/', '/feedback'),
('/in/enquiries/confirmation/', '/enquiries/confirmation'),
('/in/enquiries/error/', '/enquiries/error'),
('/in/contact/', '/contact'),
('/in/location-guide/', '/location-guide'),
('/in/location-guide/confirmation/',
'/location-guide/confirmation'),
# us
('/us/', '/'),
('/us/setup-guide/', '/setup-guide'),
('/us/setup-guide/establish-address/',
'/setup-guide/establish-address'),
('/us/setup-guide/apply-for-visa/',
'/setup-guide/apply-for-visa'),
('/us/setup-guide/open-a-business-account/',
'/setup-guide/open-a-business-account'),
('/us/setup-guide/how-to-setup/',
'/setup-guide/how-to-setup'),
('/us/setup-guide/understand-tax/',
'/setup-guide/understand-tax'),
('/us/setup-guide/access-talent/',
'/setup-guide/access-talent'),
('/us/setup-guide/understand-legal/',
'/setup-guide/understand-legal'),
('/us/industries/', '/industries'),
('/us/industries/aerospace/', '/industries/aerospace'),
('/us/industries/advanced-manufacturing/',
'/industries/advanced-manufacturing'),
('/us/industries/food-and-drink-manufacturing/',
'/industries/food-and-drink-manufacturing'),
('/us/industries/food-and-drink-manufacturing/freefrom/',
'/industries/food-and-drink-manufacturing/freefrom'),
('/us/industries/retail/', '/industries/retail'),
('/us/industries/automotive/', '/industries/automotive'),
('/us/industries/automotive/motorsport/',
'/industries/automotive/motorsport'),
('/us/industries/automotive/research-and-development/',
'/industries/automotive/research-and-development'),
('/us/industries/automotive/supply-chain/',
'/industries/automotive-supply-chain'),
('/us/industries/energy/', '/industries/energy'),
('/us/industries/energy/offshore-wind/',
'/industries/energy/offshore-wind'),
('/us/industries/energy/electrical-networks/',
'/industries/energy/electrical-networks'),
('/us/industries/energy/energy-from-waste/',
'/industries/energy/energy-from-waste'),
('/us/industries/energy/oil-and-gas/',
'/industries/energy/oil-and-gas'),
('/us/industries/energy/nuclear/',
'/industries/energy/nuclear'),
('/us/industries/health-and-life/',
'/industries/health-and-life'),
('/us/industries/health-and-life/medical-technology/',
'/industries/health-and-life/medical-technology'),
(
'/us/industries/health-and-life/pharmaceutical-manufacturing/', # noqa
'/industries/health-and-life/pharmaceutical-manufacturing'
),
('/us/industries/creative/', '/industries/creative-industries'),
('/us/industries/creative/content-and-production/',
'/industries/creative/content-and-production'),
('/us/industries/creative/digital-media/',
'/industries/creative/digital-media'),
('/us/industries/financial-services/',
'/industries/financial-services'),
('/us/industries/financial-services/asset-management/',
'/industries/financial-services/asset-management'),
('/us/industries/financial-services/financial-technology/',
'/industries/financial-services/financial-technology'),
('/us/industries/technology/', '/industries/technology'),
('/us/industries/technology/data-analytics/',
'/industries/technology/data-analytics'),
('/us/terms-and-conditions/', '/terms-and-conditions'),
('/us/privacy-policy/', '/privacy-policy'),
('/us/feedback/', '/feedback'),
('/us/enquiries/confirmation/', '/enquiries/confirmation'),
('/us/enquiries/error/', '/enquiries/error'),
('/us/contact/', '/contact'),
('/us/location-guide/', '/location-guide'),
('/us/location-guide/confirmation/',
'/location-guide/confirmation'),
# jp
('/jp/', '/ja'),
('/jp/setup-guide/', '/ja/setup-guide'),
('/jp/setup-guide/establish-address/',
'/ja/setup-guide/establish-address'),
('/jp/setup-guide/apply-for-visa/',
'/ja/setup-guide/apply-for-visa'),
('/jp/setup-guide/open-a-business-account/',
'/ja/setup-guide/open-a-business-account'),
('/jp/setup-guide/how-to-setup/',
'/ja/setup-guide/how-to-setup'),
('/jp/setup-guide/understand-tax/',
'/ja/setup-guide/understand-tax'),
('/jp/setup-guide/access-talent/',
'/ja/setup-guide/access-talent'),
('/jp/setup-guide/understand-legal/',
'/ja/setup-guide/understand-legal'),
('/jp/industries/', '/ja/industries'),
('/jp/industries/aerospace/', '/ja/industries/aerospace'),
('/jp/industries/advanced-manufacturing/',
'/ja/industries/advanced-manufacturing'),
('/jp/industries/food-and-drink-manufacturing/',
'/ja/industries/food-and-drink-manufacturing'),
('/jp/industries/food-and-drink-manufacturing/freefrom/',
'/ja/industries/food-and-drink-manufacturing/freefrom'),
('/jp/industries/retail/', '/ja/industries/retail'),
('/jp/industries/automotive/', '/ja/industries/automotive'),
('/jp/industries/automotive/motorsport/',
'/ja/industries/automotive/motorsport'),
('/jp/industries/automotive/research-and-development/',
'/ja/industries/automotive/research-and-development'),
('/jp/industries/automotive/supply-chain/',
'/ja/industries/automotive-supply-chain'),
('/jp/industries/energy/', '/ja/industries/energy'),
('/jp/industries/energy/offshore-wind/',
'/ja/industries/energy/offshore-wind'),
('/jp/industries/energy/electrical-networks/',
'/ja/industries/energy/electrical-networks'),
('/jp/industries/energy/energy-from-waste/',
'/ja/industries/energy/energy-from-waste'),
('/jp/industries/energy/oil-and-gas/',
'/ja/industries/energy/oil-and-gas'),
('/jp/industries/energy/nuclear/',
'/ja/industries/energy/nuclear'),
('/jp/industries/health-and-life/',
'/ja/industries/health-and-life'),
('/jp/industries/health-and-life/medical-technology/',
'/ja/industries/health-and-life/medical-technology'),
(
'/jp/industries/health-and-life/pharmaceutical-manufacturing/', # noqa
'/ja/industries/health-and-life/pharmaceutical-manufacturing'
),
('/jp/industries/creative/', '/ja/industries/creative-industries'),
('/jp/industries/creative/content-and-production/',
'/ja/industries/creative/content-and-production'),
('/jp/industries/creative/digital-media/',
'/ja/industries/creative/digital-media'),
('/jp/industries/financial-services/',
'/ja/industries/financial-services'),
('/jp/industries/financial-services/asset-management/',
'/ja/industries/financial-services/asset-management'),
('/jp/industries/financial-services/financial-technology/',
'/ja/industries/financial-services/financial-technology'),
('/jp/industries/technology/', '/ja/industries/technology'),
('/jp/industries/technology/data-analytics/',
'/ja/industries/technology/data-analytics'),
('/jp/terms-and-conditions/', '/ja/terms-and-conditions'),
('/jp/privacy-policy/', '/ja/privacy-policy'),
('/jp/feedback/', '/ja/feedback'),
('/jp/enquiries/confirmation/',
'/ja/enquiries/confirmation'),
('/jp/enquiries/error/', '/ja/enquiries/error'),
('/jp/contact/', '/ja/contact'),
('/jp/location-guide/', '/ja/location-guide'),
('/jp/location-guide/confirmation/',
'/ja/location-guide/confirmation'),
# int
('/int/', '/'),
('/int/setup-guide/', '/setup-guide'),
('/int/setup-guide/establish-address/',
'/setup-guide/establish-address'),
('/int/setup-guide/apply-for-visa/',
'/setup-guide/apply-for-visa'),
('/int/setup-guide/open-a-business-account/',
'/setup-guide/open-a-business-account'),
('/int/setup-guide/how-to-setup/',
'/setup-guide/how-to-setup'),
('/int/setup-guide/understand-tax/',
'/setup-guide/understand-tax'),
('/int/setup-guide/access-talent/',
'/setup-guide/access-talent'),
('/int/setup-guide/understand-legal/',
'/setup-guide/understand-legal'),
('/int/industries/', '/industries'),
('/int/industries/aerospace/', '/industries/aerospace'),
('/int/industries/advanced-manufacturing/',
'/industries/advanced-manufacturing'),
('/int/industries/food-and-drink-manufacturing/',
'/industries/food-and-drink-manufacturing'),
('/int/industries/food-and-drink-manufacturing/freefrom/',
'/industries/food-and-drink-manufacturing/freefrom'),
('/int/industries/retail/', '/industries/retail'),
('/int/industries/automotive/', '/industries/automotive'),
('/int/industries/automotive/motorsport/',
'/industries/automotive/motorsport'),
('/int/industries/automotive/research-and-development/',
'/industries/automotive/research-and-development'),
('/int/industries/automotive/supply-chain/',
'/industries/automotive-supply-chain'),
('/int/industries/energy/', '/industries/energy'),
('/int/industries/energy/offshore-wind/',
'/industries/energy/offshore-wind'),
('/int/industries/energy/electrical-networks/',
'/industries/energy/electrical-networks'),
('/int/industries/energy/energy-from-waste/',
'/industries/energy/energy-from-waste'),
('/int/industries/energy/oil-and-gas/',
'/industries/energy/oil-and-gas'),
('/int/industries/energy/nuclear/',
'/industries/energy/nuclear'),
('/int/industries/health-and-life/',
'/industries/health-and-life'),
('/int/industries/health-and-life/medical-technology/',
'/industries/health-and-life/medical-technology'),
(
'/int/industries/health-and-life/pharmaceutical-manufacturing/', # noqa
'/industries/health-and-life/pharmaceutical-manufacturing'
),
('/int/industries/creative/', '/industries/creative-industries'),
('/int/industries/creative/content-and-production/',
'/industries/creative/content-and-production'),
('/int/industries/creative/digital-media/',
'/industries/creative/digital-media'),
('/int/industries/financial-services/',
'/industries/financial-services'),
('/int/industries/financial-services/asset-management/',
'/industries/financial-services/asset-management'),
('/int/industries/financial-services/financial-technology/',
'/industries/financial-services/financial-technology'),
('/int/industries/technology/', '/industries/technology'),
('/int/industries/technology/data-analytics/',
'/industries/technology/data-analytics'),
('/int/terms-and-conditions/', '/terms-and-conditions'),
('/int/privacy-policy/', '/privacy-policy'),
('/int/feedback/', '/feedback'),
('/int/enquiries/confirmation/', '/enquiries/confirmation'),
('/int/enquiries/error/', '/enquiries/error'),
('/int/contact/', '/contact'),
('/int/location-guide/', '/location-guide'),
('/int/location-guide/confirmation/',
'/location-guide/confirmation'),
# feedback form
('/feedback/',
'https://contact-us.export.great.gov.uk/directory/FeedbackForm/'),
('/de/feedback/',
'https://contact-us.export.great.gov.uk/directory/FeedbackForm/'),
('/ja/feedback/',
'https://contact-us.export.great.gov.uk/directory/FeedbackForm/'),
('/zh-hans/feedback/',
'https://contact-us.export.great.gov.uk/directory/FeedbackForm/'),
('/fr/feedback/',
'https://contact-us.export.great.gov.uk/directory/FeedbackForm/'),
('/es/feedback/',
'https://contact-us.export.great.gov.uk/directory/FeedbackForm/'),
('/pt/feedback/',
'https://contact-us.export.great.gov.uk/directory/FeedbackForm/'),
('/ar/feedback/',
'https://contact-us.export.great.gov.uk/directory/FeedbackForm/'),
# terms-and-conditions
('/terms-and-conditions/',
'https://www.great.gov.uk/terms-and-conditions/'),
('/de/terms-and-conditions/',
'https://www.great.gov.uk/terms-and-conditions/'),
('/ja/terms-and-conditions/',
'https://www.great.gov.uk/terms-and-conditions/'),
('/zh-hans/terms-and-conditions/',
'https://www.great.gov.uk/terms-and-conditions/'),
('/fr/terms-and-conditions/',
'https://www.great.gov.uk/terms-and-conditions/'),
('/es/terms-and-conditions/',
'https://www.great.gov.uk/terms-and-conditions/'),
('/pt/terms-and-conditions/',
'https://www.great.gov.uk/terms-and-conditions/'),
('/ar/terms-and-conditions/',
'https://www.great.gov.uk/terms-and-conditions/'),
# privacy and cookies
('/privacy-and-cookies/',
'https://www.great.gov.uk/privacy-and-cookies/'),
('/de/privacy-and-cookies/',
'https://www.great.gov.uk/privacy-and-cookies/'),
('/ja/privacy-and-cookies/',
'https://www.great.gov.uk/privacy-and-cookies/'),
('/zh-hans/privacy-and-cookies/',
'https://www.great.gov.uk/privacy-and-cookies/'),
('/fr/privacy-and-cookies/',
'https://www.great.gov.uk/privacy-and-cookies/'),
('/es/privacy-and-cookies/',
'https://www.great.gov.uk/privacy-and-cookies/'),
('/pt/privacy-and-cookies/',
'https://www.great.gov.uk/privacy-and-cookies/'),
('/ar/privacy-and-cookies/',
'https://www.great.gov.uk/privacy-and-cookies/'),
# Chinese from zh-cn to zh-hans
('/zh-cn/foo/', '/zh-hans/foo/'),
('/zh-cn/', '/zh-hans'),
('/zh-cn/foo/bar/', '/zh-hans/foo/bar/'),
('/zh-cn/hans/bar/', '/zh-hans/hans/bar/'),
# Fair processing notice urls
('/privacy-and-cookies/fair-processing-notice-zendesk/',
'https://great.gov.uk/privacy-and-cookies/fair-processing-notice-zendesk/'), # noqa
('/ar/privacy-and-cookies/fair-processing-notice-zendesk/',
'https://great.gov.uk/privacy-and-cookies/fair-processing-notice-zendesk/'), # noqa
('/es/privacy-and-cookies/fair-processing-notice-zendesk/',
'https://great.gov.uk/privacy-and-cookies/fair-processing-notice-zendesk/'), # noqa
('/fr/privacy-and-cookies/fair-processing-notice-zendesk/',
'https://great.gov.uk/privacy-and-cookies/fair-processing-notice-zendesk/'), # noqa
('/de/privacy-and-cookies/fair-processing-notice-zendesk/',
'https://great.gov.uk/privacy-and-cookies/fair-processing-notice-zendesk/'), # noqa
('/pt/privacy-and-cookies/fair-processing-notice-zendesk/',
'https://great.gov.uk/privacy-and-cookies/fair-processing-notice-zendesk/'), # noqa
('/zh-hans/privacy-and-cookies/fair-processing-notice-zendesk/',
'https://great.gov.uk/privacy-and-cookies/fair-processing-notice-zendesk/'), # noqa
('/ja/privacy-and-cookies/fair-processing-notice-zendesk/',
'https://great.gov.uk/privacy-and-cookies/fair-processing-notice-zendesk/'), # noqa
('/privacy-and-cookies/fair-processing-notice-for-smart-survey/',
'https://great.gov.uk/privacy-and-cookies/fair-processing-notice-for-smart-survey/'), # noqa
('/ar/privacy-and-cookies/fair-processing-notice-for-smart-survey/',
'https://great.gov.uk/privacy-and-cookies/fair-processing-notice-for-smart-survey/'), # noqa
('/es/privacy-and-cookies/fair-processing-notice-for-smart-survey/',
'https://great.gov.uk/privacy-and-cookies/fair-processing-notice-for-smart-survey/'), # noqa
('/fr/privacy-and-cookies/fair-processing-notice-for-smart-survey/',
'https://great.gov.uk/privacy-and-cookies/fair-processing-notice-for-smart-survey/'), # noqa
('/de/privacy-and-cookies/fair-processing-notice-for-smart-survey/',
'https://great.gov.uk/privacy-and-cookies/fair-processing-notice-for-smart-survey/'), # noqa
('/pt/privacy-and-cookies/fair-processing-notice-for-smart-survey/',
'https://great.gov.uk/privacy-and-cookies/fair-processing-notice-for-smart-survey/'), # noqa
('/zh-hans/privacy-and-cookies/fair-processing-notice-for-smart-survey/', # noqa
'https://great.gov.uk/privacy-and-cookies/fair-processing-notice-for-smart-survey/'), # noqa
('/ja/privacy-and-cookies/fair-processing-notice-for-smart-survey/',
'https://great.gov.uk/privacy-and-cookies/fair-processing-notice-for-smart-survey/'), # noqa
('/privacy-and-cookies/fair-processing-notice-trade-profiles-find-a-buyer-fab-find-a-supplier-fas/', # noqa
'https://great.gov.uk/privacy-and-cookies/fair-processing-notice-trade-profiles-find-a-buyer-fab-find-a-supplier-fas/'), # noqa
('/ar/privacy-and-cookies/fair-processing-notice-trade-profiles-find-a-buyer-fab-find-a-supplier-fas/', # noqa
'https://great.gov.uk/privacy-and-cookies/fair-processing-notice-trade-profiles-find-a-buyer-fab-find-a-supplier-fas/'), # noqa
('/es/privacy-and-cookies/fair-processing-notice-trade-profiles-find-a-buyer-fab-find-a-supplier-fas/', # noqa
'https://great.gov.uk/privacy-and-cookies/fair-processing-notice-trade-profiles-find-a-buyer-fab-find-a-supplier-fas/'), # noqa
('/fr/privacy-and-cookies/fair-processing-notice-trade-profiles-find-a-buyer-fab-find-a-supplier-fas/', # noqa
'https://great.gov.uk/privacy-and-cookies/fair-processing-notice-trade-profiles-find-a-buyer-fab-find-a-supplier-fas/'), # noqa
('/de/privacy-and-cookies/fair-processing-notice-trade-profiles-find-a-buyer-fab-find-a-supplier-fas/', # noqa
'https://great.gov.uk/privacy-and-cookies/fair-processing-notice-trade-profiles-find-a-buyer-fab-find-a-supplier-fas/'), # noqa
('/pt/privacy-and-cookies/fair-processing-notice-trade-profiles-find-a-buyer-fab-find-a-supplier-fas/', # noqa
'https://great.gov.uk/privacy-and-cookies/fair-processing-notice-trade-profiles-find-a-buyer-fab-find-a-supplier-fas/'), # noqa
('/zh-hans/privacy-and-cookies/fair-processing-notice-trade-profiles-find-a-buyer-fab-find-a-supplier-fas/', # noqa
'https://great.gov.uk/privacy-and-cookies/fair-processing-notice-trade-profiles-find-a-buyer-fab-find-a-supplier-fas/'), # noqa
('/ja/privacy-and-cookies/fair-processing-notice-trade-profiles-find-a-buyer-fab-find-a-supplier-fas/', # noqa
'https://great.gov.uk/privacy-and-cookies/fair-processing-notice-trade-profiles-find-a-buyer-fab-find-a-supplier-fas/'), # noqa
('/privacy-and-cookies/fair-processing-notice-export-opportunities/',
'https://great.gov.uk/privacy-and-cookies/fair-processing-notice-export-opportunities/'), # noqa
('/ar/privacy-and-cookies/fair-processing-notice-export-opportunities/', # noqa
'https://great.gov.uk/privacy-and-cookies/fair-processing-notice-export-opportunities/'), # noqa
('/es/privacy-and-cookies/fair-processing-notice-export-opportunities/', # noqa
'https://great.gov.uk/privacy-and-cookies/fair-processing-notice-export-opportunities/'), # noqa
('/fr/privacy-and-cookies/fair-processing-notice-export-opportunities/', # noqa
'https://great.gov.uk/privacy-and-cookies/fair-processing-notice-export-opportunities/'), # noqa
('/de/privacy-and-cookies/fair-processing-notice-export-opportunities/', # noqa
'https://great.gov.uk/privacy-and-cookies/fair-processing-notice-export-opportunities/'), # noqa
('/pt/privacy-and-cookies/fair-processing-notice-export-opportunities/', # noqa
'https://great.gov.uk/privacy-and-cookies/fair-processing-notice-export-opportunities/'), # noqa
('/zh-hans/privacy-and-cookies/fair-processing-notice-export-opportunities/', # noqa
'https://great.gov.uk/privacy-and-cookies/fair-processing-notice-export-opportunities/'), # noqa
('/ja/privacy-and-cookies/fair-processing-notice-export-opportunities/', # noqa
'https://great.gov.uk/privacy-and-cookies/fair-processing-notice-export-opportunities/'), # noqa
('/privacy-and-cookies/fair-processing-notice-selling-online-overseas/', # noqa
'https://great.gov.uk/privacy-and-cookies/fair-processing-notice-selling-online-overseas/'), # noqa
('/ar/privacy-and-cookies/fair-processing-notice-selling-online-overseas/', # noqa
'https://great.gov.uk/privacy-and-cookies/fair-processing-notice-selling-online-overseas/'), # noqa
('/es/privacy-and-cookies/fair-processing-notice-selling-online-overseas/', # noqa
'https://great.gov.uk/privacy-and-cookies/fair-processing-notice-selling-online-overseas/'), # noqa
('/fr/privacy-and-cookies/fair-processing-notice-selling-online-overseas/', # noqa
'https://great.gov.uk/privacy-and-cookies/fair-processing-notice-selling-online-overseas/'), # noqa
('/de/privacy-and-cookies/fair-processing-notice-selling-online-overseas/', # noqa
'https://great.gov.uk/privacy-and-cookies/fair-processing-notice-selling-online-overseas/'), # noqa
('/pt/privacy-and-cookies/fair-processing-notice-selling-online-overseas/', # noqa
'https://great.gov.uk/privacy-and-cookies/fair-processing-notice-selling-online-overseas/'), # noqa
('/zh-hans/privacy-and-cookies/fair-processing-notice-selling-online-overseas/', # noqa
'https://great.gov.uk/privacy-and-cookies/fair-processing-notice-selling-online-overseas/'), # noqa
('/ja/privacy-and-cookies/fair-processing-notice-selling-online-overseas/', # noqa
'https://great.gov.uk/privacy-and-cookies/fair-processing-notice-selling-online-overseas/'), # noqa
('/privacy-and-cookies/fair-processing-notice-invest-in-great-britain/', # noqa
'https://great.gov.uk/privacy-and-cookies/fair-processing-notice-invest-in-great-britain/'), # noqa
('/ar/privacy-and-cookies/fair-processing-notice-invest-in-great-britain/', # noqa
'https://great.gov.uk/privacy-and-cookies/fair-processing-notice-invest-in-great-britain/'), # noqa
('/es/privacy-and-cookies/fair-processing-notice-invest-in-great-britain/', # noqa
'https://great.gov.uk/privacy-and-cookies/fair-processing-notice-invest-in-great-britain/'), # noqa
('/fr/privacy-and-cookies/fair-processing-notice-invest-in-great-britain/', # noqa
'https://great.gov.uk/privacy-and-cookies/fair-processing-notice-invest-in-great-britain/'), # noqa
('/de/privacy-and-cookies/fair-processing-notice-invest-in-great-britain/', # noqa
'https://great.gov.uk/privacy-and-cookies/fair-processing-notice-invest-in-great-britain/'), # noqa
('/pt/privacy-and-cookies/fair-processing-notice-invest-in-great-britain/', # noqa
'https://great.gov.uk/privacy-and-cookies/fair-processing-notice-invest-in-great-britain/'), # noqa
('/zh-hans/privacy-and-cookies/fair-processing-notice-invest-in-great-britain/', # noqa
'https://great.gov.uk/privacy-and-cookies/fair-processing-notice-invest-in-great-britain/'), # noqa
('/ja/privacy-and-cookies/fair-processing-notice-invest-in-great-britain/', # noqa
'https://great.gov.uk/privacy-and-cookies/fair-processing-notice-invest-in-great-britain/'), # noqa
('/privacy-and-cookies/fair-processing-notice-export-readiness/',
'https://great.gov.uk/privacy-and-cookies/fair-processing-notice-export-readiness/'), # noqa
('/ar/privacy-and-cookies/fair-processing-notice-export-readiness/',
'https://great.gov.uk/privacy-and-cookies/fair-processing-notice-export-readiness/'), # noqa
('/es/privacy-and-cookies/fair-processing-notice-export-readiness/',
'https://great.gov.uk/privacy-and-cookies/fair-processing-notice-export-readiness/'), # noqa
('/fr/privacy-and-cookies/fair-processing-notice-export-readiness/',
'https://great.gov.uk/privacy-and-cookies/fair-processing-notice-export-readiness/'), # noqa
('/de/privacy-and-cookies/fair-processing-notice-export-readiness/',
'https://great.gov.uk/privacy-and-cookies/fair-processing-notice-export-readiness/'), # noqa
('/pt/privacy-and-cookies/fair-processing-notice-export-readiness/',
'https://great.gov.uk/privacy-and-cookies/fair-processing-notice-export-readiness/'), # noqa
('/zh-hans/privacy-and-cookies/fair-processing-notice-export-readiness/', # noqa
'https://great.gov.uk/privacy-and-cookies/fair-processing-notice-export-readiness/'), # noqa
('/ja/privacy-and-cookies/fair-processing-notice-export-readiness/',
'https://great.gov.uk/privacy-and-cookies/fair-processing-notice-export-readiness/'), # noqa
(
'/industries/automotive/supply-chain/',
'/industries/automotive/automotive-supply-chain/'
),
(
'/industries/automotive/research-and-development/',
'/industries/automotive/automotive-research-and-development/'
),
(
'/industries/energy/offshore-wind/',
'/industries/energy/offshore-wind-energy/'
),
(
'/industries/food-and-drink-manufacturing/freefrom/',
'/industries/food-and-drink/free-foods/'
),
(
'/industries/food-and-drink-manufacturing/',
'/industries/food-and-drink/'
),
(
'/uk-setup-guide/',
'https://great.gov.uk/international/content/how-to-setup-in-the-uk' # noqa
),
(
'/uk-setup-guide/establish-base-business-uk/',
'https://great.gov.uk/international/content/how-to-setup-in-the-uk/establish-a-base-for-business-in-the-uk/' # noqa
),
(
'/uk-setup-guide/understand-uk-tax-and-incentives/',
'https://great.gov.uk/international/content/how-to-setup-in-the-uk/uk-tax-and-incentives/' # noqa
),
(
'/uk-setup-guide/hire-skilled-workers-your-uk-operations/',
'https://great.gov.uk/international/content/how-to-setup-in-the-uk/hire-skilled-workers-for-your-uk-operations/' # noqa
),
(
'/uk-setup-guide/open-uk-business-bank-account/',
'https://great.gov.uk/international/content/how-to-setup-in-the-uk/open-a-uk-business-bank-account/' # noqa
),
(
'/uk-setup-guide/setup-your-business-uk/',
'https://great.gov.uk/international/content/how-to-setup-in-the-uk/register-a-company-in-the-uk/' # noqa
),
(
'/uk-setup-guide/apply-uk-visa/',
'https://great.gov.uk/international/content/how-to-setup-in-the-uk/uk-visas-and-migration/' # noqa
),
(
'/de/uk-setup-guide/',
'https://great.gov.uk/international/content/how-to-setup-in-the-uk?lang=de' # noqa
),
(
'/de/uk-setup-guide/establish-base-business-uk/',
'https://great.gov.uk/international/content/how-to-setup-in-the-uk/establish-a-base-for-business-in-the-uk/?lang=de' # noqa
),
(
'/de/uk-setup-guide/understand-uk-tax-and-incentives/',
'https://great.gov.uk/international/content/how-to-setup-in-the-uk/uk-tax-and-incentives/?lang=de' # noqa
),
(
'/de/uk-setup-guide/hire-skilled-workers-your-uk-operations/',
'https://great.gov.uk/international/content/how-to-setup-in-the-uk/hire-skilled-workers-for-your-uk-operations/?lang=de' # noqa
),
(
'/de/uk-setup-guide/open-uk-business-bank-account/',
'https://great.gov.uk/international/content/how-to-setup-in-the-uk/open-a-uk-business-bank-account/?lang=de' # noqa
),
(
'/de/uk-setup-guide/setup-your-business-uk/',
'https://great.gov.uk/international/content/how-to-setup-in-the-uk/register-a-company-in-the-uk/?lang=de' # noqa
),
(
'/de/uk-setup-guide/apply-uk-visa/',
'https://great.gov.uk/international/content/how-to-setup-in-the-uk/uk-visas-and-migration/?lang=de' # noqa
),
]
)
def test_redirects(incoming_url, expected_url, client):
    """Each legacy URL must answer with a 302 pointing at its new location."""
    resp = client.get(incoming_url)
    assert resp.status_code == 302
    assert resp.url == expected_url
| 55.014391 | 140 | 0.62657 | 14,749 | 133,795 | 5.683368 | 0.010509 | 0.066246 | 0.03672 | 0.04445 | 0.982105 | 0.963817 | 0.913521 | 0.861865 | 0.793102 | 0.707435 | 0 | 0.000027 | 0.179588 | 133,795 | 2,431 | 141 | 55.037022 | 0.763627 | 0.007003 | 0 | 0.416492 | 0 | 0.043114 | 0.741044 | 0.649869 | 0 | 0 | 0 | 0 | 0.000837 | 1 | 0.000419 | false | 0 | 0.000419 | 0 | 0.000837 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
fc236b852539c3933b0f4655d46190eac27a941d | 199,802 | py | Python | alpharotate/utils/gaussian_wasserstein_distance.py | chisyliu/RotationDetection | 6f2bd55a51a6de0bcd0959a85977682511fd440d | [
"Apache-2.0"
] | 2 | 2022-03-05T09:55:49.000Z | 2022-03-05T10:12:51.000Z | alpharotate/utils/gaussian_wasserstein_distance.py | junhai0428/RotationDetection | 4249720ea4dacdd60e696901df8034e5cd0a1843 | [
"Apache-2.0"
] | null | null | null | alpharotate/utils/gaussian_wasserstein_distance.py | junhai0428/RotationDetection | 4249720ea4dacdd60e696901df8034e5cd0a1843 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Author: Xue Yang <yangxue-2019-sjtu@sjtu.edu.cn>, <yangxue0827@126.com>
# License: Apache-2.0 license
# Copyright (c) SJTU. ALL rights reserved.
import numpy as np
import tensorflow as tf
def get_element1_tf(w1, h1, a1, w2, h2, a2):
element1 = (w1 - w1 * tf.sin(a1) ** 2 + h1 * tf.sin(a1) ** 2) ** 2 / 4 + (w2 ** 2 * tf.cos(a2) ** 2) / 4 + (h2 ** 2 * tf.sin(a2) ** 2) / 4 + (tf.sin(
2 * a1) ** 2 * (h1 - w1) ** 2) / 16 - ((2 * w1 ** 2 * w2 ** 2 + 2 ** (1 / 2) * (
3 * w1 ** 4 * w2 ** 4 + 3 * h1 ** 4 * h2 ** 4 + 3 * h1 ** 4 * w2 ** 4 + 3 * h2 ** 4 * w1 ** 4 + 4 * h1 ** 4 * h2 ** 4 * tf.cos(
2 * a1 - 2 * a2) + h1 ** 4 * h2 ** 4 * tf.cos(4 * a1 - 4 * a2) - 4 * h1 ** 4 * w2 ** 4 * tf.cos(
2 * a1 - 2 * a2) - 4 * h2 ** 4 * w1 ** 4 * tf.cos(2 * a1 - 2 * a2) + h1 ** 4 * w2 ** 4 * tf.cos(
4 * a1 - 4 * a2) + h2 ** 4 * w1 ** 4 * tf.cos(4 * a1 - 4 * a2) + 4 * w1 ** 4 * w2 ** 4 * tf.cos(
2 * a1 - 2 * a2) + w1 ** 4 * w2 ** 4 * tf.cos(
4 * a1 - 4 * a2) + 2 * h1 ** 2 * h2 ** 4 * w1 ** 2 + 2 * h1 ** 4 * h2 ** 2 * w2 ** 2 + 2 * h1 ** 2 * w1 ** 2 * w2 ** 4 + 2 * h2 ** 2 * w1 ** 4 * w2 ** 2 - 2 * h1 ** 2 * h2 ** 4 * w1 ** 2 * tf.cos(
4 * a1 - 4 * a2) - 2 * h1 ** 4 * h2 ** 2 * w2 ** 2 * tf.cos(4 * a1 - 4 * a2) - 2 * h1 ** 2 * w1 ** 2 * w2 ** 4 * tf.cos(
4 * a1 - 4 * a2) - 2 * h2 ** 2 * w1 ** 4 * w2 ** 2 * tf.cos(
4 * a1 - 4 * a2) - 20 * h1 ** 2 * h2 ** 2 * w1 ** 2 * w2 ** 2 + 4 * h1 ** 2 * h2 ** 2 * w1 ** 2 * w2 ** 2 * tf.cos(
4 * a1 - 4 * a2)) ** (
1 / 2) + 2 * h1 ** 2 * h2 ** 2 + 2 * h1 ** 2 * w2 ** 2 + 2 * h2 ** 2 * w1 ** 2 + 2 * h1 ** 2 * h2 ** 2 * tf.cos(
2 * a1 - 2 * a2) - 2 * h1 ** 2 * w2 ** 2 * tf.cos(2 * a1 - 2 * a2) - 2 * h2 ** 2 * w1 ** 2 * tf.cos(
2 * a1 - 2 * a2) + 2 * w1 ** 2 * w2 ** 2 * tf.cos(2 * a1 - 2 * a2)) ** (1 / 2) * (
(h1 ** 2 * h2 ** 2 * tf.sin(2 * a1)) / 4 + (
h1 ** 2 * h2 ** 2 * tf.sin(2 * a2)) / 8 + (
h1 ** 2 * w2 ** 2 * tf.sin(2 * a1)) / 4 - (
h2 ** 2 * w1 ** 2 * tf.sin(2 * a1)) / 4 - (
h1 ** 2 * w2 ** 2 * tf.sin(2 * a2)) / 8 + (
h2 ** 2 * w1 ** 2 * tf.sin(2 * a2)) / 8 - (
w1 ** 2 * w2 ** 2 * tf.sin(2 * a1)) / 4 - (
w1 ** 2 * w2 ** 2 * tf.sin(2 * a2)) / 8 + (
h1 ** 2 * h2 ** 2 * tf.sin(4 * a1 - 2 * a2)) / 8 - (
h1 ** 2 * w2 ** 2 * tf.sin(4 * a1 - 2 * a2)) / 8 + (
h2 ** 2 * w1 ** 2 * tf.sin(4 * a1 - 2 * a2)) / 8 - (
w1 ** 2 * w2 ** 2 * tf.sin(4 * a1 - 2 * a2)) / 8 + (
h1 * h2 ** 2 * w1 * tf.sin(2 * a2)) / 4 - (
h1 * w1 * w2 ** 2 * tf.sin(2 * a2)) / 4 - (
h1 * h2 ** 2 * w1 * tf.sin(4 * a1 - 2 * a2)) / 4 + (
h1 * w1 * w2 ** 2 * tf.sin(4 * a1 - 2 * a2)) / 4) * (2 ** (1 / 2) * (
3 * w1 ** 4 * w2 ** 4 + 3 * h1 ** 4 * h2 ** 4 + 3 * h1 ** 4 * w2 ** 4 + 3 * h2 ** 4 * w1 ** 4 + 4 * h1 ** 4 * h2 ** 4 * tf.cos(
2 * a1 - 2 * a2) + h1 ** 4 * h2 ** 4 * tf.cos(4 * a1 - 4 * a2) - 4 * h1 ** 4 * w2 ** 4 * tf.cos(
2 * a1 - 2 * a2) - 4 * h2 ** 4 * w1 ** 4 * tf.cos(2 * a1 - 2 * a2) + h1 ** 4 * w2 ** 4 * tf.cos(
4 * a1 - 4 * a2) + h2 ** 4 * w1 ** 4 * tf.cos(4 * a1 - 4 * a2) + 4 * w1 ** 4 * w2 ** 4 * tf.cos(
2 * a1 - 2 * a2) + w1 ** 4 * w2 ** 4 * tf.cos(
4 * a1 - 4 * a2) + 2 * h1 ** 2 * h2 ** 4 * w1 ** 2 + 2 * h1 ** 4 * h2 ** 2 * w2 ** 2 + 2 * h1 ** 2 * w1 ** 2 * w2 ** 4 + 2 * h2 ** 2 * w1 ** 4 * w2 ** 2 - 2 * h1 ** 2 * h2 ** 4 * w1 ** 2 * tf.cos(
4 * a1 - 4 * a2) - 2 * h1 ** 4 * h2 ** 2 * w2 ** 2 * tf.cos(4 * a1 - 4 * a2) - 2 * h1 ** 2 * w1 ** 2 * w2 ** 4 * tf.cos(
4 * a1 - 4 * a2) - 2 * h2 ** 2 * w1 ** 4 * w2 ** 2 * tf.cos(
4 * a1 - 4 * a2) - 20 * h1 ** 2 * h2 ** 2 * w1 ** 2 * w2 ** 2 + 4 * h1 ** 2 * h2 ** 2 * w1 ** 2 * w2 ** 2 * tf.cos(
4 * a1 - 4 * a2)) ** (1 / 2) - 2 * h1 ** 2 * h2 ** 2 * tf.cos(2 * a1) - h1 ** 2 * h2 ** 2 * tf.cos(
2 * a2) - 2 * h1 ** 2 * w2 ** 2 * tf.cos(2 * a1) + 2 * h2 ** 2 * w1 ** 2 * tf.cos(2 * a1) + h1 ** 2 * w2 ** 2 * tf.cos(
2 * a2) - h2 ** 2 * w1 ** 2 * tf.cos(2 * a2) + 2 * w1 ** 2 * w2 ** 2 * tf.cos(2 * a1) + w1 ** 2 * w2 ** 2 * tf.cos(
2 * a2) - h1 ** 2 * h2 ** 2 * tf.cos(4 * a1 - 2 * a2) + h1 ** 2 * w2 ** 2 * tf.cos(
4 * a1 - 2 * a2) - h2 ** 2 * w1 ** 2 * tf.cos(4 * a1 - 2 * a2) + w1 ** 2 * w2 ** 2 * tf.cos(
4 * a1 - 2 * a2) - 2 * h1 * h2 ** 2 * w1 * tf.cos(2 * a2) + 2 * h1 * w1 * w2 ** 2 * tf.cos(
2 * a2) + 2 * h1 * h2 ** 2 * w1 * tf.cos(4 * a1 - 2 * a2) - 2 * h1 * w1 * w2 ** 2 * tf.cos(4 * a1 - 2 * a2))) / (2 * (
2 * h1 ** 2 * h2 ** 2 * tf.sin(2 * a1) + h1 ** 2 * h2 ** 2 * tf.sin(2 * a2) + 2 * h1 ** 2 * w2 ** 2 * tf.sin(
2 * a1) - 2 * h2 ** 2 * w1 ** 2 * tf.sin(2 * a1) - h1 ** 2 * w2 ** 2 * tf.sin(2 * a2) + h2 ** 2 * w1 ** 2 * tf.sin(
2 * a2) - 2 * w1 ** 2 * w2 ** 2 * tf.sin(2 * a1) - w1 ** 2 * w2 ** 2 * tf.sin(2 * a2) + h1 ** 2 * h2 ** 2 * tf.sin(
4 * a1 - 2 * a2) - h1 ** 2 * w2 ** 2 * tf.sin(4 * a1 - 2 * a2) + h2 ** 2 * w1 ** 2 * tf.sin(
4 * a1 - 2 * a2) - w1 ** 2 * w2 ** 2 * tf.sin(4 * a1 - 2 * a2) + 2 * h1 * h2 ** 2 * w1 * tf.sin(
2 * a2) - 2 * h1 * w1 * w2 ** 2 * tf.sin(2 * a2) - 2 * h1 * h2 ** 2 * w1 * tf.sin(
4 * a1 - 2 * a2) + 2 * h1 * w1 * w2 ** 2 * tf.sin(4 * a1 - 2 * a2)) * (
3 * w1 ** 4 * w2 ** 4 + 3 * h1 ** 4 * h2 ** 4 + 3 * h1 ** 4 * w2 ** 4 + 3 * h2 ** 4 * w1 ** 4 + 4 * h1 ** 4 * h2 ** 4 * tf.cos(
2 * a1 - 2 * a2) + h1 ** 4 * h2 ** 4 * tf.cos(
4 * a1 - 4 * a2) - 4 * h1 ** 4 * w2 ** 4 * tf.cos(
2 * a1 - 2 * a2) - 4 * h2 ** 4 * w1 ** 4 * tf.cos(
2 * a1 - 2 * a2) + h1 ** 4 * w2 ** 4 * tf.cos(
4 * a1 - 4 * a2) + h2 ** 4 * w1 ** 4 * tf.cos(
4 * a1 - 4 * a2) + 4 * w1 ** 4 * w2 ** 4 * tf.cos(
2 * a1 - 2 * a2) + w1 ** 4 * w2 ** 4 * tf.cos(
4 * a1 - 4 * a2) + 2 * h1 ** 2 * h2 ** 4 * w1 ** 2 + 2 * h1 ** 4 * h2 ** 2 * w2 ** 2 + 2 * h1 ** 2 * w1 ** 2 * w2 ** 4 + 2 * h2 ** 2 * w1 ** 4 * w2 ** 2 - 2 * h1 ** 2 * h2 ** 4 * w1 ** 2 * tf.cos(
4 * a1 - 4 * a2) - 2 * h1 ** 4 * h2 ** 2 * w2 ** 2 * tf.cos(
4 * a1 - 4 * a2) - 2 * h1 ** 2 * w1 ** 2 * w2 ** 4 * tf.cos(
4 * a1 - 4 * a2) - 2 * h2 ** 2 * w1 ** 4 * w2 ** 2 * tf.cos(
4 * a1 - 4 * a2) - 20 * h1 ** 2 * h2 ** 2 * w1 ** 2 * w2 ** 2 + 4 * h1 ** 2 * h2 ** 2 * w1 ** 2 * w2 ** 2 * tf.cos(
4 * a1 - 4 * a2)) ** (
1 / 2)) - (
(
2 * w1 ** 2 * w2 ** 2 - 2 ** (
1 / 2) * (
3 * w1 ** 4 * w2 ** 4 + 3 * h1 ** 4 * h2 ** 4 + 3 * h1 ** 4 * w2 ** 4 + 3 * h2 ** 4 * w1 ** 4 + 4 * h1 ** 4 * h2 ** 4 * tf.cos(
2 * a1 - 2 * a2) + h1 ** 4 * h2 ** 4 * tf.cos(
4 * a1 - 4 * a2) - 4 * h1 ** 4 * w2 ** 4 * tf.cos(
2 * a1 - 2 * a2) - 4 * h2 ** 4 * w1 ** 4 * tf.cos(
2 * a1 - 2 * a2) + h1 ** 4 * w2 ** 4 * tf.cos(
4 * a1 - 4 * a2) + h2 ** 4 * w1 ** 4 * tf.cos(
4 * a1 - 4 * a2) + 4 * w1 ** 4 * w2 ** 4 * tf.cos(
2 * a1 - 2 * a2) + w1 ** 4 * w2 ** 4 * tf.cos(
4 * a1 - 4 * a2) + 2 * h1 ** 2 * h2 ** 4 * w1 ** 2 + 2 * h1 ** 4 * h2 ** 2 * w2 ** 2 + 2 * h1 ** 2 * w1 ** 2 * w2 ** 4 + 2 * h2 ** 2 * w1 ** 4 * w2 ** 2 - 2 * h1 ** 2 * h2 ** 4 * w1 ** 2 * tf.cos(
4 * a1 - 4 * a2) - 2 * h1 ** 4 * h2 ** 2 * w2 ** 2 * tf.cos(
4 * a1 - 4 * a2) - 2 * h1 ** 2 * w1 ** 2 * w2 ** 4 * tf.cos(
4 * a1 - 4 * a2) - 2 * h2 ** 2 * w1 ** 4 * w2 ** 2 * tf.cos(
4 * a1 - 4 * a2) - 20 * h1 ** 2 * h2 ** 2 * w1 ** 2 * w2 ** 2 + 4 * h1 ** 2 * h2 ** 2 * w1 ** 2 * w2 ** 2 * tf.cos(
4 * a1 - 4 * a2)) ** (
1 / 2) + 2 * h1 ** 2 * h2 ** 2 + 2 * h1 ** 2 * w2 ** 2 + 2 * h2 ** 2 * w1 ** 2 + 2 * h1 ** 2 * h2 ** 2 * tf.cos(
2 * a1 - 2 * a2) - 2 * h1 ** 2 * w2 ** 2 * tf.cos(
2 * a1 - 2 * a2) - 2 * h2 ** 2 * w1 ** 2 * tf.cos(
2 * a1 - 2 * a2) + 2 * w1 ** 2 * w2 ** 2 * tf.cos(
2 * a1 - 2 * a2)) ** (
1 / 2) * (
(
h1 ** 2 * h2 ** 2 * tf.sin(
2 * a1)) / 4 + (
h1 ** 2 * h2 ** 2 * tf.sin(
2 * a2)) / 8 + (
h1 ** 2 * w2 ** 2 * tf.sin(
2 * a1)) / 4 - (
h2 ** 2 * w1 ** 2 * tf.sin(
2 * a1)) / 4 - (
h1 ** 2 * w2 ** 2 * tf.sin(
2 * a2)) / 8 + (
h2 ** 2 * w1 ** 2 * tf.sin(
2 * a2)) / 8 - (
w1 ** 2 * w2 ** 2 * tf.sin(
2 * a1)) / 4 - (
w1 ** 2 * w2 ** 2 * tf.sin(
2 * a2)) / 8 + (
h1 ** 2 * h2 ** 2 * tf.sin(
4 * a1 - 2 * a2)) / 8 - (
h1 ** 2 * w2 ** 2 * tf.sin(
4 * a1 - 2 * a2)) / 8 + (
h2 ** 2 * w1 ** 2 * tf.sin(
4 * a1 - 2 * a2)) / 8 - (
w1 ** 2 * w2 ** 2 * tf.sin(
4 * a1 - 2 * a2)) / 8 + (
h1 * h2 ** 2 * w1 * tf.sin(
2 * a2)) / 4 - (
h1 * w1 * w2 ** 2 * tf.sin(
2 * a2)) / 4 - (
h1 * h2 ** 2 * w1 * tf.sin(
4 * a1 - 2 * a2)) / 4 + (
h1 * w1 * w2 ** 2 * tf.sin(
4 * a1 - 2 * a2)) / 4) * (
2 ** (
1 / 2) * (
3 * w1 ** 4 * w2 ** 4 + 3 * h1 ** 4 * h2 ** 4 + 3 * h1 ** 4 * w2 ** 4 + 3 * h2 ** 4 * w1 ** 4 + 4 * h1 ** 4 * h2 ** 4 * tf.cos(
2 * a1 - 2 * a2) + h1 ** 4 * h2 ** 4 * tf.cos(
4 * a1 - 4 * a2) - 4 * h1 ** 4 * w2 ** 4 * tf.cos(
2 * a1 - 2 * a2) - 4 * h2 ** 4 * w1 ** 4 * tf.cos(
2 * a1 - 2 * a2) + h1 ** 4 * w2 ** 4 * tf.cos(
4 * a1 - 4 * a2) + h2 ** 4 * w1 ** 4 * tf.cos(
4 * a1 - 4 * a2) + 4 * w1 ** 4 * w2 ** 4 * tf.cos(
2 * a1 - 2 * a2) + w1 ** 4 * w2 ** 4 * tf.cos(
4 * a1 - 4 * a2) + 2 * h1 ** 2 * h2 ** 4 * w1 ** 2 + 2 * h1 ** 4 * h2 ** 2 * w2 ** 2 + 2 * h1 ** 2 * w1 ** 2 * w2 ** 4 + 2 * h2 ** 2 * w1 ** 4 * w2 ** 2 - 2 * h1 ** 2 * h2 ** 4 * w1 ** 2 * tf.cos(
4 * a1 - 4 * a2) - 2 * h1 ** 4 * h2 ** 2 * w2 ** 2 * tf.cos(
4 * a1 - 4 * a2) - 2 * h1 ** 2 * w1 ** 2 * w2 ** 4 * tf.cos(
4 * a1 - 4 * a2) - 2 * h2 ** 2 * w1 ** 4 * w2 ** 2 * tf.cos(
4 * a1 - 4 * a2) - 20 * h1 ** 2 * h2 ** 2 * w1 ** 2 * w2 ** 2 + 4 * h1 ** 2 * h2 ** 2 * w1 ** 2 * w2 ** 2 * tf.cos(
4 * a1 - 4 * a2)) ** (
1 / 2) + 2 * h1 ** 2 * h2 ** 2 * tf.cos(
2 * a1) + h1 ** 2 * h2 ** 2 * tf.cos(
2 * a2) + 2 * h1 ** 2 * w2 ** 2 * tf.cos(
2 * a1) - 2 * h2 ** 2 * w1 ** 2 * tf.cos(
2 * a1) - h1 ** 2 * w2 ** 2 * tf.cos(
2 * a2) + h2 ** 2 * w1 ** 2 * tf.cos(
2 * a2) - 2 * w1 ** 2 * w2 ** 2 * tf.cos(
2 * a1) - w1 ** 2 * w2 ** 2 * tf.cos(
2 * a2) + h1 ** 2 * h2 ** 2 * tf.cos(
4 * a1 - 2 * a2) - h1 ** 2 * w2 ** 2 * tf.cos(
4 * a1 - 2 * a2) + h2 ** 2 * w1 ** 2 * tf.cos(
4 * a1 - 2 * a2) - w1 ** 2 * w2 ** 2 * tf.cos(
4 * a1 - 2 * a2) + 2 * h1 * h2 ** 2 * w1 * tf.cos(
2 * a2) - 2 * h1 * w1 * w2 ** 2 * tf.cos(
2 * a2) - 2 * h1 * h2 ** 2 * w1 * tf.cos(
4 * a1 - 2 * a2) + 2 * h1 * w1 * w2 ** 2 * tf.cos(
4 * a1 - 2 * a2))) / (
2 * (
2 * h1 ** 2 * h2 ** 2 * tf.sin(
2 * a1) + h1 ** 2 * h2 ** 2 * tf.sin(
2 * a2) + 2 * h1 ** 2 * w2 ** 2 * tf.sin(
2 * a1) - 2 * h2 ** 2 * w1 ** 2 * tf.sin(
2 * a1) - h1 ** 2 * w2 ** 2 * tf.sin(
2 * a2) + h2 ** 2 * w1 ** 2 * tf.sin(
2 * a2) - 2 * w1 ** 2 * w2 ** 2 * tf.sin(
2 * a1) - w1 ** 2 * w2 ** 2 * tf.sin(
2 * a2) + h1 ** 2 * h2 ** 2 * tf.sin(
4 * a1 - 2 * a2) - h1 ** 2 * w2 ** 2 * tf.sin(
4 * a1 - 2 * a2) + h2 ** 2 * w1 ** 2 * tf.sin(
4 * a1 - 2 * a2) - w1 ** 2 * w2 ** 2 * tf.sin(
4 * a1 - 2 * a2) + 2 * h1 * h2 ** 2 * w1 * tf.sin(
2 * a2) - 2 * h1 * w1 * w2 ** 2 * tf.sin(
2 * a2) - 2 * h1 * h2 ** 2 * w1 * tf.sin(
4 * a1 - 2 * a2) + 2 * h1 * w1 * w2 ** 2 * tf.sin(
4 * a1 - 2 * a2)) * (
3 * w1 ** 4 * w2 ** 4 + 3 * h1 ** 4 * h2 ** 4 + 3 * h1 ** 4 * w2 ** 4 + 3 * h2 ** 4 * w1 ** 4 + 4 * h1 ** 4 * h2 ** 4 * tf.cos(
2 * a1 - 2 * a2) + h1 ** 4 * h2 ** 4 * tf.cos(
4 * a1 - 4 * a2) - 4 * h1 ** 4 * w2 ** 4 * tf.cos(
2 * a1 - 2 * a2) - 4 * h2 ** 4 * w1 ** 4 * tf.cos(
2 * a1 - 2 * a2) + h1 ** 4 * w2 ** 4 * tf.cos(
4 * a1 - 4 * a2) + h2 ** 4 * w1 ** 4 * tf.cos(
4 * a1 - 4 * a2) + 4 * w1 ** 4 * w2 ** 4 * tf.cos(
2 * a1 - 2 * a2) + w1 ** 4 * w2 ** 4 * tf.cos(
4 * a1 - 4 * a2) + 2 * h1 ** 2 * h2 ** 4 * w1 ** 2 + 2 * h1 ** 4 * h2 ** 2 * w2 ** 2 + 2 * h1 ** 2 * w1 ** 2 * w2 ** 4 + 2 * h2 ** 2 * w1 ** 4 * w2 ** 2 - 2 * h1 ** 2 * h2 ** 4 * w1 ** 2 * tf.cos(
4 * a1 - 4 * a2) - 2 * h1 ** 4 * h2 ** 2 * w2 ** 2 * tf.cos(
4 * a1 - 4 * a2) - 2 * h1 ** 2 * w1 ** 2 * w2 ** 4 * tf.cos(
4 * a1 - 4 * a2) - 2 * h2 ** 2 * w1 ** 4 * w2 ** 2 * tf.cos(
4 * a1 - 4 * a2) - 20 * h1 ** 2 * h2 ** 2 * w1 ** 2 * w2 ** 2 + 4 * h1 ** 2 * h2 ** 2 * w1 ** 2 * w2 ** 2 * tf.cos(
4 * a1 - 4 * a2)) ** (
1 / 2))
return element1
def get_element3_tf(w1, h1, a1, w2, h2, a2):
    """TensorFlow version of one off-diagonal element of a 2x2 matrix combining
    two rotated anisotropic components (w* = width scale, h* = height scale,
    a* = rotation angle).

    NOTE(review): this closed-form expression is machine-generated algebra;
    presumably it merges two rotated ellipse/covariance shapes — confirm against
    the generator. This rewrite only hoists the repeated sub-expressions
    (common-subexpression elimination); the formula itself is unchanged.
    Floating-point output may differ from the generated original at round-off
    level because shared terms are evaluated once instead of re-grouped per use.
    """
    sqrt2 = 2 ** 0.5
    c2d = tf.cos(2 * a1 - 2 * a2)   # cos(2(a1 - a2))
    c4d = tf.cos(4 * a1 - 4 * a2)   # cos(4(a1 - a2))
    s2a1 = tf.sin(2 * a1)
    s2a2 = tf.sin(2 * a2)
    s4m2 = tf.sin(4 * a1 - 2 * a2)
    sa1_sq = tf.sin(a1) ** 2
    # Radicand of the large square root that the generated code repeats verbatim.
    disc = (
        3 * w1 ** 4 * w2 ** 4 + 3 * h1 ** 4 * h2 ** 4
        + 3 * h1 ** 4 * w2 ** 4 + 3 * h2 ** 4 * w1 ** 4
        + 4 * h1 ** 4 * h2 ** 4 * c2d + h1 ** 4 * h2 ** 4 * c4d
        - 4 * h1 ** 4 * w2 ** 4 * c2d - 4 * h2 ** 4 * w1 ** 4 * c2d
        + h1 ** 4 * w2 ** 4 * c4d + h2 ** 4 * w1 ** 4 * c4d
        + 4 * w1 ** 4 * w2 ** 4 * c2d + w1 ** 4 * w2 ** 4 * c4d
        + 2 * h1 ** 2 * h2 ** 4 * w1 ** 2 + 2 * h1 ** 4 * h2 ** 2 * w2 ** 2
        + 2 * h1 ** 2 * w1 ** 2 * w2 ** 4 + 2 * h2 ** 2 * w1 ** 4 * w2 ** 2
        - 2 * h1 ** 2 * h2 ** 4 * w1 ** 2 * c4d
        - 2 * h1 ** 4 * h2 ** 2 * w2 ** 2 * c4d
        - 2 * h1 ** 2 * w1 ** 2 * w2 ** 4 * c4d
        - 2 * h2 ** 2 * w1 ** 4 * w2 ** 2 * c4d
        - 20 * h1 ** 2 * h2 ** 2 * w1 ** 2 * w2 ** 2
        + 4 * h1 ** 2 * h2 ** 2 * w1 ** 2 * w2 ** 2 * c4d
    )
    sqrt_disc = disc ** 0.5
    # Shared part of the two amplitude radicands (they differ only in the sign
    # of the sqrt(2)*sqrt_disc term).
    base = (
        2 * w1 ** 2 * w2 ** 2 + 2 * h1 ** 2 * h2 ** 2
        + 2 * h1 ** 2 * w2 ** 2 + 2 * h2 ** 2 * w1 ** 2
        + 2 * h1 ** 2 * h2 ** 2 * c2d - 2 * h1 ** 2 * w2 ** 2 * c2d
        - 2 * h2 ** 2 * w1 ** 2 * c2d + 2 * w1 ** 2 * w2 ** 2 * c2d
    )
    amp_plus = (base + sqrt2 * sqrt_disc) ** 0.5
    amp_minus = (base - sqrt2 * sqrt_disc) ** 0.5
    # Sine combination multiplying both amplitudes (identical in both fractions
    # of the generated code).
    sin_series = (
        (h1 ** 2 * h2 ** 2 * s2a1) / 4 + (h1 ** 2 * h2 ** 2 * s2a2) / 8
        + (h1 ** 2 * w2 ** 2 * s2a1) / 4 - (h2 ** 2 * w1 ** 2 * s2a1) / 4
        - (h1 ** 2 * w2 ** 2 * s2a2) / 8 + (h2 ** 2 * w1 ** 2 * s2a2) / 8
        - (w1 ** 2 * w2 ** 2 * s2a1) / 4 - (w1 ** 2 * w2 ** 2 * s2a2) / 8
        + (h1 ** 2 * h2 ** 2 * s4m2) / 8 - (h1 ** 2 * w2 ** 2 * s4m2) / 8
        + (h2 ** 2 * w1 ** 2 * s4m2) / 8 - (w1 ** 2 * w2 ** 2 * s4m2) / 8
        + (h1 * h2 ** 2 * w1 * s2a2) / 4 - (h1 * w1 * w2 ** 2 * s2a2) / 4
        - (h1 * h2 ** 2 * w1 * s4m2) / 4 + (h1 * w1 * w2 ** 2 * s4m2) / 4
    )
    # First-component rotation contribution, split into two symmetric halves
    # exactly as in the generated expression.
    rot = s2a1 * (h1 / 4 - w1 / 4)
    half_a = h1 / 2 + (w1 * sa1_sq) / 2 - (h1 * sa1_sq) / 2
    half_b = w1 / 2 - (w1 * sa1_sq) / 2 + (h1 * sa1_sq) / 2
    element3 = (
        (w2 ** 2 * s2a2) / 8 - (h2 ** 2 * s2a2) / 8
        + (amp_plus * sin_series) / (2 * sqrt_disc)
        - (amp_minus * sin_series) / (2 * sqrt_disc)
        - rot * half_a
        - rot * half_b
    )
    return element3
def get_element2_tf(w1, h1, a1, w2, h2, a2):
    """TensorFlow version of one off-diagonal element of a 2x2 matrix combining
    two rotated anisotropic components (w* = width scale, h* = height scale,
    a* = rotation angle).

    NOTE(review): machine-generated algebra; presumably the symmetric partner of
    ``get_element3_tf`` — confirm against the generator. This rewrite only
    hoists the repeated sub-expressions (the big radicand ``disc`` alone was
    evaluated five times in the original); the formula is unchanged.
    Floating-point output may differ from the generated original at round-off
    level because shared terms are evaluated once instead of re-grouped per use.
    """
    sqrt2 = 2 ** 0.5
    c2d = tf.cos(2 * a1 - 2 * a2)   # cos(2(a1 - a2))
    c4d = tf.cos(4 * a1 - 4 * a2)   # cos(4(a1 - a2))
    c2a1 = tf.cos(2 * a1)
    c2a2 = tf.cos(2 * a2)
    c4m2 = tf.cos(4 * a1 - 2 * a2)
    s2a1 = tf.sin(2 * a1)
    s2a2 = tf.sin(2 * a2)
    s4m2 = tf.sin(4 * a1 - 2 * a2)
    sa1_sq = tf.sin(a1) ** 2
    # Radicand of the large square root repeated throughout the generated code.
    disc = (
        3 * w1 ** 4 * w2 ** 4 + 3 * h1 ** 4 * h2 ** 4
        + 3 * h1 ** 4 * w2 ** 4 + 3 * h2 ** 4 * w1 ** 4
        + 4 * h1 ** 4 * h2 ** 4 * c2d + h1 ** 4 * h2 ** 4 * c4d
        - 4 * h1 ** 4 * w2 ** 4 * c2d - 4 * h2 ** 4 * w1 ** 4 * c2d
        + h1 ** 4 * w2 ** 4 * c4d + h2 ** 4 * w1 ** 4 * c4d
        + 4 * w1 ** 4 * w2 ** 4 * c2d + w1 ** 4 * w2 ** 4 * c4d
        + 2 * h1 ** 2 * h2 ** 4 * w1 ** 2 + 2 * h1 ** 4 * h2 ** 2 * w2 ** 2
        + 2 * h1 ** 2 * w1 ** 2 * w2 ** 4 + 2 * h2 ** 2 * w1 ** 4 * w2 ** 2
        - 2 * h1 ** 2 * h2 ** 4 * w1 ** 2 * c4d
        - 2 * h1 ** 4 * h2 ** 2 * w2 ** 2 * c4d
        - 2 * h1 ** 2 * w1 ** 2 * w2 ** 4 * c4d
        - 2 * h2 ** 2 * w1 ** 4 * w2 ** 2 * c4d
        - 20 * h1 ** 2 * h2 ** 2 * w1 ** 2 * w2 ** 2
        + 4 * h1 ** 2 * h2 ** 2 * w1 ** 2 * w2 ** 2 * c4d
    )
    sqrt_disc = disc ** 0.5
    # Shared part of the two amplitude radicands (sign of sqrt(2)*sqrt_disc flips).
    base = (
        2 * w1 ** 2 * w2 ** 2 + 2 * h1 ** 2 * h2 ** 2
        + 2 * h1 ** 2 * w2 ** 2 + 2 * h2 ** 2 * w1 ** 2
        + 2 * h1 ** 2 * h2 ** 2 * c2d - 2 * h1 ** 2 * w2 ** 2 * c2d
        - 2 * h2 ** 2 * w1 ** 2 * c2d + 2 * w1 ** 2 * w2 ** 2 * c2d
    )
    amp_plus = (base + sqrt2 * sqrt_disc) ** 0.5
    amp_minus = (base - sqrt2 * sqrt_disc) ** 0.5
    # Integer-coefficient cosine series; in the original it appears four times,
    # twice negated and twice scaled by 1/4.
    cos_sum = (
        2 * h1 ** 2 * h2 ** 2 * c2a1 + h1 ** 2 * h2 ** 2 * c2a2
        + 2 * h1 ** 2 * w2 ** 2 * c2a1 - 2 * h2 ** 2 * w1 ** 2 * c2a1
        - h1 ** 2 * w2 ** 2 * c2a2 + h2 ** 2 * w1 ** 2 * c2a2
        - 2 * w1 ** 2 * w2 ** 2 * c2a1 - w1 ** 2 * w2 ** 2 * c2a2
        + h1 ** 2 * h2 ** 2 * c4m2 - h1 ** 2 * w2 ** 2 * c4m2
        + h2 ** 2 * w1 ** 2 * c4m2 - w1 ** 2 * w2 ** 2 * c4m2
        + 2 * h1 * h2 ** 2 * w1 * c2a2 - 2 * h1 * w1 * w2 ** 2 * c2a2
        - 2 * h1 * h2 ** 2 * w1 * c4m2 + 2 * h1 * w1 * w2 ** 2 * c4m2
    )
    # Matching sine series appearing in both denominators.
    sin_sum = (
        2 * h1 ** 2 * h2 ** 2 * s2a1 + h1 ** 2 * h2 ** 2 * s2a2
        + 2 * h1 ** 2 * w2 ** 2 * s2a1 - 2 * h2 ** 2 * w1 ** 2 * s2a1
        - h1 ** 2 * w2 ** 2 * s2a2 + h2 ** 2 * w1 ** 2 * s2a2
        - 2 * w1 ** 2 * w2 ** 2 * s2a1 - w1 ** 2 * w2 ** 2 * s2a2
        + h1 ** 2 * h2 ** 2 * s4m2 - h1 ** 2 * w2 ** 2 * s4m2
        + h2 ** 2 * w1 ** 2 * s4m2 - w1 ** 2 * w2 ** 2 * s4m2
        + 2 * h1 * h2 ** 2 * w1 * s2a2 - 2 * h1 * w1 * w2 ** 2 * s2a2
        - 2 * h1 * h2 ** 2 * w1 * s4m2 + 2 * h1 * w1 * w2 ** 2 * s4m2
    )
    # First-component rotation contribution, two symmetric halves as generated.
    rot = s2a1 * (h1 / 4 - w1 / 4)
    half_a = h1 / 2 + (w1 * sa1_sq) / 2 - (h1 * sa1_sq) / 2
    half_b = w1 / 2 - (w1 * sa1_sq) / 2 + (h1 * sa1_sq) / 2
    # The two big fractions of the original, expressed through the hoisted terms:
    #   numerators are amp * (sqrt2*sqrt_disc -/+ cos_sum) * (sqrt2*sqrt_disc +/- cos_sum)/4.
    term_plus = (
        amp_plus * (sqrt2 * sqrt_disc - cos_sum)
        * ((sqrt2 * sqrt_disc + cos_sum) / 4)
    ) / (4 * sin_sum * sqrt_disc)
    term_minus = (
        amp_minus * (sqrt2 * sqrt_disc + cos_sum)
        * ((sqrt2 * sqrt_disc - cos_sum) / 4)
    ) / (4 * sin_sum * sqrt_disc)
    element2 = (
        (w2 ** 2 * s2a2) / 8 - (h2 ** 2 * s2a2) / 8
        - rot * half_a
        - rot * half_b
        + term_plus
        - term_minus
    )
    return element2
def get_element4_tf(w1, h1, a1, w2, h2, a2):
    """TensorFlow version of one diagonal element of a 2x2 matrix combining two
    rotated anisotropic components (w* = width scale, h* = height scale,
    a* = rotation angle).

    NOTE(review): machine-generated algebra; presumably the second diagonal
    element paired with the ``get_element1*`` functions — confirm against the
    generator. This rewrite only hoists the repeated sub-expressions; the
    formula is unchanged. Floating-point output may differ from the generated
    original at round-off level because shared terms are evaluated once instead
    of re-grouped per use.
    """
    sqrt2 = 2 ** 0.5
    c2d = tf.cos(2 * a1 - 2 * a2)   # cos(2(a1 - a2))
    c4d = tf.cos(4 * a1 - 4 * a2)   # cos(4(a1 - a2))
    c2a1 = tf.cos(2 * a1)
    c2a2 = tf.cos(2 * a2)
    c4m2 = tf.cos(4 * a1 - 2 * a2)
    sa1_sq = tf.sin(a1) ** 2
    # Radicand of the large square root repeated throughout the generated code.
    disc = (
        3 * w1 ** 4 * w2 ** 4 + 3 * h1 ** 4 * h2 ** 4
        + 3 * h1 ** 4 * w2 ** 4 + 3 * h2 ** 4 * w1 ** 4
        + 4 * h1 ** 4 * h2 ** 4 * c2d + h1 ** 4 * h2 ** 4 * c4d
        - 4 * h1 ** 4 * w2 ** 4 * c2d - 4 * h2 ** 4 * w1 ** 4 * c2d
        + h1 ** 4 * w2 ** 4 * c4d + h2 ** 4 * w1 ** 4 * c4d
        + 4 * w1 ** 4 * w2 ** 4 * c2d + w1 ** 4 * w2 ** 4 * c4d
        + 2 * h1 ** 2 * h2 ** 4 * w1 ** 2 + 2 * h1 ** 4 * h2 ** 2 * w2 ** 2
        + 2 * h1 ** 2 * w1 ** 2 * w2 ** 4 + 2 * h2 ** 2 * w1 ** 4 * w2 ** 2
        - 2 * h1 ** 2 * h2 ** 4 * w1 ** 2 * c4d
        - 2 * h1 ** 4 * h2 ** 2 * w2 ** 2 * c4d
        - 2 * h1 ** 2 * w1 ** 2 * w2 ** 4 * c4d
        - 2 * h2 ** 2 * w1 ** 4 * w2 ** 2 * c4d
        - 20 * h1 ** 2 * h2 ** 2 * w1 ** 2 * w2 ** 2
        + 4 * h1 ** 2 * h2 ** 2 * w1 ** 2 * w2 ** 2 * c4d
    )
    sqrt_disc = disc ** 0.5
    # Shared part of the two amplitude radicands (sign of sqrt(2)*sqrt_disc flips).
    base = (
        2 * w1 ** 2 * w2 ** 2 + 2 * h1 ** 2 * h2 ** 2
        + 2 * h1 ** 2 * w2 ** 2 + 2 * h2 ** 2 * w1 ** 2
        + 2 * h1 ** 2 * h2 ** 2 * c2d - 2 * h1 ** 2 * w2 ** 2 * c2d
        - 2 * h2 ** 2 * w1 ** 2 * c2d + 2 * w1 ** 2 * w2 ** 2 * c2d
    )
    amp_plus = (base + sqrt2 * sqrt_disc) ** 0.5
    amp_minus = (base - sqrt2 * sqrt_disc) ** 0.5
    # Integer-coefficient cosine series; the original's two long bracketed
    # factors are (sqrt2*sqrt_disc +/- cos_sum) / 4.
    cos_sum = (
        2 * h1 ** 2 * h2 ** 2 * c2a1 + h1 ** 2 * h2 ** 2 * c2a2
        + 2 * h1 ** 2 * w2 ** 2 * c2a1 - 2 * h2 ** 2 * w1 ** 2 * c2a1
        - h1 ** 2 * w2 ** 2 * c2a2 + h2 ** 2 * w1 ** 2 * c2a2
        - 2 * w1 ** 2 * w2 ** 2 * c2a1 - w1 ** 2 * w2 ** 2 * c2a2
        + h1 ** 2 * h2 ** 2 * c4m2 - h1 ** 2 * w2 ** 2 * c4m2
        + h2 ** 2 * w1 ** 2 * c4m2 - w1 ** 2 * w2 ** 2 * c4m2
        + 2 * h1 * h2 ** 2 * w1 * c2a2 - 2 * h1 * w1 * w2 ** 2 * c2a2
        - 2 * h1 * h2 ** 2 * w1 * c4m2 + 2 * h1 * w1 * w2 ** 2 * c4m2
    )
    element4 = (
        (h1 + w1 * sa1_sq - h1 * sa1_sq) ** 2 / 4
        + (h2 ** 2 * tf.cos(a2) ** 2) / 4
        + (w2 ** 2 * tf.sin(a2) ** 2) / 4
        + (tf.sin(2 * a1) ** 2 * (h1 - w1) ** 2) / 16
        - (amp_plus * ((sqrt2 * sqrt_disc + cos_sum) / 4)) / (4 * sqrt_disc)
        - (amp_minus * ((sqrt2 * sqrt_disc - cos_sum) / 4)) / (4 * sqrt_disc)
    )
    return element4
def get_element1(w1, h1, a1, w2, h2, a2):
    """Evaluate the machine-generated closed-form scalar ``element1``.

    This is a re-grouped but algebraically identical form of the original
    flat symbolic expansion: the discriminant polynomial, the shared sine /
    cosine combinations and the two eigenvalue-like radicals each appear
    many times in the generated code and are computed once here.

    NOTE(review): parameter roles are inferred from the names only —
    (w1, h1, a1) and (w2, h2, a2) look like two axis scales plus a rotation
    angle for two ellipse-like forms; confirm against the symbolic
    generator that produced this file.

    Returns a scalar (or NumPy-broadcast) value.  NaN/inf can result when
    the inner discriminant is negative or the sine-based denominator
    vanishes (e.g. both shapes equal and a1 == a2), exactly as in the
    original expression.
    """
    sqrt2 = 2 ** (1 / 2)
    # Frequently repeated powers of the inputs.
    w1s, h1s, w2s, h2s = w1 ** 2, h1 ** 2, w2 ** 2, h2 ** 2
    w1q, h1q, w2q, h2q = w1 ** 4, h1 ** 4, w2 ** 4, h2 ** 4
    # Shared trigonometric factors.
    c2d = np.cos(2 * a1 - 2 * a2)
    c4d = np.cos(4 * a1 - 4 * a2)
    s2a1, s2a2 = np.sin(2 * a1), np.sin(2 * a2)
    s4m2 = np.sin(4 * a1 - 2 * a2)
    c2a1, c2a2 = np.cos(2 * a1), np.cos(2 * a2)
    c4m2 = np.cos(4 * a1 - 2 * a2)

    # Discriminant-like polynomial that sits under every square root in
    # the generated expansion.
    disc = (3 * w1q * w2q + 3 * h1q * h2q + 3 * h1q * w2q + 3 * h2q * w1q
            + 4 * h1q * h2q * c2d + h1q * h2q * c4d
            - 4 * h1q * w2q * c2d - 4 * h2q * w1q * c2d
            + h1q * w2q * c4d + h2q * w1q * c4d
            + 4 * w1q * w2q * c2d + w1q * w2q * c4d
            + 2 * h1s * h2q * w1s + 2 * h1q * h2s * w2s
            + 2 * h1s * w1s * w2q + 2 * h2s * w1q * w2s
            - 2 * h1s * h2q * w1s * c4d - 2 * h1q * h2s * w2s * c4d
            - 2 * h1s * w1s * w2q * c4d - 2 * h2s * w1q * w2s * c4d
            - 20 * h1s * h2s * w1s * w2s + 4 * h1s * h2s * w1s * w2s * c4d)
    root = disc ** (1 / 2)

    # Angle-difference part shared by both radicals below.
    base = (2 * h1s * h2s + 2 * h1s * w2s + 2 * h2s * w1s
            + 2 * h1s * h2s * c2d - 2 * h1s * w2s * c2d
            - 2 * h2s * w1s * c2d + 2 * w1s * w2s * c2d)
    # Eigenvalue-like radicals (differ only in the sign of sqrt2*root).
    lam_plus = (2 * w1s * w2s + sqrt2 * root + base) ** (1 / 2)
    lam_minus = (2 * w1s * w2s - sqrt2 * root + base) ** (1 / 2)

    # Common sine combination (numerator factor).
    mix = (h1s * h2s * s2a1 / 4 + h1s * h2s * s2a2 / 8
           + h1s * w2s * s2a1 / 4 - h2s * w1s * s2a1 / 4
           - h1s * w2s * s2a2 / 8 + h2s * w1s * s2a2 / 8
           - w1s * w2s * s2a1 / 4 - w1s * w2s * s2a2 / 8
           + h1s * h2s * s4m2 / 8 - h1s * w2s * s4m2 / 8
           + h2s * w1s * s4m2 / 8 - w1s * w2s * s4m2 / 8
           + h1 * h2s * w1 * s2a2 / 4 - h1 * w1 * w2s * s2a2 / 4
           - h1 * h2s * w1 * s4m2 / 4 + h1 * w1 * w2s * s4m2 / 4)
    # Denominator factor; in exact arithmetic this equals 8 * mix, but it
    # is written out as in the generated code.
    sin_den = (2 * h1s * h2s * s2a1 + h1s * h2s * s2a2
               + 2 * h1s * w2s * s2a1 - 2 * h2s * w1s * s2a1
               - h1s * w2s * s2a2 + h2s * w1s * s2a2
               - 2 * w1s * w2s * s2a1 - w1s * w2s * s2a2
               + h1s * h2s * s4m2 - h1s * w2s * s4m2
               + h2s * w1s * s4m2 - w1s * w2s * s4m2
               + 2 * h1 * h2s * w1 * s2a2 - 2 * h1 * w1 * w2s * s2a2
               - 2 * h1 * h2s * w1 * s4m2 + 2 * h1 * w1 * w2s * s4m2)
    # Cosine combination whose sign toggles between the two radical terms.
    cos_mix = (2 * h1s * h2s * c2a1 + h1s * h2s * c2a2
               + 2 * h1s * w2s * c2a1 - 2 * h2s * w1s * c2a1
               - h1s * w2s * c2a2 + h2s * w1s * c2a2
               - 2 * w1s * w2s * c2a1 - w1s * w2s * c2a2
               + h1s * h2s * c4m2 - h1s * w2s * c4m2
               + h2s * w1s * c4m2 - w1s * w2s * c4m2
               + 2 * h1 * h2s * w1 * c2a2 - 2 * h1 * w1 * w2s * c2a2
               - 2 * h1 * h2s * w1 * c4m2 + 2 * h1 * w1 * w2s * c4m2)

    element1 = ((w1 - w1 * np.sin(a1) ** 2 + h1 * np.sin(a1) ** 2) ** 2 / 4
                + w2s * np.cos(a2) ** 2 / 4
                + h2s * np.sin(a2) ** 2 / 4
                + s2a1 ** 2 * (h1 - w1) ** 2 / 16
                - lam_plus * mix * (sqrt2 * root - cos_mix) / (2 * sin_den * root)
                - lam_minus * mix * (sqrt2 * root + cos_mix) / (2 * sin_den * root))
    return element1
def get_element3(w1, h1, a1, w2, h2, a2):
    """Evaluate the machine-generated closed-form scalar ``element3``.

    Re-grouped but algebraically identical form of the original flat
    symbolic expansion: the discriminant polynomial and the shared sine
    combination are computed once and reused.

    NOTE(review): parameter roles are inferred from the names only —
    (w1, h1, a1) and (w2, h2, a2) look like two axis scales plus a rotation
    angle for two ellipse-like forms; confirm against the symbolic
    generator that produced this file.

    Returns a scalar (or NumPy-broadcast) value; NaN/inf is possible when
    the inner discriminant is negative or zero, as in the original.
    """
    sqrt2 = 2 ** (1 / 2)
    # Frequently repeated powers of the inputs.
    w1s, h1s, w2s, h2s = w1 ** 2, h1 ** 2, w2 ** 2, h2 ** 2
    w1q, h1q, w2q, h2q = w1 ** 4, h1 ** 4, w2 ** 4, h2 ** 4
    # Shared trigonometric factors.
    c2d = np.cos(2 * a1 - 2 * a2)
    c4d = np.cos(4 * a1 - 4 * a2)
    s2a1, s2a2 = np.sin(2 * a1), np.sin(2 * a2)
    s4m2 = np.sin(4 * a1 - 2 * a2)
    sin_a1_sq = np.sin(a1) ** 2

    # Discriminant-like polynomial under every square root.
    disc = (3 * w1q * w2q + 3 * h1q * h2q + 3 * h1q * w2q + 3 * h2q * w1q
            + 4 * h1q * h2q * c2d + h1q * h2q * c4d
            - 4 * h1q * w2q * c2d - 4 * h2q * w1q * c2d
            + h1q * w2q * c4d + h2q * w1q * c4d
            + 4 * w1q * w2q * c2d + w1q * w2q * c4d
            + 2 * h1s * h2q * w1s + 2 * h1q * h2s * w2s
            + 2 * h1s * w1s * w2q + 2 * h2s * w1q * w2s
            - 2 * h1s * h2q * w1s * c4d - 2 * h1q * h2s * w2s * c4d
            - 2 * h1s * w1s * w2q * c4d - 2 * h2s * w1q * w2s * c4d
            - 20 * h1s * h2s * w1s * w2s + 4 * h1s * h2s * w1s * w2s * c4d)
    root = disc ** (1 / 2)

    # Angle-difference part shared by both radicals.
    base = (2 * h1s * h2s + 2 * h1s * w2s + 2 * h2s * w1s
            + 2 * h1s * h2s * c2d - 2 * h1s * w2s * c2d
            - 2 * h2s * w1s * c2d + 2 * w1s * w2s * c2d)
    # Eigenvalue-like radicals (differ only in the sign of sqrt2*root).
    lam_plus = (2 * w1s * w2s + sqrt2 * root + base) ** (1 / 2)
    lam_minus = (2 * w1s * w2s - sqrt2 * root + base) ** (1 / 2)

    # Common sine combination multiplying both radicals.
    mix = (h1s * h2s * s2a1 / 4 + h1s * h2s * s2a2 / 8
           + h1s * w2s * s2a1 / 4 - h2s * w1s * s2a1 / 4
           - h1s * w2s * s2a2 / 8 + h2s * w1s * s2a2 / 8
           - w1s * w2s * s2a1 / 4 - w1s * w2s * s2a2 / 8
           + h1s * h2s * s4m2 / 8 - h1s * w2s * s4m2 / 8
           + h2s * w1s * s4m2 / 8 - w1s * w2s * s4m2 / 8
           + h1 * h2s * w1 * s2a2 / 4 - h1 * w1 * w2s * s2a2 / 4
           - h1 * h2s * w1 * s4m2 / 4 + h1 * w1 * w2s * s4m2 / 4)

    element3 = (w2s * s2a2 / 8 - h2s * s2a2 / 8
                + lam_plus * mix / (2 * root)
                - lam_minus * mix / (2 * root)
                - s2a1 * (h1 / 4 - w1 / 4)
                * (h1 / 2 + w1 * sin_a1_sq / 2 - h1 * sin_a1_sq / 2)
                - s2a1 * (h1 / 4 - w1 / 4)
                * (w1 / 2 - w1 * sin_a1_sq / 2 + h1 * sin_a1_sq / 2))
    return element3
def get_element2(w1, h1, a1, w2, h2, a2):
    """Evaluate the machine-generated closed-form scalar ``element2``.

    Re-grouped but algebraically identical form of the original flat
    symbolic expansion: the discriminant polynomial, the sine denominator
    and the cosine combination are computed once and reused.  In the
    generated code each radical term carries a product of the form
    (sqrt2*root - cos_mix) * ((sqrt2*root + cos_mix) / 4) — the second
    factor is the first one's sign-flipped twin with every term divided
    by four, which is how it is expressed here.

    NOTE(review): parameter roles are inferred from the names only —
    (w1, h1, a1) and (w2, h2, a2) look like two axis scales plus a rotation
    angle for two ellipse-like forms; confirm against the symbolic
    generator that produced this file.

    Returns a scalar (or NumPy-broadcast) value; NaN/inf is possible when
    the inner discriminant is negative or the sine-based denominator
    vanishes, exactly as in the original expression.
    """
    sqrt2 = 2 ** (1 / 2)
    # Frequently repeated powers of the inputs.
    w1s, h1s, w2s, h2s = w1 ** 2, h1 ** 2, w2 ** 2, h2 ** 2
    w1q, h1q, w2q, h2q = w1 ** 4, h1 ** 4, w2 ** 4, h2 ** 4
    # Shared trigonometric factors.
    c2d = np.cos(2 * a1 - 2 * a2)
    c4d = np.cos(4 * a1 - 4 * a2)
    s2a1, s2a2 = np.sin(2 * a1), np.sin(2 * a2)
    s4m2 = np.sin(4 * a1 - 2 * a2)
    c2a1, c2a2 = np.cos(2 * a1), np.cos(2 * a2)
    c4m2 = np.cos(4 * a1 - 2 * a2)
    sin_a1_sq = np.sin(a1) ** 2

    # Discriminant-like polynomial under every square root.
    disc = (3 * w1q * w2q + 3 * h1q * h2q + 3 * h1q * w2q + 3 * h2q * w1q
            + 4 * h1q * h2q * c2d + h1q * h2q * c4d
            - 4 * h1q * w2q * c2d - 4 * h2q * w1q * c2d
            + h1q * w2q * c4d + h2q * w1q * c4d
            + 4 * w1q * w2q * c2d + w1q * w2q * c4d
            + 2 * h1s * h2q * w1s + 2 * h1q * h2s * w2s
            + 2 * h1s * w1s * w2q + 2 * h2s * w1q * w2s
            - 2 * h1s * h2q * w1s * c4d - 2 * h1q * h2s * w2s * c4d
            - 2 * h1s * w1s * w2q * c4d - 2 * h2s * w1q * w2s * c4d
            - 20 * h1s * h2s * w1s * w2s + 4 * h1s * h2s * w1s * w2s * c4d)
    root = disc ** (1 / 2)

    # Angle-difference part shared by both radicals.
    base = (2 * h1s * h2s + 2 * h1s * w2s + 2 * h2s * w1s
            + 2 * h1s * h2s * c2d - 2 * h1s * w2s * c2d
            - 2 * h2s * w1s * c2d + 2 * w1s * w2s * c2d)
    # Eigenvalue-like radicals (differ only in the sign of sqrt2*root).
    lam_plus = (2 * w1s * w2s + sqrt2 * root + base) ** (1 / 2)
    lam_minus = (2 * w1s * w2s - sqrt2 * root + base) ** (1 / 2)

    # Sine combination forming the denominator of both radical terms.
    sin_den = (2 * h1s * h2s * s2a1 + h1s * h2s * s2a2
               + 2 * h1s * w2s * s2a1 - 2 * h2s * w1s * s2a1
               - h1s * w2s * s2a2 + h2s * w1s * s2a2
               - 2 * w1s * w2s * s2a1 - w1s * w2s * s2a2
               + h1s * h2s * s4m2 - h1s * w2s * s4m2
               + h2s * w1s * s4m2 - w1s * w2s * s4m2
               + 2 * h1 * h2s * w1 * s2a2 - 2 * h1 * w1 * w2s * s2a2
               - 2 * h1 * h2s * w1 * s4m2 + 2 * h1 * w1 * w2s * s4m2)
    # Cosine combination whose sign toggles between the two radical terms.
    cos_mix = (2 * h1s * h2s * c2a1 + h1s * h2s * c2a2
               + 2 * h1s * w2s * c2a1 - 2 * h2s * w1s * c2a1
               - h1s * w2s * c2a2 + h2s * w1s * c2a2
               - 2 * w1s * w2s * c2a1 - w1s * w2s * c2a2
               + h1s * h2s * c4m2 - h1s * w2s * c4m2
               + h2s * w1s * c4m2 - w1s * w2s * c4m2
               + 2 * h1 * h2s * w1 * c2a2 - 2 * h1 * w1 * w2s * c2a2
               - 2 * h1 * h2s * w1 * c4m2 + 2 * h1 * w1 * w2s * c4m2)

    element2 = (w2s * s2a2 / 8 - h2s * s2a2 / 8
                - s2a1 * (h1 / 4 - w1 / 4)
                * (h1 / 2 + w1 * sin_a1_sq / 2 - h1 * sin_a1_sq / 2)
                - s2a1 * (h1 / 4 - w1 / 4)
                * (w1 / 2 - w1 * sin_a1_sq / 2 + h1 * sin_a1_sq / 2)
                + lam_plus * (sqrt2 * root - cos_mix)
                * ((sqrt2 * root + cos_mix) / 4) / (4 * sin_den * root)
                - lam_minus * (sqrt2 * root + cos_mix)
                * ((sqrt2 * root - cos_mix) / 4) / (4 * sin_den * root))
    return element2
def get_element4(w1, h1, a1, w2, h2, a2):
element4 = (h1 + w1 * np.sin(a1) ** 2 - h1 * np.sin(a1) ** 2) ** 2 / 4 + (h2 ** 2 * np.cos(a2) ** 2) / 4 + (w2 ** 2 * np.sin(a2) ** 2) / 4 + (np.sin(
2 * a1) ** 2 * (h1 - w1) ** 2) / 16 - ((2 * w1 ** 2 * w2 ** 2 + 2 ** (1 / 2) * (
3 * w1 ** 4 * w2 ** 4 + 3 * h1 ** 4 * h2 ** 4 + 3 * h1 ** 4 * w2 ** 4 + 3 * h2 ** 4 * w1 ** 4 + 4 * h1 ** 4 * h2 ** 4 * np.cos(
2 * a1 - 2 * a2) + h1 ** 4 * h2 ** 4 * np.cos(4 * a1 - 4 * a2) - 4 * h1 ** 4 * w2 ** 4 * np.cos(
2 * a1 - 2 * a2) - 4 * h2 ** 4 * w1 ** 4 * np.cos(2 * a1 - 2 * a2) + h1 ** 4 * w2 ** 4 * np.cos(
4 * a1 - 4 * a2) + h2 ** 4 * w1 ** 4 * np.cos(4 * a1 - 4 * a2) + 4 * w1 ** 4 * w2 ** 4 * np.cos(
2 * a1 - 2 * a2) + w1 ** 4 * w2 ** 4 * np.cos(
4 * a1 - 4 * a2) + 2 * h1 ** 2 * h2 ** 4 * w1 ** 2 + 2 * h1 ** 4 * h2 ** 2 * w2 ** 2 + 2 * h1 ** 2 * w1 ** 2 * w2 ** 4 + 2 * h2 ** 2 * w1 ** 4 * w2 ** 2 - 2 * h1 ** 2 * h2 ** 4 * w1 ** 2 * np.cos(
4 * a1 - 4 * a2) - 2 * h1 ** 4 * h2 ** 2 * w2 ** 2 * np.cos(4 * a1 - 4 * a2) - 2 * h1 ** 2 * w1 ** 2 * w2 ** 4 * np.cos(
4 * a1 - 4 * a2) - 2 * h2 ** 2 * w1 ** 4 * w2 ** 2 * np.cos(
4 * a1 - 4 * a2) - 20 * h1 ** 2 * h2 ** 2 * w1 ** 2 * w2 ** 2 + 4 * h1 ** 2 * h2 ** 2 * w1 ** 2 * w2 ** 2 * np.cos(
4 * a1 - 4 * a2)) ** (
1 / 2) + 2 * h1 ** 2 * h2 ** 2 + 2 * h1 ** 2 * w2 ** 2 + 2 * h2 ** 2 * w1 ** 2 + 2 * h1 ** 2 * h2 ** 2 * np.cos(
2 * a1 - 2 * a2) - 2 * h1 ** 2 * w2 ** 2 * np.cos(2 * a1 - 2 * a2) - 2 * h2 ** 2 * w1 ** 2 * np.cos(
2 * a1 - 2 * a2) + 2 * w1 ** 2 * w2 ** 2 * np.cos(2 * a1 - 2 * a2)) ** (1 / 2) * ((2 ** (1 / 2) * (
3 * w1 ** 4 * w2 ** 4 + 3 * h1 ** 4 * h2 ** 4 + 3 * h1 ** 4 * w2 ** 4 + 3 * h2 ** 4 * w1 ** 4 + 4 * h1 ** 4 * h2 ** 4 * np.cos(
2 * a1 - 2 * a2) + h1 ** 4 * h2 ** 4 * np.cos(4 * a1 - 4 * a2) - 4 * h1 ** 4 * w2 ** 4 * np.cos(
2 * a1 - 2 * a2) - 4 * h2 ** 4 * w1 ** 4 * np.cos(2 * a1 - 2 * a2) + h1 ** 4 * w2 ** 4 * np.cos(
4 * a1 - 4 * a2) + h2 ** 4 * w1 ** 4 * np.cos(4 * a1 - 4 * a2) + 4 * w1 ** 4 * w2 ** 4 * np.cos(
2 * a1 - 2 * a2) + w1 ** 4 * w2 ** 4 * np.cos(
4 * a1 - 4 * a2) + 2 * h1 ** 2 * h2 ** 4 * w1 ** 2 + 2 * h1 ** 4 * h2 ** 2 * w2 ** 2 + 2 * h1 ** 2 * w1 ** 2 * w2 ** 4 + 2 * h2 ** 2 * w1 ** 4 * w2 ** 2 - 2 * h1 ** 2 * h2 ** 4 * w1 ** 2 * np.cos(
4 * a1 - 4 * a2) - 2 * h1 ** 4 * h2 ** 2 * w2 ** 2 * np.cos(4 * a1 - 4 * a2) - 2 * h1 ** 2 * w1 ** 2 * w2 ** 4 * np.cos(
4 * a1 - 4 * a2) - 2 * h2 ** 2 * w1 ** 4 * w2 ** 2 * np.cos(
4 * a1 - 4 * a2) - 20 * h1 ** 2 * h2 ** 2 * w1 ** 2 * w2 ** 2 + 4 * h1 ** 2 * h2 ** 2 * w1 ** 2 * w2 ** 2 * np.cos(
4 * a1 - 4 * a2)) ** (1 / 2)) / 4 + (h1 ** 2 * h2 ** 2 * np.cos(2 * a1)) / 2 + (h1 ** 2 * h2 ** 2 * np.cos(2 * a2)) / 4 + (
h1 ** 2 * w2 ** 2 * np.cos(
2 * a1)) / 2 - (
h2 ** 2 * w1 ** 2 * np.cos(
2 * a1)) / 2 - (
h1 ** 2 * w2 ** 2 * np.cos(
2 * a2)) / 4 + (
h2 ** 2 * w1 ** 2 * np.cos(
2 * a2)) / 4 - (
w1 ** 2 * w2 ** 2 * np.cos(
2 * a1)) / 2 - (
w1 ** 2 * w2 ** 2 * np.cos(
2 * a2)) / 4 + (
h1 ** 2 * h2 ** 2 * np.cos(
4 * a1 - 2 * a2)) / 4 - (
h1 ** 2 * w2 ** 2 * np.cos(
4 * a1 - 2 * a2)) / 4 + (
h2 ** 2 * w1 ** 2 * np.cos(
4 * a1 - 2 * a2)) / 4 - (
w1 ** 2 * w2 ** 2 * np.cos(
4 * a1 - 2 * a2)) / 4 + (
h1 * h2 ** 2 * w1 * np.cos(
2 * a2)) / 2 - (
h1 * w1 * w2 ** 2 * np.cos(
2 * a2)) / 2 - (
h1 * h2 ** 2 * w1 * np.cos(
4 * a1 - 2 * a2)) / 2 + (
h1 * w1 * w2 ** 2 * np.cos(
4 * a1 - 2 * a2)) / 2)) / (4 * (
3 * w1 ** 4 * w2 ** 4 + 3 * h1 ** 4 * h2 ** 4 + 3 * h1 ** 4 * w2 ** 4 + 3 * h2 ** 4 * w1 ** 4 + 4 * h1 ** 4 * h2 ** 4 * np.cos(
2 * a1 - 2 * a2) + h1 ** 4 * h2 ** 4 * np.cos(4 * a1 - 4 * a2) - 4 * h1 ** 4 * w2 ** 4 * np.cos(
2 * a1 - 2 * a2) - 4 * h2 ** 4 * w1 ** 4 * np.cos(2 * a1 - 2 * a2) + h1 ** 4 * w2 ** 4 * np.cos(
4 * a1 - 4 * a2) + h2 ** 4 * w1 ** 4 * np.cos(4 * a1 - 4 * a2) + 4 * w1 ** 4 * w2 ** 4 * np.cos(
2 * a1 - 2 * a2) + w1 ** 4 * w2 ** 4 * np.cos(
4 * a1 - 4 * a2) + 2 * h1 ** 2 * h2 ** 4 * w1 ** 2 + 2 * h1 ** 4 * h2 ** 2 * w2 ** 2 + 2 * h1 ** 2 * w1 ** 2 * w2 ** 4 + 2 * h2 ** 2 * w1 ** 4 * w2 ** 2 - 2 * h1 ** 2 * h2 ** 4 * w1 ** 2 * np.cos(
4 * a1 - 4 * a2) - 2 * h1 ** 4 * h2 ** 2 * w2 ** 2 * np.cos(4 * a1 - 4 * a2) - 2 * h1 ** 2 * w1 ** 2 * w2 ** 4 * np.cos(
4 * a1 - 4 * a2) - 2 * h2 ** 2 * w1 ** 4 * w2 ** 2 * np.cos(
4 * a1 - 4 * a2) - 20 * h1 ** 2 * h2 ** 2 * w1 ** 2 * w2 ** 2 + 4 * h1 ** 2 * h2 ** 2 * w1 ** 2 * w2 ** 2 * np.cos(
4 * a1 - 4 * a2)) ** (1 / 2)) - ((2 * w1 ** 2 * w2 ** 2 - 2 ** (1 / 2) * (
3 * w1 ** 4 * w2 ** 4 + 3 * h1 ** 4 * h2 ** 4 + 3 * h1 ** 4 * w2 ** 4 + 3 * h2 ** 4 * w1 ** 4 + 4 * h1 ** 4 * h2 ** 4 * np.cos(
2 * a1 - 2 * a2) + h1 ** 4 * h2 ** 4 * np.cos(4 * a1 - 4 * a2) - 4 * h1 ** 4 * w2 ** 4 * np.cos(
2 * a1 - 2 * a2) - 4 * h2 ** 4 * w1 ** 4 * np.cos(2 * a1 - 2 * a2) + h1 ** 4 * w2 ** 4 * np.cos(
4 * a1 - 4 * a2) + h2 ** 4 * w1 ** 4 * np.cos(4 * a1 - 4 * a2) + 4 * w1 ** 4 * w2 ** 4 * np.cos(
2 * a1 - 2 * a2) + w1 ** 4 * w2 ** 4 * np.cos(
4 * a1 - 4 * a2) + 2 * h1 ** 2 * h2 ** 4 * w1 ** 2 + 2 * h1 ** 4 * h2 ** 2 * w2 ** 2 + 2 * h1 ** 2 * w1 ** 2 * w2 ** 4 + 2 * h2 ** 2 * w1 ** 4 * w2 ** 2 - 2 * h1 ** 2 * h2 ** 4 * w1 ** 2 * np.cos(
4 * a1 - 4 * a2) - 2 * h1 ** 4 * h2 ** 2 * w2 ** 2 * np.cos(4 * a1 - 4 * a2) - 2 * h1 ** 2 * w1 ** 2 * w2 ** 4 * np.cos(
4 * a1 - 4 * a2) - 2 * h2 ** 2 * w1 ** 4 * w2 ** 2 * np.cos(
4 * a1 - 4 * a2) - 20 * h1 ** 2 * h2 ** 2 * w1 ** 2 * w2 ** 2 + 4 * h1 ** 2 * h2 ** 2 * w1 ** 2 * w2 ** 2 * np.cos(
4 * a1 - 4 * a2)) ** (
1 / 2) + 2 * h1 ** 2 * h2 ** 2 + 2 * h1 ** 2 * w2 ** 2 + 2 * h2 ** 2 * w1 ** 2 + 2 * h1 ** 2 * h2 ** 2 * np.cos(
2 * a1 - 2 * a2) - 2 * h1 ** 2 * w2 ** 2 * np.cos(2 * a1 - 2 * a2) - 2 * h2 ** 2 * w1 ** 2 * np.cos(
2 * a1 - 2 * a2) + 2 * w1 ** 2 * w2 ** 2 * np.cos(2 * a1 - 2 * a2)) ** (1 / 2) * ((2 ** (1 / 2) * (
3 * w1 ** 4 * w2 ** 4 + 3 * h1 ** 4 * h2 ** 4 + 3 * h1 ** 4 * w2 ** 4 + 3 * h2 ** 4 * w1 ** 4 + 4 * h1 ** 4 * h2 ** 4 * np.cos(
2 * a1 - 2 * a2) + h1 ** 4 * h2 ** 4 * np.cos(4 * a1 - 4 * a2) - 4 * h1 ** 4 * w2 ** 4 * np.cos(
2 * a1 - 2 * a2) - 4 * h2 ** 4 * w1 ** 4 * np.cos(2 * a1 - 2 * a2) + h1 ** 4 * w2 ** 4 * np.cos(
4 * a1 - 4 * a2) + h2 ** 4 * w1 ** 4 * np.cos(4 * a1 - 4 * a2) + 4 * w1 ** 4 * w2 ** 4 * np.cos(
2 * a1 - 2 * a2) + w1 ** 4 * w2 ** 4 * np.cos(
4 * a1 - 4 * a2) + 2 * h1 ** 2 * h2 ** 4 * w1 ** 2 + 2 * h1 ** 4 * h2 ** 2 * w2 ** 2 + 2 * h1 ** 2 * w1 ** 2 * w2 ** 4 + 2 * h2 ** 2 * w1 ** 4 * w2 ** 2 - 2 * h1 ** 2 * h2 ** 4 * w1 ** 2 * np.cos(
4 * a1 - 4 * a2) - 2 * h1 ** 4 * h2 ** 2 * w2 ** 2 * np.cos(4 * a1 - 4 * a2) - 2 * h1 ** 2 * w1 ** 2 * w2 ** 4 * np.cos(
4 * a1 - 4 * a2) - 2 * h2 ** 2 * w1 ** 4 * w2 ** 2 * np.cos(
4 * a1 - 4 * a2) - 20 * h1 ** 2 * h2 ** 2 * w1 ** 2 * w2 ** 2 + 4 * h1 ** 2 * h2 ** 2 * w1 ** 2 * w2 ** 2 * np.cos(
4 * a1 - 4 * a2)) ** (1 / 2)) / 4 - (h1 ** 2 * h2 ** 2 * np.cos(2 * a1)) / 2 - (h1 ** 2 * h2 ** 2 * np.cos(2 * a2)) / 4 - (
h1 ** 2 * w2 ** 2 * np.cos(
2 * a1)) / 2 + (
h2 ** 2 * w1 ** 2 * np.cos(
2 * a1)) / 2 + (
h1 ** 2 * w2 ** 2 * np.cos(
2 * a2)) / 4 - (
h2 ** 2 * w1 ** 2 * np.cos(
2 * a2)) / 4 + (
w1 ** 2 * w2 ** 2 * np.cos(
2 * a1)) / 2 + (
w1 ** 2 * w2 ** 2 * np.cos(
2 * a2)) / 4 - (
h1 ** 2 * h2 ** 2 * np.cos(
4 * a1 - 2 * a2)) / 4 + (
h1 ** 2 * w2 ** 2 * np.cos(
4 * a1 - 2 * a2)) / 4 - (
h2 ** 2 * w1 ** 2 * np.cos(
4 * a1 - 2 * a2)) / 4 + (
w1 ** 2 * w2 ** 2 * np.cos(
4 * a1 - 2 * a2)) / 4 - (
h1 * h2 ** 2 * w1 * np.cos(
2 * a2)) / 2 + (
h1 * w1 * w2 ** 2 * np.cos(
2 * a2)) / 2 + (
h1 * h2 ** 2 * w1 * np.cos(
4 * a1 - 2 * a2)) / 2 - (
h1 * w1 * w2 ** 2 * np.cos(
4 * a1 - 2 * a2)) / 2)) / (4 * (
3 * w1 ** 4 * w2 ** 4 + 3 * h1 ** 4 * h2 ** 4 + 3 * h1 ** 4 * w2 ** 4 + 3 * h2 ** 4 * w1 ** 4 + 4 * h1 ** 4 * h2 ** 4 * np.cos(
2 * a1 - 2 * a2) + h1 ** 4 * h2 ** 4 * np.cos(4 * a1 - 4 * a2) - 4 * h1 ** 4 * w2 ** 4 * np.cos(
2 * a1 - 2 * a2) - 4 * h2 ** 4 * w1 ** 4 * np.cos(2 * a1 - 2 * a2) + h1 ** 4 * w2 ** 4 * np.cos(
4 * a1 - 4 * a2) + h2 ** 4 * w1 ** 4 * np.cos(4 * a1 - 4 * a2) + 4 * w1 ** 4 * w2 ** 4 * np.cos(
2 * a1 - 2 * a2) + w1 ** 4 * w2 ** 4 * np.cos(
4 * a1 - 4 * a2) + 2 * h1 ** 2 * h2 ** 4 * w1 ** 2 + 2 * h1 ** 4 * h2 ** 2 * w2 ** 2 + 2 * h1 ** 2 * w1 ** 2 * w2 ** 4 + 2 * h2 ** 2 * w1 ** 4 * w2 ** 2 - 2 * h1 ** 2 * h2 ** 4 * w1 ** 2 * np.cos(
4 * a1 - 4 * a2) - 2 * h1 ** 4 * h2 ** 2 * w2 ** 2 * np.cos(4 * a1 - 4 * a2) - 2 * h1 ** 2 * w1 ** 2 * w2 ** 4 * np.cos(
4 * a1 - 4 * a2) - 2 * h2 ** 2 * w1 ** 4 * w2 ** 2 * np.cos(
4 * a1 - 4 * a2) - 20 * h1 ** 2 * h2 ** 2 * w1 ** 2 * w2 ** 2 + 4 * h1 ** 2 * h2 ** 2 * w1 ** 2 * w2 ** 2 * np.cos(
4 * a1 - 4 * a2)) ** (1 / 2))
return element4
if __name__ == '__main__':
print(get_element1(70, 10, -0.7854, 70, 10, -0.87266))
print(get_element2(70, 10, -0.7854, 70, 10, -0.87266))
print(get_element3(70, 10, -0.7854, 70, 10, -0.87266))
print(get_element4(70, 10, -0.7854, 70, 10, -0.87266)) | 135.001351 | 324 | 0.136765 | 16,677 | 199,802 | 1.637105 | 0.003418 | 0.071204 | 0.116035 | 0.110761 | 0.991576 | 0.991576 | 0.989964 | 0.989232 | 0.988536 | 0.979745 | 0 | 0.282398 | 0.748156 | 199,802 | 1,480 | 325 | 135.001351 | 0.26018 | 0.000811 | 0 | 0.932872 | 0 | 0 | 0.00004 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.005536 | false | 0 | 0.001384 | 0 | 0.012457 | 0.002768 | 0 | 0 | 1 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 12 |
fc550c97c71fb1f6b5e9000dc93fe17394080ab1 | 179 | py | Python | backend/driver/admin.py | crowdbotics-apps/flightlevel36zero-31803 | 6c81d2e9394440810c1a5561752a452619388c10 | [
"FTL",
"AML",
"RSA-MD"
] | null | null | null | backend/driver/admin.py | crowdbotics-apps/flightlevel36zero-31803 | 6c81d2e9394440810c1a5561752a452619388c10 | [
"FTL",
"AML",
"RSA-MD"
] | 7 | 2021-07-23T20:18:33.000Z | 2021-07-23T20:18:37.000Z | backend/driver/admin.py | crowdbotics-apps/flightlevel36zero-31803 | 6c81d2e9394440810c1a5561752a452619388c10 | [
"FTL",
"AML",
"RSA-MD"
] | null | null | null | from django.contrib import admin
from .models import DriverProfile, DriverOrder
admin.site.register(DriverOrder)
admin.site.register(DriverProfile)
# Register your models here.
| 22.375 | 46 | 0.826816 | 22 | 179 | 6.727273 | 0.545455 | 0.216216 | 0.27027 | 0.378378 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.100559 | 179 | 7 | 47 | 25.571429 | 0.919255 | 0.145251 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.5 | 0 | 0.5 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 7 |
fc5ec58aa06d865bfa41f796251bf5ccf271f8d5 | 12,456 | py | Python | openbook_hashtags/tests/tests_hashtag.py | TamaraAbells/okuna-api | f87d8e80d2f182c01dbce68155ded0078ee707e4 | [
"MIT"
] | 164 | 2019-07-29T17:59:06.000Z | 2022-03-19T21:36:01.000Z | openbook_hashtags/tests/tests_hashtag.py | TamaraAbells/okuna-api | f87d8e80d2f182c01dbce68155ded0078ee707e4 | [
"MIT"
] | 188 | 2019-03-16T09:53:25.000Z | 2019-07-25T14:57:24.000Z | openbook_hashtags/tests/tests_hashtag.py | TamaraAbells/okuna-api | f87d8e80d2f182c01dbce68155ded0078ee707e4 | [
"MIT"
] | 80 | 2019-08-03T17:49:08.000Z | 2022-02-28T16:56:33.000Z | import json
from django.urls import reverse
from faker import Faker
from rest_framework import status
from openbook_common.tests.helpers import make_user, make_authentication_headers_for_user, make_hashtag, \
make_fake_post_text, make_community, make_circle, make_moderation_category, make_global_moderator
from openbook_common.tests.models import OpenbookAPITestCase
from openbook_communities.models import Community
from openbook_moderation.models import ModeratedObject
fake = Faker()
class HashtagAPITests(OpenbookAPITestCase):
"""
HashtagAPITests
"""
def test_can_retrieve_hashtag(self):
"""
should be able to retrieve a hashtag and return 200
"""
user = make_user()
headers = make_authentication_headers_for_user(user)
hashtag = make_hashtag()
hashtag_name = hashtag.name
url = self._get_url(hashtag_name=hashtag_name)
response = self.client.get(url, **headers)
self.assertEqual(response.status_code, status.HTTP_200_OK)
parsed_response = json.loads(response.content)
self.assertIn('name', parsed_response)
response_name = parsed_response['name']
self.assertEqual(response_name, hashtag_name)
def test_can_retrieve_foreign_user_reported_hashtag(self):
"""
should be able to retrieve a foreign user reported hashtag and return 200
"""
user = make_user()
headers = make_authentication_headers_for_user(user)
hashtag = make_hashtag()
hashtag_name = hashtag.name
reporter = make_user()
report_category = make_moderation_category()
reporter.report_hashtag_with_name(hashtag_name=hashtag_name, category_id=report_category.pk)
url = self._get_url(hashtag_name=hashtag_name)
response = self.client.get(url, **headers)
self.assertEqual(response.status_code, status.HTTP_200_OK)
parsed_response = json.loads(response.content)
self.assertIn('name', parsed_response)
response_name = parsed_response['name']
self.assertEqual(response_name, hashtag_name)
def test_cant_retrieve_reported_hashtag(self):
"""
should not be able to retrieve a reported hashtag and return 403
"""
user = make_user()
headers = make_authentication_headers_for_user(user)
hashtag = make_hashtag()
hashtag_name = hashtag.name
report_category = make_moderation_category()
user.report_hashtag_with_name(hashtag_name=hashtag_name, category_id=report_category.pk)
url = self._get_url(hashtag_name=hashtag_name)
response = self.client.get(url, **headers)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
def test_cant_retrieve_reported_and_approved_hashtag(self):
"""
should not be able to retrieve a reported and approved hashtag and return 403
"""
user = make_user()
headers = make_authentication_headers_for_user(user)
hashtag = make_hashtag()
hashtag_name = hashtag.name
reporter = make_user()
report_category = make_moderation_category()
reporter.report_hashtag_with_name(hashtag_name=hashtag_name, category_id=report_category.pk)
global_moderator = make_global_moderator()
moderated_object = ModeratedObject.get_or_create_moderated_object_for_hashtag(hashtag=hashtag,
category_id=report_category.pk)
global_moderator.approve_moderated_object(moderated_object=moderated_object)
url = self._get_url(hashtag_name=hashtag_name)
response = self.client.get(url, **headers)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
def test_can_retrieve_hashtag_with_posts_count(self):
"""
should be able to retrieve a hashtag with its posts count and return 200
"""
user = make_user()
headers = make_authentication_headers_for_user(user)
hashtag = make_hashtag()
hashtag_name = hashtag.name
amount_of_posts = 3
for i in range(0, amount_of_posts):
user = make_user()
post_text = '#%s' % hashtag_name
user.create_public_post(text=post_text)
url = self._get_url(hashtag_name=hashtag_name)
response = self.client.get(url, **headers)
self.assertEqual(response.status_code, status.HTTP_200_OK)
parsed_response = json.loads(response.content)
self.assertIn('posts_count', parsed_response)
posts_count = parsed_response['posts_count']
self.assertEqual(posts_count, amount_of_posts)
def _get_url(self, hashtag_name):
return reverse('hashtag', kwargs={
'hashtag_name': hashtag_name
})
class HashtagPostsAPITests(OpenbookAPITestCase):
"""
HashtagPostsAPITests
"""
def test_retrieves_public_community_post_with_hashtag(self):
"""
should retrieve posts with a given hashtag and return 200
"""
user = make_user()
headers = make_authentication_headers_for_user(user)
community_creator = make_user()
community = make_community(creator=community_creator)
post_creator = make_user()
post_creator.join_community_with_name(community_name=community.name)
hashtag = make_hashtag()
fake_post_text = make_fake_post_text() + ' and a little hashtag #%s' % hashtag.name
post_creator.create_community_post(community_name=community.name, text=fake_post_text)
url = self._get_url(hashtag_name=hashtag.name)
response = self.client.get(url, **headers)
self.assertEqual(response.status_code, status.HTTP_200_OK)
parsed_response = json.loads(response.content)
self.assertEqual(len(parsed_response), 1)
retrieved_posts = parsed_response[0]
self.assertEqual(retrieved_posts['text'], fake_post_text)
def test_retrieves_world_circle_post_with_hashtag(self):
"""
should retrieve world circle post with a given hashtag and return 200
"""
user = make_user()
headers = make_authentication_headers_for_user(user)
post_creator = make_user()
hashtag = make_hashtag()
fake_post_text = make_fake_post_text() + ' and a little hashtag #%s' % hashtag.name
post_creator.create_public_post(text=fake_post_text)
url = self._get_url(hashtag_name=hashtag.name)
response = self.client.get(url, **headers)
self.assertEqual(response.status_code, status.HTTP_200_OK)
parsed_response = json.loads(response.content)
self.assertEqual(len(parsed_response), 1)
retrieved_posts = parsed_response[0]
self.assertEqual(retrieved_posts['text'], fake_post_text)
def test_does_not_retrieve_private_community_not_part_of_post_with_hashtag(self):
"""
should not retrieve a private community not part of post with a givne hashtag and return 200
"""
user = make_user()
headers = make_authentication_headers_for_user(user)
community_creator = make_user()
community = make_community(creator=community_creator, type=Community.COMMUNITY_TYPE_PRIVATE)
post_creator = make_user()
community_creator.invite_user_with_username_to_community_with_name(community_name=community.name,
username=post_creator.username)
post_creator.join_community_with_name(community_name=community.name)
hashtag = make_hashtag()
fake_post_text = make_fake_post_text() + ' and a little hashtag #%s' % hashtag.name
post_creator.create_community_post(community_name=community.name, text=fake_post_text)
url = self._get_url(hashtag_name=hashtag.name)
response = self.client.get(url, **headers)
self.assertEqual(response.status_code, status.HTTP_200_OK)
parsed_response = json.loads(response.content)
self.assertEqual(len(parsed_response), 0)
def test_does_not_retrieve_private_community_part_of_post_with_hashtag(self):
"""
should not retrieve a private community part of post with a givne hashtag and return 200
"""
user = make_user()
headers = make_authentication_headers_for_user(user)
community_creator = make_user()
community = make_community(creator=community_creator, type=Community.COMMUNITY_TYPE_PRIVATE)
post_creator = make_user()
community_creator.invite_user_with_username_to_community_with_name(community_name=community.name,
username=post_creator.username)
post_creator.join_community_with_name(community_name=community.name)
community_creator.invite_user_with_username_to_community_with_name(community_name=community.name,
username=user.username)
user.join_community_with_name(community_name=community.name)
hashtag = make_hashtag()
fake_post_text = make_fake_post_text() + ' and a little hashtag #%s' % hashtag.name
post_creator.create_community_post(community_name=community.name, text=fake_post_text)
url = self._get_url(hashtag_name=hashtag.name)
response = self.client.get(url, **headers)
self.assertEqual(response.status_code, status.HTTP_200_OK)
parsed_response = json.loads(response.content)
self.assertEqual(len(parsed_response), 0)
def test_does_not_retrieve_encircled_post_with_hashtag(self):
"""
should not retrieve an encircled post with a givne hashtag and return 200
"""
user = make_user()
headers = make_authentication_headers_for_user(user)
post_creator = make_user()
circle = make_circle(creator=post_creator)
hashtag = make_hashtag()
fake_post_text = make_fake_post_text() + ' and a little hashtag #%s' % hashtag.name
post_creator.create_encircled_post(circles_ids=[circle.pk], text=fake_post_text)
url = self._get_url(hashtag_name=hashtag.name)
response = self.client.get(url, **headers)
self.assertEqual(response.status_code, status.HTTP_200_OK)
parsed_response = json.loads(response.content)
self.assertEqual(len(parsed_response), 0)
def test_does_not_retrieve_post_from_blocked_person_with_hashtag(self):
"""
should not retrieve a post from a blocked person with a given hashtag and return 200
"""
user = make_user()
headers = make_authentication_headers_for_user(user)
post_creator = make_user()
hashtag = make_hashtag()
fake_post_text = make_fake_post_text() + ' and a little hashtag #%s' % hashtag.name
post_creator.create_public_post(text=fake_post_text)
user.block_user_with_username(username=post_creator.username)
url = self._get_url(hashtag_name=hashtag.name)
response = self.client.get(url, **headers)
self.assertEqual(response.status_code, status.HTTP_200_OK)
parsed_response = json.loads(response.content)
self.assertEqual(len(parsed_response), 0)
def test_does_not_retrieve_post_from_blocking_person_with_hashtag(self):
"""
should not retrieve a post from a blocking person with a given hashtag and return 200
"""
user = make_user()
headers = make_authentication_headers_for_user(user)
post_creator = make_user()
hashtag = make_hashtag()
fake_post_text = make_fake_post_text() + ' and a little hashtag #%s' % hashtag.name
post_creator.create_public_post(text=fake_post_text)
post_creator.block_user_with_username(username=user.username)
url = self._get_url(hashtag_name=hashtag.name)
response = self.client.get(url, **headers)
self.assertEqual(response.status_code, status.HTTP_200_OK)
parsed_response = json.loads(response.content)
self.assertEqual(len(parsed_response), 0)
def _get_url(self, hashtag_name):
return reverse('hashtag-posts', kwargs={
'hashtag_name': hashtag_name
})
| 34.890756 | 117 | 0.686416 | 1,498 | 12,456 | 5.351802 | 0.077437 | 0.076837 | 0.050518 | 0.060372 | 0.857677 | 0.825496 | 0.810029 | 0.791942 | 0.779219 | 0.768617 | 0 | 0.008715 | 0.235389 | 12,456 | 356 | 118 | 34.988764 | 0.833053 | 0.074904 | 0 | 0.770408 | 0 | 0 | 0.023894 | 0 | 0 | 0 | 0 | 0 | 0.137755 | 1 | 0.071429 | false | 0 | 0.040816 | 0.010204 | 0.132653 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
fc85e02f08647a97ce1596d24c15fdaa6bed1b6d | 5,557 | py | Python | python/ray/serve/tests/test_runtime_env.py | mgelbart/ray | 4cec2286572e368a4bd64aae467751a384eff62d | [
"Apache-2.0"
] | 22 | 2018-05-08T05:52:34.000Z | 2020-04-01T10:09:55.000Z | python/ray/serve/tests/test_runtime_env.py | mgelbart/ray | 4cec2286572e368a4bd64aae467751a384eff62d | [
"Apache-2.0"
] | 51 | 2018-05-17T05:55:28.000Z | 2020-03-18T06:49:49.000Z | python/ray/serve/tests/test_runtime_env.py | mgelbart/ray | 4cec2286572e368a4bd64aae467751a384eff62d | [
"Apache-2.0"
] | 10 | 2018-04-27T10:50:59.000Z | 2020-02-24T02:41:43.000Z | import pytest
import sys
import ray
from ray._private.test_utils import run_string_as_driver
@pytest.mark.parametrize("use_ray_client", [False, True])
@pytest.mark.skipif(sys.platform == "win32", reason="Fail to create temp dir.")
def test_failure_condition(ray_start, tmp_dir, use_ray_client):
# Verify that the test conditions fail without passing the working dir.
with open("hello", "w") as f:
f.write("world")
driver = """
import ray
from ray import serve
if {use_ray_client}:
ray.util.connect("{client_addr}")
else:
ray.init(address="auto")
serve.start()
@serve.deployment
class Test:
def __call__(self, *args):
return open("hello").read()
Test.deploy()
handle = Test.get_handle()
try:
ray.get(handle.remote())
assert False, "Should not get here"
except FileNotFoundError:
pass
""".format(
use_ray_client=use_ray_client, client_addr=ray_start
)
run_string_as_driver(driver)
def connect_with_working_dir(use_ray_client: bool, ray_client_addr: str):
job_config = ray.job_config.JobConfig(runtime_env={"working_dir": "."})
if use_ray_client:
ray.util.connect(ray_client_addr, namespace="serve", job_config=job_config)
else:
ray.init(address="auto", namespace="serve", job_config=job_config)
@pytest.mark.parametrize("use_ray_client", [False, True])
@pytest.mark.skipif(sys.platform == "win32", reason="Fail to create temp dir.")
def test_working_dir_basic(ray_start, tmp_dir, use_ray_client):
with open("hello", "w") as f:
f.write("world")
driver = """
import ray
from ray import serve
job_config = ray.job_config.JobConfig(runtime_env={{"working_dir": "."}})
if {use_ray_client}:
ray.util.connect("{client_addr}", job_config=job_config)
else:
ray.init(address="auto", job_config=job_config)
serve.start()
@serve.deployment
class Test:
def __call__(self, *args):
return open("hello").read()
Test.deploy()
handle = Test.get_handle()
assert ray.get(handle.remote()) == "world"
""".format(
use_ray_client=use_ray_client, client_addr=ray_start
)
run_string_as_driver(driver)
@pytest.mark.parametrize("use_ray_client", [False, True])
@pytest.mark.skipif(sys.platform == "win32", reason="Fail to create temp dir.")
def test_working_dir_connect_from_new_driver(ray_start, tmp_dir, use_ray_client):
with open("hello", "w") as f:
f.write("world")
driver1 = """
import ray
from ray import serve
job_config = ray.job_config.JobConfig(runtime_env={{"working_dir": "."}})
if {use_ray_client}:
ray.util.connect("{client_addr}", namespace="serve", job_config=job_config)
else:
ray.init(address="auto", namespace="serve", job_config=job_config)
serve.start(detached=True)
@serve.deployment
class Test:
def __call__(self, *args):
return open("hello").read()
Test.deploy()
handle = Test.get_handle()
assert ray.get(handle.remote()) == "world"
""".format(
use_ray_client=use_ray_client, client_addr=ray_start
)
run_string_as_driver(driver1)
driver2 = """
import ray
from ray import serve
job_config = ray.job_config.JobConfig(runtime_env={{"working_dir": "."}})
if {use_ray_client}:
ray.util.connect("{client_addr}", namespace="serve", job_config=job_config)
else:
ray.init(address="auto", namespace="serve", job_config=job_config)
serve.start(detached=True)
Test = serve.get_deployment("Test")
handle = Test.get_handle()
assert ray.get(handle.remote()) == "world"
Test.delete()
""".format(
use_ray_client=use_ray_client, client_addr=ray_start
)
run_string_as_driver(driver2)
@pytest.mark.parametrize("use_ray_client", [False, True])
@pytest.mark.skipif(sys.platform == "win32", reason="Fail to create temp dir.")
def test_working_dir_scale_up_in_new_driver(ray_start, tmp_dir, use_ray_client):
with open("hello", "w") as f:
f.write("world")
driver1 = """
import os
import ray
from ray import serve
job_config = ray.job_config.JobConfig(runtime_env={{"working_dir": "."}})
if {use_ray_client}:
ray.util.connect("{client_addr}", namespace="serve", job_config=job_config)
else:
ray.init(address="auto", namespace="serve", job_config=job_config)
serve.start(detached=True)
@serve.deployment(version="1")
class Test:
def __call__(self, *args):
return os.getpid(), open("hello").read()
Test.deploy()
handle = Test.get_handle()
assert ray.get(handle.remote())[1] == "world"
""".format(
use_ray_client=use_ray_client, client_addr=ray_start
)
run_string_as_driver(driver1)
with open("hello", "w") as f:
f.write("no longer world")
driver2 = """
import ray
from ray import serve
job_config = ray.job_config.JobConfig(runtime_env={{"working_dir": "."}})
if {use_ray_client}:
ray.util.connect("{client_addr}", namespace="serve", job_config=job_config)
else:
ray.init(address="auto", namespace="serve", job_config=job_config)
serve.start(detached=True)
Test = serve.get_deployment("Test")
Test.options(num_replicas=2).deploy()
handle = Test.get_handle()
results = ray.get([handle.remote() for _ in range(1000)])
print(set(results))
assert all(r[1] == "world" for r in results), (
"results should still come from the first env")
assert len(set(r[0] for r in results)) == 2, (
"make sure there are two replicas")
Test.delete()
""".format(
use_ray_client=use_ray_client, client_addr=ray_start
)
run_string_as_driver(driver2)
if __name__ == "__main__":
import sys
sys.exit(pytest.main(["-sv", __file__]))
| 26.336493 | 83 | 0.703617 | 807 | 5,557 | 4.577447 | 0.154895 | 0.08771 | 0.090958 | 0.058473 | 0.844342 | 0.831619 | 0.826746 | 0.811586 | 0.80536 | 0.784245 | 0 | 0.005485 | 0.147022 | 5,557 | 210 | 84 | 26.461905 | 0.77384 | 0.012417 | 0 | 0.754717 | 0 | 0 | 0.59825 | 0.218921 | 0 | 0 | 0 | 0 | 0.044025 | 1 | 0.031447 | false | 0.006289 | 0.113208 | 0 | 0.169811 | 0.006289 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
fc9b0835c403f59085f1d06999249458a17dcce9 | 16,183 | py | Python | ofagent/indigo/submodules/loxigen-artifacts/pyloxi/loxi/of13/instruction_id.py | lanpinguo/apple-sauce | b16e7b78e58d0d17ad7f05476f38704a6b519ece | [
"Apache-2.0"
] | 1 | 2021-05-14T15:33:21.000Z | 2021-05-14T15:33:21.000Z | ofagent/indigo/submodules/loxigen-artifacts/pyloxi/loxi/of13/instruction_id.py | lanpinguo/apple-sauce | b16e7b78e58d0d17ad7f05476f38704a6b519ece | [
"Apache-2.0"
] | null | null | null | ofagent/indigo/submodules/loxigen-artifacts/pyloxi/loxi/of13/instruction_id.py | lanpinguo/apple-sauce | b16e7b78e58d0d17ad7f05476f38704a6b519ece | [
"Apache-2.0"
] | 2 | 2019-07-13T06:58:33.000Z | 2022-03-23T03:02:57.000Z | # Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
# Copyright (c) 2011, 2012 Open Networking Foundation
# Copyright (c) 2012, 2013 Big Switch Networks, Inc.
# See the file LICENSE.pyloxi which should have been included in the source distribution
# Automatically generated by LOXI from template module.py
# Do not modify
import struct
import loxi
import const
import common
import action
import instruction
import oxm
import action_id
import instruction_id
import meter_band
import bsn_tlv
import util
import loxi.generic_util
class instruction_id(loxi.OFObject):
    """Base class for OpenFlow 1.3 instruction-id TLVs.

    Wire layout is a bare header: 2-byte type followed by a 2-byte total
    length.  NOTE(review): this module is generated by LOXI ("Do not
    modify" header above); edits here are comments only so the code stays
    byte-identical to generator output.
    """
    # Registry mapping wire `type` values to concrete subclasses; populated
    # by module-level assignments after each subclass definition below.
    subtypes = {}
    def __init__(self, type=None):
        # `type` defaults to 0 when not supplied (generated-code convention).
        if type != None:
            self.type = type
        else:
            self.type = 0
        return
    def pack(self):
        """Serialize to wire format and return the packed bytes (str)."""
        packed = []
        packed.append(struct.pack("!H", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for len at index 1
        # Total message length is only known after all fields are packed,
        # so the placeholder at index 1 is patched here.
        length = sum([len(x) for x in packed])
        packed[1] = struct.pack("!H", length)
        return ''.join(packed)
    @staticmethod
    def unpack(reader):
        """Deserialize from `reader`.

        Peeks the 2-byte type and dispatches to a registered subclass when
        one matches; otherwise decodes a plain instruction_id.
        """
        subtype, = reader.peek('!H', 0)
        subclass = instruction_id.subtypes.get(subtype)
        if subclass:
            return subclass.unpack(reader)
        obj = instruction_id()
        obj.type = reader.read("!H")[0]
        _len = reader.read("!H")[0]
        orig_reader = reader
        # Constrain further reads to this TLV's payload: declared length
        # minus the 2+2 bytes of header already consumed.
        reader = orig_reader.slice(_len - (2 + 2))
        return obj
    def __eq__(self, other):
        # Equal only to objects of the exact same class with the same type.
        if type(self) != type(other): return False
        if self.type != other.type: return False
        return True
    def pretty_print(self, q):
        # `q` is a pretty-printer with text/group/indent/breakable methods
        # (LOXI's pretty-printing protocol).
        q.text("instruction_id {")
        with q.group():
            with q.indent(2):
                q.breakable()
            q.breakable()
        q.text('}')
class apply_actions(instruction_id):
    """instruction-id for OFPIT_APPLY_ACTIONS (wire type 4).

    Carries no fields beyond the common type/length header.
    """
    # OpenFlow instruction type value on the wire.
    type = 4
    def __init__(self):
        return
    def pack(self):
        """Serialize to wire format: type (4) plus patched total length."""
        packed = []
        packed.append(struct.pack("!H", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for len at index 1
        length = sum([len(x) for x in packed])
        packed[1] = struct.pack("!H", length)
        return ''.join(packed)
    @staticmethod
    def unpack(reader):
        """Deserialize from `reader`; asserts the wire type is 4."""
        obj = apply_actions()
        _type = reader.read("!H")[0]
        assert(_type == 4)
        _len = reader.read("!H")[0]
        orig_reader = reader
        # Restrict reads to this TLV's payload (length minus 4-byte header).
        reader = orig_reader.slice(_len - (2 + 2))
        return obj
    def __eq__(self, other):
        if type(self) != type(other): return False
        return True
    def pretty_print(self, q):
        q.text("apply_actions {")
        with q.group():
            with q.indent(2):
                q.breakable()
            q.breakable()
        q.text('}')

# Register this subclass so instruction_id.unpack dispatches type 4 here.
instruction_id.subtypes[4] = apply_actions
class experimenter(instruction_id):
    """instruction-id for OFPIT_EXPERIMENTER (wire type 0xffff).

    Adds a 4-byte experimenter id after the common header and acts as a
    second-level dispatch point: concrete vendor classes register on the
    experimenter id in `subtypes`.
    """
    # Registry mapping experimenter ids to vendor subclasses.
    subtypes = {}
    type = 65535
    def __init__(self, experimenter=None):
        # Experimenter id defaults to 0 when not supplied.
        if experimenter != None:
            self.experimenter = experimenter
        else:
            self.experimenter = 0
        return
    def pack(self):
        """Serialize: type, patched length, 4-byte experimenter id."""
        packed = []
        packed.append(struct.pack("!H", self.type))
        packed.append(struct.pack("!H", 0)) # placeholder for len at index 1
        packed.append(struct.pack("!L", self.experimenter))
        length = sum([len(x) for x in packed])
        packed[1] = struct.pack("!H", length)
        return ''.join(packed)
    @staticmethod
    def unpack(reader):
        """Deserialize; peeks the experimenter id at offset 4 and dispatches
        to a registered vendor subclass when one matches."""
        subtype, = reader.peek('!L', 4)
        subclass = experimenter.subtypes.get(subtype)
        if subclass:
            return subclass.unpack(reader)
        obj = experimenter()
        _type = reader.read("!H")[0]
        assert(_type == 65535)
        _len = reader.read("!H")[0]
        orig_reader = reader
        # Restrict reads to this TLV's payload (length minus 4-byte header).
        reader = orig_reader.slice(_len - (2 + 2))
        obj.experimenter = reader.read("!L")[0]
        return obj
    def __eq__(self, other):
        if type(self) != type(other): return False
        if self.experimenter != other.experimenter: return False
        return True
    def pretty_print(self, q):
        q.text("experimenter {")
        with q.group():
            with q.indent(2):
                q.breakable()
            q.breakable()
        q.text('}')

# Register under the experimenter wire type (0xffff) for first-level dispatch.
instruction_id.subtypes[65535] = experimenter
class bsn(experimenter):
    """Big Switch Networks (experimenter id 6035143) instruction id.

    Vendor-specific subtypes register in `subtypes`; `unpack` dispatches
    on the subtype word.
    """
    subtypes = {}
    type = 65535
    experimenter = 6035143

    def __init__(self, subtype=None):
        # `is not None` instead of `!= None`; behavior unchanged.
        if subtype is not None:
            self.subtype = subtype
        else:
            self.subtype = 0
        return

    def pack(self):
        """Serialize as type, length, experimenter id, then subtype."""
        packed = []
        packed.append(struct.pack("!H", self.type))
        packed.append(struct.pack("!H", 0))  # placeholder for len at index 1
        packed.append(struct.pack("!L", self.experimenter))
        packed.append(struct.pack("!L", self.subtype))
        length = sum(len(x) for x in packed)
        packed[1] = struct.pack("!H", length)  # back-patch the real length
        # b''.join keeps Python 2 behavior and also works on Python 3,
        # where struct.pack returns bytes.
        return b''.join(packed)

    @staticmethod
    def unpack(reader):
        """Parse, dispatching on the subtype 8 bytes in (after type+len+experimenter)."""
        subtype, = reader.peek('!L', 8)
        subclass = bsn.subtypes.get(subtype)
        if subclass:
            return subclass.unpack(reader)
        obj = bsn()
        _type = reader.read("!H")[0]
        assert(_type == 65535)
        _len = reader.read("!H")[0]
        orig_reader = reader
        # Bound further reads to the declared payload.
        reader = orig_reader.slice(_len - (2 + 2))
        _experimenter = reader.read("!L")[0]
        assert(_experimenter == 6035143)
        obj.subtype = reader.read("!L")[0]
        return obj

    def __eq__(self, other):
        if type(self) != type(other): return False
        if self.subtype != other.subtype: return False
        return True

    def pretty_print(self, q):
        q.text("bsn {")
        with q.group():
            with q.indent(2):
                q.breakable()
            q.breakable()
        q.text('}')

experimenter.subtypes[6035143] = bsn
class bsn_arp_offload(bsn):
    """BSN arp_offload instruction id (experimenter subtype 1)."""
    type = 65535
    experimenter = 6035143
    subtype = 1

    def __init__(self):
        return

    def pack(self):
        """Serialize as type, length, experimenter id, then subtype."""
        packed = []
        packed.append(struct.pack("!H", self.type))
        packed.append(struct.pack("!H", 0))  # placeholder for len at index 1
        packed.append(struct.pack("!L", self.experimenter))
        packed.append(struct.pack("!L", self.subtype))
        length = sum(len(x) for x in packed)
        packed[1] = struct.pack("!H", length)  # back-patch the real length
        # b''.join keeps Python 2 behavior and also works on Python 3,
        # where struct.pack returns bytes.
        return b''.join(packed)

    @staticmethod
    def unpack(reader):
        """Parse a bsn_arp_offload id; asserts type, experimenter and subtype."""
        obj = bsn_arp_offload()
        _type = reader.read("!H")[0]
        assert(_type == 65535)
        _len = reader.read("!H")[0]
        orig_reader = reader
        # Bound further reads to the declared payload.
        reader = orig_reader.slice(_len - (2 + 2))
        _experimenter = reader.read("!L")[0]
        assert(_experimenter == 6035143)
        _subtype = reader.read("!L")[0]
        assert(_subtype == 1)
        return obj

    def __eq__(self, other):
        if type(self) != type(other): return False
        return True

    def pretty_print(self, q):
        q.text("bsn_arp_offload {")
        with q.group():
            with q.indent(2):
                q.breakable()
            q.breakable()
        q.text('}')

bsn.subtypes[1] = bsn_arp_offload
class bsn_dhcp_offload(bsn):
    """BSN dhcp_offload instruction id (experimenter subtype 2)."""
    type = 65535
    experimenter = 6035143
    subtype = 2

    def __init__(self):
        return

    def pack(self):
        """Serialize as type, length, experimenter id, then subtype."""
        packed = []
        packed.append(struct.pack("!H", self.type))
        packed.append(struct.pack("!H", 0))  # placeholder for len at index 1
        packed.append(struct.pack("!L", self.experimenter))
        packed.append(struct.pack("!L", self.subtype))
        length = sum(len(x) for x in packed)
        packed[1] = struct.pack("!H", length)  # back-patch the real length
        # b''.join keeps Python 2 behavior and also works on Python 3,
        # where struct.pack returns bytes.
        return b''.join(packed)

    @staticmethod
    def unpack(reader):
        """Parse a bsn_dhcp_offload id; asserts type, experimenter and subtype."""
        obj = bsn_dhcp_offload()
        _type = reader.read("!H")[0]
        assert(_type == 65535)
        _len = reader.read("!H")[0]
        orig_reader = reader
        # Bound further reads to the declared payload.
        reader = orig_reader.slice(_len - (2 + 2))
        _experimenter = reader.read("!L")[0]
        assert(_experimenter == 6035143)
        _subtype = reader.read("!L")[0]
        assert(_subtype == 2)
        return obj

    def __eq__(self, other):
        if type(self) != type(other): return False
        return True

    def pretty_print(self, q):
        q.text("bsn_dhcp_offload {")
        with q.group():
            with q.indent(2):
                q.breakable()
            q.breakable()
        q.text('}')

bsn.subtypes[2] = bsn_dhcp_offload
class bsn_disable_split_horizon_check(bsn):
    """BSN disable_split_horizon_check instruction id (experimenter subtype 3)."""
    type = 65535
    experimenter = 6035143
    subtype = 3

    def __init__(self):
        return

    def pack(self):
        """Serialize as type, length, experimenter id, then subtype."""
        packed = []
        packed.append(struct.pack("!H", self.type))
        packed.append(struct.pack("!H", 0))  # placeholder for len at index 1
        packed.append(struct.pack("!L", self.experimenter))
        packed.append(struct.pack("!L", self.subtype))
        length = sum(len(x) for x in packed)
        packed[1] = struct.pack("!H", length)  # back-patch the real length
        # b''.join keeps Python 2 behavior and also works on Python 3,
        # where struct.pack returns bytes.
        return b''.join(packed)

    @staticmethod
    def unpack(reader):
        """Parse this id; asserts type, experimenter and subtype."""
        obj = bsn_disable_split_horizon_check()
        _type = reader.read("!H")[0]
        assert(_type == 65535)
        _len = reader.read("!H")[0]
        orig_reader = reader
        # Bound further reads to the declared payload.
        reader = orig_reader.slice(_len - (2 + 2))
        _experimenter = reader.read("!L")[0]
        assert(_experimenter == 6035143)
        _subtype = reader.read("!L")[0]
        assert(_subtype == 3)
        return obj

    def __eq__(self, other):
        if type(self) != type(other): return False
        return True

    def pretty_print(self, q):
        q.text("bsn_disable_split_horizon_check {")
        with q.group():
            with q.indent(2):
                q.breakable()
            q.breakable()
        q.text('}')

bsn.subtypes[3] = bsn_disable_split_horizon_check
class bsn_disable_src_mac_check(bsn):
    """BSN disable_src_mac_check instruction id (experimenter subtype 0)."""
    type = 65535
    experimenter = 6035143
    subtype = 0

    def __init__(self):
        return

    def pack(self):
        """Serialize as type, length, experimenter id, then subtype."""
        packed = []
        packed.append(struct.pack("!H", self.type))
        packed.append(struct.pack("!H", 0))  # placeholder for len at index 1
        packed.append(struct.pack("!L", self.experimenter))
        packed.append(struct.pack("!L", self.subtype))
        length = sum(len(x) for x in packed)
        packed[1] = struct.pack("!H", length)  # back-patch the real length
        # b''.join keeps Python 2 behavior and also works on Python 3,
        # where struct.pack returns bytes.
        return b''.join(packed)

    @staticmethod
    def unpack(reader):
        """Parse this id; asserts type, experimenter and subtype."""
        obj = bsn_disable_src_mac_check()
        _type = reader.read("!H")[0]
        assert(_type == 65535)
        _len = reader.read("!H")[0]
        orig_reader = reader
        # Bound further reads to the declared payload.
        reader = orig_reader.slice(_len - (2 + 2))
        _experimenter = reader.read("!L")[0]
        assert(_experimenter == 6035143)
        _subtype = reader.read("!L")[0]
        assert(_subtype == 0)
        return obj

    def __eq__(self, other):
        if type(self) != type(other): return False
        return True

    def pretty_print(self, q):
        q.text("bsn_disable_src_mac_check {")
        with q.group():
            with q.indent(2):
                q.breakable()
            q.breakable()
        q.text('}')

bsn.subtypes[0] = bsn_disable_src_mac_check
class clear_actions(instruction_id):
    """Clear-actions instruction id (wire type 5); a header-only id element."""
    type = 5

    def __init__(self):
        return

    def pack(self):
        """Serialize as type (uint16) followed by total length (uint16)."""
        packed = []
        packed.append(struct.pack("!H", self.type))
        packed.append(struct.pack("!H", 0))  # placeholder for len at index 1
        length = sum(len(x) for x in packed)
        packed[1] = struct.pack("!H", length)  # back-patch the real length
        # b''.join keeps Python 2 behavior and also works on Python 3,
        # where struct.pack returns bytes.
        return b''.join(packed)

    @staticmethod
    def unpack(reader):
        """Parse a clear_actions id from `reader`; asserts the type tag."""
        obj = clear_actions()
        _type = reader.read("!H")[0]
        assert(_type == 5)
        _len = reader.read("!H")[0]
        orig_reader = reader
        # Bound further reads to the declared payload.
        reader = orig_reader.slice(_len - (2 + 2))
        return obj

    def __eq__(self, other):
        if type(self) != type(other): return False
        return True

    def pretty_print(self, q):
        q.text("clear_actions {")
        with q.group():
            with q.indent(2):
                q.breakable()
            q.breakable()
        q.text('}')

instruction_id.subtypes[5] = clear_actions
class goto_table(instruction_id):
    """Goto-table instruction id (wire type 1); a header-only id element."""
    type = 1

    def __init__(self):
        return

    def pack(self):
        """Serialize as type (uint16) followed by total length (uint16)."""
        packed = []
        packed.append(struct.pack("!H", self.type))
        packed.append(struct.pack("!H", 0))  # placeholder for len at index 1
        length = sum(len(x) for x in packed)
        packed[1] = struct.pack("!H", length)  # back-patch the real length
        # b''.join keeps Python 2 behavior and also works on Python 3,
        # where struct.pack returns bytes.
        return b''.join(packed)

    @staticmethod
    def unpack(reader):
        """Parse a goto_table id from `reader`; asserts the type tag."""
        obj = goto_table()
        _type = reader.read("!H")[0]
        assert(_type == 1)
        _len = reader.read("!H")[0]
        orig_reader = reader
        # Bound further reads to the declared payload.
        reader = orig_reader.slice(_len - (2 + 2))
        return obj

    def __eq__(self, other):
        if type(self) != type(other): return False
        return True

    def pretty_print(self, q):
        q.text("goto_table {")
        with q.group():
            with q.indent(2):
                q.breakable()
            q.breakable()
        q.text('}')

instruction_id.subtypes[1] = goto_table
class meter(instruction_id):
    """Meter instruction id (wire type 6); a header-only id element."""
    type = 6

    def __init__(self):
        return

    def pack(self):
        """Serialize as type (uint16) followed by total length (uint16)."""
        packed = []
        packed.append(struct.pack("!H", self.type))
        packed.append(struct.pack("!H", 0))  # placeholder for len at index 1
        length = sum(len(x) for x in packed)
        packed[1] = struct.pack("!H", length)  # back-patch the real length
        # b''.join keeps Python 2 behavior and also works on Python 3,
        # where struct.pack returns bytes.
        return b''.join(packed)

    @staticmethod
    def unpack(reader):
        """Parse a meter id from `reader`; asserts the type tag."""
        obj = meter()
        _type = reader.read("!H")[0]
        assert(_type == 6)
        _len = reader.read("!H")[0]
        orig_reader = reader
        # Bound further reads to the declared payload.
        reader = orig_reader.slice(_len - (2 + 2))
        return obj

    def __eq__(self, other):
        if type(self) != type(other): return False
        return True

    def pretty_print(self, q):
        q.text("meter {")
        with q.group():
            with q.indent(2):
                q.breakable()
            q.breakable()
        q.text('}')

instruction_id.subtypes[6] = meter
class write_actions(instruction_id):
    """Write-actions instruction id (wire type 3); a header-only id element."""
    type = 3

    def __init__(self):
        return

    def pack(self):
        """Serialize as type (uint16) followed by total length (uint16)."""
        packed = []
        packed.append(struct.pack("!H", self.type))
        packed.append(struct.pack("!H", 0))  # placeholder for len at index 1
        length = sum(len(x) for x in packed)
        packed[1] = struct.pack("!H", length)  # back-patch the real length
        # b''.join keeps Python 2 behavior and also works on Python 3,
        # where struct.pack returns bytes.
        return b''.join(packed)

    @staticmethod
    def unpack(reader):
        """Parse a write_actions id from `reader`; asserts the type tag."""
        obj = write_actions()
        _type = reader.read("!H")[0]
        assert(_type == 3)
        _len = reader.read("!H")[0]
        orig_reader = reader
        # Bound further reads to the declared payload.
        reader = orig_reader.slice(_len - (2 + 2))
        return obj

    def __eq__(self, other):
        if type(self) != type(other): return False
        return True

    def pretty_print(self, q):
        q.text("write_actions {")
        with q.group():
            with q.indent(2):
                q.breakable()
            q.breakable()
        q.text('}')

instruction_id.subtypes[3] = write_actions
class write_metadata(instruction_id):
    """Write-metadata instruction id (wire type 2); a header-only id element."""
    type = 2

    def __init__(self):
        return

    def pack(self):
        """Serialize as type (uint16) followed by total length (uint16)."""
        packed = []
        packed.append(struct.pack("!H", self.type))
        packed.append(struct.pack("!H", 0))  # placeholder for len at index 1
        length = sum(len(x) for x in packed)
        packed[1] = struct.pack("!H", length)  # back-patch the real length
        # b''.join keeps Python 2 behavior and also works on Python 3,
        # where struct.pack returns bytes.
        return b''.join(packed)

    @staticmethod
    def unpack(reader):
        """Parse a write_metadata id from `reader`; asserts the type tag."""
        obj = write_metadata()
        _type = reader.read("!H")[0]
        assert(_type == 2)
        _len = reader.read("!H")[0]
        orig_reader = reader
        # Bound further reads to the declared payload.
        reader = orig_reader.slice(_len - (2 + 2))
        return obj

    def __eq__(self, other):
        if type(self) != type(other): return False
        return True

    def pretty_print(self, q):
        q.text("write_metadata {")
        with q.group():
            with q.indent(2):
                q.breakable()
            q.breakable()
        q.text('}')

instruction_id.subtypes[2] = write_metadata
| 27.382403 | 88 | 0.557993 | 1,985 | 16,183 | 4.396977 | 0.067506 | 0.057287 | 0.049152 | 0.093263 | 0.833639 | 0.815651 | 0.815651 | 0.786549 | 0.775206 | 0.775206 | 0 | 0.027854 | 0.305629 | 16,183 | 590 | 89 | 27.428814 | 0.748865 | 0.045974 | 0 | 0.776398 | 0 | 0 | 0.02614 | 0.003632 | 0 | 0 | 0 | 0 | 0.043478 | 1 | 0.134576 | false | 0 | 0.026915 | 0.020704 | 0.351967 | 0.026915 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
fca6d060a81f311a8da9d0e8bcb5a594869289b0 | 131 | py | Python | tests/conftest.py | clayman-micro/graph | 742015c276f89841310794e952280a06c24fe8ef | [
"MIT"
] | null | null | null | tests/conftest.py | clayman-micro/graph | 742015c276f89841310794e952280a06c24fe8ef | [
"MIT"
] | null | null | null | tests/conftest.py | clayman-micro/graph | 742015c276f89841310794e952280a06c24fe8ef | [
"MIT"
] | null | null | null | import faker # type: ignore
import pytest # type: ignore
@pytest.fixture(scope="session")
def fake():
    """Session-scoped Faker instance shared by all tests in the run."""
    return faker.Faker()
| 16.375 | 32 | 0.694656 | 17 | 131 | 5.352941 | 0.647059 | 0.21978 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.175573 | 131 | 7 | 33 | 18.714286 | 0.842593 | 0.19084 | 0 | 0 | 0 | 0 | 0.067961 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.2 | true | 0 | 0.4 | 0.2 | 0.8 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 1 | 1 | 0 | 0 | 7 |
5d9abc716893c97883e7fabbf6db4eda4598e0c2 | 3,525 | py | Python | code/fir_libs_time.py | xing710/ModSimPy | 87f0f481926c40855223e2843bd728edb235c516 | [
"MIT"
] | null | null | null | code/fir_libs_time.py | xing710/ModSimPy | 87f0f481926c40855223e2843bd728edb235c516 | [
"MIT"
] | null | null | null | code/fir_libs_time.py | xing710/ModSimPy | 87f0f481926c40855223e2843bd728edb235c516 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# coding: utf-8
# In[4]:
get_ipython().run_cell_magic('time', '', "# # Configure Jupyter so figures appear in the notebook\n# %matplotlib inline\n\n# # Configure Jupyter to display the assigned value after an assignment\n# %config InteractiveShell.ast_node_interactivity='last_expr_or_assign'\n\n# import functions from the modsim.py module\nfrom modsim import *\n\nfrom scipy.signal import kaiserord, lfilter, firwin, freqz\n\nimport numpy as np\n \nimport wave\n \nimport struct\n \nimport matplotlib.pyplot as plt\n\nimport time\n\n# The sampling rate of the analog to digital convert\n \nsampling_rate = 48000.0\n \namplitude = 16000\n\n# frequency is the number of times a wave repeats a second\n \nfrequency = 1000\n \nnoisy_freq = 15000\n \nnum_samples = 48000\n \n# The sampling rate of the analog to digital convert\n \nsampling_rate = 48000\n\n\nh=[2, 10, 14, 7, -7, -17, -13 , 3,\n 19, 21, 4, -21, -32, -16, 18 , 43,\n 34, -8, -51, -56, -11, 53, 81 , 41,\n -44, -104, -81, 19, 119, 129, 24 , -119,\n -178, -88, 95, 222, 171, -41, -248 , -266,\n -50, 244, 366, 181, -195, -457, -353 , 85,\n 522, 568, 109, -540, -831, -424, 474 , 1163,\n 953, -245, -1661, -2042, -463, 2940, 6859 , 9469,\n 9969, 6859, 2940, -463, -2042, -1661, -245 , 953,\n 1163, 474, -424, -831, -540, 109, 568 , 522,\n 85, -353, -457, -195, 181, 366, 244 , -50,\n -266, -248, -41, 171, 222, 95, -88 , -178,\n -119, 24, 129, 119, 19, -81, -104 , -44,\n 41, 81, 53, -11, -56, -51, -8 , 34,\n 43, 18, -16, -32, -21, 4, 21 , 19,\n 3, -13, -17, -7, 7, 14, 10 , -2];\n#Create the sine wave and noise\n \nsine_wave = [np.sin(2 * np.pi * frequency * x1 / sampling_rate) for x1 in range(num_samples)]\n \nsine_noise = [np.sin(2 * np.pi * noisy_freq * x1/ sampling_rate) for x1 in range(num_samples)]\n \n#Convert them to numpy arrays\n \nsine_wave = np.array(sine_wave)\n \nsine_noise = np.array(sine_noise)\n\n# Add them to create a noisy signal\n \ncombined_signal = sine_wave + sine_noise\n\ndef fir_low (signal,sampling_rate):\n output= ['x' for n 
in range(sampling_rate)]\n for i in range (sampling_rate):\n acc=0\n for j in range(128): \n acc+=h[j]*signal[i-j]\n output[i]= acc\n return output\n\n\n# signal_after_filter=fir_low(combined_signal,sampling_rate)\n\n# plt.plot(signal_after_filter[:500])\n\n#------------------------------------------------\n# Create a FIR filter and apply it to x.\n#------------------------------------------------\n\n# The Nyquist rate of the signal.\nnyq_rate = sampling_rate / 2.0\n\n# The desired width of the transition from pass to stop,\n# relative to the Nyquist rate. We'll design the filter\n# with a 5 Hz transition width.\nwidth = 1000.0/nyq_rate\n\n# The desired attenuation in the stop band, in dB.\nripple_db = 60.0\n\n# Compute the order and Kaiser parameter for the FIR filter.\nN, beta = kaiserord(ripple_db, width)\n\n# The cutoff frequency of the filter.\ncutoff_hz = 1500.0\n\n# Use firwin with a Kaiser window to create a lowpass FIR filter.\ntaps = firwin(N, cutoff_hz/nyq_rate, window=('kaiser', beta))\n\n# Use lfilter to filter x with the FIR filter.\nfiltered_x = lfilter(taps, 1.0, combined_signal)\n\n# plt.plot(filtered_x[:500])")
# In[ ]:
| 235 | 3,459 | 0.609645 | 598 | 3,525 | 3.513378 | 0.364548 | 0.020942 | 0.014279 | 0.012375 | 0.123751 | 0.095193 | 0.095193 | 0.095193 | 0.095193 | 0.095193 | 0 | 0.139484 | 0.219007 | 3,525 | 14 | 3,460 | 251.785714 | 0.623683 | 0.013617 | 0 | 0 | 0 | 1 | 0.985587 | 0.139521 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 8 |
5dcf62051daf4150e385de6346f81a4c0e010c42 | 11,909 | py | Python | virtual/lib/python3.6/site-packages/tests/test_views_rate.py | garvinipkiss/the-ride | 7662cf4d27b6f42ade45446f36573d73f337d3df | [
"Unlicense"
] | null | null | null | virtual/lib/python3.6/site-packages/tests/test_views_rate.py | garvinipkiss/the-ride | 7662cf4d27b6f42ade45446f36573d73f337d3df | [
"Unlicense"
] | null | null | null | virtual/lib/python3.6/site-packages/tests/test_views_rate.py | garvinipkiss/the-ride | 7662cf4d27b6f42ade45446f36573d73f337d3df | [
"Unlicense"
] | null | null | null | from __future__ import unicode_literals
import json
import pytest
from random import randint
from django.conf import settings
from django.contrib.auth import get_user_model
from django.contrib.contenttypes.models import ContentType
try:
from django.core.urlresolvers import reverse
except ImportError:
from django.urls import reverse
from django.test import override_settings, Client, TestCase
from model_mommy import mommy
from star_ratings import get_star_ratings_rating_model
from star_ratings.models import UserRating
from .models import Foo
@pytest.mark.django_db
class TestViewRate(TestCase):
    """End-to-end tests for the ``ratings:rate`` view.

    Covers anonymous vs. authenticated rating (STAR_RATINGS_ANONYMOUS),
    re-rating behaviour (STAR_RATINGS_RERATE), redirect targets (``next``
    query parameter), and AJAX (XHR) JSON responses.
    """
    # NOTE(review): ``csrf_checks`` is not read by Django's test Client;
    # presumably left over from a WebTest-style harness — confirm.
    csrf_checks = False
    client = Client(REMOTE_ADDR='127.0.0.1')

    def post_json(self, url, data, **kwargs):
        """POST ``data`` JSON-encoded to ``url``.

        Recognised kwargs: ``user`` logs that user in first (password is
        always 'password'); ``xhr`` marks the request as AJAX.  Any other
        kwargs (e.g. ``expect_errors``) are ignored.
        """
        if 'user' in kwargs:
            self.client.login(username=kwargs['user'].username, password='password')
        if 'xhr' in kwargs:
            return self.client.post(url, json.dumps(data), content_type='application/json', HTTP_X_REQUESTED_WITH='XMLHttpRequest')
        return self.client.post(url, json.dumps(data), content_type='application/json')

    def get_user(self):
        """Create and return a user whose password is 'password'."""
        return get_user_model().objects.create_user(
            username='username',
            first_name='first',
            last_name='last',
            email='example@example.com',
            password='password'
        )

    @override_settings(STAR_RATINGS_ANONYMOUS=False)
    def test_view_is_called_when_nobody_is_logged_in_and_anon_ratings_is_false___user_is_forwarded_to_login(self):
        foo = mommy.make(Foo)
        ratings = get_star_ratings_rating_model().objects.for_instance(foo)

        url = reverse('ratings:rate', args=(ratings.content_type_id, ratings.object_id))
        response = self.post_json(url, {'score': 1})

        self.assertRedirects(response, settings.LOGIN_URL + '?next=' + url, fetch_redirect_response=False)

    @override_settings(STAR_RATINGS_ANONYMOUS=True)
    def test_view_is_called_when_nobody_is_logged_in_and_anon_ratings_is_true___rating_is_created(self):
        foo = mommy.make(Foo)
        ratings = get_star_ratings_rating_model().objects.for_instance(foo)

        score = randint(1, 5)

        url = reverse('ratings:rate', args=(ratings.content_type_id, ratings.object_id))
        self.post_json(url, {'score': score})

        ct = ContentType.objects.get_for_model(foo)
        # Anonymous ratings are keyed by the client IP instead of a user.
        self.assertTrue(UserRating.objects.filter(rating__object_id=foo.pk, rating__content_type=ct, score=score, ip='127.0.0.1').exists())

    def test_user_is_logged_in_and_doesnt_already_have_a_rating___rating_is_created(self):
        user = self.get_user()
        foo = mommy.make(Foo)
        ratings = get_star_ratings_rating_model().objects.for_instance(foo)

        score = randint(1, 5)

        url = reverse('ratings:rate', args=(ratings.content_type_id, ratings.object_id))
        self.post_json(url, {'score': score}, user=user)

        ct = ContentType.objects.get_for_model(foo)
        self.assertTrue(UserRating.objects.filter(user=user, rating__object_id=foo.pk, rating__content_type=ct, score=score).exists())

    def test_user_is_logged_in_and_doesnt_already_have_a_rating_no_next_url_is_given___redirected_to_root(self):
        user = self.get_user()
        foo = mommy.make(Foo)
        ratings = get_star_ratings_rating_model().objects.for_instance(foo)

        score = randint(1, 5)

        url = reverse('ratings:rate', args=(ratings.content_type_id, ratings.object_id))
        response = self.post_json(url, {'score': score}, user=user)

        self.assertRedirects(response, '/', fetch_redirect_response=False)

    def test_user_is_logged_in_and_doesnt_already_have_a_rating_next_url_is_given___redirected_to_next(self):
        user = self.get_user()
        foo = mommy.make(Foo)
        ratings = get_star_ratings_rating_model().objects.for_instance(foo)

        score = randint(1, 5)

        url = reverse('ratings:rate', args=(ratings.content_type_id, ratings.object_id)) + '?next=/foo/bar'
        response = self.post_json(url, {'score': score}, user=user)

        self.assertRedirects(response, '/foo/bar', fetch_redirect_response=False)

    def test_user_is_logged_in_and_doesnt_already_have_a_rating_request_is_ajax___rating_is_created(self):
        user = self.get_user()
        foo = mommy.make(Foo)
        ratings = get_star_ratings_rating_model().objects.for_instance(foo)

        score = randint(1, 5)

        url = reverse('ratings:rate', args=(ratings.content_type_id, ratings.object_id))
        self.post_json(url, {'score': score}, user=user, xhr=True)

        ct = ContentType.objects.get_for_model(foo)
        self.assertTrue(UserRating.objects.filter(user=user, rating__object_id=foo.pk, rating__content_type=ct, score=score).exists())

    def test_user_is_logged_in_and_doesnt_already_have_a_rating_request_is_ajax___response_is_updated_aggregate_data(self):
        user = self.get_user()
        foo = mommy.make(Foo)
        ratings = get_star_ratings_rating_model().objects.for_instance(foo)

        score = randint(1, 5)

        url = reverse('ratings:rate', args=(ratings.content_type_id, ratings.object_id))
        response = self.post_json(
            url, {'score': score}, user=user, xhr=True)

        ratings = get_star_ratings_rating_model().objects.get(pk=ratings.pk)
        expected = ratings.to_dict()
        expected['user_rating'] = score
        expected['percentage'] = float(expected['percentage'])
        try:
            json_resp = response.json()
        except AttributeError:
            # Older Django test responses have no .json(); decode manually.
            json_resp = json.loads(response.content.decode())
        self.assertEqual(expected, json_resp)

    @override_settings(STAR_RATINGS_RERATE=True)
    def test_user_is_logged_in_already_has_a_rating_rerate_is_true___rating_is_updated(self):
        user = self.get_user()
        foo = mommy.make(Foo)
        ratings = get_star_ratings_rating_model().objects.for_instance(foo)
        rating = mommy.make(UserRating, rating=ratings, score=1, user=user)

        # New score is drawn from 2..5 so it always differs from the seed of 1.
        score = randint(2, 5)

        url = reverse('ratings:rate', args=(ratings.content_type_id, ratings.object_id))
        self.post_json(url, {'score': score}, user=user)

        rating = UserRating.objects.get(pk=rating.pk)
        self.assertEqual(score, rating.score)

    @override_settings(STAR_RATINGS_RERATE=True)
    def test_user_is_logged_in_already_has_a_rating_rerate_is_true___redirected_to_root(self):
        user = self.get_user()
        foo = mommy.make(Foo)
        ratings = get_star_ratings_rating_model().objects.for_instance(foo)
        mommy.make(UserRating, rating=ratings, score=1, user=user)

        score = randint(2, 5)

        url = reverse('ratings:rate', args=(ratings.content_type_id, ratings.object_id))
        response = self.post_json(url, {'score': score}, user=user)

        self.assertRedirects(response, '/', fetch_redirect_response=False)

    @override_settings(STAR_RATINGS_RERATE=True)
    def test_user_is_logged_in_already_has_a_rating_rerate_is_true___redirected_to_next(self):
        user = self.get_user()
        foo = mommy.make(Foo)
        ratings = get_star_ratings_rating_model().objects.for_instance(foo)
        mommy.make(UserRating, rating=ratings, score=1, user=user)

        score = randint(2, 5)

        url = reverse('ratings:rate', args=(ratings.content_type_id, ratings.object_id)) + '?next=/foo/bar'
        response = self.post_json(url, {'score': score}, user=user)

        self.assertRedirects(response, '/foo/bar', fetch_redirect_response=False)

    @override_settings(STAR_RATINGS_RERATE=True)
    def test_user_is_logged_in_already_has_a_rating_rerate_is_true_request_is_ajax___rating_is_updated(self):
        user = self.get_user()
        foo = mommy.make(Foo)
        ratings = get_star_ratings_rating_model().objects.for_instance(foo)
        rating = mommy.make(UserRating, rating=ratings, score=1, user=user)

        score = randint(2, 5)

        url = reverse('ratings:rate', args=(ratings.content_type_id, ratings.object_id))
        self.post_json(url, {'score': score}, user=user, xhr=True)

        rating = UserRating.objects.get(pk=rating.pk)
        self.assertEqual(score, rating.score)

    @override_settings(STAR_RATINGS_RERATE=True)
    def test_user_is_logged_in_already_has_a_rating_rerate_is_true_request_is_ajax___response_is_updated_aggregate_data(self):
        user = self.get_user()
        foo = mommy.make(Foo)
        ratings = get_star_ratings_rating_model().objects.for_instance(foo)
        mommy.make(UserRating, rating=ratings, score=1, user=user)

        score = randint(2, 5)

        url = reverse('ratings:rate', args=(ratings.content_type_id, ratings.object_id))
        response = self.post_json(url, {'score': score}, user=user, xhr=True)

        ratings = get_star_ratings_rating_model().objects.get(pk=ratings.pk)
        expected = ratings.to_dict()
        expected['percentage'] = float(expected['percentage'])
        expected['user_rating'] = score
        try:
            json_resp = response.json()
        except AttributeError:
            # Older Django test responses have no .json(); decode manually.
            json_resp = json.loads(response.content.decode())
        self.assertEqual(expected, json_resp)

    @override_settings(STAR_RATINGS_RERATE=False)
    def test_user_is_logged_in_already_has_a_rating_rerate_is_false___rating_is_not_changed(self):
        user = self.get_user()
        foo = mommy.make(Foo)
        ratings = get_star_ratings_rating_model().objects.for_instance(foo)
        rating = mommy.make(UserRating, rating=ratings, score=1, user=user)
        orig_score = rating.score

        score = randint(2, 5)

        url = reverse('ratings:rate', args=(ratings.content_type_id, ratings.object_id))
        self.post_json(url, {'score': score}, user=user)

        rating = UserRating.objects.get(pk=rating.pk)
        self.assertEqual(orig_score, rating.score)

    @override_settings(STAR_RATINGS_RERATE=False)
    def test_user_is_logged_in_already_has_a_rating_rerate_is_false___redirected_to_next(self):
        user = self.get_user()
        foo = mommy.make(Foo)
        ratings = get_star_ratings_rating_model().objects.for_instance(foo)
        mommy.make(UserRating, rating=ratings, score=1, user=user)

        score = randint(2, 5)

        url = reverse('ratings:rate', args=(ratings.content_type_id, ratings.object_id)) + '?next=/foo/bar'
        response = self.post_json(url, {'score': score}, user=user)

        self.assertRedirects(response, '/foo/bar', fetch_redirect_response=False)

    @override_settings(STAR_RATINGS_RERATE=False)
    def test_user_is_logged_in_already_has_a_rating_rerate_is_false_request_is_ajax___rating_is_not_changed(self):
        user = self.get_user()
        foo = mommy.make(Foo)
        ratings = get_star_ratings_rating_model().objects.for_instance(foo)
        rating = mommy.make(UserRating, rating=ratings, score=1, user=user)
        orig_score = rating.score

        score = randint(2, 5)

        url = reverse('ratings:rate', args=(ratings.content_type_id, ratings.object_id))
        # ``expect_errors`` is not inspected by post_json; it is ignored.
        self.post_json(url, {'score': score}, user=user, xhr=True, expect_errors=True)

        rating = UserRating.objects.get(pk=rating.pk)
        self.assertEqual(orig_score, rating.score)

    @override_settings(STAR_RATINGS_RERATE=False)
    def test_user_is_logged_in_already_has_a_rating_rerate_is_false_reuest_is_ajax___response_is_400(self):
        user = self.get_user()
        foo = mommy.make(Foo)
        ratings = get_star_ratings_rating_model().objects.for_instance(foo)
        mommy.make(UserRating, rating=ratings, score=1, user=user)

        score = randint(2, 5)

        url = reverse('ratings:rate', args=(ratings.content_type_id, ratings.object_id)) + '?next=/foo/bar'
        response = self.post_json(url, {'score': score}, user=user, xhr=True, expect_errors=True)

        self.assertEqual(400, response.status_code)
| 41.350694 | 139 | 0.706357 | 1,611 | 11,909 | 4.865922 | 0.090627 | 0.044904 | 0.032147 | 0.048476 | 0.87154 | 0.851512 | 0.84794 | 0.84794 | 0.841179 | 0.841179 | 0 | 0.005967 | 0.183811 | 11,909 | 287 | 140 | 41.494774 | 0.800514 | 0 | 0 | 0.702439 | 0 | 0 | 0.0461 | 0 | 0 | 0 | 0 | 0 | 0.078049 | 1 | 0.087805 | false | 0.009756 | 0.073171 | 0.004878 | 0.190244 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
5ddda5a0d89eeca197e5f828f9de85da2612163f | 37,025 | py | Python | python-client/cloudera/director/d6_2/database_servers_api.py | daanknoope/director-sdk | a099fedd5afe365aedbb50daa75de048ef6f7ab4 | [
"Apache-2.0"
] | 24 | 2015-03-04T01:39:36.000Z | 2020-06-30T13:34:27.000Z | python-client/cloudera/director/d6_2/database_servers_api.py | daanknoope/director-sdk | a099fedd5afe365aedbb50daa75de048ef6f7ab4 | [
"Apache-2.0"
] | 5 | 2015-11-04T08:18:47.000Z | 2019-01-05T11:12:19.000Z | python-client/cloudera/director/d6_2/database_servers_api.py | daanknoope/director-sdk | a099fedd5afe365aedbb50daa75de048ef6f7ab4 | [
"Apache-2.0"
] | 26 | 2015-02-24T21:13:53.000Z | 2020-12-15T06:01:46.000Z | # coding: utf-8
"""
Licensed to Cloudera, Inc. under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. Cloudera, Inc. licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from cloudera.director.common.client import ApiClient
class DatabaseServersApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
    """Store the supplied API client, creating a default ApiClient when absent."""
    self.api_client = ApiClient() if api_client is None else api_client
def create(self, environment, external_database_server_template, **kwargs):  # noqa: E501
    """Create a new external database server.

    Synchronous by default; pass async=True (through kwargs) to receive
    the request thread instead of the response data.

    :param async bool
    :param str environment: (required)
    :param ExternalDatabaseServerTemplate external_database_server_template: (required)
    :return: None, or the request thread when called asynchronously
    """
    # Both the sync and async paths of the generated code return exactly
    # what the _with_http_info helper yields, so one call covers both.
    kwargs['_return_http_data_only'] = True
    return self.create_with_http_info(environment, external_database_server_template, **kwargs)  # noqa: E501
def create_with_http_info(self, environment, external_database_server_template, **kwargs):  # noqa: E501
    """Create a new external database server  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True
    >>> thread = api.create_with_http_info(environment, external_database_server_template, async=True)
    >>> result = thread.get()

    :param async bool
    :param str environment: (required)
    :param ExternalDatabaseServerTemplate external_database_server_template: (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['environment', 'external_database_server_template']  # noqa: E501
    all_params.append('async')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Reject unknown keyword arguments, then merge the recognised ones into
    # params.  (.items() works on both Python 2 and 3, so six is not needed.)
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'environment' is set
    if ('environment' not in params or
            params['environment'] is None):
        raise ValueError("Missing the required parameter `environment` when calling `create`")  # noqa: E501
    # verify the required parameter 'external_database_server_template' is set
    if ('external_database_server_template' not in params or
            params['external_database_server_template'] is None):
        raise ValueError("Missing the required parameter `external_database_server_template` when calling `create`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'environment' in params:
        path_params['environment'] = params['environment']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    if 'external_database_server_template' in params:
        body_params = params['external_database_server_template']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['basic']  # noqa: E501

    # 'async' became a reserved word in Python 3.7, so it must not appear as
    # an explicit keyword argument (``async=...`` is a SyntaxError there);
    # pass all keyword options through a dict instead.
    call_options = {
        'body': body_params,
        'post_params': form_params,
        'files': local_var_files,
        'response_type': None,  # noqa: E501
        'auth_settings': auth_settings,
        'async': params.get('async'),
        'model_package': "cloudera.director.d6_2.models",
        '_return_http_data_only': params.get('_return_http_data_only'),
        '_preload_content': params.get('_preload_content', True),
        '_request_timeout': params.get('_request_timeout'),
        'collection_formats': collection_formats,
    }
    return self.api_client.call_api(
        '/api/d6.2/environments/{environment}/databaseServers', 'POST',
        path_params,
        query_params,
        header_params,
        **call_options)
def delete(self, environment, external_database_server, **kwargs): # noqa: E501
"""Delete an external database server by name # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.delete(environment, external_database_server, async=True)
>>> result = thread.get()
:param async bool
:param str environment: (required)
:param str external_database_server: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.delete_with_http_info(environment, external_database_server, **kwargs) # noqa: E501
else:
(data) = self.delete_with_http_info(environment, external_database_server, **kwargs) # noqa: E501
return data
def delete_with_http_info(self, environment, external_database_server, **kwargs): # noqa: E501
"""Delete an external database server by name # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.delete_with_http_info(environment, external_database_server, async=True)
>>> result = thread.get()
:param async bool
:param str environment: (required)
:param str external_database_server: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['environment', 'external_database_server'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'environment' is set
if ('environment' not in params or
params['environment'] is None):
raise ValueError("Missing the required parameter `environment` when calling `delete`") # noqa: E501
# verify the required parameter 'external_database_server' is set
if ('external_database_server' not in params or
params['external_database_server'] is None):
raise ValueError("Missing the required parameter `external_database_server` when calling `delete`") # noqa: E501
collection_formats = {}
path_params = {}
if 'environment' in params:
path_params['environment'] = params['environment'] # noqa: E501
if 'external_database_server' in params:
path_params['externalDatabaseServer'] = params['external_database_server'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basic'] # noqa: E501
return self.api_client.call_api(
'/api/d6.2/environments/{environment}/databaseServers/{externalDatabaseServer}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
model_package="cloudera.director.d6_2.models",
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_database_server_usage(self, environment, external_database_server, **kwargs): # noqa: E501
"""Get an external database server usage by name # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_database_server_usage(environment, external_database_server, async=True)
>>> result = thread.get()
:param async bool
:param str environment: (required)
:param str external_database_server: (required)
:return: ExternalDatabaseServerUsage
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_database_server_usage_with_http_info(environment, external_database_server, **kwargs) # noqa: E501
else:
(data) = self.get_database_server_usage_with_http_info(environment, external_database_server, **kwargs) # noqa: E501
return data
def get_database_server_usage_with_http_info(self, environment, external_database_server, **kwargs): # noqa: E501
"""Get an external database server usage by name # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_database_server_usage_with_http_info(environment, external_database_server, async=True)
>>> result = thread.get()
:param async bool
:param str environment: (required)
:param str external_database_server: (required)
:return: ExternalDatabaseServerUsage
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['environment', 'external_database_server'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_database_server_usage" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'environment' is set
if ('environment' not in params or
params['environment'] is None):
raise ValueError("Missing the required parameter `environment` when calling `get_database_server_usage`") # noqa: E501
# verify the required parameter 'external_database_server' is set
if ('external_database_server' not in params or
params['external_database_server'] is None):
raise ValueError("Missing the required parameter `external_database_server` when calling `get_database_server_usage`") # noqa: E501
collection_formats = {}
path_params = {}
if 'environment' in params:
path_params['environment'] = params['environment'] # noqa: E501
if 'external_database_server' in params:
path_params['externalDatabaseServer'] = params['external_database_server'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basic'] # noqa: E501
return self.api_client.call_api(
'/api/d6.2/environments/{environment}/databaseServers/{externalDatabaseServer}/usage', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ExternalDatabaseServerUsage', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
model_package="cloudera.director.d6_2.models",
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_redacted(self, environment, external_database_server, **kwargs): # noqa: E501
"""Get an external database server by name # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_redacted(environment, external_database_server, async=True)
>>> result = thread.get()
:param async bool
:param str environment: (required)
:param str external_database_server: (required)
:return: ExternalDatabaseServer
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_redacted_with_http_info(environment, external_database_server, **kwargs) # noqa: E501
else:
(data) = self.get_redacted_with_http_info(environment, external_database_server, **kwargs) # noqa: E501
return data
def get_redacted_with_http_info(self, environment, external_database_server, **kwargs): # noqa: E501
"""Get an external database server by name # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_redacted_with_http_info(environment, external_database_server, async=True)
>>> result = thread.get()
:param async bool
:param str environment: (required)
:param str external_database_server: (required)
:return: ExternalDatabaseServer
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['environment', 'external_database_server'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_redacted" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'environment' is set
if ('environment' not in params or
params['environment'] is None):
raise ValueError("Missing the required parameter `environment` when calling `get_redacted`") # noqa: E501
# verify the required parameter 'external_database_server' is set
if ('external_database_server' not in params or
params['external_database_server'] is None):
raise ValueError("Missing the required parameter `external_database_server` when calling `get_redacted`") # noqa: E501
collection_formats = {}
path_params = {}
if 'environment' in params:
path_params['environment'] = params['environment'] # noqa: E501
if 'external_database_server' in params:
path_params['externalDatabaseServer'] = params['external_database_server'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basic'] # noqa: E501
return self.api_client.call_api(
'/api/d6.2/environments/{environment}/databaseServers/{externalDatabaseServer}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ExternalDatabaseServer', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
model_package="cloudera.director.d6_2.models",
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_status(self, environment, external_database_server, **kwargs): # noqa: E501
"""Get an external database server status by name # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_status(environment, external_database_server, async=True)
>>> result = thread.get()
:param async bool
:param str environment: (required)
:param str external_database_server: (required)
:return: Status
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_status_with_http_info(environment, external_database_server, **kwargs) # noqa: E501
else:
(data) = self.get_status_with_http_info(environment, external_database_server, **kwargs) # noqa: E501
return data
def get_status_with_http_info(self, environment, external_database_server, **kwargs): # noqa: E501
"""Get an external database server status by name # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_status_with_http_info(environment, external_database_server, async=True)
>>> result = thread.get()
:param async bool
:param str environment: (required)
:param str external_database_server: (required)
:return: Status
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['environment', 'external_database_server'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_status" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'environment' is set
if ('environment' not in params or
params['environment'] is None):
raise ValueError("Missing the required parameter `environment` when calling `get_status`") # noqa: E501
# verify the required parameter 'external_database_server' is set
if ('external_database_server' not in params or
params['external_database_server'] is None):
raise ValueError("Missing the required parameter `external_database_server` when calling `get_status`") # noqa: E501
collection_formats = {}
path_params = {}
if 'environment' in params:
path_params['environment'] = params['environment'] # noqa: E501
if 'external_database_server' in params:
path_params['externalDatabaseServer'] = params['external_database_server'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basic'] # noqa: E501
return self.api_client.call_api(
'/api/d6.2/environments/{environment}/databaseServers/{externalDatabaseServer}/status', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Status', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
model_package="cloudera.director.d6_2.models",
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_template_redacted(self, environment, external_database_server, **kwargs): # noqa: E501
"""Get an external database server template by name # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_template_redacted(environment, external_database_server, async=True)
>>> result = thread.get()
:param async bool
:param str environment: (required)
:param str external_database_server: (required)
:return: ExternalDatabaseServerTemplate
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_template_redacted_with_http_info(environment, external_database_server, **kwargs) # noqa: E501
else:
(data) = self.get_template_redacted_with_http_info(environment, external_database_server, **kwargs) # noqa: E501
return data
def get_template_redacted_with_http_info(self, environment, external_database_server, **kwargs): # noqa: E501
"""Get an external database server template by name # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_template_redacted_with_http_info(environment, external_database_server, async=True)
>>> result = thread.get()
:param async bool
:param str environment: (required)
:param str external_database_server: (required)
:return: ExternalDatabaseServerTemplate
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['environment', 'external_database_server'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_template_redacted" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'environment' is set
if ('environment' not in params or
params['environment'] is None):
raise ValueError("Missing the required parameter `environment` when calling `get_template_redacted`") # noqa: E501
# verify the required parameter 'external_database_server' is set
if ('external_database_server' not in params or
params['external_database_server'] is None):
raise ValueError("Missing the required parameter `external_database_server` when calling `get_template_redacted`") # noqa: E501
collection_formats = {}
path_params = {}
if 'environment' in params:
path_params['environment'] = params['environment'] # noqa: E501
if 'external_database_server' in params:
path_params['externalDatabaseServer'] = params['external_database_server'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basic'] # noqa: E501
return self.api_client.call_api(
'/api/d6.2/environments/{environment}/databaseServers/{externalDatabaseServer}/template', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ExternalDatabaseServerTemplate', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
model_package="cloudera.director.d6_2.models",
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def list(self, environment, **kwargs): # noqa: E501
"""List all externalDatabaseServers # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.list(environment, async=True)
>>> result = thread.get()
:param async bool
:param str environment: (required)
:return: list[str]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.list_with_http_info(environment, **kwargs) # noqa: E501
else:
(data) = self.list_with_http_info(environment, **kwargs) # noqa: E501
return data
def list_with_http_info(self, environment, **kwargs): # noqa: E501
"""List all externalDatabaseServers # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.list_with_http_info(environment, async=True)
>>> result = thread.get()
:param async bool
:param str environment: (required)
:return: list[str]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['environment'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'environment' is set
if ('environment' not in params or
params['environment'] is None):
raise ValueError("Missing the required parameter `environment` when calling `list`") # noqa: E501
collection_formats = {}
path_params = {}
if 'environment' in params:
path_params['environment'] = params['environment'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basic'] # noqa: E501
return self.api_client.call_api(
'/api/d6.2/environments/{environment}/databaseServers', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[str]', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
model_package="cloudera.director.d6_2.models",
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def update(self, environment, external_database_server, updated_template, **kwargs): # noqa: E501
"""Update an existing external database server (unsupported) # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.update(environment, external_database_server, updated_template, async=True)
>>> result = thread.get()
:param async bool
:param str environment: (required)
:param str external_database_server: (required)
:param ExternalDatabaseServerTemplate updated_template: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.update_with_http_info(environment, external_database_server, updated_template, **kwargs) # noqa: E501
else:
(data) = self.update_with_http_info(environment, external_database_server, updated_template, **kwargs) # noqa: E501
return data
def update_with_http_info(self, environment, external_database_server, updated_template, **kwargs): # noqa: E501
"""Update an existing external database server (unsupported) # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.update_with_http_info(environment, external_database_server, updated_template, async=True)
>>> result = thread.get()
:param async bool
:param str environment: (required)
:param str external_database_server: (required)
:param ExternalDatabaseServerTemplate updated_template: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['environment', 'external_database_server', 'updated_template'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'environment' is set
if ('environment' not in params or
params['environment'] is None):
raise ValueError("Missing the required parameter `environment` when calling `update`") # noqa: E501
# verify the required parameter 'external_database_server' is set
if ('external_database_server' not in params or
params['external_database_server'] is None):
raise ValueError("Missing the required parameter `external_database_server` when calling `update`") # noqa: E501
# verify the required parameter 'updated_template' is set
if ('updated_template' not in params or
params['updated_template'] is None):
raise ValueError("Missing the required parameter `updated_template` when calling `update`") # noqa: E501
collection_formats = {}
path_params = {}
if 'environment' in params:
path_params['environment'] = params['environment'] # noqa: E501
if 'external_database_server' in params:
path_params['externalDatabaseServer'] = params['external_database_server'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'updated_template' in params:
body_params = params['updated_template']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basic'] # noqa: E501
return self.api_client.call_api(
'/api/d6.2/environments/{environment}/databaseServers/{externalDatabaseServer}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
model_package="cloudera.director.d6_2.models",
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
| 42.314286 | 144 | 0.631627 | 4,018 | 37,025 | 5.586112 | 0.054256 | 0.07984 | 0.116641 | 0.072043 | 0.943996 | 0.9362 | 0.928358 | 0.924215 | 0.919626 | 0.907864 | 0 | 0.015797 | 0.281891 | 37,025 | 874 | 145 | 42.3627 | 0.828381 | 0.064146 | 0 | 0.794926 | 0 | 0 | 0.245005 | 0.117964 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.008457 | null | null | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
5dfe40e08ea809e1df6bd183682e9a54c41dfd99 | 6,522 | py | Python | networks/CoDetectionCNN.py | naivete5656/BFP | 74c5604a9ba4eaa3ec3e2c76ef5e1282d7d10f18 | [
"MIT"
] | 8 | 2020-07-31T15:20:01.000Z | 2021-09-18T08:42:07.000Z | networks/CoDetectionCNN.py | naivete5656/BFP | 74c5604a9ba4eaa3ec3e2c76ef5e1282d7d10f18 | [
"MIT"
] | null | null | null | networks/CoDetectionCNN.py | naivete5656/BFP | 74c5604a9ba4eaa3ec3e2c76ef5e1282d7d10f18 | [
"MIT"
] | 5 | 2020-10-04T02:02:13.000Z | 2021-11-14T23:37:08.000Z | from .network_parts import *
import torch.nn as nn
class CoDetectionCNN(nn.Module):
def __init__(self, n_channels, n_classes, sig=True):
super().__init__()
filter_channel = [64, 256, 512]
self.inc = Inconv(n_channels, 64)
self.down = nn.ModuleList([Down(64, 128)])
self.down.append(Down(256, 256))
self.down.append(Down(256, 512))
self.down.append(Down(512, 512))
self.up1 = Up(1024, 256)
self.up2 = Up(512, 128)
self.up3_t = Up(256, 64)
self.up3_tn = Up(256, 64)
self.up4_t = Up(128, 32)
self.up4_tn = Up(128, 32)
self.outc_t = Outconv(32, n_classes, sig=sig)
self.outc_tn = Outconv(32, n_classes, sig=sig)
def forward(self, x):
x_inp1 = x[:, 0:1, :, :]
x_inp2 = x[:, 1::, :, :]
t_enc, tn_enc, dec = ([0] * 2 for _ in range(3))
enc = [0] * 4
t_enc[0] = self.inc(x_inp1)
tn_enc[0] = self.inc(x_inp2)
t_enc[1] = self.down[0](t_enc[0])
tn_enc[1] = self.down[0](tn_enc[0])
enc[0] = torch.cat([t_enc[1], tn_enc[1]], dim=1)
for i in range(3):
enc[i + 1] = self.down[i + 1](enc[i])
dec = self.up1(enc[-1], enc[-2])
dec = self.up2(dec, enc[-3])
t_dec = self.up3_t(dec, t_enc[-1])
tn_dec = self.up3_tn(dec, tn_enc[-1])
t_dec = self.up4_t(t_dec, t_enc[-2])
tn_dec = self.up4_tn(tn_dec, tn_enc[-2])
pred_t = self.outc_t(t_dec)
pred_tn = self.outc_tn(tn_dec)
return pred_t, pred_tn
class UNetSelfSuper(nn.Module):
def __init__(self, n_channels, n_classes, sig=True):
super().__init__()
filter_channel = [64, 256, 512]
self.inc = Inconv(n_channels, 64)
self.down = nn.ModuleList([Down(64, 128)])
self.down.append(Down(256, 256))
self.down.append(Down(256, 512))
self.down.append(Down(512, 512))
self.up1 = Up(1024, 256)
self.up2 = Up(512, 128)
self.up3_t = Up(256, 64)
self.up3_t1 = Upself(128, 128)
self.up3_tn = Up(256, 64)
self.up4_t = Up(128, 32)
self.up4_t1 = Upself(128, 32)
self.up4_tn = Up(128, 32)
self.outc_t = Outconv(32, n_classes, sig=sig)
self.outc_t1 = Outconv(32, n_classes, sig=sig)
self.outc_tn = Outconv(32, n_classes, sig=sig)
def forward(self, x):
x_inp1 = x[:, 0:1, :, :]
x_inp2 = x[:, 1::, :, :]
t_enc, tn_enc, dec = ([0] * 2 for _ in range(3))
enc = [0] * 4
t_enc[0] = self.inc(x_inp1)
tn_enc[0] = self.inc(x_inp2)
t_enc[1] = self.down[0](t_enc[0])
tn_enc[1] = self.down[0](tn_enc[0])
enc[0] = torch.cat([t_enc[1], tn_enc[1]], dim=1)
for i in range(3):
enc[i + 1] = self.down[i + 1](enc[i])
dec = self.up1(enc[-1], enc[-2])
dec = self.up2(dec, enc[-3])
t_dec = self.up3_t(dec, t_enc[-1])
t1_dec = self.up3_t1(dec)
tn_dec = self.up3_tn(dec, tn_enc[-1])
t_dec = self.up4_t(t_dec, t_enc[-2])
t1_dec = self.up3_t1(t1_dec)
tn_dec = self.up4_tn(tn_dec, tn_enc[-2])
pred_t = self.outc_t(t_dec)
pred_t1 = self.outc_t1(tn_dec)
pred_tn = self.outc_tn(tn_dec)
return pred_t, pred_tn, pred_t1
class UNetOldVer(nn.Module):
def __init__(self, n_channels, n_classes, sig=True):
super().__init__()
self.inc = Inconv(n_channels, 64)
self.down1 = Down(128, 128)
self.down2 = Down(128, 256)
self.down3 = Down(256, 512)
self.down4 = Down(512, 512)
self.up1 = Up(1024, 256)
self.up2 = Up(512, 128)
self.up3 = Up(256, 64)
self.up4 = Up(192, 64)
self.outc = Outconv(64, n_classes, sig=sig)
def forward(self, x):
x_inp1 = x[:, 0:1, :, :]
x_inp2 = x[:, 1::, :, :]
x1_1 = self.inc(x_inp1)
x1_2 = self.inc(x_inp2)
x1 = torch.cat([x1_1, x1_2], dim=1)
x2 = self.down1(x1)
x3 = self.down2(x2)
x4 = self.down3(x3)
x5 = self.down4(x4)
x = self.up1(x5, x4)
x = self.up2(x, x3)
x = self.up3(x, x2)
x = self.up4(x, x1)
x = self.outc(x)
return x, x1
class UNetOldVer2(nn.Module):
def __init__(self, n_channels, n_classes, sig=True):
super(UNet2, self).__init__()
self.inc = Inconv(n_channels, 64)
self.down1 = Down(64, 128)
self.down2 = Down(128, 256)
self.down3 = Down(256, 256)
self.down4 = Down(256, 256)
self.up1 = Up(1024, 512)
self.up2 = Up(1024, 256)
self.up3 = Up(512, 128)
self.up4 = Up(256, 128)
self.outc = Outconv(128, n_classes, sig=sig)
def forward(self, x):
x_inp1 = x[:, 0:1, :, :]
x_inp2 = x[:, 1::, :, :]
x1_1 = self.inc(x_inp1)
x1_2 = self.inc(x_inp2)
x1 = torch.cat([x1_1, x1_2], dim=1)
x2_1 = self.down1(x1_1)
x2_2 = self.down1(x1_2)
x2 = torch.cat([x2_1, x2_2], dim=1)
x3_1 = self.down2(x2_1)
x3_2 = self.down2(x2_2)
x3 = torch.cat([x3_1, x3_2], dim=1)
x4_1 = self.down3(x3_1)
x4_2 = self.down3(x3_2)
x4 = torch.cat([x4_1, x4_2], dim=1)
x5_1 = self.down4(x4_1)
x5_2 = self.down4(x4_2)
x5 = torch.cat([x5_1, x5_2], dim=1)
x = self.up1(x5, x4)
x = self.up2(x, x3)
x = self.up3(x, x2)
x = self.up4(x, x1)
x = self.outc(x)
return x, x1
class UNet_2d(nn.Module):
def __init__(self, n_channels, n_classes, sig=True):
super().__init__()
self.inc = Inconv(n_channels, 64)
self.down1 = Down(64, 128)
self.down2 = Down(128, 256)
self.down3 = Down(256, 512)
self.down4 = Down(512, 512)
self.up1 = Up(1024, 256)
self.up2 = Up(512, 128)
self.up3 = Up(256, 64)
self.up4 = Up(128, 64)
self.outc = Outconv(64, n_classes, sig=sig)
def forward(self, x):
x1 = self.inc(x)
x2 = self.down1(x1)
x3 = self.down2(x2)
x4 = self.down3(x3)
x5 = self.down4(x4)
x = self.up1(x5, x4)
x = self.up2(x, x3)
x = self.up3(x, x2)
x = self.up4(x, x1)
x = self.outc(x)
return x
if __name__ == "__main__":
import torch
x = torch.rand((10, 2, 256, 256))
net = UNet3(n_channels=1, n_classes=1)
net(x)
| 30.0553 | 56 | 0.523612 | 1,072 | 6,522 | 2.985075 | 0.079291 | 0.037188 | 0.044688 | 0.035 | 0.827813 | 0.816563 | 0.816563 | 0.816563 | 0.816563 | 0.816563 | 0 | 0.135722 | 0.315394 | 6,522 | 216 | 57 | 30.194444 | 0.580963 | 0 | 0 | 0.75 | 0 | 0 | 0.001227 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.055556 | false | 0 | 0.016667 | 0 | 0.127778 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
f8d57c0124b0b5f8d67960ff193784982355ca8d | 6,152 | py | Python | build/lib/fdscraper/scrape/download.py | chinmaykurade/fdscraper | 074cea1c4e86c2c00531ef6049287652e4426565 | [
"Apache-2.0"
] | null | null | null | build/lib/fdscraper/scrape/download.py | chinmaykurade/fdscraper | 074cea1c4e86c2c00531ef6049287652e4426565 | [
"Apache-2.0"
] | null | null | null | build/lib/fdscraper/scrape/download.py | chinmaykurade/fdscraper | 074cea1c4e86c2c00531ef6049287652e4426565 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
from . import webdriver,get_all_tables
from fdscraper import time,os,pickle
class Companies:
    """Scrapes financial data for a list of companies from screener.in
    using a headless Chrome webdriver."""

    def __init__(self, companies=None, driver_path='chromedriver.exe'):
        """
        :param companies: list of company ids; by convention the FIRST entry
            names the output pickle and is not scraped (see get_financials).
        :param driver_path: path to the chromedriver executable.
        """
        # Initializing the webdriver.
        options = webdriver.ChromeOptions()
        # Run headless so no Chrome window is opened for every scrape.
        options.add_argument('headless')
        options.add_argument("window-size=1920,1080")
        options.add_argument("start-maximized")
        self.driver = webdriver.Chrome(executable_path=driver_path, options=options)
        # `companies=None` avoids the mutable-default-argument pitfall: the
        # original `companies=[]` shared one list object across instances.
        self.companies = companies if companies is not None else []

    def get_financials(self, file_path=None, out_path=None, verbose=0):
        """Scrape all tables for every company id (skipping the first entry,
        which only names the output pickle).

        :param file_path: optional comma-separated ids file; when given it
            overrides ``self.companies``.
        :param out_path: optional directory where the pickle is written.
        :param verbose: >0 prints progress information.
        :return: dict mapping company id -> scraped table data.
        """
        all_company_data = {}
        if file_path is not None:
            # The context manager closes the file; no explicit close needed.
            with open(file_path, 'r') as f:
                data = f.read()
            self.companies = [idd.strip() for idd in data.split(',')]
        if verbose > 0:
            print(self.companies)
        # Bind before the loop so driver.quit() below works even when the
        # companies list has no scrapeable entries (was unbound previously).
        driver = self.driver
        for idd in self.companies[1:]:
            url = f"https://www.screener.in/company/{idd}"
            tic = time.time()
            driver.get(url)
            company_name = driver.find_element_by_xpath("/html/body/main/div[2]/div[1]/h1").text
            if verbose > 0:
                print(company_name, end='-->')
            # Try to get the consolidated figures if they exist.
            try:
                consolidated_link = driver.find_element_by_xpath("/html/body/main/section[4]/div[1]/div[1]/p/a")
                if consolidated_link.text == "View Consolidated":
                    driver.get(f"https://www.screener.in/company/{idd}/consolidated")
            except Exception:
                # The link is absent for companies without consolidated data.
                pass
            # Get all the data.
            all_company_data[idd] = get_all_tables(driver)
            toc = time.time()
            if verbose > 0:
                print(f"Time taken: {toc-tic} seconds")
        if out_path is not None:
            # Context manager guarantees the pickle handle is closed on error.
            with open(os.path.join(out_path, self.companies[0] + '.pickle'), 'wb') as pickle_out:
                pickle.dump(all_company_data, pickle_out)
        driver.quit()
        return all_company_data
def from_file(file_path, driver_path, out_path):
    """Scrape all the financial data from screener of the specified companies
    in the input file.

    :param file_path: comma-separated ids file; the first id names the
        output pickle and is not scraped.
    :param driver_path: path to the chromedriver executable.
    :param out_path: directory where the pickle is written.
    :return: dict mapping company id -> scraped table data.
    """
    # The context manager closes the file; no explicit close needed.
    with open(file_path, 'r') as f:
        data = f.read()
    ids = [idd.strip() for idd in data.split(',')]
    print(ids)
    # Delegate to from_list instead of duplicating the entire webdriver loop;
    # the two functions were byte-for-byte identical after the file read.
    return from_list(ids, driver_path, out_path)
def from_list(ids, driver_path, out_path):
    """Scrape all the financial data from screener of the specified companies
    in the input list.

    :param ids: company ids; the first entry names the output pickle and is
        not scraped.
    :param driver_path: path to the chromedriver executable.
    :param out_path: directory where the pickle is written.
    :return: dict mapping company id -> scraped table data.
    """
    all_company_data = {}
    # Initializing the webdriver.
    options = webdriver.ChromeOptions()
    # Run headless so no Chrome window is opened for every scrape.
    options.add_argument('headless')
    options.add_argument("window-size=1920,1080")
    options.add_argument("start-maximized")
    driver = webdriver.Chrome(executable_path=driver_path, options=options)
    driver.set_window_size(1120, 1000)
    for idd in ids[1:]:
        url = f"https://www.screener.in/company/{idd}"
        tic = time.time()
        driver.get(url)
        company_name = driver.find_element_by_xpath("/html/body/main/div[2]/div[1]/h1").text
        print(company_name, end='-->')
        # Try to get the consolidated figures if they exist.
        try:
            consolidated_link = driver.find_element_by_xpath("/html/body/main/section[4]/div[1]/div[1]/p/a")
            if consolidated_link.text == "View Consolidated":
                driver.get(f"https://www.screener.in/company/{idd}/consolidated")
        except Exception:
            # The link is absent for companies without consolidated data.
            pass
        # Get all the data.
        all_company_data[idd] = get_all_tables(driver)
        toc = time.time()
        print(f"Time taken: {toc-tic} seconds")
    # Context manager guarantees the pickle handle is closed on error.
    with open(os.path.join(out_path, ids[0] + '.pickle'), 'wb') as pickle_out:
        pickle.dump(all_company_data, pickle_out)
    driver.quit()
    return all_company_data
5d30979561abeaa59c193884dad372a5fc425efc | 4,634 | py | Python | test_cases/general/embed/py/test_generate/fromjsonable.py | Parquery/mapry | 93515307f9eba8447fe64b0ac7cc68b2d07205a7 | [
"MIT"
] | 11 | 2019-06-26T05:56:41.000Z | 2021-03-28T16:44:16.000Z | test_cases/general/embed/py/test_generate/fromjsonable.py | Parquery/mapry | 93515307f9eba8447fe64b0ac7cc68b2d07205a7 | [
"MIT"
] | 4 | 2019-10-18T14:43:59.000Z | 2020-04-02T19:12:07.000Z | test_cases/general/embed/py/test_generate/fromjsonable.py | Parquery/mapry | 93515307f9eba8447fe64b0ac7cc68b2d07205a7 | [
"MIT"
] | 3 | 2019-06-17T07:39:03.000Z | 2020-04-01T14:01:23.000Z | # File automatically generated by mapry. DO NOT EDIT OR APPEND!
"""parses JSONable objects."""
import typing
import some.graph
import some.graph.parse
def _empty_from(
        value: typing.Any,
        ref: str,
        target: some.graph.Empty,
        errors: some.graph.parse.Errors
) -> None:
    """
    parses Empty from a JSONable value.

    If ``errors``, the attributes of ``target`` have undefined values.

    :param value: JSONable value
    :param ref:
        reference to the value (e.g., a reference path)
    :param target: parsed ``value`` as Empty
    :param errors: errors encountered during parsing
    :return:
    """
    # Empty has no properties, so only the container type is validated;
    # ``target`` is intentionally left untouched.
    if isinstance(value, dict):
        return
    errors.add(
        ref,
        "Expected a dictionary, but got: {}".format(type(value)))
def empty_from(
        value: typing.Any,
        ref: str,
        errors: some.graph.parse.Errors
) -> typing.Optional[some.graph.Empty]:
    """
    parses Empty from a JSONable value.

    :param value: JSONable value
    :param ref:
        reference to the value (e.g., a reference path)
    :param errors: errors encountered during parsing
    :return: parsed instance, or None if ``errors``
    """
    # Parse into a fresh placeholder; only hand it back when no error was
    # recorded anywhere during parsing.
    target = some.graph.parse.placeholder_empty()
    _empty_from(
        value=value,
        ref=ref,
        target=target,
        errors=errors)

    if not errors.empty():
        return None

    return target
def _non_empty_from(
        value: typing.Any,
        ref: str,
        target: some.graph.NonEmpty,
        errors: some.graph.parse.Errors
) -> None:
    """
    parses NonEmpty from a JSONable value.

    If ``errors``, the attributes of ``target`` have undefined values.

    :param value: JSONable value
    :param ref:
        reference to the value (e.g., a reference path)
    :param target: parsed ``value`` as NonEmpty
    :param errors: errors encountered during parsing
    :return:
    """
    if not isinstance(value, dict):
        errors.add(
            ref,
            "Expected a dictionary, but got: {}".format(
                type(value)))
        return

    ##
    # Parse empty
    ##

    value_0 = value.get(
        'empty',
        None)

    if value_0 is None:
        # 'empty' is a mandatory property of NonEmpty.
        errors.add(
            ref,
            'Property is missing: empty')
    else:
        target_1 = (
            some.graph.parse.placeholder_empty()
        )
        _empty_from(
            value_0,
            '/'.join((
                ref, 'empty')),
            target_1,
            errors)
        target.empty = target_1

    # Stop early once the error container refuses further entries.
    if errors.full():
        return
def non_empty_from(
        value: typing.Any,
        ref: str,
        errors: some.graph.parse.Errors
) -> typing.Optional[some.graph.NonEmpty]:
    """
    parses NonEmpty from a JSONable value.

    :param value: JSONable value
    :param ref:
        reference to the value (e.g., a reference path)
    :param errors: errors encountered during parsing
    :return: parsed instance, or None if ``errors``
    """
    # Parse into a fresh placeholder; only hand it back when no error was
    # recorded anywhere during parsing.
    target = some.graph.parse.placeholder_non_empty()
    _non_empty_from(
        value=value,
        ref=ref,
        target=target,
        errors=errors)

    if not errors.empty():
        return None

    return target
def some_graph_from(
        value: typing.Any,
        ref: str,
        errors: some.graph.parse.Errors
) -> typing.Optional[some.graph.SomeGraph]:
    """
    parses SomeGraph from a JSONable value.

    :param value: JSONable value
    :param ref: reference to the value (e.g., a reference path)
    :param errors: errors encountered during parsing
    :return: parsed SomeGraph, or None if ``errors``
    """
    # Do not even start parsing when the error container is already full.
    if errors.full():
        return None

    if not isinstance(value, dict):
        errors.add(
            ref,
            "Expected a dictionary, but got: {}".format(type(value)))
        return None

    graph = some.graph.parse.placeholder_some_graph()

    ##
    # Parse some_embed
    ##

    value_0 = value.get(
        'some_embed',
        None)

    if value_0 is None:
        # 'some_embed' is a mandatory property of SomeGraph.
        errors.add(
            ref,
            'Property is missing: some_embed')
    else:
        target_1 = (
            some.graph.parse.placeholder_non_empty()
        )
        _non_empty_from(
            value_0,
            '/'.join((
                ref, 'some_embed')),
            target_1,
            errors)
        graph.some_embed = target_1

    if errors.full():
        return None

    if not errors.empty():
        return None

    return graph
| 22.278846 | 70 | 0.572292 | 537 | 4,634 | 4.854749 | 0.139665 | 0.065593 | 0.064442 | 0.034522 | 0.844265 | 0.844265 | 0.811277 | 0.746068 | 0.724971 | 0.714615 | 0 | 0.003846 | 0.326716 | 4,634 | 207 | 71 | 22.386473 | 0.831731 | 0.318731 | 0 | 0.72973 | 1 | 0 | 0.064922 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.045045 | false | 0 | 0.027027 | 0 | 0.18018 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
5d38bb52c9a9e3b14f50f1aabad9c373424056f0 | 2,124 | py | Python | setup.py | bioinfocao/pysapc | db27b14d5935e025dbddb2187b2caee351f3c0fc | [
"BSD-3-Clause"
] | 11 | 2017-04-11T22:41:54.000Z | 2022-02-10T15:06:50.000Z | setup.py | bioinfocao/pysapc | db27b14d5935e025dbddb2187b2caee351f3c0fc | [
"BSD-3-Clause"
] | 2 | 2018-04-13T09:17:01.000Z | 2020-05-13T04:33:59.000Z | setup.py | bioinfocao/pysapc | db27b14d5935e025dbddb2187b2caee351f3c0fc | [
"BSD-3-Clause"
] | 8 | 2017-03-15T11:19:08.000Z | 2021-01-04T18:44:35.000Z | from distutils.core import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext
import numpy
try:
# try with fopenmp
ext = Extension("pysapc.sparseAP_cy", ['pysapc/sparseAP_cy.pyx'],
extra_compile_args=['-fopenmp'],
extra_link_args=['-fopenmp'],
include_dirs=[numpy.get_include()],
)
setup(
name="pysapc",
version="1.2.0",
description="Sparse Affinity Propagation Clustering",
author="Huojun Cao",
author_email="bioinfocao@gmail.com",
url="https://github.com/bioinfocao/pysapc",
license="BSD 3 clause",
packages=["pysapc","pysapc.tests"],
#packages = find_packages(),
package_data = {
# If any package contains *.txt or *.rst files, include them:
'': ['*.txt', '*.rst'],
},
include_package_data=True,
install_requires=["numpy","scipy","pandas","cython"],
cmdclass = {"build_ext": build_ext},
ext_modules = [ext],
setup_requires=['wheel']
)
except:
# if fopenmp is not installed
ext = Extension("pysapc.sparseAP_cy", ['pysapc/sparseAP_cy.pyx'],
include_dirs=[numpy.get_include()],
)
setup(
name="pysapc",
version="1.2.0",
description="Sparse Affinity Propagation Clustering",
author="Huojun Cao",
author_email="bioinfocao@gmail.com",
url="https://github.com/bioinfocao/pysapc",
license="BSD 3 clause",
packages=["pysapc","pysapc.tests"],
#packages = find_packages(),
package_data = {
# If any package contains *.txt or *.rst files, include them:
'': ['*.txt', '*.rst'],
},
include_package_data=True,
install_requires=["numpy","scipy","pandas","cython"],
cmdclass = {"build_ext": build_ext},
ext_modules = [ext],
setup_requires=['wheel']
)
| 36.62069 | 78 | 0.540019 | 208 | 2,124 | 5.355769 | 0.350962 | 0.035907 | 0.057451 | 0.046679 | 0.822262 | 0.822262 | 0.822262 | 0.822262 | 0.822262 | 0.737882 | 0 | 0.005563 | 0.322976 | 2,124 | 57 | 79 | 37.263158 | 0.769124 | 0.103578 | 0 | 0.72 | 0 | 0 | 0.259783 | 0.023913 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.08 | 0 | 0.08 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
5d3e2d77a1bfcb18d387ed0cf504f53d437dac35 | 12,791 | py | Python | integration_tests/test_md_incremental_refresh_book.py | PeregrineTradersDevTeam/md-data-reader-cme | 55130b06eed1bef5af4d32dba7b0fc619f7867c4 | [
"MIT"
] | 2 | 2021-03-15T20:40:27.000Z | 2021-07-13T22:55:58.000Z | integration_tests/test_md_incremental_refresh_book.py | PeregrineTradersDevTeam/md-data-reader-cme | 55130b06eed1bef5af4d32dba7b0fc619f7867c4 | [
"MIT"
] | 1 | 2021-03-10T13:41:38.000Z | 2021-03-10T13:59:37.000Z | integration_tests/test_md_incremental_refresh_book.py | PeregrineTradersDevTeam/md-data-reader-cme | 55130b06eed1bef5af4d32dba7b0fc619f7867c4 | [
"MIT"
] | 1 | 2020-07-03T07:06:45.000Z | 2020-07-03T07:06:45.000Z | def assert_row_0(row):
assert(row['match_event_indicator'] == 4) # 00000100
assert(row['transact_time'] == 1564704045568515145)
assert(len(row['md_entries']) == 1)
assert(len(row['order_id_entries']) == 1)
md_entries_0 = row['md_entries'][0]
assert(md_entries_0['md_entry_px_mantissa'] == 750000000)
assert(md_entries_0['md_entry_px_exponent'] == -9)
assert(md_entries_0['md_entry_size'] == 20)
assert(md_entries_0['security_id'] == 896851)
assert(md_entries_0['rpt_seq'] == 1169218)
assert(md_entries_0['number_of_orders'] == 5)
assert(md_entries_0['md_price_level'] == 2)
assert(md_entries_0['md_update_action'] == 1)
assert(md_entries_0['md_entry_type'] == 48)
order_id_entries_0 = row['order_id_entries'][0]
assert(order_id_entries_0['order_id'] == 826957745673)
assert(order_id_entries_0['md_order_priority'] == 27579355482)
assert(order_id_entries_0['md_display_qty'] == 1)
assert(order_id_entries_0['reference_id'] == 1)
assert(order_id_entries_0['order_update_action'] == 2)
def assert_row_1(row):
    """Validate the second selected row: an event marker carrying no entries."""
    expected_header = {
        'match_event_indicator': -128,  # 10000000
        'transact_time': 1564704056576311565,
    }
    for key, expected in expected_header.items():
        assert row[key] == expected
    assert len(row['md_entries']) == 0
    assert len(row['order_id_entries']) == 0
def assert_row_2(row):
    """Validate the third selected row: one book entry plus one order-ID entry."""
    assert row['match_event_indicator'] == 4  # 00000100
    assert row['transact_time'] == 1564704057713180937
    assert len(row['md_entries']) == 1
    assert len(row['order_id_entries']) == 1

    entry = row['md_entries'][0]
    expected_entry = {
        'md_entry_px_mantissa': 9822000000000,
        'md_entry_px_exponent': -9,
        'md_entry_size': 22,
        'security_id': 21436,
        'rpt_seq': 5603353,
        'number_of_orders': 2,
        'md_price_level': 1,
        'md_update_action': 1,
        'md_entry_type': 49,
    }
    for key, expected in expected_entry.items():
        assert entry[key] == expected

    order = row['order_id_entries'][0]
    expected_order = {
        'order_id': 826957750228,
        'md_order_priority': 27579379610,
        'md_display_qty': 12,
        'reference_id': 1,
        'order_update_action': 0,
    }
    for key, expected in expected_order.items():
        assert order[key] == expected
def assert_row_3(row):
    """Validate the fourth selected row: two book entries, no order-ID entries."""
    assert row['match_event_indicator'] == -112  # 10010000
    assert row['transact_time'] == 1564704060192125693
    assert len(row['md_entries']) == 2
    assert len(row['order_id_entries']) == 0

    # (mantissa, size, security_id, rpt_seq, price_level, update_action, entry_type)
    expected = [
        (-19000000000, 4092, 165888, 5016582, 1, 1, 69),
        (-19500000000, 779, 165888, 5016583, 2, 1, 69),
    ]
    for entry, (mantissa, size, security_id, rpt_seq,
                level, action, entry_type) in zip(row['md_entries'], expected):
        assert entry['md_entry_px_mantissa'] == mantissa
        assert entry['md_entry_px_exponent'] == -9
        assert entry['md_entry_size'] == size
        assert entry['security_id'] == security_id
        assert entry['rpt_seq'] == rpt_seq
        # Identity check ('is None') replaces the original '== None'.
        assert entry['number_of_orders'] is None
        assert entry['md_price_level'] == level
        assert entry['md_update_action'] == action
        assert entry['md_entry_type'] == entry_type
def assert_row_4(row):
    """Validate the fifth selected row: one book entry, no order-ID entries."""
    assert row['match_event_indicator'] == -112  # 10010000
    assert row['transact_time'] == 1564610433070678577
    assert len(row['md_entries']) == 1
    assert len(row['order_id_entries']) == 0

    entry = row['md_entries'][0]
    assert entry['md_entry_px_mantissa'] == 11000000000
    assert entry['md_entry_px_exponent'] == -9
    assert entry['md_entry_size'] == 387
    assert entry['security_id'] == 22497
    assert entry['rpt_seq'] == 3391886
    # Identity check ('is None') replaces the original '== None'.
    assert entry['number_of_orders'] is None
    assert entry['md_price_level'] == 2
    assert entry['md_update_action'] == 1
    assert entry['md_entry_type'] == 69
def assert_row_5(row):
    """Validate the sixth selected row: six book entries, no order-ID entries."""
    assert row['match_event_indicator'] == -112  # 10010000
    assert row['transact_time'] == 1564610597337395147
    assert len(row['md_entries']) == 6
    assert len(row['order_id_entries']) == 0

    # (mantissa, size, security_id, rpt_seq, price_level, update_action, entry_type)
    expected = [
        (-14500000000, 256, 225528, 4995087, 2, 1, 70),
        (9000000000, 256, 503405, 5280046, 2, 1, 70),
        (9818500000000, 1655, 17606, 31128823, 2, 1, 69),
        (-46000000000, 256, 17707, 4903272, 2, 1, 70),
        (3500000000, 256, 11069, 5243780, 2, 1, 70),
        (3000000000, 1511, 64887, 4080419, 2, 1, 69),
    ]
    for entry, (mantissa, size, security_id, rpt_seq,
                level, action, entry_type) in zip(row['md_entries'], expected):
        assert entry['md_entry_px_mantissa'] == mantissa
        assert entry['md_entry_px_exponent'] == -9
        assert entry['md_entry_size'] == size
        assert entry['security_id'] == security_id
        assert entry['rpt_seq'] == rpt_seq
        # Identity check ('is None') replaces the original '== None'.
        assert entry['number_of_orders'] is None
        assert entry['md_price_level'] == level
        assert entry['md_update_action'] == action
        assert entry['md_entry_type'] == entry_type
def assert_row_6(row):
    """Validate the seventh selected row: four book entries, no order-ID entries."""
    assert row['match_event_indicator'] == -112  # 10010000
    assert row['transact_time'] == 1564610599557911449
    assert len(row['md_entries']) == 4
    assert len(row['order_id_entries']) == 0

    # (mantissa, size, security_id, rpt_seq, price_level, update_action, entry_type)
    expected = [
        (9814500000000, 1459, 427, 22584194, 2, 1, 70),
        (23000000000, 208, 225582, 3575681, 2, 1, 69),
        (9808500000000, 551, 21436, 4277776, 2, 1, 70),
        (9817500000000, 1967, 193097, 26314001, 2, 1, 70),
    ]
    for entry, (mantissa, size, security_id, rpt_seq,
                level, action, entry_type) in zip(row['md_entries'], expected):
        assert entry['md_entry_px_mantissa'] == mantissa
        assert entry['md_entry_px_exponent'] == -9
        assert entry['md_entry_size'] == size
        assert entry['security_id'] == security_id
        assert entry['rpt_seq'] == rpt_seq
        # Identity check ('is None') replaces the original '== None'.
        assert entry['number_of_orders'] is None
        assert entry['md_price_level'] == level
        assert entry['md_update_action'] == action
        assert entry['md_entry_type'] == entry_type
def assert_row_7(row):
    """Validate the eighth selected row: four book entries, no order-ID entries."""
    assert row['match_event_indicator'] == -112  # 10010000
    assert row['transact_time'] == 1564610616921365887
    assert len(row['md_entries']) == 4
    assert len(row['order_id_entries']) == 0

    # (mantissa, size, security_id, rpt_seq, price_level, update_action, entry_type)
    expected = [
        (2000000000, 1, 887801, 2897354, 1, 2, 69),
        (1750000000, 2, 887801, 2897355, 1, 1, 69),
        (1500000000, 80, 887801, 2897356, 2, 0, 69),
        (1500000000, 18, 31147, 2801939, 2, 1, 70),
    ]
    for entry, (mantissa, size, security_id, rpt_seq,
                level, action, entry_type) in zip(row['md_entries'], expected):
        assert entry['md_entry_px_mantissa'] == mantissa
        assert entry['md_entry_px_exponent'] == -9
        assert entry['md_entry_size'] == size
        assert entry['security_id'] == security_id
        assert entry['rpt_seq'] == rpt_seq
        # Identity check ('is None') replaces the original '== None'.
        assert entry['number_of_orders'] is None
        assert entry['md_price_level'] == level
        assert entry['md_update_action'] == action
        assert entry['md_entry_type'] == entry_type
def run_test(sparkSession):
    """Load the two decoded parquet files and validate a hand-picked set of rows.

    :param sparkSession: active SparkSession used to read the decoded data.
    """
    dataset1 = sparkSession.read.parquet('/test_decoded_data/test-1-md-incremental-refresh-book46.parquet').head(100000)
    dataset2 = sparkSession.read.parquet('/test_decoded_data/test-2-md-incremental-refresh-book46.parquet').head(100000)
    data_for_test_1 = [dataset1[i] for i in [44081, 65929, 67993, 73268]]
    data_for_test_2 = [dataset2[i] for i in [49236, 87140, 87804, 98363]]
    data_for_test = data_for_test_1 + data_for_test_2

    # Validate every selected row. The original skipped index 0, so row
    # 44081 of dataset1 was picked but never checked and assert_row_0 was
    # dead code.
    assert_row_0(data_for_test[0])
    assert_row_1(data_for_test[1])
    assert_row_2(data_for_test[2])
    assert_row_3(data_for_test[3])
    assert_row_4(data_for_test[4])
    assert_row_5(data_for_test[5])
    assert_row_6(data_for_test[6])
    assert_row_7(data_for_test[7])
| 45.682143 | 120 | 0.701353 | 1,967 | 12,791 | 4.099136 | 0.079309 | 0.242218 | 0.31812 | 0.125016 | 0.856009 | 0.845591 | 0.729877 | 0.648766 | 0.620985 | 0.620985 | 0 | 0.110505 | 0.139708 | 12,791 | 279 | 121 | 45.845878 | 0.622228 | 0.005551 | 0 | 0.458824 | 0 | 0 | 0.270474 | 0.02313 | 0 | 0 | 0 | 0 | 0.894118 | 1 | 0.035294 | false | 0 | 0 | 0 | 0.035294 | 0 | 0 | 0 | 0 | null | 1 | 1 | 0 | 1 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
53796869b814b122bb7310666eb13745cdde9fa8 | 16,167 | py | Python | tests/test_observable/test_window.py | yutiansut/RxPY | c3bbba77f9ebd7706c949141725e220096deabd4 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | tests/test_observable/test_window.py | yutiansut/RxPY | c3bbba77f9ebd7706c949141725e220096deabd4 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | tests/test_observable/test_window.py | yutiansut/RxPY | c3bbba77f9ebd7706c949141725e220096deabd4 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | import unittest
from rx.core import Observable
from rx.testing import TestScheduler, ReactiveTest
# Module-level shorthand aliases for the ReactiveTest factory methods, used by
# every test below to build expected marble messages and subscription spans.
on_next = ReactiveTest.on_next
on_completed = ReactiveTest.on_completed
on_error = ReactiveTest.on_error
subscribe = ReactiveTest.subscribe
subscribed = ReactiveTest.subscribed
disposed = ReactiveTest.disposed
created = ReactiveTest.created
class TestWindow(unittest.TestCase):
def test_window_closings_basic(self):
    """window() with a closing factory: windows last 100, 200, 300, ... ticks."""
    scheduler = TestScheduler()
    xs = scheduler.create_hot_observable(on_next(90, 1), on_next(180, 2), on_next(250, 3), on_next(260, 4), on_next(310, 5), on_next(340, 6), on_next(410, 7), on_next(420, 8), on_next(470, 9), on_next(550, 10), on_completed(590))
    # Mutable cell so the closure below can grow the window length per call.
    window = [1]

    def create():
        def closing():
            curr = window[0]
            window[0] += 1
            return Observable.timer(curr * 100)

        def mapper(w, i):
            # Tag each element with the index of the window it arrived in.
            return w.map(lambda x: str(i) + ' ' + str(x))

        return xs.window(closing).mapi(mapper).merge_all()

    results = scheduler.start(create=create)
    assert results.messages == [on_next(250, "0 3"), on_next(260, "0 4"), on_next(310, "1 5"), on_next(340, "1 6"), on_next(410, "1 7"), on_next(420, "1 8"), on_next(470, "1 9"), on_next(550, "2 10"), on_completed(590)]
    assert xs.subscriptions == [subscribe(200, 590)]
def test_window_closings_dispose(self):
    """Disposing the subscription at 400 truncates the emitted windows."""
    scheduler = TestScheduler()
    xs = scheduler.create_hot_observable(on_next(90, 1), on_next(180, 2), on_next(250, 3), on_next(260, 4), on_next(310, 5), on_next(340, 6), on_next(410, 7), on_next(420, 8), on_next(470, 9), on_next(550, 10), on_completed(590))
    # Mutable cell so the closure below can grow the window length per call.
    window = [1]

    def create():
        def closing():
            curr = window[0]
            window[0] += 1
            return Observable.timer(curr * 100)

        def mapper(w, i):
            # Tag each element with the index of the window it arrived in.
            return w.map(lambda x: str(i) + ' ' + str(x))

        return xs.window(closing).mapi(mapper).merge_all()

    results = scheduler.start(create=create, disposed=400)
    # Only the elements before the 400-tick disposal are observed.
    assert results.messages == [on_next(250, "0 3"), on_next(260, "0 4"), on_next(310, "1 5"), on_next(340, "1 6")]
    assert xs.subscriptions == [subscribe(200, 400)]
def test_window_closings_error(self):
    """An error on the source observable propagates through the windows."""
    ex = 'ex'
    scheduler = TestScheduler()
    # Source terminates with on_error(590, ex) instead of completing.
    xs = scheduler.create_hot_observable(on_next(90, 1), on_next(180, 2), on_next(250, 3), on_next(260, 4), on_next(310, 5), on_next(340, 6), on_next(410, 7), on_next(420, 8), on_next(470, 9), on_next(550, 10), on_error(590, ex))
    # Mutable cell so the closure below can grow the window length per call.
    window = [1]

    def create():
        def closing():
            curr = window[0]
            window[0] += 1
            return Observable.timer(curr * 100)

        def mapper(w, i):
            # Tag each element with the index of the window it arrived in.
            return w.map(lambda x: str(i) + ' ' + str(x))

        return xs.window(closing).mapi(mapper).merge_all()

    results = scheduler.start(create=create)
    assert results.messages == [on_next(250, "0 3"), on_next(260, "0 4"), on_next(310, "1 5"), on_next(340, "1 6"), on_next(410, "1 7"), on_next(420, "1 8"), on_next(470, "1 9"), on_next(550, "2 10"), on_error(590, ex)]
    assert xs.subscriptions == [subscribe(200, 590)]
def test_window_closings_on_error(self):
    """A closing factory that raises surfaces as on_error at subscribe time."""
    ex = 'ex'
    scheduler = TestScheduler()
    xs = scheduler.create_hot_observable(on_next(90, 1), on_next(180, 2), on_next(250, 3), on_next(260, 4), on_next(310, 5), on_next(340, 6), on_next(410, 7), on_next(420, 8), on_next(470, 9), on_next(550, 10), on_completed(590))
    window = [1]

    def create():
        def closing():
            # Raising here should immediately error the stream.
            raise Exception(ex)

        def mapper(w, i):
            return w.map(lambda x: str(i) + ' ' + str(x))

        return xs.window(closing).mapi(mapper).merge_all()

    results = scheduler.start(create=create)
    # Error is reported at 200 (subscription time) and xs is unsubscribed immediately.
    assert results.messages == [on_error(200, ex)]
    assert xs.subscriptions == [subscribe(200, 200)]
def test_window_closings_window_close_error(self):
    """A closing observable that errors propagates as on_error at subscribe time.

    The unused local ``window = 1`` from the original (a leftover from the
    sibling tests, never read here) has been removed.
    """
    ex = 'ex'
    scheduler = TestScheduler()
    xs = scheduler.create_hot_observable(on_next(90, 1), on_next(180, 2), on_next(250, 3), on_next(260, 4), on_next(310, 5), on_next(340, 6), on_next(410, 7), on_next(420, 8), on_next(470, 9), on_next(550, 10), on_completed(590))

    def create():
        def closing():
            # The window-boundary observable itself errors.
            return Observable.throw(ex)

        def mapper(w, i):
            return w.map(lambda x: str(i) + ' ' + str(x))

        return xs.window(closing).mapi(mapper).merge_all()

    results = scheduler.start(create=create)
    # Error is reported at 200 (subscription time) and xs is unsubscribed immediately.
    assert results.messages == [on_error(200, ex)]
    assert xs.subscriptions == [subscribe(200, 200)]
def test_window_closings_default(self):
    """Same as the basic case but passing the factory via the keyword argument."""
    scheduler = TestScheduler()
    xs = scheduler.create_hot_observable(on_next(90, 1), on_next(180, 2), on_next(250, 3), on_next(260, 4), on_next(310, 5), on_next(340, 6), on_next(410, 7), on_next(420, 8), on_next(470, 9), on_next(550, 10), on_completed(590))
    # Mutable cell so the closure below can grow the window length per call.
    window = [1]

    def create():
        def closings():
            w = window[0]
            window[0] += 1
            return Observable.timer(w * 100)

        def mapper(w, i):
            # Tag each element with the index of the window it arrived in.
            return w.map(lambda x: str(i) + ' ' + str(x))

        # Explicit keyword form of the closing-factory argument.
        return xs.window(window_closing_mapper=closings).mapi(mapper).merge_all()

    results = scheduler.start(create=create)
    assert results.messages == [on_next(250, "0 3"), on_next(260, "0 4"), on_next(310, "1 5"), on_next(340, "1 6"), on_next(410, "1 7"), on_next(420, "1 8"), on_next(470, "1 9"), on_next(550, "2 10"), on_completed(590)]
    assert xs.subscriptions == [subscribe(200, 590)]
def test_window_opening_closings_basic(self):
    """window() with openings (ys) and a per-opening closing factory."""
    scheduler = TestScheduler()
    xs = scheduler.create_hot_observable(on_next(90, 1), on_next(180, 2), on_next(250, 3), on_next(260, 4), on_next(310, 5), on_next(340, 6), on_next(410, 7), on_next(420, 8), on_next(470, 9), on_next(550, 10), on_completed(590))
    # Each element of ys opens a window; its value is the window's duration.
    ys = scheduler.create_hot_observable(on_next(255, 50), on_next(330, 100), on_next(350, 50), on_next(400, 90), on_completed(900))

    def create():
        def closing(x):
            # Close the window after as many ticks as the opening value.
            return Observable.timer(x)

        def mapper(w, i):
            # Tag each element with the index of the window it arrived in.
            return w.map(lambda x: str(i) + ' ' + str(x))

        return xs.window(ys, closing).mapi(mapper).merge_all()

    results = scheduler.start(create=create)
    # Windows may overlap, so one source element can appear in two windows.
    assert results.messages == [on_next(260, "0 4"), on_next(340, "1 6"), on_next(410, "1 7"), on_next(410, "3 7"), on_next(420, "1 8"), on_next(420, "3 8"), on_next(470, "3 9"), on_completed(900)]
    assert xs.subscriptions == [subscribe(200, 900)]
    assert ys.subscriptions == [subscribe(200, 900)]
def test_window_opening_closings_on_error(self):
    """An exception raised by the closing mapper is propagated as on_error
    at the time of the first opening, tearing down both subscriptions."""
    ex = 'ex'
    scheduler = TestScheduler()
    xs = scheduler.create_hot_observable(
        on_next(90, 1), on_next(180, 2), on_next(250, 3), on_next(260, 4),
        on_next(310, 5), on_next(340, 6), on_next(410, 7), on_next(420, 8),
        on_next(470, 9), on_next(550, 10), on_completed(590))
    ys = scheduler.create_hot_observable(
        on_next(255, 50), on_next(330, 100), on_next(350, 50),
        on_next(400, 90), on_completed(900))

    def create():
        def closing(x):
            raise Exception(ex)

        def mapper(w, i):
            return w.map(lambda x: str(i) + ' ' + str(x))

        return xs.window(ys, closing).mapi(mapper).merge_all()

    results = scheduler.start(create=create)
    assert results.messages == [on_error(255, ex)]
    assert xs.subscriptions == [subscribe(200, 255)]
    assert ys.subscriptions == [subscribe(200, 255)]
def test_window_opening_closings_dispose(self):
    """Disposing the subscription at 415 cuts off all windows and unsubscribes
    from both the source and the openings observable."""
    scheduler = TestScheduler()
    xs = scheduler.create_hot_observable(
        on_next(90, 1), on_next(180, 2), on_next(250, 3), on_next(260, 4),
        on_next(310, 5), on_next(340, 6), on_next(410, 7), on_next(420, 8),
        on_next(470, 9), on_next(550, 10), on_completed(590))
    ys = scheduler.create_hot_observable(
        on_next(255, 50), on_next(330, 100), on_next(350, 50),
        on_next(400, 90), on_completed(900))

    def create():
        def closing(x):
            return Observable.timer(x)

        def mapper(w, i):
            return w.map(lambda x: str(i) + ' ' + str(x))

        return xs.window(ys, closing).mapi(mapper).merge_all()

    results = scheduler.start(create=create, disposed=415)
    assert results.messages == [
        on_next(260, "0 4"), on_next(340, "1 6"),
        on_next(410, "1 7"), on_next(410, "3 7")]
    assert xs.subscriptions == [subscribe(200, 415)]
    assert ys.subscriptions == [subscribe(200, 415)]
def test_window_opening_closings_data_error(self):
    """An error on the data source xs is forwarded to the result and ends
    both subscriptions at the error time."""
    ex = 'ex'
    scheduler = TestScheduler()
    xs = scheduler.create_hot_observable(
        on_next(90, 1), on_next(180, 2), on_next(250, 3), on_next(260, 4),
        on_next(310, 5), on_next(340, 6), on_next(410, 7), on_error(415, ex))
    ys = scheduler.create_hot_observable(
        on_next(255, 50), on_next(330, 100), on_next(350, 50),
        on_next(400, 90), on_completed(900))

    def create():
        def closing(x):
            return Observable.timer(x)

        def mapper(w, i):
            return w.map(lambda x: str(i) + ' ' + str(x))

        return xs.window(ys, closing).mapi(mapper).merge_all()

    results = scheduler.start(create=create)
    assert results.messages == [
        on_next(260, "0 4"), on_next(340, "1 6"), on_next(410, "1 7"),
        on_next(410, "3 7"), on_error(415, ex)]
    assert xs.subscriptions == [subscribe(200, 415)]
    assert ys.subscriptions == [subscribe(200, 415)]
def test_window_opening_closings_window_error(self):
    """An error on the openings observable ys is forwarded to the result and
    ends both subscriptions at the error time."""
    ex = 'ex'
    scheduler = TestScheduler()
    xs = scheduler.create_hot_observable(
        on_next(90, 1), on_next(180, 2), on_next(250, 3), on_next(260, 4),
        on_next(310, 5), on_next(340, 6), on_next(410, 7), on_next(420, 8),
        on_next(470, 9), on_next(550, 10), on_completed(590))
    ys = scheduler.create_hot_observable(
        on_next(255, 50), on_next(330, 100), on_next(350, 50),
        on_next(400, 90), on_error(415, ex))

    def create():
        def closing(x):
            return Observable.timer(x)

        def mapper(w, i):
            return w.map(lambda x: str(i) + ' ' + str(x))

        return xs.window(ys, closing).mapi(mapper).merge_all()

    results = scheduler.start(create=create)
    assert results.messages == [
        on_next(260, "0 4"), on_next(340, "1 6"), on_next(410, "1 7"),
        on_next(410, "3 7"), on_error(415, ex)]
    assert xs.subscriptions == [subscribe(200, 415)]
    assert ys.subscriptions == [subscribe(200, 415)]
def test_window_boundaries_simple(self):
    """window(boundaries): every tick on ys closes the current window and
    opens the next; elements are tagged with their window index."""
    scheduler = TestScheduler()
    xs = scheduler.create_hot_observable(
        on_next(90, 1),
        on_next(180, 2),
        on_next(250, 3),
        on_next(260, 4),
        on_next(310, 5),
        on_next(340, 6),
        on_next(410, 7),
        on_next(420, 8),
        on_next(470, 9),
        on_next(550, 10),
        on_completed(590)
    )
    ys = scheduler.create_hot_observable(
        on_next(255, True),
        on_next(330, True),
        on_next(350, True),
        on_next(400, True),
        on_next(500, True),
        on_completed(900)
    )

    def create():
        def mapper(w, i):
            return w.map(lambda x: str(i) + ' ' + str(x))

        return xs.window(ys).mapi(mapper).merge_all()

    res = scheduler.start(create=create)
    assert res.messages == [
        on_next(250, "0 3"),
        on_next(260, "1 4"),
        on_next(310, "1 5"),
        on_next(340, "2 6"),
        on_next(410, "4 7"),
        on_next(420, "4 8"),
        on_next(470, "4 9"),
        on_next(550, "5 10"),
        on_completed(590)]
    assert xs.subscriptions == [
        subscribe(200, 590)]
    assert ys.subscriptions == [
        subscribe(200, 590)]
def test_window_boundaries_close_boundaries(self):
    """When the boundaries observable completes (at 400), the windowed
    sequence completes too, even though xs keeps producing."""
    scheduler = TestScheduler()
    xs = scheduler.create_hot_observable(
        on_next(90, 1),
        on_next(180, 2),
        on_next(250, 3),
        on_next(260, 4),
        on_next(310, 5),
        on_next(340, 6),
        on_next(410, 7),
        on_next(420, 8),
        on_next(470, 9),
        on_next(550, 10),
        on_completed(590)
    )
    ys = scheduler.create_hot_observable(
        on_next(255, True),
        on_next(330, True),
        on_next(350, True),
        on_completed(400)
    )

    def create():
        def mapper(w, i):
            return w.map(lambda x: str(i) + ' ' + str(x))

        return xs.window(ys).mapi(mapper).merge_all()

    res = scheduler.start(create=create)
    assert res.messages == [
        on_next(250, "0 3"),
        on_next(260, "1 4"),
        on_next(310, "1 5"),
        on_next(340, "2 6"),
        on_completed(400)]
    assert xs.subscriptions == [
        subscribe(200, 400)]
    assert ys.subscriptions == [
        subscribe(200, 400)]
def test_window_boundaries_throwSource(self):
    """An error on the source xs is forwarded after the elements already
    delivered, ending both subscriptions at the error time."""
    ex = 'ex'
    scheduler = TestScheduler()
    xs = scheduler.create_hot_observable(
        on_next(90, 1),
        on_next(180, 2),
        on_next(250, 3),
        on_next(260, 4),
        on_next(310, 5),
        on_next(340, 6),
        on_next(380, 7),
        on_error(400, ex)
    )
    ys = scheduler.create_hot_observable(
        on_next(255, True),
        on_next(330, True),
        on_next(350, True),
        on_completed(500)
    )

    def create():
        def mapper(w, i):
            return w.map(lambda x: str(i) + ' ' + str(x))

        return xs.window(ys).mapi(mapper).merge_all()

    res = scheduler.start(create=create)
    assert res.messages == [
        on_next(250, "0 3"),
        on_next(260, "1 4"),
        on_next(310, "1 5"),
        on_next(340, "2 6"),
        on_next(380, "3 7"),
        on_error(400, ex)]
    assert xs.subscriptions == [
        subscribe(200, 400)]
    assert ys.subscriptions == [
        subscribe(200, 400)]
def test_window_boundaries_throw_boundaries(self):
    """An error on the boundaries observable ys is forwarded and ends both
    subscriptions at the error time."""
    ex = 'ex'
    scheduler = TestScheduler()
    xs = scheduler.create_hot_observable(
        on_next(90, 1),
        on_next(180, 2),
        on_next(250, 3),
        on_next(260, 4),
        on_next(310, 5),
        on_next(340, 6),
        on_next(410, 7),
        on_next(420, 8),
        on_next(470, 9),
        on_next(550, 10),
        on_completed(590)
    )
    ys = scheduler.create_hot_observable(
        on_next(255, True),
        on_next(330, True),
        on_next(350, True),
        on_error(400, ex)
    )

    def create():
        def mapper(w, i):
            return w.map(lambda x: str(i) + ' ' + str(x))

        return xs.window(ys).mapi(mapper).merge_all()

    res = scheduler.start(create=create)
    assert res.messages == [
        on_next(250, "0 3"),
        on_next(260, "1 4"),
        on_next(310, "1 5"),
        on_next(340, "2 6"),
        on_error(400, ex)]
    assert xs.subscriptions == [
        subscribe(200, 400)]
    assert ys.subscriptions == [
        subscribe(200, 400)]
# Allow running this test module directly with the unittest runner.
if __name__ == '__main__':
    unittest.main()
| 38.677033 | 233 | 0.550257 | 2,224 | 16,167 | 3.811151 | 0.044964 | 0.175555 | 0.028669 | 0.079283 | 0.921189 | 0.898537 | 0.891576 | 0.881902 | 0.875885 | 0.875413 | 0 | 0.120813 | 0.305746 | 16,167 | 417 | 234 | 38.769784 | 0.634355 | 0 | 0 | 0.784615 | 0 | 0 | 0.015278 | 0 | 0 | 0 | 0 | 0 | 0.12 | 1 | 0.172308 | false | 0 | 0.009231 | 0.061538 | 0.304615 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
5379d2e1717581cbc768274b3b154fdd65bc0bbc | 24,970 | py | Python | test/process_file/test_process_file.py | StoDevX/stograde | 5b4cd58724e8e5218c7a7f2cc2d4f788e71a7931 | [
"MIT"
] | 7 | 2016-08-05T00:41:11.000Z | 2019-08-22T11:12:10.000Z | test/process_file/test_process_file.py | StoDevX/cs251-toolkit | a40f358289d67cce7b24fd557230079fae830b7d | [
"MIT"
] | 145 | 2016-08-04T01:07:11.000Z | 2019-09-09T22:07:13.000Z | test/process_file/test_process_file.py | stograde/stograde | 17d901a86ff80d20e9f7f798bd27375de34eccb7 | [
"MIT"
] | 3 | 2017-02-06T21:52:46.000Z | 2019-02-18T10:35:01.000Z | import os
import textwrap
import pytest
from stograde.common import chdir
from stograde.common.run_status import RunStatus
from stograde.process_file.compile_result import CompileResult
from stograde.process_file.file_result import FileResult
from stograde.process_file.process_file import get_file, parse_command, compile_file, test_file, process_file
from stograde.process_file.test_result import TestResult
from stograde.specs.file_options import FileOptions
from stograde.specs.spec_file import SpecFile
from test.utils import git, touch
# Absolute directory of this test module; fixture paths are resolved against it.
_dir = os.path.dirname(os.path.realpath(__file__))
# ----------------------------- get_file -----------------------------
@pytest.mark.datafiles(os.path.join(_dir, 'fixtures'))
def test_get_file_success(datafiles):
    """get_file reads an existing, committed file: contents and the git
    last-modified date are recorded and file_missing stays False."""
    spec = SpecFile('a_file.txt', [], [], [], FileOptions())
    result = FileResult(file_name='a_file.txt')
    with chdir(str(datafiles)):
        # Commit the fixture with a fixed date so last_modified is stable.
        git('init')
        git('config', 'user.email', 'an_email@email_provider.com')
        git('config', 'user.name', 'Some Random Name')
        git('add', 'a_file.txt')
        git('commit', '-m', '"Add file"', '--date="Tue Apr 21 12:28:03 2020 -0500"')
        ret = get_file(spec, result)
    assert ret is True
    assert result.file_name == 'a_file.txt'
    assert not result.actual_name
    assert result.contents == 'contents\n'
    assert not result.compile_results
    assert not result.test_results
    assert result.file_missing is False
    assert result.last_modified == 'Tue Apr 21 12:28:03 2020 -0500'
    assert not result.other_files
    assert result.optional is False
    assert result.compile_optional is False
@pytest.mark.datafiles(os.path.join(_dir, 'fixtures'))
def test_get_file_alternate(datafiles):
    """When the primary file is absent, get_file falls back to the first
    alternate name that exists and records it as actual_name."""
    spec = SpecFile('b_file.txt', ['another_file.txt', 'a_file.txt'], [], [], FileOptions())
    result = FileResult(file_name='b_file.txt')
    with chdir(str(datafiles)):
        git('init')
        git('config', 'user.email', 'an_email@email_provider.com')
        git('config', 'user.name', 'Some Random Name')
        git('add', 'another_file.txt')
        git('commit', '-m', '"Add file"', '--date="Tue Apr 21 12:28:03 2020 -0500"')
        ret = get_file(spec, result)
    assert ret is True
    assert result.file_name == 'b_file.txt'
    assert result.actual_name == 'another_file.txt'
    assert result.contents == 'other contents\n'
    assert not result.compile_results
    assert not result.test_results
    assert result.file_missing is False
    assert result.last_modified == 'Tue Apr 21 12:28:03 2020 -0500'
    # The remaining alternate that was not used is reported as an other file.
    assert result.other_files == ['a_file.txt']
    assert result.optional is False
    assert result.compile_optional is False
@pytest.mark.datafiles(os.path.join(_dir, 'fixtures'))
def test_get_file_hide_contents(datafiles):
    """hide_contents suppresses the recorded contents; the compile_optional
    option is carried through onto the result."""
    spec = SpecFile('a_file.txt', [], [], [], FileOptions(hide_contents=True,
                                                          optional=True,
                                                          compile_optional=True))
    result = FileResult(file_name='a_file.txt')
    with chdir(str(datafiles)):
        git('init')
        git('config', 'user.email', 'an_email@email_provider.com')
        git('config', 'user.name', 'Some Random Name')
        git('add', 'a_file.txt')
        git('commit', '-m', '"Add file"', '--date="Tue Apr 21 12:28:03 2020 -0500"')
        ret = get_file(spec, result)
    assert ret is True
    assert result.file_name == 'a_file.txt'
    assert not result.actual_name
    assert result.contents == ''
    assert not result.compile_results
    assert not result.test_results
    assert result.file_missing is False
    assert result.last_modified == 'Tue Apr 21 12:28:03 2020 -0500'
    assert not result.other_files
    # NOTE(review): optional=True in the spec but the result reports False —
    # per these assertions, 'optional' is only set on the result when the
    # file is missing; verify against get_file's implementation.
    assert result.optional is False
    assert result.compile_optional is True
@pytest.mark.datafiles(os.path.join(_dir, 'fixtures'))
def test_get_file_truncated_contents(datafiles):
    """truncate_contents=N keeps only the first N characters of the file."""
    spec = SpecFile('a_file.txt', [], [], [], FileOptions(truncate_contents=4))
    result = FileResult(file_name='a_file.txt')
    with chdir(str(datafiles)):
        git('init')
        git('config', 'user.email', 'an_email@email_provider.com')
        git('config', 'user.name', 'Some Random Name')
        git('add', 'a_file.txt')
        git('commit', '-m', '"Add file"', '--date="Tue Apr 21 12:28:03 2020 -0500"')
        ret = get_file(spec, result)
    assert ret is True
    assert result.file_name == 'a_file.txt'
    assert not result.actual_name
    # 'contents\n' truncated to 4 characters.
    assert result.contents == 'cont'
    assert not result.compile_results
    assert not result.test_results
    assert result.file_missing is False
    assert result.last_modified == 'Tue Apr 21 12:28:03 2020 -0500'
    assert not result.other_files
    assert result.optional is False
    assert result.compile_optional is False
@pytest.mark.datafiles(os.path.join(_dir, 'fixtures'))
def test_get_file_missing(datafiles):
    """A missing file returns False, marks file_missing, lists the directory
    contents as other_files, and carries the 'optional' flag through."""
    spec = SpecFile('b_file.txt', [], [], [], FileOptions(optional=True))
    result = FileResult(file_name='b_file.txt')
    with chdir(str(datafiles)):
        ret = get_file(spec, result)
    assert ret is False
    assert result.file_name == 'b_file.txt'
    assert not result.actual_name
    assert not result.contents
    assert not result.compile_results
    assert not result.test_results
    assert result.file_missing is True
    assert not result.last_modified
    # Directory listing order is not guaranteed; compare as a set.
    assert set(result.other_files) == {'a_file.txt', 'another_file.txt', 'compile_file', 'process_file', 'test_file'}
    assert result.optional is True
    assert result.compile_optional is False
# ----------------------------- parse_command -----------------------------
def test_parse_command_with_file():
    """'$@' placeholders are replaced with './<file_name>'."""
    assert parse_command('g++ --std=c++11 $@ -o $@.exec',
                         file_name='a_file.cpp',
                         supporting_dir='') == 'g++ --std=c++11 ./a_file.cpp -o ./a_file.cpp.exec'


def test_parse_command_with_supporting():
    """'$SUPPORT' is replaced with the supporting directory path."""
    assert parse_command('cat $SUPPORT',
                         file_name='',
                         supporting_dir='data/supporting/hw1') == 'cat data/supporting/hw1'


def test_parse_command_no_replacements():
    """Commands without placeholders pass through unchanged."""
    assert parse_command('echo A',
                         file_name='file',
                         supporting_dir='dir') == 'echo A'
# ----------------------------- compile_file -----------------------------
@pytest.mark.datafiles(os.path.join(_dir, 'fixtures', 'compile_file'))
def test_compile_file_success(datafiles):
    """All compile commands run and succeed: compile_file returns True and
    records one CompileResult per command."""
    spec = SpecFile(file_name='good.cpp',
                    compile_commands=['g++ --std=c++11 $@ -o $@.exec', 'echo A'],
                    test_commands=[],
                    options=FileOptions())
    result = FileResult(file_name='good.cpp')
    with chdir(str(datafiles)):
        ret = compile_file(file_spec=spec, results=result, supporting_dir='')
    assert ret is True
    assert result.compile_results == [CompileResult(command='g++ --std=c++11 ./good.cpp -o ./good.cpp.exec',
                                                    output='',
                                                    status=RunStatus.SUCCESS),
                                      CompileResult(command='echo A',
                                                    output='A\n',
                                                    status=RunStatus.SUCCESS)]
@pytest.mark.datafiles(os.path.join(_dir, 'fixtures', 'compile_file'))
def test_compile_file_alternate(datafiles):
    """When an alternate file was matched, compile commands substitute the
    actual_name (good.cpp) rather than the missing spec name."""
    spec = SpecFile(file_name='missing.cpp',
                    compile_commands=['g++ --std=c++11 $@ -o $@.exec', 'echo A'],
                    test_commands=[],
                    options=FileOptions())
    result = FileResult(file_name='missing.cpp', actual_name='good.cpp')
    with chdir(str(datafiles)):
        ret = compile_file(file_spec=spec, results=result, supporting_dir='')
    assert ret is True
    assert result.actual_name == 'good.cpp'
    assert result.compile_results == [CompileResult(command='g++ --std=c++11 ./good.cpp -o ./good.cpp.exec',
                                                    output='',
                                                    status=RunStatus.SUCCESS),
                                      CompileResult(command='echo A',
                                                    output='A\n',
                                                    status=RunStatus.SUCCESS)]
# A failing compile command stops the pipeline: compile_file returns False and
# records the g++ diagnostics. Two expected-output variants cover old and new
# gcc diagnostic formats (with/without the source-line quote gutter).
# NOTE(review): structural indentation appears to have been stripped from this
# block during extraction; code preserved byte-for-byte pending reformatting,
# since the exact whitespace inside the gcc-output string literals is
# significant and cannot be safely reconstructed here.
@pytest.mark.datafiles(os.path.join(_dir, 'fixtures', 'compile_file'))
def test_compile_file_failure(datafiles):
spec = SpecFile(file_name='bad.cpp',
compile_commands=['g++ --std=c++11 $@ -o $@.exec', 'echo A'],
test_commands=[],
options=FileOptions())
result = FileResult(file_name='bad.cpp')
with chdir(str(datafiles)):
ret = compile_file(file_spec=spec, results=result, supporting_dir='')
assert ret is False
# Normalize Windows line endings before comparing outputs.
for c_result in result.compile_results:
c_result.output = c_result.output.replace('\r\n', '\n')
assert result.compile_results == [CompileResult(command='g++ --std=c++11 ./bad.cpp -o ./bad.cpp.exec',
                                                output='./bad.cpp: In function ‘int main()’:\n'
                                                       './bad.cpp:7:13: error: expected ‘}’ at end of input\n'
                                                       '     7 |     return 0;\n'
                                                       '       |             ^\n'
                                                       './bad.cpp:5:12: note: to match this ‘{’\n'
                                                       '     5 | int main() {\n'
                                                       '       |            ^\n',
                                                status=RunStatus.CALLED_PROCESS_ERROR)] \
    or result.compile_results == [CompileResult(command='g++ --std=c++11 ./bad.cpp -o ./bad.cpp.exec',
                                                output='./bad.cpp: In function ‘int main()’:\n'
                                                       './bad.cpp:7:13: error: expected ‘}’ at end of input\n'
                                                       '     return 0;\n'
                                                       '             ^\n',
                                                status=RunStatus.CALLED_PROCESS_ERROR)]
# ----------------------------- test_file -----------------------------
@pytest.mark.datafiles(os.path.join(_dir, 'fixtures', 'test_file'))
def test_test_file_success(datafiles):
    """Every test command runs and succeeds: one TestResult per command with
    error=False and no truncation."""
    spec = SpecFile(file_name='good.cpp',
                    compile_commands=[],
                    test_commands=['make good.cpp.exec', '$@.exec'],
                    options=FileOptions())
    result = FileResult(file_name='good.cpp')
    with chdir(str(datafiles)):
        test_file(file_spec=spec, file_results=result, supporting_dir='', interact=False)
    assert result.test_results == [TestResult(command='make good.cpp.exec',
                                              output='g++ --std=c++11 good.cpp -o good.cpp.exec\n',
                                              error=False,
                                              status=RunStatus.SUCCESS,
                                              truncated_after=None),
                                   TestResult(command='./good.cpp.exec',
                                              output='Hello\n',
                                              error=False,
                                              status=RunStatus.SUCCESS,
                                              truncated_after=None)]
@pytest.mark.datafiles(os.path.join(_dir, 'fixtures', 'test_file'))
def test_test_file_alternate(datafiles):
    """'$@' substitutes the matched actual_name (good.cpp), not the missing
    spec name, when running test commands."""
    spec = SpecFile(file_name='missing.cpp',
                    compile_commands=[],
                    test_commands=['make good.cpp.exec', '$@.exec'],
                    options=FileOptions())
    result = FileResult(file_name='missing.cpp', actual_name='good.cpp')
    with chdir(str(datafiles)):
        test_file(file_spec=spec, file_results=result, supporting_dir='', interact=False)
    assert result.test_results == [TestResult(command='make good.cpp.exec',
                                              output='g++ --std=c++11 good.cpp -o good.cpp.exec\n',
                                              error=False,
                                              status=RunStatus.SUCCESS,
                                              truncated_after=None),
                                   TestResult(command='./good.cpp.exec',
                                              output='Hello\n',
                                              error=False,
                                              status=RunStatus.SUCCESS,
                                              truncated_after=None)]
@pytest.mark.datafiles(os.path.join(_dir, 'fixtures', 'test_file'))
def test_test_file_error(datafiles):
    """A test command exiting non-zero is recorded with error=True and
    CALLED_PROCESS_ERROR; empty commands in the list are skipped."""
    spec = SpecFile(file_name='error.cpp',
                    compile_commands=[],
                    test_commands=['', 'make error.cpp.exec', '$@.exec'],
                    options=FileOptions())
    result = FileResult(file_name='error.cpp')
    with chdir(str(datafiles)):
        test_file(file_spec=spec, file_results=result, supporting_dir='', interact=False)
    assert result.test_results == [TestResult(command='make error.cpp.exec',
                                              output='g++ --std=c++11 error.cpp -o error.cpp.exec\n',
                                              error=False,
                                              status=RunStatus.SUCCESS,
                                              truncated_after=None),
                                   TestResult(command='./error.cpp.exec',
                                              output="Command '['./error.cpp.exec']' returned non-zero exit status 1.",
                                              error=True,
                                              status=RunStatus.CALLED_PROCESS_ERROR,
                                              truncated_after=None)]
@pytest.mark.datafiles(os.path.join(_dir, 'fixtures', 'test_file'))
def test_test_file_truncated(datafiles):
    """truncate_output caps a chatty command's output at N characters and
    records truncated_after=N."""
    spec = SpecFile(file_name='chatty.cpp',
                    compile_commands=[],
                    test_commands=['make chatty.cpp.exec', '$@.exec'],
                    options=FileOptions(truncate_output=180))  # 5 lines * 36 chars
    result = FileResult(file_name='chatty.cpp')
    with chdir(str(datafiles)):
        test_file(file_spec=spec, file_results=result, supporting_dir='', interact=False)
    assert result.test_results == [TestResult(command='make chatty.cpp.exec',
                                              output='g++ --std=c++11 chatty.cpp -o chatty.cpp.exec\n',
                                              error=False,
                                              status=RunStatus.SUCCESS,
                                              truncated_after=None),
                                   TestResult(command='./chatty.cpp.exec',
                                              output="Hi, I'm chatty, I like to say a lot\n"
                                                     "Hi, I'm chatty, I like to say a lot\n"
                                                     "Hi, I'm chatty, I like to say a lot\n"
                                                     "Hi, I'm chatty, I like to say a lot\n"
                                                     "Hi, I'm chatty, I like to say a lot\n",
                                              error=False,
                                              status=RunStatus.SUCCESS,
                                              truncated_after=180)]
# ----------------------------- process_file -----------------------------
def test_process_file_fail_get(tmpdir):
    """If the file cannot be found, process_file marks it missing and skips
    both the compile and test phases entirely."""
    spec = SpecFile(file_name='not_a_file.txt',
                    compile_commands=['compile me', 'and again'],
                    test_commands=['a test', 'another test'],
                    options=FileOptions())
    with tmpdir.as_cwd():
        # Put one unrelated file in place so other_files is non-empty.
        touch('other_file.txt')
        result = process_file(file_spec=spec,
                              supporting_dir='.',
                              interact=False,
                              skip_web_compile=False)
    assert result.file_name == 'not_a_file.txt'
    assert not result.contents
    assert not result.compile_results
    assert not result.test_results
    assert result.file_missing is True
    assert not result.last_modified
    assert result.other_files == ['other_file.txt']
    assert result.optional is False
    assert result.compile_optional is False
# End-to-end: a file that fails to compile records its contents and the gcc
# diagnostics but never reaches the test phase. Two expected-output variants
# cover old and new gcc diagnostic formats.
# NOTE(review): structural indentation appears to have been stripped from this
# block during extraction; code preserved byte-for-byte pending reformatting,
# since the leading whitespace inside the textwrap.dedent() literal and the
# gcc-output strings is significant and cannot be safely reconstructed here.
@pytest.mark.datafiles(os.path.join(_dir, 'fixtures', 'process_file'))
def test_process_file_fail_compile(datafiles):
spec = SpecFile(file_name='bad.cpp',
compile_commands=['g++ --std=c++11 $@ -o $@.exec'],
test_commands=['a test', 'another test'],
options=FileOptions())
with chdir(str(datafiles)):
git('init')
git('config', 'user.email', 'an_email@email_provider.com')
git('config', 'user.name', 'Some Random Name')
git('add', 'bad.cpp')
git('commit', '-m', '"Add file"', '--date="Tue Apr 21 12:28:03 2020 -0500"')
result = process_file(file_spec=spec,
supporting_dir='.',
interact=False,
skip_web_compile=False)
assert result.file_name == 'bad.cpp'
assert '\n' + result.contents == textwrap.dedent('''
#include <iostream>\n
using namespace std;\n
int main() {
    cout << "Hello" << endl;
    return 0;
''')
# Normalize Windows line endings before comparing outputs.
for c_result in result.compile_results:
c_result.output = c_result.output.replace('\r\n', '\n')
assert result.compile_results == [CompileResult(command='g++ --std=c++11 ./bad.cpp -o ./bad.cpp.exec',
                                                output='./bad.cpp: In function ‘int main()’:\n'
                                                       './bad.cpp:7:13: error: expected ‘}’ at end of input\n'
                                                       '     7 |     return 0;\n'
                                                       '       |             ^\n'
                                                       './bad.cpp:5:12: note: to match this ‘{’\n'
                                                       '     5 | int main() {\n'
                                                       '       |            ^\n',
                                                status=RunStatus.CALLED_PROCESS_ERROR)] \
    or result.compile_results == [CompileResult(command='g++ --std=c++11 ./bad.cpp -o ./bad.cpp.exec',
                                                output='./bad.cpp: In function ‘int main()’:\n'
                                                       './bad.cpp:7:13: error: expected ‘}’ at end of input\n'
                                                       '     return 0;\n'
                                                       '             ^\n',
                                                status=RunStatus.CALLED_PROCESS_ERROR)]
assert not result.test_results
assert result.file_missing is False
assert result.last_modified == 'Tue Apr 21 12:28:03 2020 -0500'
assert not result.other_files
assert result.optional is False
assert result.compile_optional is False
# End-to-end happy path: get contents, compile, and run the test command.
# NOTE(review): structural indentation appears to have been stripped from this
# block during extraction; code preserved byte-for-byte pending reformatting,
# since the leading whitespace inside the textwrap.dedent() literal is
# significant and cannot be safely reconstructed here.
@pytest.mark.datafiles(os.path.join(_dir, 'fixtures', 'process_file'))
def test_process_file_success(datafiles):
spec = SpecFile(file_name='good.cpp',
compile_commands=['g++ --std=c++11 $@ -o $@.exec'],
test_commands=['$@.exec'],
options=FileOptions())
with chdir(str(datafiles)):
git('init')
git('config', 'user.email', 'an_email@email_provider.com')
git('config', 'user.name', 'Some Random Name')
git('add', 'good.cpp')
git('commit', '-m', '"Add file"', '--date="Tue Apr 21 12:28:03 2020 -0500"')
result = process_file(file_spec=spec,
supporting_dir='.',
interact=False,
skip_web_compile=False)
assert result.file_name == 'good.cpp'
assert '\n' + result.contents == textwrap.dedent('''
#include <iostream>\n
using namespace std;\n
int main() {
    cout << "Hello" << endl;
    return 0;
}
''')
assert result.compile_results == [CompileResult(command='g++ --std=c++11 ./good.cpp -o ./good.cpp.exec',
                                                output='',
                                                status=RunStatus.SUCCESS)]
assert result.test_results == [TestResult(command='./good.cpp.exec',
                                          output='Hello\n',
                                          error=False,
                                          status=RunStatus.SUCCESS,
                                          truncated_after=None)]
assert result.file_missing is False
assert result.last_modified == 'Tue Apr 21 12:28:03 2020 -0500'
assert not result.other_files
assert result.optional is False
assert result.compile_optional is False
# End-to-end with alternate names: the missing spec name falls back to
# good.cpp, which is then compiled and run; the unused alternate bad.cpp is
# reported in other_files.
# NOTE(review): structural indentation appears to have been stripped from this
# block during extraction; code preserved byte-for-byte pending reformatting,
# since the leading whitespace inside the textwrap.dedent() literal is
# significant and cannot be safely reconstructed here.
@pytest.mark.datafiles(os.path.join(_dir, 'fixtures', 'process_file'))
def test_process_file_alternate(datafiles):
spec = SpecFile(file_name='missing.cpp',
alternate_names=['good.cpp', 'bad.cpp'],
compile_commands=['g++ --std=c++11 $@ -o $@.exec'],
test_commands=['$@.exec'],
options=FileOptions())
with chdir(str(datafiles)):
git('init')
git('config', 'user.email', 'an_email@email_provider.com')
git('config', 'user.name', 'Some Random Name')
git('add', 'good.cpp')
git('commit', '-m', '"Add file"', '--date="Tue Apr 21 12:28:03 2020 -0500"')
result = process_file(file_spec=spec,
supporting_dir='.',
interact=False,
skip_web_compile=False)
assert result.file_name == 'missing.cpp'
assert result.actual_name == 'good.cpp'
assert '\n' + result.contents == textwrap.dedent('''
#include <iostream>\n
using namespace std;\n
int main() {
    cout << "Hello" << endl;
    return 0;
}
''')
assert result.compile_results == [CompileResult(command='g++ --std=c++11 ./good.cpp -o ./good.cpp.exec',
                                                output='',
                                                status=RunStatus.SUCCESS)]
assert result.test_results == [TestResult(command='./good.cpp.exec',
                                          output='Hello\n',
                                          error=False,
                                          status=RunStatus.SUCCESS,
                                          truncated_after=None)]
assert result.file_missing is False
assert result.last_modified == 'Tue Apr 21 12:28:03 2020 -0500'
assert result.other_files == ['bad.cpp']
assert result.optional is False
assert result.compile_optional is False
# skip_web_compile=True on a web_file spec: contents are still collected but
# neither compile nor test commands are executed.
# NOTE(review): structural indentation appears to have been stripped from this
# block during extraction; code preserved byte-for-byte pending reformatting,
# since the leading whitespace inside the textwrap.dedent() literal is
# significant and cannot be safely reconstructed here.
@pytest.mark.datafiles(os.path.join(_dir, 'fixtures', 'process_file'))
def test_process_file_skip_web(datafiles):
spec = SpecFile(file_name='good.cpp',
compile_commands=['g++ --std=c++11 $@ -o $@.exec'],
test_commands=['$@.exec'],
options=FileOptions(web_file=True))
with chdir(str(datafiles)):
git('init')
git('config', 'user.email', 'an_email@email_provider.com')
git('config', 'user.name', 'Some Random Name')
git('add', 'good.cpp')
git('commit', '-m', '"Add file"', '--date="Tue Apr 21 12:28:03 2020 -0500"')
result = process_file(file_spec=spec,
supporting_dir='.',
interact=False,
skip_web_compile=True)
assert result.file_name == 'good.cpp'
assert '\n' + result.contents == textwrap.dedent('''
#include <iostream>\n
using namespace std;\n
int main() {
    cout << "Hello" << endl;
    return 0;
}
''')
assert not result.compile_results
assert not result.test_results
assert result.file_missing is False
assert result.last_modified == 'Tue Apr 21 12:28:03 2020 -0500'
assert not result.other_files
assert result.optional is False
assert result.compile_optional is False
| 44.749104 | 119 | 0.508971 | 2,634 | 24,970 | 4.669704 | 0.0653 | 0.068293 | 0.035366 | 0.011951 | 0.886016 | 0.867154 | 0.846748 | 0.835935 | 0.813333 | 0.79878 | 0 | 0.021345 | 0.360192 | 24,970 | 557 | 120 | 44.829443 | 0.74856 | 0.015098 | 0 | 0.788419 | 0 | 0 | 0.210038 | 0.00968 | 0 | 0 | 0 | 0 | 0.256125 | 1 | 0.044543 | false | 0 | 0.026726 | 0 | 0.080178 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
5392c62bda6b14a18288c453066db07398249edc | 16,283 | py | Python | tests/unit/states/test_test.py | byteskeptical/salt | 637fe0b04f38b2274191b005d73b3c6707d7f400 | [
"Apache-2.0"
] | 5 | 2017-02-07T05:39:29.000Z | 2020-06-13T02:07:33.000Z | tests/unit/states/test_test.py | byteskeptical/salt | 637fe0b04f38b2274191b005d73b3c6707d7f400 | [
"Apache-2.0"
] | 86 | 2017-01-27T11:54:46.000Z | 2020-05-20T06:25:26.000Z | tests/unit/states/test_test.py | byteskeptical/salt | 637fe0b04f38b2274191b005d73b3c6707d7f400 | [
"Apache-2.0"
] | 11 | 2017-01-26T19:36:29.000Z | 2021-12-11T07:54:16.000Z | # -*- coding: utf-8 -*-
'''
:codeauthor: Rahul Handay <rahulha@saltstack.com>
'''
# Import Python Libs
from __future__ import absolute_import, print_function, unicode_literals
# Import Salt Testing Libs
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.unit import TestCase, skipIf
from tests.support.mock import (
patch,
MagicMock,
NO_MOCK,
NO_MOCK_REASON
)
# Import Salt Libs
from salt.exceptions import SaltInvocationError
import salt.states.test as test
from salt.utils.odict import OrderedDict
from salt.ext import six
@skipIf(NO_MOCK, NO_MOCK_REASON)
class TestTestCase(TestCase, LoaderModuleMockMixin):
'''
Validate the test state
'''
def setup_loader_modules(self):
    '''
    Provide a minimal __low__ dunder (with an empty watch requisite) so the
    test state module's requisite handling can run under the loader mock.
    '''
    return {test: {'__low__': {'__reqs__': {'watch': ''}}}}
def test_succeed_without_changes(self):
    '''
    Test that test.succeed_without_changes returns success with no changes.
    '''
    ret = {'name': 'salt',
           'changes': {},
           'result': True,
           'comment': ''}
    with patch.dict(test.__opts__, {"test": False}):
        ret.update({'comment': 'Success!'})
        self.assertDictEqual(test.succeed_without_changes('salt'), ret)
def test_fail_without_changes(self):
    '''
    Test that test.fail_without_changes returns failure with no changes.
    '''
    ret = {'name': 'salt',
           'changes': {},
           'result': False,
           'comment': ''}
    with patch.dict(test.__opts__, {"test": False}):
        ret.update({'comment': 'Failure!'})
        self.assertDictEqual(test.fail_without_changes('salt'), ret)
def test_succeed_with_changes(self):
    '''
    Test that test.succeed_with_changes returns success and a non-empty
    changes dict.
    '''
    ret = {'name': 'salt',
           'changes': {},
           'result': False,
           'comment': ''}
    with patch.dict(test.__opts__, {"test": False}):
        ret.update({'changes': {'testing': {'new': 'Something pretended'
                                                   ' to change',
                                            'old': 'Unchanged'}},
                    'comment': 'Success!', 'result': True})
        self.assertDictEqual(test.succeed_with_changes('salt'), ret)
def test_fail_with_changes(self):
    '''
    Test that test.fail_with_changes returns failure and a non-empty
    changes dict.
    '''
    # Bug fix: this test previously called test.succeed_with_changes() and
    # asserted 'Success!'/True (a copy-paste of the test above), so
    # fail_with_changes() was never actually exercised.
    ret = {'name': 'salt',
           'changes': {},
           'result': False,
           'comment': ''}
    with patch.dict(test.__opts__, {"test": False}):
        ret.update({'changes': {'testing': {'new': 'Something pretended'
                                                   ' to change',
                                            'old': 'Unchanged'}},
                    'comment': 'Failure!',
                    'result': False})
        self.assertDictEqual(test.fail_with_changes('salt'), ret)
def test_configurable_test_state(self):
    '''
    Test test.configurable_test_state with and without comment
    '''
    # Configure mock parameters
    mock_name = 'cheese_shop'
    mock_comment = "I'm afraid we're fresh out of Red Leicester sir."
    mock_changes = {
        'testing': {
            'old': 'Unchanged',
            'new': 'Something pretended to change'
        }
    }

    # Test default state without comment
    with patch.dict(test.__opts__, {'test': False}):
        mock_ret = {'name': mock_name,
                    'changes': mock_changes,
                    'result': True,
                    'comment': ''}
        ret = test.configurable_test_state(mock_name)
        self.assertDictEqual(ret, mock_ret)

    # Test default state with comment
    with patch.dict(test.__opts__, {'test': False}):
        mock_ret = {'name': mock_name,
                    'changes': mock_changes,
                    'result': True,
                    'comment': mock_comment}
        ret = test.configurable_test_state(mock_name,
                                           comment=mock_comment)
        self.assertDictEqual(ret, mock_ret)
def test_configurable_test_state_changes(self):
    '''
    Test test.configurable_test_state with permutations of changes and with
    comment
    '''
    # Configure mock parameters
    mock_name = 'cheese_shop'
    mock_comment = "I'm afraid we're fresh out of Red Leicester sir."
    mock_changes = {
        'testing': {
            'old': 'Unchanged',
            'new': 'Something pretended to change'
        }
    }

    # Test changes=Random and comment: changes may or may not be present.
    with patch.dict(test.__opts__, {'test': False}):
        ret = test.configurable_test_state(mock_name,
                                           changes='Random',
                                           comment=mock_comment)
        self.assertEqual(ret['name'], mock_name)
        self.assertIn(ret['changes'], [mock_changes, {}])
        self.assertEqual(ret['result'], True)
        self.assertEqual(ret['comment'], mock_comment)

    # Test changes=True and comment
    with patch.dict(test.__opts__, {'test': False}):
        mock_ret = {'name': mock_name,
                    'changes': mock_changes,
                    'result': True,
                    'comment': mock_comment}
        ret = test.configurable_test_state(mock_name,
                                           changes=True,
                                           comment=mock_comment)
        self.assertDictEqual(ret, mock_ret)

    # Test changes=False and comment
    with patch.dict(test.__opts__, {'test': False}):
        mock_ret = {'name': mock_name,
                    'changes': {},
                    'result': True,
                    'comment': mock_comment}
        ret = test.configurable_test_state(mock_name,
                                           changes=False,
                                           comment=mock_comment)
        self.assertDictEqual(ret, mock_ret)

    # Test changes=Cheese: an unrecognized value raises.
    with patch.dict(test.__opts__, {'test': False}):
        self.assertRaises(SaltInvocationError,
                          test.configurable_test_state,
                          mock_name,
                          changes='Cheese')
def test_configurable_test_state_result(self):
    '''
    Test test.configurable_test_state with permutations of result and with
    comment
    '''
    # Configure mock parameters
    mock_name = 'cheese_shop'
    mock_comment = "I'm afraid we're fresh out of Red Leicester sir."
    mock_changes = {
        'testing': {
            'old': 'Unchanged',
            'new': 'Something pretended to change'
        }
    }

    # Test result=Random and comment: result may be either True or False.
    with patch.dict(test.__opts__, {'test': False}):
        ret = test.configurable_test_state(mock_name,
                                           result='Random',
                                           comment=mock_comment)
        self.assertEqual(ret['name'], mock_name)
        self.assertEqual(ret['changes'], mock_changes)
        self.assertIn(ret['result'], [True, False])
        self.assertEqual(ret['comment'], mock_comment)

    # Test result=True and comment
    with patch.dict(test.__opts__, {'test': False}):
        mock_ret = {'name': mock_name,
                    'changes': mock_changes,
                    'result': True,
                    'comment': mock_comment}
        ret = test.configurable_test_state(mock_name,
                                           result=True,
                                           comment=mock_comment)
        self.assertDictEqual(ret, mock_ret)

    # Test result=False and comment
    with patch.dict(test.__opts__, {'test': False}):
        mock_ret = {'name': mock_name,
                    'changes': mock_changes,
                    'result': False,
                    'comment': mock_comment}
        ret = test.configurable_test_state(mock_name,
                                           result=False,
                                           comment=mock_comment)
        self.assertDictEqual(ret, mock_ret)

    # Test result=Cheese: an unrecognized value raises.
    with patch.dict(test.__opts__, {'test': False}):
        self.assertRaises(SaltInvocationError,
                          test.configurable_test_state,
                          mock_name,
                          result='Cheese')
def test_configurable_test_state_test(self):
'''
Test test.configurable_test_state with test=True with and without
comment
'''
# Configure mock parameters
mock_name = 'cheese_shop'
mock_comment = "I'm afraid we're fresh out of Red Leicester sir."
mock_changes = {
'testing': {
'old': 'Unchanged',
'new': 'Something pretended to change'
}
}
# Test test=True without comment
with patch.dict(test.__opts__, {'test': True}):
mock_ret = {'name': mock_name,
'changes': mock_changes,
'result': None,
'comment': 'This is a test'}
ret = test.configurable_test_state(mock_name)
self.assertDictEqual(ret, mock_ret)
# Test test=True with comment
with patch.dict(test.__opts__, {'test': True}):
mock_ret = {'name': mock_name,
'changes': mock_changes,
'result': None,
'comment': mock_comment}
ret = test.configurable_test_state(mock_name,
comment=mock_comment)
self.assertDictEqual(ret, mock_ret)
# Test test=True and changes=True with comment
with patch.dict(test.__opts__, {'test': True}):
mock_ret = {'name': mock_name,
'changes': mock_changes,
'result': None,
'comment': mock_comment}
ret = test.configurable_test_state(mock_name,
changes=True,
comment=mock_comment)
self.assertDictEqual(ret, mock_ret)
# Test test=True and changes=False with comment
with patch.dict(test.__opts__, {'test': True}):
mock_ret = {'name': mock_name,
'changes': {},
'result': True,
'comment': mock_comment}
ret = test.configurable_test_state(mock_name,
changes=False,
comment=mock_comment)
self.assertDictEqual(ret, mock_ret)
def test_mod_watch(self):
'''
Test to call this function via a watch statement
'''
ret = {'name': 'salt',
'changes': {},
'result': True,
'comment': ''}
ret.update({'changes': {'Requisites with changes': []},
'comment': 'Watch statement fired.'})
self.assertDictEqual(test.mod_watch('salt'), ret)
def test_check_pillar_present(self):
'''
Test to ensure the check_pillar function
works properly with the 'present' keyword in
the absence of a 'type' keyword.
'''
ret = {
'name': 'salt',
'changes': {},
'result': True,
'comment': ''
}
pillar_return = 'I am a pillar.'
pillar_mock = MagicMock(return_value=pillar_return)
with patch.dict(test.__salt__, {'pillar.get': pillar_mock}):
self.assertEqual(test.check_pillar('salt', present='my_pillar'), ret)
def test_check_pillar_string(self):
'''
Test to ensure the check_pillar function
works properly with the 'key_type' checks,
using the string key_type.
'''
ret = {
'name': 'salt',
'changes': {},
'result': True,
'comment': ''
}
pillar_return = 'I am a pillar.'
pillar_mock = MagicMock(return_value=pillar_return)
with patch.dict(test.__salt__, {'pillar.get': pillar_mock}):
self.assertEqual(test.check_pillar('salt', string='my_pillar'), ret)
# With unicode (py2) or str (py3) strings
pillar_return = six.text_type('I am a pillar.')
pillar_mock = MagicMock(return_value=pillar_return)
with patch.dict(test.__salt__, {'pillar.get': pillar_mock}):
self.assertEqual(test.check_pillar('salt', string='my_pillar'), ret)
# With a dict
pillar_return = {'this': 'dictionary'}
pillar_mock = MagicMock(return_value=pillar_return)
with patch.dict(test.__salt__, {'pillar.get': pillar_mock}):
self.assertFalse(test.check_pillar('salt', string='my_pillar')['result'])
# With a list
pillar_return = ['I am a pillar.']
pillar_mock = MagicMock(return_value=pillar_return)
with patch.dict(test.__salt__, {'pillar.get': pillar_mock}):
self.assertFalse(test.check_pillar('salt', string='my_pillar')['result'])
# With a boolean
pillar_return = True
pillar_mock = MagicMock(return_value=pillar_return)
with patch.dict(test.__salt__, {'pillar.get': pillar_mock}):
self.assertFalse(test.check_pillar('salt', string='my_pillar')['result'])
# With an int
pillar_return = 1
pillar_mock = MagicMock(return_value=pillar_return)
with patch.dict(test.__salt__, {'pillar.get': pillar_mock}):
self.assertFalse(test.check_pillar('salt', string='my_pillar')['result'])
def test_check_pillar_dictionary(self):
'''
Test to ensure the check_pillar function
works properly with the 'key_type' checks,
using the dictionary key_type.
'''
ret = {
'name': 'salt',
'changes': {},
'result': True,
'comment': ''
}
pillar_return = {'this': 'dictionary'}
pillar_mock = MagicMock(return_value=pillar_return)
with patch.dict(test.__salt__, {'pillar.get': pillar_mock}):
self.assertEqual(test.check_pillar('salt', dictionary='my_pillar'), ret)
# With an ordered dict
pillar_return = OrderedDict({'this': 'dictionary'})
pillar_mock = MagicMock(return_value=pillar_return)
with patch.dict(test.__salt__, {'pillar.get': pillar_mock}):
self.assertEqual(test.check_pillar('salt', dictionary='my_pillar'), ret)
# With a string
pillar_return = 'I am a pillar.'
pillar_mock = MagicMock(return_value=pillar_return)
with patch.dict(test.__salt__, {'pillar.get': pillar_mock}):
self.assertFalse(test.check_pillar('salt', dictionary='my_pillar')['result'])
# With a list
pillar_return = ['I am a pillar.']
pillar_mock = MagicMock(return_value=pillar_return)
with patch.dict(test.__salt__, {'pillar.get': pillar_mock}):
self.assertFalse(test.check_pillar('salt', dictionary='my_pillar')['result'])
# With a boolean
pillar_return = True
pillar_mock = MagicMock(return_value=pillar_return)
with patch.dict(test.__salt__, {'pillar.get': pillar_mock}):
self.assertFalse(test.check_pillar('salt', dictionary='my_pillar')['result'])
# With an int
pillar_return = 1
pillar_mock = MagicMock(return_value=pillar_return)
with patch.dict(test.__salt__, {'pillar.get': pillar_mock}):
self.assertFalse(test.check_pillar('salt', dictionary='my_pillar')['result'])
| 40.91206 | 89 | 0.52607 | 1,584 | 16,283 | 5.142045 | 0.094697 | 0.034254 | 0.049478 | 0.064702 | 0.85795 | 0.841007 | 0.800982 | 0.773235 | 0.768938 | 0.756415 | 0 | 0.000483 | 0.363692 | 16,283 | 397 | 90 | 41.015113 | 0.785638 | 0.104772 | 0 | 0.773852 | 0 | 0 | 0.129119 | 0 | 0 | 0 | 0 | 0 | 0.134276 | 1 | 0.045936 | false | 0 | 0.028269 | 0.003534 | 0.081272 | 0.003534 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
54d9359b93ce583adeb65c2751fd0f6e9885e849 | 7,909 | py | Python | google/cloud/datalabeling_v1beta1/gapic/data_labeling_service_client_config.py | vam-google/python-datalabeling | 17d0c0efc03be147317761c006a3c206d0680e26 | [
"Apache-2.0"
] | null | null | null | google/cloud/datalabeling_v1beta1/gapic/data_labeling_service_client_config.py | vam-google/python-datalabeling | 17d0c0efc03be147317761c006a3c206d0680e26 | [
"Apache-2.0"
] | 40 | 2019-07-16T10:04:48.000Z | 2020-01-20T09:04:59.000Z | google/cloud/datalabeling_v1beta1/gapic/data_labeling_service_client_config.py | vam-google/python-datalabeling | 17d0c0efc03be147317761c006a3c206d0680e26 | [
"Apache-2.0"
] | 2 | 2019-07-18T00:05:31.000Z | 2019-11-27T14:17:22.000Z | config = {
"interfaces": {
"google.cloud.datalabeling.v1beta1.DataLabelingService": {
"retry_codes": {
"idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"],
"non_idempotent": [],
},
"retry_params": {
"default": {
"initial_retry_delay_millis": 100,
"retry_delay_multiplier": 1.3,
"max_retry_delay_millis": 60000,
"initial_rpc_timeout_millis": 20000,
"rpc_timeout_multiplier": 1.0,
"max_rpc_timeout_millis": 20000,
"total_timeout_millis": 600000,
}
},
"methods": {
"CreateDataset": {
"timeout_millis": 60000,
"retry_codes_name": "non_idempotent",
"retry_params_name": "default",
},
"GetDataset": {
"timeout_millis": 60000,
"retry_codes_name": "idempotent",
"retry_params_name": "default",
},
"ListDatasets": {
"timeout_millis": 60000,
"retry_codes_name": "idempotent",
"retry_params_name": "default",
},
"DeleteDataset": {
"timeout_millis": 60000,
"retry_codes_name": "non_idempotent",
"retry_params_name": "default",
},
"ImportData": {
"timeout_millis": 60000,
"retry_codes_name": "non_idempotent",
"retry_params_name": "default",
},
"ExportData": {
"timeout_millis": 60000,
"retry_codes_name": "non_idempotent",
"retry_params_name": "default",
},
"GetDataItem": {
"timeout_millis": 60000,
"retry_codes_name": "idempotent",
"retry_params_name": "default",
},
"ListDataItems": {
"timeout_millis": 60000,
"retry_codes_name": "idempotent",
"retry_params_name": "default",
},
"GetAnnotatedDataset": {
"timeout_millis": 60000,
"retry_codes_name": "idempotent",
"retry_params_name": "default",
},
"ListAnnotatedDatasets": {
"timeout_millis": 60000,
"retry_codes_name": "idempotent",
"retry_params_name": "default",
},
"DeleteAnnotatedDataset": {
"timeout_millis": 60000,
"retry_codes_name": "non_idempotent",
"retry_params_name": "default",
},
"LabelImage": {
"timeout_millis": 60000,
"retry_codes_name": "non_idempotent",
"retry_params_name": "default",
},
"LabelVideo": {
"timeout_millis": 60000,
"retry_codes_name": "non_idempotent",
"retry_params_name": "default",
},
"LabelText": {
"timeout_millis": 60000,
"retry_codes_name": "non_idempotent",
"retry_params_name": "default",
},
"GetExample": {
"timeout_millis": 60000,
"retry_codes_name": "idempotent",
"retry_params_name": "default",
},
"ListExamples": {
"timeout_millis": 60000,
"retry_codes_name": "idempotent",
"retry_params_name": "default",
},
"CreateAnnotationSpecSet": {
"timeout_millis": 60000,
"retry_codes_name": "non_idempotent",
"retry_params_name": "default",
},
"GetAnnotationSpecSet": {
"timeout_millis": 60000,
"retry_codes_name": "idempotent",
"retry_params_name": "default",
},
"ListAnnotationSpecSets": {
"timeout_millis": 60000,
"retry_codes_name": "idempotent",
"retry_params_name": "default",
},
"DeleteAnnotationSpecSet": {
"timeout_millis": 60000,
"retry_codes_name": "non_idempotent",
"retry_params_name": "default",
},
"CreateInstruction": {
"timeout_millis": 60000,
"retry_codes_name": "non_idempotent",
"retry_params_name": "default",
},
"GetInstruction": {
"timeout_millis": 60000,
"retry_codes_name": "idempotent",
"retry_params_name": "default",
},
"ListInstructions": {
"timeout_millis": 60000,
"retry_codes_name": "idempotent",
"retry_params_name": "default",
},
"DeleteInstruction": {
"timeout_millis": 60000,
"retry_codes_name": "non_idempotent",
"retry_params_name": "default",
},
"GetEvaluation": {
"timeout_millis": 60000,
"retry_codes_name": "idempotent",
"retry_params_name": "default",
},
"SearchEvaluations": {
"timeout_millis": 60000,
"retry_codes_name": "idempotent",
"retry_params_name": "default",
},
"SearchExampleComparisons": {
"timeout_millis": 60000,
"retry_codes_name": "non_idempotent",
"retry_params_name": "default",
},
"CreateEvaluationJob": {
"timeout_millis": 60000,
"retry_codes_name": "non_idempotent",
"retry_params_name": "default",
},
"UpdateEvaluationJob": {
"timeout_millis": 60000,
"retry_codes_name": "non_idempotent",
"retry_params_name": "default",
},
"GetEvaluationJob": {
"timeout_millis": 60000,
"retry_codes_name": "idempotent",
"retry_params_name": "default",
},
"PauseEvaluationJob": {
"timeout_millis": 60000,
"retry_codes_name": "non_idempotent",
"retry_params_name": "default",
},
"ResumeEvaluationJob": {
"timeout_millis": 60000,
"retry_codes_name": "non_idempotent",
"retry_params_name": "default",
},
"DeleteEvaluationJob": {
"timeout_millis": 60000,
"retry_codes_name": "non_idempotent",
"retry_params_name": "default",
},
"ListEvaluationJobs": {
"timeout_millis": 60000,
"retry_codes_name": "idempotent",
"retry_params_name": "default",
},
},
}
}
}
| 40.768041 | 67 | 0.41712 | 479 | 7,909 | 6.448852 | 0.139875 | 0.155714 | 0.237941 | 0.253156 | 0.721916 | 0.721916 | 0.721916 | 0.721916 | 0.721916 | 0.721916 | 0 | 0.048379 | 0.477304 | 7,909 | 193 | 68 | 40.979275 | 0.698839 | 0 | 0 | 0.528497 | 0 | 0 | 0.391832 | 0.041472 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.005181 | 0 | 0.005181 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
54dc5ad5186a557298ed4bcf643e9835d4b4db22 | 210,843 | py | Python | subversion/bindings/swig/python/wc.py | ruchirarya/svn | 81502a213251c2af21361a942bd9a8cd7d3adb9f | [
"Apache-2.0"
] | 7 | 2018-01-18T06:13:21.000Z | 2020-07-09T03:46:16.000Z | depe/subversion/subversion/bindings/swig/python/wc.py | louis-tru/TouchCode2 | 91c182aeaa37fba16e381ea749d32906dab1aeea | [
"BSD-3-Clause-Clear"
] | 4 | 2015-01-12T22:23:41.000Z | 2015-01-12T22:33:52.000Z | src/subversion/subversion/bindings/swig/python/wc.py | schwern/alien-svn | 7423b08f9bc4fdf0ac0d7ea53495269b21b3e8f9 | [
"Apache-2.0"
] | 1 | 2020-11-04T07:19:37.000Z | 2020-11-04T07:19:37.000Z | # This file was automatically generated by SWIG (http://www.swig.org).
# Version 2.0.9
#
# Do not make changes to this file unless you know what you are doing--modify
# the SWIG interface file instead.
# SWIG bootstrap: locate and load the native extension module '_wc' that
# implements every wrapper in this file.
from sys import version_info
if version_info >= (2,6,0):
    def swig_import_helper():
        # Prefer the compiled '_wc' extension that sits next to this file;
        # fall back to a normal sys.path import if find_module cannot see it.
        from os.path import dirname
        import imp
        fp = None
        try:
            fp, pathname, description = imp.find_module('_wc', [dirname(__file__)])
        except ImportError:
            import _wc
            return _wc
        if fp is not None:
            try:
                _mod = imp.load_module('_wc', fp, pathname, description)
            finally:
                # Always close the file handle that find_module opened.
                fp.close()
            return _mod
    _wc = swig_import_helper()
    del swig_import_helper
else:
    # Interpreters older than 2.6: plain import on sys.path.
    import _wc
del version_info
def _swig_setattr_nondynamic(self,class_type,name,value,static=1):
if (name == "thisown"): return self.this.own(value)
if (name == "this"):
if type(value).__name__ == 'SwigPyObject':
self.__dict__[name] = value
return
method = class_type.__swig_setmethods__.get(name,None)
if method: return method(self,value)
if (not static):
self.__dict__[name] = value
else:
raise AttributeError("You cannot add attributes to %s" % self)
def _swig_setattr(self, class_type, name, value):
    """Dynamic-attribute variant of _swig_setattr_nondynamic: names with no
    registered setter are stored on the instance instead of raising."""
    return _swig_setattr_nondynamic(self, class_type, name, value, 0)
def _swig_getattr(self,class_type,name):
if (name == "thisown"): return self.this.own()
method = class_type.__swig_getmethods__.get(name,None)
if method: return method(self)
raise AttributeError(name)
def _swig_repr(self):
try: strthis = "proxy of " + self.this.__repr__()
except: strthis = ""
return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
def _copy_metadata_deep(value, old_value):
"""Copy all attributes of old_value into value, recursively traversing
lists and dicts if needed."""
if value is None or old_value is None or value is old_value: return
if isinstance(value, dict):
for k, v in value.iteritems():
_copy_metadata_deep(v, old_value[k])
elif isinstance(value, list):
for v, old_v in zip(value, old_value):
_copy_metadata_deep(v, old_v)
else:
try:
value.__dict__.update(old_value.__dict__)
except AttributeError:
pass
def _assert_valid_deep(value):
"""Assert value's validity, recursively traversing lists and dicts."""
if isinstance(value, dict):
for v in value.itervalues():
_assert_valid_deep(v)
elif isinstance(value, list):
for v in value:
_assert_valid_deep(v)
else:
if hasattr(value, "assert_valid"):
value.assert_valid()
import libsvn.core
import libsvn.delta
import libsvn.ra
def svn_wc_version():
    """svn_wc_version() -> svn_version_t const *"""
    # Thin delegation to the C extension; reports libsvn_wc's version.
    return _wc.svn_wc_version()

# Translation flag constants, re-exported verbatim from the C extension
# (see SVN_WC_TRANSLATE_* in svn_wc.h for their meanings).
SVN_WC_TRANSLATE_FROM_NF = _wc.SVN_WC_TRANSLATE_FROM_NF
SVN_WC_TRANSLATE_TO_NF = _wc.SVN_WC_TRANSLATE_TO_NF
SVN_WC_TRANSLATE_FORCE_EOL_REPAIR = _wc.SVN_WC_TRANSLATE_FORCE_EOL_REPAIR
SVN_WC_TRANSLATE_NO_OUTPUT_CLEANUP = _wc.SVN_WC_TRANSLATE_NO_OUTPUT_CLEANUP
SVN_WC_TRANSLATE_FORCE_COPY = _wc.SVN_WC_TRANSLATE_FORCE_COPY
SVN_WC_TRANSLATE_USE_GLOBAL_TMP = _wc.SVN_WC_TRANSLATE_USE_GLOBAL_TMP
# ---------------------------------------------------------------------------
# Working-copy context and administrative-area ("adm") access wrappers.
# Each function forwards *args unchanged to the C extension; the docstrings
# are the SWIG-generated C signatures.
# ---------------------------------------------------------------------------

def svn_wc_context_create(*args):
    """svn_wc_context_create(svn_config_t const * config, apr_pool_t result_pool, apr_pool_t scratch_pool) -> svn_error_t"""
    return _wc.svn_wc_context_create(*args)

def svn_wc_context_destroy(*args):
    """svn_wc_context_destroy(svn_wc_context_t * wc_ctx) -> svn_error_t"""
    return _wc.svn_wc_context_destroy(*args)

# Deprecated adm-access open/close family (the un-numbered and lower-numbered
# variants are kept for backward compatibility with older callers).
def svn_wc_adm_open3(*args):
    """
    svn_wc_adm_open3(svn_wc_adm_access_t * associated, char const * path, svn_boolean_t write_lock, int levels_to_lock,
        svn_cancel_func_t cancel_func, apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_adm_open3(*args)

def svn_wc_adm_open2(*args):
    """
    svn_wc_adm_open2(svn_wc_adm_access_t * associated, char const * path, svn_boolean_t write_lock, int levels_to_lock,
        apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_adm_open2(*args)

def svn_wc_adm_open(*args):
    """
    svn_wc_adm_open(svn_wc_adm_access_t * associated, char const * path, svn_boolean_t write_lock, svn_boolean_t tree_lock,
        apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_adm_open(*args)

def svn_wc_adm_probe_open3(*args):
    """
    svn_wc_adm_probe_open3(svn_wc_adm_access_t * associated, char const * path, svn_boolean_t write_lock, int levels_to_lock,
        svn_cancel_func_t cancel_func, apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_adm_probe_open3(*args)

def svn_wc_adm_probe_open2(*args):
    """
    svn_wc_adm_probe_open2(svn_wc_adm_access_t * associated, char const * path, svn_boolean_t write_lock, int levels_to_lock,
        apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_adm_probe_open2(*args)

def svn_wc_adm_probe_open(*args):
    """
    svn_wc_adm_probe_open(svn_wc_adm_access_t * associated, char const * path, svn_boolean_t write_lock, svn_boolean_t tree_lock,
        apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_adm_probe_open(*args)

def svn_wc_adm_open_anchor(*args):
    """
    svn_wc_adm_open_anchor(char const * path, svn_boolean_t write_lock, int levels_to_lock, svn_cancel_func_t cancel_func,
        apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_adm_open_anchor(*args)

def svn_wc_adm_retrieve(*args):
    """svn_wc_adm_retrieve(svn_wc_adm_access_t * associated, char const * path, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_adm_retrieve(*args)

def svn_wc_adm_probe_retrieve(*args):
    """svn_wc_adm_probe_retrieve(svn_wc_adm_access_t * associated, char const * path, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_adm_probe_retrieve(*args)

def svn_wc_adm_probe_try3(*args):
    """
    svn_wc_adm_probe_try3(svn_wc_adm_access_t * associated, char const * path, svn_boolean_t write_lock, int levels_to_lock,
        svn_cancel_func_t cancel_func, apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_adm_probe_try3(*args)

def svn_wc_adm_probe_try2(*args):
    """
    svn_wc_adm_probe_try2(svn_wc_adm_access_t * associated, char const * path, svn_boolean_t write_lock, int levels_to_lock,
        apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_adm_probe_try2(*args)

def svn_wc_adm_probe_try(*args):
    """
    svn_wc_adm_probe_try(svn_wc_adm_access_t * associated, char const * path, svn_boolean_t write_lock, svn_boolean_t tree_lock,
        apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_adm_probe_try(*args)

def svn_wc_adm_close2(*args):
    """svn_wc_adm_close2(svn_wc_adm_access_t * adm_access, apr_pool_t scratch_pool) -> svn_error_t"""
    return _wc.svn_wc_adm_close2(*args)

def svn_wc_adm_close(*args):
    """svn_wc_adm_close(svn_wc_adm_access_t * adm_access) -> svn_error_t"""
    return _wc.svn_wc_adm_close(*args)

# Accessors on an open adm-access baton.
def svn_wc_adm_access_path(*args):
    """svn_wc_adm_access_path(svn_wc_adm_access_t const * adm_access) -> char const *"""
    return _wc.svn_wc_adm_access_path(*args)

def svn_wc_adm_access_pool(*args):
    """svn_wc_adm_access_pool(svn_wc_adm_access_t const * adm_access) -> apr_pool_t"""
    return _wc.svn_wc_adm_access_pool(*args)

def svn_wc_adm_locked(*args):
    """svn_wc_adm_locked(svn_wc_adm_access_t const * adm_access) -> svn_boolean_t"""
    return _wc.svn_wc_adm_locked(*args)

def svn_wc_locked2(*args):
    """svn_wc_locked2(svn_wc_context_t * wc_ctx, char const * local_abspath, apr_pool_t scratch_pool) -> svn_error_t"""
    return _wc.svn_wc_locked2(*args)

def svn_wc_locked(*args):
    """svn_wc_locked(char const * path, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_locked(*args)

# Name of the working-copy administrative directory (normally ".svn"),
# re-exported from the C extension.
SVN_WC_ADM_DIR_NAME = _wc.SVN_WC_ADM_DIR_NAME

def svn_wc_is_adm_dir(*args):
    """svn_wc_is_adm_dir(char const * name, apr_pool_t pool) -> svn_boolean_t"""
    return _wc.svn_wc_is_adm_dir(*args)

def svn_wc_get_adm_dir(*args):
    """svn_wc_get_adm_dir(apr_pool_t pool) -> char const *"""
    return _wc.svn_wc_get_adm_dir(*args)

def svn_wc_set_adm_dir(*args):
    """svn_wc_set_adm_dir(char const * name, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_set_adm_dir(*args)

# Traversal-info helpers (externals/depths discovered during an update walk).
def svn_wc_init_traversal_info(*args):
    """svn_wc_init_traversal_info(apr_pool_t pool) -> svn_wc_traversal_info_t *"""
    return _wc.svn_wc_init_traversal_info(*args)

def svn_wc_edited_externals(*args):
    """svn_wc_edited_externals(svn_wc_traversal_info_t * traversal_info)"""
    return _wc.svn_wc_edited_externals(*args)

def svn_wc_traversed_depths(*args):
    """svn_wc_traversed_depths(svn_wc_traversal_info_t * traversal_info)"""
    return _wc.svn_wc_traversed_depths(*args)
class svn_wc_external_item2_t:
    """Proxy of C svn_wc_external_item2_t struct"""
    # SWIG dispatch tables: attribute names mapped to the C-level
    # getter/setter functions generated for each struct member.
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_wc_external_item2_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_wc_external_item2_t, name)
    __repr__ = _swig_repr
    __swig_setmethods__["target_dir"] = _wc.svn_wc_external_item2_t_target_dir_set
    __swig_getmethods__["target_dir"] = _wc.svn_wc_external_item2_t_target_dir_get
    __swig_setmethods__["url"] = _wc.svn_wc_external_item2_t_url_set
    __swig_getmethods__["url"] = _wc.svn_wc_external_item2_t_url_get
    __swig_setmethods__["revision"] = _wc.svn_wc_external_item2_t_revision_set
    __swig_getmethods__["revision"] = _wc.svn_wc_external_item2_t_revision_get
    __swig_setmethods__["peg_revision"] = _wc.svn_wc_external_item2_t_peg_revision_set
    __swig_getmethods__["peg_revision"] = _wc.svn_wc_external_item2_t_peg_revision_get
    def set_parent_pool(self, parent_pool=None):
        """Record the APR pool that owns this proxy's C memory (defaults to
        the global application pool) and keep a weakref so assert_valid()
        can detect use after the pool has been destroyed."""
        import libsvn.core, weakref
        self.__dict__["_parent_pool"] = parent_pool or libsvn.core.application_pool;
        if self.__dict__["_parent_pool"]:
            self.__dict__["_is_valid"] = weakref.ref(
                self.__dict__["_parent_pool"]._is_valid)
    def assert_valid(self):
        """Assert that this object is using valid pool memory"""
        if "_is_valid" in self.__dict__:
            assert self.__dict__["_is_valid"](), "Variable has already been deleted"
    def __getattr__(self, name):
        """Get an attribute from this object"""
        self.assert_valid()
        value = _swig_getattr(self, self.__class__, name)
        # Re-attach any Python-side metadata previously stored for this
        # member by __setattr__, then validate the returned wrapper(s).
        members = self.__dict__.get("_members")
        if members is not None:
            _copy_metadata_deep(value, members.get(name))
        _assert_valid_deep(value)
        return value
    def __setattr__(self, name, value):
        """Set an attribute on this object"""
        self.assert_valid()
        # Remember the Python object so its metadata survives the C round-trip.
        self.__dict__.setdefault("_members",{})[name] = value
        return _swig_setattr(self, self.__class__, name, value)
    def __init__(self):
        """__init__(svn_wc_external_item2_t self) -> svn_wc_external_item2_t"""
        this = _wc.new_svn_wc_external_item2_t()
        # SWIG idiom: append to an existing 'this' list if present,
        # otherwise store the new SwigPyObject directly.
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _wc.delete_svn_wc_external_item2_t
    __del__ = lambda self : None;
# Register the proxy class with the C extension so C code can construct it.
svn_wc_external_item2_t_swigregister = _wc.svn_wc_external_item2_t_swigregister
svn_wc_external_item2_t_swigregister(svn_wc_external_item2_t)
# Constructors/duplicators for external-item structs; thin C delegations.
def svn_wc_external_item2_create(*args):
    """svn_wc_external_item2_create(apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_external_item2_create(*args)

def svn_wc_external_item_create(*args):
    """svn_wc_external_item_create(apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_external_item_create(*args)

def svn_wc_external_item2_dup(*args):
    """svn_wc_external_item2_dup(svn_wc_external_item2_t item, apr_pool_t pool) -> svn_wc_external_item2_t"""
    return _wc.svn_wc_external_item2_dup(*args)
class svn_wc_external_item_t:
    """Proxy of C svn_wc_external_item_t struct"""
    # SWIG dispatch tables: attribute names mapped to the C-level
    # getter/setter functions generated for each struct member.
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_wc_external_item_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_wc_external_item_t, name)
    __repr__ = _swig_repr
    __swig_setmethods__["target_dir"] = _wc.svn_wc_external_item_t_target_dir_set
    __swig_getmethods__["target_dir"] = _wc.svn_wc_external_item_t_target_dir_get
    __swig_setmethods__["url"] = _wc.svn_wc_external_item_t_url_set
    __swig_getmethods__["url"] = _wc.svn_wc_external_item_t_url_get
    __swig_setmethods__["revision"] = _wc.svn_wc_external_item_t_revision_set
    __swig_getmethods__["revision"] = _wc.svn_wc_external_item_t_revision_get
    def set_parent_pool(self, parent_pool=None):
        """Record the APR pool that owns this proxy's C memory (defaults to
        the global application pool) and keep a weakref so assert_valid()
        can detect use after the pool has been destroyed."""
        import libsvn.core, weakref
        self.__dict__["_parent_pool"] = parent_pool or libsvn.core.application_pool;
        if self.__dict__["_parent_pool"]:
            self.__dict__["_is_valid"] = weakref.ref(
                self.__dict__["_parent_pool"]._is_valid)
    def assert_valid(self):
        """Assert that this object is using valid pool memory"""
        if "_is_valid" in self.__dict__:
            assert self.__dict__["_is_valid"](), "Variable has already been deleted"
    def __getattr__(self, name):
        """Get an attribute from this object"""
        self.assert_valid()
        value = _swig_getattr(self, self.__class__, name)
        # Re-attach any Python-side metadata previously stored for this
        # member by __setattr__, then validate the returned wrapper(s).
        members = self.__dict__.get("_members")
        if members is not None:
            _copy_metadata_deep(value, members.get(name))
        _assert_valid_deep(value)
        return value
    def __setattr__(self, name, value):
        """Set an attribute on this object"""
        self.assert_valid()
        # Remember the Python object so its metadata survives the C round-trip.
        self.__dict__.setdefault("_members",{})[name] = value
        return _swig_setattr(self, self.__class__, name, value)
    def __init__(self):
        """__init__(svn_wc_external_item_t self) -> svn_wc_external_item_t"""
        this = _wc.new_svn_wc_external_item_t()
        # SWIG idiom: append to an existing 'this' list if present,
        # otherwise store the new SwigPyObject directly.
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _wc.delete_svn_wc_external_item_t
    __del__ = lambda self : None;
# Register the proxy class with the C extension so C code can construct it.
svn_wc_external_item_t_swigregister = _wc.svn_wc_external_item_t_swigregister
svn_wc_external_item_t_swigregister(svn_wc_external_item_t)
def svn_wc_external_item_dup(*args):
    """svn_wc_external_item_dup(svn_wc_external_item_t item, apr_pool_t pool) -> svn_wc_external_item_t"""
    return _wc.svn_wc_external_item_dup(*args)

# svn:externals property parsers; the un-numbered and lower-numbered
# variants are kept for backward compatibility.
def svn_wc_parse_externals_description3(*args):
    """
    svn_wc_parse_externals_description3(char const * parent_directory, char const * desc, svn_boolean_t canonicalize_url,
        apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_parse_externals_description3(*args)

def svn_wc_parse_externals_description2(*args):
    """svn_wc_parse_externals_description2(char const * parent_directory, char const * desc, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_parse_externals_description2(*args)

def svn_wc_parse_externals_description(*args):
    """svn_wc_parse_externals_description(char const * parent_directory, char const * desc, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_parse_externals_description(*args)
# Notification action constants, re-exported verbatim from the C extension.
svn_wc_notify_add = _wc.svn_wc_notify_add
svn_wc_notify_copy = _wc.svn_wc_notify_copy
svn_wc_notify_delete = _wc.svn_wc_notify_delete
svn_wc_notify_restore = _wc.svn_wc_notify_restore
svn_wc_notify_revert = _wc.svn_wc_notify_revert
svn_wc_notify_failed_revert = _wc.svn_wc_notify_failed_revert
svn_wc_notify_resolved = _wc.svn_wc_notify_resolved
svn_wc_notify_skip = _wc.svn_wc_notify_skip
svn_wc_notify_update_delete = _wc.svn_wc_notify_update_delete
svn_wc_notify_update_add = _wc.svn_wc_notify_update_add
svn_wc_notify_update_update = _wc.svn_wc_notify_update_update
svn_wc_notify_update_completed = _wc.svn_wc_notify_update_completed
svn_wc_notify_update_external = _wc.svn_wc_notify_update_external
svn_wc_notify_status_completed = _wc.svn_wc_notify_status_completed
svn_wc_notify_status_external = _wc.svn_wc_notify_status_external
svn_wc_notify_commit_modified = _wc.svn_wc_notify_commit_modified
svn_wc_notify_commit_added = _wc.svn_wc_notify_commit_added
svn_wc_notify_commit_deleted = _wc.svn_wc_notify_commit_deleted
svn_wc_notify_commit_replaced = _wc.svn_wc_notify_commit_replaced
svn_wc_notify_commit_postfix_txdelta = _wc.svn_wc_notify_commit_postfix_txdelta
svn_wc_notify_blame_revision = _wc.svn_wc_notify_blame_revision
svn_wc_notify_locked = _wc.svn_wc_notify_locked
svn_wc_notify_unlocked = _wc.svn_wc_notify_unlocked
svn_wc_notify_failed_lock = _wc.svn_wc_notify_failed_lock
svn_wc_notify_failed_unlock = _wc.svn_wc_notify_failed_unlock
svn_wc_notify_exists = _wc.svn_wc_notify_exists
svn_wc_notify_changelist_set = _wc.svn_wc_notify_changelist_set
svn_wc_notify_changelist_clear = _wc.svn_wc_notify_changelist_clear
svn_wc_notify_changelist_moved = _wc.svn_wc_notify_changelist_moved
svn_wc_notify_merge_begin = _wc.svn_wc_notify_merge_begin
svn_wc_notify_foreign_merge_begin = _wc.svn_wc_notify_foreign_merge_begin
svn_wc_notify_update_replace = _wc.svn_wc_notify_update_replace
svn_wc_notify_property_added = _wc.svn_wc_notify_property_added
svn_wc_notify_property_modified = _wc.svn_wc_notify_property_modified
svn_wc_notify_property_deleted = _wc.svn_wc_notify_property_deleted
svn_wc_notify_property_deleted_nonexistent = _wc.svn_wc_notify_property_deleted_nonexistent
svn_wc_notify_revprop_set = _wc.svn_wc_notify_revprop_set
svn_wc_notify_revprop_deleted = _wc.svn_wc_notify_revprop_deleted
svn_wc_notify_merge_completed = _wc.svn_wc_notify_merge_completed
svn_wc_notify_tree_conflict = _wc.svn_wc_notify_tree_conflict
svn_wc_notify_failed_external = _wc.svn_wc_notify_failed_external
svn_wc_notify_update_started = _wc.svn_wc_notify_update_started
svn_wc_notify_update_skip_obstruction = _wc.svn_wc_notify_update_skip_obstruction
svn_wc_notify_update_skip_working_only = _wc.svn_wc_notify_update_skip_working_only
svn_wc_notify_update_skip_access_denied = _wc.svn_wc_notify_update_skip_access_denied
svn_wc_notify_update_external_removed = _wc.svn_wc_notify_update_external_removed
svn_wc_notify_update_shadowed_add = _wc.svn_wc_notify_update_shadowed_add
svn_wc_notify_update_shadowed_update = _wc.svn_wc_notify_update_shadowed_update
svn_wc_notify_update_shadowed_delete = _wc.svn_wc_notify_update_shadowed_delete
svn_wc_notify_merge_record_info = _wc.svn_wc_notify_merge_record_info
svn_wc_notify_upgraded_path = _wc.svn_wc_notify_upgraded_path
svn_wc_notify_merge_record_info_begin = _wc.svn_wc_notify_merge_record_info_begin
svn_wc_notify_merge_elide_info = _wc.svn_wc_notify_merge_elide_info
svn_wc_notify_patch = _wc.svn_wc_notify_patch
svn_wc_notify_patch_applied_hunk = _wc.svn_wc_notify_patch_applied_hunk
svn_wc_notify_patch_rejected_hunk = _wc.svn_wc_notify_patch_rejected_hunk
svn_wc_notify_patch_hunk_already_applied = _wc.svn_wc_notify_patch_hunk_already_applied
svn_wc_notify_commit_copied = _wc.svn_wc_notify_commit_copied
svn_wc_notify_commit_copied_replaced = _wc.svn_wc_notify_commit_copied_replaced
svn_wc_notify_url_redirect = _wc.svn_wc_notify_url_redirect
svn_wc_notify_path_nonexistent = _wc.svn_wc_notify_path_nonexistent
svn_wc_notify_exclude = _wc.svn_wc_notify_exclude
svn_wc_notify_failed_conflict = _wc.svn_wc_notify_failed_conflict
svn_wc_notify_failed_missing = _wc.svn_wc_notify_failed_missing
svn_wc_notify_failed_out_of_date = _wc.svn_wc_notify_failed_out_of_date
svn_wc_notify_failed_no_parent = _wc.svn_wc_notify_failed_no_parent
svn_wc_notify_failed_locked = _wc.svn_wc_notify_failed_locked
svn_wc_notify_failed_forbidden_by_server = _wc.svn_wc_notify_failed_forbidden_by_server
svn_wc_notify_skip_conflicted = _wc.svn_wc_notify_skip_conflicted
svn_wc_notify_update_broken_lock = _wc.svn_wc_notify_update_broken_lock
svn_wc_notify_failed_obstruction = _wc.svn_wc_notify_failed_obstruction
svn_wc_notify_conflict_resolver_starting = _wc.svn_wc_notify_conflict_resolver_starting
svn_wc_notify_conflict_resolver_done = _wc.svn_wc_notify_conflict_resolver_done
svn_wc_notify_left_local_modifications = _wc.svn_wc_notify_left_local_modifications
svn_wc_notify_foreign_copy_begin = _wc.svn_wc_notify_foreign_copy_begin
svn_wc_notify_move_broken = _wc.svn_wc_notify_move_broken
# Content/property state constants used in notifications.
svn_wc_notify_state_inapplicable = _wc.svn_wc_notify_state_inapplicable
svn_wc_notify_state_unknown = _wc.svn_wc_notify_state_unknown
svn_wc_notify_state_unchanged = _wc.svn_wc_notify_state_unchanged
svn_wc_notify_state_missing = _wc.svn_wc_notify_state_missing
svn_wc_notify_state_obstructed = _wc.svn_wc_notify_state_obstructed
svn_wc_notify_state_changed = _wc.svn_wc_notify_state_changed
svn_wc_notify_state_merged = _wc.svn_wc_notify_state_merged
svn_wc_notify_state_conflicted = _wc.svn_wc_notify_state_conflicted
svn_wc_notify_state_source_missing = _wc.svn_wc_notify_state_source_missing
# Lock state constants used in notifications.
svn_wc_notify_lock_state_inapplicable = _wc.svn_wc_notify_lock_state_inapplicable
svn_wc_notify_lock_state_unknown = _wc.svn_wc_notify_lock_state_unknown
svn_wc_notify_lock_state_unchanged = _wc.svn_wc_notify_lock_state_unchanged
svn_wc_notify_lock_state_locked = _wc.svn_wc_notify_lock_state_locked
svn_wc_notify_lock_state_unlocked = _wc.svn_wc_notify_lock_state_unlocked
# SWIG-generated proxy for the C struct svn_wc_notify_t (svn_wc.h).
# Field reads/writes are dispatched through the __swig_getmethods__ /
# __swig_setmethods__ tables to the C accessor functions in _wc.
class svn_wc_notify_t:
    """Proxy of C svn_wc_notify_t struct"""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_wc_notify_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_wc_notify_t, name)
    __repr__ = _swig_repr
    # One C getter/setter pair per struct field.
    __swig_setmethods__["path"] = _wc.svn_wc_notify_t_path_set
    __swig_getmethods__["path"] = _wc.svn_wc_notify_t_path_get
    __swig_setmethods__["action"] = _wc.svn_wc_notify_t_action_set
    __swig_getmethods__["action"] = _wc.svn_wc_notify_t_action_get
    __swig_setmethods__["kind"] = _wc.svn_wc_notify_t_kind_set
    __swig_getmethods__["kind"] = _wc.svn_wc_notify_t_kind_get
    __swig_setmethods__["mime_type"] = _wc.svn_wc_notify_t_mime_type_set
    __swig_getmethods__["mime_type"] = _wc.svn_wc_notify_t_mime_type_get
    __swig_setmethods__["lock"] = _wc.svn_wc_notify_t_lock_set
    __swig_getmethods__["lock"] = _wc.svn_wc_notify_t_lock_get
    __swig_setmethods__["err"] = _wc.svn_wc_notify_t_err_set
    __swig_getmethods__["err"] = _wc.svn_wc_notify_t_err_get
    __swig_setmethods__["content_state"] = _wc.svn_wc_notify_t_content_state_set
    __swig_getmethods__["content_state"] = _wc.svn_wc_notify_t_content_state_get
    __swig_setmethods__["prop_state"] = _wc.svn_wc_notify_t_prop_state_set
    __swig_getmethods__["prop_state"] = _wc.svn_wc_notify_t_prop_state_get
    __swig_setmethods__["lock_state"] = _wc.svn_wc_notify_t_lock_state_set
    __swig_getmethods__["lock_state"] = _wc.svn_wc_notify_t_lock_state_get
    __swig_setmethods__["revision"] = _wc.svn_wc_notify_t_revision_set
    __swig_getmethods__["revision"] = _wc.svn_wc_notify_t_revision_get
    __swig_setmethods__["changelist_name"] = _wc.svn_wc_notify_t_changelist_name_set
    __swig_getmethods__["changelist_name"] = _wc.svn_wc_notify_t_changelist_name_get
    __swig_setmethods__["merge_range"] = _wc.svn_wc_notify_t_merge_range_set
    __swig_getmethods__["merge_range"] = _wc.svn_wc_notify_t_merge_range_get
    __swig_setmethods__["url"] = _wc.svn_wc_notify_t_url_set
    __swig_getmethods__["url"] = _wc.svn_wc_notify_t_url_get
    __swig_setmethods__["path_prefix"] = _wc.svn_wc_notify_t_path_prefix_set
    __swig_getmethods__["path_prefix"] = _wc.svn_wc_notify_t_path_prefix_get
    __swig_setmethods__["prop_name"] = _wc.svn_wc_notify_t_prop_name_set
    __swig_getmethods__["prop_name"] = _wc.svn_wc_notify_t_prop_name_get
    __swig_setmethods__["rev_props"] = _wc.svn_wc_notify_t_rev_props_set
    __swig_getmethods__["rev_props"] = _wc.svn_wc_notify_t_rev_props_get
    __swig_setmethods__["old_revision"] = _wc.svn_wc_notify_t_old_revision_set
    __swig_getmethods__["old_revision"] = _wc.svn_wc_notify_t_old_revision_get
    __swig_setmethods__["hunk_original_start"] = _wc.svn_wc_notify_t_hunk_original_start_set
    __swig_getmethods__["hunk_original_start"] = _wc.svn_wc_notify_t_hunk_original_start_get
    __swig_setmethods__["hunk_original_length"] = _wc.svn_wc_notify_t_hunk_original_length_set
    __swig_getmethods__["hunk_original_length"] = _wc.svn_wc_notify_t_hunk_original_length_get
    __swig_setmethods__["hunk_modified_start"] = _wc.svn_wc_notify_t_hunk_modified_start_set
    __swig_getmethods__["hunk_modified_start"] = _wc.svn_wc_notify_t_hunk_modified_start_get
    __swig_setmethods__["hunk_modified_length"] = _wc.svn_wc_notify_t_hunk_modified_length_set
    __swig_getmethods__["hunk_modified_length"] = _wc.svn_wc_notify_t_hunk_modified_length_get
    __swig_setmethods__["hunk_matched_line"] = _wc.svn_wc_notify_t_hunk_matched_line_set
    __swig_getmethods__["hunk_matched_line"] = _wc.svn_wc_notify_t_hunk_matched_line_get
    __swig_setmethods__["hunk_fuzz"] = _wc.svn_wc_notify_t_hunk_fuzz_set
    __swig_getmethods__["hunk_fuzz"] = _wc.svn_wc_notify_t_hunk_fuzz_get
    def set_parent_pool(self, parent_pool=None):
        """Create a new proxy object for svn_wc_notify_t"""
        import libsvn.core, weakref
        # Fall back to the global application pool when none is supplied.
        self.__dict__["_parent_pool"] = \
            parent_pool or libsvn.core.application_pool;
        if self.__dict__["_parent_pool"]:
            # Weakly reference the pool's validity flag so assert_valid()
            # can detect use of memory from a destroyed pool.
            self.__dict__["_is_valid"] = weakref.ref(
                self.__dict__["_parent_pool"]._is_valid)
    def assert_valid(self):
        """Assert that this object is using valid pool memory"""
        if "_is_valid" in self.__dict__:
            assert self.__dict__["_is_valid"](), "Variable has already been deleted"
    # NOTE: this def supersedes the lambda __getattr__ assigned above.
    def __getattr__(self, name):
        """Get an attribute from this object"""
        self.assert_valid()
        value = _swig_getattr(self, self.__class__, name)
        # Copy pool metadata from any value previously stored through
        # __setattr__ so nested proxies remain validity-checked.
        members = self.__dict__.get("_members")
        if members is not None:
            _copy_metadata_deep(value, members.get(name))
        _assert_valid_deep(value)
        return value
    # NOTE: this def supersedes the lambda __setattr__ assigned above.
    def __setattr__(self, name, value):
        """Set an attribute on this object"""
        self.assert_valid()
        # Remember the Python-side value so __getattr__ can restore its
        # metadata when the field is read back from the C struct.
        self.__dict__.setdefault("_members",{})[name] = value
        return _swig_setattr(self, self.__class__, name, value)
    def __init__(self):
        """__init__(svn_wc_notify_t self) -> svn_wc_notify_t"""
        this = _wc.new_svn_wc_notify_t()
        # Standard SWIG idiom: append to an existing 'this', else set it.
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _wc.delete_svn_wc_notify_t
    __del__ = lambda self : None;
# Register the proxy class with the C runtime so structs created on the C
# side are wrapped as svn_wc_notify_t instances.
svn_wc_notify_t_swigregister = _wc.svn_wc_notify_t_swigregister
svn_wc_notify_t_swigregister(svn_wc_notify_t)
def svn_wc_create_notify(*args):
    """svn_wc_create_notify(char const * path, svn_wc_notify_action_t action, apr_pool_t pool) -> svn_wc_notify_t

    Thin pass-through to the C implementation in the _wc extension module.
    """
    notify = _wc.svn_wc_create_notify(*args)
    return notify
def svn_wc_create_notify_url(*args):
    """svn_wc_create_notify_url(char const * url, svn_wc_notify_action_t action, apr_pool_t pool) -> svn_wc_notify_t

    Thin pass-through to the C implementation in the _wc extension module.
    """
    notify = _wc.svn_wc_create_notify_url(*args)
    return notify
def svn_wc_dup_notify(*args):
    """svn_wc_dup_notify(svn_wc_notify_t notify, apr_pool_t pool) -> svn_wc_notify_t

    Thin pass-through to the C implementation in the _wc extension module.
    """
    duplicate = _wc.svn_wc_dup_notify(*args)
    return duplicate
# Re-exported enum values from the _wc C extension module:
# svn_wc_conflict_action_t (incoming change that caused the conflict).
svn_wc_conflict_action_edit = _wc.svn_wc_conflict_action_edit
svn_wc_conflict_action_add = _wc.svn_wc_conflict_action_add
svn_wc_conflict_action_delete = _wc.svn_wc_conflict_action_delete
svn_wc_conflict_action_replace = _wc.svn_wc_conflict_action_replace
# svn_wc_conflict_reason_t (local state that conflicted with the change).
svn_wc_conflict_reason_edited = _wc.svn_wc_conflict_reason_edited
svn_wc_conflict_reason_obstructed = _wc.svn_wc_conflict_reason_obstructed
svn_wc_conflict_reason_deleted = _wc.svn_wc_conflict_reason_deleted
svn_wc_conflict_reason_missing = _wc.svn_wc_conflict_reason_missing
svn_wc_conflict_reason_unversioned = _wc.svn_wc_conflict_reason_unversioned
svn_wc_conflict_reason_added = _wc.svn_wc_conflict_reason_added
svn_wc_conflict_reason_replaced = _wc.svn_wc_conflict_reason_replaced
svn_wc_conflict_reason_moved_away = _wc.svn_wc_conflict_reason_moved_away
svn_wc_conflict_reason_moved_here = _wc.svn_wc_conflict_reason_moved_here
# svn_wc_conflict_kind_t (text, property, or tree conflict).
svn_wc_conflict_kind_text = _wc.svn_wc_conflict_kind_text
svn_wc_conflict_kind_property = _wc.svn_wc_conflict_kind_property
svn_wc_conflict_kind_tree = _wc.svn_wc_conflict_kind_tree
# svn_wc_operation_t (operation during which the conflict arose).
svn_wc_operation_none = _wc.svn_wc_operation_none
svn_wc_operation_update = _wc.svn_wc_operation_update
svn_wc_operation_switch = _wc.svn_wc_operation_switch
svn_wc_operation_merge = _wc.svn_wc_operation_merge
# SWIG-generated proxy for the C struct svn_wc_conflict_version_t
# (svn_wc.h).  Field access dispatches through the SWIG method tables to
# the C accessors in _wc.
class svn_wc_conflict_version_t:
    """Proxy of C svn_wc_conflict_version_t struct"""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_wc_conflict_version_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_wc_conflict_version_t, name)
    __repr__ = _swig_repr
    # One C getter/setter pair per struct field.
    __swig_setmethods__["repos_url"] = _wc.svn_wc_conflict_version_t_repos_url_set
    __swig_getmethods__["repos_url"] = _wc.svn_wc_conflict_version_t_repos_url_get
    __swig_setmethods__["peg_rev"] = _wc.svn_wc_conflict_version_t_peg_rev_set
    __swig_getmethods__["peg_rev"] = _wc.svn_wc_conflict_version_t_peg_rev_get
    __swig_setmethods__["path_in_repos"] = _wc.svn_wc_conflict_version_t_path_in_repos_set
    __swig_getmethods__["path_in_repos"] = _wc.svn_wc_conflict_version_t_path_in_repos_get
    __swig_setmethods__["node_kind"] = _wc.svn_wc_conflict_version_t_node_kind_set
    __swig_getmethods__["node_kind"] = _wc.svn_wc_conflict_version_t_node_kind_get
    __swig_setmethods__["repos_uuid"] = _wc.svn_wc_conflict_version_t_repos_uuid_set
    __swig_getmethods__["repos_uuid"] = _wc.svn_wc_conflict_version_t_repos_uuid_get
    def set_parent_pool(self, parent_pool=None):
        """Create a new proxy object for svn_wc_conflict_version_t"""
        import libsvn.core, weakref
        # Fall back to the global application pool when none is supplied.
        self.__dict__["_parent_pool"] = \
            parent_pool or libsvn.core.application_pool;
        if self.__dict__["_parent_pool"]:
            # Weak ref to the pool's validity flag; see assert_valid().
            self.__dict__["_is_valid"] = weakref.ref(
                self.__dict__["_parent_pool"]._is_valid)
    def assert_valid(self):
        """Assert that this object is using valid pool memory"""
        if "_is_valid" in self.__dict__:
            assert self.__dict__["_is_valid"](), "Variable has already been deleted"
    # NOTE: this def supersedes the lambda __getattr__ assigned above.
    def __getattr__(self, name):
        """Get an attribute from this object"""
        self.assert_valid()
        value = _swig_getattr(self, self.__class__, name)
        # Restore pool metadata recorded by __setattr__ for this field.
        members = self.__dict__.get("_members")
        if members is not None:
            _copy_metadata_deep(value, members.get(name))
        _assert_valid_deep(value)
        return value
    # NOTE: this def supersedes the lambda __setattr__ assigned above.
    def __setattr__(self, name, value):
        """Set an attribute on this object"""
        self.assert_valid()
        self.__dict__.setdefault("_members",{})[name] = value
        return _swig_setattr(self, self.__class__, name, value)
    def __init__(self):
        """__init__(svn_wc_conflict_version_t self) -> svn_wc_conflict_version_t"""
        this = _wc.new_svn_wc_conflict_version_t()
        # Standard SWIG idiom: append to an existing 'this', else set it.
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _wc.delete_svn_wc_conflict_version_t
    __del__ = lambda self : None;
# Register the proxy class with the C runtime.
svn_wc_conflict_version_t_swigregister = _wc.svn_wc_conflict_version_t_swigregister
svn_wc_conflict_version_t_swigregister(svn_wc_conflict_version_t)
def svn_wc_conflict_version_create2(*args):
    """svn_wc_conflict_version_create2(char const * repos_root_url, char const * repos_uuid, char const * repos_relpath,
    svn_revnum_t revision, svn_node_kind_t kind, apr_pool_t result_pool) -> svn_wc_conflict_version_t

    Thin pass-through to the C implementation in the _wc extension module.
    """
    version = _wc.svn_wc_conflict_version_create2(*args)
    return version
def svn_wc_conflict_version_create(*args):
    """svn_wc_conflict_version_create(char const * repos_url, char const * path_in_repos, svn_revnum_t peg_rev, svn_node_kind_t node_kind,
    apr_pool_t pool) -> svn_wc_conflict_version_t

    Thin pass-through to the C implementation in the _wc extension module.
    """
    version = _wc.svn_wc_conflict_version_create(*args)
    return version
def svn_wc_conflict_version_dup(*args):
    """svn_wc_conflict_version_dup(svn_wc_conflict_version_t version, apr_pool_t pool) -> svn_wc_conflict_version_t

    Thin pass-through to the C implementation in the _wc extension module.
    """
    duplicate = _wc.svn_wc_conflict_version_dup(*args)
    return duplicate
# SWIG-generated proxy for the C struct svn_wc_conflict_description2_t
# (svn_wc.h).  Field access dispatches through the SWIG method tables to
# the C accessors in _wc.
class svn_wc_conflict_description2_t:
    """Proxy of C svn_wc_conflict_description2_t struct"""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_wc_conflict_description2_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_wc_conflict_description2_t, name)
    __repr__ = _swig_repr
    # One C getter/setter pair per struct field.
    __swig_setmethods__["local_abspath"] = _wc.svn_wc_conflict_description2_t_local_abspath_set
    __swig_getmethods__["local_abspath"] = _wc.svn_wc_conflict_description2_t_local_abspath_get
    __swig_setmethods__["node_kind"] = _wc.svn_wc_conflict_description2_t_node_kind_set
    __swig_getmethods__["node_kind"] = _wc.svn_wc_conflict_description2_t_node_kind_get
    __swig_setmethods__["kind"] = _wc.svn_wc_conflict_description2_t_kind_set
    __swig_getmethods__["kind"] = _wc.svn_wc_conflict_description2_t_kind_get
    __swig_setmethods__["property_name"] = _wc.svn_wc_conflict_description2_t_property_name_set
    __swig_getmethods__["property_name"] = _wc.svn_wc_conflict_description2_t_property_name_get
    __swig_setmethods__["is_binary"] = _wc.svn_wc_conflict_description2_t_is_binary_set
    __swig_getmethods__["is_binary"] = _wc.svn_wc_conflict_description2_t_is_binary_get
    __swig_setmethods__["mime_type"] = _wc.svn_wc_conflict_description2_t_mime_type_set
    __swig_getmethods__["mime_type"] = _wc.svn_wc_conflict_description2_t_mime_type_get
    __swig_setmethods__["action"] = _wc.svn_wc_conflict_description2_t_action_set
    __swig_getmethods__["action"] = _wc.svn_wc_conflict_description2_t_action_get
    __swig_setmethods__["reason"] = _wc.svn_wc_conflict_description2_t_reason_set
    __swig_getmethods__["reason"] = _wc.svn_wc_conflict_description2_t_reason_get
    __swig_setmethods__["base_abspath"] = _wc.svn_wc_conflict_description2_t_base_abspath_set
    __swig_getmethods__["base_abspath"] = _wc.svn_wc_conflict_description2_t_base_abspath_get
    __swig_setmethods__["their_abspath"] = _wc.svn_wc_conflict_description2_t_their_abspath_set
    __swig_getmethods__["their_abspath"] = _wc.svn_wc_conflict_description2_t_their_abspath_get
    __swig_setmethods__["my_abspath"] = _wc.svn_wc_conflict_description2_t_my_abspath_set
    __swig_getmethods__["my_abspath"] = _wc.svn_wc_conflict_description2_t_my_abspath_get
    __swig_setmethods__["merged_file"] = _wc.svn_wc_conflict_description2_t_merged_file_set
    __swig_getmethods__["merged_file"] = _wc.svn_wc_conflict_description2_t_merged_file_get
    __swig_setmethods__["operation"] = _wc.svn_wc_conflict_description2_t_operation_set
    __swig_getmethods__["operation"] = _wc.svn_wc_conflict_description2_t_operation_get
    __swig_setmethods__["src_left_version"] = _wc.svn_wc_conflict_description2_t_src_left_version_set
    __swig_getmethods__["src_left_version"] = _wc.svn_wc_conflict_description2_t_src_left_version_get
    __swig_setmethods__["src_right_version"] = _wc.svn_wc_conflict_description2_t_src_right_version_set
    __swig_getmethods__["src_right_version"] = _wc.svn_wc_conflict_description2_t_src_right_version_get
    def set_parent_pool(self, parent_pool=None):
        """Create a new proxy object for svn_wc_conflict_description2_t"""
        import libsvn.core, weakref
        # Fall back to the global application pool when none is supplied.
        self.__dict__["_parent_pool"] = \
            parent_pool or libsvn.core.application_pool;
        if self.__dict__["_parent_pool"]:
            # Weak ref to the pool's validity flag; see assert_valid().
            self.__dict__["_is_valid"] = weakref.ref(
                self.__dict__["_parent_pool"]._is_valid)
    def assert_valid(self):
        """Assert that this object is using valid pool memory"""
        if "_is_valid" in self.__dict__:
            assert self.__dict__["_is_valid"](), "Variable has already been deleted"
    # NOTE: this def supersedes the lambda __getattr__ assigned above.
    def __getattr__(self, name):
        """Get an attribute from this object"""
        self.assert_valid()
        value = _swig_getattr(self, self.__class__, name)
        # Restore pool metadata recorded by __setattr__ for this field.
        members = self.__dict__.get("_members")
        if members is not None:
            _copy_metadata_deep(value, members.get(name))
        _assert_valid_deep(value)
        return value
    # NOTE: this def supersedes the lambda __setattr__ assigned above.
    def __setattr__(self, name, value):
        """Set an attribute on this object"""
        self.assert_valid()
        self.__dict__.setdefault("_members",{})[name] = value
        return _swig_setattr(self, self.__class__, name, value)
    def __init__(self):
        """__init__(svn_wc_conflict_description2_t self) -> svn_wc_conflict_description2_t"""
        this = _wc.new_svn_wc_conflict_description2_t()
        # Standard SWIG idiom: append to an existing 'this', else set it.
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _wc.delete_svn_wc_conflict_description2_t
    __del__ = lambda self : None;
# Register the proxy class with the C runtime.
svn_wc_conflict_description2_t_swigregister = _wc.svn_wc_conflict_description2_t_swigregister
svn_wc_conflict_description2_t_swigregister(svn_wc_conflict_description2_t)
# SWIG-generated proxy for the (older) C struct svn_wc_conflict_description_t
# (svn_wc.h).  Field access dispatches through the SWIG method tables to
# the C accessors in _wc.
class svn_wc_conflict_description_t:
    """Proxy of C svn_wc_conflict_description_t struct"""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_wc_conflict_description_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_wc_conflict_description_t, name)
    __repr__ = _swig_repr
    # One C getter/setter pair per struct field.
    __swig_setmethods__["path"] = _wc.svn_wc_conflict_description_t_path_set
    __swig_getmethods__["path"] = _wc.svn_wc_conflict_description_t_path_get
    __swig_setmethods__["node_kind"] = _wc.svn_wc_conflict_description_t_node_kind_set
    __swig_getmethods__["node_kind"] = _wc.svn_wc_conflict_description_t_node_kind_get
    __swig_setmethods__["kind"] = _wc.svn_wc_conflict_description_t_kind_set
    __swig_getmethods__["kind"] = _wc.svn_wc_conflict_description_t_kind_get
    __swig_setmethods__["property_name"] = _wc.svn_wc_conflict_description_t_property_name_set
    __swig_getmethods__["property_name"] = _wc.svn_wc_conflict_description_t_property_name_get
    __swig_setmethods__["is_binary"] = _wc.svn_wc_conflict_description_t_is_binary_set
    __swig_getmethods__["is_binary"] = _wc.svn_wc_conflict_description_t_is_binary_get
    __swig_setmethods__["mime_type"] = _wc.svn_wc_conflict_description_t_mime_type_set
    __swig_getmethods__["mime_type"] = _wc.svn_wc_conflict_description_t_mime_type_get
    __swig_setmethods__["access"] = _wc.svn_wc_conflict_description_t_access_set
    __swig_getmethods__["access"] = _wc.svn_wc_conflict_description_t_access_get
    __swig_setmethods__["action"] = _wc.svn_wc_conflict_description_t_action_set
    __swig_getmethods__["action"] = _wc.svn_wc_conflict_description_t_action_get
    __swig_setmethods__["reason"] = _wc.svn_wc_conflict_description_t_reason_set
    __swig_getmethods__["reason"] = _wc.svn_wc_conflict_description_t_reason_get
    __swig_setmethods__["base_file"] = _wc.svn_wc_conflict_description_t_base_file_set
    __swig_getmethods__["base_file"] = _wc.svn_wc_conflict_description_t_base_file_get
    __swig_setmethods__["their_file"] = _wc.svn_wc_conflict_description_t_their_file_set
    __swig_getmethods__["their_file"] = _wc.svn_wc_conflict_description_t_their_file_get
    __swig_setmethods__["my_file"] = _wc.svn_wc_conflict_description_t_my_file_set
    __swig_getmethods__["my_file"] = _wc.svn_wc_conflict_description_t_my_file_get
    __swig_setmethods__["merged_file"] = _wc.svn_wc_conflict_description_t_merged_file_set
    __swig_getmethods__["merged_file"] = _wc.svn_wc_conflict_description_t_merged_file_get
    __swig_setmethods__["operation"] = _wc.svn_wc_conflict_description_t_operation_set
    __swig_getmethods__["operation"] = _wc.svn_wc_conflict_description_t_operation_get
    __swig_setmethods__["src_left_version"] = _wc.svn_wc_conflict_description_t_src_left_version_set
    __swig_getmethods__["src_left_version"] = _wc.svn_wc_conflict_description_t_src_left_version_get
    __swig_setmethods__["src_right_version"] = _wc.svn_wc_conflict_description_t_src_right_version_set
    __swig_getmethods__["src_right_version"] = _wc.svn_wc_conflict_description_t_src_right_version_get
    def set_parent_pool(self, parent_pool=None):
        """Create a new proxy object for svn_wc_conflict_description_t"""
        import libsvn.core, weakref
        # Fall back to the global application pool when none is supplied.
        self.__dict__["_parent_pool"] = \
            parent_pool or libsvn.core.application_pool;
        if self.__dict__["_parent_pool"]:
            # Weak ref to the pool's validity flag; see assert_valid().
            self.__dict__["_is_valid"] = weakref.ref(
                self.__dict__["_parent_pool"]._is_valid)
    def assert_valid(self):
        """Assert that this object is using valid pool memory"""
        if "_is_valid" in self.__dict__:
            assert self.__dict__["_is_valid"](), "Variable has already been deleted"
    # NOTE: this def supersedes the lambda __getattr__ assigned above.
    def __getattr__(self, name):
        """Get an attribute from this object"""
        self.assert_valid()
        value = _swig_getattr(self, self.__class__, name)
        # Restore pool metadata recorded by __setattr__ for this field.
        members = self.__dict__.get("_members")
        if members is not None:
            _copy_metadata_deep(value, members.get(name))
        _assert_valid_deep(value)
        return value
    # NOTE: this def supersedes the lambda __setattr__ assigned above.
    def __setattr__(self, name, value):
        """Set an attribute on this object"""
        self.assert_valid()
        self.__dict__.setdefault("_members",{})[name] = value
        return _swig_setattr(self, self.__class__, name, value)
    def __init__(self):
        """__init__(svn_wc_conflict_description_t self) -> svn_wc_conflict_description_t"""
        this = _wc.new_svn_wc_conflict_description_t()
        # Standard SWIG idiom: append to an existing 'this', else set it.
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _wc.delete_svn_wc_conflict_description_t
    __del__ = lambda self : None;
# Register the proxy class with the C runtime.
svn_wc_conflict_description_t_swigregister = _wc.svn_wc_conflict_description_t_swigregister
svn_wc_conflict_description_t_swigregister(svn_wc_conflict_description_t)
def svn_wc_conflict_description_create_text2(*args):
    """svn_wc_conflict_description_create_text2(char const * local_abspath, apr_pool_t result_pool) -> svn_wc_conflict_description2_t

    Thin pass-through to the C implementation in the _wc extension module.
    """
    desc = _wc.svn_wc_conflict_description_create_text2(*args)
    return desc
def svn_wc_conflict_description_create_text(*args):
    """svn_wc_conflict_description_create_text(char const * path, svn_wc_adm_access_t * adm_access, apr_pool_t pool) -> svn_wc_conflict_description_t

    Thin pass-through to the C implementation in the _wc extension module.
    """
    desc = _wc.svn_wc_conflict_description_create_text(*args)
    return desc
def svn_wc_conflict_description_create_prop2(*args):
    """svn_wc_conflict_description_create_prop2(char const * local_abspath, svn_node_kind_t node_kind, char const * property_name,
    apr_pool_t result_pool) -> svn_wc_conflict_description2_t

    Thin pass-through to the C implementation in the _wc extension module.
    """
    desc = _wc.svn_wc_conflict_description_create_prop2(*args)
    return desc
def svn_wc_conflict_description_create_prop(*args):
    """svn_wc_conflict_description_create_prop(char const * path, svn_wc_adm_access_t * adm_access, svn_node_kind_t node_kind, char const * property_name,
    apr_pool_t pool) -> svn_wc_conflict_description_t

    Thin pass-through to the C implementation in the _wc extension module.
    """
    desc = _wc.svn_wc_conflict_description_create_prop(*args)
    return desc
def svn_wc_conflict_description_create_tree2(*args):
    """svn_wc_conflict_description_create_tree2(char const * local_abspath, svn_node_kind_t node_kind, svn_wc_operation_t operation,
    svn_wc_conflict_version_t src_left_version, svn_wc_conflict_version_t src_right_version,
    apr_pool_t result_pool) -> svn_wc_conflict_description2_t

    Thin pass-through to the C implementation in the _wc extension module.
    """
    desc = _wc.svn_wc_conflict_description_create_tree2(*args)
    return desc
def svn_wc_conflict_description_create_tree(*args):
    """svn_wc_conflict_description_create_tree(char const * path, svn_wc_adm_access_t * adm_access, svn_node_kind_t node_kind, svn_wc_operation_t operation,
    svn_wc_conflict_version_t src_left_version, svn_wc_conflict_version_t src_right_version,
    apr_pool_t pool) -> svn_wc_conflict_description_t

    Thin pass-through to the C implementation in the _wc extension module.
    """
    desc = _wc.svn_wc_conflict_description_create_tree(*args)
    return desc
def svn_wc__conflict_description2_dup(*args):
    """svn_wc__conflict_description2_dup(svn_wc_conflict_description2_t conflict, apr_pool_t result_pool) -> svn_wc_conflict_description2_t

    Thin pass-through to the C implementation in the _wc extension module.
    """
    duplicate = _wc.svn_wc__conflict_description2_dup(*args)
    return duplicate
# Re-exported enum values from the _wc C extension module:
# svn_wc_conflict_choice_t (resolution chosen by a conflict callback).
svn_wc_conflict_choose_postpone = _wc.svn_wc_conflict_choose_postpone
svn_wc_conflict_choose_base = _wc.svn_wc_conflict_choose_base
svn_wc_conflict_choose_theirs_full = _wc.svn_wc_conflict_choose_theirs_full
svn_wc_conflict_choose_mine_full = _wc.svn_wc_conflict_choose_mine_full
svn_wc_conflict_choose_theirs_conflict = _wc.svn_wc_conflict_choose_theirs_conflict
svn_wc_conflict_choose_mine_conflict = _wc.svn_wc_conflict_choose_mine_conflict
svn_wc_conflict_choose_merged = _wc.svn_wc_conflict_choose_merged
svn_wc_conflict_choose_unspecified = _wc.svn_wc_conflict_choose_unspecified
# SWIG-generated proxy for the C struct svn_wc_conflict_result_t
# (svn_wc.h).  Field access dispatches through the SWIG method tables to
# the C accessors in _wc.
class svn_wc_conflict_result_t:
    """Proxy of C svn_wc_conflict_result_t struct"""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_wc_conflict_result_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_wc_conflict_result_t, name)
    __repr__ = _swig_repr
    # One C getter/setter pair per struct field.
    __swig_setmethods__["choice"] = _wc.svn_wc_conflict_result_t_choice_set
    __swig_getmethods__["choice"] = _wc.svn_wc_conflict_result_t_choice_get
    __swig_setmethods__["merged_file"] = _wc.svn_wc_conflict_result_t_merged_file_set
    __swig_getmethods__["merged_file"] = _wc.svn_wc_conflict_result_t_merged_file_get
    __swig_setmethods__["save_merged"] = _wc.svn_wc_conflict_result_t_save_merged_set
    __swig_getmethods__["save_merged"] = _wc.svn_wc_conflict_result_t_save_merged_get
    def set_parent_pool(self, parent_pool=None):
        """Create a new proxy object for svn_wc_conflict_result_t"""
        import libsvn.core, weakref
        # Fall back to the global application pool when none is supplied.
        self.__dict__["_parent_pool"] = \
            parent_pool or libsvn.core.application_pool;
        if self.__dict__["_parent_pool"]:
            # Weak ref to the pool's validity flag; see assert_valid().
            self.__dict__["_is_valid"] = weakref.ref(
                self.__dict__["_parent_pool"]._is_valid)
    def assert_valid(self):
        """Assert that this object is using valid pool memory"""
        if "_is_valid" in self.__dict__:
            assert self.__dict__["_is_valid"](), "Variable has already been deleted"
    # NOTE: this def supersedes the lambda __getattr__ assigned above.
    def __getattr__(self, name):
        """Get an attribute from this object"""
        self.assert_valid()
        value = _swig_getattr(self, self.__class__, name)
        # Restore pool metadata recorded by __setattr__ for this field.
        members = self.__dict__.get("_members")
        if members is not None:
            _copy_metadata_deep(value, members.get(name))
        _assert_valid_deep(value)
        return value
    # NOTE: this def supersedes the lambda __setattr__ assigned above.
    def __setattr__(self, name, value):
        """Set an attribute on this object"""
        self.assert_valid()
        self.__dict__.setdefault("_members",{})[name] = value
        return _swig_setattr(self, self.__class__, name, value)
    def __init__(self):
        """__init__(svn_wc_conflict_result_t self) -> svn_wc_conflict_result_t"""
        this = _wc.new_svn_wc_conflict_result_t()
        # Standard SWIG idiom: append to an existing 'this', else set it.
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _wc.delete_svn_wc_conflict_result_t
    __del__ = lambda self : None;
# Register the proxy class with the C runtime.
svn_wc_conflict_result_t_swigregister = _wc.svn_wc_conflict_result_t_swigregister
svn_wc_conflict_result_t_swigregister(svn_wc_conflict_result_t)
def svn_wc_create_conflict_result(*args):
    """svn_wc_create_conflict_result(svn_wc_conflict_choice_t choice, char const * merged_file, apr_pool_t pool) -> svn_wc_conflict_result_t

    Thin pass-through to the C implementation in the _wc extension module.
    """
    result = _wc.svn_wc_create_conflict_result(*args)
    return result
# SWIG-generated proxy for the C struct svn_wc_diff_callbacks4_t
# (svn_wc.h): a table of diff callback function pointers.  Each callback
# field has a C getter/setter pair, and a same-named method that invokes
# the stored callback through a module-level helper.
class svn_wc_diff_callbacks4_t:
    """Proxy of C svn_wc_diff_callbacks4_t struct"""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_wc_diff_callbacks4_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_wc_diff_callbacks4_t, name)
    __repr__ = _swig_repr
    # One C getter/setter pair per callback field.
    __swig_setmethods__["file_opened"] = _wc.svn_wc_diff_callbacks4_t_file_opened_set
    __swig_getmethods__["file_opened"] = _wc.svn_wc_diff_callbacks4_t_file_opened_get
    __swig_setmethods__["file_changed"] = _wc.svn_wc_diff_callbacks4_t_file_changed_set
    __swig_getmethods__["file_changed"] = _wc.svn_wc_diff_callbacks4_t_file_changed_get
    __swig_setmethods__["file_added"] = _wc.svn_wc_diff_callbacks4_t_file_added_set
    __swig_getmethods__["file_added"] = _wc.svn_wc_diff_callbacks4_t_file_added_get
    __swig_setmethods__["file_deleted"] = _wc.svn_wc_diff_callbacks4_t_file_deleted_set
    __swig_getmethods__["file_deleted"] = _wc.svn_wc_diff_callbacks4_t_file_deleted_get
    __swig_setmethods__["dir_deleted"] = _wc.svn_wc_diff_callbacks4_t_dir_deleted_set
    __swig_getmethods__["dir_deleted"] = _wc.svn_wc_diff_callbacks4_t_dir_deleted_get
    __swig_setmethods__["dir_opened"] = _wc.svn_wc_diff_callbacks4_t_dir_opened_set
    __swig_getmethods__["dir_opened"] = _wc.svn_wc_diff_callbacks4_t_dir_opened_get
    __swig_setmethods__["dir_added"] = _wc.svn_wc_diff_callbacks4_t_dir_added_set
    __swig_getmethods__["dir_added"] = _wc.svn_wc_diff_callbacks4_t_dir_added_get
    __swig_setmethods__["dir_props_changed"] = _wc.svn_wc_diff_callbacks4_t_dir_props_changed_set
    __swig_getmethods__["dir_props_changed"] = _wc.svn_wc_diff_callbacks4_t_dir_props_changed_get
    __swig_setmethods__["dir_closed"] = _wc.svn_wc_diff_callbacks4_t_dir_closed_set
    __swig_getmethods__["dir_closed"] = _wc.svn_wc_diff_callbacks4_t_dir_closed_get
    def set_parent_pool(self, parent_pool=None):
        """Create a new proxy object for svn_wc_diff_callbacks4_t"""
        import libsvn.core, weakref
        # Fall back to the global application pool when none is supplied.
        self.__dict__["_parent_pool"] = \
            parent_pool or libsvn.core.application_pool;
        if self.__dict__["_parent_pool"]:
            # Weak ref to the pool's validity flag; see assert_valid().
            self.__dict__["_is_valid"] = weakref.ref(
                self.__dict__["_parent_pool"]._is_valid)
    def assert_valid(self):
        """Assert that this object is using valid pool memory"""
        if "_is_valid" in self.__dict__:
            assert self.__dict__["_is_valid"](), "Variable has already been deleted"
    # NOTE: this def supersedes the lambda __getattr__ assigned above.
    def __getattr__(self, name):
        """Get an attribute from this object"""
        self.assert_valid()
        value = _swig_getattr(self, self.__class__, name)
        # Restore pool metadata recorded by __setattr__ for this field.
        members = self.__dict__.get("_members")
        if members is not None:
            _copy_metadata_deep(value, members.get(name))
        _assert_valid_deep(value)
        return value
    # NOTE: this def supersedes the lambda __setattr__ assigned above.
    def __setattr__(self, name, value):
        """Set an attribute on this object"""
        self.assert_valid()
        self.__dict__.setdefault("_members",{})[name] = value
        return _swig_setattr(self, self.__class__, name, value)
    # Convenience wrappers: invoke the stored C callbacks through the
    # module-level svn_wc_diff_callbacks4_invoke_* helpers.
    def file_opened(self, *args):
        return svn_wc_diff_callbacks4_invoke_file_opened(self, *args)
    def file_changed(self, *args):
        return svn_wc_diff_callbacks4_invoke_file_changed(self, *args)
    def file_added(self, *args):
        return svn_wc_diff_callbacks4_invoke_file_added(self, *args)
    def file_deleted(self, *args):
        return svn_wc_diff_callbacks4_invoke_file_deleted(self, *args)
    def dir_deleted(self, *args):
        return svn_wc_diff_callbacks4_invoke_dir_deleted(self, *args)
    def dir_opened(self, *args):
        return svn_wc_diff_callbacks4_invoke_dir_opened(self, *args)
    def dir_added(self, *args):
        return svn_wc_diff_callbacks4_invoke_dir_added(self, *args)
    def dir_props_changed(self, *args):
        return svn_wc_diff_callbacks4_invoke_dir_props_changed(self, *args)
    def dir_closed(self, *args):
        return svn_wc_diff_callbacks4_invoke_dir_closed(self, *args)
    def __init__(self):
        """__init__(svn_wc_diff_callbacks4_t self) -> svn_wc_diff_callbacks4_t"""
        this = _wc.new_svn_wc_diff_callbacks4_t()
        # Standard SWIG idiom: append to an existing 'this', else set it.
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _wc.delete_svn_wc_diff_callbacks4_t
    __del__ = lambda self : None;
# Register the proxy class with the C runtime.
svn_wc_diff_callbacks4_t_swigregister = _wc.svn_wc_diff_callbacks4_t_swigregister
svn_wc_diff_callbacks4_t_swigregister(svn_wc_diff_callbacks4_t)
# SWIG-generated proxy for the (older) C struct svn_wc_diff_callbacks3_t
# (svn_wc.h): a table of diff callback function pointers.  Each callback
# field has a C getter/setter pair, and a same-named method that invokes
# the stored callback through a module-level helper.
class svn_wc_diff_callbacks3_t:
    """Proxy of C svn_wc_diff_callbacks3_t struct"""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_wc_diff_callbacks3_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_wc_diff_callbacks3_t, name)
    __repr__ = _swig_repr
    # One C getter/setter pair per callback field.
    __swig_setmethods__["file_changed"] = _wc.svn_wc_diff_callbacks3_t_file_changed_set
    __swig_getmethods__["file_changed"] = _wc.svn_wc_diff_callbacks3_t_file_changed_get
    __swig_setmethods__["file_added"] = _wc.svn_wc_diff_callbacks3_t_file_added_set
    __swig_getmethods__["file_added"] = _wc.svn_wc_diff_callbacks3_t_file_added_get
    __swig_setmethods__["file_deleted"] = _wc.svn_wc_diff_callbacks3_t_file_deleted_set
    __swig_getmethods__["file_deleted"] = _wc.svn_wc_diff_callbacks3_t_file_deleted_get
    __swig_setmethods__["dir_added"] = _wc.svn_wc_diff_callbacks3_t_dir_added_set
    __swig_getmethods__["dir_added"] = _wc.svn_wc_diff_callbacks3_t_dir_added_get
    __swig_setmethods__["dir_deleted"] = _wc.svn_wc_diff_callbacks3_t_dir_deleted_set
    __swig_getmethods__["dir_deleted"] = _wc.svn_wc_diff_callbacks3_t_dir_deleted_get
    __swig_setmethods__["dir_props_changed"] = _wc.svn_wc_diff_callbacks3_t_dir_props_changed_set
    __swig_getmethods__["dir_props_changed"] = _wc.svn_wc_diff_callbacks3_t_dir_props_changed_get
    __swig_setmethods__["dir_opened"] = _wc.svn_wc_diff_callbacks3_t_dir_opened_set
    __swig_getmethods__["dir_opened"] = _wc.svn_wc_diff_callbacks3_t_dir_opened_get
    __swig_setmethods__["dir_closed"] = _wc.svn_wc_diff_callbacks3_t_dir_closed_set
    __swig_getmethods__["dir_closed"] = _wc.svn_wc_diff_callbacks3_t_dir_closed_get
    def set_parent_pool(self, parent_pool=None):
        """Create a new proxy object for svn_wc_diff_callbacks3_t"""
        import libsvn.core, weakref
        # Fall back to the global application pool when none is supplied.
        self.__dict__["_parent_pool"] = \
            parent_pool or libsvn.core.application_pool;
        if self.__dict__["_parent_pool"]:
            # Weak ref to the pool's validity flag; see assert_valid().
            self.__dict__["_is_valid"] = weakref.ref(
                self.__dict__["_parent_pool"]._is_valid)
    def assert_valid(self):
        """Assert that this object is using valid pool memory"""
        if "_is_valid" in self.__dict__:
            assert self.__dict__["_is_valid"](), "Variable has already been deleted"
    # NOTE: this def supersedes the lambda __getattr__ assigned above.
    def __getattr__(self, name):
        """Get an attribute from this object"""
        self.assert_valid()
        value = _swig_getattr(self, self.__class__, name)
        # Restore pool metadata recorded by __setattr__ for this field.
        members = self.__dict__.get("_members")
        if members is not None:
            _copy_metadata_deep(value, members.get(name))
        _assert_valid_deep(value)
        return value
    # NOTE: this def supersedes the lambda __setattr__ assigned above.
    def __setattr__(self, name, value):
        """Set an attribute on this object"""
        self.assert_valid()
        self.__dict__.setdefault("_members",{})[name] = value
        return _swig_setattr(self, self.__class__, name, value)
    # Convenience wrappers: invoke the stored C callbacks through the
    # module-level svn_wc_diff_callbacks3_invoke_* helpers.
    def file_changed(self, *args):
        return svn_wc_diff_callbacks3_invoke_file_changed(self, *args)
    def file_added(self, *args):
        return svn_wc_diff_callbacks3_invoke_file_added(self, *args)
    def file_deleted(self, *args):
        return svn_wc_diff_callbacks3_invoke_file_deleted(self, *args)
    def dir_added(self, *args):
        return svn_wc_diff_callbacks3_invoke_dir_added(self, *args)
    def dir_deleted(self, *args):
        return svn_wc_diff_callbacks3_invoke_dir_deleted(self, *args)
    def dir_props_changed(self, *args):
        return svn_wc_diff_callbacks3_invoke_dir_props_changed(self, *args)
    def dir_opened(self, *args):
        return svn_wc_diff_callbacks3_invoke_dir_opened(self, *args)
    def dir_closed(self, *args):
        return svn_wc_diff_callbacks3_invoke_dir_closed(self, *args)
    def __init__(self):
        """__init__(svn_wc_diff_callbacks3_t self) -> svn_wc_diff_callbacks3_t"""
        this = _wc.new_svn_wc_diff_callbacks3_t()
        # Standard SWIG idiom: append to an existing 'this', else set it.
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _wc.delete_svn_wc_diff_callbacks3_t
    __del__ = lambda self : None;
# Register the proxy class with the C runtime.
svn_wc_diff_callbacks3_t_swigregister = _wc.svn_wc_diff_callbacks3_t_swigregister
svn_wc_diff_callbacks3_t_swigregister(svn_wc_diff_callbacks3_t)
class svn_wc_diff_callbacks2_t:
    """Proxy of C svn_wc_diff_callbacks2_t struct"""
    # SWIG proxy machinery: per-field setter/getter tables consulted by
    # _swig_setattr/_swig_getattr to reach the members of the C struct.
    # (The lambda __setattr__/__getattr__ here are superseded by the defs
    # further down; both forms are emitted by the SWIG generator.)
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_wc_diff_callbacks2_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_wc_diff_callbacks2_t, name)
    __repr__ = _swig_repr
    # Function-pointer fields of the C callback vtable.
    __swig_setmethods__["file_changed"] = _wc.svn_wc_diff_callbacks2_t_file_changed_set
    __swig_getmethods__["file_changed"] = _wc.svn_wc_diff_callbacks2_t_file_changed_get
    __swig_setmethods__["file_added"] = _wc.svn_wc_diff_callbacks2_t_file_added_set
    __swig_getmethods__["file_added"] = _wc.svn_wc_diff_callbacks2_t_file_added_get
    __swig_setmethods__["file_deleted"] = _wc.svn_wc_diff_callbacks2_t_file_deleted_set
    __swig_getmethods__["file_deleted"] = _wc.svn_wc_diff_callbacks2_t_file_deleted_get
    __swig_setmethods__["dir_added"] = _wc.svn_wc_diff_callbacks2_t_dir_added_set
    __swig_getmethods__["dir_added"] = _wc.svn_wc_diff_callbacks2_t_dir_added_get
    __swig_setmethods__["dir_deleted"] = _wc.svn_wc_diff_callbacks2_t_dir_deleted_set
    __swig_getmethods__["dir_deleted"] = _wc.svn_wc_diff_callbacks2_t_dir_deleted_get
    __swig_setmethods__["dir_props_changed"] = _wc.svn_wc_diff_callbacks2_t_dir_props_changed_set
    __swig_getmethods__["dir_props_changed"] = _wc.svn_wc_diff_callbacks2_t_dir_props_changed_get
    def set_parent_pool(self, parent_pool=None):
        """Create a new proxy object for svn_wc_diff_callbacks2_t"""
        import libsvn.core, weakref
        # Default to the global application pool when none is supplied.
        self.__dict__["_parent_pool"] = \
            parent_pool or libsvn.core.application_pool;
        if self.__dict__["_parent_pool"]:
            # Weakly track pool liveness so assert_valid() can detect when
            # the pool backing this object's C memory has been destroyed.
            self.__dict__["_is_valid"] = weakref.ref(
                self.__dict__["_parent_pool"]._is_valid)
    def assert_valid(self):
        """Assert that this object is using valid pool memory"""
        if "_is_valid" in self.__dict__:
            assert self.__dict__["_is_valid"](), "Variable has already been deleted"
    def __getattr__(self, name):
        """Get an attribute from this object"""
        self.assert_valid()
        value = _swig_getattr(self, self.__class__, name)
        # Restore pool metadata recorded by __setattr__, if any.
        members = self.__dict__.get("_members")
        if members is not None:
            _copy_metadata_deep(value, members.get(name))
        _assert_valid_deep(value)
        return value
    def __setattr__(self, name, value):
        """Set an attribute on this object"""
        self.assert_valid()
        self.__dict__.setdefault("_members",{})[name] = value
        return _swig_setattr(self, self.__class__, name, value)
    # Callback delegations to the module-level invoke_* helpers, which call
    # through the corresponding C function pointers.
    def file_changed(self, *args):
        return svn_wc_diff_callbacks2_invoke_file_changed(self, *args)
    def file_added(self, *args):
        return svn_wc_diff_callbacks2_invoke_file_added(self, *args)
    def file_deleted(self, *args):
        return svn_wc_diff_callbacks2_invoke_file_deleted(self, *args)
    def dir_added(self, *args):
        return svn_wc_diff_callbacks2_invoke_dir_added(self, *args)
    def dir_deleted(self, *args):
        return svn_wc_diff_callbacks2_invoke_dir_deleted(self, *args)
    def dir_props_changed(self, *args):
        return svn_wc_diff_callbacks2_invoke_dir_props_changed(self, *args)
    def __init__(self):
        """__init__(svn_wc_diff_callbacks2_t self) -> svn_wc_diff_callbacks2_t"""
        this = _wc.new_svn_wc_diff_callbacks2_t()
        # self.this may be a list (older SWIG runtimes) or a plain attribute.
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _wc.delete_svn_wc_diff_callbacks2_t
    __del__ = lambda self : None;
# Register the proxy class with the SWIG runtime type system.
svn_wc_diff_callbacks2_t_swigregister = _wc.svn_wc_diff_callbacks2_t_swigregister
svn_wc_diff_callbacks2_t_swigregister(svn_wc_diff_callbacks2_t)
class svn_wc_diff_callbacks_t:
    """Proxy of C svn_wc_diff_callbacks_t struct"""
    # SWIG proxy machinery: per-field setter/getter tables consulted by
    # _swig_setattr/_swig_getattr to reach the members of the C struct.
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_wc_diff_callbacks_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_wc_diff_callbacks_t, name)
    __repr__ = _swig_repr
    # Function-pointer fields of the C callback vtable.
    __swig_setmethods__["file_changed"] = _wc.svn_wc_diff_callbacks_t_file_changed_set
    __swig_getmethods__["file_changed"] = _wc.svn_wc_diff_callbacks_t_file_changed_get
    __swig_setmethods__["file_added"] = _wc.svn_wc_diff_callbacks_t_file_added_set
    __swig_getmethods__["file_added"] = _wc.svn_wc_diff_callbacks_t_file_added_get
    __swig_setmethods__["file_deleted"] = _wc.svn_wc_diff_callbacks_t_file_deleted_set
    __swig_getmethods__["file_deleted"] = _wc.svn_wc_diff_callbacks_t_file_deleted_get
    __swig_setmethods__["dir_added"] = _wc.svn_wc_diff_callbacks_t_dir_added_set
    __swig_getmethods__["dir_added"] = _wc.svn_wc_diff_callbacks_t_dir_added_get
    __swig_setmethods__["dir_deleted"] = _wc.svn_wc_diff_callbacks_t_dir_deleted_set
    __swig_getmethods__["dir_deleted"] = _wc.svn_wc_diff_callbacks_t_dir_deleted_get
    __swig_setmethods__["props_changed"] = _wc.svn_wc_diff_callbacks_t_props_changed_set
    __swig_getmethods__["props_changed"] = _wc.svn_wc_diff_callbacks_t_props_changed_get
    def set_parent_pool(self, parent_pool=None):
        """Create a new proxy object for svn_wc_diff_callbacks_t"""
        import libsvn.core, weakref
        # Default to the global application pool when none is supplied.
        self.__dict__["_parent_pool"] = \
            parent_pool or libsvn.core.application_pool;
        if self.__dict__["_parent_pool"]:
            # Weakly track pool liveness for later validity checks.
            self.__dict__["_is_valid"] = weakref.ref(
                self.__dict__["_parent_pool"]._is_valid)
    def assert_valid(self):
        """Assert that this object is using valid pool memory"""
        if "_is_valid" in self.__dict__:
            assert self.__dict__["_is_valid"](), "Variable has already been deleted"
    def __getattr__(self, name):
        """Get an attribute from this object"""
        self.assert_valid()
        value = _swig_getattr(self, self.__class__, name)
        # Restore pool metadata recorded by __setattr__, if any.
        members = self.__dict__.get("_members")
        if members is not None:
            _copy_metadata_deep(value, members.get(name))
        _assert_valid_deep(value)
        return value
    def __setattr__(self, name, value):
        """Set an attribute on this object"""
        self.assert_valid()
        self.__dict__.setdefault("_members",{})[name] = value
        return _swig_setattr(self, self.__class__, name, value)
    # Callback delegations to the module-level invoke_* helpers.
    def file_changed(self, *args):
        return svn_wc_diff_callbacks_invoke_file_changed(self, *args)
    def file_added(self, *args):
        return svn_wc_diff_callbacks_invoke_file_added(self, *args)
    def file_deleted(self, *args):
        return svn_wc_diff_callbacks_invoke_file_deleted(self, *args)
    def dir_added(self, *args):
        return svn_wc_diff_callbacks_invoke_dir_added(self, *args)
    def dir_deleted(self, *args):
        return svn_wc_diff_callbacks_invoke_dir_deleted(self, *args)
    def props_changed(self, *args):
        return svn_wc_diff_callbacks_invoke_props_changed(self, *args)
    def __init__(self):
        """__init__(svn_wc_diff_callbacks_t self) -> svn_wc_diff_callbacks_t"""
        this = _wc.new_svn_wc_diff_callbacks_t()
        # self.this may be a list (older SWIG runtimes) or a plain attribute.
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _wc.delete_svn_wc_diff_callbacks_t
    __del__ = lambda self : None;
# Register the proxy class with the SWIG runtime type system.
svn_wc_diff_callbacks_t_swigregister = _wc.svn_wc_diff_callbacks_t_swigregister
svn_wc_diff_callbacks_t_swigregister(svn_wc_diff_callbacks_t)
def svn_wc_check_wc2(*args):
    """svn_wc_check_wc2(svn_wc_context_t * wc_ctx, char const * local_abspath, apr_pool_t scratch_pool) -> svn_error_t"""
    # Thin shim over the C implementation in the _wc extension module.
    result = _wc.svn_wc_check_wc2(*args)
    return result
def svn_wc_check_wc(*args):
    """svn_wc_check_wc(char const * path, apr_pool_t pool) -> svn_error_t"""
    # Delegate directly to the C wrapper.
    result = _wc.svn_wc_check_wc(*args)
    return result
def svn_wc_has_binary_prop(*args):
    """svn_wc_has_binary_prop(char const * path, svn_wc_adm_access_t * adm_access, apr_pool_t pool) -> svn_error_t"""
    # Thin shim over the C implementation in the _wc extension module.
    result = _wc.svn_wc_has_binary_prop(*args)
    return result
def svn_wc_text_modified_p2(*args):
    """svn_wc_text_modified_p2(svn_wc_context_t * wc_ctx, char const * local_abspath, svn_boolean_t unused, apr_pool_t scratch_pool) -> svn_error_t"""
    # Delegate directly to the C wrapper.
    result = _wc.svn_wc_text_modified_p2(*args)
    return result
def svn_wc_text_modified_p(*args):
    """
    svn_wc_text_modified_p(char const * filename, svn_boolean_t force_comparison, svn_wc_adm_access_t * adm_access,
        apr_pool_t pool) -> svn_error_t
    """
    # Thin shim over the C implementation in the _wc extension module.
    result = _wc.svn_wc_text_modified_p(*args)
    return result
def svn_wc_props_modified_p2(*args):
    """svn_wc_props_modified_p2(svn_wc_context_t * wc_ctx, char const * local_abspath, apr_pool_t scratch_pool) -> svn_error_t"""
    # Delegate directly to the C wrapper.
    result = _wc.svn_wc_props_modified_p2(*args)
    return result
def svn_wc_props_modified_p(*args):
    """svn_wc_props_modified_p(char const * path, svn_wc_adm_access_t * adm_access, apr_pool_t pool) -> svn_error_t"""
    # Thin shim over the C implementation in the _wc extension module.
    result = _wc.svn_wc_props_modified_p(*args)
    return result
# Values of the C enum svn_wc_schedule_t, re-exported from the extension.
svn_wc_schedule_normal = _wc.svn_wc_schedule_normal
svn_wc_schedule_add = _wc.svn_wc_schedule_add
svn_wc_schedule_delete = _wc.svn_wc_schedule_delete
svn_wc_schedule_replace = _wc.svn_wc_schedule_replace
# Sentinel for an unknown working-file size in svn_wc_entry_t.working_size.
SVN_WC_ENTRY_WORKING_SIZE_UNKNOWN = _wc.SVN_WC_ENTRY_WORKING_SIZE_UNKNOWN
class svn_wc_entry_t:
    """Proxy of C svn_wc_entry_t struct"""
    # SWIG proxy machinery: per-field setter/getter tables consulted by
    # _swig_setattr/_swig_getattr to reach the members of the C struct.
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_wc_entry_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_wc_entry_t, name)
    __repr__ = _swig_repr
    # Data fields of the C struct, one setter/getter pair per member.
    __swig_setmethods__["name"] = _wc.svn_wc_entry_t_name_set
    __swig_getmethods__["name"] = _wc.svn_wc_entry_t_name_get
    __swig_setmethods__["revision"] = _wc.svn_wc_entry_t_revision_set
    __swig_getmethods__["revision"] = _wc.svn_wc_entry_t_revision_get
    __swig_setmethods__["url"] = _wc.svn_wc_entry_t_url_set
    __swig_getmethods__["url"] = _wc.svn_wc_entry_t_url_get
    __swig_setmethods__["repos"] = _wc.svn_wc_entry_t_repos_set
    __swig_getmethods__["repos"] = _wc.svn_wc_entry_t_repos_get
    __swig_setmethods__["uuid"] = _wc.svn_wc_entry_t_uuid_set
    __swig_getmethods__["uuid"] = _wc.svn_wc_entry_t_uuid_get
    __swig_setmethods__["kind"] = _wc.svn_wc_entry_t_kind_set
    __swig_getmethods__["kind"] = _wc.svn_wc_entry_t_kind_get
    __swig_setmethods__["schedule"] = _wc.svn_wc_entry_t_schedule_set
    __swig_getmethods__["schedule"] = _wc.svn_wc_entry_t_schedule_get
    __swig_setmethods__["copied"] = _wc.svn_wc_entry_t_copied_set
    __swig_getmethods__["copied"] = _wc.svn_wc_entry_t_copied_get
    __swig_setmethods__["deleted"] = _wc.svn_wc_entry_t_deleted_set
    __swig_getmethods__["deleted"] = _wc.svn_wc_entry_t_deleted_get
    __swig_setmethods__["absent"] = _wc.svn_wc_entry_t_absent_set
    __swig_getmethods__["absent"] = _wc.svn_wc_entry_t_absent_get
    __swig_setmethods__["incomplete"] = _wc.svn_wc_entry_t_incomplete_set
    __swig_getmethods__["incomplete"] = _wc.svn_wc_entry_t_incomplete_get
    __swig_setmethods__["copyfrom_url"] = _wc.svn_wc_entry_t_copyfrom_url_set
    __swig_getmethods__["copyfrom_url"] = _wc.svn_wc_entry_t_copyfrom_url_get
    __swig_setmethods__["copyfrom_rev"] = _wc.svn_wc_entry_t_copyfrom_rev_set
    __swig_getmethods__["copyfrom_rev"] = _wc.svn_wc_entry_t_copyfrom_rev_get
    __swig_setmethods__["conflict_old"] = _wc.svn_wc_entry_t_conflict_old_set
    __swig_getmethods__["conflict_old"] = _wc.svn_wc_entry_t_conflict_old_get
    __swig_setmethods__["conflict_new"] = _wc.svn_wc_entry_t_conflict_new_set
    __swig_getmethods__["conflict_new"] = _wc.svn_wc_entry_t_conflict_new_get
    __swig_setmethods__["conflict_wrk"] = _wc.svn_wc_entry_t_conflict_wrk_set
    __swig_getmethods__["conflict_wrk"] = _wc.svn_wc_entry_t_conflict_wrk_get
    __swig_setmethods__["prejfile"] = _wc.svn_wc_entry_t_prejfile_set
    __swig_getmethods__["prejfile"] = _wc.svn_wc_entry_t_prejfile_get
    __swig_setmethods__["text_time"] = _wc.svn_wc_entry_t_text_time_set
    __swig_getmethods__["text_time"] = _wc.svn_wc_entry_t_text_time_get
    __swig_setmethods__["prop_time"] = _wc.svn_wc_entry_t_prop_time_set
    __swig_getmethods__["prop_time"] = _wc.svn_wc_entry_t_prop_time_get
    __swig_setmethods__["checksum"] = _wc.svn_wc_entry_t_checksum_set
    __swig_getmethods__["checksum"] = _wc.svn_wc_entry_t_checksum_get
    __swig_setmethods__["cmt_rev"] = _wc.svn_wc_entry_t_cmt_rev_set
    __swig_getmethods__["cmt_rev"] = _wc.svn_wc_entry_t_cmt_rev_get
    __swig_setmethods__["cmt_date"] = _wc.svn_wc_entry_t_cmt_date_set
    __swig_getmethods__["cmt_date"] = _wc.svn_wc_entry_t_cmt_date_get
    __swig_setmethods__["cmt_author"] = _wc.svn_wc_entry_t_cmt_author_set
    __swig_getmethods__["cmt_author"] = _wc.svn_wc_entry_t_cmt_author_get
    __swig_setmethods__["lock_token"] = _wc.svn_wc_entry_t_lock_token_set
    __swig_getmethods__["lock_token"] = _wc.svn_wc_entry_t_lock_token_get
    __swig_setmethods__["lock_owner"] = _wc.svn_wc_entry_t_lock_owner_set
    __swig_getmethods__["lock_owner"] = _wc.svn_wc_entry_t_lock_owner_get
    __swig_setmethods__["lock_comment"] = _wc.svn_wc_entry_t_lock_comment_set
    __swig_getmethods__["lock_comment"] = _wc.svn_wc_entry_t_lock_comment_get
    __swig_setmethods__["lock_creation_date"] = _wc.svn_wc_entry_t_lock_creation_date_set
    __swig_getmethods__["lock_creation_date"] = _wc.svn_wc_entry_t_lock_creation_date_get
    __swig_setmethods__["has_props"] = _wc.svn_wc_entry_t_has_props_set
    __swig_getmethods__["has_props"] = _wc.svn_wc_entry_t_has_props_get
    __swig_setmethods__["has_prop_mods"] = _wc.svn_wc_entry_t_has_prop_mods_set
    __swig_getmethods__["has_prop_mods"] = _wc.svn_wc_entry_t_has_prop_mods_get
    __swig_setmethods__["cachable_props"] = _wc.svn_wc_entry_t_cachable_props_set
    __swig_getmethods__["cachable_props"] = _wc.svn_wc_entry_t_cachable_props_get
    __swig_setmethods__["present_props"] = _wc.svn_wc_entry_t_present_props_set
    __swig_getmethods__["present_props"] = _wc.svn_wc_entry_t_present_props_get
    __swig_setmethods__["changelist"] = _wc.svn_wc_entry_t_changelist_set
    __swig_getmethods__["changelist"] = _wc.svn_wc_entry_t_changelist_get
    __swig_setmethods__["working_size"] = _wc.svn_wc_entry_t_working_size_set
    __swig_getmethods__["working_size"] = _wc.svn_wc_entry_t_working_size_get
    __swig_setmethods__["keep_local"] = _wc.svn_wc_entry_t_keep_local_set
    __swig_getmethods__["keep_local"] = _wc.svn_wc_entry_t_keep_local_get
    __swig_setmethods__["depth"] = _wc.svn_wc_entry_t_depth_set
    __swig_getmethods__["depth"] = _wc.svn_wc_entry_t_depth_get
    __swig_setmethods__["tree_conflict_data"] = _wc.svn_wc_entry_t_tree_conflict_data_set
    __swig_getmethods__["tree_conflict_data"] = _wc.svn_wc_entry_t_tree_conflict_data_get
    __swig_setmethods__["file_external_path"] = _wc.svn_wc_entry_t_file_external_path_set
    __swig_getmethods__["file_external_path"] = _wc.svn_wc_entry_t_file_external_path_get
    __swig_setmethods__["file_external_peg_rev"] = _wc.svn_wc_entry_t_file_external_peg_rev_set
    __swig_getmethods__["file_external_peg_rev"] = _wc.svn_wc_entry_t_file_external_peg_rev_get
    __swig_setmethods__["file_external_rev"] = _wc.svn_wc_entry_t_file_external_rev_set
    __swig_getmethods__["file_external_rev"] = _wc.svn_wc_entry_t_file_external_rev_get
    def set_parent_pool(self, parent_pool=None):
        """Create a new proxy object for svn_wc_entry_t"""
        import libsvn.core, weakref
        # Default to the global application pool when none is supplied.
        self.__dict__["_parent_pool"] = \
            parent_pool or libsvn.core.application_pool;
        if self.__dict__["_parent_pool"]:
            # Weakly track pool liveness for later validity checks.
            self.__dict__["_is_valid"] = weakref.ref(
                self.__dict__["_parent_pool"]._is_valid)
    def assert_valid(self):
        """Assert that this object is using valid pool memory"""
        if "_is_valid" in self.__dict__:
            assert self.__dict__["_is_valid"](), "Variable has already been deleted"
    def __getattr__(self, name):
        """Get an attribute from this object"""
        self.assert_valid()
        value = _swig_getattr(self, self.__class__, name)
        # Restore pool metadata recorded by __setattr__, if any.
        members = self.__dict__.get("_members")
        if members is not None:
            _copy_metadata_deep(value, members.get(name))
        _assert_valid_deep(value)
        return value
    def __setattr__(self, name, value):
        """Set an attribute on this object"""
        self.assert_valid()
        self.__dict__.setdefault("_members",{})[name] = value
        return _swig_setattr(self, self.__class__, name, value)
    def __init__(self):
        """__init__(svn_wc_entry_t self) -> svn_wc_entry_t"""
        this = _wc.new_svn_wc_entry_t()
        # self.this may be a list (older SWIG runtimes) or a plain attribute.
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _wc.delete_svn_wc_entry_t
    __del__ = lambda self : None;
# Register the proxy class with the SWIG runtime type system.
svn_wc_entry_t_swigregister = _wc.svn_wc_entry_t_swigregister
svn_wc_entry_t_swigregister(svn_wc_entry_t)
# Entry name used for a directory's own ("this dir") entry.
SVN_WC_ENTRY_THIS_DIR = _wc.SVN_WC_ENTRY_THIS_DIR
def svn_wc_entry(*args):
    """svn_wc_entry(char const * path, svn_wc_adm_access_t * adm_access, svn_boolean_t show_hidden, apr_pool_t pool) -> svn_error_t"""
    # Delegate directly to the C wrapper.
    result = _wc.svn_wc_entry(*args)
    return result
def svn_wc_entries_read(*args):
    """svn_wc_entries_read(svn_wc_adm_access_t * adm_access, svn_boolean_t show_hidden, apr_pool_t pool) -> svn_error_t"""
    # Thin shim over the C implementation in the _wc extension module.
    result = _wc.svn_wc_entries_read(*args)
    return result
def svn_wc_entry_dup(*args):
    """svn_wc_entry_dup(svn_wc_entry_t entry, apr_pool_t pool) -> svn_wc_entry_t"""
    # Delegate directly to the C wrapper.
    result = _wc.svn_wc_entry_dup(*args)
    return result
class svn_wc_info_t:
    """Proxy of C svn_wc_info_t struct"""
    # SWIG proxy machinery: per-field setter/getter tables consulted by
    # _swig_setattr/_swig_getattr to reach the members of the C struct.
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_wc_info_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_wc_info_t, name)
    __repr__ = _swig_repr
    # Data fields of the C struct, one setter/getter pair per member.
    __swig_setmethods__["schedule"] = _wc.svn_wc_info_t_schedule_set
    __swig_getmethods__["schedule"] = _wc.svn_wc_info_t_schedule_get
    __swig_setmethods__["copyfrom_url"] = _wc.svn_wc_info_t_copyfrom_url_set
    __swig_getmethods__["copyfrom_url"] = _wc.svn_wc_info_t_copyfrom_url_get
    __swig_setmethods__["copyfrom_rev"] = _wc.svn_wc_info_t_copyfrom_rev_set
    __swig_getmethods__["copyfrom_rev"] = _wc.svn_wc_info_t_copyfrom_rev_get
    __swig_setmethods__["checksum"] = _wc.svn_wc_info_t_checksum_set
    __swig_getmethods__["checksum"] = _wc.svn_wc_info_t_checksum_get
    __swig_setmethods__["changelist"] = _wc.svn_wc_info_t_changelist_set
    __swig_getmethods__["changelist"] = _wc.svn_wc_info_t_changelist_get
    __swig_setmethods__["depth"] = _wc.svn_wc_info_t_depth_set
    __swig_getmethods__["depth"] = _wc.svn_wc_info_t_depth_get
    __swig_setmethods__["recorded_size"] = _wc.svn_wc_info_t_recorded_size_set
    __swig_getmethods__["recorded_size"] = _wc.svn_wc_info_t_recorded_size_get
    __swig_setmethods__["recorded_time"] = _wc.svn_wc_info_t_recorded_time_set
    __swig_getmethods__["recorded_time"] = _wc.svn_wc_info_t_recorded_time_get
    __swig_setmethods__["conflicts"] = _wc.svn_wc_info_t_conflicts_set
    __swig_getmethods__["conflicts"] = _wc.svn_wc_info_t_conflicts_get
    __swig_setmethods__["wcroot_abspath"] = _wc.svn_wc_info_t_wcroot_abspath_set
    __swig_getmethods__["wcroot_abspath"] = _wc.svn_wc_info_t_wcroot_abspath_get
    __swig_setmethods__["moved_from_abspath"] = _wc.svn_wc_info_t_moved_from_abspath_set
    __swig_getmethods__["moved_from_abspath"] = _wc.svn_wc_info_t_moved_from_abspath_get
    __swig_setmethods__["moved_to_abspath"] = _wc.svn_wc_info_t_moved_to_abspath_set
    __swig_getmethods__["moved_to_abspath"] = _wc.svn_wc_info_t_moved_to_abspath_get
    def set_parent_pool(self, parent_pool=None):
        """Create a new proxy object for svn_wc_info_t"""
        import libsvn.core, weakref
        # Default to the global application pool when none is supplied.
        self.__dict__["_parent_pool"] = \
            parent_pool or libsvn.core.application_pool;
        if self.__dict__["_parent_pool"]:
            # Weakly track pool liveness for later validity checks.
            self.__dict__["_is_valid"] = weakref.ref(
                self.__dict__["_parent_pool"]._is_valid)
    def assert_valid(self):
        """Assert that this object is using valid pool memory"""
        if "_is_valid" in self.__dict__:
            assert self.__dict__["_is_valid"](), "Variable has already been deleted"
    def __getattr__(self, name):
        """Get an attribute from this object"""
        self.assert_valid()
        value = _swig_getattr(self, self.__class__, name)
        # Restore pool metadata recorded by __setattr__, if any.
        members = self.__dict__.get("_members")
        if members is not None:
            _copy_metadata_deep(value, members.get(name))
        _assert_valid_deep(value)
        return value
    def __setattr__(self, name, value):
        """Set an attribute on this object"""
        self.assert_valid()
        self.__dict__.setdefault("_members",{})[name] = value
        return _swig_setattr(self, self.__class__, name, value)
    def __init__(self):
        """__init__(svn_wc_info_t self) -> svn_wc_info_t"""
        this = _wc.new_svn_wc_info_t()
        # self.this may be a list (older SWIG runtimes) or a plain attribute.
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _wc.delete_svn_wc_info_t
    __del__ = lambda self : None;
# Register the proxy class with the SWIG runtime type system.
svn_wc_info_t_swigregister = _wc.svn_wc_info_t_swigregister
svn_wc_info_t_swigregister(svn_wc_info_t)
def svn_wc_info_dup(*args):
    """svn_wc_info_dup(svn_wc_info_t info, apr_pool_t pool) -> svn_wc_info_t"""
    # Thin shim over the C implementation in the _wc extension module.
    result = _wc.svn_wc_info_dup(*args)
    return result
def svn_wc_conflicted_p3(*args):
    """svn_wc_conflicted_p3(svn_wc_context_t * wc_ctx, char const * local_abspath, apr_pool_t scratch_pool) -> svn_error_t"""
    # Delegate directly to the C wrapper.
    result = _wc.svn_wc_conflicted_p3(*args)
    return result
def svn_wc_conflicted_p2(*args):
    """svn_wc_conflicted_p2(char const * path, svn_wc_adm_access_t * adm_access, apr_pool_t pool) -> svn_error_t"""
    # Thin shim over the C implementation in the _wc extension module.
    result = _wc.svn_wc_conflicted_p2(*args)
    return result
def svn_wc_conflicted_p(*args):
    """svn_wc_conflicted_p(char const * dir_path, svn_wc_entry_t entry, apr_pool_t pool) -> svn_error_t"""
    # Delegate directly to the C wrapper.
    result = _wc.svn_wc_conflicted_p(*args)
    return result
def svn_wc_get_ancestry(*args):
    """svn_wc_get_ancestry(char const * path, svn_wc_adm_access_t * adm_access, apr_pool_t pool) -> svn_error_t"""
    # Thin shim over the C implementation in the _wc extension module.
    result = _wc.svn_wc_get_ancestry(*args)
    return result
class svn_wc_entry_callbacks2_t:
    """Proxy of C svn_wc_entry_callbacks2_t struct"""
    # SWIG proxy machinery: per-field setter/getter tables consulted by
    # _swig_setattr/_swig_getattr to reach the members of the C struct.
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_wc_entry_callbacks2_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_wc_entry_callbacks2_t, name)
    __repr__ = _swig_repr
    # Function-pointer fields of the C callback vtable.
    __swig_setmethods__["found_entry"] = _wc.svn_wc_entry_callbacks2_t_found_entry_set
    __swig_getmethods__["found_entry"] = _wc.svn_wc_entry_callbacks2_t_found_entry_get
    __swig_setmethods__["handle_error"] = _wc.svn_wc_entry_callbacks2_t_handle_error_set
    __swig_getmethods__["handle_error"] = _wc.svn_wc_entry_callbacks2_t_handle_error_get
    def set_parent_pool(self, parent_pool=None):
        """Create a new proxy object for svn_wc_entry_callbacks2_t"""
        import libsvn.core, weakref
        # Default to the global application pool when none is supplied.
        self.__dict__["_parent_pool"] = \
            parent_pool or libsvn.core.application_pool;
        if self.__dict__["_parent_pool"]:
            # Weakly track pool liveness for later validity checks.
            self.__dict__["_is_valid"] = weakref.ref(
                self.__dict__["_parent_pool"]._is_valid)
    def assert_valid(self):
        """Assert that this object is using valid pool memory"""
        if "_is_valid" in self.__dict__:
            assert self.__dict__["_is_valid"](), "Variable has already been deleted"
    def __getattr__(self, name):
        """Get an attribute from this object"""
        self.assert_valid()
        value = _swig_getattr(self, self.__class__, name)
        # Restore pool metadata recorded by __setattr__, if any.
        members = self.__dict__.get("_members")
        if members is not None:
            _copy_metadata_deep(value, members.get(name))
        _assert_valid_deep(value)
        return value
    def __setattr__(self, name, value):
        """Set an attribute on this object"""
        self.assert_valid()
        self.__dict__.setdefault("_members",{})[name] = value
        return _swig_setattr(self, self.__class__, name, value)
    # Callback delegations to the module-level invoke_* helpers.
    def found_entry(self, *args):
        return svn_wc_entry_callbacks2_invoke_found_entry(self, *args)
    def handle_error(self, *args):
        return svn_wc_entry_callbacks2_invoke_handle_error(self, *args)
    def __init__(self):
        """__init__(svn_wc_entry_callbacks2_t self) -> svn_wc_entry_callbacks2_t"""
        this = _wc.new_svn_wc_entry_callbacks2_t()
        # self.this may be a list (older SWIG runtimes) or a plain attribute.
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _wc.delete_svn_wc_entry_callbacks2_t
    __del__ = lambda self : None;
# Register the proxy class with the SWIG runtime type system.
svn_wc_entry_callbacks2_t_swigregister = _wc.svn_wc_entry_callbacks2_t_swigregister
svn_wc_entry_callbacks2_t_swigregister(svn_wc_entry_callbacks2_t)
class svn_wc_entry_callbacks_t:
    """Proxy of C svn_wc_entry_callbacks_t struct"""
    # SWIG proxy machinery: per-field setter/getter tables consulted by
    # _swig_setattr/_swig_getattr to reach the members of the C struct.
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_wc_entry_callbacks_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_wc_entry_callbacks_t, name)
    __repr__ = _swig_repr
    # Function-pointer field of the C callback vtable.
    __swig_setmethods__["found_entry"] = _wc.svn_wc_entry_callbacks_t_found_entry_set
    __swig_getmethods__["found_entry"] = _wc.svn_wc_entry_callbacks_t_found_entry_get
    def set_parent_pool(self, parent_pool=None):
        """Create a new proxy object for svn_wc_entry_callbacks_t"""
        import libsvn.core, weakref
        # Default to the global application pool when none is supplied.
        self.__dict__["_parent_pool"] = \
            parent_pool or libsvn.core.application_pool;
        if self.__dict__["_parent_pool"]:
            # Weakly track pool liveness for later validity checks.
            self.__dict__["_is_valid"] = weakref.ref(
                self.__dict__["_parent_pool"]._is_valid)
    def assert_valid(self):
        """Assert that this object is using valid pool memory"""
        if "_is_valid" in self.__dict__:
            assert self.__dict__["_is_valid"](), "Variable has already been deleted"
    def __getattr__(self, name):
        """Get an attribute from this object"""
        self.assert_valid()
        value = _swig_getattr(self, self.__class__, name)
        # Restore pool metadata recorded by __setattr__, if any.
        members = self.__dict__.get("_members")
        if members is not None:
            _copy_metadata_deep(value, members.get(name))
        _assert_valid_deep(value)
        return value
    def __setattr__(self, name, value):
        """Set an attribute on this object"""
        self.assert_valid()
        self.__dict__.setdefault("_members",{})[name] = value
        return _swig_setattr(self, self.__class__, name, value)
    # Callback delegation to the module-level invoke_* helper.
    def found_entry(self, *args):
        return svn_wc_entry_callbacks_invoke_found_entry(self, *args)
    def __init__(self):
        """__init__(svn_wc_entry_callbacks_t self) -> svn_wc_entry_callbacks_t"""
        this = _wc.new_svn_wc_entry_callbacks_t()
        # self.this may be a list (older SWIG runtimes) or a plain attribute.
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _wc.delete_svn_wc_entry_callbacks_t
    __del__ = lambda self : None;
# Register the proxy class with the SWIG runtime type system.
svn_wc_entry_callbacks_t_swigregister = _wc.svn_wc_entry_callbacks_t_swigregister
svn_wc_entry_callbacks_t_swigregister(svn_wc_entry_callbacks_t)
def svn_wc_walk_entries3(*args):
    """
    svn_wc_walk_entries3(char const * path, svn_wc_adm_access_t * adm_access, svn_wc_entry_callbacks2_t walk_callbacks,
        void * walk_baton, svn_depth_t depth, svn_boolean_t show_hidden,
        svn_cancel_func_t cancel_func, apr_pool_t pool) -> svn_error_t
    """
    # Delegate directly to the C wrapper.
    result = _wc.svn_wc_walk_entries3(*args)
    return result
def svn_wc_walk_entries2(*args):
    """
    svn_wc_walk_entries2(char const * path, svn_wc_adm_access_t * adm_access, svn_wc_entry_callbacks_t walk_callbacks,
        void * walk_baton, svn_boolean_t show_hidden, svn_cancel_func_t cancel_func,
        apr_pool_t pool) -> svn_error_t
    """
    # Thin shim over the C implementation in the _wc extension module.
    result = _wc.svn_wc_walk_entries2(*args)
    return result
def svn_wc_walk_entries(*args):
    """
    svn_wc_walk_entries(char const * path, svn_wc_adm_access_t * adm_access, svn_wc_entry_callbacks_t walk_callbacks,
        void * walk_baton, svn_boolean_t show_hidden, apr_pool_t pool) -> svn_error_t
    """
    # Delegate directly to the C wrapper.
    result = _wc.svn_wc_walk_entries(*args)
    return result
def svn_wc_mark_missing_deleted(*args):
    """svn_wc_mark_missing_deleted(char const * path, svn_wc_adm_access_t * parent, apr_pool_t pool) -> svn_error_t"""
    # Thin shim over the C implementation in the _wc extension module.
    result = _wc.svn_wc_mark_missing_deleted(*args)
    return result
def svn_wc_ensure_adm4(*args):
    """
    svn_wc_ensure_adm4(svn_wc_context_t * wc_ctx, char const * local_abspath, char const * url, char const * repos_root_url,
        char const * repos_uuid, svn_revnum_t revision, svn_depth_t depth,
        apr_pool_t scratch_pool) -> svn_error_t
    """
    # Delegate directly to the C wrapper.
    result = _wc.svn_wc_ensure_adm4(*args)
    return result
def svn_wc_ensure_adm3(*args):
    """
    svn_wc_ensure_adm3(char const * path, char const * uuid, char const * url, char const * repos, svn_revnum_t revision,
        svn_depth_t depth, apr_pool_t pool) -> svn_error_t
    """
    # Thin shim over the C implementation in the _wc extension module.
    result = _wc.svn_wc_ensure_adm3(*args)
    return result
def svn_wc_ensure_adm2(*args):
    """
    svn_wc_ensure_adm2(char const * path, char const * uuid, char const * url, char const * repos, svn_revnum_t revision,
        apr_pool_t pool) -> svn_error_t
    """
    # Delegate directly to the C wrapper.
    result = _wc.svn_wc_ensure_adm2(*args)
    return result
def svn_wc_ensure_adm(*args):
    """svn_wc_ensure_adm(char const * path, char const * uuid, char const * url, svn_revnum_t revision, apr_pool_t pool) -> svn_error_t"""
    # Thin shim over the C implementation in the _wc extension module.
    result = _wc.svn_wc_ensure_adm(*args)
    return result
def svn_wc_maybe_set_repos_root(*args):
    """svn_wc_maybe_set_repos_root(svn_wc_adm_access_t * adm_access, char const * path, char const * repos, apr_pool_t pool) -> svn_error_t"""
    # Delegate directly to the C wrapper.
    result = _wc.svn_wc_maybe_set_repos_root(*args)
    return result
# Values of the C enum svn_wc_status_kind, re-exported from the extension.
svn_wc_status_none = _wc.svn_wc_status_none
svn_wc_status_unversioned = _wc.svn_wc_status_unversioned
svn_wc_status_normal = _wc.svn_wc_status_normal
svn_wc_status_added = _wc.svn_wc_status_added
svn_wc_status_missing = _wc.svn_wc_status_missing
svn_wc_status_deleted = _wc.svn_wc_status_deleted
svn_wc_status_replaced = _wc.svn_wc_status_replaced
svn_wc_status_modified = _wc.svn_wc_status_modified
svn_wc_status_merged = _wc.svn_wc_status_merged
svn_wc_status_conflicted = _wc.svn_wc_status_conflicted
svn_wc_status_ignored = _wc.svn_wc_status_ignored
svn_wc_status_obstructed = _wc.svn_wc_status_obstructed
svn_wc_status_external = _wc.svn_wc_status_external
svn_wc_status_incomplete = _wc.svn_wc_status_incomplete
class svn_wc_status3_t:
"""Proxy of C svn_wc_status3_t struct"""
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, svn_wc_status3_t, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, svn_wc_status3_t, name)
__repr__ = _swig_repr
__swig_setmethods__["kind"] = _wc.svn_wc_status3_t_kind_set
__swig_getmethods__["kind"] = _wc.svn_wc_status3_t_kind_get
__swig_setmethods__["depth"] = _wc.svn_wc_status3_t_depth_set
__swig_getmethods__["depth"] = _wc.svn_wc_status3_t_depth_get
__swig_setmethods__["filesize"] = _wc.svn_wc_status3_t_filesize_set
__swig_getmethods__["filesize"] = _wc.svn_wc_status3_t_filesize_get
__swig_setmethods__["versioned"] = _wc.svn_wc_status3_t_versioned_set
__swig_getmethods__["versioned"] = _wc.svn_wc_status3_t_versioned_get
__swig_setmethods__["conflicted"] = _wc.svn_wc_status3_t_conflicted_set
__swig_getmethods__["conflicted"] = _wc.svn_wc_status3_t_conflicted_get
__swig_setmethods__["node_status"] = _wc.svn_wc_status3_t_node_status_set
__swig_getmethods__["node_status"] = _wc.svn_wc_status3_t_node_status_get
__swig_setmethods__["text_status"] = _wc.svn_wc_status3_t_text_status_set
__swig_getmethods__["text_status"] = _wc.svn_wc_status3_t_text_status_get
__swig_setmethods__["prop_status"] = _wc.svn_wc_status3_t_prop_status_set
__swig_getmethods__["prop_status"] = _wc.svn_wc_status3_t_prop_status_get
__swig_setmethods__["copied"] = _wc.svn_wc_status3_t_copied_set
__swig_getmethods__["copied"] = _wc.svn_wc_status3_t_copied_get
__swig_setmethods__["revision"] = _wc.svn_wc_status3_t_revision_set
__swig_getmethods__["revision"] = _wc.svn_wc_status3_t_revision_get
__swig_setmethods__["changed_rev"] = _wc.svn_wc_status3_t_changed_rev_set
__swig_getmethods__["changed_rev"] = _wc.svn_wc_status3_t_changed_rev_get
__swig_setmethods__["changed_date"] = _wc.svn_wc_status3_t_changed_date_set
__swig_getmethods__["changed_date"] = _wc.svn_wc_status3_t_changed_date_get
__swig_setmethods__["changed_author"] = _wc.svn_wc_status3_t_changed_author_set
__swig_getmethods__["changed_author"] = _wc.svn_wc_status3_t_changed_author_get
__swig_setmethods__["repos_root_url"] = _wc.svn_wc_status3_t_repos_root_url_set
__swig_getmethods__["repos_root_url"] = _wc.svn_wc_status3_t_repos_root_url_get
__swig_setmethods__["repos_uuid"] = _wc.svn_wc_status3_t_repos_uuid_set
__swig_getmethods__["repos_uuid"] = _wc.svn_wc_status3_t_repos_uuid_get
__swig_setmethods__["repos_relpath"] = _wc.svn_wc_status3_t_repos_relpath_set
__swig_getmethods__["repos_relpath"] = _wc.svn_wc_status3_t_repos_relpath_get
__swig_setmethods__["switched"] = _wc.svn_wc_status3_t_switched_set
__swig_getmethods__["switched"] = _wc.svn_wc_status3_t_switched_get
__swig_setmethods__["locked"] = _wc.svn_wc_status3_t_locked_set
__swig_getmethods__["locked"] = _wc.svn_wc_status3_t_locked_get
__swig_setmethods__["lock"] = _wc.svn_wc_status3_t_lock_set
__swig_getmethods__["lock"] = _wc.svn_wc_status3_t_lock_get
__swig_setmethods__["changelist"] = _wc.svn_wc_status3_t_changelist_set
__swig_getmethods__["changelist"] = _wc.svn_wc_status3_t_changelist_get
__swig_setmethods__["ood_kind"] = _wc.svn_wc_status3_t_ood_kind_set
__swig_getmethods__["ood_kind"] = _wc.svn_wc_status3_t_ood_kind_get
__swig_setmethods__["repos_node_status"] = _wc.svn_wc_status3_t_repos_node_status_set
__swig_getmethods__["repos_node_status"] = _wc.svn_wc_status3_t_repos_node_status_get
__swig_setmethods__["repos_text_status"] = _wc.svn_wc_status3_t_repos_text_status_set
__swig_getmethods__["repos_text_status"] = _wc.svn_wc_status3_t_repos_text_status_get
__swig_setmethods__["repos_prop_status"] = _wc.svn_wc_status3_t_repos_prop_status_set
__swig_getmethods__["repos_prop_status"] = _wc.svn_wc_status3_t_repos_prop_status_get
__swig_setmethods__["repos_lock"] = _wc.svn_wc_status3_t_repos_lock_set
__swig_getmethods__["repos_lock"] = _wc.svn_wc_status3_t_repos_lock_get
__swig_setmethods__["ood_changed_rev"] = _wc.svn_wc_status3_t_ood_changed_rev_set
__swig_getmethods__["ood_changed_rev"] = _wc.svn_wc_status3_t_ood_changed_rev_get
__swig_setmethods__["ood_changed_date"] = _wc.svn_wc_status3_t_ood_changed_date_set
__swig_getmethods__["ood_changed_date"] = _wc.svn_wc_status3_t_ood_changed_date_get
__swig_setmethods__["ood_changed_author"] = _wc.svn_wc_status3_t_ood_changed_author_set
__swig_getmethods__["ood_changed_author"] = _wc.svn_wc_status3_t_ood_changed_author_get
__swig_setmethods__["moved_from_abspath"] = _wc.svn_wc_status3_t_moved_from_abspath_set
__swig_getmethods__["moved_from_abspath"] = _wc.svn_wc_status3_t_moved_from_abspath_get
__swig_setmethods__["moved_to_abspath"] = _wc.svn_wc_status3_t_moved_to_abspath_set
__swig_getmethods__["moved_to_abspath"] = _wc.svn_wc_status3_t_moved_to_abspath_get
__swig_setmethods__["file_external"] = _wc.svn_wc_status3_t_file_external_set
__swig_getmethods__["file_external"] = _wc.svn_wc_status3_t_file_external_get
def set_parent_pool(self, parent_pool=None):
    """Create a new proxy object for svn_wc_status3_t"""
    import libsvn.core
    import weakref
    pool = parent_pool or libsvn.core.application_pool
    # Write through __dict__ to bypass this proxy's __setattr__ hook.
    self.__dict__["_parent_pool"] = pool
    if pool:
        # Keep a weak reference to the pool's liveness sentinel so
        # assert_valid() can detect when the pool has been destroyed.
        self.__dict__["_is_valid"] = weakref.ref(pool._is_valid)
def assert_valid(self):
    """Assert that this object is using valid pool memory"""
    state = self.__dict__
    if "_is_valid" in state:
        # _is_valid is a weakref; a dead referent means the backing
        # pool was already freed.
        alive = state["_is_valid"]()
        assert alive, "Variable has already been deleted"
def __getattr__(self, name):
    """Get an attribute from this object"""
    # Fail fast if the backing pool memory has been released.
    self.assert_valid()
    value = _swig_getattr(self, self.__class__, name)
    # _members caches values assigned via __setattr__; re-apply their
    # pool metadata to the freshly fetched value before returning it.
    members = self.__dict__.get("_members")
    if members is not None:
        _copy_metadata_deep(value, members.get(name))
    _assert_valid_deep(value)
    return value
def __setattr__(self, name, value):
    """Set an attribute on this object"""
    self.assert_valid()
    # Remember the assigned value so __getattr__ can later restore its
    # pool metadata.
    cache = self.__dict__.setdefault("_members", {})
    cache[name] = value
    return _swig_setattr(self, self.__class__, name, value)
def __init__(self):
    """__init__(svn_wc_status3_t self) -> svn_wc_status3_t"""
    this = _wc.new_svn_wc_status3_t()
    # SWIG stores the C pointer either by appending to an existing
    # `this` list or by binding the attribute directly.  Catch
    # Exception rather than using a bare `except:` so that
    # KeyboardInterrupt/SystemExit raised mid-construction propagate.
    try:
        self.this.append(this)
    except Exception:
        self.this = this
# Destructor hook invoked by SWIG when the proxy is garbage-collected;
# __del__ is a no-op placeholder.
__swig_destroy__ = _wc.delete_svn_wc_status3_t
__del__ = lambda self : None;
# Register the proxy class with the C extension module.
svn_wc_status3_t_swigregister = _wc.svn_wc_status3_t_swigregister
svn_wc_status3_t_swigregister(svn_wc_status3_t)
class svn_wc_status2_t:
    """Proxy of C svn_wc_status2_t struct"""
    # The lambda hooks below are placeholders installed while the class
    # body executes; the def __getattr__/__setattr__ further down
    # override them in the final class namespace.
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_wc_status2_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_wc_status2_t, name)
    __repr__ = _swig_repr
    # Accessor registrations: one C-level get/set pair per struct field.
    __swig_setmethods__["entry"] = _wc.svn_wc_status2_t_entry_set
    __swig_getmethods__["entry"] = _wc.svn_wc_status2_t_entry_get
    __swig_setmethods__["text_status"] = _wc.svn_wc_status2_t_text_status_set
    __swig_getmethods__["text_status"] = _wc.svn_wc_status2_t_text_status_get
    __swig_setmethods__["prop_status"] = _wc.svn_wc_status2_t_prop_status_set
    __swig_getmethods__["prop_status"] = _wc.svn_wc_status2_t_prop_status_get
    __swig_setmethods__["locked"] = _wc.svn_wc_status2_t_locked_set
    __swig_getmethods__["locked"] = _wc.svn_wc_status2_t_locked_get
    __swig_setmethods__["copied"] = _wc.svn_wc_status2_t_copied_set
    __swig_getmethods__["copied"] = _wc.svn_wc_status2_t_copied_get
    __swig_setmethods__["switched"] = _wc.svn_wc_status2_t_switched_set
    __swig_getmethods__["switched"] = _wc.svn_wc_status2_t_switched_get
    __swig_setmethods__["repos_text_status"] = _wc.svn_wc_status2_t_repos_text_status_set
    __swig_getmethods__["repos_text_status"] = _wc.svn_wc_status2_t_repos_text_status_get
    __swig_setmethods__["repos_prop_status"] = _wc.svn_wc_status2_t_repos_prop_status_set
    __swig_getmethods__["repos_prop_status"] = _wc.svn_wc_status2_t_repos_prop_status_get
    __swig_setmethods__["repos_lock"] = _wc.svn_wc_status2_t_repos_lock_set
    __swig_getmethods__["repos_lock"] = _wc.svn_wc_status2_t_repos_lock_get
    __swig_setmethods__["url"] = _wc.svn_wc_status2_t_url_set
    __swig_getmethods__["url"] = _wc.svn_wc_status2_t_url_get
    __swig_setmethods__["ood_last_cmt_rev"] = _wc.svn_wc_status2_t_ood_last_cmt_rev_set
    __swig_getmethods__["ood_last_cmt_rev"] = _wc.svn_wc_status2_t_ood_last_cmt_rev_get
    __swig_setmethods__["ood_last_cmt_date"] = _wc.svn_wc_status2_t_ood_last_cmt_date_set
    __swig_getmethods__["ood_last_cmt_date"] = _wc.svn_wc_status2_t_ood_last_cmt_date_get
    __swig_setmethods__["ood_kind"] = _wc.svn_wc_status2_t_ood_kind_set
    __swig_getmethods__["ood_kind"] = _wc.svn_wc_status2_t_ood_kind_get
    __swig_setmethods__["ood_last_cmt_author"] = _wc.svn_wc_status2_t_ood_last_cmt_author_set
    __swig_getmethods__["ood_last_cmt_author"] = _wc.svn_wc_status2_t_ood_last_cmt_author_get
    __swig_setmethods__["tree_conflict"] = _wc.svn_wc_status2_t_tree_conflict_set
    __swig_getmethods__["tree_conflict"] = _wc.svn_wc_status2_t_tree_conflict_get
    __swig_setmethods__["file_external"] = _wc.svn_wc_status2_t_file_external_set
    __swig_getmethods__["file_external"] = _wc.svn_wc_status2_t_file_external_get
    __swig_setmethods__["pristine_text_status"] = _wc.svn_wc_status2_t_pristine_text_status_set
    __swig_getmethods__["pristine_text_status"] = _wc.svn_wc_status2_t_pristine_text_status_get
    __swig_setmethods__["pristine_prop_status"] = _wc.svn_wc_status2_t_pristine_prop_status_set
    __swig_getmethods__["pristine_prop_status"] = _wc.svn_wc_status2_t_pristine_prop_status_get
    def set_parent_pool(self, parent_pool=None):
        """Create a new proxy object for svn_wc_status2_t"""
        import libsvn.core
        import weakref
        # Write through __dict__ to bypass this proxy's __setattr__ hook.
        self.__dict__["_parent_pool"] = \
            parent_pool or libsvn.core.application_pool
        if self.__dict__["_parent_pool"]:
            # Weakly track the pool's liveness sentinel for assert_valid().
            self.__dict__["_is_valid"] = weakref.ref(
                self.__dict__["_parent_pool"]._is_valid)
    def assert_valid(self):
        """Assert that this object is using valid pool memory"""
        if "_is_valid" in self.__dict__:
            assert self.__dict__["_is_valid"](), "Variable has already been deleted"
    def __getattr__(self, name):
        """Get an attribute from this object"""
        self.assert_valid()
        value = _swig_getattr(self, self.__class__, name)
        # _members caches values assigned via __setattr__; re-apply their
        # pool metadata to the freshly fetched value.
        members = self.__dict__.get("_members")
        if members is not None:
            _copy_metadata_deep(value, members.get(name))
        _assert_valid_deep(value)
        return value
    def __setattr__(self, name, value):
        """Set an attribute on this object"""
        self.assert_valid()
        self.__dict__.setdefault("_members", {})[name] = value
        return _swig_setattr(self, self.__class__, name, value)
    def __init__(self):
        """__init__(svn_wc_status2_t self) -> svn_wc_status2_t"""
        this = _wc.new_svn_wc_status2_t()
        # Catch Exception instead of a bare `except:` so that
        # KeyboardInterrupt/SystemExit raised mid-construction propagate.
        try:
            self.this.append(this)
        except Exception:
            self.this = this
    # Destructor hook used by SWIG; __del__ is a no-op placeholder.
    __swig_destroy__ = _wc.delete_svn_wc_status2_t
    __del__ = lambda self: None
# Register the proxy class with the C extension module.
svn_wc_status2_t_swigregister = _wc.svn_wc_status2_t_swigregister
svn_wc_status2_t_swigregister(svn_wc_status2_t)
class svn_wc_status_t:
    """Proxy of C svn_wc_status_t struct"""
    # The lambda hooks below are placeholders installed while the class
    # body executes; the def __getattr__/__setattr__ further down
    # override them in the final class namespace.
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_wc_status_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_wc_status_t, name)
    __repr__ = _swig_repr
    # Accessor registrations: one C-level get/set pair per struct field.
    __swig_setmethods__["entry"] = _wc.svn_wc_status_t_entry_set
    __swig_getmethods__["entry"] = _wc.svn_wc_status_t_entry_get
    __swig_setmethods__["text_status"] = _wc.svn_wc_status_t_text_status_set
    __swig_getmethods__["text_status"] = _wc.svn_wc_status_t_text_status_get
    __swig_setmethods__["prop_status"] = _wc.svn_wc_status_t_prop_status_set
    __swig_getmethods__["prop_status"] = _wc.svn_wc_status_t_prop_status_get
    __swig_setmethods__["locked"] = _wc.svn_wc_status_t_locked_set
    __swig_getmethods__["locked"] = _wc.svn_wc_status_t_locked_get
    __swig_setmethods__["copied"] = _wc.svn_wc_status_t_copied_set
    __swig_getmethods__["copied"] = _wc.svn_wc_status_t_copied_get
    __swig_setmethods__["switched"] = _wc.svn_wc_status_t_switched_set
    __swig_getmethods__["switched"] = _wc.svn_wc_status_t_switched_get
    __swig_setmethods__["repos_text_status"] = _wc.svn_wc_status_t_repos_text_status_set
    __swig_getmethods__["repos_text_status"] = _wc.svn_wc_status_t_repos_text_status_get
    __swig_setmethods__["repos_prop_status"] = _wc.svn_wc_status_t_repos_prop_status_set
    __swig_getmethods__["repos_prop_status"] = _wc.svn_wc_status_t_repos_prop_status_get
    def set_parent_pool(self, parent_pool=None):
        """Create a new proxy object for svn_wc_status_t"""
        import libsvn.core
        import weakref
        # Write through __dict__ to bypass this proxy's __setattr__ hook.
        self.__dict__["_parent_pool"] = \
            parent_pool or libsvn.core.application_pool
        if self.__dict__["_parent_pool"]:
            # Weakly track the pool's liveness sentinel for assert_valid().
            self.__dict__["_is_valid"] = weakref.ref(
                self.__dict__["_parent_pool"]._is_valid)
    def assert_valid(self):
        """Assert that this object is using valid pool memory"""
        if "_is_valid" in self.__dict__:
            assert self.__dict__["_is_valid"](), "Variable has already been deleted"
    def __getattr__(self, name):
        """Get an attribute from this object"""
        self.assert_valid()
        value = _swig_getattr(self, self.__class__, name)
        # _members caches values assigned via __setattr__; re-apply their
        # pool metadata to the freshly fetched value.
        members = self.__dict__.get("_members")
        if members is not None:
            _copy_metadata_deep(value, members.get(name))
        _assert_valid_deep(value)
        return value
    def __setattr__(self, name, value):
        """Set an attribute on this object"""
        self.assert_valid()
        self.__dict__.setdefault("_members", {})[name] = value
        return _swig_setattr(self, self.__class__, name, value)
    def __init__(self):
        """__init__(svn_wc_status_t self) -> svn_wc_status_t"""
        this = _wc.new_svn_wc_status_t()
        # Catch Exception instead of a bare `except:` so that
        # KeyboardInterrupt/SystemExit raised mid-construction propagate.
        try:
            self.this.append(this)
        except Exception:
            self.this = this
    # Destructor hook used by SWIG; __del__ is a no-op placeholder.
    __swig_destroy__ = _wc.delete_svn_wc_status_t
    __del__ = lambda self: None
# Register the proxy class with the C extension module.
svn_wc_status_t_swigregister = _wc.svn_wc_status_t_swigregister
svn_wc_status_t_swigregister(svn_wc_status_t)
# Thin module-level wrappers: each forwards *args unchanged to the C
# function of the same name in the _wc extension module.  The docstrings
# (generated by SWIG) record the underlying C signatures.
def svn_wc_dup_status3(*args):
    """svn_wc_dup_status3(svn_wc_status3_t orig_stat, apr_pool_t pool) -> svn_wc_status3_t"""
    return _wc.svn_wc_dup_status3(*args)
def svn_wc_dup_status2(*args):
    """svn_wc_dup_status2(svn_wc_status2_t orig_stat, apr_pool_t pool) -> svn_wc_status2_t"""
    return _wc.svn_wc_dup_status2(*args)
def svn_wc_dup_status(*args):
    """svn_wc_dup_status(svn_wc_status_t orig_stat, apr_pool_t pool) -> svn_wc_status_t"""
    return _wc.svn_wc_dup_status(*args)
def svn_wc_status3(*args):
    """svn_wc_status3(svn_wc_context_t * wc_ctx, char const * local_abspath, apr_pool_t result_pool, apr_pool_t scratch_pool) -> svn_error_t"""
    return _wc.svn_wc_status3(*args)
def svn_wc_status2(*args):
    """svn_wc_status2(char const * path, svn_wc_adm_access_t * adm_access, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_status2(*args)
def svn_wc_status(*args):
    """svn_wc_status(char const * path, svn_wc_adm_access_t * adm_access, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_status(*args)
def svn_wc_walk_status(*args):
    """
    svn_wc_walk_status(svn_wc_context_t * wc_ctx, char const * local_abspath, svn_depth_t depth, svn_boolean_t get_all,
    svn_boolean_t no_ignore, svn_boolean_t ignore_text_mods, apr_array_header_t ignore_patterns,
    svn_wc_status_func4_t status_func, void * status_baton,
    svn_cancel_func_t cancel_func, apr_pool_t scratch_pool) -> svn_error_t
    """
    return _wc.svn_wc_walk_status(*args)
def svn_wc_get_status_editor5(*args):
    """
    svn_wc_get_status_editor5(svn_wc_context_t * wc_ctx, char const * anchor_abspath, char const * target_basename,
    svn_depth_t depth, svn_boolean_t get_all, svn_boolean_t no_ignore, svn_boolean_t depth_as_sticky,
    svn_boolean_t server_performs_filtering, apr_array_header_t ignore_patterns,
    svn_wc_status_func4_t status_func, void * status_baton,
    svn_cancel_func_t cancel_func, apr_pool_t result_pool, apr_pool_t scratch_pool) -> svn_error_t
    """
    return _wc.svn_wc_get_status_editor5(*args)
def svn_wc_get_status_editor4(*args):
    """
    svn_wc_get_status_editor4(svn_wc_adm_access_t * anchor, char const * target, svn_depth_t depth, svn_boolean_t get_all,
    svn_boolean_t no_ignore, apr_array_header_t ignore_patterns, svn_wc_status_func3_t status_func,
    void * status_baton, svn_cancel_func_t cancel_func,
    svn_wc_traversal_info_t * traversal_info, apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_get_status_editor4(*args)
def svn_wc_get_status_editor3(*args):
    """
    svn_wc_get_status_editor3(svn_wc_adm_access_t * anchor, char const * target, svn_depth_t depth, svn_boolean_t get_all,
    svn_boolean_t no_ignore, apr_array_header_t ignore_patterns, svn_wc_status_func2_t status_func,
    svn_cancel_func_t cancel_func, svn_wc_traversal_info_t * traversal_info,
    apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_get_status_editor3(*args)
def svn_wc_get_status_editor2(*args):
    """
    svn_wc_get_status_editor2(svn_wc_adm_access_t * anchor, char const * target, apr_hash_t config, svn_boolean_t recurse,
    svn_boolean_t get_all, svn_boolean_t no_ignore, svn_wc_status_func2_t status_func,
    svn_cancel_func_t cancel_func, svn_wc_traversal_info_t * traversal_info,
    apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_get_status_editor2(*args)
def svn_wc_get_status_editor(*args):
    """
    svn_wc_get_status_editor(svn_wc_adm_access_t * anchor, char const * target, apr_hash_t config, svn_boolean_t recurse,
    svn_boolean_t get_all, svn_boolean_t no_ignore, svn_wc_status_func_t status_func,
    svn_cancel_func_t cancel_func, svn_wc_traversal_info_t * traversal_info,
    apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_get_status_editor(*args)
def svn_wc_status_set_repos_locks(*args):
    """svn_wc_status_set_repos_locks(void * set_locks_baton, apr_hash_t locks, char const * repos_root, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_status_set_repos_locks(*args)
def svn_wc_copy3(*args):
    """
    svn_wc_copy3(svn_wc_context_t * wc_ctx, char const * src_abspath, char const * dst_abspath, svn_boolean_t metadata_only,
    svn_cancel_func_t cancel_func, svn_wc_notify_func2_t notify_func,
    apr_pool_t scratch_pool) -> svn_error_t
    """
    return _wc.svn_wc_copy3(*args)
def svn_wc_copy2(*args):
    """
    svn_wc_copy2(char const * src, svn_wc_adm_access_t * dst_parent, char const * dst_basename, svn_cancel_func_t cancel_func,
    svn_wc_notify_func2_t notify_func, apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_copy2(*args)
def svn_wc_copy(*args):
    """
    svn_wc_copy(char const * src, svn_wc_adm_access_t * dst_parent, char const * dst_basename, svn_cancel_func_t cancel_func,
    svn_wc_notify_func_t notify_func, apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_copy(*args)
def svn_wc_move(*args):
    """
    svn_wc_move(svn_wc_context_t * wc_ctx, char const * src_abspath, char const * dst_abspath, svn_boolean_t metadata_only,
    svn_cancel_func_t cancel_func, svn_wc_notify_func2_t notify_func,
    apr_pool_t scratch_pool) -> svn_error_t
    """
    return _wc.svn_wc_move(*args)
def svn_wc_delete4(*args):
    """
    svn_wc_delete4(svn_wc_context_t * wc_ctx, char const * local_abspath, svn_boolean_t keep_local,
    svn_boolean_t delete_unversioned_target, svn_cancel_func_t cancel_func, svn_wc_notify_func2_t notify_func,
    apr_pool_t scratch_pool) -> svn_error_t
    """
    return _wc.svn_wc_delete4(*args)
def svn_wc_delete3(*args):
    """
    svn_wc_delete3(char const * path, svn_wc_adm_access_t * adm_access, svn_cancel_func_t cancel_func,
    svn_wc_notify_func2_t notify_func, svn_boolean_t keep_local, apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_delete3(*args)
def svn_wc_delete2(*args):
    """
    svn_wc_delete2(char const * path, svn_wc_adm_access_t * adm_access, svn_cancel_func_t cancel_func,
    svn_wc_notify_func2_t notify_func, apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_delete2(*args)
def svn_wc_delete(*args):
    """
    svn_wc_delete(char const * path, svn_wc_adm_access_t * adm_access, svn_cancel_func_t cancel_func,
    svn_wc_notify_func_t notify_func, apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_delete(*args)
# Thin module-level wrappers: each forwards *args unchanged to the C
# function of the same name in the _wc extension module.  The docstrings
# (generated by SWIG) record the underlying C signatures.
def svn_wc_add_from_disk2(*args):
    """
    svn_wc_add_from_disk2(svn_wc_context_t * wc_ctx, char const * local_abspath, apr_hash_t props, svn_wc_notify_func2_t notify_func,
    apr_pool_t scratch_pool) -> svn_error_t
    """
    return _wc.svn_wc_add_from_disk2(*args)
def svn_wc_add_from_disk(*args):
    """
    svn_wc_add_from_disk(svn_wc_context_t * wc_ctx, char const * local_abspath, svn_wc_notify_func2_t notify_func,
    apr_pool_t scratch_pool) -> svn_error_t
    """
    return _wc.svn_wc_add_from_disk(*args)
def svn_wc_add4(*args):
    """
    svn_wc_add4(svn_wc_context_t * wc_ctx, char const * local_abspath, svn_depth_t depth, char const * copyfrom_url,
    svn_revnum_t copyfrom_rev, svn_cancel_func_t cancel_func,
    svn_wc_notify_func2_t notify_func, apr_pool_t scratch_pool) -> svn_error_t
    """
    return _wc.svn_wc_add4(*args)
def svn_wc_add3(*args):
    """
    svn_wc_add3(char const * path, svn_wc_adm_access_t * parent_access, svn_depth_t depth, char const * copyfrom_url,
    svn_revnum_t copyfrom_rev, svn_cancel_func_t cancel_func,
    svn_wc_notify_func2_t notify_func, apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_add3(*args)
def svn_wc_add2(*args):
    """
    svn_wc_add2(char const * path, svn_wc_adm_access_t * parent_access, char const * copyfrom_url,
    svn_revnum_t copyfrom_rev, svn_cancel_func_t cancel_func, svn_wc_notify_func2_t notify_func,
    apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_add2(*args)
def svn_wc_add(*args):
    """
    svn_wc_add(char const * path, svn_wc_adm_access_t * parent_access, char const * copyfrom_url,
    svn_revnum_t copyfrom_rev, svn_cancel_func_t cancel_func, svn_wc_notify_func_t notify_func,
    apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_add(*args)
def svn_wc_add_repos_file4(*args):
    """
    svn_wc_add_repos_file4(svn_wc_context_t * wc_ctx, char const * local_abspath, svn_stream_t * new_base_contents,
    svn_stream_t * new_contents, apr_hash_t new_base_props, apr_hash_t new_props,
    char const * copyfrom_url, svn_revnum_t copyfrom_rev, svn_cancel_func_t cancel_func,
    apr_pool_t scratch_pool) -> svn_error_t
    """
    return _wc.svn_wc_add_repos_file4(*args)
def svn_wc_add_repos_file3(*args):
    """
    svn_wc_add_repos_file3(char const * dst_path, svn_wc_adm_access_t * adm_access, svn_stream_t * new_base_contents,
    svn_stream_t * new_contents, apr_hash_t new_base_props, apr_hash_t new_props,
    char const * copyfrom_url, svn_revnum_t copyfrom_rev, svn_cancel_func_t cancel_func,
    svn_wc_notify_func2_t notify_func, apr_pool_t scratch_pool) -> svn_error_t
    """
    return _wc.svn_wc_add_repos_file3(*args)
def svn_wc_add_repos_file2(*args):
    """
    svn_wc_add_repos_file2(char const * dst_path, svn_wc_adm_access_t * adm_access, char const * new_text_base_path,
    char const * new_text_path, apr_hash_t new_base_props, apr_hash_t new_props,
    char const * copyfrom_url, svn_revnum_t copyfrom_rev, apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_add_repos_file2(*args)
def svn_wc_add_repos_file(*args):
    """
    svn_wc_add_repos_file(char const * dst_path, svn_wc_adm_access_t * adm_access, char const * new_text_path,
    apr_hash_t new_props, char const * copyfrom_url, svn_revnum_t copyfrom_rev,
    apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_add_repos_file(*args)
def svn_wc_remove_from_revision_control2(*args):
    """
    svn_wc_remove_from_revision_control2(svn_wc_context_t * wc_ctx, char const * local_abspath, svn_boolean_t destroy_wf,
    svn_boolean_t instant_error, svn_cancel_func_t cancel_func, apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_remove_from_revision_control2(*args)
def svn_wc_remove_from_revision_control(*args):
    """
    svn_wc_remove_from_revision_control(svn_wc_adm_access_t * adm_access, char const * name, svn_boolean_t destroy_wf, svn_boolean_t instant_error,
    svn_cancel_func_t cancel_func, apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_remove_from_revision_control(*args)
def svn_wc_resolved_conflict5(*args):
    """
    svn_wc_resolved_conflict5(svn_wc_context_t * wc_ctx, char const * local_abspath, svn_depth_t depth, svn_boolean_t resolve_text,
    char const * resolve_prop, svn_boolean_t resolve_tree,
    svn_wc_conflict_choice_t conflict_choice, svn_cancel_func_t cancel_func, svn_wc_notify_func2_t notify_func,
    apr_pool_t scratch_pool) -> svn_error_t
    """
    return _wc.svn_wc_resolved_conflict5(*args)
def svn_wc_resolved_conflict4(*args):
    """
    svn_wc_resolved_conflict4(char const * path, svn_wc_adm_access_t * adm_access, svn_boolean_t resolve_text,
    svn_boolean_t resolve_props, svn_boolean_t resolve_tree, svn_depth_t depth,
    svn_wc_conflict_choice_t conflict_choice, svn_wc_notify_func2_t notify_func,
    svn_cancel_func_t cancel_func, apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_resolved_conflict4(*args)
def svn_wc_resolved_conflict3(*args):
    """
    svn_wc_resolved_conflict3(char const * path, svn_wc_adm_access_t * adm_access, svn_boolean_t resolve_text,
    svn_boolean_t resolve_props, svn_depth_t depth, svn_wc_conflict_choice_t conflict_choice,
    svn_wc_notify_func2_t notify_func, svn_cancel_func_t cancel_func,
    apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_resolved_conflict3(*args)
def svn_wc_resolved_conflict2(*args):
    """
    svn_wc_resolved_conflict2(char const * path, svn_wc_adm_access_t * adm_access, svn_boolean_t resolve_text,
    svn_boolean_t resolve_props, svn_boolean_t recurse, svn_wc_notify_func2_t notify_func,
    svn_cancel_func_t cancel_func, apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_resolved_conflict2(*args)
def svn_wc_resolved_conflict(*args):
    """
    svn_wc_resolved_conflict(char const * path, svn_wc_adm_access_t * adm_access, svn_boolean_t resolve_text,
    svn_boolean_t resolve_props, svn_boolean_t recurse, svn_wc_notify_func_t notify_func,
    apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_resolved_conflict(*args)
def svn_wc_committed_queue_create(*args):
    """svn_wc_committed_queue_create(apr_pool_t pool) -> svn_wc_committed_queue_t *"""
    return _wc.svn_wc_committed_queue_create(*args)
def svn_wc_queue_committed3(*args):
    """
    svn_wc_queue_committed3(svn_wc_committed_queue_t * queue, svn_wc_context_t * wc_ctx, char const * local_abspath,
    svn_boolean_t recurse, apr_array_header_t wcprop_changes, svn_boolean_t remove_lock,
    svn_boolean_t remove_changelist, svn_checksum_t sha1_checksum,
    apr_pool_t scratch_pool) -> svn_error_t
    """
    return _wc.svn_wc_queue_committed3(*args)
def svn_wc_queue_committed2(*args):
    """
    svn_wc_queue_committed2(svn_wc_committed_queue_t * queue, char const * path, svn_wc_adm_access_t * adm_access,
    svn_boolean_t recurse, apr_array_header_t wcprop_changes, svn_boolean_t remove_lock,
    svn_boolean_t remove_changelist, svn_checksum_t md5_checksum,
    apr_pool_t scratch_pool) -> svn_error_t
    """
    return _wc.svn_wc_queue_committed2(*args)
def svn_wc_queue_committed(*args):
    """
    svn_wc_queue_committed(char const * path, svn_wc_adm_access_t * adm_access, svn_boolean_t recurse, apr_array_header_t wcprop_changes,
    svn_boolean_t remove_lock, svn_boolean_t remove_changelist,
    unsigned char const * digest, apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_queue_committed(*args)
def svn_wc_process_committed_queue2(*args):
    """
    svn_wc_process_committed_queue2(svn_wc_committed_queue_t * queue, svn_wc_context_t * wc_ctx, svn_revnum_t new_revnum,
    char const * rev_date, char const * rev_author, svn_cancel_func_t cancel_func,
    apr_pool_t scratch_pool) -> svn_error_t
    """
    return _wc.svn_wc_process_committed_queue2(*args)
def svn_wc_process_committed_queue(*args):
    """
    svn_wc_process_committed_queue(svn_wc_committed_queue_t * queue, svn_wc_adm_access_t * adm_access, svn_revnum_t new_revnum,
    char const * rev_date, char const * rev_author, apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_process_committed_queue(*args)
def svn_wc_process_committed4(*args):
    """
    svn_wc_process_committed4(char const * path, svn_wc_adm_access_t * adm_access, svn_boolean_t recurse, svn_revnum_t new_revnum,
    char const * rev_date, char const * rev_author, apr_array_header_t wcprop_changes,
    svn_boolean_t remove_lock, svn_boolean_t remove_changelist,
    unsigned char const * digest, apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_process_committed4(*args)
def svn_wc_process_committed3(*args):
    """
    svn_wc_process_committed3(char const * path, svn_wc_adm_access_t * adm_access, svn_boolean_t recurse, svn_revnum_t new_revnum,
    char const * rev_date, char const * rev_author, apr_array_header_t wcprop_changes,
    svn_boolean_t remove_lock, unsigned char const * digest,
    apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_process_committed3(*args)
def svn_wc_process_committed2(*args):
    """
    svn_wc_process_committed2(char const * path, svn_wc_adm_access_t * adm_access, svn_boolean_t recurse, svn_revnum_t new_revnum,
    char const * rev_date, char const * rev_author, apr_array_header_t wcprop_changes,
    svn_boolean_t remove_lock, apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_process_committed2(*args)
def svn_wc_process_committed(*args):
    """
    svn_wc_process_committed(char const * path, svn_wc_adm_access_t * adm_access, svn_boolean_t recurse, svn_revnum_t new_revnum,
    char const * rev_date, char const * rev_author, apr_array_header_t wcprop_changes,
    apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_process_committed(*args)
# Thin module-level wrappers: each forwards *args unchanged to the C
# function of the same name in the _wc extension module.  The docstrings
# (generated by SWIG) record the underlying C signatures.
def svn_wc_crawl_revisions5(*args):
    """
    svn_wc_crawl_revisions5(svn_wc_context_t * wc_ctx, char const * local_abspath, svn_ra_reporter3_t reporter,
    void * report_baton, svn_boolean_t restore_files, svn_depth_t depth, svn_boolean_t honor_depth_exclude,
    svn_boolean_t depth_compatibility_trick, svn_boolean_t use_commit_times,
    svn_cancel_func_t cancel_func, svn_wc_notify_func2_t notify_func,
    apr_pool_t scratch_pool) -> svn_error_t
    """
    return _wc.svn_wc_crawl_revisions5(*args)
def svn_wc_crawl_revisions4(*args):
    """
    svn_wc_crawl_revisions4(char const * path, svn_wc_adm_access_t * adm_access, svn_ra_reporter3_t reporter,
    void * report_baton, svn_boolean_t restore_files, svn_depth_t depth, svn_boolean_t honor_depth_exclude,
    svn_boolean_t depth_compatibility_trick, svn_boolean_t use_commit_times,
    svn_wc_notify_func2_t notify_func, svn_wc_traversal_info_t * traversal_info,
    apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_crawl_revisions4(*args)
def svn_wc_crawl_revisions3(*args):
    """
    svn_wc_crawl_revisions3(char const * path, svn_wc_adm_access_t * adm_access, svn_ra_reporter3_t reporter,
    void * report_baton, svn_boolean_t restore_files, svn_depth_t depth, svn_boolean_t depth_compatibility_trick,
    svn_boolean_t use_commit_times, svn_wc_notify_func2_t notify_func,
    svn_wc_traversal_info_t * traversal_info, apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_crawl_revisions3(*args)
def svn_wc_crawl_revisions2(*args):
    """
    svn_wc_crawl_revisions2(char const * path, svn_wc_adm_access_t * adm_access, svn_ra_reporter2_t reporter,
    svn_boolean_t restore_files, svn_boolean_t recurse, svn_boolean_t use_commit_times,
    svn_wc_notify_func2_t notify_func, svn_wc_traversal_info_t * traversal_info,
    apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_crawl_revisions2(*args)
def svn_wc_crawl_revisions(*args):
    """
    svn_wc_crawl_revisions(char const * path, svn_wc_adm_access_t * adm_access, svn_ra_reporter_t reporter,
    void * report_baton, svn_boolean_t restore_files, svn_boolean_t recurse, svn_boolean_t use_commit_times,
    svn_wc_notify_func_t notify_func, svn_wc_traversal_info_t * traversal_info,
    apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_crawl_revisions(*args)
def svn_wc_check_root(*args):
    """svn_wc_check_root(svn_wc_context_t * wc_ctx, char const * local_abspath, apr_pool_t scratch_pool) -> svn_error_t"""
    return _wc.svn_wc_check_root(*args)
def svn_wc_is_wc_root2(*args):
    """svn_wc_is_wc_root2(svn_wc_context_t * wc_ctx, char const * local_abspath, apr_pool_t scratch_pool) -> svn_error_t"""
    return _wc.svn_wc_is_wc_root2(*args)
def svn_wc_is_wc_root(*args):
    """svn_wc_is_wc_root(char const * path, svn_wc_adm_access_t * adm_access, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_is_wc_root(*args)
def svn_wc_get_actual_target2(*args):
    """svn_wc_get_actual_target2(svn_wc_context_t * wc_ctx, char const * path, apr_pool_t result_pool, apr_pool_t scratch_pool) -> svn_error_t"""
    return _wc.svn_wc_get_actual_target2(*args)
def svn_wc_get_actual_target(*args):
    """svn_wc_get_actual_target(char const * path, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_get_actual_target(*args)
def svn_wc_get_update_editor4(*args):
    """
    svn_wc_get_update_editor4(svn_wc_context_t * wc_ctx, char const * anchor_abspath, char const * target_basename,
    svn_boolean_t use_commit_times, svn_depth_t depth, svn_boolean_t depth_is_sticky,
    svn_boolean_t allow_unver_obstructions, svn_boolean_t adds_as_modification,
    svn_boolean_t server_performs_filtering, svn_boolean_t clean_checkout,
    char const * diff3_cmd, apr_array_header_t preserved_exts, svn_wc_dirents_func_t fetch_dirents_func,
    void * fetch_dirents_baton, svn_wc_conflict_resolver_func2_t conflict_func,
    void * conflict_baton, svn_wc_external_update_t external_func,
    void * external_baton, svn_cancel_func_t cancel_func,
    svn_wc_notify_func2_t notify_func, apr_pool_t result_pool, apr_pool_t scratch_pool) -> svn_error_t
    """
    return _wc.svn_wc_get_update_editor4(*args)
def svn_wc_get_update_editor3(*args):
    """
    svn_wc_get_update_editor3(svn_wc_adm_access_t * anchor, char const * target, svn_boolean_t use_commit_times,
    svn_depth_t depth, svn_boolean_t depth_is_sticky, svn_boolean_t allow_unver_obstructions,
    svn_wc_notify_func2_t notify_func, svn_cancel_func_t cancel_func,
    svn_wc_conflict_resolver_func_t conflict_func, void * conflict_baton,
    svn_wc_get_file_t fetch_func, void * fetch_baton, char const * diff3_cmd,
    apr_array_header_t preserved_exts, svn_wc_traversal_info_t * ti, apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_get_update_editor3(*args)
def svn_wc_get_update_editor2(*args):
    """
    svn_wc_get_update_editor2(svn_wc_adm_access_t * anchor, char const * target, svn_boolean_t use_commit_times,
    svn_boolean_t recurse, svn_wc_notify_func2_t notify_func, svn_cancel_func_t cancel_func,
    char const * diff3_cmd, svn_wc_traversal_info_t * ti, apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_get_update_editor2(*args)
def svn_wc_get_update_editor(*args):
    """
    svn_wc_get_update_editor(svn_wc_adm_access_t * anchor, char const * target, svn_boolean_t use_commit_times,
    svn_boolean_t recurse, svn_wc_notify_func_t notify_func, svn_cancel_func_t cancel_func,
    char const * diff3_cmd, svn_wc_traversal_info_t * ti, apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_get_update_editor(*args)
def svn_wc_get_switch_editor4(*args):
    """
    svn_wc_get_switch_editor4(svn_wc_context_t * wc_ctx, char const * anchor_abspath, char const * target_basename,
    char const * switch_url, svn_boolean_t use_commit_times, svn_depth_t depth,
    svn_boolean_t depth_is_sticky, svn_boolean_t allow_unver_obstructions,
    svn_boolean_t server_performs_filtering, char const * diff3_cmd, apr_array_header_t preserved_exts,
    svn_wc_dirents_func_t fetch_dirents_func, void * fetch_dirents_baton,
    svn_wc_conflict_resolver_func2_t conflict_func, void * conflict_baton,
    svn_wc_external_update_t external_func, void * external_baton,
    svn_cancel_func_t cancel_func, svn_wc_notify_func2_t notify_func, apr_pool_t result_pool,
    apr_pool_t scratch_pool) -> svn_error_t
    """
    return _wc.svn_wc_get_switch_editor4(*args)
def svn_wc_get_switch_editor3(*args):
    """
    svn_wc_get_switch_editor3(svn_wc_adm_access_t * anchor, char const * target, char const * switch_url, svn_boolean_t use_commit_times,
    svn_depth_t depth, svn_boolean_t depth_is_sticky,
    svn_boolean_t allow_unver_obstructions, svn_wc_notify_func2_t notify_func,
    svn_cancel_func_t cancel_func, svn_wc_conflict_resolver_func_t conflict_func,
    void * conflict_baton, char const * diff3_cmd, apr_array_header_t preserved_exts,
    svn_wc_traversal_info_t * ti, apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_get_switch_editor3(*args)
def svn_wc_get_switch_editor2(*args):
    """
    svn_wc_get_switch_editor2(svn_wc_adm_access_t * anchor, char const * target, char const * switch_url, svn_boolean_t use_commit_times,
    svn_boolean_t recurse, svn_wc_notify_func2_t notify_func,
    svn_cancel_func_t cancel_func, char const * diff3_cmd, svn_wc_traversal_info_t * ti,
    apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_get_switch_editor2(*args)
def svn_wc_get_switch_editor(*args):
    """
    svn_wc_get_switch_editor(svn_wc_adm_access_t * anchor, char const * target, char const * switch_url, svn_boolean_t use_commit_times,
    svn_boolean_t recurse, svn_wc_notify_func_t notify_func,
    svn_cancel_func_t cancel_func, char const * diff3_cmd, svn_wc_traversal_info_t * ti,
    apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_get_switch_editor(*args)
def svn_wc_prop_list2(*args):
    """svn_wc_prop_list2(svn_wc_context_t * wc_ctx, char const * local_abspath, apr_pool_t result_pool, apr_pool_t scratch_pool) -> svn_error_t"""
    return _wc.svn_wc_prop_list2(*args)
def svn_wc_prop_list(*args):
    """svn_wc_prop_list(char const * path, svn_wc_adm_access_t * adm_access, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_prop_list(*args)
def svn_wc_get_pristine_props(*args):
    """svn_wc_get_pristine_props(svn_wc_context_t * wc_ctx, char const * local_abspath, apr_pool_t result_pool, apr_pool_t scratch_pool) -> svn_error_t"""
    return _wc.svn_wc_get_pristine_props(*args)
def svn_wc_prop_get2(*args):
    """
    svn_wc_prop_get2(svn_wc_context_t * wc_ctx, char const * local_abspath, char const * name, apr_pool_t result_pool,
    apr_pool_t scratch_pool) -> svn_error_t
    """
    return _wc.svn_wc_prop_get2(*args)
def svn_wc_prop_get(*args):
    """svn_wc_prop_get(char const * name, char const * path, svn_wc_adm_access_t * adm_access, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_prop_get(*args)
def svn_wc_prop_set4(*args):
    """
    svn_wc_prop_set4(svn_wc_context_t * wc_ctx, char const * local_abspath, char const * name, svn_string_t const * value,
    svn_depth_t depth, svn_boolean_t skip_checks, apr_array_header_t changelist_filter,
    svn_cancel_func_t cancel_func, svn_wc_notify_func2_t notify_func,
    apr_pool_t scratch_pool) -> svn_error_t
    """
    return _wc.svn_wc_prop_set4(*args)
def svn_wc_prop_set3(*args):
    """
    svn_wc_prop_set3(char const * name, svn_string_t const * value, char const * path, svn_wc_adm_access_t * adm_access,
    svn_boolean_t skip_checks, svn_wc_notify_func2_t notify_func,
    apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_prop_set3(*args)
def svn_wc_prop_set2(*args):
    """
    svn_wc_prop_set2(char const * name, svn_string_t const * value, char const * path, svn_wc_adm_access_t * adm_access,
    svn_boolean_t skip_checks, apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_prop_set2(*args)
def svn_wc_prop_set(*args):
    """
    svn_wc_prop_set(char const * name, svn_string_t const * value, char const * path, svn_wc_adm_access_t * adm_access,
    apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_prop_set(*args)
def svn_wc_is_normal_prop(*args):
    """svn_wc_is_normal_prop(char const * name) -> svn_boolean_t"""
    return _wc.svn_wc_is_normal_prop(*args)
def svn_wc_is_wc_prop(*args):
    """svn_wc_is_wc_prop(char const * name) -> svn_boolean_t"""
    return _wc.svn_wc_is_wc_prop(*args)
def svn_wc_is_entry_prop(*args):
    """svn_wc_is_entry_prop(char const * name) -> svn_boolean_t"""
    return _wc.svn_wc_is_entry_prop(*args)
def svn_wc_canonicalize_svn_prop(*args):
    """
    svn_wc_canonicalize_svn_prop(char const * propname, svn_string_t const * propval, char const * path, svn_node_kind_t kind,
    svn_boolean_t skip_some_checks, svn_wc_canonicalize_svn_prop_get_file_t prop_getter,
    void * getter_baton, apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_canonicalize_svn_prop(*args)
# SWIG-generated diff-editor wrappers (revisions 6 down to 1 of the same C
# API).  Each forwards *args to the matching _wc C binding.
def svn_wc_get_diff_editor6(*args):
    """
    svn_wc_get_diff_editor6(svn_wc_context_t * wc_ctx, char const * anchor_abspath, char const * target, svn_depth_t depth,
        svn_boolean_t ignore_ancestry, svn_boolean_t show_copies_as_adds,
        svn_boolean_t use_git_diff_format, svn_boolean_t use_text_base, svn_boolean_t reverse_order,
        svn_boolean_t server_performs_filtering, apr_array_header_t changelist_filter,
        svn_wc_diff_callbacks4_t callbacks, void * callback_baton,
        svn_cancel_func_t cancel_func, apr_pool_t result_pool, apr_pool_t scratch_pool) -> svn_error_t
    """
    return _wc.svn_wc_get_diff_editor6(*args)

def svn_wc_get_diff_editor5(*args):
    """
    svn_wc_get_diff_editor5(svn_wc_adm_access_t * anchor, char const * target, svn_wc_diff_callbacks3_t callbacks,
        void * callback_baton, svn_depth_t depth, svn_boolean_t ignore_ancestry,
        svn_boolean_t use_text_base, svn_boolean_t reverse_order, svn_cancel_func_t cancel_func,
        apr_array_header_t changelist_filter, apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_get_diff_editor5(*args)

def svn_wc_get_diff_editor4(*args):
    """
    svn_wc_get_diff_editor4(svn_wc_adm_access_t * anchor, char const * target, svn_wc_diff_callbacks2_t callbacks,
        svn_depth_t depth, svn_boolean_t ignore_ancestry, svn_boolean_t use_text_base,
        svn_boolean_t reverse_order, svn_cancel_func_t cancel_func, apr_array_header_t changelist_filter,
        apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_get_diff_editor4(*args)

def svn_wc_get_diff_editor3(*args):
    """
    svn_wc_get_diff_editor3(svn_wc_adm_access_t * anchor, char const * target, svn_wc_diff_callbacks2_t callbacks,
        svn_boolean_t recurse, svn_boolean_t ignore_ancestry, svn_boolean_t use_text_base,
        svn_boolean_t reverse_order, svn_cancel_func_t cancel_func, apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_get_diff_editor3(*args)

def svn_wc_get_diff_editor2(*args):
    """
    svn_wc_get_diff_editor2(svn_wc_adm_access_t * anchor, char const * target, svn_wc_diff_callbacks_t callbacks,
        void * callback_baton, svn_boolean_t recurse, svn_boolean_t ignore_ancestry,
        svn_boolean_t use_text_base, svn_boolean_t reverse_order, svn_cancel_func_t cancel_func,
        apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_get_diff_editor2(*args)

def svn_wc_get_diff_editor(*args):
    """
    svn_wc_get_diff_editor(svn_wc_adm_access_t * anchor, char const * target, svn_wc_diff_callbacks_t callbacks,
        void * callback_baton, svn_boolean_t recurse, svn_boolean_t use_text_base,
        svn_boolean_t reverse_order, svn_cancel_func_t cancel_func, apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_get_diff_editor(*args)
# SWIG-generated working-copy diff wrappers.  Each forwards *args to the
# matching _wc C binding.
def svn_wc_diff6(*args):
    """
    svn_wc_diff6(svn_wc_context_t * wc_ctx, char const * target_abspath, svn_wc_diff_callbacks4_t callbacks,
        void * callback_baton, svn_depth_t depth, svn_boolean_t ignore_ancestry,
        svn_boolean_t show_copies_as_adds, svn_boolean_t use_git_diff_format,
        apr_array_header_t changelist_filter, svn_cancel_func_t cancel_func, apr_pool_t scratch_pool) -> svn_error_t
    """
    return _wc.svn_wc_diff6(*args)

def svn_wc_diff5(*args):
    """
    svn_wc_diff5(svn_wc_adm_access_t * anchor, char const * target, svn_wc_diff_callbacks3_t callbacks,
        void * callback_baton, svn_depth_t depth, svn_boolean_t ignore_ancestry,
        apr_array_header_t changelist_filter, apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_diff5(*args)

def svn_wc_diff4(*args):
    """
    svn_wc_diff4(svn_wc_adm_access_t * anchor, char const * target, svn_wc_diff_callbacks2_t callbacks,
        svn_depth_t depth, svn_boolean_t ignore_ancestry, apr_array_header_t changelist_filter,
        apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_diff4(*args)

def svn_wc_diff3(*args):
    """
    svn_wc_diff3(svn_wc_adm_access_t * anchor, char const * target, svn_wc_diff_callbacks2_t callbacks,
        svn_boolean_t recurse, svn_boolean_t ignore_ancestry, apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_diff3(*args)

def svn_wc_diff2(*args):
    """
    svn_wc_diff2(svn_wc_adm_access_t * anchor, char const * target, svn_wc_diff_callbacks_t callbacks,
        void * callback_baton, svn_boolean_t recurse, svn_boolean_t ignore_ancestry,
        apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_diff2(*args)

def svn_wc_diff(*args):
    """
    svn_wc_diff(svn_wc_adm_access_t * anchor, char const * target, svn_wc_diff_callbacks_t callbacks,
        void * callback_baton, svn_boolean_t recurse, apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_diff(*args)

def svn_wc_get_prop_diffs2(*args):
    """svn_wc_get_prop_diffs2(svn_wc_context_t * wc_ctx, char const * local_abspath, apr_pool_t result_pool, apr_pool_t scratch_pool) -> svn_error_t"""
    return _wc.svn_wc_get_prop_diffs2(*args)

def svn_wc_get_prop_diffs(*args):
    """svn_wc_get_prop_diffs(char const * path, svn_wc_adm_access_t * adm_access, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_get_prop_diffs(*args)
# Merge outcome constants re-exported from the C extension (values of the
# svn_wc_merge_outcome_t enum).
svn_wc_merge_unchanged = _wc.svn_wc_merge_unchanged
svn_wc_merge_merged = _wc.svn_wc_merge_merged
svn_wc_merge_conflict = _wc.svn_wc_merge_conflict
svn_wc_merge_no_merge = _wc.svn_wc_merge_no_merge
# SWIG-generated merge wrappers (text merge and property merge, several API
# revisions).  Each forwards *args to the matching _wc C binding.
def svn_wc_merge5(*args):
    """
    svn_wc_merge5(enum svn_wc_notify_state_t * merge_props_state, svn_wc_context_t * wc_ctx, char const * left_abspath,
        char const * right_abspath, char const * target_abspath,
        char const * left_label, char const * right_label, char const * target_label,
        svn_wc_conflict_version_t left_version, svn_wc_conflict_version_t right_version,
        svn_boolean_t dry_run, char const * diff3_cmd, apr_array_header_t merge_options,
        apr_hash_t original_props, apr_array_header_t prop_diff,
        svn_wc_conflict_resolver_func2_t conflict_func, void * conflict_baton, svn_cancel_func_t cancel_func,
        apr_pool_t scratch_pool) -> svn_error_t
    """
    return _wc.svn_wc_merge5(*args)

def svn_wc_merge4(*args):
    """
    svn_wc_merge4(svn_wc_context_t * wc_ctx, char const * left_abspath, char const * right_abspath,
        char const * target_abspath, char const * left_label, char const * right_label,
        char const * target_label, svn_wc_conflict_version_t left_version, svn_wc_conflict_version_t right_version,
        svn_boolean_t dry_run, char const * diff3_cmd,
        apr_array_header_t merge_options, apr_array_header_t prop_diff,
        svn_wc_conflict_resolver_func2_t conflict_func, void * conflict_baton, svn_cancel_func_t cancel_func,
        apr_pool_t scratch_pool) -> svn_error_t
    """
    return _wc.svn_wc_merge4(*args)

def svn_wc_merge3(*args):
    """
    svn_wc_merge3(char const * left, char const * right, char const * merge_target, svn_wc_adm_access_t * adm_access,
        char const * left_label, char const * right_label, char const * target_label,
        svn_boolean_t dry_run, char const * diff3_cmd, apr_array_header_t merge_options,
        apr_array_header_t prop_diff, svn_wc_conflict_resolver_func_t conflict_func,
        void * conflict_baton, apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_merge3(*args)

def svn_wc_merge2(*args):
    """
    svn_wc_merge2(char const * left, char const * right, char const * merge_target, svn_wc_adm_access_t * adm_access,
        char const * left_label, char const * right_label, char const * target_label,
        svn_boolean_t dry_run, char const * diff3_cmd, apr_array_header_t merge_options,
        apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_merge2(*args)

def svn_wc_merge(*args):
    """
    svn_wc_merge(char const * left, char const * right, char const * merge_target, svn_wc_adm_access_t * adm_access,
        char const * left_label, char const * right_label, char const * target_label,
        svn_boolean_t dry_run, char const * diff3_cmd, apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_merge(*args)

def svn_wc_merge_props3(*args):
    """
    svn_wc_merge_props3(svn_wc_notify_state_t * state, svn_wc_context_t * wc_ctx, char const * local_abspath,
        svn_wc_conflict_version_t left_version, svn_wc_conflict_version_t right_version,
        apr_hash_t baseprops, apr_array_header_t propchanges, svn_boolean_t dry_run,
        svn_wc_conflict_resolver_func2_t conflict_func, void * conflict_baton,
        svn_cancel_func_t cancel_func, apr_pool_t scratch_pool) -> svn_error_t
    """
    return _wc.svn_wc_merge_props3(*args)

def svn_wc_merge_props2(*args):
    """
    svn_wc_merge_props2(svn_wc_notify_state_t * state, char const * path, svn_wc_adm_access_t * adm_access,
        apr_hash_t baseprops, apr_array_header_t propchanges, svn_boolean_t base_merge,
        svn_boolean_t dry_run, svn_wc_conflict_resolver_func_t conflict_func,
        void * conflict_baton, apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_merge_props2(*args)

def svn_wc_merge_props(*args):
    """
    svn_wc_merge_props(svn_wc_notify_state_t * state, char const * path, svn_wc_adm_access_t * adm_access,
        apr_hash_t baseprops, apr_array_header_t propchanges, svn_boolean_t base_merge,
        svn_boolean_t dry_run, apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_merge_props(*args)

def svn_wc_merge_prop_diffs(*args):
    """
    svn_wc_merge_prop_diffs(svn_wc_notify_state_t * state, char const * path, svn_wc_adm_access_t * adm_access,
        apr_array_header_t propchanges, svn_boolean_t base_merge, svn_boolean_t dry_run,
        apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_merge_prop_diffs(*args)
# SWIG-generated pristine-content, cleanup and upgrade wrappers.  Each
# forwards *args to the matching _wc C binding.
def svn_wc_get_pristine_contents2(*args):
    """svn_wc_get_pristine_contents2(svn_wc_context_t * wc_ctx, char const * local_abspath, apr_pool_t result_pool, apr_pool_t scratch_pool) -> svn_error_t"""
    return _wc.svn_wc_get_pristine_contents2(*args)

def svn_wc_get_pristine_contents(*args):
    """svn_wc_get_pristine_contents(char const * path, apr_pool_t result_pool, apr_pool_t scratch_pool) -> svn_error_t"""
    return _wc.svn_wc_get_pristine_contents(*args)

def svn_wc_get_pristine_copy_path(*args):
    """svn_wc_get_pristine_copy_path(char const * path, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_get_pristine_copy_path(*args)

def svn_wc_cleanup3(*args):
    """
    svn_wc_cleanup3(svn_wc_context_t * wc_ctx, char const * local_abspath, svn_cancel_func_t cancel_func,
        apr_pool_t scratch_pool) -> svn_error_t
    """
    return _wc.svn_wc_cleanup3(*args)

def svn_wc_cleanup2(*args):
    """svn_wc_cleanup2(char const * path, char const * diff3_cmd, svn_cancel_func_t cancel_func, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_cleanup2(*args)

def svn_wc_cleanup(*args):
    """
    svn_wc_cleanup(char const * path, svn_wc_adm_access_t * optional_adm_access, char const * diff3_cmd,
        svn_cancel_func_t cancel_func, apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_cleanup(*args)

def svn_wc_upgrade(*args):
    """
    svn_wc_upgrade(svn_wc_context_t * wc_ctx, char const * local_abspath, svn_wc_upgrade_get_repos_info_t repos_info_func,
        void * repos_info_baton, svn_cancel_func_t cancel_func,
        svn_wc_notify_func2_t notify_func, apr_pool_t scratch_pool) -> svn_error_t
    """
    return _wc.svn_wc_upgrade(*args)
# SWIG-generated relocate wrappers.  Note `_from`: SWIG renames the C
# parameter `from` because it is a Python keyword.
def svn_wc_relocate4(*args):
    """
    svn_wc_relocate4(svn_wc_context_t * wc_ctx, char const * wcroot_abspath, char const * _from, char const * to,
        svn_wc_relocation_validator3_t validator, void * validator_baton,
        apr_pool_t scratch_pool) -> svn_error_t
    """
    return _wc.svn_wc_relocate4(*args)

def svn_wc_relocate3(*args):
    """
    svn_wc_relocate3(char const * path, svn_wc_adm_access_t * adm_access, char const * _from, char const * to,
        svn_boolean_t recurse, svn_wc_relocation_validator3_t validator, void * validator_baton,
        apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_relocate3(*args)

def svn_wc_relocate2(*args):
    """
    svn_wc_relocate2(char const * path, svn_wc_adm_access_t * adm_access, char const * _from, char const * to,
        svn_boolean_t recurse, svn_wc_relocation_validator2_t validator, void * validator_baton,
        apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_relocate2(*args)

def svn_wc_relocate(*args):
    """
    svn_wc_relocate(char const * path, svn_wc_adm_access_t * adm_access, char const * _from, char const * to,
        svn_boolean_t recurse, svn_wc_relocation_validator_t validator, void * validator_baton,
        apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_relocate(*args)
# SWIG-generated revert/restore and temp-file wrappers.  Each forwards *args
# to the matching _wc C binding.
def svn_wc_revert4(*args):
    """
    svn_wc_revert4(svn_wc_context_t * wc_ctx, char const * local_abspath, svn_depth_t depth, svn_boolean_t use_commit_times,
        apr_array_header_t changelist_filter, svn_cancel_func_t cancel_func,
        svn_wc_notify_func2_t notify_func, apr_pool_t scratch_pool) -> svn_error_t
    """
    return _wc.svn_wc_revert4(*args)

def svn_wc_revert3(*args):
    """
    svn_wc_revert3(char const * path, svn_wc_adm_access_t * parent_access, svn_depth_t depth, svn_boolean_t use_commit_times,
        apr_array_header_t changelist_filter, svn_cancel_func_t cancel_func,
        svn_wc_notify_func2_t notify_func, apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_revert3(*args)

def svn_wc_revert2(*args):
    """
    svn_wc_revert2(char const * path, svn_wc_adm_access_t * parent_access, svn_boolean_t recursive,
        svn_boolean_t use_commit_times, svn_cancel_func_t cancel_func, svn_wc_notify_func2_t notify_func,
        apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_revert2(*args)

def svn_wc_revert(*args):
    """
    svn_wc_revert(char const * path, svn_wc_adm_access_t * parent_access, svn_boolean_t recursive,
        svn_boolean_t use_commit_times, svn_cancel_func_t cancel_func, svn_wc_notify_func_t notify_func,
        apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_revert(*args)

def svn_wc_restore(*args):
    """
    svn_wc_restore(svn_wc_context_t * wc_ctx, char const * local_abspath, svn_boolean_t use_commit_times,
        apr_pool_t scratch_pool) -> svn_error_t
    """
    return _wc.svn_wc_restore(*args)

def svn_wc_create_tmp_file2(*args):
    """svn_wc_create_tmp_file2(char const * path, svn_io_file_del_t delete_when, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_create_tmp_file2(*args)

def svn_wc_create_tmp_file(*args):
    """svn_wc_create_tmp_file(char const * path, svn_boolean_t delete_on_close, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_create_tmp_file(*args)
# SWIG-generated keyword/eol translation and delta-transmission wrappers.
# Each forwards *args to the matching _wc C binding.
def svn_wc_translated_file2(*args):
    """
    svn_wc_translated_file2(char const * src, char const * versioned_file, svn_wc_adm_access_t * adm_access,
        apr_uint32_t flags, apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_translated_file2(*args)

def svn_wc_translated_file(*args):
    """
    svn_wc_translated_file(char const * vfile, svn_wc_adm_access_t * adm_access, svn_boolean_t force_repair,
        apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_translated_file(*args)

def svn_wc_translated_stream(*args):
    """
    svn_wc_translated_stream(char const * path, char const * versioned_file, svn_wc_adm_access_t * adm_access,
        apr_uint32_t flags, apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_translated_stream(*args)

def svn_wc_transmit_text_deltas3(*args):
    """
    svn_wc_transmit_text_deltas3(svn_wc_context_t * wc_ctx, char const * local_abspath, svn_boolean_t fulltext, svn_delta_editor_t editor,
        void * file_baton, apr_pool_t result_pool, apr_pool_t scratch_pool) -> svn_error_t
    """
    return _wc.svn_wc_transmit_text_deltas3(*args)

def svn_wc_transmit_text_deltas2(*args):
    """
    svn_wc_transmit_text_deltas2(char const * path, svn_wc_adm_access_t * adm_access, svn_boolean_t fulltext, svn_delta_editor_t editor,
        void * file_baton, apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_transmit_text_deltas2(*args)

def svn_wc_transmit_text_deltas(*args):
    """
    svn_wc_transmit_text_deltas(char const * path, svn_wc_adm_access_t * adm_access, svn_boolean_t fulltext, svn_delta_editor_t editor,
        void * file_baton, apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_transmit_text_deltas(*args)

def svn_wc_transmit_prop_deltas2(*args):
    """
    svn_wc_transmit_prop_deltas2(svn_wc_context_t * wc_ctx, char const * local_abspath, svn_delta_editor_t editor,
        void * baton, apr_pool_t scratch_pool) -> svn_error_t
    """
    return _wc.svn_wc_transmit_prop_deltas2(*args)

def svn_wc_transmit_prop_deltas(*args):
    """
    svn_wc_transmit_prop_deltas(char const * path, svn_wc_adm_access_t * adm_access, svn_wc_entry_t entry, svn_delta_editor_t editor,
        void * baton, apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_transmit_prop_deltas(*args)
# SWIG-generated ignore-pattern and lock wrappers.  Each forwards *args to
# the matching _wc C binding.
def svn_wc_get_default_ignores(*args):
    """svn_wc_get_default_ignores(apr_hash_t config, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_get_default_ignores(*args)

def svn_wc_get_ignores2(*args):
    """
    svn_wc_get_ignores2(svn_wc_context_t * wc_ctx, char const * local_abspath, apr_hash_t config, apr_pool_t result_pool,
        apr_pool_t scratch_pool) -> svn_error_t
    """
    return _wc.svn_wc_get_ignores2(*args)

def svn_wc_get_ignores(*args):
    """svn_wc_get_ignores(apr_hash_t config, svn_wc_adm_access_t * adm_access, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_get_ignores(*args)

def svn_wc_match_ignore_list(*args):
    """svn_wc_match_ignore_list(char const * str, apr_array_header_t list, apr_pool_t pool) -> svn_boolean_t"""
    return _wc.svn_wc_match_ignore_list(*args)

def svn_wc_add_lock2(*args):
    """svn_wc_add_lock2(svn_wc_context_t * wc_ctx, char const * abspath, svn_lock_t lock, apr_pool_t scratch_pool) -> svn_error_t"""
    return _wc.svn_wc_add_lock2(*args)

def svn_wc_add_lock(*args):
    """svn_wc_add_lock(char const * path, svn_lock_t lock, svn_wc_adm_access_t * adm_access, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_add_lock(*args)

def svn_wc_remove_lock2(*args):
    """svn_wc_remove_lock2(svn_wc_context_t * wc_ctx, char const * local_abspath, apr_pool_t scratch_pool) -> svn_error_t"""
    return _wc.svn_wc_remove_lock2(*args)

def svn_wc_remove_lock(*args):
    """svn_wc_remove_lock(char const * path, svn_wc_adm_access_t * adm_access, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_remove_lock(*args)
class svn_wc_revision_status_t:
    """Proxy of C svn_wc_revision_status_t struct.

    Exposes the struct fields min_rev, max_rev, switched, modified and
    sparse_checkout through SWIG-generated accessors, and tracks the APR
    pool that owns the underlying C memory so stale accesses are caught.
    """
    # SWIG attribute dispatch tables mapping field names to C accessors.
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_wc_revision_status_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_wc_revision_status_t, name)
    __repr__ = _swig_repr
    __swig_setmethods__["min_rev"] = _wc.svn_wc_revision_status_t_min_rev_set
    __swig_getmethods__["min_rev"] = _wc.svn_wc_revision_status_t_min_rev_get
    __swig_setmethods__["max_rev"] = _wc.svn_wc_revision_status_t_max_rev_set
    __swig_getmethods__["max_rev"] = _wc.svn_wc_revision_status_t_max_rev_get
    __swig_setmethods__["switched"] = _wc.svn_wc_revision_status_t_switched_set
    __swig_getmethods__["switched"] = _wc.svn_wc_revision_status_t_switched_get
    __swig_setmethods__["modified"] = _wc.svn_wc_revision_status_t_modified_set
    __swig_getmethods__["modified"] = _wc.svn_wc_revision_status_t_modified_get
    __swig_setmethods__["sparse_checkout"] = _wc.svn_wc_revision_status_t_sparse_checkout_set
    __swig_getmethods__["sparse_checkout"] = _wc.svn_wc_revision_status_t_sparse_checkout_get
    def set_parent_pool(self, parent_pool=None):
        """Create a new proxy object for svn_wc_revision_status_t"""
        import libsvn.core, weakref
        # Default to the global application pool when none is supplied.
        self.__dict__["_parent_pool"] = \
            parent_pool or libsvn.core.application_pool
        # Watch the pool's liveness flag weakly so attribute access can
        # detect that the backing pool memory has been destroyed.
        if self.__dict__["_parent_pool"]:
            self.__dict__["_is_valid"] = weakref.ref(
                self.__dict__["_parent_pool"]._is_valid)
    def assert_valid(self):
        """Assert that this object is using valid pool memory"""
        if "_is_valid" in self.__dict__:
            assert self.__dict__["_is_valid"](), "Variable has already been deleted"
    def __getattr__(self, name):
        """Get an attribute from this object"""
        self.assert_valid()
        value = _swig_getattr(self, self.__class__, name)
        # Re-apply pool metadata recorded at set time onto the returned
        # wrapper so it is validity-checked as well.
        members = self.__dict__.get("_members")
        if members is not None:
            _copy_metadata_deep(value, members.get(name))
        _assert_valid_deep(value)
        return value
    def __setattr__(self, name, value):
        """Set an attribute on this object"""
        self.assert_valid()
        # Remember the Python-side value so metadata survives round trips.
        self.__dict__.setdefault("_members",{})[name] = value
        return _swig_setattr(self, self.__class__, name, value)
    def __init__(self):
        """__init__(svn_wc_revision_status_t self) -> svn_wc_revision_status_t"""
        this = _wc.new_svn_wc_revision_status_t()
        # Fix: the generated code used a bare `except:`, which also swallows
        # KeyboardInterrupt/SystemExit.  Narrow it to Exception — the only
        # expected failure here is the attribute lookup on a fresh instance
        # (newer SWIG emits exactly this form).
        try:
            self.this.append(this)
        except Exception:
            self.this = this
    __swig_destroy__ = _wc.delete_svn_wc_revision_status_t
    __del__ = lambda self: None
svn_wc_revision_status_t_swigregister = _wc.svn_wc_revision_status_t_swigregister
svn_wc_revision_status_t_swigregister(svn_wc_revision_status_t)
# SWIG-generated wrappers for revision status, changelists, tree cropping,
# exclusion and node-kind queries.  Each forwards *args to the _wc binding.
def svn_wc_revision_status2(*args):
    """
    svn_wc_revision_status2(svn_wc_context_t * wc_ctx, char const * local_abspath, char const * trail_url, svn_boolean_t committed,
        svn_cancel_func_t cancel_func, apr_pool_t result_pool,
        apr_pool_t scratch_pool) -> svn_error_t
    """
    return _wc.svn_wc_revision_status2(*args)

def svn_wc_revision_status(*args):
    """
    svn_wc_revision_status(char const * wc_path, char const * trail_url, svn_boolean_t committed, svn_cancel_func_t cancel_func,
        apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_revision_status(*args)

def svn_wc_set_changelist2(*args):
    """
    svn_wc_set_changelist2(svn_wc_context_t * wc_ctx, char const * local_abspath, char const * changelist, svn_depth_t depth,
        apr_array_header_t changelist_filter, svn_cancel_func_t cancel_func,
        svn_wc_notify_func2_t notify_func, apr_pool_t scratch_pool) -> svn_error_t
    """
    return _wc.svn_wc_set_changelist2(*args)

def svn_wc_set_changelist(*args):
    """
    svn_wc_set_changelist(char const * path, char const * changelist, svn_wc_adm_access_t * adm_access, svn_cancel_func_t cancel_func,
        svn_wc_notify_func2_t notify_func, apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_set_changelist(*args)

def svn_wc_get_changelists(*args):
    """
    svn_wc_get_changelists(svn_wc_context_t * wc_ctx, char const * local_abspath, svn_depth_t depth, apr_array_header_t changelist_filter,
        svn_changelist_receiver_t callback_func, void * callback_baton,
        svn_cancel_func_t cancel_func, apr_pool_t scratch_pool) -> svn_error_t
    """
    return _wc.svn_wc_get_changelists(*args)

def svn_wc_crop_tree2(*args):
    """
    svn_wc_crop_tree2(svn_wc_context_t * wc_ctx, char const * local_abspath, svn_depth_t depth, svn_cancel_func_t cancel_func,
        svn_wc_notify_func2_t notify_func, apr_pool_t scratch_pool) -> svn_error_t
    """
    return _wc.svn_wc_crop_tree2(*args)

def svn_wc_crop_tree(*args):
    """
    svn_wc_crop_tree(svn_wc_adm_access_t * anchor, char const * target, svn_depth_t depth, svn_wc_notify_func2_t notify_func,
        svn_cancel_func_t cancel_func, apr_pool_t pool) -> svn_error_t
    """
    return _wc.svn_wc_crop_tree(*args)

def svn_wc_exclude(*args):
    """
    svn_wc_exclude(svn_wc_context_t * wc_ctx, char const * local_abspath, svn_cancel_func_t cancel_func,
        svn_wc_notify_func2_t notify_func, apr_pool_t scratch_pool) -> svn_error_t
    """
    return _wc.svn_wc_exclude(*args)

def svn_wc_read_kind2(*args):
    """
    svn_wc_read_kind2(svn_wc_context_t * wc_ctx, char const * local_abspath, svn_boolean_t show_deleted,
        svn_boolean_t show_hidden, apr_pool_t scratch_pool) -> svn_error_t
    """
    return _wc.svn_wc_read_kind2(*args)

def svn_wc_read_kind(*args):
    """svn_wc_read_kind(svn_wc_context_t * wc_ctx, char const * abspath, svn_boolean_t show_hidden, apr_pool_t scratch_pool) -> svn_error_t"""
    return _wc.svn_wc_read_kind(*args)
class svn_wc_context_t:
    """Proxy of C svn_wc_context_t struct"""
    # SWIG attribute dispatch tables (empty: opaque struct, no fields exposed).
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_wc_context_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_wc_context_t, name)
    # Opaque handle: instances are only created by the C layer, never directly.
    def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined")
    __repr__ = _swig_repr
    def set_parent_pool(self, parent_pool=None):
        """Create a new proxy object for svn_wc_context_t"""
        import libsvn.core, weakref
        # Default to the global application pool when none is supplied.
        self.__dict__["_parent_pool"] = \
            parent_pool or libsvn.core.application_pool;
        # Watch the pool's liveness flag weakly so attribute access can
        # detect that the backing pool memory has been destroyed.
        if self.__dict__["_parent_pool"]:
            self.__dict__["_is_valid"] = weakref.ref(
                self.__dict__["_parent_pool"]._is_valid)
    def assert_valid(self):
        """Assert that this object is using valid pool memory"""
        if "_is_valid" in self.__dict__:
            assert self.__dict__["_is_valid"](), "Variable has already been deleted"
    def __getattr__(self, name):
        """Get an attribute from this object"""
        self.assert_valid()
        value = _swig_getattr(self, self.__class__, name)
        # Re-apply pool metadata recorded at set time onto the returned
        # wrapper so it is validity-checked as well.
        members = self.__dict__.get("_members")
        if members is not None:
            _copy_metadata_deep(value, members.get(name))
        _assert_valid_deep(value)
        return value
    def __setattr__(self, name, value):
        """Set an attribute on this object"""
        self.assert_valid()
        # Remember the Python-side value so metadata survives round trips.
        self.__dict__.setdefault("_members",{})[name] = value
        return _swig_setattr(self, self.__class__, name, value)
svn_wc_context_t_swigregister = _wc.svn_wc_context_t_swigregister
svn_wc_context_t_swigregister(svn_wc_context_t)
class svn_wc_adm_access_t:
    """Proxy of C svn_wc_adm_access_t struct"""
    # SWIG attribute dispatch tables (empty: opaque struct, no fields exposed).
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_wc_adm_access_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_wc_adm_access_t, name)
    # Opaque handle: instances are only created by the C layer, never directly.
    def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined")
    __repr__ = _swig_repr
    def set_parent_pool(self, parent_pool=None):
        """Create a new proxy object for svn_wc_adm_access_t"""
        import libsvn.core, weakref
        # Default to the global application pool when none is supplied.
        self.__dict__["_parent_pool"] = \
            parent_pool or libsvn.core.application_pool;
        # Watch the pool's liveness flag weakly so attribute access can
        # detect that the backing pool memory has been destroyed.
        if self.__dict__["_parent_pool"]:
            self.__dict__["_is_valid"] = weakref.ref(
                self.__dict__["_parent_pool"]._is_valid)
    def assert_valid(self):
        """Assert that this object is using valid pool memory"""
        if "_is_valid" in self.__dict__:
            assert self.__dict__["_is_valid"](), "Variable has already been deleted"
    def __getattr__(self, name):
        """Get an attribute from this object"""
        self.assert_valid()
        value = _swig_getattr(self, self.__class__, name)
        # Re-apply pool metadata recorded at set time onto the returned
        # wrapper so it is validity-checked as well.
        members = self.__dict__.get("_members")
        if members is not None:
            _copy_metadata_deep(value, members.get(name))
        _assert_valid_deep(value)
        return value
    def __setattr__(self, name, value):
        """Set an attribute on this object"""
        self.assert_valid()
        # Remember the Python-side value so metadata survives round trips.
        self.__dict__.setdefault("_members",{})[name] = value
        return _swig_setattr(self, self.__class__, name, value)
svn_wc_adm_access_t_swigregister = _wc.svn_wc_adm_access_t_swigregister
svn_wc_adm_access_t_swigregister(svn_wc_adm_access_t)
class svn_wc_traversal_info_t:
    """Proxy of C svn_wc_traversal_info_t struct"""
    # SWIG attribute dispatch tables (empty: opaque struct, no fields exposed).
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_wc_traversal_info_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_wc_traversal_info_t, name)
    # Opaque handle: instances are only created by the C layer, never directly.
    def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined")
    __repr__ = _swig_repr
    def set_parent_pool(self, parent_pool=None):
        """Create a new proxy object for svn_wc_traversal_info_t"""
        import libsvn.core, weakref
        # Default to the global application pool when none is supplied.
        self.__dict__["_parent_pool"] = \
            parent_pool or libsvn.core.application_pool;
        # Watch the pool's liveness flag weakly so attribute access can
        # detect that the backing pool memory has been destroyed.
        if self.__dict__["_parent_pool"]:
            self.__dict__["_is_valid"] = weakref.ref(
                self.__dict__["_parent_pool"]._is_valid)
    def assert_valid(self):
        """Assert that this object is using valid pool memory"""
        if "_is_valid" in self.__dict__:
            assert self.__dict__["_is_valid"](), "Variable has already been deleted"
    def __getattr__(self, name):
        """Get an attribute from this object"""
        self.assert_valid()
        value = _swig_getattr(self, self.__class__, name)
        # Re-apply pool metadata recorded at set time onto the returned
        # wrapper so it is validity-checked as well.
        members = self.__dict__.get("_members")
        if members is not None:
            _copy_metadata_deep(value, members.get(name))
        _assert_valid_deep(value)
        return value
    def __setattr__(self, name, value):
        """Set an attribute on this object"""
        self.assert_valid()
        # Remember the Python-side value so metadata survives round trips.
        self.__dict__.setdefault("_members",{})[name] = value
        return _swig_setattr(self, self.__class__, name, value)
svn_wc_traversal_info_t_swigregister = _wc.svn_wc_traversal_info_t_swigregister
svn_wc_traversal_info_t_swigregister(svn_wc_traversal_info_t)
class svn_wc_committed_queue_t:
    """Proxy of C svn_wc_committed_queue_t struct"""
    # SWIG attribute dispatch tables (empty: opaque struct, no fields exposed).
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_wc_committed_queue_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_wc_committed_queue_t, name)
    # Opaque handle: instances are only created by the C layer, never directly.
    def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined")
    __repr__ = _swig_repr
    def set_parent_pool(self, parent_pool=None):
        """Create a new proxy object for svn_wc_committed_queue_t"""
        import libsvn.core, weakref
        # Default to the global application pool when none is supplied.
        self.__dict__["_parent_pool"] = \
            parent_pool or libsvn.core.application_pool;
        # Watch the pool's liveness flag weakly so attribute access can
        # detect that the backing pool memory has been destroyed.
        if self.__dict__["_parent_pool"]:
            self.__dict__["_is_valid"] = weakref.ref(
                self.__dict__["_parent_pool"]._is_valid)
    def assert_valid(self):
        """Assert that this object is using valid pool memory"""
        if "_is_valid" in self.__dict__:
            assert self.__dict__["_is_valid"](), "Variable has already been deleted"
    def __getattr__(self, name):
        """Get an attribute from this object"""
        self.assert_valid()
        value = _swig_getattr(self, self.__class__, name)
        # Re-apply pool metadata recorded at set time onto the returned
        # wrapper so it is validity-checked as well.
        members = self.__dict__.get("_members")
        if members is not None:
            _copy_metadata_deep(value, members.get(name))
        _assert_valid_deep(value)
        return value
    def __setattr__(self, name, value):
        """Set an attribute on this object"""
        self.assert_valid()
        # Remember the Python-side value so metadata survives round trips.
        self.__dict__.setdefault("_members",{})[name] = value
        return _swig_setattr(self, self.__class__, name, value)
svn_wc_committed_queue_t_swigregister = _wc.svn_wc_committed_queue_t_swigregister
svn_wc_committed_queue_t_swigregister(svn_wc_committed_queue_t)
def svn_wc_diff_callbacks4_invoke_file_opened(*args):
"""
svn_wc_diff_callbacks4_invoke_file_opened(svn_wc_diff_callbacks4_t _obj, char const * path, svn_revnum_t rev, void * diff_baton,
apr_pool_t scratch_pool) -> svn_error_t
"""
return _wc.svn_wc_diff_callbacks4_invoke_file_opened(*args)
def svn_wc_diff_callbacks4_invoke_file_changed(*args):
"""
svn_wc_diff_callbacks4_invoke_file_changed(svn_wc_diff_callbacks4_t _obj, svn_wc_notify_state_t * contentstate, svn_wc_notify_state_t * propstate,
char const * path, char const * tmpfile1, char const * tmpfile2,
svn_revnum_t rev1, svn_revnum_t rev2, char const * mimetype1,
char const * mimetype2, apr_array_header_t propchanges, apr_hash_t originalprops,
void * diff_baton, apr_pool_t scratch_pool) -> svn_error_t
"""
return _wc.svn_wc_diff_callbacks4_invoke_file_changed(*args)
def svn_wc_diff_callbacks4_invoke_file_added(*args):
"""
svn_wc_diff_callbacks4_invoke_file_added(svn_wc_diff_callbacks4_t _obj, svn_wc_notify_state_t * contentstate, svn_wc_notify_state_t * propstate,
char const * path, char const * tmpfile1, char const * tmpfile2,
svn_revnum_t rev1, svn_revnum_t rev2, char const * mimetype1,
char const * mimetype2, char const * copyfrom_path, svn_revnum_t copyfrom_revision,
apr_array_header_t propchanges, apr_hash_t originalprops, void * diff_baton,
apr_pool_t scratch_pool) -> svn_error_t
"""
return _wc.svn_wc_diff_callbacks4_invoke_file_added(*args)
def svn_wc_diff_callbacks4_invoke_file_deleted(*args):
    """
    svn_wc_diff_callbacks4_invoke_file_deleted(svn_wc_diff_callbacks4_t _obj, svn_wc_notify_state_t * state, char const * path,
        char const * tmpfile1, char const * tmpfile2, char const * mimetype1, char const * mimetype2,
        apr_hash_t originalprops, void * diff_baton, apr_pool_t scratch_pool) -> svn_error_t
    """
    # Thin SWIG wrapper: forward every argument to the C extension module.
    c_fn = _wc.svn_wc_diff_callbacks4_invoke_file_deleted
    return c_fn(*args)
def svn_wc_diff_callbacks4_invoke_dir_deleted(*args):
    """
    svn_wc_diff_callbacks4_invoke_dir_deleted(svn_wc_diff_callbacks4_t _obj, svn_wc_notify_state_t * state, char const * path,
        void * diff_baton, apr_pool_t scratch_pool) -> svn_error_t
    """
    # Thin SWIG wrapper: forward every argument to the C extension module.
    c_fn = _wc.svn_wc_diff_callbacks4_invoke_dir_deleted
    return c_fn(*args)
def svn_wc_diff_callbacks4_invoke_dir_opened(*args):
    """
    svn_wc_diff_callbacks4_invoke_dir_opened(svn_wc_diff_callbacks4_t _obj, char const * path, svn_revnum_t rev, void * diff_baton,
        apr_pool_t scratch_pool) -> svn_error_t
    """
    # Thin SWIG wrapper: forward every argument to the C extension module.
    c_fn = _wc.svn_wc_diff_callbacks4_invoke_dir_opened
    return c_fn(*args)
def svn_wc_diff_callbacks4_invoke_dir_added(*args):
    """
    svn_wc_diff_callbacks4_invoke_dir_added(svn_wc_diff_callbacks4_t _obj, svn_wc_notify_state_t * state, char const * path,
        svn_revnum_t rev, char const * copyfrom_path, svn_revnum_t copyfrom_revision,
        void * diff_baton, apr_pool_t scratch_pool) -> svn_error_t
    """
    # Thin SWIG wrapper: forward every argument to the C extension module.
    c_fn = _wc.svn_wc_diff_callbacks4_invoke_dir_added
    return c_fn(*args)
def svn_wc_diff_callbacks4_invoke_dir_props_changed(*args):
    """
    svn_wc_diff_callbacks4_invoke_dir_props_changed(svn_wc_diff_callbacks4_t _obj, svn_wc_notify_state_t * propstate, char const * path,
        svn_boolean_t dir_was_added, apr_array_header_t propchanges, apr_hash_t original_props,
        void * diff_baton, apr_pool_t scratch_pool) -> svn_error_t
    """
    # Thin SWIG wrapper: forward every argument to the C extension module.
    c_fn = _wc.svn_wc_diff_callbacks4_invoke_dir_props_changed
    return c_fn(*args)
def svn_wc_diff_callbacks4_invoke_dir_closed(*args):
    """
    svn_wc_diff_callbacks4_invoke_dir_closed(svn_wc_diff_callbacks4_t _obj, svn_wc_notify_state_t * contentstate, svn_wc_notify_state_t * propstate,
        char const * path, svn_boolean_t dir_was_added, void * diff_baton,
        apr_pool_t scratch_pool) -> svn_error_t
    """
    # Thin SWIG wrapper: forward every argument to the C extension module.
    c_fn = _wc.svn_wc_diff_callbacks4_invoke_dir_closed
    return c_fn(*args)
def svn_wc_diff_callbacks3_invoke_file_changed(*args):
    """
    svn_wc_diff_callbacks3_invoke_file_changed(svn_wc_diff_callbacks3_t _obj, svn_wc_adm_access_t * adm_access, svn_wc_notify_state_t * contentstate,
        svn_wc_notify_state_t * propstate, char const * path,
        char const * tmpfile1, char const * tmpfile2, svn_revnum_t rev1, svn_revnum_t rev2,
        char const * mimetype1, char const * mimetype2, apr_array_header_t propchanges,
        apr_hash_t originalprops, void * diff_baton) -> svn_error_t
    """
    # Thin SWIG wrapper: forward every argument to the C extension module.
    c_fn = _wc.svn_wc_diff_callbacks3_invoke_file_changed
    return c_fn(*args)
def svn_wc_diff_callbacks3_invoke_file_added(*args):
    """
    svn_wc_diff_callbacks3_invoke_file_added(svn_wc_diff_callbacks3_t _obj, svn_wc_adm_access_t * adm_access, svn_wc_notify_state_t * contentstate,
        svn_wc_notify_state_t * propstate, char const * path,
        char const * tmpfile1, char const * tmpfile2, svn_revnum_t rev1, svn_revnum_t rev2,
        char const * mimetype1, char const * mimetype2, apr_array_header_t propchanges,
        apr_hash_t originalprops, void * diff_baton) -> svn_error_t
    """
    # Thin SWIG wrapper: forward every argument to the C extension module.
    c_fn = _wc.svn_wc_diff_callbacks3_invoke_file_added
    return c_fn(*args)
def svn_wc_diff_callbacks3_invoke_file_deleted(*args):
    """
    svn_wc_diff_callbacks3_invoke_file_deleted(svn_wc_diff_callbacks3_t _obj, svn_wc_adm_access_t * adm_access, svn_wc_notify_state_t * state,
        char const * path, char const * tmpfile1, char const * tmpfile2,
        char const * mimetype1, char const * mimetype2, apr_hash_t originalprops,
        void * diff_baton) -> svn_error_t
    """
    # Thin SWIG wrapper: forward every argument to the C extension module.
    c_fn = _wc.svn_wc_diff_callbacks3_invoke_file_deleted
    return c_fn(*args)
def svn_wc_diff_callbacks3_invoke_dir_added(*args):
    """
    svn_wc_diff_callbacks3_invoke_dir_added(svn_wc_diff_callbacks3_t _obj, svn_wc_adm_access_t * adm_access, svn_wc_notify_state_t * state,
        char const * path, svn_revnum_t rev, void * diff_baton) -> svn_error_t
    """
    # Thin SWIG wrapper: forward every argument to the C extension module.
    c_fn = _wc.svn_wc_diff_callbacks3_invoke_dir_added
    return c_fn(*args)
def svn_wc_diff_callbacks3_invoke_dir_deleted(*args):
    """
    svn_wc_diff_callbacks3_invoke_dir_deleted(svn_wc_diff_callbacks3_t _obj, svn_wc_adm_access_t * adm_access, svn_wc_notify_state_t * state,
        char const * path, void * diff_baton) -> svn_error_t
    """
    # Thin SWIG wrapper: forward every argument to the C extension module.
    c_fn = _wc.svn_wc_diff_callbacks3_invoke_dir_deleted
    return c_fn(*args)
def svn_wc_diff_callbacks3_invoke_dir_props_changed(*args):
    """
    svn_wc_diff_callbacks3_invoke_dir_props_changed(svn_wc_diff_callbacks3_t _obj, svn_wc_adm_access_t * adm_access, svn_wc_notify_state_t * propstate,
        char const * path, apr_array_header_t propchanges, apr_hash_t original_props,
        void * diff_baton) -> svn_error_t
    """
    # Thin SWIG wrapper: forward every argument to the C extension module.
    c_fn = _wc.svn_wc_diff_callbacks3_invoke_dir_props_changed
    return c_fn(*args)
def svn_wc_diff_callbacks3_invoke_dir_opened(*args):
    """
    svn_wc_diff_callbacks3_invoke_dir_opened(svn_wc_diff_callbacks3_t _obj, svn_wc_adm_access_t * adm_access, char const * path,
        svn_revnum_t rev, void * diff_baton) -> svn_error_t
    """
    # Thin SWIG wrapper: forward every argument to the C extension module.
    c_fn = _wc.svn_wc_diff_callbacks3_invoke_dir_opened
    return c_fn(*args)
def svn_wc_diff_callbacks3_invoke_dir_closed(*args):
    """
    svn_wc_diff_callbacks3_invoke_dir_closed(svn_wc_diff_callbacks3_t _obj, svn_wc_adm_access_t * adm_access, svn_wc_notify_state_t * contentstate,
        svn_wc_notify_state_t * propstate, char const * path,
        void * diff_baton) -> svn_error_t
    """
    # Thin SWIG wrapper: forward every argument to the C extension module.
    c_fn = _wc.svn_wc_diff_callbacks3_invoke_dir_closed
    return c_fn(*args)
def svn_wc_diff_callbacks2_invoke_file_changed(*args):
    """
    svn_wc_diff_callbacks2_invoke_file_changed(svn_wc_diff_callbacks2_t _obj, svn_wc_adm_access_t * adm_access, svn_wc_notify_state_t * contentstate,
        svn_wc_notify_state_t * propstate, char const * path,
        char const * tmpfile1, char const * tmpfile2, svn_revnum_t rev1, svn_revnum_t rev2,
        char const * mimetype1, char const * mimetype2, apr_array_header_t propchanges,
        apr_hash_t originalprops, void * diff_baton) -> svn_error_t
    """
    # Thin SWIG wrapper: forward every argument to the C extension module.
    c_fn = _wc.svn_wc_diff_callbacks2_invoke_file_changed
    return c_fn(*args)
def svn_wc_diff_callbacks2_invoke_file_added(*args):
    """
    svn_wc_diff_callbacks2_invoke_file_added(svn_wc_diff_callbacks2_t _obj, svn_wc_adm_access_t * adm_access, svn_wc_notify_state_t * contentstate,
        svn_wc_notify_state_t * propstate, char const * path,
        char const * tmpfile1, char const * tmpfile2, svn_revnum_t rev1, svn_revnum_t rev2,
        char const * mimetype1, char const * mimetype2, apr_array_header_t propchanges,
        apr_hash_t originalprops, void * diff_baton) -> svn_error_t
    """
    # Thin SWIG wrapper: forward every argument to the C extension module.
    c_fn = _wc.svn_wc_diff_callbacks2_invoke_file_added
    return c_fn(*args)
def svn_wc_diff_callbacks2_invoke_file_deleted(*args):
    """
    svn_wc_diff_callbacks2_invoke_file_deleted(svn_wc_diff_callbacks2_t _obj, svn_wc_adm_access_t * adm_access, svn_wc_notify_state_t * state,
        char const * path, char const * tmpfile1, char const * tmpfile2,
        char const * mimetype1, char const * mimetype2, apr_hash_t originalprops,
        void * diff_baton) -> svn_error_t
    """
    # Thin SWIG wrapper: forward every argument to the C extension module.
    c_fn = _wc.svn_wc_diff_callbacks2_invoke_file_deleted
    return c_fn(*args)
def svn_wc_diff_callbacks2_invoke_dir_added(*args):
    """
    svn_wc_diff_callbacks2_invoke_dir_added(svn_wc_diff_callbacks2_t _obj, svn_wc_adm_access_t * adm_access, svn_wc_notify_state_t * state,
        char const * path, svn_revnum_t rev, void * diff_baton) -> svn_error_t
    """
    # Thin SWIG wrapper: forward every argument to the C extension module.
    c_fn = _wc.svn_wc_diff_callbacks2_invoke_dir_added
    return c_fn(*args)
def svn_wc_diff_callbacks2_invoke_dir_deleted(*args):
    """
    svn_wc_diff_callbacks2_invoke_dir_deleted(svn_wc_diff_callbacks2_t _obj, svn_wc_adm_access_t * adm_access, svn_wc_notify_state_t * state,
        char const * path, void * diff_baton) -> svn_error_t
    """
    # Thin SWIG wrapper: forward every argument to the C extension module.
    c_fn = _wc.svn_wc_diff_callbacks2_invoke_dir_deleted
    return c_fn(*args)
def svn_wc_diff_callbacks2_invoke_dir_props_changed(*args):
    """
    svn_wc_diff_callbacks2_invoke_dir_props_changed(svn_wc_diff_callbacks2_t _obj, svn_wc_adm_access_t * adm_access, svn_wc_notify_state_t * state,
        char const * path, apr_array_header_t propchanges, apr_hash_t original_props,
        void * diff_baton) -> svn_error_t
    """
    # Thin SWIG wrapper: forward every argument to the C extension module.
    c_fn = _wc.svn_wc_diff_callbacks2_invoke_dir_props_changed
    return c_fn(*args)
def svn_wc_diff_callbacks_invoke_file_changed(*args):
    """
    svn_wc_diff_callbacks_invoke_file_changed(svn_wc_diff_callbacks_t _obj, svn_wc_adm_access_t * adm_access, svn_wc_notify_state_t * state,
        char const * path, char const * tmpfile1, char const * tmpfile2,
        svn_revnum_t rev1, svn_revnum_t rev2, char const * mimetype1, char const * mimetype2,
        void * diff_baton) -> svn_error_t
    """
    # Thin SWIG wrapper: forward every argument to the C extension module.
    c_fn = _wc.svn_wc_diff_callbacks_invoke_file_changed
    return c_fn(*args)
def svn_wc_diff_callbacks_invoke_file_added(*args):
    """
    svn_wc_diff_callbacks_invoke_file_added(svn_wc_diff_callbacks_t _obj, svn_wc_adm_access_t * adm_access, svn_wc_notify_state_t * state,
        char const * path, char const * tmpfile1, char const * tmpfile2,
        svn_revnum_t rev1, svn_revnum_t rev2, char const * mimetype1, char const * mimetype2,
        void * diff_baton) -> svn_error_t
    """
    # Thin SWIG wrapper: forward every argument to the C extension module.
    c_fn = _wc.svn_wc_diff_callbacks_invoke_file_added
    return c_fn(*args)
def svn_wc_diff_callbacks_invoke_file_deleted(*args):
    """
    svn_wc_diff_callbacks_invoke_file_deleted(svn_wc_diff_callbacks_t _obj, svn_wc_adm_access_t * adm_access, svn_wc_notify_state_t * state,
        char const * path, char const * tmpfile1, char const * tmpfile2,
        char const * mimetype1, char const * mimetype2, void * diff_baton) -> svn_error_t
    """
    # Thin SWIG wrapper: forward every argument to the C extension module.
    c_fn = _wc.svn_wc_diff_callbacks_invoke_file_deleted
    return c_fn(*args)
def svn_wc_diff_callbacks_invoke_dir_added(*args):
    """
    svn_wc_diff_callbacks_invoke_dir_added(svn_wc_diff_callbacks_t _obj, svn_wc_adm_access_t * adm_access, svn_wc_notify_state_t * state,
        char const * path, svn_revnum_t rev, void * diff_baton) -> svn_error_t
    """
    # Thin SWIG wrapper: forward every argument to the C extension module.
    c_fn = _wc.svn_wc_diff_callbacks_invoke_dir_added
    return c_fn(*args)
def svn_wc_diff_callbacks_invoke_dir_deleted(*args):
    """
    svn_wc_diff_callbacks_invoke_dir_deleted(svn_wc_diff_callbacks_t _obj, svn_wc_adm_access_t * adm_access, svn_wc_notify_state_t * state,
        char const * path, void * diff_baton) -> svn_error_t
    """
    # Thin SWIG wrapper: forward every argument to the C extension module.
    c_fn = _wc.svn_wc_diff_callbacks_invoke_dir_deleted
    return c_fn(*args)
def svn_wc_diff_callbacks_invoke_props_changed(*args):
    """
    svn_wc_diff_callbacks_invoke_props_changed(svn_wc_diff_callbacks_t _obj, svn_wc_adm_access_t * adm_access, svn_wc_notify_state_t * state,
        char const * path, apr_array_header_t propchanges, apr_hash_t original_props,
        void * diff_baton) -> svn_error_t
    """
    # Thin SWIG wrapper: forward every argument to the C extension module.
    c_fn = _wc.svn_wc_diff_callbacks_invoke_props_changed
    return c_fn(*args)
def svn_wc_entry_callbacks2_invoke_found_entry(*args):
    """
    svn_wc_entry_callbacks2_invoke_found_entry(svn_wc_entry_callbacks2_t _obj, char const * path, svn_wc_entry_t entry, void * walk_baton,
        apr_pool_t pool) -> svn_error_t
    """
    # Thin SWIG wrapper: forward every argument to the C extension module.
    c_fn = _wc.svn_wc_entry_callbacks2_invoke_found_entry
    return c_fn(*args)
def svn_wc_entry_callbacks2_invoke_handle_error(*args):
    """
    svn_wc_entry_callbacks2_invoke_handle_error(svn_wc_entry_callbacks2_t _obj, char const * path, svn_error_t err, void * walk_baton,
        apr_pool_t pool) -> svn_error_t
    """
    # Thin SWIG wrapper: forward every argument to the C extension module.
    c_fn = _wc.svn_wc_entry_callbacks2_invoke_handle_error
    return c_fn(*args)
def svn_wc_entry_callbacks_invoke_found_entry(*args):
    """
    svn_wc_entry_callbacks_invoke_found_entry(svn_wc_entry_callbacks_t _obj, char const * path, svn_wc_entry_t entry, void * walk_baton,
        apr_pool_t pool) -> svn_error_t
    """
    # Thin SWIG wrapper: forward every argument to the C extension module.
    c_fn = _wc.svn_wc_entry_callbacks_invoke_found_entry
    return c_fn(*args)
def svn_wc_invoke_external_update(*args):
    """
    svn_wc_invoke_external_update(svn_wc_external_update_t _obj, void * baton, char const * local_abspath, svn_string_t const * old_val,
        svn_string_t const * new_val, svn_depth_t depth, apr_pool_t scratch_pool) -> svn_error_t
    """
    # Thin SWIG wrapper: forward every argument to the C extension module.
    c_fn = _wc.svn_wc_invoke_external_update
    return c_fn(*args)
def svn_wc_invoke_notify_func2(*args):
    """svn_wc_invoke_notify_func2(svn_wc_notify_func2_t _obj, void * baton, svn_wc_notify_t notify, apr_pool_t pool)"""
    # Thin SWIG wrapper: forward every argument to the C extension module.
    c_fn = _wc.svn_wc_invoke_notify_func2
    return c_fn(*args)
def svn_wc_invoke_notify_func(*args):
    """
    svn_wc_invoke_notify_func(svn_wc_notify_func_t _obj, void * baton, char const * path, svn_wc_notify_action_t action,
        svn_node_kind_t kind, char const * mime_type, svn_wc_notify_state_t content_state,
        svn_wc_notify_state_t prop_state, svn_revnum_t revision)
    """
    # Thin SWIG wrapper: forward every argument to the C extension module.
    c_fn = _wc.svn_wc_invoke_notify_func
    return c_fn(*args)
def svn_wc_invoke_conflict_resolver_func2(*args):
    """
    svn_wc_invoke_conflict_resolver_func2(svn_wc_conflict_resolver_func2_t _obj, svn_wc_conflict_description2_t description,
        void * baton, apr_pool_t result_pool, apr_pool_t scratch_pool) -> svn_error_t
    """
    # Thin SWIG wrapper: forward every argument to the C extension module.
    c_fn = _wc.svn_wc_invoke_conflict_resolver_func2
    return c_fn(*args)
def svn_wc_invoke_conflict_resolver_func(*args):
    """
    svn_wc_invoke_conflict_resolver_func(svn_wc_conflict_resolver_func_t _obj, svn_wc_conflict_description_t description,
        void * baton, apr_pool_t pool) -> svn_error_t
    """
    # Thin SWIG wrapper: forward every argument to the C extension module.
    c_fn = _wc.svn_wc_invoke_conflict_resolver_func
    return c_fn(*args)
def svn_wc_invoke_status_func4(*args):
    """
    svn_wc_invoke_status_func4(svn_wc_status_func4_t _obj, void * baton, char const * local_abspath, svn_wc_status3_t status,
        apr_pool_t scratch_pool) -> svn_error_t
    """
    # Thin SWIG wrapper: forward every argument to the C extension module.
    c_fn = _wc.svn_wc_invoke_status_func4
    return c_fn(*args)
def svn_wc_invoke_status_func3(*args):
    """
    svn_wc_invoke_status_func3(svn_wc_status_func3_t _obj, void * baton, char const * path, svn_wc_status2_t status,
        apr_pool_t pool) -> svn_error_t
    """
    # Thin SWIG wrapper: forward every argument to the C extension module.
    c_fn = _wc.svn_wc_invoke_status_func3
    return c_fn(*args)
def svn_wc_invoke_status_func2(*args):
    """svn_wc_invoke_status_func2(svn_wc_status_func2_t _obj, void * baton, char const * path, svn_wc_status2_t status)"""
    # Thin SWIG wrapper: forward every argument to the C extension module.
    c_fn = _wc.svn_wc_invoke_status_func2
    return c_fn(*args)
def svn_wc_invoke_status_func(*args):
    """svn_wc_invoke_status_func(svn_wc_status_func_t _obj, void * baton, char const * path, svn_wc_status_t status)"""
    # Thin SWIG wrapper: forward every argument to the C extension module.
    c_fn = _wc.svn_wc_invoke_status_func
    return c_fn(*args)
def svn_wc_invoke_get_file(*args):
    """
    svn_wc_invoke_get_file(svn_wc_get_file_t _obj, void * baton, char const * path, svn_revnum_t revision, svn_stream_t * stream,
        apr_pool_t pool) -> svn_error_t
    """
    # Thin SWIG wrapper: forward every argument to the C extension module.
    c_fn = _wc.svn_wc_invoke_get_file
    return c_fn(*args)
def svn_wc_invoke_dirents_func(*args):
    """
    svn_wc_invoke_dirents_func(svn_wc_dirents_func_t _obj, void * baton, char const * repos_root_url, char const * repos_relpath,
        apr_pool_t result_pool, apr_pool_t scratch_pool) -> svn_error_t
    """
    # Thin SWIG wrapper: forward every argument to the C extension module.
    c_fn = _wc.svn_wc_invoke_dirents_func
    return c_fn(*args)
def svn_wc_invoke_canonicalize_svn_prop_get_file(*args):
    """
    svn_wc_invoke_canonicalize_svn_prop_get_file(svn_wc_canonicalize_svn_prop_get_file_t _obj, svn_stream_t * stream, void * baton,
        apr_pool_t pool) -> svn_error_t
    """
    # Thin SWIG wrapper: forward every argument to the C extension module.
    c_fn = _wc.svn_wc_invoke_canonicalize_svn_prop_get_file
    return c_fn(*args)
def svn_wc_invoke_upgrade_get_repos_info(*args):
    """
    svn_wc_invoke_upgrade_get_repos_info(svn_wc_upgrade_get_repos_info_t _obj, void * baton, char const * url, apr_pool_t result_pool,
        apr_pool_t scratch_pool) -> svn_error_t
    """
    # Thin SWIG wrapper: forward every argument to the C extension module.
    c_fn = _wc.svn_wc_invoke_upgrade_get_repos_info
    return c_fn(*args)
def svn_wc_invoke_relocation_validator3(*args):
    """
    svn_wc_invoke_relocation_validator3(svn_wc_relocation_validator3_t _obj, void * baton, char const * uuid, char const * url,
        char const * root_url, apr_pool_t pool) -> svn_error_t
    """
    # Thin SWIG wrapper: forward every argument to the C extension module.
    c_fn = _wc.svn_wc_invoke_relocation_validator3
    return c_fn(*args)
def svn_wc_invoke_relocation_validator2(*args):
    """
    svn_wc_invoke_relocation_validator2(svn_wc_relocation_validator2_t _obj, void * baton, char const * uuid, char const * url,
        svn_boolean_t root, apr_pool_t pool) -> svn_error_t
    """
    # Thin SWIG wrapper: forward every argument to the C extension module.
    c_fn = _wc.svn_wc_invoke_relocation_validator2
    return c_fn(*args)
def svn_wc_invoke_relocation_validator(*args):
    """svn_wc_invoke_relocation_validator(svn_wc_relocation_validator_t _obj, void * baton, char const * uuid, char const * url) -> svn_error_t"""
    # Thin SWIG wrapper: forward every argument to the C extension module.
    c_fn = _wc.svn_wc_invoke_relocation_validator
    return c_fn(*args)
def svn_changelist_invoke_receiver(*args):
    """
    svn_changelist_invoke_receiver(svn_changelist_receiver_t _obj, void * baton, char const * path, char const * changelist,
        apr_pool_t pool) -> svn_error_t
    """
    # Thin SWIG wrapper: forward every argument to the C extension module.
    c_fn = _wc.svn_changelist_invoke_receiver
    return c_fn(*args)
class svn_wc_external_update_t:
    """Proxy of C svn_wc_external_update_t struct"""
    # SWIG accessor tables; the lambdas below are rebound by the
    # def __getattr__/__setattr__ later in this class body.
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_wc_external_update_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_wc_external_update_t, name)
    def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined")
    __repr__ = _swig_repr
    def set_parent_pool(self, parent_pool=None):
        """Record the pool that owns this proxy's memory (defaults to the
        global application pool)."""
        import libsvn.core, weakref
        self.__dict__["_parent_pool"] = \
            parent_pool or libsvn.core.application_pool;
        # Keep a weak reference to the pool's validity flag so attribute
        # access can later detect that the pool was destroyed.
        if self.__dict__["_parent_pool"]:
            self.__dict__["_is_valid"] = weakref.ref(
                self.__dict__["_parent_pool"]._is_valid)
    def assert_valid(self):
        """Assert that this object is using valid pool memory"""
        if "_is_valid" in self.__dict__:
            assert self.__dict__["_is_valid"](), "Variable has already been deleted"
    def __getattr__(self, name):
        """Get an attribute from this object"""
        self.assert_valid()
        value = _swig_getattr(self, self.__class__, name)
        # Re-attach Python-level metadata recorded by __setattr__, then
        # verify the returned value's pool memory is still valid.
        members = self.__dict__.get("_members")
        if members is not None:
            _copy_metadata_deep(value, members.get(name))
        _assert_valid_deep(value)
        return value
    def __setattr__(self, name, value):
        """Set an attribute on this object"""
        self.assert_valid()
        # Remember the Python value so __getattr__ can restore its metadata.
        self.__dict__.setdefault("_members",{})[name] = value
        return _swig_setattr(self, self.__class__, name, value)
    def __call__(self, *args):
        # Calling the proxy invokes the wrapped C callback.
        return svn_wc_invoke_external_update(self, *args)
# Register the proxy class with the SWIG-generated C module.
svn_wc_external_update_t_swigregister = _wc.svn_wc_external_update_t_swigregister
svn_wc_external_update_t_swigregister(svn_wc_external_update_t)
class svn_wc_notify_func2_t:
    """Proxy of C svn_wc_notify_func2_t struct"""
    # SWIG accessor tables; the lambdas below are rebound by the
    # def __getattr__/__setattr__ later in this class body.
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_wc_notify_func2_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_wc_notify_func2_t, name)
    def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined")
    __repr__ = _swig_repr
    def set_parent_pool(self, parent_pool=None):
        """Record the pool that owns this proxy's memory (defaults to the
        global application pool)."""
        import libsvn.core, weakref
        self.__dict__["_parent_pool"] = \
            parent_pool or libsvn.core.application_pool;
        # Keep a weak reference to the pool's validity flag so attribute
        # access can later detect that the pool was destroyed.
        if self.__dict__["_parent_pool"]:
            self.__dict__["_is_valid"] = weakref.ref(
                self.__dict__["_parent_pool"]._is_valid)
    def assert_valid(self):
        """Assert that this object is using valid pool memory"""
        if "_is_valid" in self.__dict__:
            assert self.__dict__["_is_valid"](), "Variable has already been deleted"
    def __getattr__(self, name):
        """Get an attribute from this object"""
        self.assert_valid()
        value = _swig_getattr(self, self.__class__, name)
        # Re-attach Python-level metadata recorded by __setattr__, then
        # verify the returned value's pool memory is still valid.
        members = self.__dict__.get("_members")
        if members is not None:
            _copy_metadata_deep(value, members.get(name))
        _assert_valid_deep(value)
        return value
    def __setattr__(self, name, value):
        """Set an attribute on this object"""
        self.assert_valid()
        # Remember the Python value so __getattr__ can restore its metadata.
        self.__dict__.setdefault("_members",{})[name] = value
        return _swig_setattr(self, self.__class__, name, value)
    def __call__(self, *args):
        # Calling the proxy invokes the wrapped C callback.
        return svn_wc_invoke_notify_func2(self, *args)
# Register the proxy class with the SWIG-generated C module.
svn_wc_notify_func2_t_swigregister = _wc.svn_wc_notify_func2_t_swigregister
svn_wc_notify_func2_t_swigregister(svn_wc_notify_func2_t)
class svn_wc_notify_func_t:
    """Proxy of C svn_wc_notify_func_t struct"""
    # SWIG accessor tables; the lambdas below are rebound by the
    # def __getattr__/__setattr__ later in this class body.
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_wc_notify_func_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_wc_notify_func_t, name)
    def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined")
    __repr__ = _swig_repr
    def set_parent_pool(self, parent_pool=None):
        """Record the pool that owns this proxy's memory (defaults to the
        global application pool)."""
        import libsvn.core, weakref
        self.__dict__["_parent_pool"] = \
            parent_pool or libsvn.core.application_pool;
        # Keep a weak reference to the pool's validity flag so attribute
        # access can later detect that the pool was destroyed.
        if self.__dict__["_parent_pool"]:
            self.__dict__["_is_valid"] = weakref.ref(
                self.__dict__["_parent_pool"]._is_valid)
    def assert_valid(self):
        """Assert that this object is using valid pool memory"""
        if "_is_valid" in self.__dict__:
            assert self.__dict__["_is_valid"](), "Variable has already been deleted"
    def __getattr__(self, name):
        """Get an attribute from this object"""
        self.assert_valid()
        value = _swig_getattr(self, self.__class__, name)
        # Re-attach Python-level metadata recorded by __setattr__, then
        # verify the returned value's pool memory is still valid.
        members = self.__dict__.get("_members")
        if members is not None:
            _copy_metadata_deep(value, members.get(name))
        _assert_valid_deep(value)
        return value
    def __setattr__(self, name, value):
        """Set an attribute on this object"""
        self.assert_valid()
        # Remember the Python value so __getattr__ can restore its metadata.
        self.__dict__.setdefault("_members",{})[name] = value
        return _swig_setattr(self, self.__class__, name, value)
    def __call__(self, *args):
        # Calling the proxy invokes the wrapped C callback.
        return svn_wc_invoke_notify_func(self, *args)
# Register the proxy class with the SWIG-generated C module.
svn_wc_notify_func_t_swigregister = _wc.svn_wc_notify_func_t_swigregister
svn_wc_notify_func_t_swigregister(svn_wc_notify_func_t)
class svn_wc_conflict_resolver_func2_t:
    """Proxy of C svn_wc_conflict_resolver_func2_t struct"""
    # SWIG accessor tables; the lambdas below are rebound by the
    # def __getattr__/__setattr__ later in this class body.
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_wc_conflict_resolver_func2_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_wc_conflict_resolver_func2_t, name)
    def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined")
    __repr__ = _swig_repr
    def set_parent_pool(self, parent_pool=None):
        """Record the pool that owns this proxy's memory (defaults to the
        global application pool)."""
        import libsvn.core, weakref
        self.__dict__["_parent_pool"] = \
            parent_pool or libsvn.core.application_pool;
        # Keep a weak reference to the pool's validity flag so attribute
        # access can later detect that the pool was destroyed.
        if self.__dict__["_parent_pool"]:
            self.__dict__["_is_valid"] = weakref.ref(
                self.__dict__["_parent_pool"]._is_valid)
    def assert_valid(self):
        """Assert that this object is using valid pool memory"""
        if "_is_valid" in self.__dict__:
            assert self.__dict__["_is_valid"](), "Variable has already been deleted"
    def __getattr__(self, name):
        """Get an attribute from this object"""
        self.assert_valid()
        value = _swig_getattr(self, self.__class__, name)
        # Re-attach Python-level metadata recorded by __setattr__, then
        # verify the returned value's pool memory is still valid.
        members = self.__dict__.get("_members")
        if members is not None:
            _copy_metadata_deep(value, members.get(name))
        _assert_valid_deep(value)
        return value
    def __setattr__(self, name, value):
        """Set an attribute on this object"""
        self.assert_valid()
        # Remember the Python value so __getattr__ can restore its metadata.
        self.__dict__.setdefault("_members",{})[name] = value
        return _swig_setattr(self, self.__class__, name, value)
    def __call__(self, *args):
        # Calling the proxy invokes the wrapped C callback.
        return svn_wc_invoke_conflict_resolver_func2(self, *args)
# Register the proxy class with the SWIG-generated C module.
svn_wc_conflict_resolver_func2_t_swigregister = _wc.svn_wc_conflict_resolver_func2_t_swigregister
svn_wc_conflict_resolver_func2_t_swigregister(svn_wc_conflict_resolver_func2_t)
class svn_wc_conflict_resolver_func_t:
    """Proxy of C svn_wc_conflict_resolver_func_t struct"""
    # SWIG accessor tables; the lambdas below are rebound by the
    # def __getattr__/__setattr__ later in this class body.
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_wc_conflict_resolver_func_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_wc_conflict_resolver_func_t, name)
    def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined")
    __repr__ = _swig_repr
    def set_parent_pool(self, parent_pool=None):
        """Record the pool that owns this proxy's memory (defaults to the
        global application pool)."""
        import libsvn.core, weakref
        self.__dict__["_parent_pool"] = \
            parent_pool or libsvn.core.application_pool;
        # Keep a weak reference to the pool's validity flag so attribute
        # access can later detect that the pool was destroyed.
        if self.__dict__["_parent_pool"]:
            self.__dict__["_is_valid"] = weakref.ref(
                self.__dict__["_parent_pool"]._is_valid)
    def assert_valid(self):
        """Assert that this object is using valid pool memory"""
        if "_is_valid" in self.__dict__:
            assert self.__dict__["_is_valid"](), "Variable has already been deleted"
    def __getattr__(self, name):
        """Get an attribute from this object"""
        self.assert_valid()
        value = _swig_getattr(self, self.__class__, name)
        # Re-attach Python-level metadata recorded by __setattr__, then
        # verify the returned value's pool memory is still valid.
        members = self.__dict__.get("_members")
        if members is not None:
            _copy_metadata_deep(value, members.get(name))
        _assert_valid_deep(value)
        return value
    def __setattr__(self, name, value):
        """Set an attribute on this object"""
        self.assert_valid()
        # Remember the Python value so __getattr__ can restore its metadata.
        self.__dict__.setdefault("_members",{})[name] = value
        return _swig_setattr(self, self.__class__, name, value)
    def __call__(self, *args):
        # Calling the proxy invokes the wrapped C callback.
        return svn_wc_invoke_conflict_resolver_func(self, *args)
# Register the proxy class with the SWIG-generated C module.
svn_wc_conflict_resolver_func_t_swigregister = _wc.svn_wc_conflict_resolver_func_t_swigregister
svn_wc_conflict_resolver_func_t_swigregister(svn_wc_conflict_resolver_func_t)
class svn_wc_status_func4_t:
    """Proxy of C svn_wc_status_func4_t struct"""
    # SWIG accessor tables; the lambdas below are rebound by the
    # def __getattr__/__setattr__ later in this class body.
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_wc_status_func4_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_wc_status_func4_t, name)
    def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined")
    __repr__ = _swig_repr
    def set_parent_pool(self, parent_pool=None):
        """Record the pool that owns this proxy's memory (defaults to the
        global application pool)."""
        import libsvn.core, weakref
        self.__dict__["_parent_pool"] = \
            parent_pool or libsvn.core.application_pool;
        # Keep a weak reference to the pool's validity flag so attribute
        # access can later detect that the pool was destroyed.
        if self.__dict__["_parent_pool"]:
            self.__dict__["_is_valid"] = weakref.ref(
                self.__dict__["_parent_pool"]._is_valid)
    def assert_valid(self):
        """Assert that this object is using valid pool memory"""
        if "_is_valid" in self.__dict__:
            assert self.__dict__["_is_valid"](), "Variable has already been deleted"
    def __getattr__(self, name):
        """Get an attribute from this object"""
        self.assert_valid()
        value = _swig_getattr(self, self.__class__, name)
        # Re-attach Python-level metadata recorded by __setattr__, then
        # verify the returned value's pool memory is still valid.
        members = self.__dict__.get("_members")
        if members is not None:
            _copy_metadata_deep(value, members.get(name))
        _assert_valid_deep(value)
        return value
    def __setattr__(self, name, value):
        """Set an attribute on this object"""
        self.assert_valid()
        # Remember the Python value so __getattr__ can restore its metadata.
        self.__dict__.setdefault("_members",{})[name] = value
        return _swig_setattr(self, self.__class__, name, value)
    def __call__(self, *args):
        # Calling the proxy invokes the wrapped C callback.
        return svn_wc_invoke_status_func4(self, *args)
# Register the proxy class with the SWIG-generated C module.
svn_wc_status_func4_t_swigregister = _wc.svn_wc_status_func4_t_swigregister
svn_wc_status_func4_t_swigregister(svn_wc_status_func4_t)
class svn_wc_status_func3_t:
    """Proxy of C svn_wc_status_func3_t struct"""
    # SWIG accessor tables; the lambdas below are rebound by the
    # def __getattr__/__setattr__ later in this class body.
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_wc_status_func3_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_wc_status_func3_t, name)
    def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined")
    __repr__ = _swig_repr
    def set_parent_pool(self, parent_pool=None):
        """Record the pool that owns this proxy's memory (defaults to the
        global application pool)."""
        import libsvn.core, weakref
        self.__dict__["_parent_pool"] = \
            parent_pool or libsvn.core.application_pool;
        # Keep a weak reference to the pool's validity flag so attribute
        # access can later detect that the pool was destroyed.
        if self.__dict__["_parent_pool"]:
            self.__dict__["_is_valid"] = weakref.ref(
                self.__dict__["_parent_pool"]._is_valid)
    def assert_valid(self):
        """Assert that this object is using valid pool memory"""
        if "_is_valid" in self.__dict__:
            assert self.__dict__["_is_valid"](), "Variable has already been deleted"
    def __getattr__(self, name):
        """Get an attribute from this object"""
        self.assert_valid()
        value = _swig_getattr(self, self.__class__, name)
        # Re-attach Python-level metadata recorded by __setattr__, then
        # verify the returned value's pool memory is still valid.
        members = self.__dict__.get("_members")
        if members is not None:
            _copy_metadata_deep(value, members.get(name))
        _assert_valid_deep(value)
        return value
    def __setattr__(self, name, value):
        """Set an attribute on this object"""
        self.assert_valid()
        # Remember the Python value so __getattr__ can restore its metadata.
        self.__dict__.setdefault("_members",{})[name] = value
        return _swig_setattr(self, self.__class__, name, value)
    def __call__(self, *args):
        # Calling the proxy invokes the wrapped C callback.
        return svn_wc_invoke_status_func3(self, *args)
# Register the proxy class with the SWIG-generated C module.
svn_wc_status_func3_t_swigregister = _wc.svn_wc_status_func3_t_swigregister
svn_wc_status_func3_t_swigregister(svn_wc_status_func3_t)
class svn_wc_status_func2_t:
    """Proxy of C svn_wc_status_func2_t struct"""
    # SWIG accessor tables; the lambdas below are rebound by the
    # def __getattr__/__setattr__ later in this class body.
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_wc_status_func2_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_wc_status_func2_t, name)
    def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined")
    __repr__ = _swig_repr
    def set_parent_pool(self, parent_pool=None):
        """Record the pool that owns this proxy's memory (defaults to the
        global application pool)."""
        import libsvn.core, weakref
        self.__dict__["_parent_pool"] = \
            parent_pool or libsvn.core.application_pool;
        # Keep a weak reference to the pool's validity flag so attribute
        # access can later detect that the pool was destroyed.
        if self.__dict__["_parent_pool"]:
            self.__dict__["_is_valid"] = weakref.ref(
                self.__dict__["_parent_pool"]._is_valid)
    def assert_valid(self):
        """Assert that this object is using valid pool memory"""
        if "_is_valid" in self.__dict__:
            assert self.__dict__["_is_valid"](), "Variable has already been deleted"
    def __getattr__(self, name):
        """Get an attribute from this object"""
        self.assert_valid()
        value = _swig_getattr(self, self.__class__, name)
        # Re-attach Python-level metadata recorded by __setattr__, then
        # verify the returned value's pool memory is still valid.
        members = self.__dict__.get("_members")
        if members is not None:
            _copy_metadata_deep(value, members.get(name))
        _assert_valid_deep(value)
        return value
    def __setattr__(self, name, value):
        """Set an attribute on this object"""
        self.assert_valid()
        # Remember the Python value so __getattr__ can restore its metadata.
        self.__dict__.setdefault("_members",{})[name] = value
        return _swig_setattr(self, self.__class__, name, value)
    def __call__(self, *args):
        # Calling the proxy invokes the wrapped C callback.
        return svn_wc_invoke_status_func2(self, *args)
# Register the proxy class with the SWIG-generated C module.
svn_wc_status_func2_t_swigregister = _wc.svn_wc_status_func2_t_swigregister
svn_wc_status_func2_t_swigregister(svn_wc_status_func2_t)
class svn_wc_status_func_t:
    """Proxy of C svn_wc_status_func_t struct"""
    # SWIG accessor tables; the lambdas below are rebound by the
    # def __getattr__/__setattr__ later in this class body.
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_wc_status_func_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_wc_status_func_t, name)
    def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined")
    __repr__ = _swig_repr
    def set_parent_pool(self, parent_pool=None):
        """Record the pool that owns this proxy's memory (defaults to the
        global application pool)."""
        import libsvn.core, weakref
        self.__dict__["_parent_pool"] = \
            parent_pool or libsvn.core.application_pool;
        # Keep a weak reference to the pool's validity flag so attribute
        # access can later detect that the pool was destroyed.
        if self.__dict__["_parent_pool"]:
            self.__dict__["_is_valid"] = weakref.ref(
                self.__dict__["_parent_pool"]._is_valid)
    def assert_valid(self):
        """Assert that this object is using valid pool memory"""
        if "_is_valid" in self.__dict__:
            assert self.__dict__["_is_valid"](), "Variable has already been deleted"
    def __getattr__(self, name):
        """Get an attribute from this object"""
        self.assert_valid()
        value = _swig_getattr(self, self.__class__, name)
        # Re-attach Python-level metadata recorded by __setattr__, then
        # verify the returned value's pool memory is still valid.
        members = self.__dict__.get("_members")
        if members is not None:
            _copy_metadata_deep(value, members.get(name))
        _assert_valid_deep(value)
        return value
    def __setattr__(self, name, value):
        """Set an attribute on this object"""
        self.assert_valid()
        # Remember the Python value so __getattr__ can restore its metadata.
        self.__dict__.setdefault("_members",{})[name] = value
        return _swig_setattr(self, self.__class__, name, value)
    def __call__(self, *args):
        # Calling the proxy invokes the wrapped C callback.
        return svn_wc_invoke_status_func(self, *args)
# Register the proxy class with the SWIG-generated C module.
svn_wc_status_func_t_swigregister = _wc.svn_wc_status_func_t_swigregister
svn_wc_status_func_t_swigregister(svn_wc_status_func_t)
class svn_wc_get_file_t:
    """Proxy of C svn_wc_get_file_t struct"""
    # SWIG accessor tables; the lambdas below are rebound by the
    # def __getattr__/__setattr__ later in this class body.
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_wc_get_file_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_wc_get_file_t, name)
    def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined")
    __repr__ = _swig_repr
    def set_parent_pool(self, parent_pool=None):
        """Record the pool that owns this proxy's memory (defaults to the
        global application pool)."""
        import libsvn.core, weakref
        self.__dict__["_parent_pool"] = \
            parent_pool or libsvn.core.application_pool;
        # Keep a weak reference to the pool's validity flag so attribute
        # access can later detect that the pool was destroyed.
        if self.__dict__["_parent_pool"]:
            self.__dict__["_is_valid"] = weakref.ref(
                self.__dict__["_parent_pool"]._is_valid)
    def assert_valid(self):
        """Assert that this object is using valid pool memory"""
        if "_is_valid" in self.__dict__:
            assert self.__dict__["_is_valid"](), "Variable has already been deleted"
    def __getattr__(self, name):
        """Get an attribute from this object"""
        self.assert_valid()
        value = _swig_getattr(self, self.__class__, name)
        # Re-attach Python-level metadata recorded by __setattr__, then
        # verify the returned value's pool memory is still valid.
        members = self.__dict__.get("_members")
        if members is not None:
            _copy_metadata_deep(value, members.get(name))
        _assert_valid_deep(value)
        return value
    def __setattr__(self, name, value):
        """Set an attribute on this object"""
        self.assert_valid()
        # Remember the Python value so __getattr__ can restore its metadata.
        self.__dict__.setdefault("_members",{})[name] = value
        return _swig_setattr(self, self.__class__, name, value)
    def __call__(self, *args):
        # Calling the proxy invokes the wrapped C callback.
        return svn_wc_invoke_get_file(self, *args)
# Register the proxy class with the SWIG-generated C module.
svn_wc_get_file_t_swigregister = _wc.svn_wc_get_file_t_swigregister
svn_wc_get_file_t_swigregister(svn_wc_get_file_t)
class svn_wc_dirents_func_t:
    """Proxy of C svn_wc_dirents_func_t struct"""
    # SWIG accessor tables; the lambdas below are rebound by the
    # def __getattr__/__setattr__ later in this class body.
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_wc_dirents_func_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_wc_dirents_func_t, name)
    def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined")
    __repr__ = _swig_repr
    def set_parent_pool(self, parent_pool=None):
        """Record the pool that owns this proxy's memory (defaults to the
        global application pool)."""
        import libsvn.core, weakref
        self.__dict__["_parent_pool"] = \
            parent_pool or libsvn.core.application_pool;
        # Keep a weak reference to the pool's validity flag so attribute
        # access can later detect that the pool was destroyed.
        if self.__dict__["_parent_pool"]:
            self.__dict__["_is_valid"] = weakref.ref(
                self.__dict__["_parent_pool"]._is_valid)
    def assert_valid(self):
        """Assert that this object is using valid pool memory"""
        if "_is_valid" in self.__dict__:
            assert self.__dict__["_is_valid"](), "Variable has already been deleted"
    def __getattr__(self, name):
        """Get an attribute from this object"""
        self.assert_valid()
        value = _swig_getattr(self, self.__class__, name)
        # Re-attach Python-level metadata recorded by __setattr__, then
        # verify the returned value's pool memory is still valid.
        members = self.__dict__.get("_members")
        if members is not None:
            _copy_metadata_deep(value, members.get(name))
        _assert_valid_deep(value)
        return value
    def __setattr__(self, name, value):
        """Set an attribute on this object"""
        self.assert_valid()
        # Remember the Python value so __getattr__ can restore its metadata.
        self.__dict__.setdefault("_members",{})[name] = value
        return _swig_setattr(self, self.__class__, name, value)
    def __call__(self, *args):
        # Calling the proxy invokes the wrapped C callback.
        return svn_wc_invoke_dirents_func(self, *args)
# Register the proxy class with the SWIG-generated C module.
svn_wc_dirents_func_t_swigregister = _wc.svn_wc_dirents_func_t_swigregister
svn_wc_dirents_func_t_swigregister(svn_wc_dirents_func_t)
class svn_wc_canonicalize_svn_prop_get_file_t:
    """Proxy of C svn_wc_canonicalize_svn_prop_get_file_t struct"""
    # SWIG dispatch tables for generated member accessors.
    __swig_setmethods__ = {}
    # Shadowed by the pool-aware __setattr__/__getattr__ defined below.
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_wc_canonicalize_svn_prop_get_file_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_wc_canonicalize_svn_prop_get_file_t, name)
    # Instances are produced by the C layer; direct construction is an error.
    def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined")
    __repr__ = _swig_repr
    def set_parent_pool(self, parent_pool=None):
        """Create a new proxy object for svn_wc_canonicalize_svn_prop_get_file_t"""
        import libsvn.core, weakref
        # Write through __dict__ directly to bypass the custom __setattr__.
        self.__dict__["_parent_pool"] = \
            parent_pool or libsvn.core.application_pool;
        if self.__dict__["_parent_pool"]:
            # Weak reference lets assert_valid detect a destroyed pool.
            self.__dict__["_is_valid"] = weakref.ref(
                self.__dict__["_parent_pool"]._is_valid)
    def assert_valid(self):
        """Assert that this object is using valid pool memory"""
        if "_is_valid" in self.__dict__:
            assert self.__dict__["_is_valid"](), "Variable has already been deleted"
    def __getattr__(self, name):
        """Get an attribute from this object"""
        self.assert_valid()
        value = _swig_getattr(self, self.__class__, name)
        # Restore metadata recorded by __setattr__, then validate the value.
        members = self.__dict__.get("_members")
        if members is not None:
            _copy_metadata_deep(value, members.get(name))
        _assert_valid_deep(value)
        return value
    def __setattr__(self, name, value):
        """Set an attribute on this object"""
        self.assert_valid()
        # Remember the Python-side value so __getattr__ can re-apply it.
        self.__dict__.setdefault("_members",{})[name] = value
        return _swig_setattr(self, self.__class__, name, value)
    def __call__(self, *args):
        # Calling the proxy invokes the wrapped C callback.
        return svn_wc_invoke_canonicalize_svn_prop_get_file(self, *args)
# Register the proxy class with the C extension module.
svn_wc_canonicalize_svn_prop_get_file_t_swigregister = _wc.svn_wc_canonicalize_svn_prop_get_file_t_swigregister
svn_wc_canonicalize_svn_prop_get_file_t_swigregister(svn_wc_canonicalize_svn_prop_get_file_t)
class svn_wc_upgrade_get_repos_info_t:
    """Proxy of C svn_wc_upgrade_get_repos_info_t struct"""
    # SWIG dispatch tables for generated member accessors.
    __swig_setmethods__ = {}
    # Shadowed by the pool-aware __setattr__/__getattr__ defined below.
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_wc_upgrade_get_repos_info_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_wc_upgrade_get_repos_info_t, name)
    # Instances are produced by the C layer; direct construction is an error.
    def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined")
    __repr__ = _swig_repr
    def set_parent_pool(self, parent_pool=None):
        """Create a new proxy object for svn_wc_upgrade_get_repos_info_t"""
        import libsvn.core, weakref
        # Write through __dict__ directly to bypass the custom __setattr__.
        self.__dict__["_parent_pool"] = \
            parent_pool or libsvn.core.application_pool;
        if self.__dict__["_parent_pool"]:
            # Weak reference lets assert_valid detect a destroyed pool.
            self.__dict__["_is_valid"] = weakref.ref(
                self.__dict__["_parent_pool"]._is_valid)
    def assert_valid(self):
        """Assert that this object is using valid pool memory"""
        if "_is_valid" in self.__dict__:
            assert self.__dict__["_is_valid"](), "Variable has already been deleted"
    def __getattr__(self, name):
        """Get an attribute from this object"""
        self.assert_valid()
        value = _swig_getattr(self, self.__class__, name)
        # Restore metadata recorded by __setattr__, then validate the value.
        members = self.__dict__.get("_members")
        if members is not None:
            _copy_metadata_deep(value, members.get(name))
        _assert_valid_deep(value)
        return value
    def __setattr__(self, name, value):
        """Set an attribute on this object"""
        self.assert_valid()
        # Remember the Python-side value so __getattr__ can re-apply it.
        self.__dict__.setdefault("_members",{})[name] = value
        return _swig_setattr(self, self.__class__, name, value)
    def __call__(self, *args):
        # Calling the proxy invokes the wrapped C callback.
        return svn_wc_invoke_upgrade_get_repos_info(self, *args)
# Register the proxy class with the C extension module.
svn_wc_upgrade_get_repos_info_t_swigregister = _wc.svn_wc_upgrade_get_repos_info_t_swigregister
svn_wc_upgrade_get_repos_info_t_swigregister(svn_wc_upgrade_get_repos_info_t)
class svn_wc_relocation_validator3_t:
    """Proxy of C svn_wc_relocation_validator3_t struct"""
    # SWIG dispatch tables for generated member accessors.
    __swig_setmethods__ = {}
    # Shadowed by the pool-aware __setattr__/__getattr__ defined below.
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_wc_relocation_validator3_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_wc_relocation_validator3_t, name)
    # Instances are produced by the C layer; direct construction is an error.
    def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined")
    __repr__ = _swig_repr
    def set_parent_pool(self, parent_pool=None):
        """Create a new proxy object for svn_wc_relocation_validator3_t"""
        import libsvn.core, weakref
        # Write through __dict__ directly to bypass the custom __setattr__.
        self.__dict__["_parent_pool"] = \
            parent_pool or libsvn.core.application_pool;
        if self.__dict__["_parent_pool"]:
            # Weak reference lets assert_valid detect a destroyed pool.
            self.__dict__["_is_valid"] = weakref.ref(
                self.__dict__["_parent_pool"]._is_valid)
    def assert_valid(self):
        """Assert that this object is using valid pool memory"""
        if "_is_valid" in self.__dict__:
            assert self.__dict__["_is_valid"](), "Variable has already been deleted"
    def __getattr__(self, name):
        """Get an attribute from this object"""
        self.assert_valid()
        value = _swig_getattr(self, self.__class__, name)
        # Restore metadata recorded by __setattr__, then validate the value.
        members = self.__dict__.get("_members")
        if members is not None:
            _copy_metadata_deep(value, members.get(name))
        _assert_valid_deep(value)
        return value
    def __setattr__(self, name, value):
        """Set an attribute on this object"""
        self.assert_valid()
        # Remember the Python-side value so __getattr__ can re-apply it.
        self.__dict__.setdefault("_members",{})[name] = value
        return _swig_setattr(self, self.__class__, name, value)
    def __call__(self, *args):
        # Calling the proxy invokes the wrapped C callback.
        return svn_wc_invoke_relocation_validator3(self, *args)
# Register the proxy class with the C extension module.
svn_wc_relocation_validator3_t_swigregister = _wc.svn_wc_relocation_validator3_t_swigregister
svn_wc_relocation_validator3_t_swigregister(svn_wc_relocation_validator3_t)
class svn_wc_relocation_validator2_t:
    """Proxy of C svn_wc_relocation_validator2_t struct"""
    # SWIG dispatch tables for generated member accessors.
    __swig_setmethods__ = {}
    # Shadowed by the pool-aware __setattr__/__getattr__ defined below.
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_wc_relocation_validator2_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_wc_relocation_validator2_t, name)
    # Instances are produced by the C layer; direct construction is an error.
    def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined")
    __repr__ = _swig_repr
    def set_parent_pool(self, parent_pool=None):
        """Create a new proxy object for svn_wc_relocation_validator2_t"""
        import libsvn.core, weakref
        # Write through __dict__ directly to bypass the custom __setattr__.
        self.__dict__["_parent_pool"] = \
            parent_pool or libsvn.core.application_pool;
        if self.__dict__["_parent_pool"]:
            # Weak reference lets assert_valid detect a destroyed pool.
            self.__dict__["_is_valid"] = weakref.ref(
                self.__dict__["_parent_pool"]._is_valid)
    def assert_valid(self):
        """Assert that this object is using valid pool memory"""
        if "_is_valid" in self.__dict__:
            assert self.__dict__["_is_valid"](), "Variable has already been deleted"
    def __getattr__(self, name):
        """Get an attribute from this object"""
        self.assert_valid()
        value = _swig_getattr(self, self.__class__, name)
        # Restore metadata recorded by __setattr__, then validate the value.
        members = self.__dict__.get("_members")
        if members is not None:
            _copy_metadata_deep(value, members.get(name))
        _assert_valid_deep(value)
        return value
    def __setattr__(self, name, value):
        """Set an attribute on this object"""
        self.assert_valid()
        # Remember the Python-side value so __getattr__ can re-apply it.
        self.__dict__.setdefault("_members",{})[name] = value
        return _swig_setattr(self, self.__class__, name, value)
    def __call__(self, *args):
        # Calling the proxy invokes the wrapped C callback.
        return svn_wc_invoke_relocation_validator2(self, *args)
# Register the proxy class with the C extension module.
svn_wc_relocation_validator2_t_swigregister = _wc.svn_wc_relocation_validator2_t_swigregister
svn_wc_relocation_validator2_t_swigregister(svn_wc_relocation_validator2_t)
class svn_wc_relocation_validator_t:
    """Proxy of C svn_wc_relocation_validator_t struct"""
    # SWIG dispatch tables for generated member accessors.
    __swig_setmethods__ = {}
    # Shadowed by the pool-aware __setattr__/__getattr__ defined below.
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_wc_relocation_validator_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_wc_relocation_validator_t, name)
    # Instances are produced by the C layer; direct construction is an error.
    def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined")
    __repr__ = _swig_repr
    def set_parent_pool(self, parent_pool=None):
        """Create a new proxy object for svn_wc_relocation_validator_t"""
        import libsvn.core, weakref
        # Write through __dict__ directly to bypass the custom __setattr__.
        self.__dict__["_parent_pool"] = \
            parent_pool or libsvn.core.application_pool;
        if self.__dict__["_parent_pool"]:
            # Weak reference lets assert_valid detect a destroyed pool.
            self.__dict__["_is_valid"] = weakref.ref(
                self.__dict__["_parent_pool"]._is_valid)
    def assert_valid(self):
        """Assert that this object is using valid pool memory"""
        if "_is_valid" in self.__dict__:
            assert self.__dict__["_is_valid"](), "Variable has already been deleted"
    def __getattr__(self, name):
        """Get an attribute from this object"""
        self.assert_valid()
        value = _swig_getattr(self, self.__class__, name)
        # Restore metadata recorded by __setattr__, then validate the value.
        members = self.__dict__.get("_members")
        if members is not None:
            _copy_metadata_deep(value, members.get(name))
        _assert_valid_deep(value)
        return value
    def __setattr__(self, name, value):
        """Set an attribute on this object"""
        self.assert_valid()
        # Remember the Python-side value so __getattr__ can re-apply it.
        self.__dict__.setdefault("_members",{})[name] = value
        return _swig_setattr(self, self.__class__, name, value)
    def __call__(self, *args):
        # Calling the proxy invokes the wrapped C callback.
        return svn_wc_invoke_relocation_validator(self, *args)
# Register the proxy class with the C extension module.
svn_wc_relocation_validator_t_swigregister = _wc.svn_wc_relocation_validator_t_swigregister
svn_wc_relocation_validator_t_swigregister(svn_wc_relocation_validator_t)
class svn_changelist_receiver_t:
    """Proxy of C svn_changelist_receiver_t struct"""
    # SWIG dispatch tables for generated member accessors.
    __swig_setmethods__ = {}
    # Shadowed by the pool-aware __setattr__/__getattr__ defined below.
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_changelist_receiver_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_changelist_receiver_t, name)
    # Instances are produced by the C layer; direct construction is an error.
    def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined")
    __repr__ = _swig_repr
    def set_parent_pool(self, parent_pool=None):
        """Create a new proxy object for svn_changelist_receiver_t"""
        import libsvn.core, weakref
        # Write through __dict__ directly to bypass the custom __setattr__.
        self.__dict__["_parent_pool"] = \
            parent_pool or libsvn.core.application_pool;
        if self.__dict__["_parent_pool"]:
            # Weak reference lets assert_valid detect a destroyed pool.
            self.__dict__["_is_valid"] = weakref.ref(
                self.__dict__["_parent_pool"]._is_valid)
    def assert_valid(self):
        """Assert that this object is using valid pool memory"""
        if "_is_valid" in self.__dict__:
            assert self.__dict__["_is_valid"](), "Variable has already been deleted"
    def __getattr__(self, name):
        """Get an attribute from this object"""
        self.assert_valid()
        value = _swig_getattr(self, self.__class__, name)
        # Restore metadata recorded by __setattr__, then validate the value.
        members = self.__dict__.get("_members")
        if members is not None:
            _copy_metadata_deep(value, members.get(name))
        _assert_valid_deep(value)
        return value
    def __setattr__(self, name, value):
        """Set an attribute on this object"""
        self.assert_valid()
        # Remember the Python-side value so __getattr__ can re-apply it.
        self.__dict__.setdefault("_members",{})[name] = value
        return _swig_setattr(self, self.__class__, name, value)
    def __call__(self, *args):
        # Calling the proxy invokes the wrapped C callback.
        return svn_changelist_invoke_receiver(self, *args)
# Register the proxy class with the C extension module.
svn_changelist_receiver_t_swigregister = _wc.svn_changelist_receiver_t_swigregister
svn_changelist_receiver_t_swigregister(svn_changelist_receiver_t)
def svn_wc_swig_init_asp_dot_net_hack(*args):
    """svn_wc_swig_init_asp_dot_net_hack(apr_pool_t pool) -> svn_error_t"""
    # Thin wrapper: delegate straight to the C extension module.
    return _wc.svn_wc_swig_init_asp_dot_net_hack(*args)
# Executed once at module import time.
# NOTE(review): called without the pool argument the docstring describes —
# presumably the C wrapper supplies a default; confirm against the .i file.
svn_wc_swig_init_asp_dot_net_hack()
# This file is compatible with both classic and new-style classes.
| 43.472784 | 156 | 0.758835 | 31,862 | 210,843 | 4.321449 | 0.018831 | 0.088968 | 0.044942 | 0.025303 | 0.938391 | 0.856584 | 0.791437 | 0.744499 | 0.715034 | 0.663091 | 0 | 0.005744 | 0.155234 | 210,843 | 4,849 | 157 | 43.481749 | 0.767305 | 0.313281 | 0 | 0.442748 | 1 | 0 | 0.068894 | 0.000302 | 0 | 0 | 0 | 0 | 0.078244 | 1 | 0.200382 | false | 0.000382 | 0.019847 | 0.019084 | 0.480153 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
54e66065be11b5a56823edae920dcb4b03357081 | 2,335 | py | Python | test/container/test_php.py | renatomefi/docker-php | d319d34ef46902907cd4f32080a70aafaecf0e42 | [
"MIT"
] | null | null | null | test/container/test_php.py | renatomefi/docker-php | d319d34ef46902907cd4f32080a70aafaecf0e42 | [
"MIT"
] | null | null | null | test/container/test_php.py | renatomefi/docker-php | d319d34ef46902907cd4f32080a70aafaecf0e42 | [
"MIT"
] | null | null | null | import pytest
@pytest.mark.php_app
def test_php_runs_as_app(host):
    """PHP must execute as the unprivileged app user (uid/gid 1000)."""
    gid = host.run('php -r "echo getmygid();"')
    assert gid.stdout == '1000'
    uid = host.run('php -r "echo getmyuid();"')
    assert uid.stdout == '1000'
@pytest.mark.php_root
def test_php_runs_as_root(host):
    """PHP must execute as root (uid/gid 0) in the root-flavoured image."""
    gid = host.run('php -r "echo getmygid();"')
    assert gid.stdout == '0'
    uid = host.run('php -r "echo getmyuid();"')
    assert uid.stdout == '0'
@pytest.mark.php_nts
@pytest.mark.php_zts
def test_php_pcntl_is_enabled(host):
    """Both pcntl entry points must exist in NTS and ZTS builds."""
    for probe in (
        'php -r "exit(function_exists(\'pcntl_signal\') ? 0 : 255);"',
        'php -r "exit(function_exists(\'pcntl_async_signals\') ? 0 : 255);"',
    ):
        assert host.run(probe).rc == 0
@pytest.mark.php_nts
@pytest.mark.php_zts
def test_php_ext_uv_is_enabled(host):
    """The uv extension's core functions must exist in NTS and ZTS builds."""
    for probe in (
        'php -r "exit(function_exists(\'uv_loop_new\') ? 0 : 255);"',
        'php -r "exit(function_exists(\'uv_timer_init\') ? 0 : 255);"',
    ):
        assert host.run(probe).rc == 0
@pytest.mark.php_zts
def test_php_ext_parallel_is_enabled(host):
    """The parallel extension's classes must exist in the ZTS build."""
    for probe in (
        'php -r "exit(class_exists(\'parallel\\Runtime\') ? 0 : 255);"',
        'php -r "exit(class_exists(\'parallel\\Future\') ? 0 : 255);"',
    ):
        assert host.run(probe).rc == 0
@pytest.mark.php_nts
def test_php_ext_parallel_is_not_enabled(host):
    """The parallel extension must be absent from the NTS build (exit 255)."""
    for probe in (
        'php -r "exit(class_exists(\'parallel\\Runtime\') ? 0 : 255);"',
        'php -r "exit(class_exists(\'parallel\\Future\') ? 0 : 255);"',
    ):
        assert host.run(probe).rc == 255
@pytest.mark.php_zts
def test_php_ext_parallel_is_functional(host):
    """The functional parallel scripts signal success via exit codes."""
    expected = {
        'php /tests/container/functional/parallel.php': 33,
        'php /tests/container/functional/parallel-multi.php': 65,
    }
    # dicts preserve insertion order, so scripts run in the original order.
    for command, code in expected.items():
        assert host.run(command).rc == code
@pytest.mark.php_zts
def test_php_ext_uv_is_functional(host):
    """The uv timer script must fire its callbacks in order and exit cleanly."""
    result = host.run('php /tests/container/functional/uv-timer.php')
    assert result.stdout == '0123finished'
    assert result.rc == 0
@pytest.mark.php_nts
@pytest.mark.php_zts
def test_php_ext_vips_is_enabled(host):
    """vips_version must be available in both NTS and ZTS builds."""
    probe = 'php -r "exit(function_exists(\'vips_version\') ? 0 : 255);"'
    assert host.run(probe).rc == 0
070e7b126793c0b46daf1f0ffc882aa8c83e085f | 11,436 | bzl | Python | rules/private/proto_repository_tools_srcs.bzl | heartless-clown/rules_proto | 99c0d0c7a00c1df7221afc3331b5d859a02c420f | [
"Apache-2.0"
] | 249 | 2018-10-24T21:11:08.000Z | 2022-03-31T03:28:34.000Z | rules/private/proto_repository_tools_srcs.bzl | heartless-clown/rules_proto | 99c0d0c7a00c1df7221afc3331b5d859a02c420f | [
"Apache-2.0"
] | 147 | 2018-12-05T18:58:13.000Z | 2022-03-26T15:41:07.000Z | rules/private/proto_repository_tools_srcs.bzl | heartless-clown/rules_proto | 99c0d0c7a00c1df7221afc3331b5d859a02c420f | [
"Apache-2.0"
] | 126 | 2018-11-20T22:34:48.000Z | 2022-03-18T13:42:05.000Z | """ Code generated by list_repository_tools_srcs.go; DO NOT EDIT."""
PROTO_REPOSITORY_TOOLS_SRCS = [
"@build_stack_rules_proto//:BUILD.bazel",
"@build_stack_rules_proto//cmd/depsgen:BUILD.bazel",
"@build_stack_rules_proto//cmd/depsgen:config.go",
"@build_stack_rules_proto//cmd/depsgen:depsgen.go",
"@build_stack_rules_proto//cmd/depsgen:generator.go",
"@build_stack_rules_proto//cmd/depsgen:template.go",
"@build_stack_rules_proto//cmd/examplegen:BUILD.bazel",
"@build_stack_rules_proto//cmd/examplegen:config.go",
"@build_stack_rules_proto//cmd/examplegen:examplegen.go",
"@build_stack_rules_proto//cmd/examplegen:generator.go",
"@build_stack_rules_proto//cmd/examplegen:linewriter.go",
"@build_stack_rules_proto//cmd/examplegen:template.go",
"@build_stack_rules_proto//cmd/gazelle:BUILD.bazel",
"@build_stack_rules_proto//cmd/gazelle:diff.go",
"@build_stack_rules_proto//cmd/gazelle:fix-update.go",
"@build_stack_rules_proto//cmd/gazelle:fix.go",
"@build_stack_rules_proto//cmd/gazelle:gazelle.go",
"@build_stack_rules_proto//cmd/gazelle:langs.go",
"@build_stack_rules_proto//cmd/gazelle:metaresolver.go",
"@build_stack_rules_proto//cmd/gazelle:print.go",
"@build_stack_rules_proto//cmd/gazelle:update-repos.go",
"@build_stack_rules_proto//cmd/gazelle:wspace.go",
"@build_stack_rules_proto//cmd/gencopy:BUILD.bazel",
"@build_stack_rules_proto//cmd/gencopy:gencopy.go",
"@build_stack_rules_proto//deps:BUILD.bazel",
"@build_stack_rules_proto//docs:BUILD.bazel",
"@build_stack_rules_proto//docs/_site:BUILD.bazel",
"@build_stack_rules_proto//example:BUILD.bazel",
"@build_stack_rules_proto//example/golden:BUILD.bazel",
"@build_stack_rules_proto//example/person:BUILD.bazel",
"@build_stack_rules_proto//example/place:BUILD.bazel",
"@build_stack_rules_proto//example/routeguide:BUILD.bazel",
"@build_stack_rules_proto//example/routeguide/cc:BUILD.bazel",
"@build_stack_rules_proto//example/routeguide/closure:BUILD.bazel",
"@build_stack_rules_proto//example/routeguide/java:BUILD.bazel",
"@build_stack_rules_proto//example/routeguide/nodejs:BUILD.bazel",
"@build_stack_rules_proto//example/routeguide/scala:BUILD.bazel",
"@build_stack_rules_proto//example/thing:BUILD.bazel",
"@build_stack_rules_proto//example/toolchain/prebuilt:BUILD.bazel",
"@build_stack_rules_proto//example/toolchain/standard:BUILD.bazel",
"@build_stack_rules_proto//language/example:BUILD.bazel",
"@build_stack_rules_proto//language/example:example.go",
"@build_stack_rules_proto//language/protobuf:BUILD.bazel",
"@build_stack_rules_proto//language/protobuf/oldtestdata/gogo:BUILD.bazel",
"@build_stack_rules_proto//language/protobuf/oldtestdata/java:BUILD.bazel",
"@build_stack_rules_proto//language/protobuf:protobuf.go",
"@build_stack_rules_proto//pkg:BUILD.bazel",
"@build_stack_rules_proto//pkg/goldentest:BUILD.bazel",
"@build_stack_rules_proto//pkg/goldentest:cases.go",
"@build_stack_rules_proto//pkg/language/noop:BUILD.bazel",
"@build_stack_rules_proto//pkg/language/noop:noop.go",
"@build_stack_rules_proto//pkg/language/protobuf:BUILD.bazel",
"@build_stack_rules_proto//pkg/language/protobuf:config.go",
"@build_stack_rules_proto//pkg/language/protobuf:fix.go",
"@build_stack_rules_proto//pkg/language/protobuf:generate.go",
"@build_stack_rules_proto//pkg/language/protobuf:kinds.go",
"@build_stack_rules_proto//pkg/language/protobuf:lang.go",
"@build_stack_rules_proto//pkg/language/protobuf:override.go",
"@build_stack_rules_proto//pkg/language/protobuf:resolve.go",
"@build_stack_rules_proto//pkg/plugin/builtin:BUILD.bazel",
"@build_stack_rules_proto//pkg/plugin/builtin:cpp_plugin.go",
"@build_stack_rules_proto//pkg/plugin/builtin:csharp_plugin.go",
"@build_stack_rules_proto//pkg/plugin/builtin:doc.go",
"@build_stack_rules_proto//pkg/plugin/builtin:grpc_grpc_cpp.go",
"@build_stack_rules_proto//pkg/plugin/builtin:java_plugin.go",
"@build_stack_rules_proto//pkg/plugin/builtin:js_closure_plugin.go",
"@build_stack_rules_proto//pkg/plugin/builtin:js_common_plugin.go",
"@build_stack_rules_proto//pkg/plugin/builtin:objc_plugin.go",
"@build_stack_rules_proto//pkg/plugin/builtin:php_plugin.go",
"@build_stack_rules_proto//pkg/plugin/builtin:python_plugin.go",
"@build_stack_rules_proto//pkg/plugin/builtin:ruby_plugin.go",
"@build_stack_rules_proto//pkg/plugin/gogo/protobuf:BUILD.bazel",
"@build_stack_rules_proto//pkg/plugin/gogo/protobuf:protoc-gen-gogo.go",
"@build_stack_rules_proto//pkg/plugin/golang/protobuf:BUILD.bazel",
"@build_stack_rules_proto//pkg/plugin/golang/protobuf:protoc-gen-go.go",
"@build_stack_rules_proto//pkg/plugin/grpc/grpc:BUILD.bazel",
"@build_stack_rules_proto//pkg/plugin/grpc/grpc:protoc-gen-grpc-python.go",
"@build_stack_rules_proto//pkg/plugin/grpc/grpcgo:BUILD.bazel",
"@build_stack_rules_proto//pkg/plugin/grpc/grpcgo:protoc-gen-go-grpc.go",
"@build_stack_rules_proto//pkg/plugin/grpc/grpcjava:BUILD.bazel",
"@build_stack_rules_proto//pkg/plugin/grpc/grpcjava:protoc-gen-grpc-java.go",
"@build_stack_rules_proto//pkg/plugin/grpc/grpcnode:BUILD.bazel",
"@build_stack_rules_proto//pkg/plugin/grpc/grpcnode:protoc-gen-grpc-node.go",
"@build_stack_rules_proto//pkg/plugin/grpcecosystem/grpcgateway:BUILD.bazel",
"@build_stack_rules_proto//pkg/plugin/grpcecosystem/grpcgateway:protoc-gen-grpc-gateway.go",
"@build_stack_rules_proto//pkg/plugin/scalapb/scalapb:BUILD.bazel",
"@build_stack_rules_proto//pkg/plugin/scalapb/scalapb:protoc_gen_scala.go",
"@build_stack_rules_proto//pkg/plugin/stackb/grpc_js:BUILD.bazel",
"@build_stack_rules_proto//pkg/plugin/stackb/grpc_js:protoc-gen-grpc-js.go",
"@build_stack_rules_proto//pkg/plugin/stephenh/ts-proto:BUILD.bazel",
"@build_stack_rules_proto//pkg/plugin/stephenh/ts-proto:protoc-gen-ts-proto.go",
"@build_stack_rules_proto//pkg/plugintest:BUILD.bazel",
"@build_stack_rules_proto//pkg/plugintest:case.go",
"@build_stack_rules_proto//pkg/plugintest:doc.go",
"@build_stack_rules_proto//pkg/plugintest:utils.go",
"@build_stack_rules_proto//pkg/protoc:BUILD.bazel",
"@build_stack_rules_proto//pkg/protoc:depsresolver.go",
"@build_stack_rules_proto//pkg/protoc:file.go",
"@build_stack_rules_proto//pkg/protoc:intent.go",
"@build_stack_rules_proto//pkg/protoc:language_config.go",
"@build_stack_rules_proto//pkg/protoc:language_plugin_config.go",
"@build_stack_rules_proto//pkg/protoc:language_rule.go",
"@build_stack_rules_proto//pkg/protoc:language_rule_config.go",
"@build_stack_rules_proto//pkg/protoc:other_proto_library.go",
"@build_stack_rules_proto//pkg/protoc:package.go",
"@build_stack_rules_proto//pkg/protoc:package_config.go",
"@build_stack_rules_proto//pkg/protoc:plugin.go",
"@build_stack_rules_proto//pkg/protoc:plugin_configuration.go",
"@build_stack_rules_proto//pkg/protoc:plugin_context.go",
"@build_stack_rules_proto//pkg/protoc:plugin_registry.go",
"@build_stack_rules_proto//pkg/protoc:proto_compile.go",
"@build_stack_rules_proto//pkg/protoc:proto_compiled_sources.go",
"@build_stack_rules_proto//pkg/protoc:proto_descriptor_set.go",
"@build_stack_rules_proto//pkg/protoc:proto_enum_option_collector.go",
"@build_stack_rules_proto//pkg/protoc:proto_library.go",
"@build_stack_rules_proto//pkg/protoc:protoc_configuration.go",
"@build_stack_rules_proto//pkg/protoc:registry.go",
"@build_stack_rules_proto//pkg/protoc:resolver.go",
"@build_stack_rules_proto//pkg/protoc:rewrite.go",
"@build_stack_rules_proto//pkg/protoc:rule_provider.go",
"@build_stack_rules_proto//pkg/protoc:rule_registry.go",
"@build_stack_rules_proto//pkg/protoc:ruleindex.go",
"@build_stack_rules_proto//pkg/protoc:syntaxutil.go",
"@build_stack_rules_proto//pkg/protoc:yconfig.go",
"@build_stack_rules_proto//pkg/rule/rules_cc:BUILD.bazel",
"@build_stack_rules_proto//pkg/rule/rules_cc:cc_library.go",
"@build_stack_rules_proto//pkg/rule/rules_cc:grpc_cc_library.go",
"@build_stack_rules_proto//pkg/rule/rules_cc:proto_cc_library.go",
"@build_stack_rules_proto//pkg/rule/rules_closure:BUILD.bazel",
"@build_stack_rules_proto//pkg/rule/rules_closure:closure_js_library.go",
"@build_stack_rules_proto//pkg/rule/rules_closure:grpc_closure_js_library.go",
"@build_stack_rules_proto//pkg/rule/rules_closure:proto_closure_js_library.go",
"@build_stack_rules_proto//pkg/rule/rules_go:BUILD.bazel",
"@build_stack_rules_proto//pkg/rule/rules_go:go_library.go",
"@build_stack_rules_proto//pkg/rule/rules_java:BUILD.bazel",
"@build_stack_rules_proto//pkg/rule/rules_java:grpc_java_library.go",
"@build_stack_rules_proto//pkg/rule/rules_java:java_library.go",
"@build_stack_rules_proto//pkg/rule/rules_java:proto_java_library.go",
"@build_stack_rules_proto//pkg/rule/rules_nodejs:BUILD.bazel",
"@build_stack_rules_proto//pkg/rule/rules_nodejs:grpc_nodejs_library.go",
"@build_stack_rules_proto//pkg/rule/rules_nodejs:js_library.go",
"@build_stack_rules_proto//pkg/rule/rules_nodejs:proto_nodejs_library.go",
"@build_stack_rules_proto//pkg/rule/rules_nodejs:proto_ts_library.go",
"@build_stack_rules_proto//pkg/rule/rules_python:BUILD.bazel",
"@build_stack_rules_proto//pkg/rule/rules_python:grpc_py_library.go",
"@build_stack_rules_proto//pkg/rule/rules_python:proto_py_library.go",
"@build_stack_rules_proto//pkg/rule/rules_python:py_library.go",
"@build_stack_rules_proto//pkg/rule/rules_scala:BUILD.bazel",
"@build_stack_rules_proto//pkg/rule/rules_scala:scala_library.go",
"@build_stack_rules_proto//pkg/rule/rules_scala:scala_proto_library.go",
"@build_stack_rules_proto//plugin:BUILD.bazel",
"@build_stack_rules_proto//plugin/builtin:BUILD.bazel",
"@build_stack_rules_proto//plugin/gogo/protobuf:BUILD.bazel",
"@build_stack_rules_proto//plugin/golang/protobuf:BUILD.bazel",
"@build_stack_rules_proto//plugin/grpc/grpc:BUILD.bazel",
"@build_stack_rules_proto//plugin/grpc/grpc-go:BUILD.bazel",
"@build_stack_rules_proto//plugin/grpc/grpc-java:BUILD.bazel",
"@build_stack_rules_proto//plugin/grpc/grpc-node:BUILD.bazel",
"@build_stack_rules_proto//plugin/grpc-ecosystem/grpc-gateway:BUILD.bazel",
"@build_stack_rules_proto//plugin/scalapb/scalapb:BUILD.bazel",
"@build_stack_rules_proto//plugin/stackb/grpc_js:BUILD.bazel",
"@build_stack_rules_proto//plugin/stephenh/ts-proto:BUILD.bazel",
"@build_stack_rules_proto//rules:BUILD.bazel",
"@build_stack_rules_proto//rules/cc:BUILD.bazel",
"@build_stack_rules_proto//rules/closure:BUILD.bazel",
"@build_stack_rules_proto//rules/go:BUILD.bazel",
"@build_stack_rules_proto//rules/java:BUILD.bazel",
"@build_stack_rules_proto//rules/nodejs:BUILD.bazel",
"@build_stack_rules_proto//rules/private:BUILD.bazel",
"@build_stack_rules_proto//rules/private:list_repository_tools_srcs.go",
"@build_stack_rules_proto//rules/proto:BUILD.bazel",
"@build_stack_rules_proto//rules/py:BUILD.bazel",
"@build_stack_rules_proto//rules/scala:BUILD.bazel",
"@build_stack_rules_proto//rules/ts:BUILD.bazel",
"@build_stack_rules_proto//toolchain:BUILD.bazel",
]
| 63.888268 | 96 | 0.771773 | 1,655 | 11,436 | 4.948036 | 0.071903 | 0.214922 | 0.320552 | 0.427403 | 0.91281 | 0.900598 | 0.882281 | 0.696544 | 0.351081 | 0.145561 | 0 | 0 | 0.077562 | 11,436 | 178 | 97 | 64.247191 | 0.776282 | 0.005334 | 0 | 0 | 1 | 0.00565 | 0.873768 | 0.873768 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.00565 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
07233d010d2175cb7d8f455a8d905b9259d85618 | 124,971 | py | Python | simulation/creatures.py | SFSeeger/Lifegrid | 44afc9dc5e49229e7d2dcfddcfbbf4dc81acf180 | [
"MIT"
] | null | null | null | simulation/creatures.py | SFSeeger/Lifegrid | 44afc9dc5e49229e7d2dcfddcfbbf4dc81acf180 | [
"MIT"
] | null | null | null | simulation/creatures.py | SFSeeger/Lifegrid | 44afc9dc5e49229e7d2dcfddcfbbf4dc81acf180 | [
"MIT"
] | null | null | null | import numpy as np
creatures = {}
creatures['Orbitum'] = {
'name': 'Orbium',
'R': 13,
'T': 10,
'kernels': [
{'m': 0.15, 's': 0.015, 'b': [1], 'h': 1, 'r': 1, 'c0': 0, 'c1': 0}
],
'cells': [
[
0,
0,
0,
0,
0,
0,
0.1,
0.14,
0.1,
0,
0,
0.03,
0.03,
0,
0,
0.3,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0,
0.08,
0.24,
0.3,
0.3,
0.18,
0.14,
0.15,
0.16,
0.15,
0.09,
0.2,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0,
0.15,
0.34,
0.44,
0.46,
0.38,
0.18,
0.14,
0.11,
0.13,
0.19,
0.18,
0.45,
0,
0,
0,
],
[
0,
0,
0,
0,
0.06,
0.13,
0.39,
0.5,
0.5,
0.37,
0.06,
0,
0,
0,
0.02,
0.16,
0.68,
0,
0,
0,
],
[
0,
0,
0,
0.11,
0.17,
0.17,
0.33,
0.4,
0.38,
0.28,
0.14,
0,
0,
0,
0,
0,
0.18,
0.42,
0,
0,
],
[
0,
0,
0.09,
0.18,
0.13,
0.06,
0.08,
0.26,
0.32,
0.32,
0.27,
0,
0,
0,
0,
0,
0,
0.82,
0,
0,
],
[
0.27,
0,
0.16,
0.12,
0,
0,
0,
0.25,
0.38,
0.44,
0.45,
0.34,
0,
0,
0,
0,
0,
0.22,
0.17,
0,
],
[
0,
0.07,
0.2,
0.02,
0,
0,
0,
0.31,
0.48,
0.57,
0.6,
0.57,
0,
0,
0,
0,
0,
0,
0.49,
0,
],
[
0,
0.59,
0.19,
0,
0,
0,
0,
0.2,
0.57,
0.69,
0.76,
0.76,
0.49,
0,
0,
0,
0,
0,
0.36,
0,
],
[
0,
0.58,
0.19,
0,
0,
0,
0,
0,
0.67,
0.83,
0.9,
0.92,
0.87,
0.12,
0,
0,
0,
0,
0.22,
0.07,
],
[
0,
0,
0.46,
0,
0,
0,
0,
0,
0.7,
0.93,
1,
1,
1,
0.61,
0,
0,
0,
0,
0.18,
0.11,
],
[
0,
0,
0.82,
0,
0,
0,
0,
0,
0.47,
1,
1,
0.98,
1,
0.96,
0.27,
0,
0,
0,
0.19,
0.1,
],
[
0,
0,
0.46,
0,
0,
0,
0,
0,
0.25,
1,
1,
0.84,
0.92,
0.97,
0.54,
0.14,
0.04,
0.1,
0.21,
0.05,
],
[
0,
0,
0,
0.4,
0,
0,
0,
0,
0.09,
0.8,
1,
0.82,
0.8,
0.85,
0.63,
0.31,
0.18,
0.19,
0.2,
0.01,
],
[
0,
0,
0,
0.36,
0.1,
0,
0,
0,
0.05,
0.54,
0.86,
0.79,
0.74,
0.72,
0.6,
0.39,
0.28,
0.24,
0.13,
0,
],
[
0,
0,
0,
0.01,
0.3,
0.07,
0,
0,
0.08,
0.36,
0.64,
0.7,
0.64,
0.6,
0.51,
0.39,
0.29,
0.19,
0.04,
0,
],
[
0,
0,
0,
0,
0.1,
0.24,
0.14,
0.1,
0.15,
0.29,
0.45,
0.53,
0.52,
0.46,
0.4,
0.31,
0.21,
0.08,
0,
0,
],
[
0,
0,
0,
0,
0,
0.08,
0.21,
0.21,
0.22,
0.29,
0.36,
0.39,
0.37,
0.33,
0.26,
0.18,
0.09,
0,
0,
0,
],
[
0,
0,
0,
0,
0,
0,
0.03,
0.13,
0.19,
0.22,
0.24,
0.24,
0.23,
0.18,
0.13,
0.05,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0,
0,
0,
0,
0.02,
0.06,
0.08,
0.09,
0.07,
0.05,
0.01,
0,
0,
0,
0,
0,
],
],
}
creatures['geminium'] = {
'name': 'Hydrogeminium',
'R': 18,
'T': 10,
'kernels': [
{
'm': 0.26,
's': 0.036,
'b': [0.5, 1, 0.667],
'h': 1,
'r': 1,
'c0': 0,
'c1': 0,
}
],
'cells': [
[
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.01,
0.02,
0.03,
0.04,
0.04,
0.04,
0.03,
0.02,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.04,
0.1,
0.16,
0.2,
0.23,
0.25,
0.24,
0.21,
0.18,
0.14,
0.1,
0.07,
0.03,
0,
0,
0,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.01,
0.09,
0.2,
0.33,
0.44,
0.52,
0.56,
0.58,
0.55,
0.51,
0.44,
0.37,
0.3,
0.23,
0.16,
0.08,
0.01,
0,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.13,
0.29,
0.45,
0.6,
0.75,
0.85,
0.9,
0.91,
0.88,
0.82,
0.74,
0.64,
0.55,
0.46,
0.36,
0.25,
0.12,
0.03,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.14,
0.38,
0.6,
0.78,
0.93,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
0.99,
0.89,
0.78,
0.67,
0.56,
0.44,
0.3,
0.15,
0.04,
0,
0,
0,
],
[
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.08,
0.39,
0.74,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
0.98,
0.85,
0.74,
0.62,
0.47,
0.3,
0.14,
0.03,
0,
0,
],
[
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.32,
0.76,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
0.88,
0.75,
0.61,
0.45,
0.27,
0.11,
0.01,
0,
],
[
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.35,
0.83,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
0.88,
0.73,
0.57,
0.38,
0.19,
0.05,
0,
],
[
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.5,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
0.99,
1.0,
1.0,
1.0,
1.0,
0.99,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
0.85,
0.67,
0.47,
0.27,
0.11,
0.01,
],
[
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.55,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
0.93,
0.83,
0.79,
0.84,
0.88,
0.89,
0.9,
0.93,
0.98,
1.0,
1.0,
1.0,
1.0,
0.98,
0.79,
0.57,
0.34,
0.15,
0.03,
],
[
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.47,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
0.9,
0.72,
0.54,
0.44,
0.48,
0.6,
0.7,
0.76,
0.82,
0.91,
0.99,
1.0,
1.0,
1.0,
1.0,
0.91,
0.67,
0.41,
0.19,
0.05,
],
[
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.27,
0.99,
1.0,
1.0,
1.0,
1.0,
0.9,
0.71,
0.65,
0.55,
0.38,
0.2,
0.14,
0.21,
0.36,
0.52,
0.64,
0.73,
0.84,
0.95,
1.0,
1.0,
1.0,
1.0,
1.0,
0.78,
0.49,
0.24,
0.07,
],
[
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.14,
0.63,
0.96,
1.0,
1.0,
1.0,
0.84,
0.17,
0,
0,
0,
0,
0,
0,
0,
0.13,
0.35,
0.51,
0.64,
0.77,
0.91,
0.99,
1.0,
1.0,
1.0,
1.0,
0.88,
0.58,
0.29,
0.09,
],
[
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.07,
0.38,
0.72,
0.95,
1.0,
1.0,
1.0,
0.22,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.11,
0.33,
0.5,
0.67,
0.86,
0.99,
1.0,
1.0,
1.0,
1.0,
0.95,
0.64,
0.33,
0.1,
],
[
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.32,
0.49,
0.71,
0.93,
1.0,
1.0,
1.0,
0.56,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.1,
0.31,
0.52,
0.79,
0.98,
1.0,
1.0,
1.0,
1.0,
0.98,
0.67,
0.35,
0.11,
],
[
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.01,
0.6,
0.83,
0.98,
1.0,
1.0,
0.68,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.15,
0.38,
0.71,
0.97,
1.0,
1.0,
1.0,
1.0,
0.97,
0.67,
0.35,
0.11,
],
[
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.51,
0.96,
1.0,
1.0,
0.18,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.09,
0.34,
0.68,
0.95,
1.0,
1.0,
1.0,
1.0,
0.91,
0.61,
0.32,
0.1,
],
[
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.13,
0.56,
0.99,
1.0,
1.0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.17,
0.45,
0.76,
0.96,
1.0,
1.0,
1.0,
1.0,
0.82,
0.52,
0.26,
0.07,
],
[
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.33,
0.7,
0.94,
1.0,
1.0,
0.44,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.33,
0.68,
0.91,
0.99,
1.0,
1.0,
1.0,
1.0,
0.71,
0.42,
0.19,
0.03,
],
[
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.53,
0.89,
1.0,
1.0,
1.0,
0.8,
0.43,
0.04,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.47,
0.86,
1.0,
1.0,
1.0,
1.0,
1.0,
0.95,
0.58,
0.32,
0.12,
0,
],
[
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.77,
0.99,
1.0,
0.97,
0.58,
0.41,
0.33,
0.18,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.54,
0.95,
1.0,
1.0,
1.0,
1.0,
1.0,
0.8,
0.44,
0.21,
0.06,
0,
],
[
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.39,
0.83,
1.0,
1.0,
0.55,
0.11,
0.05,
0.15,
0.22,
0.06,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.58,
0.99,
1.0,
1.0,
1.0,
1.0,
1.0,
0.59,
0.29,
0.11,
0.01,
0,
],
[
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.04,
0.55,
0.81,
0.86,
0.97,
1.0,
1.0,
0.5,
0,
0,
0.01,
0.09,
0.03,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.26,
0.78,
1.0,
1.0,
1.0,
1.0,
1.0,
0.66,
0.35,
0.13,
0.03,
0,
0,
],
[
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.33,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
0.93,
0.11,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.23,
0.73,
0.95,
1.0,
1.0,
1.0,
1.0,
1.0,
0.62,
0.35,
0.12,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.51,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
0.72,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
0.56,
0.25,
0.09,
0,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.12,
0.38,
1.0,
1.0,
1.0,
0.66,
0.08,
0.55,
1.0,
1.0,
1.0,
0.03,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.35,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
0.67,
0.12,
0,
0,
0,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.6,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
0.49,
0,
0,
0.87,
1.0,
0.88,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
0.7,
0.07,
0,
0,
0,
0,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.04,
0.21,
0.48,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
0,
0,
0.04,
0.42,
0.26,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.12,
0.21,
0.34,
0.58,
1.0,
1.0,
1.0,
0.99,
0.97,
0.99,
0.46,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.5,
1.0,
1.0,
1.0,
1.0,
0.96,
0,
0.31,
1.0,
1.0,
1.0,
0.53,
0,
0,
0,
0,
0,
0,
0,
0,
0.2,
0.21,
0,
0,
0,
0.27,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
0.87,
0.52,
0.01,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0,
0,
0,
0,
0.84,
1.0,
1.0,
1.0,
1.0,
1.0,
0,
0,
0,
0.83,
1.0,
1.0,
0.52,
0,
0,
0,
0,
0,
0,
0,
0.26,
0.82,
0.59,
0.02,
0,
0,
0.46,
1.0,
1.0,
1.0,
1.0,
1.0,
0.9,
0.55,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0,
0,
0,
0.39,
0.99,
1.0,
1.0,
1.0,
1.0,
0.78,
0.04,
0,
0,
0,
0.93,
0.92,
0,
0,
0,
0,
0,
0,
0,
0,
0.69,
1.0,
1.0,
0.36,
0,
0,
1.0,
1.0,
0.65,
0.66,
0.97,
0.87,
0.54,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0.55,
0.75,
0.59,
0.74,
1.0,
1.0,
0,
0,
0.75,
0.71,
0.18,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.29,
0,
0,
0.45,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
0.47,
0.39,
0.71,
0.25,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0.69,
0.81,
0.8,
0.92,
1.0,
0.13,
0,
0,
0.13,
0.94,
0.58,
0,
0,
0,
0,
0,
0,
0,
0,
0,
1.0,
1.0,
0.34,
0,
0.04,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
0.24,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0.63,
0.85,
0.9,
0.98,
1.0,
0.09,
0,
0,
0.02,
1.0,
0.64,
0,
0,
0,
0,
0,
0,
0,
0,
0.59,
1.0,
1.0,
0.84,
0,
0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
0.64,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0.64,
0.65,
0.67,
1.0,
1.0,
0.21,
0.01,
0,
0.04,
0.02,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.69,
1.0,
1.0,
1.0,
0.29,
0.37,
1.0,
1.0,
0.6,
0.63,
1.0,
0.84,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0.44,
0.73,
0.73,
0.85,
1.0,
0.97,
0.23,
0.05,
0,
0,
0,
0,
0,
0,
0,
0,
0.06,
0,
0,
0,
0.97,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
0.33,
0.24,
0.67,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
],
[
0,
0,
0,
0.12,
0.55,
0.9,
0.9,
1.0,
1.0,
1.0,
0.43,
0.04,
0,
0,
0,
0,
0,
0,
0,
0.31,
0.54,
0,
0,
0,
0.88,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
],
[
0,
0,
0,
0.29,
0.71,
1.0,
1.0,
1.0,
1.0,
0.79,
0.28,
0,
0,
0,
0,
0,
0,
0,
0,
0.4,
0.77,
0.54,
0,
0,
0.87,
1.0,
1.0,
1.0,
1.0,
1.0,
0.31,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
],
[
0,
0.16,
0.27,
0.41,
0.72,
0.99,
1.0,
1.0,
0.82,
0.42,
0.09,
0,
0,
0,
0,
0,
0,
0,
0,
0.1,
0.55,
0.58,
0.58,
0.77,
0.99,
1.0,
1.0,
1.0,
1.0,
0.63,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
],
[
0.31,
0.48,
0.45,
0.46,
0.63,
0.88,
1.0,
0.83,
0.59,
0.28,
0.06,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.32,
0.7,
0.95,
1.0,
1.0,
1.0,
1.0,
0.7,
0.58,
0.12,
0.04,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
],
[
0.23,
0.54,
0.53,
0.48,
0.57,
0.59,
0.65,
0.63,
0.55,
0.35,
0.13,
0.03,
0.02,
0.09,
0.74,
1.0,
0.09,
0,
0,
0,
0.32,
0.86,
1.0,
1.0,
1.0,
1.0,
0.57,
0.44,
0.31,
0.16,
0.01,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
],
[
0,
0.31,
0.45,
0.31,
0.18,
0.28,
0.39,
0.47,
0.54,
0.5,
0.35,
0.2,
0.16,
0.28,
0.75,
1.0,
0.42,
0.01,
0,
0,
0.6,
1.0,
1.0,
1.0,
1.0,
0.51,
0.29,
0.09,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0,
0.14,
0.3,
0.4,
0.54,
0.71,
0.74,
0.65,
0.49,
0.35,
0.27,
0.47,
0.6,
0.6,
0.72,
0.98,
1.0,
1.0,
1.0,
1.0,
0.65,
0.33,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0,
0.06,
0.33,
0.53,
0.69,
0.94,
0.99,
1.0,
0.84,
0.41,
0.16,
0.15,
0.96,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
0.73,
0.13,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0,
0,
0.42,
0.86,
0.98,
0.98,
0.99,
1.0,
0.94,
0.63,
0.32,
0.62,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
0.65,
0.23,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0,
0.07,
0.62,
0.95,
1.0,
1.0,
0.99,
0.98,
0.99,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
0.98,
0.14,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0,
0.03,
0.46,
0.89,
1.0,
1.0,
0.97,
0.83,
0.75,
0.81,
0.94,
1.0,
1.0,
1.0,
1.0,
0.99,
0.03,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0,
0,
0.14,
0.57,
0.88,
0.93,
0.81,
0.58,
0.45,
0.48,
0.64,
0.86,
0.97,
0.99,
0.99,
0.42,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0,
0,
0,
0,
0.23,
0.45,
0.47,
0.39,
0.29,
0.19,
0.2,
0.46,
0.28,
0.03,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.08,
0.22,
0.24,
0.15,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.07,
0.22,
0.14,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
],
],
}
creatures['fish'] = {
'name': 'K=3 Fish',
'R': 10,
'T': 5,
'kernels': [
{
'b': [1, 5 / 12, 2 / 3],
'm': 0.156,
's': 0.0118,
'h': 1,
'r': 1,
'c0': 0,
'c1': 0,
},
{
'b': [1 / 12, 1],
'm': 0.193,
's': 0.049,
'h': 1,
'r': 1,
'c0': 0,
'c1': 0,
},
{'b': [1], 'm': 0.342, 's': 0.0891, 'h': 1, 'r': 1, 'c0': 0, 'c1': 0},
],
'cells': [
[
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.06,
0.1,
0.04,
0.02,
0.01,
0,
0,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0,
0,
0,
0,
0.15,
0.37,
0.5,
0.44,
0.19,
0.23,
0.3,
0.23,
0.15,
0.01,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0,
0,
0.32,
0.78,
0.26,
0,
0.11,
0.11,
0.1,
0.08,
0.18,
0.16,
0.17,
0.24,
0.09,
0,
0,
0,
],
[
0,
0,
0,
0,
0.45,
0.16,
0,
0,
0,
0,
0,
0.15,
0.15,
0.16,
0.15,
0.1,
0.09,
0.21,
0.24,
0.12,
0,
0,
],
[
0,
0,
0,
0.1,
0,
0,
0,
0,
0,
0,
0,
0.17,
0.39,
0.43,
0.34,
0.25,
0.15,
0.16,
0.15,
0.25,
0.03,
0,
],
[
0,
0.15,
0.06,
0,
0,
0,
0,
0,
0,
0,
0.24,
0.72,
0.92,
0.85,
0.61,
0.47,
0.39,
0.27,
0.12,
0.18,
0.17,
0,
],
[
0,
0.08,
0,
0,
0,
0,
0,
0,
0,
0,
1.0,
1.0,
1.0,
1.0,
0.73,
0.6,
0.56,
0.31,
0.12,
0.15,
0.24,
0.01,
],
[
0,
0.16,
0,
0,
0,
0,
0,
0,
0,
0.76,
1.0,
1.0,
1.0,
1.0,
0.76,
0.72,
0.65,
0.39,
0.1,
0.17,
0.24,
0.05,
],
[
0,
0.05,
0,
0,
0,
0,
0,
0,
0.21,
0.83,
1.0,
1.0,
1.0,
1.0,
0.86,
0.85,
0.76,
0.36,
0.17,
0.13,
0.21,
0.07,
],
[
0,
0.05,
0,
0,
0.02,
0,
0,
0,
0.4,
0.91,
1.0,
1.0,
1.0,
1.0,
1.0,
0.95,
0.79,
0.36,
0.21,
0.09,
0.18,
0.04,
],
[
0.06,
0.08,
0,
0.18,
0.21,
0.1,
0.03,
0.38,
0.92,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
0.64,
0.31,
0.12,
0.07,
0.25,
0,
],
[
0.05,
0.12,
0.27,
0.4,
0.34,
0.42,
0.93,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
0.97,
0.33,
0.16,
0.05,
0.1,
0.26,
0,
],
[
0,
0.25,
0.21,
0.39,
0.99,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
0.86,
0.89,
0.94,
0.83,
0.13,
0,
0,
0.04,
0.21,
0.18,
0,
],
[
0,
0.06,
0.29,
0.63,
0.84,
0.97,
1.0,
1.0,
1.0,
0.96,
0.46,
0.33,
0.36,
0,
0,
0,
0,
0,
0.03,
0.35,
0,
0,
],
[
0,
0,
0.13,
0.22,
0.59,
0.85,
0.99,
1.0,
0.98,
0.25,
0,
0,
0,
0,
0,
0,
0,
0,
0.34,
0.14,
0,
0,
],
[
0,
0,
0,
0,
0.33,
0.7,
0.95,
0.8,
0.33,
0.11,
0,
0,
0,
0,
0,
0,
0,
0.11,
0.26,
0,
0,
0,
],
[
0,
0,
0,
0,
0.16,
0.56,
0.52,
0.51,
0.4,
0.18,
0.01,
0,
0,
0,
0,
0,
0,
0.42,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0.01,
0,
0.33,
0.47,
0.33,
0.05,
0,
0,
0,
0,
0,
0,
0.35,
0,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0,
0.26,
0.32,
0.13,
0,
0,
0,
0,
0,
0,
0,
0.34,
0,
0,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0,
0.22,
0.25,
0.03,
0,
0,
0,
0,
0,
0,
0.46,
0,
0,
0,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0,
0,
0.09,
0.2,
0.22,
0.23,
0.23,
0.22,
0.3,
0.3,
0,
0,
0,
0,
0,
0,
0,
0,
],
],
}
creatures['emitter'] = {
'name': 'Smooth glider gun',
'R': 13,
'T': 2,
'kernels': [
{
'b': [1],
'm': 0.184,
's': 0.0632,
'h': 0.076,
'r': 0.56,
'c0': 0,
'c1': 0,
},
{
'b': [1],
'm': 0.1,
's': 0.1511,
'h': 0.516,
'r': 0.76,
'c0': 0,
'c1': 0,
},
{
'b': [1],
'm': 0.246,
's': 0.047,
'h': 0.554,
'r': 0.5,
'c0': 0,
'c1': 0,
},
{
'b': [1 / 12, 1],
'm': 0.1,
's': 0.0553,
'h': 0.294,
'r': 0.84,
'c0': 1,
'c1': 1,
},
{
'b': [1],
'm': 0.324,
's': 0.0782,
'h': 0.594,
'r': 0.97,
'c0': 1,
'c1': 1,
},
{
'b': [5 / 6, 1],
'm': 0.229,
's': 0.0321,
'h': 0.612,
'r': 0.98,
'c0': 1,
'c1': 1,
},
{
'b': [1],
'm': 0.29,
's': 0.0713,
'h': 0.396,
'r': 0.87,
'c0': 2,
'c1': 2,
},
{
'b': [1],
'm': 0.484,
's': 0.1343,
'h': 0.244,
'r': 0.96,
'c0': 2,
'c1': 2,
},
{
'b': [1],
'm': 0.592,
's': 0.1807,
'h': 0.562,
'r': 0.93,
'c0': 2,
'c1': 2,
},
{
'b': [1],
'm': 0.398,
's': 0.1411,
'h': 0.36,
'r': 0.89,
'c0': 0,
'c1': 1,
},
{
'b': [1],
'm': 0.388,
's': 0.1144,
'h': 0.192,
'r': 0.67,
'c0': 0,
'c1': 2,
},
{
'b': [1, 11 / 12, 0],
'm': 0.312,
's': 0.0697,
'h': 0.462,
'r': 0.58,
'c0': 1,
'c1': 0,
},
{
'b': [1],
'm': 0.327,
's': 0.1036,
'h': 0.608,
'r': 1.0,
'c0': 1,
'c1': 2,
},
{
'b': [1],
'm': 0.471,
's': 0.1176,
'h': 0.394,
'r': 0.8,
'c0': 2,
'c1': 0,
},
{
'b': [1, 1 / 12],
'm': 0.1,
's': 0.0573,
'h': 0.14,
'r': 0.62,
'c0': 2,
'c1': 1,
},
],
'cells': [
[
[
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.15,
0.48,
0.19,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.61,
1.00,
1.00,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0,
0,
0,
0,
0.12,
0.60,
1.00,
1.00,
1.00,
1.00,
0,
0,
0,
0,
0,
0.19,
0.61,
0.11,
0,
0,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0,
0.36,
0,
0,
0,
0,
0,
1.00,
1.00,
1.00,
0.72,
0.40,
0,
0,
0,
0.91,
1.00,
0.61,
0.26,
0,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0.34,
0.76,
0.10,
0,
0,
0,
0,
0,
0.96,
1.00,
0.96,
0.83,
0.88,
0.72,
0.86,
1.00,
1.00,
0.87,
0.44,
0.05,
0,
0,
0,
0,
],
[
0,
0,
0,
0.12,
0.49,
0.89,
0.16,
0,
0,
0,
0,
0,
0.52,
0.96,
1.00,
1.00,
1.00,
1.00,
1.00,
1.00,
1.00,
1.00,
0.50,
0.03,
0,
0,
0,
0,
],
[
0,
0,
0,
0.58,
0.82,
1.00,
0.70,
0,
0,
0,
0,
0,
0,
0.37,
0.34,
0.06,
0,
0.49,
1.00,
1.00,
1.00,
1.00,
0.57,
0.01,
0,
0,
0,
0,
],
[
0,
0,
0.16,
0.17,
0.16,
0.53,
0.63,
0.82,
0.71,
0.16,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.52,
0.45,
0.07,
0.37,
0.05,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0,
0,
0.14,
1.00,
1.00,
1.00,
0.43,
0,
0,
0.35,
0.03,
0,
0,
0,
0,
0,
0.03,
0,
0,
0.08,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0,
0,
0,
1.00,
1.00,
1.00,
1.00,
1.00,
1.00,
0.22,
0,
0,
0,
0,
0,
0,
0,
0.07,
0,
0.18,
0,
0,
0,
0,
],
[
0,
0,
0.25,
0,
0,
0,
0,
0.01,
1.00,
1.00,
0.81,
0.40,
0.25,
0,
0,
0,
0,
0,
0,
0,
0,
0.31,
0.53,
0.48,
0,
0,
0,
0,
],
[
0,
0,
0.63,
0,
0,
0,
0,
0,
0.40,
1.00,
0.14,
0.18,
0.17,
0,
0,
0,
0,
0,
0.07,
0,
0.46,
1.00,
1.00,
0.94,
0.23,
0,
0,
0,
],
[
0,
0,
0.97,
1.00,
0,
0,
0,
0,
0,
1.00,
0.19,
0.13,
0.09,
0,
0,
0,
0,
0.91,
1.00,
0.97,
1.00,
1.00,
1.00,
0.99,
0.61,
0,
0,
0,
],
[
0,
0.22,
1.00,
1.00,
1.00,
0.58,
0,
0,
0,
0.64,
0.10,
0.10,
0,
0,
0,
0,
0,
0.92,
0.73,
0.73,
0.88,
1.00,
1.00,
0.27,
0.36,
0.11,
0,
0,
],
[
0,
1.00,
1.00,
1.00,
1.00,
1.00,
0.78,
0,
0.06,
0.34,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.29,
0.70,
1.00,
1.00,
0,
0,
0.11,
0,
0,
],
[
0,
0.39,
0,
0.78,
1.00,
1.00,
0.88,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.60,
1.00,
0.82,
0,
0.01,
0.02,
0,
0,
],
[
0,
0,
0,
0,
0.59,
0.85,
0.52,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.55,
1.00,
0.62,
0.05,
0.21,
0,
0,
0,
],
[
0,
0,
0,
0,
0.45,
0.86,
1.00,
0,
0,
0,
0,
0,
0.48,
0.93,
0,
0,
0,
0,
0,
0,
0.92,
1.00,
0.74,
0.37,
0.10,
0,
0,
0,
],
[
0,
0,
0,
0,
0,
0.75,
1.00,
0.33,
0,
0,
0,
0,
1.00,
1.00,
0.22,
0,
0,
0,
0,
0.29,
1.00,
1.00,
0.49,
0.18,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0,
0.88,
1.00,
1.00,
0.27,
0,
0,
0,
0.61,
1.00,
0.54,
0.27,
0,
0,
0.38,
1.00,
1.00,
0.60,
0.24,
0.02,
0,
0,
0,
0,
],
[
0,
0,
0.43,
0,
0.95,
1.00,
1.00,
1.00,
0.12,
0,
0,
0,
0.66,
1.00,
0.84,
0.75,
0.78,
0.90,
1.00,
1.00,
0.65,
0.27,
0.14,
0,
0,
0,
0,
0,
],
[
0,
0,
0,
0.97,
1.00,
1.00,
1.00,
1.00,
0,
0,
0.07,
0.43,
1.00,
1.00,
1.00,
1.00,
1.00,
1.00,
1.00,
0.58,
0.27,
0.17,
0,
0,
0,
0,
0,
0,
],
[
0,
0,
0,
0.09,
0.48,
0.87,
1.00,
1.00,
0,
0,
0,
0,
1.00,
1.00,
1.00,
0.81,
0.33,
0.29,
0.49,
0.30,
0.12,
0,
0,
0,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0.14,
0.22,
0.21,
0.19,
0.12,
0,
0,
0.45,
0.98,
1.00,
0.42,
0,
0,
0.09,
0.28,
0.05,
0,
0,
0,
0,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.19,
0.60,
0.50,
0.14,
0.16,
0.19,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.05,
0.06,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
],
],
[
[
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.08,
0.21,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.81,
0.98,
0.85,
0.67,
0.45,
0.69,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0,
0,
0,
0,
0.35,
0,
0,
0,
0,
0.64,
1.00,
1.00,
1.00,
1.00,
1.00,
0,
0,
0,
0,
0,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0,
0,
0,
0.06,
0.36,
0,
0,
0,
0,
0.28,
0.50,
0.46,
0.91,
1.00,
1.00,
0.82,
0,
0,
0,
0,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0,
0,
0,
0.12,
0.13,
0.06,
0.05,
0.05,
0.03,
0,
0.36,
0.56,
0.60,
1.00,
1.00,
1.00,
0.62,
0,
0,
0,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.18,
0.35,
0.35,
1.00,
0.07,
0,
0.47,
0.77,
1.00,
1.00,
1.00,
1.00,
0.14,
0,
0,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.16,
0.38,
0,
0,
0.37,
0.29,
0.34,
0.90,
1.00,
1.00,
1.00,
1.00,
0.34,
0,
0,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0,
0.05,
0,
0,
0,
0.11,
1.00,
0,
0,
0,
0,
0.50,
1.00,
1.00,
1.00,
1.00,
1.00,
0.43,
0,
0,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0,
0,
0.86,
0,
0,
0.01,
0.19,
0.82,
0,
0,
0,
0.52,
1.00,
1.00,
1.00,
1.00,
0.97,
0.48,
0,
0,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0,
0,
0.73,
0.73,
0.30,
0.32,
0,
0.21,
0.10,
0,
0.07,
0.98,
1.00,
1.00,
1.00,
1.00,
0.85,
0.50,
0,
0,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0,
0,
0.30,
1.00,
0.26,
0.44,
0.57,
0.36,
0.49,
0.69,
1.00,
1.00,
1.00,
1.00,
1.00,
1.00,
0.86,
0.45,
0,
0,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0,
0,
0.12,
0.95,
0.71,
0.26,
0.55,
0.78,
0.93,
1.00,
1.00,
1.00,
1.00,
1.00,
1.00,
1.00,
1.00,
0.27,
0,
0,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0,
0,
0,
0.83,
1.00,
1.00,
1.00,
1.00,
1.00,
1.00,
1.00,
1.00,
1.00,
1.00,
1.00,
1.00,
0.93,
0.06,
0,
0,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0,
0,
0,
0.38,
1.00,
1.00,
1.00,
1.00,
1.00,
1.00,
1.00,
1.00,
1.00,
1.00,
1.00,
1.00,
0.53,
0,
0,
0,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0,
0,
0,
0,
0.04,
1.00,
1.00,
1.00,
1.00,
1.00,
1.00,
1.00,
1.00,
1.00,
1.00,
0.68,
0.05,
0,
0,
0,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.56,
0.85,
0.95,
0.97,
0.92,
0.82,
0.82,
0.69,
0.32,
0.02,
0,
0,
0,
0,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0.03,
0.12,
0.11,
0.04,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
],
],
[
[
0,
0,
0,
0,
0,
0,
0.01,
0.02,
0.03,
0.04,
0.06,
0.08,
0.09,
0.10,
0.10,
0.10,
0.10,
0.08,
0.05,
0.03,
0.02,
0.01,
0,
0,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0.01,
0.01,
0.03,
0.05,
0.08,
0.11,
0.13,
0.15,
0.17,
0.18,
0.20,
0.22,
0.21,
0.19,
0.15,
0.10,
0.06,
0.04,
0.02,
0.01,
0,
0,
0,
0,
],
[
0,
0,
0,
0.01,
0.02,
0.04,
0.08,
0.12,
0.17,
0.20,
0.22,
0.24,
0.26,
0.28,
0.30,
0.33,
0.33,
0.31,
0.26,
0.20,
0.14,
0.09,
0.06,
0.03,
0.01,
0,
0,
0,
],
[
0,
0,
0.01,
0.03,
0.05,
0.09,
0.15,
0.22,
0.28,
0.31,
0.32,
0.32,
0.33,
0.34,
0.37,
0.40,
0.42,
0.41,
0.36,
0.30,
0.23,
0.17,
0.11,
0.07,
0.03,
0.01,
0,
0,
],
[
0,
0.01,
0.03,
0.06,
0.10,
0.16,
0.24,
0.31,
0.37,
0.40,
0.39,
0.37,
0.36,
0.37,
0.39,
0.42,
0.44,
0.44,
0.42,
0.38,
0.33,
0.26,
0.19,
0.13,
0.07,
0.03,
0.01,
0,
],
[
0.01,
0.02,
0.05,
0.10,
0.16,
0.23,
0.30,
0.38,
0.42,
0.43,
0.43,
0.41,
0.39,
0.39,
0.40,
0.42,
0.43,
0.43,
0.42,
0.41,
0.40,
0.35,
0.28,
0.20,
0.12,
0.06,
0.02,
0,
],
[
0.01,
0.04,
0.09,
0.15,
0.23,
0.29,
0.34,
0.39,
0.42,
0.43,
0.44,
0.45,
0.45,
0.45,
0.45,
0.45,
0.44,
0.43,
0.42,
0.43,
0.43,
0.42,
0.37,
0.29,
0.19,
0.10,
0.04,
0.01,
],
[
0.03,
0.07,
0.13,
0.21,
0.28,
0.32,
0.35,
0.39,
0.42,
0.45,
0.48,
0.51,
0.53,
0.54,
0.53,
0.52,
0.50,
0.48,
0.46,
0.45,
0.46,
0.47,
0.45,
0.37,
0.26,
0.15,
0.07,
0.03,
],
[
0.04,
0.10,
0.19,
0.27,
0.33,
0.35,
0.36,
0.39,
0.44,
0.50,
0.57,
0.62,
0.64,
0.65,
0.63,
0.61,
0.59,
0.56,
0.54,
0.52,
0.51,
0.52,
0.52,
0.45,
0.34,
0.21,
0.11,
0.05,
],
[
0.06,
0.14,
0.25,
0.34,
0.39,
0.39,
0.39,
0.42,
0.48,
0.56,
0.65,
0.71,
0.74,
0.74,
0.72,
0.69,
0.66,
0.63,
0.60,
0.58,
0.56,
0.56,
0.56,
0.51,
0.41,
0.27,
0.15,
0.07,
],
[
0.09,
0.19,
0.30,
0.39,
0.42,
0.43,
0.44,
0.48,
0.55,
0.64,
0.69,
0.73,
0.75,
0.77,
0.77,
0.76,
0.73,
0.69,
0.66,
0.63,
0.60,
0.58,
0.58,
0.55,
0.46,
0.32,
0.19,
0.09,
],
[
0.11,
0.21,
0.33,
0.41,
0.43,
0.44,
0.48,
0.55,
0.63,
0.69,
0.69,
0.66,
0.66,
0.69,
0.73,
0.76,
0.76,
0.73,
0.71,
0.67,
0.63,
0.59,
0.58,
0.56,
0.49,
0.36,
0.22,
0.11,
],
[
0.13,
0.22,
0.33,
0.40,
0.43,
0.45,
0.50,
0.60,
0.69,
0.72,
0.66,
0.59,
0.56,
0.58,
0.64,
0.70,
0.73,
0.73,
0.71,
0.68,
0.64,
0.59,
0.57,
0.56,
0.51,
0.40,
0.25,
0.13,
],
[
0.14,
0.23,
0.32,
0.37,
0.41,
0.45,
0.52,
0.63,
0.73,
0.74,
0.67,
0.57,
0.52,
0.52,
0.57,
0.65,
0.71,
0.72,
0.71,
0.68,
0.63,
0.59,
0.56,
0.56,
0.53,
0.43,
0.27,
0.14,
],
[
0.14,
0.24,
0.31,
0.35,
0.38,
0.43,
0.51,
0.62,
0.73,
0.77,
0.71,
0.60,
0.54,
0.53,
0.57,
0.65,
0.71,
0.73,
0.72,
0.68,
0.64,
0.59,
0.57,
0.57,
0.55,
0.44,
0.28,
0.15,
],
[
0.15,
0.24,
0.32,
0.35,
0.37,
0.41,
0.50,
0.60,
0.70,
0.77,
0.76,
0.67,
0.60,
0.58,
0.62,
0.69,
0.75,
0.76,
0.74,
0.70,
0.65,
0.60,
0.58,
0.58,
0.55,
0.45,
0.28,
0.14,
],
[
0.15,
0.25,
0.33,
0.37,
0.38,
0.41,
0.48,
0.57,
0.66,
0.74,
0.77,
0.73,
0.68,
0.66,
0.70,
0.75,
0.77,
0.77,
0.76,
0.72,
0.66,
0.61,
0.59,
0.59,
0.55,
0.43,
0.26,
0.13,
],
[
0.15,
0.26,
0.35,
0.40,
0.41,
0.41,
0.46,
0.54,
0.62,
0.69,
0.74,
0.74,
0.72,
0.72,
0.75,
0.77,
0.77,
0.76,
0.75,
0.72,
0.66,
0.61,
0.59,
0.59,
0.53,
0.39,
0.23,
0.11,
],
[
0.13,
0.25,
0.36,
0.42,
0.43,
0.42,
0.45,
0.51,
0.58,
0.65,
0.70,
0.72,
0.72,
0.72,
0.75,
0.77,
0.76,
0.74,
0.73,
0.70,
0.64,
0.60,
0.59,
0.56,
0.48,
0.34,
0.19,
0.09,
],
[
0.11,
0.22,
0.33,
0.41,
0.43,
0.42,
0.43,
0.48,
0.55,
0.61,
0.67,
0.69,
0.69,
0.70,
0.72,
0.74,
0.74,
0.73,
0.70,
0.66,
0.61,
0.58,
0.57,
0.52,
0.42,
0.28,
0.15,
0.06,
],
[
0.07,
0.17,
0.28,
0.36,
0.40,
0.41,
0.42,
0.46,
0.52,
0.58,
0.63,
0.66,
0.66,
0.66,
0.67,
0.69,
0.70,
0.69,
0.65,
0.61,
0.58,
0.56,
0.54,
0.47,
0.35,
0.21,
0.11,
0.04,
],
[
0.04,
0.11,
0.20,
0.29,
0.36,
0.39,
0.42,
0.45,
0.51,
0.55,
0.60,
0.62,
0.62,
0.61,
0.62,
0.63,
0.64,
0.63,
0.61,
0.58,
0.56,
0.54,
0.48,
0.39,
0.27,
0.15,
0.07,
0.02,
],
[
0.02,
0.07,
0.13,
0.21,
0.30,
0.36,
0.41,
0.46,
0.51,
0.55,
0.57,
0.59,
0.58,
0.57,
0.57,
0.58,
0.59,
0.59,
0.59,
0.57,
0.54,
0.49,
0.41,
0.30,
0.19,
0.10,
0.04,
0.01,
],
[
0.01,
0.03,
0.08,
0.15,
0.23,
0.31,
0.38,
0.44,
0.51,
0.55,
0.57,
0.58,
0.56,
0.55,
0.55,
0.56,
0.57,
0.58,
0.57,
0.54,
0.48,
0.41,
0.31,
0.21,
0.12,
0.05,
0.02,
0,
],
[
0,
0.02,
0.05,
0.09,
0.15,
0.23,
0.30,
0.38,
0.46,
0.52,
0.55,
0.56,
0.56,
0.55,
0.56,
0.56,
0.57,
0.56,
0.52,
0.46,
0.38,
0.29,
0.20,
0.12,
0.06,
0.02,
0,
0,
],
[
0,
0.01,
0.02,
0.05,
0.09,
0.15,
0.21,
0.28,
0.36,
0.42,
0.46,
0.49,
0.50,
0.51,
0.52,
0.52,
0.50,
0.47,
0.41,
0.34,
0.26,
0.18,
0.11,
0.06,
0.02,
0.01,
0,
0,
],
[
0,
0,
0.01,
0.02,
0.04,
0.08,
0.12,
0.17,
0.23,
0.28,
0.32,
0.35,
0.38,
0.39,
0.40,
0.39,
0.37,
0.32,
0.26,
0.20,
0.14,
0.09,
0.05,
0.02,
0.01,
0,
0,
0,
],
[
0,
0,
0,
0,
0.01,
0.03,
0.06,
0.09,
0.12,
0.15,
0.18,
0.21,
0.23,
0.24,
0.25,
0.23,
0.21,
0.18,
0.14,
0.10,
0.06,
0.04,
0.02,
0,
0,
0,
0,
0,
],
[
0,
0,
0,
0,
0,
0.01,
0.02,
0.03,
0.05,
0.07,
0.09,
0.11,
0.12,
0.12,
0.12,
0.11,
0.10,
0.08,
0.06,
0.04,
0.02,
0.01,
0,
0,
0,
0,
0,
0,
],
],
],
}
creatures["aquarium"] = {"name":"Tessellatium gyrans","R":12,"T":2,"kernels":[
{"b":[1],"m":0.272,"s":0.0595,"h":0.138,"r":0.91,"c0":0,"c1":0},
{"b":[1],"m":0.349,"s":0.1585,"h":0.48,"r":0.62,"c0":0,"c1":0},
{"b":[1,1/4],"m":0.2,"s":0.0332,"h":0.284,"r":0.5,"c0":0,"c1":0},
{"b":[0,1],"m":0.114,"s":0.0528,"h":0.256,"r":0.97,"c0":1,"c1":1},
{"b":[1],"m":0.447,"s":0.0777,"h":0.5,"r":0.72,"c0":1,"c1":1},
{"b":[5/6,1],"m":0.247,"s":0.0342,"h":0.622,"r":0.8,"c0":1,"c1":1},
{"b":[1],"m":0.21,"s":0.0617,"h":0.35,"r":0.96,"c0":2,"c1":2},
{"b":[1],"m":0.462,"s":0.1192,"h":0.218,"r":0.56,"c0":2,"c1":2},
{"b":[1],"m":0.446,"s":0.1793,"h":0.556,"r":0.78,"c0":2,"c1":2},
{"b":[11/12,1],"m":0.327,"s":0.1408,"h":0.344,"r":0.79,"c0":0,"c1":1},
{"b":[3/4,1],"m":0.476,"s":0.0995,"h":0.456,"r":0.5,"c0":0,"c1":2},
{"b":[11/12,1],"m":0.379,"s":0.0697,"h":0.67,"r":0.72,"c0":1,"c1":0},
{"b":[1],"m":0.262,"s":0.0877,"h":0.42,"r":0.68,"c0":1,"c1":2},
{"b":[1/6,1,0],"m":0.412,"s":0.1101,"h":0.43,"r":0.82,"c0":2,"c1":0},
{"b":[1],"m":0.201,"s":0.0786,"h":0.278,"r":0.82,"c0":2,"c1":1}],
"cells":[
[[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.04,0,0,0,0,0,0,0,0,0], [0,0,0,0,0,0,0,0,0,0.49,1.0,0,0.03,0.49,0.49,0.28,0.16,0.03,0,0,0,0,0,0], [0,0,0,0,0,0,0,0,0,0,0,0.6,0.47,0.31,0.58,0.51,0.35,0.28,0.22,0,0,0,0,0], [0,0,0,0,0,0,0.15,0.32,0.17,0.61,0.97,0.29,0.67,0.59,0.88,1.0,0.92,0.8,0.61,0.42,0.19,0,0,0], [0,0,0,0,0,0,0,0.25,0.64,0.26,0.92,0.04,0.24,0.97,1.0,1.0,1.0,1.0,0.97,0.71,0.33,0.12,0,0], [0,0,0,0,0,0,0,0.38,0.84,0.99,0.78,0.67,1.0,1.0,1.0,1.0,1.0,1.0,1.0,0.95,0.62,0.37,0,0], [0,0,0,0,0.04,0.11,0,0.69,0.75,0.75,0.91,1.0,1.0,0.89,1.0,1.0,1.0,1.0,1.0,1.0,0.81,0.42,0.07,0], [0,0,0,0,0.44,0.63,0.04,0,0,0,0.11,0.14,0,0.05,0.64,1.0,1.0,1.0,1.0,1.0,0.92,0.56,0.23,0], [0,0,0,0,0.11,0.36,0.35,0.2,0,0,0,0,0,0,0.63,1.0,1.0,1.0,1.0,1.0,0.96,0.49,0.26,0], [0,0,0,0,0,0.4,0.37,0.18,0,0,0,0,0,0.04,0.41,0.52,0.67,0.82,1.0,1.0,0.91,0.4,0.23,0], [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.04,0,0.05,0.45,0.89,1.0,0.66,0.35,0.09,0], [0,0,0.22,0,0,0,0.05,0.36,0.6,0.13,0.02,0.04,0.24,0.34,0.1,0,0.04,0.62,1.0,1.0,0.44,0.25,0,0], [0,0,0,0.43,0.53,0.58,0.78,0.9,0.96,1.0,1.0,1.0,1.0,0.71,0.46,0.51,0.81,1.0,1.0,0.93,0.19,0.06,0,0], [0,0,0,0,0.23,0.26,0.37,0.51,0.71,0.89,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,0.42,0.06,0,0,0], [0,0,0,0,0.03,0,0,0.11,0.35,0.62,0.81,0.93,1.0,1.0,1.0,1.0,1.0,0.64,0.15,0,0,0,0,0], [0,0,0,0,0,0,0.06,0.1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], [0,0,0,0,0,0,0,0.05,0.09,0.05,0,0,0,0,0,0,0,0,0,0,0,0,0,0], [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]],
[[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.02,0.28,0.42,0.44,0.34,0.18,0,0,0], [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.34,1.0,1.0,1.0,1.0,1.0,0.91,0.52,0.14,0], [0,0,0,0,0,0,0,0,0,0,0,0,0.01,0.17,0.75,1.0,1.0,1.0,1.0,1.0,1.0,0.93,0.35,0], [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.22,0.92,1.0,1.0,1.0,1.0,1.0,1.0,0.59,0.09], [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.75,1.0,1.0,1.0,1.0,1.0,1.0,1.0,0.71,0.16], [0,0,0,0,0,0,0,0,0,0,0,0,0.01,0.67,0.83,0.85,1.0,1.0,1.0,1.0,1.0,1.0,0.68,0.17], [0,0,0,0,0,0,0,0,0,0,0,0,0.21,0.04,0.12,0.58,0.95,1.0,1.0,1.0,1.0,1.0,0.57,0.13], [0,0,0,0,0,0,0,0,0,0,0,0.07,0,0,0,0.2,0.64,0.96,1.0,1.0,1.0,0.9,0.24,0.01], [0,0,0,0,0,0,0,0,0,0,0.13,0.29,0,0,0,0.25,0.9,1.0,1.0,1.0,1.0,0.45,0.05,0], [0,0,0,0,0,0,0,0,0,0,0.13,0.31,0.07,0,0.46,0.96,1.0,1.0,1.0,1.0,0.51,0.12,0,0], [0,0,0,0,0,0,0,0,0.26,0.82,1.0,0.95,1.0,1.0,1.0,1.0,1.0,1.0,1.0,0.3,0.05,0,0,0], [0,0,0,0,0,0,0,0,0.28,0.74,1.0,0.95,0.87,1.0,1.0,1.0,1.0,1.0,0,0,0,0,0,0], [0,0,0,0,0,0,0,0,0.07,0.69,1.0,1.0,1.0,1.0,1.0,0.96,0.25,0,0,0,0,0,0,0], [0,0,0,0,0,0,0,0,0,0,0.4,0.72,0.9,0.83,0.7,0.56,0.43,0.14,0,0,0,0,0,0], [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]],
[[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], [0,0,0,0,0,0,0,0,0,0,0,0,0.04,0.25,0.37,0.44,0.37,0.24,0.11,0.04,0,0,0,0], [0,0,0,0,0,0,0,0,0,0.19,1.0,1.0,1.0,1.0,1.0,1.0,1.0,0.75,0.4,0.15,0,0,0,0], [0,0,0,0,0,0,0,0,0.14,0.48,0.83,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,0.4,0,0,0,0], [0,0,0,0,0,0,0,0,0.62,0.78,0.94,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,0.64,0,0,0,0], [0,0,0,0,0,0,0,0.02,0.65,0.98,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,0.78,0,0,0,0], [0,0,0,0,0,0,0,0.15,0.48,0.93,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,0.79,0.05,0,0,0], [0,0,0,0,0,0,0.33,0.56,0.8,1.0,1.0,1.0,0.37,0.6,0.94,1.0,1.0,1.0,1.0,0.68,0.05,0,0,0], [0,0,0,0,0.35,0.51,0.76,0.89,1.0,1.0,0.72,0.15,0,0.29,0.57,0.69,0.86,1.0,0.92,0.49,0,0,0,0], [0,0,0,0,0,0.38,0.86,1.0,1.0,0.96,0.31,0,0,0,0,0.02,0.2,0.52,0.37,0.11,0,0,0,0], [0,0,0.01,0,0,0.07,0.75,1.0,1.0,1.0,0.48,0.03,0,0,0,0,0,0.18,0.07,0,0,0,0,0], [0,0.11,0.09,0.22,0.15,0.32,0.71,0.94,1.0,1.0,0.97,0.54,0.12,0.02,0,0,0,0,0,0,0,0,0,0], [0.06,0.33,0.47,0.51,0.58,0.77,0.95,1.0,1.0,1.0,1.0,0.62,0.12,0,0,0,0,0,0,0,0,0,0,0], [0.04,0.4,0.69,0.88,0.95,1.0,1.0,1.0,1.0,1.0,0.93,0.68,0.22,0.02,0,0,0.01,0,0,0,0,0,0,0], [0,0.39,0.69,0.91,1.0,1.0,1.0,1.0,1.0,0.85,0.52,0.35,0.24,0.17,0.07,0,0,0,0,0,0,0,0,0], [0,0,0.29,0.82,1.0,1.0,1.0,1.0,1.0,1.0,0.67,0.29,0.02,0,0,0,0,0,0,0,0,0,0,0], [0,0,0,0,0.2,0.51,0.77,0.96,0.93,0.71,0.4,0.16,0,0,0,0,0,0,0,0,0,0,0,0], [0,0,0,0,0,0,0,0,0.08,0.07,0.03,0,0,0,0,0,0,0,0,0,0,0,0,0]]]
}
creatures["wanderer"] = {"name":"Smooth wanderer","R":13,"T":10,"kernels":[{"m":0.1,"s":0.005,"b":[3/4,1], "r":1,"h":1, "c0":0,"c1":0}],
"cells":[[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0], [0,0,0,0,0,0,0.01,0.01,0.01,0.01,0.01,0.02,0.03,0.04,0.05,0.04,0.01,0,0,0,0,0,0,0,0,0,0], [0,0,0,0,0,0.02,0.03,0.02,0.01,0.02,0.04,0.04,0.06,0.09,0.10,0.11,0.12,0.05,0.01,0.01,0,0,0,0,0,0,0], [0,0,0,0,0.01,0.07,0.07,0.08,0.09,0.09,0.10,0.12,0.20,0.28,0.41,0.58,0.62,0.51,0.30,0.08,0.04,0.01,0,0,0,0,0], [0,0,0,0.01,0.07,0.03,0.05,0.05,0.06,0.09,0.14,0.21,0.27,0.27,0.14,0.07,0.10,0.28,0.71,0.73,0.28,0.10,0.03,0,0,0,0], [0,0,0.01,0.01,0.04,0.05,0.04,0.06,0.10,0.16,0.19,0.22,0.07,0.01,0.04,0.03,0.06,0.07,0.07,0.37,0.78,0.47,0.22,0.09,0.01,0,0], [0,0,0.01,0.01,0.05,0.04,0.06,0.10,0.12,0.17,0.24,0.13,0.05,0.02,0.04,0.07,0.08,0.09,0.07,0.07,0.38,0.74,0.49,0.32,0.12,0,0], [0,0,0.01,0.01,0.04,0.06,0.07,0.12,0.14,0.19,0.26,0.03,0.05,0.05,0.03,0.09,0.08,0.11,0.13,0.13,0.25,0.47,0.58,0.39,0.31,0.02,0], [0,0,0,0,0.03,0.10,0.08,0.13,0.15,0.21,0.19,0.05,0.06,0.13,0.02,0.06,0.13,0.19,0.27,0.26,0.32,0.41,0.50,0.39,0.32,0.14,0], [0,0,0,0,0.03,0.06,0.09,0.12,0.15,0.24,0.04,0.06,0.12,0.18,0.08,0.17,0.05,0.09,0.20,0.26,0.35,0.46,0.49,0.45,0.30,0.21,0], [0,0,0,0.01,0.01,0.03,0.09,0.11,0.16,0.28,0.08,0.08,0.08,0.09,0.33,0.32,0.44,0.04,0.06,0.14,0.31,0.50,0.59,0.55,0.32,0.21,0], [0,0,0,0,0.01,0.03,0.09,0.09,0.14,0.21,0,0,0,0.01,0.32,0.32,0.33,0.07,0.06,0.09,0.29,0.58,0.70,0.60,0.34,0.18,0], [0,0,0,0,0,0.07,0.10,0.08,0.14,0.19,0,0,0.01,0.02,0.03,0.13,0.10,0.07,0.08,0.12,0.39,0.65,0.65,0.54,0.33,0.12,0], [0,0,0,0,0,0.04,0.15,0.08,0.15,0.30,0.07,0.11,0.13,0.14,0.14,0.12,0.12,0.11,0.12,0.19,0.59,0.61,0.55,0.46,0.29,0.06,0], [0,0,0,0,0,0,0.08,0.18,0.14,0.40,0.08,0.04,0.06,0.15,0.29,0.27,0.13,0.12,0.14,0.46,0.62,0.51,0.42,0.36,0.25,0.01,0], [0,0,0,0,0,0,0,0.09,0.19,0.27,0.48,0.07,0.07,0.14,0.27,0.24,0.13,0.12,0.26,0.55,0.55,0.39,0.31,0.28,0.20,0,0], [0,0,0,0,0,0,0,0.01,0.12,0.21,0.46,0.48,0.11,0.07,0.08,0.08,0.07,0.10,0.36,0.59,0.50,0.30,0.26,0.27,0.09,0,0], 
[0,0,0,0,0,0,0,0,0.01,0.09,0.23,0.45,0.61,0.33,0.08,0.05,0.06,0.19,0.57,0.66,0.40,0.28,0.26,0.26,0,0,0], [0,0,0,0,0,0,0,0,0,0,0.01,0.09,0.27,0.62,0.75,0.58,0.55,0.68,0.65,0.46,0.36,0.31,0.29,0.03,0,0,0], [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.03,0.12,0.22,0.28,0.30,0.31,0.29,0.21,0.03,0,0,0,0], [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.01,0.03,0.05,0.06,0.03,0,0,0,0,0,0]]
}
| 18.657958 | 2,287 | 0.125349 | 12,271 | 124,971 | 1.276587 | 0.018091 | 0.577338 | 0.76623 | 0.932269 | 0.825343 | 0.669518 | 0.643026 | 0.56872 | 0.529205 | 0.478136 | 0 | 0.51361 | 0.764529 | 124,971 | 6,697 | 2,288 | 18.660744 | 0.018724 | 0 | 0 | 0.950695 | 0 | 0 | 0.004417 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.000149 | 0 | 0.000149 | 0 | 0 | 0 | 1 | null | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 10 |
0738a082d60d4925641a5349ce580312233028ef | 13,547 | py | Python | copct-master/baxter_corpus/demo_remove_red_drive_2.py | jhomble/electron435 | 2a94a901679a1ebbdeea01bb9e888d365d536bec | [
"MIT"
] | 4 | 2016-10-26T13:58:44.000Z | 2018-11-13T13:03:52.000Z | copct-master/baxter_corpus/demo_remove_red_drive_2.py | jhomble/electron435 | 2a94a901679a1ebbdeea01bb9e888d365d536bec | [
"MIT"
] | 4 | 2020-03-31T01:10:26.000Z | 2020-03-31T03:06:28.000Z | copct-master/baxter_corpus/demo_remove_red_drive_2.py | jhomble/electron435 | 2a94a901679a1ebbdeea01bb9e888d365d536bec | [
"MIT"
] | 1 | 2020-03-03T06:22:08.000Z | 2020-03-03T06:22:08.000Z | demo = (
(
(
("workspace", "Workspace"),
("table", "Block"),
("dock-case", "DockCase"),
("dock-case_1", "Block"),
("dock-body", "DockDrawer"),
("dock-body_2", "DockFrontPanel"),
("dock-body_2_1", "Prism"),
("dock-body_2_2", "Block"),
("dock-body_2_3", "Block"),
("dock-body_4", "DockHandle"),
("dock-body_4_1", "Prism"),
("dock-body_4_2", "Prism"),
("dock-body_5", "DockModule"),
("dock-body_5_1", "DockSlot"),
("c1", "Cartridge"),
("dock-body_5_2", "DockSwitch"),
("dock-body_5_3", "DockLED"),
("dock-body_6", "DockModule"),
("dock-body_6_1", "DockSlot"),
("c2", "Cartridge"),
("dock-body_6_2", "DockSwitch"),
("dock-body_6_3", "DockLED"),
("dock-body_7", "DockModule"),
("dock-body_7_1", "DockSlot"),
("c3", "Cartridge"),
("dock-body_7_2", "DockSwitch"),
("dock-body_7_3", "DockLED"),
("dock-body_8", "DockModule"),
("dock-body_8_1", "DockSlot"),
("c4", "Cartridge"),
("dock-body_8_2", "DockSwitch"),
("dock-body_8_3", "DockLED"),
("dock-case_2", "Block"),
("dock-case_3", "Block"),
("dock-case_4", "Block"),
("dock-case_5", "Prism"),
("dock-case_6", "Block"),
("c5", "Cartridge"),
("c6", "Cartridge"),
("discard-bin", "Block"),
("gripping", ("nothing","nothing")),
),
"move arm and grasp",
(
2.000000,
"dock-body",
)
),
(
(
("workspace", "Workspace"),
("table", "Block"),
("dock-case", "DockCase"),
("dock-case_1", "Block"),
("dock-body", "DockDrawer"),
("dock-body_2", "DockFrontPanel"),
("dock-body_2_1", "Prism"),
("dock-body_2_2", "Block"),
("dock-body_2_3", "Block"),
("dock-body_4", "DockHandle"),
("dock-body_4_1", "Prism"),
("dock-body_4_2", "Prism"),
("dock-body_5", "DockModule"),
("dock-body_5_1", "DockSlot"),
("c1", "Cartridge"),
("dock-body_5_2", "DockSwitch"),
("dock-body_5_3", "DockLED"),
("dock-body_6", "DockModule"),
("dock-body_6_1", "DockSlot"),
("c2", "Cartridge"),
("dock-body_6_2", "DockSwitch"),
("dock-body_6_3", "DockLED"),
("dock-body_7", "DockModule"),
("dock-body_7_1", "DockSlot"),
("c3", "Cartridge"),
("dock-body_7_2", "DockSwitch"),
("dock-body_7_3", "DockLED"),
("dock-body_8", "DockModule"),
("dock-body_8_1", "DockSlot"),
("c4", "Cartridge"),
("dock-body_8_2", "DockSwitch"),
("dock-body_8_3", "DockLED"),
("dock-case_2", "Block"),
("dock-case_3", "Block"),
("dock-case_4", "Block"),
("dock-case_5", "Prism"),
("dock-case_6", "Block"),
("c5", "Cartridge"),
("c6", "Cartridge"),
("discard-bin", "Block"),
("gripping", ("nothing","dock-body")),
),
"move grasped object",
(
2.000000,
"dock-case",
(
(1.000000, 0.000731, 0.000241, ),
(-0.000731, 1.000000, 0.000199, ),
(-0.000241, -0.000199, 1.000000, ),
),
(
(3.888290, ),
(-0.000731, ),
(0.103128, ),
),
)
),
(
(
("workspace", "Workspace"),
("table", "Block"),
("dock-case", "DockCase"),
("dock-case_1", "Block"),
("dock-body", "DockDrawer"),
("dock-body_2", "DockFrontPanel"),
("dock-body_2_1", "Prism"),
("dock-body_2_2", "Block"),
("dock-body_2_3", "Block"),
("dock-body_4", "DockHandle"),
("dock-body_4_1", "Prism"),
("dock-body_4_2", "Prism"),
("dock-body_5", "DockModule"),
("dock-body_5_1", "DockSlot"),
("c1", "Cartridge"),
("dock-body_5_2", "DockSwitch"),
("dock-body_5_3", "DockLED"),
("dock-body_6", "DockModule"),
("dock-body_6_1", "DockSlot"),
("c2", "Cartridge"),
("dock-body_6_2", "DockSwitch"),
("dock-body_6_3", "DockLED"),
("dock-body_7", "DockModule"),
("dock-body_7_1", "DockSlot"),
("c3", "Cartridge"),
("dock-body_7_2", "DockSwitch"),
("dock-body_7_3", "DockLED"),
("dock-body_8", "DockModule"),
("dock-body_8_1", "DockSlot"),
("c4", "Cartridge"),
("dock-body_8_2", "DockSwitch"),
("dock-body_8_3", "DockLED"),
("dock-case_2", "Block"),
("dock-case_3", "Block"),
("dock-case_4", "Block"),
("dock-case_5", "Prism"),
("dock-case_6", "Block"),
("c5", "Cartridge"),
("c6", "Cartridge"),
("discard-bin", "Block"),
("gripping", ("nothing","dock-body")),
),
"release",
(
2.000000,
)
),
(
(
("workspace", "Workspace"),
("table", "Block"),
("dock-case", "DockCase"),
("dock-case_1", "Block"),
("dock-body", "DockDrawer"),
("dock-body_2", "DockFrontPanel"),
("dock-body_2_1", "Prism"),
("dock-body_2_2", "Block"),
("dock-body_2_3", "Block"),
("dock-body_4", "DockHandle"),
("dock-body_4_1", "Prism"),
("dock-body_4_2", "Prism"),
("dock-body_5", "DockModule"),
("dock-body_5_1", "DockSlot"),
("c1", "Cartridge"),
("dock-body_5_2", "DockSwitch"),
("dock-body_5_3", "DockLED"),
("dock-body_6", "DockModule"),
("dock-body_6_1", "DockSlot"),
("c2", "Cartridge"),
("dock-body_6_2", "DockSwitch"),
("dock-body_6_3", "DockLED"),
("dock-body_7", "DockModule"),
("dock-body_7_1", "DockSlot"),
("c3", "Cartridge"),
("dock-body_7_2", "DockSwitch"),
("dock-body_7_3", "DockLED"),
("dock-body_8", "DockModule"),
("dock-body_8_1", "DockSlot"),
("c4", "Cartridge"),
("dock-body_8_2", "DockSwitch"),
("dock-body_8_3", "DockLED"),
("dock-case_2", "Block"),
("dock-case_3", "Block"),
("dock-case_4", "Block"),
("dock-case_5", "Prism"),
("dock-case_6", "Block"),
("c5", "Cartridge"),
("c6", "Cartridge"),
("discard-bin", "Block"),
("gripping", ("nothing","nothing")),
),
"press dock toggle",
(
1.000000,
"dock-body_7_2",
2.000000,
)
),
(
(
("workspace", "Workspace"),
("table", "Block"),
("dock-case", "DockCase"),
("dock-case_1", "Block"),
("dock-body", "DockDrawer"),
("dock-body_2", "DockFrontPanel"),
("dock-body_2_1", "Prism"),
("dock-body_2_2", "Block"),
("dock-body_2_3", "Block"),
("dock-body_4", "DockHandle"),
("dock-body_4_1", "Prism"),
("dock-body_4_2", "Prism"),
("dock-body_5", "DockModule"),
("dock-body_5_1", "DockSlot"),
("c1", "Cartridge"),
("dock-body_5_2", "DockSwitch"),
("dock-body_5_3", "DockLED"),
("dock-body_6", "DockModule"),
("dock-body_6_1", "DockSlot"),
("c2", "Cartridge"),
("dock-body_6_2", "DockSwitch"),
("dock-body_6_3", "DockLED"),
("dock-body_7", "DockModule"),
("dock-body_7_1", "DockSlot"),
("c3", "Cartridge"),
("dock-body_7_2", "DockSwitch"),
("dock-body_7_3", "DockLED"),
("dock-body_8", "DockModule"),
("dock-body_8_1", "DockSlot"),
("c4", "Cartridge"),
("dock-body_8_2", "DockSwitch"),
("dock-body_8_3", "DockLED"),
("dock-case_2", "Block"),
("dock-case_3", "Block"),
("dock-case_4", "Block"),
("dock-case_5", "Prism"),
("dock-case_6", "Block"),
("c5", "Cartridge"),
("c6", "Cartridge"),
("discard-bin", "Block"),
("gripping", ("nothing","nothing")),
),
"move arm and grasp",
(
1.000000,
"c3",
)
),
(
(
("workspace", "Workspace"),
("table", "Block"),
("dock-case", "DockCase"),
("dock-case_1", "Block"),
("dock-body", "DockDrawer"),
("dock-body_2", "DockFrontPanel"),
("dock-body_2_1", "Prism"),
("dock-body_2_2", "Block"),
("dock-body_2_3", "Block"),
("dock-body_4", "DockHandle"),
("dock-body_4_1", "Prism"),
("dock-body_4_2", "Prism"),
("dock-body_5", "DockModule"),
("dock-body_5_1", "DockSlot"),
("c1", "Cartridge"),
("dock-body_5_2", "DockSwitch"),
("dock-body_5_3", "DockLED"),
("dock-body_6", "DockModule"),
("dock-body_6_1", "DockSlot"),
("c2", "Cartridge"),
("dock-body_6_2", "DockSwitch"),
("dock-body_6_3", "DockLED"),
("dock-body_7", "DockModule"),
("dock-body_7_1", "DockSlot"),
("c3", "Cartridge"),
("dock-body_7_2", "DockSwitch"),
("dock-body_7_3", "DockLED"),
("dock-body_8", "DockModule"),
("dock-body_8_1", "DockSlot"),
("c4", "Cartridge"),
("dock-body_8_2", "DockSwitch"),
("dock-body_8_3", "DockLED"),
("dock-case_2", "Block"),
("dock-case_3", "Block"),
("dock-case_4", "Block"),
("dock-case_5", "Prism"),
("dock-case_6", "Block"),
("c5", "Cartridge"),
("c6", "Cartridge"),
("discard-bin", "Block"),
("gripping", ("c3","nothing")),
),
"move grasped object",
(
1.000000,
"discard-bin",
(
(0.998794, 0.001675, -0.049061, ),
(-0.001682, 0.999999, -0.000097, ),
(0.049061, 0.000179, 0.998796, ),
),
(
(-2.096782, ),
(1.212035, ),
(6.707517, ),
),
)
),
(
(
("workspace", "Workspace"),
("table", "Block"),
("dock-case", "DockCase"),
("dock-case_1", "Block"),
("dock-body", "DockDrawer"),
("dock-body_2", "DockFrontPanel"),
("dock-body_2_1", "Prism"),
("dock-body_2_2", "Block"),
("dock-body_2_3", "Block"),
("dock-body_4", "DockHandle"),
("dock-body_4_1", "Prism"),
("dock-body_4_2", "Prism"),
("dock-body_5", "DockModule"),
("dock-body_5_1", "DockSlot"),
("c1", "Cartridge"),
("dock-body_5_2", "DockSwitch"),
("dock-body_5_3", "DockLED"),
("dock-body_6", "DockModule"),
("dock-body_6_1", "DockSlot"),
("c2", "Cartridge"),
("dock-body_6_2", "DockSwitch"),
("dock-body_6_3", "DockLED"),
("dock-body_7", "DockModule"),
("dock-body_7_1", "DockSlot"),
("c3", "Cartridge"),
("dock-body_7_2", "DockSwitch"),
("dock-body_7_3", "DockLED"),
("dock-body_8", "DockModule"),
("dock-body_8_1", "DockSlot"),
("c4", "Cartridge"),
("dock-body_8_2", "DockSwitch"),
("dock-body_8_3", "DockLED"),
("dock-case_2", "Block"),
("dock-case_3", "Block"),
("dock-case_4", "Block"),
("dock-case_5", "Prism"),
("dock-case_6", "Block"),
("c5", "Cartridge"),
("c6", "Cartridge"),
("discard-bin", "Block"),
("gripping", ("c3","nothing")),
),
"release",
(
1.000000,
)
),
(
(
("workspace", "Workspace"),
("table", "Block"),
("dock-case", "DockCase"),
("dock-case_1", "Block"),
("dock-body", "DockDrawer"),
("dock-body_2", "DockFrontPanel"),
("dock-body_2_1", "Prism"),
("dock-body_2_2", "Block"),
("dock-body_2_3", "Block"),
("dock-body_4", "DockHandle"),
("dock-body_4_1", "Prism"),
("dock-body_4_2", "Prism"),
("dock-body_5", "DockModule"),
("dock-body_5_1", "DockSlot"),
("c1", "Cartridge"),
("dock-body_5_2", "DockSwitch"),
("dock-body_5_3", "DockLED"),
("dock-body_6", "DockModule"),
("dock-body_6_1", "DockSlot"),
("c2", "Cartridge"),
("dock-body_6_2", "DockSwitch"),
("dock-body_6_3", "DockLED"),
("dock-body_7", "DockModule"),
("dock-body_7_1", "DockSlot"),
("c3", "Cartridge"),
("dock-body_7_2", "DockSwitch"),
("dock-body_7_3", "DockLED"),
("dock-body_8", "DockModule"),
("dock-body_8_1", "DockSlot"),
("c4", "Cartridge"),
("dock-body_8_2", "DockSwitch"),
("dock-body_8_3", "DockLED"),
("dock-case_2", "Block"),
("dock-case_3", "Block"),
("dock-case_4", "Block"),
("dock-case_5", "Prism"),
("dock-case_6", "Block"),
("c5", "Cartridge"),
("c6", "Cartridge"),
("discard-bin", "Block"),
("gripping", ("nothing","nothing")),
),
"move arm and grasp",
(
2.000000,
"dock-body",
)
),
(
(
("workspace", "Workspace"),
("table", "Block"),
("dock-case", "DockCase"),
("dock-case_1", "Block"),
("dock-body", "DockDrawer"),
("dock-body_2", "DockFrontPanel"),
("dock-body_2_1", "Prism"),
("dock-body_2_2", "Block"),
("dock-body_2_3", "Block"),
("dock-body_4", "DockHandle"),
("dock-body_4_1", "Prism"),
("dock-body_4_2", "Prism"),
("dock-body_5", "DockModule"),
("dock-body_5_1", "DockSlot"),
("c1", "Cartridge"),
("dock-body_5_2", "DockSwitch"),
("dock-body_5_3", "DockLED"),
("dock-body_6", "DockModule"),
("dock-body_6_1", "DockSlot"),
("c2", "Cartridge"),
("dock-body_6_2", "DockSwitch"),
("dock-body_6_3", "DockLED"),
("dock-body_7", "DockModule"),
("dock-body_7_1", "DockSlot"),
("c3", "Cartridge"),
("dock-body_7_2", "DockSwitch"),
("dock-body_7_3", "DockLED"),
("dock-body_8", "DockModule"),
("dock-body_8_1", "DockSlot"),
("c4", "Cartridge"),
("dock-body_8_2", "DockSwitch"),
("dock-body_8_3", "DockLED"),
("dock-case_2", "Block"),
("dock-case_3", "Block"),
("dock-case_4", "Block"),
("dock-case_5", "Prism"),
("dock-case_6", "Block"),
("c5", "Cartridge"),
("c6", "Cartridge"),
("discard-bin", "Block"),
("gripping", ("nothing","dock-body")),
),
"move grasped object",
(
2.000000,
"dock-case",
(
(0.999999, 0.001428, 0.000132, ),
(-0.001428, 0.999999, 0.000208, ),
(-0.000131, -0.000208, 1.000000, ),
),
(
(-0.060174, ),
(-0.008966, ),
(0.126360, ),
),
)
),
(
(
("workspace", "Workspace"),
("table", "Block"),
("dock-case", "DockCase"),
("dock-case_1", "Block"),
("dock-body", "DockDrawer"),
("dock-body_2", "DockFrontPanel"),
("dock-body_2_1", "Prism"),
("dock-body_2_2", "Block"),
("dock-body_2_3", "Block"),
("dock-body_4", "DockHandle"),
("dock-body_4_1", "Prism"),
("dock-body_4_2", "Prism"),
("dock-body_5", "DockModule"),
("dock-body_5_1", "DockSlot"),
("c1", "Cartridge"),
("dock-body_5_2", "DockSwitch"),
("dock-body_5_3", "DockLED"),
("dock-body_6", "DockModule"),
("dock-body_6_1", "DockSlot"),
("c2", "Cartridge"),
("dock-body_6_2", "DockSwitch"),
("dock-body_6_3", "DockLED"),
("dock-body_7", "DockModule"),
("dock-body_7_1", "DockSlot"),
("c3", "Cartridge"),
("dock-body_7_2", "DockSwitch"),
("dock-body_7_3", "DockLED"),
("dock-body_8", "DockModule"),
("dock-body_8_1", "DockSlot"),
("c4", "Cartridge"),
("dock-body_8_2", "DockSwitch"),
("dock-body_8_3", "DockLED"),
("dock-case_2", "Block"),
("dock-case_3", "Block"),
("dock-case_4", "Block"),
("dock-case_5", "Prism"),
("dock-case_6", "Block"),
("c5", "Cartridge"),
("c6", "Cartridge"),
("discard-bin", "Block"),
("gripping", ("nothing","dock-body")),
),
"release",
(
2.000000,
)
),
)
| 25.512241 | 40 | 0.560641 | 1,751 | 13,547 | 4.073672 | 0.039406 | 0.277022 | 0.051731 | 0.106547 | 0.952474 | 0.952474 | 0.952474 | 0.952474 | 0.952474 | 0.952474 | 0 | 0.073824 | 0.146084 | 13,547 | 530 | 41 | 25.560377 | 0.54279 | 0 | 0 | 0.867925 | 0 | 0 | 0.561674 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
07450d3677f03066f28ae592e9e2f317b8d33be3 | 3,692 | py | Python | tests/migrations/0004_auto_20180917_0820.py | jobelenus/opinionated-reporting | 7b41f479e7aa8d9bd9a374f0799df92d430b7a6f | [
"MIT"
] | null | null | null | tests/migrations/0004_auto_20180917_0820.py | jobelenus/opinionated-reporting | 7b41f479e7aa8d9bd9a374f0799df92d430b7a6f | [
"MIT"
] | null | null | null | tests/migrations/0004_auto_20180917_0820.py | jobelenus/opinionated-reporting | 7b41f479e7aa8d9bd9a374f0799df92d430b7a6f | [
"MIT"
] | null | null | null | # Generated by Django 2.1 on 2018-09-17 12:20
from django.db import migrations
import django.db.models.deletion
import opinionated_reporting.fields
class Migration(migrations.Migration):
dependencies = [
('tests', '0003_orderedproductfact_quantity'),
]
operations = [
migrations.AlterField(
model_name='orderedfact',
name='created_on',
field=opinionated_reporting.fields.DimensionForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, related_name='ordered_created_on', to='opinionated_reporting.DateDimension'),
),
migrations.AlterField(
model_name='orderedfact',
name='customer',
field=opinionated_reporting.fields.DimensionForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, to='tests.CustomerDimension'),
),
migrations.AlterField(
model_name='orderedfact',
name='hour_created_on',
field=opinionated_reporting.fields.DimensionForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, related_name='ordered_hour_created_on', to='opinionated_reporting.HourDimension'),
),
migrations.AlterField(
model_name='orderedfact',
name='hour_ordered_on',
field=opinionated_reporting.fields.DimensionForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, related_name='ordered_hour_ordered_on', to='opinionated_reporting.HourDimension'),
),
migrations.AlterField(
model_name='orderedfact',
name='ordered_on',
field=opinionated_reporting.fields.DimensionForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, related_name='ordered_ordered_on', to='opinionated_reporting.DateDimension'),
),
migrations.AlterField(
model_name='orderedproductfact',
name='created_on',
field=opinionated_reporting.fields.DimensionForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, related_name='orderedproduct_created_on', to='opinionated_reporting.DateDimension'),
),
migrations.AlterField(
model_name='orderedproductfact',
name='customer',
field=opinionated_reporting.fields.DimensionForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, to='tests.CustomerDimension'),
),
migrations.AlterField(
model_name='orderedproductfact',
name='hour_created_on',
field=opinionated_reporting.fields.DimensionForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, related_name='orderedproduct_hour_created_on', to='opinionated_reporting.HourDimension'),
),
migrations.AlterField(
model_name='orderedproductfact',
name='hour_ordered_on',
field=opinionated_reporting.fields.DimensionForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, related_name='orderedproduct_hour_ordered_on', to='opinionated_reporting.HourDimension'),
),
migrations.AlterField(
model_name='orderedproductfact',
name='ordered_on',
field=opinionated_reporting.fields.DimensionForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, related_name='orderedproduct_ordered_on', to='opinionated_reporting.DateDimension'),
),
migrations.AlterField(
model_name='orderedproductfact',
name='product',
field=opinionated_reporting.fields.DimensionForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, to='tests.ProductDimension'),
),
]
| 52 | 212 | 0.700975 | 363 | 3,692 | 6.898072 | 0.137741 | 0.159744 | 0.067093 | 0.105431 | 0.90655 | 0.90655 | 0.886182 | 0.874601 | 0.874601 | 0.874601 | 0 | 0.006044 | 0.193391 | 3,692 | 70 | 213 | 52.742857 | 0.834788 | 0.011647 | 0 | 0.703125 | 1 | 0 | 0.236633 | 0.14697 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.046875 | 0 | 0.09375 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
074c42a5c4789de212f4951f63d558c9bc3c660c | 24,475 | py | Python | sdk/python/pulumi_alicloud/rocketmq/topic.py | pulumi/pulumi-alicloud | 9c34d84b4588a7c885c6bec1f03b5016e5a41683 | [
"ECL-2.0",
"Apache-2.0"
] | 42 | 2019-03-18T06:34:37.000Z | 2022-03-24T07:08:57.000Z | sdk/python/pulumi_alicloud/rocketmq/topic.py | pulumi/pulumi-alicloud | 9c34d84b4588a7c885c6bec1f03b5016e5a41683 | [
"ECL-2.0",
"Apache-2.0"
] | 152 | 2019-04-15T21:03:44.000Z | 2022-03-29T18:00:57.000Z | sdk/python/pulumi_alicloud/rocketmq/topic.py | pulumi/pulumi-alicloud | 9c34d84b4588a7c885c6bec1f03b5016e5a41683 | [
"ECL-2.0",
"Apache-2.0"
] | 3 | 2020-08-26T17:30:07.000Z | 2021-07-05T01:37:45.000Z | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['TopicArgs', 'Topic']
@pulumi.input_type
class TopicArgs:
def __init__(__self__, *,
instance_id: pulumi.Input[str],
message_type: pulumi.Input[int],
perm: Optional[pulumi.Input[int]] = None,
remark: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
topic: Optional[pulumi.Input[str]] = None,
topic_name: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing a Topic resource.
:param pulumi.Input[str] instance_id: ID of the ONS Instance that owns the topics.
:param pulumi.Input[int] message_type: The type of the message. Read [Ons Topic Create](https://www.alibabacloud.com/help/doc-detail/29591.html) for further details.
:param pulumi.Input[int] perm: This attribute is used to set the read-write mode for the topic. Read [Request parameters](https://www.alibabacloud.com/help/doc-detail/56880.html) for further details.
:param pulumi.Input[str] remark: This attribute is a concise description of topic. The length cannot exceed 128.
:param pulumi.Input[Mapping[str, Any]] tags: A mapping of tags to assign to the resource.
- Key: It can be up to 64 characters in length. It cannot begin with "aliyun", "acs:", "http://", or "https://". It cannot be a null string.
- Value: It can be up to 128 characters in length. It cannot begin with "aliyun", "acs:", "http://", or "https://". It can be a null string.
:param pulumi.Input[str] topic: Replaced by `topic_name` after version 1.97.0.
:param pulumi.Input[str] topic_name: Name of the topic. Two topics on a single instance cannot have the same name and the name cannot start with 'GID' or 'CID'. The length cannot exceed 64 characters.
"""
pulumi.set(__self__, "instance_id", instance_id)
pulumi.set(__self__, "message_type", message_type)
if perm is not None:
pulumi.set(__self__, "perm", perm)
if remark is not None:
pulumi.set(__self__, "remark", remark)
if tags is not None:
pulumi.set(__self__, "tags", tags)
if topic is not None:
warnings.warn("""Field 'topic' has been deprecated from version 1.97.0. Use 'topic_name' instead.""", DeprecationWarning)
pulumi.log.warn("""topic is deprecated: Field 'topic' has been deprecated from version 1.97.0. Use 'topic_name' instead.""")
if topic is not None:
pulumi.set(__self__, "topic", topic)
if topic_name is not None:
pulumi.set(__self__, "topic_name", topic_name)
@property
@pulumi.getter(name="instanceId")
def instance_id(self) -> pulumi.Input[str]:
"""
ID of the ONS Instance that owns the topics.
"""
return pulumi.get(self, "instance_id")
@instance_id.setter
def instance_id(self, value: pulumi.Input[str]):
pulumi.set(self, "instance_id", value)
@property
@pulumi.getter(name="messageType")
def message_type(self) -> pulumi.Input[int]:
"""
The type of the message. Read [Ons Topic Create](https://www.alibabacloud.com/help/doc-detail/29591.html) for further details.
"""
return pulumi.get(self, "message_type")
@message_type.setter
def message_type(self, value: pulumi.Input[int]):
pulumi.set(self, "message_type", value)
@property
@pulumi.getter
def perm(self) -> Optional[pulumi.Input[int]]:
"""
This attribute is used to set the read-write mode for the topic. Read [Request parameters](https://www.alibabacloud.com/help/doc-detail/56880.html) for further details.
"""
return pulumi.get(self, "perm")
@perm.setter
def perm(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "perm", value)
@property
@pulumi.getter
def remark(self) -> Optional[pulumi.Input[str]]:
"""
This attribute is a concise description of topic. The length cannot exceed 128.
"""
return pulumi.get(self, "remark")
@remark.setter
def remark(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "remark", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
"""
A mapping of tags to assign to the resource.
- Key: It can be up to 64 characters in length. It cannot begin with "aliyun", "acs:", "http://", or "https://". It cannot be a null string.
- Value: It can be up to 128 characters in length. It cannot begin with "aliyun", "acs:", "http://", or "https://". It can be a null string.
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
pulumi.set(self, "tags", value)
@property
@pulumi.getter
def topic(self) -> Optional[pulumi.Input[str]]:
"""
Replaced by `topic_name` after version 1.97.0.
"""
return pulumi.get(self, "topic")
@topic.setter
def topic(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "topic", value)
@property
@pulumi.getter(name="topicName")
def topic_name(self) -> Optional[pulumi.Input[str]]:
"""
Name of the topic. Two topics on a single instance cannot have the same name and the name cannot start with 'GID' or 'CID'. The length cannot exceed 64 characters.
"""
return pulumi.get(self, "topic_name")
@topic_name.setter
def topic_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "topic_name", value)
@pulumi.input_type
class _TopicState:
def __init__(__self__, *,
instance_id: Optional[pulumi.Input[str]] = None,
message_type: Optional[pulumi.Input[int]] = None,
perm: Optional[pulumi.Input[int]] = None,
remark: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
topic: Optional[pulumi.Input[str]] = None,
topic_name: Optional[pulumi.Input[str]] = None):
"""
Input properties used for looking up and filtering Topic resources.
:param pulumi.Input[str] instance_id: ID of the ONS Instance that owns the topics.
:param pulumi.Input[int] message_type: The type of the message. Read [Ons Topic Create](https://www.alibabacloud.com/help/doc-detail/29591.html) for further details.
:param pulumi.Input[int] perm: This attribute is used to set the read-write mode for the topic. Read [Request parameters](https://www.alibabacloud.com/help/doc-detail/56880.html) for further details.
:param pulumi.Input[str] remark: This attribute is a concise description of topic. The length cannot exceed 128.
:param pulumi.Input[Mapping[str, Any]] tags: A mapping of tags to assign to the resource.
- Key: It can be up to 64 characters in length. It cannot begin with "aliyun", "acs:", "http://", or "https://". It cannot be a null string.
- Value: It can be up to 128 characters in length. It cannot begin with "aliyun", "acs:", "http://", or "https://". It can be a null string.
:param pulumi.Input[str] topic: Replaced by `topic_name` after version 1.97.0.
:param pulumi.Input[str] topic_name: Name of the topic. Two topics on a single instance cannot have the same name and the name cannot start with 'GID' or 'CID'. The length cannot exceed 64 characters.
"""
if instance_id is not None:
pulumi.set(__self__, "instance_id", instance_id)
if message_type is not None:
pulumi.set(__self__, "message_type", message_type)
if perm is not None:
pulumi.set(__self__, "perm", perm)
if remark is not None:
pulumi.set(__self__, "remark", remark)
if tags is not None:
pulumi.set(__self__, "tags", tags)
if topic is not None:
warnings.warn("""Field 'topic' has been deprecated from version 1.97.0. Use 'topic_name' instead.""", DeprecationWarning)
pulumi.log.warn("""topic is deprecated: Field 'topic' has been deprecated from version 1.97.0. Use 'topic_name' instead.""")
if topic is not None:
pulumi.set(__self__, "topic", topic)
if topic_name is not None:
pulumi.set(__self__, "topic_name", topic_name)
@property
@pulumi.getter(name="instanceId")
def instance_id(self) -> Optional[pulumi.Input[str]]:
"""
ID of the ONS Instance that owns the topics.
"""
return pulumi.get(self, "instance_id")
@instance_id.setter
def instance_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "instance_id", value)
@property
@pulumi.getter(name="messageType")
def message_type(self) -> Optional[pulumi.Input[int]]:
"""
The type of the message. Read [Ons Topic Create](https://www.alibabacloud.com/help/doc-detail/29591.html) for further details.
"""
return pulumi.get(self, "message_type")
@message_type.setter
def message_type(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "message_type", value)
@property
@pulumi.getter
def perm(self) -> Optional[pulumi.Input[int]]:
"""
This attribute is used to set the read-write mode for the topic. Read [Request parameters](https://www.alibabacloud.com/help/doc-detail/56880.html) for further details.
"""
return pulumi.get(self, "perm")
@perm.setter
def perm(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "perm", value)
@property
@pulumi.getter
def remark(self) -> Optional[pulumi.Input[str]]:
"""
This attribute is a concise description of topic. The length cannot exceed 128.
"""
return pulumi.get(self, "remark")
@remark.setter
def remark(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "remark", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
"""
A mapping of tags to assign to the resource.
- Key: It can be up to 64 characters in length. It cannot begin with "aliyun", "acs:", "http://", or "https://". It cannot be a null string.
- Value: It can be up to 128 characters in length. It cannot begin with "aliyun", "acs:", "http://", or "https://". It can be a null string.
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
pulumi.set(self, "tags", value)
@property
@pulumi.getter
def topic(self) -> Optional[pulumi.Input[str]]:
"""
Replaced by `topic_name` after version 1.97.0.
"""
return pulumi.get(self, "topic")
@topic.setter
def topic(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "topic", value)
@property
@pulumi.getter(name="topicName")
def topic_name(self) -> Optional[pulumi.Input[str]]:
"""
Name of the topic. Two topics on a single instance cannot have the same name and the name cannot start with 'GID' or 'CID'. The length cannot exceed 64 characters.
"""
return pulumi.get(self, "topic_name")
@topic_name.setter
def topic_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "topic_name", value)
class Topic(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
instance_id: Optional[pulumi.Input[str]] = None,
message_type: Optional[pulumi.Input[int]] = None,
perm: Optional[pulumi.Input[int]] = None,
remark: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
topic: Optional[pulumi.Input[str]] = None,
topic_name: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
Provides an ONS topic resource.
For more information about how to use it, see [RocketMQ Topic Management API](https://www.alibabacloud.com/help/doc-detail/29591.html).
> **NOTE:** Available in 1.53.0+
## Example Usage
Basic Usage
```python
import pulumi
import pulumi_alicloud as alicloud
config = pulumi.Config()
name = config.get("name")
if name is None:
name = "onsInstanceName"
topic = config.get("topic")
if topic is None:
topic = "onsTopicName"
default_instance = alicloud.rocketmq.Instance("defaultInstance", remark="default_ons_instance_remark")
default_topic = alicloud.rocketmq.Topic("defaultTopic",
topic_name=topic,
instance_id=default_instance.id,
message_type=0,
remark="dafault_ons_topic_remark")
```
## Import
ONS TOPIC can be imported using the id, e.g.
```sh
$ pulumi import alicloud:rocketmq/topic:Topic topic MQ_INST_1234567890_Baso1234567:onsTopicDemo
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] instance_id: ID of the ONS Instance that owns the topics.
:param pulumi.Input[int] message_type: The type of the message. Read [Ons Topic Create](https://www.alibabacloud.com/help/doc-detail/29591.html) for further details.
:param pulumi.Input[int] perm: This attribute is used to set the read-write mode for the topic. Read [Request parameters](https://www.alibabacloud.com/help/doc-detail/56880.html) for further details.
:param pulumi.Input[str] remark: This attribute is a concise description of topic. The length cannot exceed 128.
:param pulumi.Input[Mapping[str, Any]] tags: A mapping of tags to assign to the resource.
- Key: It can be up to 64 characters in length. It cannot begin with "aliyun", "acs:", "http://", or "https://". It cannot be a null string.
- Value: It can be up to 128 characters in length. It cannot begin with "aliyun", "acs:", "http://", or "https://". It can be a null string.
:param pulumi.Input[str] topic: Replaced by `topic_name` after version 1.97.0.
:param pulumi.Input[str] topic_name: Name of the topic. Two topics on a single instance cannot have the same name and the name cannot start with 'GID' or 'CID'. The length cannot exceed 64 characters.
"""
...
@overload
def __init__(__self__,
             resource_name: str,
             args: TopicArgs,
             opts: Optional[pulumi.ResourceOptions] = None):
    """
    Provides an ONS topic resource.

    For more information about how to use it, see
    [RocketMQ Topic Management API](https://www.alibabacloud.com/help/doc-detail/29591.html).

    > **NOTE:** Available in 1.53.0+

    ## Example Usage

    Basic Usage

    ```python
    import pulumi
    import pulumi_alicloud as alicloud

    config = pulumi.Config()
    name = config.get("name")
    if name is None:
        name = "onsInstanceName"
    topic = config.get("topic")
    if topic is None:
        topic = "onsTopicName"
    default_instance = alicloud.rocketmq.Instance("defaultInstance", remark="default_ons_instance_remark")
    default_topic = alicloud.rocketmq.Topic("defaultTopic",
        topic_name=topic,
        instance_id=default_instance.id,
        message_type=0,
        remark="default_ons_topic_remark")
    ```

    ## Import

    ONS TOPIC can be imported using the id, e.g.

    ```sh
    $ pulumi import alicloud:rocketmq/topic:Topic topic MQ_INST_1234567890_Baso1234567:onsTopicDemo
    ```

    :param str resource_name: The name of the resource.
    :param TopicArgs args: The arguments to use to populate this resource's properties.
    :param pulumi.ResourceOptions opts: Options for the resource.
    """
    ...
def __init__(__self__, resource_name: str, *args, **kwargs):
    """Create a Topic resource from either a ``TopicArgs`` object or keyword arguments.

    Inspects the positional/keyword arguments to decide which ``__init__``
    overload the caller used, then forwards to ``_internal_init``.
    """
    resource_args, opts = _utilities.get_resource_args_opts(TopicArgs, pulumi.ResourceOptions, *args, **kwargs)
    if resource_args is None:
        # Plain keyword-argument overload: forward everything untouched.
        __self__._internal_init(resource_name, *args, **kwargs)
    else:
        # TopicArgs overload: expand the args object into keyword arguments.
        __self__._internal_init(resource_name, opts, **resource_args.__dict__)
def _internal_init(__self__,
                   resource_name: str,
                   opts: Optional[pulumi.ResourceOptions] = None,
                   instance_id: Optional[pulumi.Input[str]] = None,
                   message_type: Optional[pulumi.Input[int]] = None,
                   perm: Optional[pulumi.Input[int]] = None,
                   remark: Optional[pulumi.Input[str]] = None,
                   tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                   topic: Optional[pulumi.Input[str]] = None,
                   topic_name: Optional[pulumi.Input[str]] = None,
                   __props__=None):
    """Shared initializer backing both ``__init__`` overloads.

    Normalizes/validates the resource options, builds the ``TopicArgs``
    property bag (unless an existing resource is being rehydrated through a
    valid ``opts.id``) and registers the resource with the Pulumi engine.

    :raises TypeError: if ``opts`` is not a ``ResourceOptions``, if
        ``__props__`` is passed without ``opts.id``, or if a required
        property is missing.
    """
    if opts is None:
        opts = pulumi.ResourceOptions()
    if not isinstance(opts, pulumi.ResourceOptions):
        raise TypeError('Expected resource options to be a ResourceOptions instance')
    if opts.version is None:
        opts.version = _utilities.get_version()
    if opts.id is None:
        if __props__ is not None:
            raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
        __props__ = TopicArgs.__new__(TopicArgs)
        # Required inputs: must be supplied unless the engine provided a URN
        # (i.e. the resource already exists and is merely being looked up).
        for prop_name, value in (('instance_id', instance_id), ('message_type', message_type)):
            if value is None and not opts.urn:
                raise TypeError(f"Missing required property '{prop_name}'")
            __props__.__dict__[prop_name] = value
        __props__.__dict__["perm"] = perm
        __props__.__dict__["remark"] = remark
        __props__.__dict__["tags"] = tags
        if topic is not None and not opts.urn:
            # 'topic' is kept only for backwards compatibility with < 1.97.0.
            warnings.warn("""Field 'topic' has been deprecated from version 1.97.0. Use 'topic_name' instead.""", DeprecationWarning)
            pulumi.log.warn("""topic is deprecated: Field 'topic' has been deprecated from version 1.97.0. Use 'topic_name' instead.""")
        __props__.__dict__["topic"] = topic
        __props__.__dict__["topic_name"] = topic_name
    super(Topic, __self__).__init__(
        'alicloud:rocketmq/topic:Topic',
        resource_name,
        __props__,
        opts)
@staticmethod
def get(resource_name: str,
        id: pulumi.Input[str],
        opts: Optional[pulumi.ResourceOptions] = None,
        instance_id: Optional[pulumi.Input[str]] = None,
        message_type: Optional[pulumi.Input[int]] = None,
        perm: Optional[pulumi.Input[int]] = None,
        remark: Optional[pulumi.Input[str]] = None,
        tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
        topic: Optional[pulumi.Input[str]] = None,
        topic_name: Optional[pulumi.Input[str]] = None) -> 'Topic':
    """
    Get an existing Topic resource's state with the given name, id, and optional extra
    properties used to qualify the lookup.

    :param str resource_name: The unique name of the resulting resource.
    :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
    :param pulumi.ResourceOptions opts: Options for the resource.
    :param pulumi.Input[str] instance_id: ID of the ONS Instance that owns the topics.
    :param pulumi.Input[int] message_type: The type of the message. Read [Ons Topic Create](https://www.alibabacloud.com/help/doc-detail/29591.html) for further details.
    :param pulumi.Input[int] perm: This attribute is used to set the read-write mode for the topic. Read [Request parameters](https://www.alibabacloud.com/help/doc-detail/56880.html) for further details.
    :param pulumi.Input[str] remark: This attribute is a concise description of topic. The length cannot exceed 128.
    :param pulumi.Input[Mapping[str, Any]] tags: A mapping of tags to assign to the resource.
           - Key: It can be up to 64 characters in length. It cannot begin with "aliyun", "acs:", "http://", or "https://". It cannot be a null string.
           - Value: It can be up to 128 characters in length. It cannot begin with "aliyun", "acs:", "http://", or "https://". It can be a null string.
    :param pulumi.Input[str] topic: Replaced by `topic_name` after version 1.97.0.
    :param pulumi.Input[str] topic_name: Name of the topic. Two topics on a single instance cannot have the same name and the name cannot start with 'GID' or 'CID'. The length cannot exceed 64 characters.
    """
    opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
    __props__ = _TopicState.__new__(_TopicState)
    # Copy every supplied state property onto the fresh state bag in one go.
    __props__.__dict__.update({
        "instance_id": instance_id,
        "message_type": message_type,
        "perm": perm,
        "remark": remark,
        "tags": tags,
        "topic": topic,
        "topic_name": topic_name,
    })
    return Topic(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="instanceId")
def instance_id(self) -> pulumi.Output[str]:
    """
    ID of the ONS Instance that owns the topics.
    """
    # Exposed provider-side under the camelCase name "instanceId".
    return pulumi.get(self, "instance_id")
@property
@pulumi.getter(name="messageType")
def message_type(self) -> pulumi.Output[int]:
    """
    The type of the message. Read [Ons Topic Create](https://www.alibabacloud.com/help/doc-detail/29591.html) for further details.
    """
    # Exposed provider-side under the camelCase name "messageType".
    return pulumi.get(self, "message_type")
@property
@pulumi.getter
def perm(self) -> pulumi.Output[Optional[int]]:
    """
    This attribute is used to set the read-write mode for the topic. Read [Request parameters](https://www.alibabacloud.com/help/doc-detail/56880.html) for further details.
    """
    return pulumi.get(self, "perm")
@property
@pulumi.getter
def remark(self) -> pulumi.Output[Optional[str]]:
    """
    This attribute is a concise description of topic. The length cannot exceed 128.
    """
    return pulumi.get(self, "remark")
@property
@pulumi.getter
def tags(self) -> pulumi.Output[Optional[Mapping[str, Any]]]:
    """
    A mapping of tags to assign to the resource.
    - Key: It can be up to 64 characters in length. It cannot begin with "aliyun", "acs:", "http://", or "https://". It cannot be a null string.
    - Value: It can be up to 128 characters in length. It cannot begin with "aliyun", "acs:", "http://", or "https://". It can be a null string.
    """
    return pulumi.get(self, "tags")
@property
@pulumi.getter
def topic(self) -> pulumi.Output[str]:
    """
    Replaced by `topic_name` after version 1.97.0.

    Deprecated since 1.97.0 — setting it at creation time emits a
    DeprecationWarning; prefer ``topic_name``.
    """
    return pulumi.get(self, "topic")
@property
@pulumi.getter(name="topicName")
def topic_name(self) -> pulumi.Output[str]:
    """
    Name of the topic. Two topics on a single instance cannot have the same name and the name cannot start with 'GID' or 'CID'. The length cannot exceed 64 characters.
    """
    # Exposed provider-side under the camelCase name "topicName".
    return pulumi.get(self, "topic_name")
| 46.619048 | 208 | 0.631297 | 3,175 | 24,475 | 4.72378 | 0.071811 | 0.069676 | 0.07221 | 0.048406 | 0.893386 | 0.880051 | 0.867649 | 0.858248 | 0.850313 | 0.845979 | 0 | 0.013554 | 0.255444 | 24,475 | 524 | 209 | 46.708015 | 0.809472 | 0.431461 | 0 | 0.763838 | 1 | 0.02214 | 0.110898 | 0.002322 | 0 | 0 | 0 | 0 | 0 | 1 | 0.154982 | false | 0.00369 | 0.01845 | 0 | 0.265683 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
075b5921420d4fe2bd5b438e278e461973035ddb | 103 | py | Python | src/pyetllib/etllib/j2.py | slouchart/pyetllib | 133df36a1628f413cd60a86e4c7eac2738844d17 | [
"MIT"
] | 2 | 2020-04-01T10:08:02.000Z | 2021-03-07T15:18:14.000Z | src/pyetllib/etllib/j2.py | slouchart/pyetllib | 133df36a1628f413cd60a86e4c7eac2738844d17 | [
"MIT"
] | null | null | null | src/pyetllib/etllib/j2.py | slouchart/pyetllib | 133df36a1628f413cd60a86e4c7eac2738844d17 | [
"MIT"
] | 1 | 2020-10-13T13:23:02.000Z | 2020-10-13T13:23:02.000Z | # flake8: noqa
from .tools.j2 import * # pragma: no cover
from .utils.j2 import * # pragma: no cover
| 25.75 | 43 | 0.679612 | 16 | 103 | 4.375 | 0.625 | 0.228571 | 0.4 | 0.457143 | 0.6 | 0 | 0 | 0 | 0 | 0 | 0 | 0.036585 | 0.203884 | 103 | 3 | 44 | 34.333333 | 0.817073 | 0.446602 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
4aeb4aba1abbcbde570be390e3d89d1d84009b40 | 795,198 | py | Python | Wkikimoves2.py | tayayan/suisho | b456457410890ed09a8eac13efb4be259a65430c | [
"MIT"
] | 1 | 2021-01-26T11:38:45.000Z | 2021-01-26T11:38:45.000Z | Wkikimoves2.py | tayayan/suisho | b456457410890ed09a8eac13efb4be259a65430c | [
"MIT"
] | null | null | null | Wkikimoves2.py | tayayan/suisho | b456457410890ed09a8eac13efb4be259a65430c | [
"MIT"
] | null | null | null | #後手番合法手生成
import re
import Bboard
import Bboardbak2
import Wboard
import Wboardbak2
import board
import oute
# Check that, after playing a move, the second player's (White's) king is not
# left in check.
def kaihimore(sfen):
    """Reset the check flag before evaluating the move *sfen*.

    NOTE(review): as written this only clears ``oute.oute`` to 0 and never
    reads *sfen* — the actual check detection presumably happens elsewhere
    in the ``oute`` module or later in this file; confirm against the full
    source before relying on it.
    """
    oute.oute = 0
#以下合法手生成コード (legal move generation code follows)
def move1():
global depth1
w1a,w2a,w3a,w4a,w5a,w6a,w7a,w8a,w9a = '','','','','','','','',''
w1b,w2b,w3b,w4b,w5b,w6b,w7b,w8b,w9b = '','','','','','','','',''
w1c,w2c,w3c,w4c,w5c,w6c,w7c,w8c,w9c = '','','','','','','','',''
w1d,w2d,w3d,w4d,w5d,w6d,w7d,w8d,w9d = '','','','','','','','',''
w1e,w2e,w3e,w4e,w5e,w6e,w7e,w8e,w9e = '','','','','','','','',''
w1f,w2f,w3f,w4f,w5f,w6f,w7f,w8f,w9f = '','','','','','','','',''
w1g,w2g,w3g,w4g,w5g,w6g,w7g,w8g,w9g = '','','','','','','','',''
w1h,w2h,w3h,w4h,w5h,w6h,w7h,w8h,w9h = '','','','','','','','',''
w1i,w2i,w3i,w4i,w5i,w6i,w7i,w8i,w9i = '','','','','','','','',''
depth1 = []
Bboardbak2.yobidashi()
Wboardbak2.yobidashi()
board.synth()
if Wboard.w1i !='':
if re.match(r'[gk+]',Wboard.w1i)and w2i=='':
moves = '1i2i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[gk+]',Wboard.w1i)and w1h=='':
moves = '1i1h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|\+b|s|k',Wboard.w1i)and w2h=='':
moves = '1i2h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r',Wboard.w1i)and w2i=='':
moves = '1i2i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r',Wboard.w1i)and w1h=='':
moves = '1i1h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[bs]',Wboard.w1i)and w1h=='':
moves = '1i2h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w1i)and w1g==''\
and board.s1h=='':
moves = '1i1g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w1i)and w1g==''\
and board.s1h=='':
moves = '1i1g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w1i)and w1f==''\
and board.s1h+board.s1g=='':
moves = '1i1f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w1i)and w1f==''\
and board.s1h+board.s1g=='':
moves = '1i1f+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w1i)and w1e==''\
and board.s1h+board.s1g+board.s1f=='':
moves = '1i1e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w1i)and w1e==''\
and board.s1h+board.s1g+board.s1f=='':
moves = '1i1e+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w1i)and w1d==''\
and board.s1h+board.s1g+board.s1f+board.s1e=='':
moves = '1i1d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w1i)and w1d==''\
and board.s1h+board.s1g+board.s1f+board.s1e=='':
moves = '1i1d+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w1i)and w1c==''\
and board.s1h+board.s1g+board.s1f+board.s1e+board.s1d=='':
moves = '1i1c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w1i)and w1c==''\
and board.s1h+board.s1g+board.s1f+board.s1e+board.s1d=='':
moves = '1i1c+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w1i)and w1b==''\
and board.s1h+board.s1g+board.s1f+board.s1e+board.s1d+board.s1c=='':
moves = '1i1b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w1i)and w1b==''\
and board.s1h+board.s1g+board.s1f+board.s1e+board.s1d+board.s1c=='':
moves = '1i1b+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w1i)and w1a==''\
and board.s1h+board.s1g+board.s1f+board.s1e+board.s1d+board.s1c+board.s1b=='':
moves = '1i1a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w1i)and w1a==''\
and board.s1h+board.s1g+board.s1f+board.s1e+board.s1d+board.s1c+board.s1b=='':
moves = '1i1a+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w1i)and w3i==''\
and board.s2i=='':
moves = '1i3i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w1i)and w3i==''\
and board.s2i=='':
moves = '1i3i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w1i)and w4i==''\
and board.s2i+board.s3i=='':
moves = '1i4i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w1i)and w4i==''\
and board.s2i+board.s3i=='':
moves = '1i4i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w1i)and w5i==''\
and board.s2i+board.s3i+board.s4i=='':
moves = '1i5i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w1i)and w5i==''\
and board.s2i+board.s3i+board.s4i=='':
moves = '1i5i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w1i)and w6i==''\
and board.s2i+board.s3i+board.s4i+board.s5i=='':
moves = '1i6i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w1i)and w6i==''\
and board.s2i+board.s3i+board.s4i+board.s5i=='':
moves = '1i6i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w1i)and w7i==''\
and board.s2i+board.s3i+board.s4i+board.s5i+board.s6i=='':
moves = '1i7i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w1i)and w7i==''\
and board.s2i+board.s3i+board.s4i+board.s5i+board.s6i=='':
moves = '1i7i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w1i)and w8i==''\
and board.s2i+board.s3i+board.s4i+board.s5i+board.s6i+board.s7i=='':
moves = '1i8i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w1i)and w8i==''\
and board.s2i+board.s3i+board.s4i+board.s5i+board.s6i+board.s7i=='':
moves = '1i8i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w1i)and w9i==''\
and board.s2i+board.s3i+board.s4i+board.s5i+board.s6i+board.s7i+board.s8i=='':
moves = '1i9i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w1i)and w9i==''\
and board.s2i+board.s3i+board.s4i+board.s5i+board.s6i+board.s7i+board.s8i=='':
moves = '1i9i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w1i)and w3g==''\
and board.s2h=='':
moves = '1i3g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w1i)and w4f==''\
and board.s2h+board.s3g=='':
moves = '1i4f+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w1i)and w5e==''\
and board.s2h+board.s3g+board.s4f=='':
moves = '1i5e+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w1i)and w6d==''\
and board.s2h+board.s3g+board.s4f+board.s5e=='':
moves = '1i6d+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w1i)and w7c==''\
and board.s2h+board.s3g+board.s4f+board.s5e+board.s6d=='':
moves = '1i7c+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w1i)and w8b==''\
and board.s2h+board.s3g+board.s4f+board.s5e+board.s6d+board.s7c=='':
moves = '1i8b+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w1i)and w9a==''\
and board.s2h+board.s3g+board.s4f+board.s5e+board.s6d+board.s7c+board.s8b=='':
moves = '1i9a+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w1i)and w3g==''\
and board.s2h=='':
moves = '1i3g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w1i)and w4f==''\
and board.s2h+board.s3g=='':
moves = '1i4f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w1i)and w5e==''\
and board.s2h+board.s3g+board.s4f=='':
moves = '1i5e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w1i)and w6d==''\
and board.s2h+board.s3g+board.s4f+board.s5e=='':
moves = '1i6d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w1i)and w7c==''\
and board.s2h+board.s3g+board.s4f+board.s5e+board.s6d=='':
moves = '1i7c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w1i)and w8b==''\
and board.s2h+board.s3g+board.s4f+board.s5e+board.s6d+board.s7c=='':
moves = '1i8b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w1i)and w9a==''\
and board.s2h+board.s3g+board.s4f+board.s5e+board.s6d+board.s7c+board.s8b=='':
moves = '1i9a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.w2i !='':
if re.match(r'[gk+]',Wboard.w2i)and w1i=='':
moves = '2i1i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[gk+]',Wboard.w2i)and w3i=='':
moves = '2i3i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[gk+]',Wboard.w2i)and w2h=='':
moves = '2i2h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|\+b|s|k',Wboard.w2i)and w1h=='':
moves = '2i1h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|\+b|s|k',Wboard.w2i)and w3h=='':
moves = '2i3h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r',Wboard.w2i)and w1i=='':
moves = '2i1i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r',Wboard.w2i)and w3i=='':
moves = '2i3i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r',Wboard.w2i)and w2h=='':
moves = '2i2h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[bs]',Wboard.w2i)and w1h=='':
moves = '2i1h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[bs]',Wboard.w2i)and w3h=='':
moves = '2i3h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w2i)and w2g==''\
and board.s2h=='':
moves = '2i2g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w2i)and w2g==''\
and board.s2h=='':
moves = '2i2g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w2i)and w2f==''\
and board.s2h+board.s2g=='':
moves = '2i2f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w2i)and w2f==''\
and board.s2h+board.s2g=='':
moves = '2i2f+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w2i)and w2e==''\
and board.s2h+board.s2g+board.s2f=='':
moves = '2i2e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w2i)and w2e==''\
and board.s2h+board.s2g+board.s2f=='':
moves = '2i2e+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w2i)and w2d==''\
and board.s2h+board.s2g+board.s2f+board.s2e=='':
moves = '2i2d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w2i)and w2d==''\
and board.s2h+board.s2g+board.s2f+board.s2e=='':
moves = '2i2d+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w2i)and w2c==''\
and board.s2h+board.s2g+board.s2f+board.s2e+board.s2d=='':
moves = '2i2c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w2i)and w2c==''\
and board.s2h+board.s2g+board.s2f+board.s2e+board.s2d=='':
moves = '2i2c+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w2i)and w2b==''\
and board.s2h+board.s2g+board.s2f+board.s2e+board.s2d+board.s2c=='':
moves = '2i2b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w2i)and w2b==''\
and board.s2h+board.s2g+board.s2f+board.s2e+board.s2d+board.s2c=='':
moves = '2i2b+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w2i)and w2a==''\
and board.s2h+board.s2g+board.s2f+board.s2e+board.s2d+board.s2c+board.s2b=='':
moves = '2i2a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w2i)and w2a==''\
and board.s2h+board.s2g+board.s2f+board.s2e+board.s2d+board.s2c+board.s2b=='':
moves = '2i2a+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w2i)and w4i==''\
and board.s3i=='':
moves = '2i4i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w2i)and w4i==''\
and board.s3i=='':
moves = '2i4i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w2i)and w5i==''\
and board.s3i+board.s4i=='':
moves = '2i5i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w2i)and w5i==''\
and board.s3i+board.s4i=='':
moves = '2i5i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w2i)and w6i==''\
and board.s3i+board.s4i+board.s5i=='':
moves = '2i6i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w2i)and w6i==''\
and board.s3i+board.s4i+board.s5i=='':
moves = '2i6i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w2i)and w7i==''\
and board.s3i+board.s4i+board.s5i+board.s6i=='':
moves = '2i7i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w2i)and w7i==''\
and board.s3i+board.s4i+board.s5i+board.s6i=='':
moves = '2i7i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w2i)and w8i==''\
and board.s3i+board.s4i+board.s5i+board.s6i+board.s7i=='':
moves = '2i8i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w2i)and w8i==''\
and board.s3i+board.s4i+board.s5i+board.s6i+board.s7i=='':
moves = '2i8i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w2i)and w9i==''\
and board.s3i+board.s4i+board.s5i+board.s6i+board.s7i+board.s8i=='':
moves = '2i9i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w2i)and w9i==''\
and board.s3i+board.s4i+board.s5i+board.s6i+board.s7i+board.s8i=='':
moves = '2i9i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w2i)and w4g==''\
and board.s3h=='':
moves = '2i4g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w2i)and w5f==''\
and board.s3h+board.s4g=='':
moves = '2i5f+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w2i)and w6e==''\
and board.s3h+board.s4g+board.s5f=='':
moves = '2i6e+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w2i)and w7d==''\
and board.s3h+board.s4g+board.s5f+board.s6e=='':
moves = '2i7d+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w2i)and w8c==''\
and board.s3h+board.s4g+board.s5f+board.s6e+board.s7d=='':
moves = '2i8c+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w2i)and w9b==''\
and board.s3h+board.s4g+board.s5f+board.s6e+board.s7d+board.s8c=='':
moves = '2i9b+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w2i)and w4g==''\
and board.s3h=='':
moves = '2i4g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w2i)and w5f==''\
and board.s3h+board.s4g=='':
moves = '2i5f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w2i)and w6e==''\
and board.s3h+board.s4g+board.s5f=='':
moves = '2i6e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w2i)and w7d==''\
and board.s3h+board.s4g+board.s5f+board.s6e=='':
moves = '2i7d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w2i)and w8c==''\
and board.s3h+board.s4g+board.s5f+board.s6e+board.s7d=='':
moves = '2i8c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w2i)and w9b==''\
and board.s3h+board.s4g+board.s5f+board.s6e+board.s7d+board.s8c=='':
moves = '2i9b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.w3i !='':
if re.match(r'[gk+]',Wboard.w3i)and w2i=='':
moves = '3i2i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[gk+]',Wboard.w3i)and w4i=='':
moves = '3i4i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[gk+]',Wboard.w3i)and w3h=='':
moves = '3i3h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|\+b|s|k',Wboard.w3i)and w2h=='':
moves = '3i2h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|\+b|s|k',Wboard.w3i)and w4h=='':
moves = '3i4h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r',Wboard.w3i)and w2i=='':
moves = '3i2i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r',Wboard.w3i)and w4i=='':
moves = '3i4i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r',Wboard.w3i)and w3h=='':
moves = '3i3h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[bs]',Wboard.w3i)and w2h=='':
moves = '3i2h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[bs]',Wboard.w3i)and w4h=='':
moves = '3i4h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w3i)and w3g==''\
and board.s3h=='':
moves = '3i3g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w3i)and w3g==''\
and board.s3h=='':
moves = '3i3g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w3i)and w3f==''\
and board.s3h+board.s3g=='':
moves = '3i3f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w3i)and w3f==''\
and board.s3h+board.s3g=='':
moves = '3i3f+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w3i)and w3e==''\
and board.s3h+board.s3g+board.s3f=='':
moves = '3i3e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w3i)and w3e==''\
and board.s3h+board.s3g+board.s3f=='':
moves = '3i3e+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w3i)and w3d==''\
and board.s3h+board.s3g+board.s3f+board.s3e=='':
moves = '3i3d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w3i)and w3d==''\
and board.s3h+board.s3g+board.s3f+board.s3e=='':
moves = '3i3d+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w3i)and w3c==''\
and board.s3h+board.s3g+board.s3f+board.s3e+board.s3d=='':
moves = '3i3c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w3i)and w3c==''\
and board.s3h+board.s3g+board.s3f+board.s3e+board.s3d=='':
moves = '3i3c+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w3i)and w3b==''\
and board.s3h+board.s3g+board.s3f+board.s3e+board.s3d+board.s3c=='':
moves = '3i3b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w3i)and w3b==''\
and board.s3h+board.s3g+board.s3f+board.s3e+board.s3d+board.s3c=='':
moves = '3i3b+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w3i)and w3a==''\
and board.s3h+board.s3g+board.s3f+board.s3e+board.s3d+board.s3c+board.s3b=='':
moves = '3i3a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w3i)and w3a==''\
and board.s3h+board.s3g+board.s3f+board.s3e+board.s3d+board.s3c+board.s3b=='':
moves = '3i3a+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w3i)and w1i==''\
and board.s2i=='':
moves = '3i1i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w3i)and w1i==''\
and board.s2i=='':
moves = '3i1i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w3i)and w5i==''\
and board.s4i=='':
moves = '3i5i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w3i)and w5i==''\
and board.s4i=='':
moves = '3i5i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w3i)and w6i==''\
and board.s4i+board.s5i=='':
moves = '3i6i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w3i)and w6i==''\
and board.s4i+board.s5i=='':
moves = '3i6i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w3i)and w7i==''\
and board.s4i+board.s5i+board.s6i=='':
moves = '3i7i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w3i)and w7i==''\
and board.s4i+board.s5i+board.s6i=='':
moves = '3i7i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w3i)and w8i==''\
and board.s4i+board.s5i+board.s6i+board.s7i=='':
moves = '3i8i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
# --- Check-evasion candidates for the white piece on 3i (tail of the 3i group). ---
# Stanza pattern, repeated throughout this generator: when the piece on the
# source square matches the regex ('+' prefix = promoted piece), the
# destination square holds no white piece (wXX == '') and every square on the
# sliding path is empty (board.sXX all ''), the move is tried via kaihimore();
# it is kept in depth1 when oute.oute == 0 afterwards (presumably: the move
# resolves the check -- TODO confirm kaihimore()/oute semantics).  A trailing
# '+' on the move string denotes promotion.  Regexes are raw strings so that
# '\+' is not an invalid string escape.
if re.match('r', Wboard.w3i) and w8i == '' \
        and board.s4i + board.s5i + board.s6i + board.s7i == '':
    moves = '3i8i+'
    kaihimore(moves)
    if oute.oute == 0:
        depth1.append(moves)
if re.match(r'\+r', Wboard.w3i) and w9i == '' \
        and board.s4i + board.s5i + board.s6i + board.s7i + board.s8i == '':
    moves = '3i9i'
    kaihimore(moves)
    if oute.oute == 0:
        depth1.append(moves)
if re.match('r', Wboard.w3i) and w9i == '' \
        and board.s4i + board.s5i + board.s6i + board.s7i + board.s8i == '':
    moves = '3i9i+'
    kaihimore(moves)
    if oute.oute == 0:
        depth1.append(moves)
# Unpromoted bishop on 3i: diagonals 3i-2h-1g and 3i-4h-5g-6f-7e-8d-9c.
if re.match('b', Wboard.w3i) and w1g == '' \
        and board.s2h == '':
    moves = '3i1g+'
    kaihimore(moves)
    if oute.oute == 0:
        depth1.append(moves)
if re.match('b', Wboard.w3i) and w5g == '' \
        and board.s4h == '':
    moves = '3i5g+'
    kaihimore(moves)
    if oute.oute == 0:
        depth1.append(moves)
if re.match('b', Wboard.w3i) and w6f == '' \
        and board.s4h + board.s5g == '':
    moves = '3i6f+'
    kaihimore(moves)
    if oute.oute == 0:
        depth1.append(moves)
# FIX: the original guarded w6e, but the move lands on 7e (6e is not even on
# the 3i diagonal); test the actual destination square w7e.
if re.match('b', Wboard.w3i) and w7e == '' \
        and board.s4h + board.s5g + board.s6f == '':
    moves = '3i7e+'
    kaihimore(moves)
    if oute.oute == 0:
        depth1.append(moves)
# FIX: the original guarded w7d, but the move lands on 8d; test w8d.
if re.match('b', Wboard.w3i) and w8d == '' \
        and board.s4h + board.s5g + board.s6f + board.s7e == '':
    moves = '3i8d+'
    kaihimore(moves)
    if oute.oute == 0:
        depth1.append(moves)
if re.match('b', Wboard.w3i) and w9c == '' \
        and board.s4h + board.s5g + board.s6f + board.s7e + board.s8d == '':
    moves = '3i9c+'
    kaihimore(moves)
    if oute.oute == 0:
        depth1.append(moves)
# Promoted bishop (horse) on 3i: same diagonals, no further promotion.
if re.match(r'\+b', Wboard.w3i) and w1g == '' \
        and board.s2h == '':
    moves = '3i1g'
    kaihimore(moves)
    if oute.oute == 0:
        depth1.append(moves)
if re.match(r'\+b', Wboard.w3i) and w5g == '' \
        and board.s4h == '':
    moves = '3i5g'
    kaihimore(moves)
    if oute.oute == 0:
        depth1.append(moves)
if re.match(r'\+b', Wboard.w3i) and w6f == '' \
        and board.s4h + board.s5g == '':
    moves = '3i6f'
    kaihimore(moves)
    if oute.oute == 0:
        depth1.append(moves)
# FIX: destination is 7e, so guard w7e (original tested w6e).
if re.match(r'\+b', Wboard.w3i) and w7e == '' \
        and board.s4h + board.s5g + board.s6f == '':
    moves = '3i7e'
    kaihimore(moves)
    if oute.oute == 0:
        depth1.append(moves)
# FIX: destination is 8d, so guard w8d (original tested w7d).
if re.match(r'\+b', Wboard.w3i) and w8d == '' \
        and board.s4h + board.s5g + board.s6f + board.s7e == '':
    moves = '3i8d'
    kaihimore(moves)
    if oute.oute == 0:
        depth1.append(moves)
if re.match(r'\+b', Wboard.w3i) and w9c == '' \
        and board.s4h + board.s5g + board.s6f + board.s7e + board.s8d == '':
    moves = '3i9c'
    kaihimore(moves)
    if oute.oute == 0:
        depth1.append(moves)
# --- Check-evasion candidates for the white piece on 4i. ---
# Each stanza: if the piece on 4i matches the regex ('+' prefix = promoted),
# the destination square holds no white piece (wXX == '') and every square on
# the path is empty (board.sXX all ''), try the move via kaihimore() and keep
# it in depth1 when oute.oute == 0 (presumably: the check is resolved -- TODO
# confirm kaihimore()/oute semantics).  Trailing '+' on a move = promotion.
if Wboard.w4i != '':
    # One-step moves: gold/king/promoted pieces, then silver-like diagonals.
    if re.match(r'[gk+]', Wboard.w4i) and w3i == '':
        moves = '4i3i'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[gk+]', Wboard.w4i) and w5i == '':
        moves = '4i5i'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[gk+]', Wboard.w4i) and w4h == '':
        moves = '4i4h'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|\+b|s|k', Wboard.w4i) and w3h == '':
        moves = '4i3h'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|\+b|s|k', Wboard.w4i) and w5h == '':
        moves = '4i5h'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w4i) and w3i == '':
        moves = '4i3i+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w4i) and w5i == '':
        moves = '4i5i+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w4i) and w4h == '':
        moves = '4i4h+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[bs]', Wboard.w4i) and w3h == '':
        moves = '4i3h+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[bs]', Wboard.w4i) and w5h == '':
        moves = '4i5h+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # Rook / dragon sliding up the 4-file.
    if re.match(r'\+r', Wboard.w4i) and w4g == '' \
            and board.s4h == '':
        moves = '4i4g'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w4i) and w4g == '' \
            and board.s4h == '':
        moves = '4i4g+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r', Wboard.w4i) and w4f == '' \
            and board.s4h + board.s4g == '':
        moves = '4i4f'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w4i) and w4f == '' \
            and board.s4h + board.s4g == '':
        moves = '4i4f+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r', Wboard.w4i) and w4e == '' \
            and board.s4h + board.s4g + board.s4f == '':
        moves = '4i4e'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w4i) and w4e == '' \
            and board.s4h + board.s4g + board.s4f == '':
        moves = '4i4e+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r', Wboard.w4i) and w4d == '' \
            and board.s4h + board.s4g + board.s4f + board.s4e == '':
        moves = '4i4d'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w4i) and w4d == '' \
            and board.s4h + board.s4g + board.s4f + board.s4e == '':
        moves = '4i4d+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r', Wboard.w4i) and w4c == '' \
            and board.s4h + board.s4g + board.s4f + board.s4e + board.s4d == '':
        moves = '4i4c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w4i) and w4c == '' \
            and board.s4h + board.s4g + board.s4f + board.s4e + board.s4d == '':
        moves = '4i4c+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r', Wboard.w4i) and w4b == '' \
            and board.s4h + board.s4g + board.s4f + board.s4e + board.s4d + board.s4c == '':
        moves = '4i4b'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w4i) and w4b == '' \
            and board.s4h + board.s4g + board.s4f + board.s4e + board.s4d + board.s4c == '':
        moves = '4i4b+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r', Wboard.w4i) and w4a == '' \
            and board.s4h + board.s4g + board.s4f + board.s4e + board.s4d + board.s4c + board.s4b == '':
        moves = '4i4a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w4i) and w4a == '' \
            and board.s4h + board.s4g + board.s4f + board.s4e + board.s4d + board.s4c + board.s4b == '':
        moves = '4i4a+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # Rook / dragon sliding along the i-rank.
    if re.match(r'\+r', Wboard.w4i) and w1i == '' \
            and board.s2i + board.s3i == '':
        moves = '4i1i'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w4i) and w1i == '' \
            and board.s2i + board.s3i == '':
        moves = '4i1i+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r', Wboard.w4i) and w2i == '' \
            and board.s3i == '':
        moves = '4i2i'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w4i) and w2i == '' \
            and board.s3i == '':
        moves = '4i2i+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r', Wboard.w4i) and w6i == '' \
            and board.s5i == '':
        moves = '4i6i'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w4i) and w6i == '' \
            and board.s5i == '':
        moves = '4i6i+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r', Wboard.w4i) and w7i == '' \
            and board.s5i + board.s6i == '':
        moves = '4i7i'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w4i) and w7i == '' \
            and board.s5i + board.s6i == '':
        moves = '4i7i+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r', Wboard.w4i) and w8i == '' \
            and board.s5i + board.s6i + board.s7i == '':
        moves = '4i8i'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w4i) and w8i == '' \
            and board.s5i + board.s6i + board.s7i == '':
        moves = '4i8i+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r', Wboard.w4i) and w9i == '' \
            and board.s5i + board.s6i + board.s7i + board.s8i == '':
        moves = '4i9i'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w4i) and w9i == '' \
            and board.s5i + board.s6i + board.s7i + board.s8i == '':
        moves = '4i9i+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # Bishop / horse diagonals: 4i-5h-6g-7f-8e-9d and 4i-3h-2g-1f.
    if re.match(r'\+b', Wboard.w4i) and w6g == '' \
            and board.s5h == '':
        moves = '4i6g'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+b', Wboard.w4i) and w7f == '' \
            and board.s5h + board.s6g == '':
        moves = '4i7f'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+b', Wboard.w4i) and w8e == '' \
            and board.s5h + board.s6g + board.s7f == '':
        moves = '4i8e'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+b', Wboard.w4i) and w9d == '' \
            and board.s5h + board.s6g + board.s7f + board.s8e == '':
        moves = '4i9d'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # FIX: was re.match('B', ...) -- uppercase 'B' never matches the
    # lowercase white-piece letters used everywhere else, so the promoting
    # move 4i6g+ was unreachable.
    if re.match('b', Wboard.w4i) and w6g == '' \
            and board.s5h == '':
        moves = '4i6g+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('b', Wboard.w4i) and w7f == '' \
            and board.s5h + board.s6g == '':
        moves = '4i7f+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('b', Wboard.w4i) and w8e == '' \
            and board.s5h + board.s6g + board.s7f == '':
        moves = '4i8e+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('b', Wboard.w4i) and w9d == '' \
            and board.s5h + board.s6g + board.s7f + board.s8e == '':
        moves = '4i9d+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('b', Wboard.w4i) and w1f == '' \
            and board.s2g + board.s3h == '':
        moves = '4i1f+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('b', Wboard.w4i) and w2g == '' \
            and board.s3h == '':
        moves = '4i2g+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+b', Wboard.w4i) and w1f == '' \
            and board.s2g + board.s3h == '':
        moves = '4i1f'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+b', Wboard.w4i) and w2g == '' \
            and board.s3h == '':
        moves = '4i2g'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
# --- Check-evasion candidates for the white piece on 5i. ---
# Each stanza: if the piece on 5i matches the regex ('+' prefix = promoted),
# the destination square holds no white piece (wXX == '') and every square on
# the path is empty (board.sXX all ''), try the move via kaihimore() and keep
# it in depth1 when oute.oute == 0 (presumably: the check is resolved -- TODO
# confirm kaihimore()/oute semantics).  Trailing '+' on a move = promotion.
if Wboard.w5i != '':
    if re.match(r'[gk+]', Wboard.w5i) and w4i == '':
        moves = '5i4i'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[gk+]', Wboard.w5i) and w6i == '':
        moves = '5i6i'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[gk+]', Wboard.w5i) and w5h == '':
        moves = '5i5h'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|\+b|s|k', Wboard.w5i) and w4h == '':
        moves = '5i4h'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|\+b|s|k', Wboard.w5i) and w6h == '':
        moves = '5i6h'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w5i) and w4i == '':
        moves = '5i4i+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w5i) and w6i == '':
        moves = '5i6i+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w5i) and w5h == '':
        moves = '5i5h+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[bs]', Wboard.w5i) and w4h == '':
        moves = '5i4h+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[bs]', Wboard.w5i) and w6h == '':
        moves = '5i6h+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # Rook / dragon sliding up the 5-file.
    if re.match(r'\+r', Wboard.w5i) and w5g == '' \
            and board.s5h == '':
        moves = '5i5g'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w5i) and w5g == '' \
            and board.s5h == '':
        moves = '5i5g+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r', Wboard.w5i) and w5f == '' \
            and board.s5h + board.s5g == '':
        moves = '5i5f'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w5i) and w5f == '' \
            and board.s5h + board.s5g == '':
        moves = '5i5f+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r', Wboard.w5i) and w5e == '' \
            and board.s5h + board.s5g + board.s5f == '':
        moves = '5i5e'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w5i) and w5e == '' \
            and board.s5h + board.s5g + board.s5f == '':
        moves = '5i5e+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r', Wboard.w5i) and w5d == '' \
            and board.s5h + board.s5g + board.s5f + board.s5e == '':
        moves = '5i5d'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w5i) and w5d == '' \
            and board.s5h + board.s5g + board.s5f + board.s5e == '':
        moves = '5i5d+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r', Wboard.w5i) and w5c == '' \
            and board.s5h + board.s5g + board.s5f + board.s5e + board.s5d == '':
        moves = '5i5c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w5i) and w5c == '' \
            and board.s5h + board.s5g + board.s5f + board.s5e + board.s5d == '':
        moves = '5i5c+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r', Wboard.w5i) and w5b == '' \
            and board.s5h + board.s5g + board.s5f + board.s5e + board.s5d + board.s5c == '':
        moves = '5i5b'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w5i) and w5b == '' \
            and board.s5h + board.s5g + board.s5f + board.s5e + board.s5d + board.s5c == '':
        moves = '5i5b+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r', Wboard.w5i) and w5a == '' \
            and board.s5h + board.s5g + board.s5f + board.s5e + board.s5d + board.s5c + board.s5b == '':
        moves = '5i5a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w5i) and w5a == '' \
            and board.s5h + board.s5g + board.s5f + board.s5e + board.s5d + board.s5c + board.s5b == '':
        moves = '5i5a+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # Rook / dragon sliding along the i-rank.
    if re.match(r'\+r', Wboard.w5i) and w1i == '' \
            and board.s2i + board.s3i + board.s4i == '':
        moves = '5i1i'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w5i) and w1i == '' \
            and board.s2i + board.s3i + board.s4i == '':
        moves = '5i1i+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r', Wboard.w5i) and w2i == '' \
            and board.s3i + board.s4i == '':
        moves = '5i2i'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w5i) and w2i == '' \
            and board.s3i + board.s4i == '':
        moves = '5i2i+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r', Wboard.w5i) and w3i == '' \
            and board.s4i == '':
        moves = '5i3i'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w5i) and w3i == '' \
            and board.s4i == '':
        moves = '5i3i+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r', Wboard.w5i) and w7i == '' \
            and board.s6i == '':
        moves = '5i7i'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w5i) and w7i == '' \
            and board.s6i == '':
        moves = '5i7i+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r', Wboard.w5i) and w8i == '' \
            and board.s6i + board.s7i == '':
        moves = '5i8i'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w5i) and w8i == '' \
            and board.s6i + board.s7i == '':
        moves = '5i8i+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r', Wboard.w5i) and w9i == '' \
            and board.s6i + board.s7i + board.s8i == '':
        moves = '5i9i'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w5i) and w9i == '' \
            and board.s6i + board.s7i + board.s8i == '':
        moves = '5i9i+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # Bishop / horse diagonals: 5i-6h-7g-8f-9e and 5i-4h-3g-2f-1e.
    if re.match(r'\+b', Wboard.w5i) and w7g == '' \
            and board.s6h == '':
        moves = '5i7g'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+b', Wboard.w5i) and w8f == '' \
            and board.s6h + board.s7g == '':
        moves = '5i8f'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+b', Wboard.w5i) and w9e == '' \
            and board.s6h + board.s7g + board.s8f == '':
        moves = '5i9e'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # FIX: was re.match('B', ...) -- uppercase 'B' never matches the
    # lowercase white-piece letters, so the promoting move 5i7g+ was
    # unreachable.
    if re.match('b', Wboard.w5i) and w7g == '' \
            and board.s6h == '':
        moves = '5i7g+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('b', Wboard.w5i) and w8f == '' \
            and board.s6h + board.s7g == '':
        moves = '5i8f+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('b', Wboard.w5i) and w9e == '' \
            and board.s6h + board.s7g + board.s8f == '':
        moves = '5i9e+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('b', Wboard.w5i) and w2f == '' \
            and board.s3g + board.s4h == '':
        moves = '5i2f+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('b', Wboard.w5i) and w3g == '' \
            and board.s4h == '':
        moves = '5i3g+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+b', Wboard.w5i) and w2f == '' \
            and board.s3g + board.s4h == '':
        moves = '5i2f'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+b', Wboard.w5i) and w3g == '' \
            and board.s4h == '':
        moves = '5i3g'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+b', Wboard.w5i) and w1e == '' \
            and board.s4h + board.s3g + board.s2f == '':
        moves = '5i1e'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('b', Wboard.w5i) and w1e == '' \
            and board.s4h + board.s3g + board.s2f == '':
        moves = '5i1e+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
# --- Check-evasion candidates for the white piece on 6i. ---
# Each stanza: if the piece on 6i matches the regex ('+' prefix = promoted),
# the destination square holds no white piece (wXX == '') and every square on
# the path is empty (board.sXX all ''), try the move via kaihimore() and keep
# it in depth1 when oute.oute == 0 (presumably: the check is resolved -- TODO
# confirm kaihimore()/oute semantics).  Trailing '+' on a move = promotion.
if Wboard.w6i != '':
    if re.match(r'[gk+]', Wboard.w6i) and w5i == '':
        moves = '6i5i'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[gk+]', Wboard.w6i) and w7i == '':
        moves = '6i7i'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[gk+]', Wboard.w6i) and w6h == '':
        moves = '6i6h'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|\+b|s|k', Wboard.w6i) and w5h == '':
        moves = '6i5h'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|\+b|s|k', Wboard.w6i) and w7h == '':
        moves = '6i7h'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w6i) and w5i == '':
        moves = '6i5i+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w6i) and w7i == '':
        moves = '6i7i+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w6i) and w6h == '':
        moves = '6i6h+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[bs]', Wboard.w6i) and w5h == '':
        moves = '6i5h+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[bs]', Wboard.w6i) and w7h == '':
        moves = '6i7h+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # Rook / dragon sliding up the 6-file.
    if re.match(r'\+r', Wboard.w6i) and w6g == '' \
            and board.s6h == '':
        moves = '6i6g'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w6i) and w6g == '' \
            and board.s6h == '':
        moves = '6i6g+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r', Wboard.w6i) and w6f == '' \
            and board.s6h + board.s6g == '':
        moves = '6i6f'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w6i) and w6f == '' \
            and board.s6h + board.s6g == '':
        moves = '6i6f+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r', Wboard.w6i) and w6e == '' \
            and board.s6h + board.s6g + board.s6f == '':
        moves = '6i6e'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w6i) and w6e == '' \
            and board.s6h + board.s6g + board.s6f == '':
        moves = '6i6e+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r', Wboard.w6i) and w6d == '' \
            and board.s6h + board.s6g + board.s6f + board.s6e == '':
        moves = '6i6d'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w6i) and w6d == '' \
            and board.s6h + board.s6g + board.s6f + board.s6e == '':
        moves = '6i6d+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r', Wboard.w6i) and w6c == '' \
            and board.s6h + board.s6g + board.s6f + board.s6e + board.s6d == '':
        moves = '6i6c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w6i) and w6c == '' \
            and board.s6h + board.s6g + board.s6f + board.s6e + board.s6d == '':
        moves = '6i6c+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r', Wboard.w6i) and w6b == '' \
            and board.s6h + board.s6g + board.s6f + board.s6e + board.s6d + board.s6c == '':
        moves = '6i6b'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w6i) and w6b == '' \
            and board.s6h + board.s6g + board.s6f + board.s6e + board.s6d + board.s6c == '':
        moves = '6i6b+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r', Wboard.w6i) and w6a == '' \
            and board.s6h + board.s6g + board.s6f + board.s6e + board.s6d + board.s6c + board.s6b == '':
        moves = '6i6a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w6i) and w6a == '' \
            and board.s6h + board.s6g + board.s6f + board.s6e + board.s6d + board.s6c + board.s6b == '':
        moves = '6i6a+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # Rook / dragon sliding along the i-rank.
    if re.match(r'\+r', Wboard.w6i) and w9i == '' \
            and board.s8i + board.s7i == '':
        moves = '6i9i'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w6i) and w9i == '' \
            and board.s8i + board.s7i == '':
        moves = '6i9i+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r', Wboard.w6i) and w8i == '' \
            and board.s7i == '':
        moves = '6i8i'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w6i) and w8i == '' \
            and board.s7i == '':
        moves = '6i8i+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r', Wboard.w6i) and w4i == '' \
            and board.s5i == '':
        moves = '6i4i'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w6i) and w4i == '' \
            and board.s5i == '':
        moves = '6i4i+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r', Wboard.w6i) and w3i == '' \
            and board.s5i + board.s4i == '':
        moves = '6i3i'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w6i) and w3i == '' \
            and board.s5i + board.s4i == '':
        moves = '6i3i+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r', Wboard.w6i) and w2i == '' \
            and board.s5i + board.s4i + board.s3i == '':
        moves = '6i2i'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w6i) and w2i == '' \
            and board.s5i + board.s4i + board.s3i == '':
        moves = '6i2i+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r', Wboard.w6i) and w1i == '' \
            and board.s5i + board.s4i + board.s3i + board.s2i == '':
        moves = '6i1i'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w6i) and w1i == '' \
            and board.s5i + board.s4i + board.s3i + board.s2i == '':
        moves = '6i1i+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # Bishop / horse diagonals: 6i-5h-4g-3f-2e-1d and 6i-7h-8g-9f.
    if re.match(r'\+b', Wboard.w6i) and w4g == '' \
            and board.s5h == '':
        moves = '6i4g'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+b', Wboard.w6i) and w3f == '' \
            and board.s5h + board.s4g == '':
        moves = '6i3f'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+b', Wboard.w6i) and w2e == '' \
            and board.s5h + board.s4g + board.s3f == '':
        moves = '6i2e'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+b', Wboard.w6i) and w1d == '' \
            and board.s5h + board.s4g + board.s3f + board.s2e == '':
        moves = '6i1d'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # FIX: was re.match('B', ...) -- uppercase 'B' never matches the
    # lowercase white-piece letters, so the promoting move 6i4g+ was
    # unreachable.
    if re.match('b', Wboard.w6i) and w4g == '' \
            and board.s5h == '':
        moves = '6i4g+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('b', Wboard.w6i) and w3f == '' \
            and board.s5h + board.s4g == '':
        moves = '6i3f+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('b', Wboard.w6i) and w2e == '' \
            and board.s5h + board.s4g + board.s3f == '':
        moves = '6i2e+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('b', Wboard.w6i) and w1d == '' \
            and board.s5h + board.s4g + board.s3f + board.s2e == '':
        moves = '6i1d+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('b', Wboard.w6i) and w9f == '' \
            and board.s8g + board.s7h == '':
        moves = '6i9f+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('b', Wboard.w6i) and w8g == '' \
            and board.s7h == '':
        moves = '6i8g+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+b', Wboard.w6i) and w9f == '' \
            and board.s8g + board.s7h == '':
        moves = '6i9f'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+b', Wboard.w6i) and w8g == '' \
            and board.s7h == '':
        moves = '6i8g'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
# --- Check-evasion candidates for the white piece on 7i. ---
# Each stanza: if the piece on 7i matches the regex ('+' prefix = promoted),
# the destination square holds no white piece (wXX == '') and every square on
# the path is empty (board.sXX all ''), try the move via kaihimore() and keep
# it in depth1 when oute.oute == 0 (presumably: the check is resolved -- TODO
# confirm kaihimore()/oute semantics).  Trailing '+' on a move = promotion.
if Wboard.w7i != '':
    if re.match(r'[gk+]', Wboard.w7i) and w6i == '':
        moves = '7i6i'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[gk+]', Wboard.w7i) and w8i == '':
        moves = '7i8i'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[gk+]', Wboard.w7i) and w7h == '':
        moves = '7i7h'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|\+b|s|k', Wboard.w7i) and w6h == '':
        moves = '7i6h'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|\+b|s|k', Wboard.w7i) and w8h == '':
        moves = '7i8h'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w7i) and w6i == '':
        moves = '7i6i+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w7i) and w8i == '':
        moves = '7i8i+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w7i) and w7h == '':
        moves = '7i7h+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[bs]', Wboard.w7i) and w6h == '':
        moves = '7i6h+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[bs]', Wboard.w7i) and w8h == '':
        moves = '7i8h+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # Rook / dragon sliding up the 7-file.
    if re.match(r'\+r', Wboard.w7i) and w7g == '' \
            and board.s7h == '':
        moves = '7i7g'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w7i) and w7g == '' \
            and board.s7h == '':
        moves = '7i7g+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r', Wboard.w7i) and w7f == '' \
            and board.s7h + board.s7g == '':
        moves = '7i7f'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w7i) and w7f == '' \
            and board.s7h + board.s7g == '':
        moves = '7i7f+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r', Wboard.w7i) and w7e == '' \
            and board.s7h + board.s7g + board.s7f == '':
        moves = '7i7e'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w7i) and w7e == '' \
            and board.s7h + board.s7g + board.s7f == '':
        moves = '7i7e+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r', Wboard.w7i) and w7d == '' \
            and board.s7h + board.s7g + board.s7f + board.s7e == '':
        moves = '7i7d'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w7i) and w7d == '' \
            and board.s7h + board.s7g + board.s7f + board.s7e == '':
        moves = '7i7d+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r', Wboard.w7i) and w7c == '' \
            and board.s7h + board.s7g + board.s7f + board.s7e + board.s7d == '':
        moves = '7i7c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w7i) and w7c == '' \
            and board.s7h + board.s7g + board.s7f + board.s7e + board.s7d == '':
        moves = '7i7c+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r', Wboard.w7i) and w7b == '' \
            and board.s7h + board.s7g + board.s7f + board.s7e + board.s7d + board.s7c == '':
        moves = '7i7b'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w7i) and w7b == '' \
            and board.s7h + board.s7g + board.s7f + board.s7e + board.s7d + board.s7c == '':
        moves = '7i7b+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r', Wboard.w7i) and w7a == '' \
            and board.s7h + board.s7g + board.s7f + board.s7e + board.s7d + board.s7c + board.s7b == '':
        moves = '7i7a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w7i) and w7a == '' \
            and board.s7h + board.s7g + board.s7f + board.s7e + board.s7d + board.s7c + board.s7b == '':
        moves = '7i7a+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # Rook / dragon sliding along the i-rank.
    if re.match(r'\+r', Wboard.w7i) and w9i == '' \
            and board.s8i == '':
        moves = '7i9i'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w7i) and w9i == '' \
            and board.s8i == '':
        moves = '7i9i+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r', Wboard.w7i) and w5i == '' \
            and board.s6i == '':
        moves = '7i5i'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w7i) and w5i == '' \
            and board.s6i == '':
        moves = '7i5i+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r', Wboard.w7i) and w4i == '' \
            and board.s6i + board.s5i == '':
        moves = '7i4i'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w7i) and w4i == '' \
            and board.s6i + board.s5i == '':
        moves = '7i4i+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r', Wboard.w7i) and w3i == '' \
            and board.s6i + board.s5i + board.s4i == '':
        moves = '7i3i'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w7i) and w3i == '' \
            and board.s6i + board.s5i + board.s4i == '':
        moves = '7i3i+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r', Wboard.w7i) and w2i == '' \
            and board.s6i + board.s5i + board.s4i + board.s3i == '':
        moves = '7i2i'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w7i) and w2i == '' \
            and board.s6i + board.s5i + board.s4i + board.s3i == '':
        moves = '7i2i+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r', Wboard.w7i) and w1i == '' \
            and board.s6i + board.s5i + board.s4i + board.s3i + board.s2i == '':
        moves = '7i1i'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w7i) and w1i == '' \
            and board.s6i + board.s5i + board.s4i + board.s3i + board.s2i == '':
        moves = '7i1i+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # Bishop / horse diagonals: 7i-8h-9g and 7i-6h-5g-4f-3e-2d-1c.
    if re.match('b', Wboard.w7i) and w9g == '' \
            and board.s8h == '':
        moves = '7i9g+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('b', Wboard.w7i) and w5g == '' \
            and board.s6h == '':
        moves = '7i5g+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('b', Wboard.w7i) and w4f == '' \
            and board.s6h + board.s5g == '':
        moves = '7i4f+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # FIX: the original guarded w4e, but the move lands on 3e (4e is not on
    # the 7i diagonal); test the actual destination square w3e.
    if re.match('b', Wboard.w7i) and w3e == '' \
            and board.s6h + board.s5g + board.s4f == '':
        moves = '7i3e+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # FIX: the original guarded w3d, but the move lands on 2d; test w2d.
    if re.match('b', Wboard.w7i) and w2d == '' \
            and board.s6h + board.s5g + board.s4f + board.s3e == '':
        moves = '7i2d+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('b', Wboard.w7i) and w1c == '' \
            and board.s6h + board.s5g + board.s4f + board.s3e + board.s2d == '':
        moves = '7i1c+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+b', Wboard.w7i) and w9g == '' \
            and board.s8h == '':
        moves = '7i9g'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+b', Wboard.w7i) and w5g == '' \
            and board.s6h == '':
        moves = '7i5g'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+b', Wboard.w7i) and w4f == '' \
            and board.s6h + board.s5g == '':
        moves = '7i4f'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # FIX: destination is 3e, so guard w3e (original tested w4e).
    if re.match(r'\+b', Wboard.w7i) and w3e == '' \
            and board.s6h + board.s5g + board.s4f == '':
        moves = '7i3e'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # FIX: destination is 2d, so guard w2d (original tested w3d).
    if re.match(r'\+b', Wboard.w7i) and w2d == '' \
            and board.s6h + board.s5g + board.s4f + board.s3e == '':
        moves = '7i2d'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+b', Wboard.w7i) and w1c == '' \
            and board.s6h + board.s5g + board.s4f + board.s3e + board.s2d == '':
        moves = '7i1c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
# --- Check-evasion candidates for the white piece on 8i (group continues
# past this point). ---
# Each stanza: if the piece on 8i matches the regex ('+' prefix = promoted
# piece; lowercase letters are white pieces), the destination square holds no
# white piece (wXX == '') and every square on the sliding path is empty
# (board.sXX all ''), the move is tried via kaihimore(); it is kept in depth1
# when oute.oute == 0 afterwards (presumably: the move resolves the check --
# TODO confirm kaihimore()/oute semantics).  Trailing '+' = promotion.
if Wboard.w8i !='':
    # One-step moves (gold/king/promoted, then silver-like diagonals).
    if re.match(r'[gk+]',Wboard.w8i)and w7i=='':
        moves = '8i7i'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[gk+]',Wboard.w8i)and w9i=='':
        moves = '8i9i'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[gk+]',Wboard.w8i)and w8h=='':
        moves = '8i8h'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|\+b|s|k',Wboard.w8i)and w7h=='':
        moves = '8i7h'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|\+b|s|k',Wboard.w8i)and w9h=='':
        moves = '8i9h'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r',Wboard.w8i)and w7i=='':
        moves = '8i7i+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r',Wboard.w8i)and w9i=='':
        moves = '8i9i+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r',Wboard.w8i)and w8h=='':
        moves = '8i8h+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[bs]',Wboard.w8i)and w7h=='':
        moves = '8i7h+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[bs]',Wboard.w8i)and w9h=='':
        moves = '8i9h+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # Rook / dragon sliding up the 8-file.
    if re.match('\+r', Wboard.w8i)and w8g==''\
    and board.s8h=='':
        moves = '8i8g'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w8i)and w8g==''\
    and board.s8h=='':
        moves = '8i8g+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('\+r', Wboard.w8i)and w8f==''\
    and board.s8h+board.s8g=='':
        moves = '8i8f'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w8i)and w8f==''\
    and board.s8h+board.s8g=='':
        moves = '8i8f+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('\+r', Wboard.w8i)and w8e==''\
    and board.s8h+board.s8g+board.s8f=='':
        moves = '8i8e'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w8i)and w8e==''\
    and board.s8h+board.s8g+board.s8f=='':
        moves = '8i8e+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('\+r', Wboard.w8i)and w8d==''\
    and board.s8h+board.s8g+board.s8f+board.s8e=='':
        moves = '8i8d'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w8i)and w8d==''\
    and board.s8h+board.s8g+board.s8f+board.s8e=='':
        moves = '8i8d+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('\+r', Wboard.w8i)and w8c==''\
    and board.s8h+board.s8g+board.s8f+board.s8e+board.s8d=='':
        moves = '8i8c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w8i)and w8c==''\
    and board.s8h+board.s8g+board.s8f+board.s8e+board.s8d=='':
        moves = '8i8c+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('\+r', Wboard.w8i)and w8b==''\
    and board.s8h+board.s8g+board.s8f+board.s8e+board.s8d+board.s8c=='':
        moves = '8i8b'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w8i)and w8b==''\
    and board.s8h+board.s8g+board.s8f+board.s8e+board.s8d+board.s8c=='':
        moves = '8i8b+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('\+r', Wboard.w8i)and w8a==''\
    and board.s8h+board.s8g+board.s8f+board.s8e+board.s8d+board.s8c+board.s8b=='':
        moves = '8i8a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w8i)and w8a==''\
    and board.s8h+board.s8g+board.s8f+board.s8e+board.s8d+board.s8c+board.s8b=='':
        moves = '8i8a+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # Rook / dragon sliding along the i-rank.
    if re.match('\+r', Wboard.w8i)and w6i==''\
    and board.s7i=='':
        moves = '8i6i'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w8i)and w6i==''\
    and board.s7i=='':
        moves = '8i6i+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('\+r', Wboard.w8i)and w5i==''\
    and board.s7i+board.s6i=='':
        moves = '8i5i'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w8i)and w5i==''\
    and board.s7i+board.s6i=='':
        moves = '8i5i+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('\+r', Wboard.w8i)and w4i==''\
    and board.s7i+board.s6i+board.s5i=='':
        moves = '8i4i'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w8i)and w4i==''\
    and board.s7i+board.s6i+board.s5i=='':
        moves = '8i4i+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('\+r', Wboard.w8i)and w3i==''\
    and board.s7i+board.s6i+board.s5i+board.s4i=='':
        moves = '8i3i'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w8i)and w3i==''\
    and board.s7i+board.s6i+board.s5i+board.s4i=='':
        moves = '8i3i+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('\+r', Wboard.w8i)and w2i==''\
    and board.s7i+board.s6i+board.s5i+board.s4i+board.s3i=='':
        moves = '8i2i'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w8i)and w2i==''\
    and board.s7i+board.s6i+board.s5i+board.s4i+board.s3i=='':
        moves = '8i2i+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('\+r', Wboard.w8i)and w1i==''\
    and board.s7i+board.s6i+board.s5i+board.s4i+board.s3i+board.s2i=='':
        moves = '8i1i'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w8i)and w1i==''\
    and board.s7i+board.s6i+board.s5i+board.s4i+board.s3i+board.s2i=='':
        moves = '8i1i+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # Bishop diagonal 8i-7h-6g-5f-... (group continues beyond this window).
    if re.match('b',Wboard.w8i)and w6g==''\
    and board.s7h=='':
        moves = '8i6g+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('b',Wboard.w8i)and w5f==''\
    and board.s7h+board.s6g=='':
        moves = '8i5f+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
if re.match('b',Wboard.w8i)and w4e==''\
and board.s7h+board.s6g+board.s5f=='':
moves = '8i4e+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w8i)and w3d==''\
and board.s7h+board.s6g+board.s5f+board.s4e=='':
moves = '8i3d+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w8i)and w2c==''\
and board.s7h+board.s6g+board.s5f+board.s4e+board.s3d=='':
moves = '8i2c+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w8i)and w1b==''\
and board.s7h+board.s6g+board.s5f+board.s4e+board.s3d+board.s2c=='':
moves = '8i1b+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w8i)and w6g==''\
and board.s7h=='':
moves = '8i6g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w8i)and w5f==''\
and board.s7h+board.s6g=='':
moves = '8i5f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w8i)and w4e==''\
and board.s7h+board.s6g+board.s5f=='':
moves = '8i4e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w8i)and w3d==''\
and board.s7h+board.s6g+board.s5f+board.s4e=='':
moves = '8i3d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w8i)and w2c==''\
and board.s7h+board.s6g+board.s5f+board.s4e+board.s3d=='':
moves = '8i2c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w8i)and w1b==''\
and board.s7h+board.s6g+board.s5f+board.s4e+board.s3d+board.s2c=='':
moves = '8i1b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
# ---------------------------------------------------------------------------
# Evasion candidates for the white piece on corner square 9i.
# Inferred contract as in the sibling sections (TODO confirm): Wboard.w9i is
# the piece code on 9i; wXY == '' means no white piece on XY; board.sXY is the
# contents of a slid-over square; kaihimore(moves) sets oute.oute (0 == check
# evaded); accepted moves are appended to depth1.
# NOTE(review): '\+r' / '\+b' non-raw escapes work but are deprecated spelling.
# ---------------------------------------------------------------------------
if Wboard.w9i !='':
    # One-square steps.  From the 9i corner only gold/king/promoted pieces can
    # step to 8i or 9h; dragon/horse/silver/king reach the 8h diagonal.
    if re.match(r'[gk+]',Wboard.w9i)and w8i=='':
        moves = '9i8i'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[gk+]',Wboard.w9i)and w9h=='':
        moves = '9i9h'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|\+b|s|k',Wboard.w9i)and w8h=='':
        moves = '9i8h'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # Promoting one-square steps (rook/bishop/silver variants).
    if re.match('r',Wboard.w9i)and w8i=='':
        moves = '9i8i+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r',Wboard.w9i)and w9h=='':
        moves = '9i9h+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[bs]',Wboard.w9i)and w8h=='':
        moves = '9i8h+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # Dragon (plain) / rook (promoting) straight up file 9 to 9g..9a.
    if re.match('\+r', Wboard.w9i)and w9g==''\
            and board.s9h=='':
        moves = '9i9g'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w9i)and w9g==''\
            and board.s9h=='':
        moves = '9i9g+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('\+r', Wboard.w9i)and w9f==''\
            and board.s9h+board.s9g=='':
        moves = '9i9f'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w9i)and w9f==''\
            and board.s9h+board.s9g=='':
        moves = '9i9f+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('\+r', Wboard.w9i)and w9e==''\
            and board.s9h+board.s9g+board.s9f=='':
        moves = '9i9e'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w9i)and w9e==''\
            and board.s9h+board.s9g+board.s9f=='':
        moves = '9i9e+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('\+r', Wboard.w9i)and w9d==''\
            and board.s9h+board.s9g+board.s9f+board.s9e=='':
        moves = '9i9d'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w9i)and w9d==''\
            and board.s9h+board.s9g+board.s9f+board.s9e=='':
        moves = '9i9d+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('\+r', Wboard.w9i)and w9c==''\
            and board.s9h+board.s9g+board.s9f+board.s9e+board.s9d=='':
        moves = '9i9c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w9i)and w9c==''\
            and board.s9h+board.s9g+board.s9f+board.s9e+board.s9d=='':
        moves = '9i9c+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('\+r', Wboard.w9i)and w9b==''\
            and board.s9h+board.s9g+board.s9f+board.s9e+board.s9d+board.s9c=='':
        moves = '9i9b'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w9i)and w9b==''\
            and board.s9h+board.s9g+board.s9f+board.s9e+board.s9d+board.s9c=='':
        moves = '9i9b+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('\+r', Wboard.w9i)and w9a==''\
            and board.s9h+board.s9g+board.s9f+board.s9e+board.s9d+board.s9c+board.s9b=='':
        moves = '9i9a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w9i)and w9a==''\
            and board.s9h+board.s9g+board.s9f+board.s9e+board.s9d+board.s9c+board.s9b=='':
        moves = '9i9a+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # Dragon / rook sideways along rank i to 7i..1i.
    if re.match('\+r', Wboard.w9i)and w7i==''\
            and board.s8i=='':
        moves = '9i7i'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w9i)and w7i==''\
            and board.s8i=='':
        moves = '9i7i+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('\+r', Wboard.w9i)and w6i==''\
            and board.s8i+board.s7i=='':
        moves = '9i6i'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w9i)and w6i==''\
            and board.s8i+board.s7i=='':
        moves = '9i6i+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('\+r', Wboard.w9i)and w5i==''\
            and board.s8i+board.s7i+board.s6i=='':
        moves = '9i5i'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w9i)and w5i==''\
            and board.s8i+board.s7i+board.s6i=='':
        moves = '9i5i+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('\+r', Wboard.w9i)and w4i==''\
            and board.s8i+board.s7i+board.s6i+board.s5i=='':
        moves = '9i4i'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w9i)and w4i==''\
            and board.s8i+board.s7i+board.s6i+board.s5i=='':
        moves = '9i4i+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('\+r', Wboard.w9i)and w3i==''\
            and board.s8i+board.s7i+board.s6i+board.s5i+board.s4i=='':
        moves = '9i3i'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w9i)and w3i==''\
            and board.s8i+board.s7i+board.s6i+board.s5i+board.s4i=='':
        moves = '9i3i+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('\+r', Wboard.w9i)and w2i==''\
            and board.s8i+board.s7i+board.s6i+board.s5i+board.s4i+board.s3i=='':
        moves = '9i2i'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w9i)and w2i==''\
            and board.s8i+board.s7i+board.s6i+board.s5i+board.s4i+board.s3i=='':
        moves = '9i2i+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('\+r', Wboard.w9i)and w1i==''\
            and board.s8i+board.s7i+board.s6i+board.s5i+board.s4i+board.s3i+board.s2i=='':
        moves = '9i1i'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('r', Wboard.w9i)and w1i==''\
            and board.s8i+board.s7i+board.s6i+board.s5i+board.s4i+board.s3i+board.s2i=='':
        moves = '9i1i+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # Bishop on the 9i-1a long diagonal, always promoting.
    if re.match('b',Wboard.w9i)and w7g==''\
            and board.s8h=='':
        moves = '9i7g+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('b',Wboard.w9i)and w6f==''\
            and board.s8h+board.s7g=='':
        moves = '9i6f+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('b',Wboard.w9i)and w5e==''\
            and board.s8h+board.s7g+board.s6f=='':
        moves = '9i5e+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('b',Wboard.w9i)and w4d==''\
            and board.s8h+board.s7g+board.s6f+board.s5e=='':
        moves = '9i4d+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('b',Wboard.w9i)and w3c==''\
            and board.s8h+board.s7g+board.s6f+board.s5e+board.s4d=='':
        moves = '9i3c+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('b',Wboard.w9i)and w2b==''\
            and board.s8h+board.s7g+board.s6f+board.s5e+board.s4d+board.s3c=='':
        moves = '9i2b+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('b',Wboard.w9i)and w1a==''\
            and board.s8h+board.s7g+board.s6f+board.s5e+board.s4d+board.s3c+board.s2b=='':
        moves = '9i1a+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # Horse: same diagonal slides without promotion.
    if re.match('\+b', Wboard.w9i)and w7g==''\
            and board.s8h=='':
        moves = '9i7g'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('\+b', Wboard.w9i)and w6f==''\
            and board.s8h+board.s7g=='':
        moves = '9i6f'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('\+b', Wboard.w9i)and w5e==''\
            and board.s8h+board.s7g+board.s6f=='':
        moves = '9i5e'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('\+b', Wboard.w9i)and w4d==''\
            and board.s8h+board.s7g+board.s6f+board.s5e=='':
        moves = '9i4d'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('\+b', Wboard.w9i)and w3c==''\
            and board.s8h+board.s7g+board.s6f+board.s5e+board.s4d=='':
        moves = '9i3c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('\+b', Wboard.w9i)and w2b==''\
            and board.s8h+board.s7g+board.s6f+board.s5e+board.s4d+board.s3c=='':
        moves = '9i2b'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('\+b', Wboard.w9i)and w1a==''\
            and board.s8h+board.s7g+board.s6f+board.s5e+board.s4d+board.s3c+board.s2b=='':
        moves = '9i1a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
# ---------------------------------------------------------------------------
# Evasion candidates for the white piece on 1h.
# Inferred contract (TODO confirm): Wboard.w1h is the piece code on 1h
# (p/l/n/s/g/b/r/k, '+'-prefixed when promoted); wXY == '' means no white
# piece on XY (destination not self-blocked); board.sXY is the contents of a
# slid-over square ('' == empty); kaihimore(moves) tries the move and sets
# oute.oute (0 == the check is evaded); survivors are appended to depth1.
# BUG FIX in this section: the 1h->1f slide originally required
# board.s1e == '' -- but 1e lies *beyond* the destination; the square slid
# over is 1g (cf. the correct analogues 9i->9g checking s9h and 1h->1e
# checking s1g+s1f).  Both 1f units below now check board.s1g.
# Regexes are also raw strings now ('\+' in a non-raw literal is a deprecated
# invalid escape); behavior is unchanged.
# ---------------------------------------------------------------------------
if Wboard.w1h != '':
    # One-square steps.
    if re.match(r'[sgk+]', Wboard.w1h) and w1i == '':
        moves = '1h1i'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[sgk+]', Wboard.w1h) and w2i == '':
        moves = '1h2i'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[gk+]', Wboard.w1h) and w2h == '':
        moves = '1h2h'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[gk+]', Wboard.w1h) and w1g == '':
        moves = '1h1g'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|\+b|s|k', Wboard.w1h) and w2g == '':
        moves = '1h2g'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # Promoting one-square steps.
    if re.match(r'[plsr]', Wboard.w1h) and w1i == '':
        moves = '1h1i+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[bs]', Wboard.w1h) and w2i == '':
        moves = '1h2i+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'r', Wboard.w1h) and w2h == '':
        moves = '1h2h+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'r', Wboard.w1h) and w1g == '':
        moves = '1h1g+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[bs]', Wboard.w1h) and w2g == '':
        moves = '1h2g+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # Dragon (plain) / rook (promoting) straight up file 1 to 1f..1a.
    # FIXED: path square for the two-step slide is 1g, not 1e.
    if (re.match(r'\+r', Wboard.w1h) and w1f == ''
            and board.s1g == ''):
        moves = '1h1f'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'r', Wboard.w1h) and w1f == ''
            and board.s1g == ''):
        moves = '1h1f+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r', Wboard.w1h) and w1e == ''
            and board.s1g + board.s1f == ''):
        moves = '1h1e'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'r', Wboard.w1h) and w1e == ''
            and board.s1g + board.s1f == ''):
        moves = '1h1e+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r', Wboard.w1h) and w1d == ''
            and board.s1g + board.s1f + board.s1e == ''):
        moves = '1h1d'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'r', Wboard.w1h) and w1d == ''
            and board.s1g + board.s1f + board.s1e == ''):
        moves = '1h1d+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r', Wboard.w1h) and w1c == ''
            and board.s1g + board.s1f + board.s1e + board.s1d == ''):
        moves = '1h1c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'r', Wboard.w1h) and w1c == ''
            and board.s1g + board.s1f + board.s1e + board.s1d == ''):
        moves = '1h1c+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r', Wboard.w1h) and w1b == ''
            and board.s1g + board.s1f + board.s1e + board.s1d + board.s1c == ''):
        moves = '1h1b'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'r', Wboard.w1h) and w1b == ''
            and board.s1g + board.s1f + board.s1e + board.s1d + board.s1c == ''):
        moves = '1h1b+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r', Wboard.w1h) and w1a == ''
            and board.s1g + board.s1f + board.s1e + board.s1d + board.s1c + board.s1b == ''):
        moves = '1h1a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'r', Wboard.w1h) and w1a == ''
            and board.s1g + board.s1f + board.s1e + board.s1d + board.s1c + board.s1b == ''):
        moves = '1h1a+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # Dragon / rook sideways along rank h to 3h..9h.
    if (re.match(r'\+r', Wboard.w1h) and w3h == ''
            and board.s2h == ''):
        moves = '1h3h'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'r', Wboard.w1h) and w3h == ''
            and board.s2h == ''):
        moves = '1h3h+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r', Wboard.w1h) and w4h == ''
            and board.s2h + board.s3h == ''):
        moves = '1h4h'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'r', Wboard.w1h) and w4h == ''
            and board.s2h + board.s3h == ''):
        moves = '1h4h+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r', Wboard.w1h) and w5h == ''
            and board.s2h + board.s3h + board.s4h == ''):
        moves = '1h5h'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'r', Wboard.w1h) and w5h == ''
            and board.s2h + board.s3h + board.s4h == ''):
        moves = '1h5h+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r', Wboard.w1h) and w6h == ''
            and board.s2h + board.s3h + board.s4h + board.s5h == ''):
        moves = '1h6h'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'r', Wboard.w1h) and w6h == ''
            and board.s2h + board.s3h + board.s4h + board.s5h == ''):
        moves = '1h6h+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r', Wboard.w1h) and w7h == ''
            and board.s2h + board.s3h + board.s4h + board.s5h + board.s6h == ''):
        moves = '1h7h'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'r', Wboard.w1h) and w7h == ''
            and board.s2h + board.s3h + board.s4h + board.s5h + board.s6h == ''):
        moves = '1h7h+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r', Wboard.w1h) and w8h == ''
            and board.s2h + board.s3h + board.s4h + board.s5h + board.s6h + board.s7h == ''):
        moves = '1h8h'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'r', Wboard.w1h) and w8h == ''
            and board.s2h + board.s3h + board.s4h + board.s5h + board.s6h + board.s7h == ''):
        moves = '1h8h+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r', Wboard.w1h) and w9h == ''
            and board.s2h + board.s3h + board.s4h + board.s5h + board.s6h + board.s7h + board.s8h == ''):
        moves = '1h9h'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'r', Wboard.w1h) and w9h == ''
            and board.s2h + board.s3h + board.s4h + board.s5h + board.s6h + board.s7h + board.s8h == ''):
        moves = '1h9h+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # Bishop on the 1h-8a diagonal, always promoting.
    if (re.match(r'b', Wboard.w1h) and w3f == ''
            and board.s2g == ''):
        moves = '1h3f+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'b', Wboard.w1h) and w4e == ''
            and board.s2g + board.s3f == ''):
        moves = '1h4e+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'b', Wboard.w1h) and w5d == ''
            and board.s2g + board.s3f + board.s4e == ''):
        moves = '1h5d+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'b', Wboard.w1h) and w6c == ''
            and board.s2g + board.s3f + board.s4e + board.s5d == ''):
        moves = '1h6c+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'b', Wboard.w1h) and w7b == ''
            and board.s2g + board.s3f + board.s4e + board.s5d + board.s6c == ''):
        moves = '1h7b+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'b', Wboard.w1h) and w8a == ''
            and board.s2g + board.s3f + board.s4e + board.s5d + board.s6c + board.s7b == ''):
        moves = '1h8a+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # Horse: the same diagonal slides without promotion.
    if (re.match(r'\+b', Wboard.w1h) and w3f == ''
            and board.s2g == ''):
        moves = '1h3f'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+b', Wboard.w1h) and w4e == ''
            and board.s2g + board.s3f == ''):
        moves = '1h4e'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+b', Wboard.w1h) and w5d == ''
            and board.s2g + board.s3f + board.s4e == ''):
        moves = '1h5d'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+b', Wboard.w1h) and w6c == ''
            and board.s2g + board.s3f + board.s4e + board.s5d == ''):
        moves = '1h6c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+b', Wboard.w1h) and w7b == ''
            and board.s2g + board.s3f + board.s4e + board.s5d + board.s6c == ''):
        moves = '1h7b'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+b', Wboard.w1h) and w8a == ''
            and board.s2g + board.s3f + board.s4e + board.s5d + board.s6c + board.s7b == ''):
        moves = '1h8a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
# ---------------------------------------------------------------------------
# Evasion candidates for the white piece on 2h.
# Same inferred contract as the sibling square sections (TODO confirm):
# Wboard.w2h = piece code on 2h; wXY == '' = no white piece on XY;
# board.sXY = contents of a slid-over square; kaihimore(moves) sets
# oute.oute (0 == check evaded); survivors go into depth1.
# BUG FIX: the 2h->2f slide originally required board.s2e == '' -- 2e is the
# square *beyond* the destination; the square actually slid over is 2g
# (cf. 2h->2e correctly checking s2g+s2f).  Both 2f units now check board.s2g.
# Regexes converted to raw strings (deprecated '\+' escape); no behavior change.
# ---------------------------------------------------------------------------
if Wboard.w2h != '':
    # One-square steps.
    if re.match(r'[sgk+]', Wboard.w2h) and w2i == '':
        moves = '2h2i'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[sgk+]', Wboard.w2h) and w1i == '':
        moves = '2h1i'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[sgk+]', Wboard.w2h) and w3i == '':
        moves = '2h3i'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[gk+]', Wboard.w2h) and w1h == '':
        moves = '2h1h'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[gk+]', Wboard.w2h) and w3h == '':
        moves = '2h3h'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[gk+]', Wboard.w2h) and w2g == '':
        moves = '2h2g'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|\+b|s|k', Wboard.w2h) and w1g == '':
        moves = '2h1g'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|\+b|s|k', Wboard.w2h) and w3g == '':
        moves = '2h3g'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # Promoting one-square steps.
    if re.match(r'[plsr]', Wboard.w2h) and w2i == '':
        moves = '2h2i+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[bs]', Wboard.w2h) and w1i == '':
        moves = '2h1i+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[bs]', Wboard.w2h) and w3i == '':
        moves = '2h3i+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'r', Wboard.w2h) and w1h == '':
        moves = '2h1h+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'r', Wboard.w2h) and w3h == '':
        moves = '2h3h+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'r', Wboard.w2h) and w2g == '':
        moves = '2h2g+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[bs]', Wboard.w2h) and w1g == '':
        moves = '2h1g+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[bs]', Wboard.w2h) and w3g == '':
        moves = '2h3g+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # Dragon (plain) / rook (promoting) straight up file 2 to 2f..2a.
    # FIXED: path square for the two-step slide is 2g, not 2e.
    if (re.match(r'\+r', Wboard.w2h) and w2f == ''
            and board.s2g == ''):
        moves = '2h2f'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'r', Wboard.w2h) and w2f == ''
            and board.s2g == ''):
        moves = '2h2f+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r', Wboard.w2h) and w2e == ''
            and board.s2g + board.s2f == ''):
        moves = '2h2e'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'r', Wboard.w2h) and w2e == ''
            and board.s2g + board.s2f == ''):
        moves = '2h2e+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r', Wboard.w2h) and w2d == ''
            and board.s2g + board.s2f + board.s2e == ''):
        moves = '2h2d'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'r', Wboard.w2h) and w2d == ''
            and board.s2g + board.s2f + board.s2e == ''):
        moves = '2h2d+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r', Wboard.w2h) and w2c == ''
            and board.s2g + board.s2f + board.s2e + board.s2d == ''):
        moves = '2h2c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'r', Wboard.w2h) and w2c == ''
            and board.s2g + board.s2f + board.s2e + board.s2d == ''):
        moves = '2h2c+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r', Wboard.w2h) and w2b == ''
            and board.s2g + board.s2f + board.s2e + board.s2d + board.s2c == ''):
        moves = '2h2b'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'r', Wboard.w2h) and w2b == ''
            and board.s2g + board.s2f + board.s2e + board.s2d + board.s2c == ''):
        moves = '2h2b+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r', Wboard.w2h) and w2a == ''
            and board.s2g + board.s2f + board.s2e + board.s2d + board.s2c + board.s2b == ''):
        moves = '2h2a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'r', Wboard.w2h) and w2a == ''
            and board.s2g + board.s2f + board.s2e + board.s2d + board.s2c + board.s2b == ''):
        moves = '2h2a+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # Dragon / rook sideways along rank h to 4h..9h.
    if (re.match(r'\+r', Wboard.w2h) and w4h == ''
            and board.s3h == ''):
        moves = '2h4h'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'r', Wboard.w2h) and w4h == ''
            and board.s3h == ''):
        moves = '2h4h+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r', Wboard.w2h) and w5h == ''
            and board.s3h + board.s4h == ''):
        moves = '2h5h'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'r', Wboard.w2h) and w5h == ''
            and board.s3h + board.s4h == ''):
        moves = '2h5h+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r', Wboard.w2h) and w6h == ''
            and board.s3h + board.s4h + board.s5h == ''):
        moves = '2h6h'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'r', Wboard.w2h) and w6h == ''
            and board.s3h + board.s4h + board.s5h == ''):
        moves = '2h6h+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r', Wboard.w2h) and w7h == ''
            and board.s3h + board.s4h + board.s5h + board.s6h == ''):
        moves = '2h7h'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'r', Wboard.w2h) and w7h == ''
            and board.s3h + board.s4h + board.s5h + board.s6h == ''):
        moves = '2h7h+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r', Wboard.w2h) and w8h == ''
            and board.s3h + board.s4h + board.s5h + board.s6h + board.s7h == ''):
        moves = '2h8h'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'r', Wboard.w2h) and w8h == ''
            and board.s3h + board.s4h + board.s5h + board.s6h + board.s7h == ''):
        moves = '2h8h+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r', Wboard.w2h) and w9h == ''
            and board.s3h + board.s4h + board.s5h + board.s6h + board.s7h + board.s8h == ''):
        moves = '2h9h'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'r', Wboard.w2h) and w9h == ''
            and board.s3h + board.s4h + board.s5h + board.s6h + board.s7h + board.s8h == ''):
        moves = '2h9h+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # Bishop on the 2h-9a diagonal, always promoting.
    if (re.match(r'b', Wboard.w2h) and w4f == ''
            and board.s3g == ''):
        moves = '2h4f+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'b', Wboard.w2h) and w5e == ''
            and board.s3g + board.s4f == ''):
        moves = '2h5e+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'b', Wboard.w2h) and w6d == ''
            and board.s3g + board.s4f + board.s5e == ''):
        moves = '2h6d+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'b', Wboard.w2h) and w7c == ''
            and board.s3g + board.s4f + board.s5e + board.s6d == ''):
        moves = '2h7c+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'b', Wboard.w2h) and w8b == ''
            and board.s3g + board.s4f + board.s5e + board.s6d + board.s7c == ''):
        moves = '2h8b+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'b', Wboard.w2h) and w9a == ''
            and board.s3g + board.s4f + board.s5e + board.s6d + board.s7c + board.s8b == ''):
        moves = '2h9a+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # Horse: the same diagonal slides without promotion.
    if (re.match(r'\+b', Wboard.w2h) and w4f == ''
            and board.s3g == ''):
        moves = '2h4f'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+b', Wboard.w2h) and w5e == ''
            and board.s3g + board.s4f == ''):
        moves = '2h5e'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+b', Wboard.w2h) and w6d == ''
            and board.s3g + board.s4f + board.s5e == ''):
        moves = '2h6d'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+b', Wboard.w2h) and w7c == ''
            and board.s3g + board.s4f + board.s5e + board.s6d == ''):
        moves = '2h7c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+b', Wboard.w2h) and w8b == ''
            and board.s3g + board.s4f + board.s5e + board.s6d + board.s7c == ''):
        moves = '2h8b'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+b', Wboard.w2h) and w9a == ''
            and board.s3g + board.s4f + board.s5e + board.s6d + board.s7c + board.s8b == ''):
        moves = '2h9a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
# ---------------------------------------------------------------------------
# Evasion candidates for the white piece on 3h.
# Same inferred contract as the sibling square sections (TODO confirm):
# Wboard.w3h = piece code on 3h; wXY == '' = no white piece on XY;
# board.sXY = contents of a slid-over square; kaihimore(moves) sets
# oute.oute (0 == check evaded); survivors go into depth1.
# BUG FIX: the 3h->3f slide originally required board.s3e == '' -- 3e is the
# square *beyond* the destination; the square actually slid over is 3g
# (cf. 3h->3e correctly checking s3g+s3f).  Both 3f units now check board.s3g.
# Regexes converted to raw strings (deprecated '\+' escape); no behavior change.
# ---------------------------------------------------------------------------
if Wboard.w3h != '':
    # One-square steps.
    if re.match(r'[sgk+]', Wboard.w3h) and w3i == '':
        moves = '3h3i'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[sgk+]', Wboard.w3h) and w2i == '':
        moves = '3h2i'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[sgk+]', Wboard.w3h) and w4i == '':
        moves = '3h4i'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[gk+]', Wboard.w3h) and w2h == '':
        moves = '3h2h'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[gk+]', Wboard.w3h) and w4h == '':
        moves = '3h4h'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[gk+]', Wboard.w3h) and w3g == '':
        moves = '3h3g'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|\+b|s|k', Wboard.w3h) and w2g == '':
        moves = '3h2g'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|\+b|s|k', Wboard.w3h) and w4g == '':
        moves = '3h4g'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # Promoting one-square steps.
    if re.match(r'[plsr]', Wboard.w3h) and w3i == '':
        moves = '3h3i+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[bs]', Wboard.w3h) and w2i == '':
        moves = '3h2i+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[bs]', Wboard.w3h) and w4i == '':
        moves = '3h4i+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'r', Wboard.w3h) and w2h == '':
        moves = '3h2h+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'r', Wboard.w3h) and w4h == '':
        moves = '3h4h+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'r', Wboard.w3h) and w3g == '':
        moves = '3h3g+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[bs]', Wboard.w3h) and w2g == '':
        moves = '3h2g+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[bs]', Wboard.w3h) and w4g == '':
        moves = '3h4g+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # Dragon (plain) / rook (promoting) straight up file 3 to 3f..3a.
    # FIXED: path square for the two-step slide is 3g, not 3e.
    if (re.match(r'\+r', Wboard.w3h) and w3f == ''
            and board.s3g == ''):
        moves = '3h3f'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'r', Wboard.w3h) and w3f == ''
            and board.s3g == ''):
        moves = '3h3f+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r', Wboard.w3h) and w3e == ''
            and board.s3g + board.s3f == ''):
        moves = '3h3e'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'r', Wboard.w3h) and w3e == ''
            and board.s3g + board.s3f == ''):
        moves = '3h3e+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r', Wboard.w3h) and w3d == ''
            and board.s3g + board.s3f + board.s3e == ''):
        moves = '3h3d'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'r', Wboard.w3h) and w3d == ''
            and board.s3g + board.s3f + board.s3e == ''):
        moves = '3h3d+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r', Wboard.w3h) and w3c == ''
            and board.s3g + board.s3f + board.s3e + board.s3d == ''):
        moves = '3h3c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'r', Wboard.w3h) and w3c == ''
            and board.s3g + board.s3f + board.s3e + board.s3d == ''):
        moves = '3h3c+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r', Wboard.w3h) and w3b == ''
            and board.s3g + board.s3f + board.s3e + board.s3d + board.s3c == ''):
        moves = '3h3b'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'r', Wboard.w3h) and w3b == ''
            and board.s3g + board.s3f + board.s3e + board.s3d + board.s3c == ''):
        moves = '3h3b+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r', Wboard.w3h) and w3a == ''
            and board.s3g + board.s3f + board.s3e + board.s3d + board.s3c + board.s3b == ''):
        moves = '3h3a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'r', Wboard.w3h) and w3a == ''
            and board.s3g + board.s3f + board.s3e + board.s3d + board.s3c + board.s3b == ''):
        moves = '3h3a+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # Dragon / rook sideways along rank h: left to 1h, right to 5h..9h.
    if (re.match(r'\+r', Wboard.w3h) and w1h == ''
            and board.s2h == ''):
        moves = '3h1h'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'r', Wboard.w3h) and w1h == ''
            and board.s2h == ''):
        moves = '3h1h+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r', Wboard.w3h) and w5h == ''
            and board.s4h == ''):
        moves = '3h5h'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'r', Wboard.w3h) and w5h == ''
            and board.s4h == ''):
        moves = '3h5h+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r', Wboard.w3h) and w6h == ''
            and board.s4h + board.s5h == ''):
        moves = '3h6h'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'r', Wboard.w3h) and w6h == ''
            and board.s4h + board.s5h == ''):
        moves = '3h6h+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r', Wboard.w3h) and w7h == ''
            and board.s4h + board.s5h + board.s6h == ''):
        moves = '3h7h'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'r', Wboard.w3h) and w7h == ''
            and board.s4h + board.s5h + board.s6h == ''):
        moves = '3h7h+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r', Wboard.w3h) and w8h == ''
            and board.s4h + board.s5h + board.s6h + board.s7h == ''):
        moves = '3h8h'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'r', Wboard.w3h) and w8h == ''
            and board.s4h + board.s5h + board.s6h + board.s7h == ''):
        moves = '3h8h+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r', Wboard.w3h) and w9h == ''
            and board.s4h + board.s5h + board.s6h + board.s7h + board.s8h == ''):
        moves = '3h9h'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'r', Wboard.w3h) and w9h == ''
            and board.s4h + board.s5h + board.s6h + board.s7h + board.s8h == ''):
        moves = '3h9h+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # Bishop toward 9b (promoting).
    if (re.match(r'b', Wboard.w3h) and w5f == ''
            and board.s4g == ''):
        moves = '3h5f+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'b', Wboard.w3h) and w6e == ''
            and board.s4g + board.s5f == ''):
        moves = '3h6e+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'b', Wboard.w3h) and w7d == ''
            and board.s4g + board.s5f + board.s6e == ''):
        moves = '3h7d+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'b', Wboard.w3h) and w8c == ''
            and board.s4g + board.s5f + board.s6e + board.s7d == ''):
        moves = '3h8c+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'b', Wboard.w3h) and w9b == ''
            and board.s4g + board.s5f + board.s6e + board.s7d + board.s8c == ''):
        moves = '3h9b+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # Horse on the same diagonal, without promotion.
    if (re.match(r'\+b', Wboard.w3h) and w5f == ''
            and board.s4g == ''):
        moves = '3h5f'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+b', Wboard.w3h) and w6e == ''
            and board.s4g + board.s5f == ''):
        moves = '3h6e'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+b', Wboard.w3h) and w7d == ''
            and board.s4g + board.s5f + board.s6e == ''):
        moves = '3h7d'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+b', Wboard.w3h) and w8c == ''
            and board.s4g + board.s5f + board.s6e + board.s7d == ''):
        moves = '3h8c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+b', Wboard.w3h) and w9b == ''
            and board.s4g + board.s5f + board.s6e + board.s7d + board.s8c == ''):
        moves = '3h9b'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # Bishop / horse on the short 3h-1f diagonal.
    if (re.match(r'b', Wboard.w3h) and w1f == ''
            and board.s2g == ''):
        moves = '3h1f+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+b', Wboard.w3h) and w1f == ''
            and board.s2g == ''):
        moves = '3h1f'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
# --- Candidate moves for the white piece standing on square 4h. ---
# Each generated move string is passed to kaihimore() (presumably a
# check-evasion validator — confirm); it is kept in depth1 only when
# oute.oute == 0 afterwards, i.e. the move leaves no check standing.
if Wboard.w4h !='':
if re.match(r'[sgk+]', Wboard.w4h)and w4i=='':
moves = '4h4i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[sgk+]', Wboard.w4h)and w3i=='':
moves = '4h3i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[sgk+]', Wboard.w4h)and w5i=='':
moves = '4h5i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[gk+]', Wboard.w4h)and w3h=='':
moves = '4h3h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[gk+]', Wboard.w4h)and w5h=='':
moves = '4h5h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[gk+]', Wboard.w4h)and w4g=='':
moves = '4h4g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|\+b|s|k',Wboard.w4h)and w3g=='':
moves = '4h3g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|\+b|s|k',Wboard.w4h)and w5g=='':
moves = '4h5g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[plsr]', Wboard.w4h)and w4i=='':
moves = '4h4i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[bs]', Wboard.w4h)and w3i=='':
moves = '4h3i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[bs]', Wboard.w4h)and w5i=='':
moves = '4h5i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w4h)and w3h=='':
moves = '4h3h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w4h)and w5h=='':
moves = '4h5h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w4h)and w4g=='':
moves = '4h4g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[bs]', Wboard.w4h)and w3g=='':
moves = '4h3g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[bs]', Wboard.w4h)and w5g=='':
moves = '4h5g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w4h)and w4f==''\
and board.s4e=='':
moves = '4h4f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w4h)and w4f==''\
and board.s4e=='':
moves = '4h4f+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w4h)and w4e==''\
and board.s4g+board.s4f=='':
moves = '4h4e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w4h)and w4e==''\
and board.s4g+board.s4f=='':
moves = '4h4e+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w4h)and w4d==''\
and board.s4g+board.s4f+board.s4e=='':
moves = '4h4d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w4h)and w4d==''\
and board.s4g+board.s4f+board.s4e=='':
moves = '4h4d+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w4h)and w4c==''\
and board.s4g+board.s4f+board.s4e+board.s4d=='':
moves = '4h4c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w4h)and w4c==''\
and board.s4g+board.s4f+board.s4e+board.s4d=='':
moves = '4h4c+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w4h)and w4b==''\
and board.s4g+board.s4f+board.s4e+board.s4d+board.s4c=='':
moves = '4h4b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w4h)and w4b==''\
and board.s4g+board.s4f+board.s4e+board.s4d+board.s4c=='':
moves = '4h4b+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w4h)and w4a==''\
and board.s4g+board.s4f+board.s4e+board.s4d+board.s4c+board.s4b=='':
moves = '4h4a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w4h)and w4a==''\
and board.s4g+board.s4f+board.s4e+board.s4d+board.s4c+board.s4b=='':
moves = '4h4a+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w4h)and w1h==''\
and board.s2h+board.s3h=='':
moves = '4h1h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w4h)and w1h==''\
and board.s2h+board.s3h=='':
moves = '4h1h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
# Rook on 4h sliding right toward 2h (one square crossed: 3h, which must
# be free of black pieces).
# BUG FIX: the destination-occupancy test mistakenly checked w5h — the
# square in the opposite direction.  A move to 2h must verify that no
# white piece occupies 2h (w2h), matching the pattern used for every
# other lateral rook slide in this file (e.g. the 5h2h generator).
if re.match(r'\+r', Wboard.w4h) and w2h == '' and board.s3h == '':
    # Promoted rook: the move is emitted without a trailing '+'.
    moves = '4h2h'
    kaihimore(moves)
    if oute.oute == 0:
        depth1.append(moves)
if re.match('r', Wboard.w4h) and w2h == '' and board.s3h == '':
    # Plain rook: emitted as a promoting move ('+' suffix).
    moves = '4h2h+'
    kaihimore(moves)
    if oute.oute == 0:
        depth1.append(moves)
if re.match('\+r', Wboard.w4h)and w6h==''\
and board.s5h=='':
moves = '4h6h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w4h)and w6h==''\
and board.s5h=='':
moves = '4h6h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w4h)and w7h==''\
and board.s5h+board.s6h=='':
moves = '4h7h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w4h)and w7h==''\
and board.s5h+board.s6h=='':
moves = '4h7h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w4h)and w8h==''\
and board.s5h+board.s6h+board.s7h=='':
moves = '4h8h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w4h)and w8h==''\
and board.s5h+board.s6h+board.s7h=='':
moves = '4h8h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w4h)and w9h==''\
and board.s5h+board.s6h+board.s7h+board.s8h=='':
moves = '4h9h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w4h)and w9h==''\
and board.s5h+board.s6h+board.s7h+board.s8h=='':
moves = '4h9h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w4h)and w6f==''\
and board.s5g=='':
moves = '4h6f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w4h)and w7e==''\
and board.s5g+board.s6f=='':
moves = '4h7e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w4h)and w8d==''\
and board.s5g+board.s6f+board.s7e=='':
moves = '4h8d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w4h)and w9c==''\
and board.s5g+board.s6f+board.s7e+board.s8d=='':
moves = '4h9c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
# Plain bishop on 4h sliding diagonally to 6f (5g must be empty),
# emitted as a promoting move.
# BUG FIX: the piece pattern was 'B' (uppercase), which can never match —
# every other white-piece test in this file uses the lowercase code 'b'
# (see the neighbouring 4h7e+/4h8d+/4h9c+ generators), so this move was
# silently never generated.
if re.match('b', Wboard.w4h) and w6f == '' and board.s5g == '':
    moves = '4h6f+'
    kaihimore(moves)
    if oute.oute == 0:
        depth1.append(moves)
if re.match('b', Wboard.w4h)and w7e==''\
and board.s5g+board.s6f=='':
moves = '4h7e+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b', Wboard.w4h)and w8d==''\
and board.s5g+board.s6f+board.s7e=='':
moves = '4h8d+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b', Wboard.w4h)and w9c==''\
and board.s5g+board.s6f+board.s7e+board.s8d=='':
moves = '4h9c+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w4h)and w1e==''\
and board.s2f+board.s3g=='':
moves = '4h1e+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w4h)and w2f==''\
and board.s3g=='':
moves = '4h2f+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w4h)and w1e==''\
and board.s2f+board.s3g=='':
moves = '4h1e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w4h)and w2f==''\
and board.s3g=='':
moves = '4h2f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
# --- Candidate moves for the white piece standing on square 5h. ---
# Same scheme as the other square sections: build a USI move string,
# validate it with kaihimore(), and append to depth1 when oute.oute == 0.
if Wboard.w5h !='':
if re.match(r'[sgk+]', Wboard.w5h)and w5i=='':
moves = '5h5i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[sgk+]', Wboard.w5h)and w4i=='':
moves = '5h4i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[sgk+]', Wboard.w5h)and w6i=='':
moves = '5h6i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[gk+]', Wboard.w5h)and w4h=='':
moves = '5h4h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[gk+]', Wboard.w5h)and w6h=='':
moves = '5h6h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[gk+]', Wboard.w5h)and w5g=='':
moves = '5h5g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|\+b|s|k',Wboard.w5h)and w4g=='':
moves = '5h4g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|\+b|s|k',Wboard.w5h)and w6g=='':
moves = '5h6g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[plsr]', Wboard.w5h)and w5i=='':
moves = '5h5i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[bs]', Wboard.w5h)and w4i=='':
moves = '5h4i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[bs]', Wboard.w5h)and w6i=='':
moves = '5h6i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w5h)and w4h=='':
moves = '5h4h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w5h)and w6h=='':
moves = '5h6h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w5h)and w5g=='':
moves = '5h5g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[bs]', Wboard.w5h)and w4g=='':
moves = '5h4g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[bs]', Wboard.w5h)and w6g=='':
moves = '5h6g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w5h)and w5f==''\
and board.s5e=='':
moves = '5h5f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w5h)and w5f==''\
and board.s5e=='':
moves = '5h5f+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w5h)and w5e==''\
and board.s5g+board.s5f=='':
moves = '5h5e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w5h)and w5e==''\
and board.s5g+board.s5f=='':
moves = '5h5e+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w5h)and w5d==''\
and board.s5g+board.s5f+board.s5e=='':
moves = '5h5d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w5h)and w5d==''\
and board.s5g+board.s5f+board.s5e=='':
moves = '5h5d+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w5h)and w5c==''\
and board.s5g+board.s5f+board.s5e+board.s5d=='':
moves = '5h5c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w5h)and w5c==''\
and board.s5g+board.s5f+board.s5e+board.s5d=='':
moves = '5h5c+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w5h)and w5b==''\
and board.s5g+board.s5f+board.s5e+board.s5d+board.s5c=='':
moves = '5h5b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w5h)and w5b==''\
and board.s5g+board.s5f+board.s5e+board.s5d+board.s5c=='':
moves = '5h5b+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w5h)and w5a==''\
and board.s5g+board.s5f+board.s5e+board.s5d+board.s5c+board.s5b=='':
moves = '5h5a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w5h)and w5a==''\
and board.s5g+board.s5f+board.s5e+board.s5d+board.s5c+board.s5b=='':
moves = '5h5a+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w5h)and w1h==''\
and board.s2h+board.s3h+board.s4h=='':
moves = '5h1h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w5h)and w1h==''\
and board.s2h+board.s3h+board.s4h=='':
moves = '5h1h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w5h)and w2h==''\
and board.s3h+board.s4h=='':
moves = '5h2h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w5h)and w2h==''\
and board.s3h+board.s4h=='':
moves = '5h2h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w5h)and w3h==''\
and board.s4h=='':
moves = '5h3h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w5h)and w3h==''\
and board.s4h=='':
moves = '5h3h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w5h)and w7h==''\
and board.s6h=='':
moves = '5h7h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w5h)and w7h==''\
and board.s6h=='':
moves = '5h7h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w5h)and w8h==''\
and board.s6h+board.s7h=='':
moves = '5h8h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w5h)and w8h==''\
and board.s6h+board.s7h=='':
moves = '5h8h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w5h)and w9h==''\
and board.s6h+board.s7h+board.s8h=='':
moves = '5h9h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w5h)and w9h==''\
and board.s6h+board.s7h+board.s8h=='':
moves = '5h9h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w5h)and w7f==''\
and board.s6g=='':
moves = '5h7f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w5h)and w8e==''\
and board.s6g+board.s7f=='':
moves = '5h8e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w5h)and w9d==''\
and board.s6g+board.s7f+board.s8e=='':
moves = '5h9d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
# Plain bishop on 5h sliding diagonally to 7f (6g must be empty),
# emitted as a promoting move.
# BUG FIX: pattern was uppercase 'B', which never matches the lowercase
# white-piece codes used throughout this file (compare the 5h8e+/5h9d+
# generators directly below) — the move was never generated.
if re.match('b', Wboard.w5h) and w7f == '' and board.s6g == '':
    moves = '5h7f+'
    kaihimore(moves)
    if oute.oute == 0:
        depth1.append(moves)
if re.match('b', Wboard.w5h)and w8e==''\
and board.s6g+board.s7f=='':
moves = '5h8e+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b', Wboard.w5h)and w9d==''\
and board.s6g+board.s7f+board.s8e=='':
moves = '5h9d+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w5h)and w2e==''\
and board.s3f+board.s4g=='':
moves = '5h2e+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w5h)and w3f==''\
and board.s4g=='':
moves = '5h3f+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w5h)and w2e==''\
and board.s3f+board.s4g=='':
moves = '5h2e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w5h)and w3f==''\
and board.s4g=='':
moves = '5h3f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w5h)and w1d==''\
and board.s4g+board.s3f+board.s2e=='':
moves = '5h1d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b', Wboard.w5h)and w1d==''\
and board.s4g+board.s3f+board.s2e=='':
moves = '5h1d+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
# --- Candidate moves for the white piece standing on square 6h. ---
# Same scheme as the other square sections: build a USI move string,
# validate it with kaihimore(), and append to depth1 when oute.oute == 0.
if Wboard.w6h !='':
if re.match(r'[sgk+]', Wboard.w6h)and w6i=='':
moves = '6h6i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[sgk+]', Wboard.w6h)and w5i=='':
moves = '6h5i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[sgk+]', Wboard.w6h)and w7i=='':
moves = '6h7i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[gk+]', Wboard.w6h)and w5h=='':
moves = '6h5h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[gk+]', Wboard.w6h)and w7h=='':
moves = '6h7h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[gk+]', Wboard.w6h)and w6g=='':
moves = '6h6g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|\+b|s|k',Wboard.w6h)and w5g=='':
moves = '6h5g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|\+b|s|k',Wboard.w6h)and w7g=='':
moves = '6h7g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[plsr]', Wboard.w6h)and w6i=='':
moves = '6h6i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[bs]', Wboard.w6h)and w5i=='':
moves = '6h5i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[bs]', Wboard.w6h)and w7i=='':
moves = '6h7i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w6h)and w5h=='':
moves = '6h5h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w6h)and w7h=='':
moves = '6h7h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w6h)and w6g=='':
moves = '6h6g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[bs]', Wboard.w6h)and w5g=='':
moves = '6h5g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[bs]', Wboard.w6h)and w7g=='':
moves = '6h7g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w6h)and w6f==''\
and board.s6e=='':
moves = '6h6f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w6h)and w6f==''\
and board.s6e=='':
moves = '6h6f+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w6h)and w6e==''\
and board.s6g+board.s6f=='':
moves = '6h6e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w6h)and w6e==''\
and board.s6g+board.s6f=='':
moves = '6h6e+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w6h)and w6d==''\
and board.s6g+board.s6f+board.s6e=='':
moves = '6h6d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w6h)and w6d==''\
and board.s6g+board.s6f+board.s6e=='':
moves = '6h6d+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w6h)and w6c==''\
and board.s6g+board.s6f+board.s6e+board.s6d=='':
moves = '6h6c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w6h)and w6c==''\
and board.s6g+board.s6f+board.s6e+board.s6d=='':
moves = '6h6c+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w6h)and w6b==''\
and board.s6g+board.s6f+board.s6e+board.s6d+board.s6c=='':
moves = '6h6b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w6h)and w6b==''\
and board.s6g+board.s6f+board.s6e+board.s6d+board.s6c=='':
moves = '6h6b+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w6h)and w6a==''\
and board.s6g+board.s6f+board.s6e+board.s6d+board.s6c+board.s6b=='':
moves = '6h6a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w6h)and w6a==''\
and board.s6g+board.s6f+board.s6e+board.s6d+board.s6c+board.s6b=='':
moves = '6h6a+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w6h)and w9h==''\
and board.s8h+board.s7h=='':
moves = '6h9h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w6h)and w9h==''\
and board.s8h+board.s7h=='':
moves = '6h9h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
# Rook on 6h sliding left toward 8h (one square crossed: 7h, which must
# be free of black pieces — that part was already correct).
# BUG FIX: the destination-occupancy test mistakenly checked w5h, the
# square in the opposite direction.  A move to 8h must verify that no
# white piece occupies 8h (w8h), matching every other lateral rook slide
# in this file (e.g. the 7h9h generator checks w9h).
if re.match(r'\+r', Wboard.w6h) and w8h == '' and board.s7h == '':
    # Promoted rook: emitted without a trailing '+'.
    moves = '6h8h'
    kaihimore(moves)
    if oute.oute == 0:
        depth1.append(moves)
if re.match('r', Wboard.w6h) and w8h == '' and board.s7h == '':
    # Plain rook: emitted as a promoting move ('+' suffix).
    moves = '6h8h+'
    kaihimore(moves)
    if oute.oute == 0:
        depth1.append(moves)
if re.match('\+r', Wboard.w6h)and w4h==''\
and board.s5h=='':
moves = '6h4h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w6h)and w4h==''\
and board.s5h=='':
moves = '6h4h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w6h)and w3h==''\
and board.s5h+board.s4h=='':
moves = '6h3h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w6h)and w3h==''\
and board.s5h+board.s4h=='':
moves = '6h3h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w6h)and w2h==''\
and board.s5h+board.s4h+board.s3h=='':
moves = '6h2h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w6h)and w2h==''\
and board.s5h+board.s4h+board.s3h=='':
moves = '6h2h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w6h)and w1h==''\
and board.s5h+board.s4h+board.s3h+board.s2h=='':
moves = '6h1h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w6h)and w1h==''\
and board.s5h+board.s4h+board.s3h+board.s2h=='':
moves = '6h1h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w6h)and w4f==''\
and board.s5g=='':
moves = '6h4f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w6h)and w3e==''\
and board.s5g+board.s4f=='':
moves = '6h3e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w6h)and w2d==''\
and board.s5g+board.s4f+board.s3e=='':
moves = '6h2d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w6h)and w1c==''\
and board.s5g+board.s4f+board.s3e+board.s2d=='':
moves = '6h1c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
# Plain bishop on 6h sliding diagonally to 4f (5g must be empty),
# emitted as a promoting move.
# BUG FIX: pattern was uppercase 'B', which never matches the lowercase
# white-piece codes used throughout this file (compare the 6h3e+/6h2d+
# generators directly below) — the move was never generated.
if re.match('b', Wboard.w6h) and w4f == '' and board.s5g == '':
    moves = '6h4f+'
    kaihimore(moves)
    if oute.oute == 0:
        depth1.append(moves)
if re.match('b', Wboard.w6h)and w3e==''\
and board.s5g+board.s4f=='':
moves = '6h3e+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b', Wboard.w6h)and w2d==''\
and board.s5g+board.s4f+board.s3e=='':
moves = '6h2d+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b', Wboard.w6h)and w1c==''\
and board.s5g+board.s4f+board.s3e+board.s2d=='':
moves = '6h1c+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w6h)and w9e==''\
and board.s8f+board.s7g=='':
moves = '6h9e+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w6h)and w8f==''\
and board.s7g=='':
moves = '6h8f+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w6h)and w9e==''\
and board.s8f+board.s7g=='':
moves = '6h9e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w6h)and w8f==''\
and board.s7g=='':
moves = '6h8f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
# --- Candidate moves for the white piece standing on square 7h. ---
# Same scheme as the other square sections: build a USI move string,
# validate it with kaihimore(), and append to depth1 when oute.oute == 0.
if Wboard.w7h !='':
if re.match(r'[sgk+]', Wboard.w7h)and w7i=='':
moves = '7h7i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[sgk+]', Wboard.w7h)and w6i=='':
moves = '7h6i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[sgk+]', Wboard.w7h)and w8i=='':
moves = '7h8i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[gk+]', Wboard.w7h)and w6h=='':
moves = '7h6h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[gk+]', Wboard.w7h)and w8h=='':
moves = '7h8h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[gk+]', Wboard.w7h)and w7g=='':
moves = '7h7g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|\+b|s|k',Wboard.w7h)and w6g=='':
moves = '7h6g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|\+b|s|k',Wboard.w7h)and w8g=='':
moves = '7h8g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[plsr]', Wboard.w7h)and w7i=='':
moves = '7h7i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[bs]', Wboard.w7h)and w6i=='':
moves = '7h6i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[bs]', Wboard.w7h)and w8i=='':
moves = '7h8i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w7h)and w6h=='':
moves = '7h6h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w7h)and w8h=='':
moves = '7h8h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w7h)and w7g=='':
moves = '7h7g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[bs]', Wboard.w7h)and w6g=='':
moves = '7h6g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[bs]', Wboard.w7h)and w8g=='':
moves = '7h8g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w7h)and w7f==''\
and board.s7e=='':
moves = '7h7f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w7h)and w7f==''\
and board.s7e=='':
moves = '7h7f+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w7h)and w7e==''\
and board.s7g+board.s7f=='':
moves = '7h7e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w7h)and w7e==''\
and board.s7g+board.s7f=='':
moves = '7h7e+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w7h)and w7d==''\
and board.s7g+board.s7f+board.s7e=='':
moves = '7h7d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w7h)and w7d==''\
and board.s7g+board.s7f+board.s7e=='':
moves = '7h7d+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w7h)and w7c==''\
and board.s7g+board.s7f+board.s7e+board.s7d=='':
moves = '7h7c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w7h)and w7c==''\
and board.s7g+board.s7f+board.s7e+board.s7d=='':
moves = '7h7c+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w7h)and w7b==''\
and board.s7g+board.s7f+board.s7e+board.s7d+board.s7c=='':
moves = '7h7b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w7h)and w7b==''\
and board.s7g+board.s7f+board.s7e+board.s7d+board.s7c=='':
moves = '7h7b+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w7h)and w7a==''\
and board.s7g+board.s7f+board.s7e+board.s7d+board.s7c+board.s7b=='':
moves = '7h7a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w7h)and w7a==''\
and board.s7g+board.s7f+board.s7e+board.s7d+board.s7c+board.s7b=='':
moves = '7h7a+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w7h)and w9h==''\
and board.s8h=='':
moves = '7h9h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w7h)and w9h==''\
and board.s8h=='':
moves = '7h9h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w7h)and w5h==''\
and board.s6h=='':
moves = '7h5h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w7h)and w5h==''\
and board.s6h=='':
moves = '7h5h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w7h)and w4h==''\
and board.s6h+board.s5h=='':
moves = '7h4h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w7h)and w4h==''\
and board.s6h+board.s5h=='':
moves = '7h4h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w7h)and w3h==''\
and board.s6h+board.s5h+board.s4h=='':
moves = '7h3h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w7h)and w3h==''\
and board.s6h+board.s5h+board.s4h=='':
moves = '7h3h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w7h)and w2h==''\
and board.s6h+board.s5h+board.s4h+board.s3h=='':
moves = '7h2h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w7h)and w2h==''\
and board.s6h+board.s5h+board.s4h+board.s3h=='':
moves = '7h2h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w7h)and w1h==''\
and board.s6h+board.s5h+board.s4h+board.s3h+board.s2h=='':
moves = '7h1h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w7h)and w1h==''\
and board.s6h+board.s5h+board.s4h+board.s3h+board.s2h=='':
moves = '7h1h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w7h)and w5f==''\
and board.s6g=='':
moves = '7h5f+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w7h)and w4e==''\
and board.s6g+board.s5f=='':
moves = '7h4e+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w7h)and w3d==''\
and board.s6g+board.s5f+board.s4e=='':
moves = '7h3d+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w7h)and w2c==''\
and board.s6g+board.s5f+board.s4e+board.s3d=='':
moves = '7h2c+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w7h)and w1b==''\
and board.s6g+board.s5f+board.s4e+board.s3d+board.s2c=='':
moves = '7h1b+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w7h)and w5f==''\
and board.s6g=='':
moves = '7h5f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w7h)and w4e==''\
and board.s6g+board.s5f=='':
moves = '7h4e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w7h)and w3d==''\
and board.s6g+board.s5f+board.s4e=='':
moves = '7h3d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w7h)and w2c==''\
and board.s6g+board.s5f+board.s4e+board.s3d=='':
moves = '7h2c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w7h)and w1b==''\
and board.s6g+board.s5f+board.s4e+board.s3d+board.s2c=='':
moves = '7h1b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w7h)and w9f==''\
and board.s8g=='':
moves = '7h9f+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w7h)and w9f==''\
and board.s8g=='':
moves = '7h9f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
# Candidate moves for the white piece standing on 8h.
# For every candidate: the regex names which piece kinds may make the move,
# the destination snapshot (w8i, w7g, ...) must show no white piece, and for
# sliding moves each square strictly between origin and destination must be
# free of black pieces (board.sXX == '').  kaihimore() validates the move;
# it is kept only when oute.oute == 0 (own king not left in check).
if Wboard.w8h != '':
    # One-square moves: (piece pattern, destination snapshot, move string).
    # NOTE(review): the w?? snapshots are plain variables that nothing in
    # this block rebinds, so capturing their values in the table is
    # equivalent to re-reading them at each test.
    for pattern, dest, mv in (
            (r'[sgk+]', w8i, '8h8i'),
            (r'[sgk+]', w7i, '8h7i'),
            (r'[sgk+]', w9i, '8h9i'),
            (r'[gk+]', w7h, '8h7h'),
            (r'[gk+]', w9h, '8h9h'),
            (r'[gk+]', w8g, '8h8g'),
            (r'\+r|\+b|s|k', w7g, '8h7g'),
            (r'\+r|\+b|s|k', w9g, '8h9g'),
            (r'[plsr]', w8i, '8h8i+'),
            (r'[bs]', w7i, '8h7i+'),
            (r'[bs]', w9i, '8h9i+'),
            ('r', w7h, '8h7h+'),
            ('r', w9h, '8h9h+'),
            ('r', w8g, '8h8g+'),
            (r'[bs]', w7g, '8h7g+'),
            (r'[bs]', w9g, '8h9g+')):
        if re.match(pattern, Wboard.w8h) and dest == '':
            moves = mv
            kaihimore(moves)
            if oute.oute == 0:
                depth1.append(moves)
    # Sliding moves: (pattern, destination snapshot, board path attribute
    # names, move string).  Path squares are re-read via getattr on every
    # test because kaihimore() may touch board state between tests.
    # BUG FIX: the 8h->8f rook moves previously tested board.s8e; the square
    # actually crossed is 8g (compare 8h->8e, which tests s8g+s8f).
    for pattern, dest, path, mv in (
            (r'\+r', w8f, ('s8g',), '8h8f'),
            ('r', w8f, ('s8g',), '8h8f+'),
            (r'\+r', w8e, ('s8g', 's8f'), '8h8e'),
            ('r', w8e, ('s8g', 's8f'), '8h8e+'),
            (r'\+r', w8d, ('s8g', 's8f', 's8e'), '8h8d'),
            ('r', w8d, ('s8g', 's8f', 's8e'), '8h8d+'),
            (r'\+r', w8c, ('s8g', 's8f', 's8e', 's8d'), '8h8c'),
            ('r', w8c, ('s8g', 's8f', 's8e', 's8d'), '8h8c+'),
            (r'\+r', w8b, ('s8g', 's8f', 's8e', 's8d', 's8c'), '8h8b'),
            ('r', w8b, ('s8g', 's8f', 's8e', 's8d', 's8c'), '8h8b+'),
            (r'\+r', w8a, ('s8g', 's8f', 's8e', 's8d', 's8c', 's8b'), '8h8a'),
            ('r', w8a, ('s8g', 's8f', 's8e', 's8d', 's8c', 's8b'), '8h8a+'),
            (r'\+r', w6h, ('s7h',), '8h6h'),
            ('r', w6h, ('s7h',), '8h6h+'),
            (r'\+r', w5h, ('s7h', 's6h'), '8h5h'),
            ('r', w5h, ('s7h', 's6h'), '8h5h+'),
            (r'\+r', w4h, ('s7h', 's6h', 's5h'), '8h4h'),
            ('r', w4h, ('s7h', 's6h', 's5h'), '8h4h+'),
            (r'\+r', w3h, ('s7h', 's6h', 's5h', 's4h'), '8h3h'),
            ('r', w3h, ('s7h', 's6h', 's5h', 's4h'), '8h3h+'),
            (r'\+r', w2h, ('s7h', 's6h', 's5h', 's4h', 's3h'), '8h2h'),
            ('r', w2h, ('s7h', 's6h', 's5h', 's4h', 's3h'), '8h2h+'),
            (r'\+r', w1h, ('s7h', 's6h', 's5h', 's4h', 's3h', 's2h'), '8h1h'),
            ('r', w1h, ('s7h', 's6h', 's5h', 's4h', 's3h', 's2h'), '8h1h+'),
            ('b', w6f, ('s7g',), '8h6f+'),
            ('b', w5e, ('s7g', 's6f'), '8h5e+'),
            ('b', w4d, ('s7g', 's6f', 's5e'), '8h4d+'),
            ('b', w3c, ('s7g', 's6f', 's5e', 's4d'), '8h3c+'),
            ('b', w2b, ('s7g', 's6f', 's5e', 's4d', 's3c'), '8h2b+'),
            ('b', w1a, ('s7g', 's6f', 's5e', 's4d', 's3c', 's2b'), '8h1a+'),
            (r'\+b', w6f, ('s7g',), '8h6f'),
            (r'\+b', w5e, ('s7g', 's6f'), '8h5e'),
            (r'\+b', w4d, ('s7g', 's6f', 's5e'), '8h4d'),
            (r'\+b', w3c, ('s7g', 's6f', 's5e', 's4d'), '8h3c'),
            (r'\+b', w2b, ('s7g', 's6f', 's5e', 's4d', 's3c'), '8h2b'),
            (r'\+b', w1a, ('s7g', 's6f', 's5e', 's4d', 's3c', 's2b'), '8h1a')):
        if (re.match(pattern, Wboard.w8h) and dest == ''
                and all(getattr(board, sq) == '' for sq in path)):
            moves = mv
            kaihimore(moves)
            if oute.oute == 0:
                depth1.append(moves)
# Candidate moves for the white piece standing on 9h.
# Same scheme as the neighbouring squares: piece-kind regex, white-free
# destination snapshot, black-free sliding path, kaihimore() validation,
# keep the move only when oute.oute == 0.
if Wboard.w9h != '':
    # One-square moves: (piece pattern, destination snapshot, move string).
    # NOTE(review): the w?? snapshots are plain variables that nothing in
    # this block rebinds, so capturing their values here is equivalent to
    # re-reading them at each test.
    for pattern, dest, mv in (
            (r'[sgk+]', w9i, '9h9i'),
            (r'[sgk+]', w8i, '9h8i'),
            (r'[gk+]', w8h, '9h8h'),
            (r'[gk+]', w9g, '9h9g'),
            (r'\+r|\+b|s|k', w8g, '9h8g'),
            (r'[plsr]', w9i, '9h9i+'),
            (r'[bs]', w8i, '9h8i+'),
            ('r', w8h, '9h8h+'),
            ('r', w9g, '9h9g+'),
            (r'[bs]', w8g, '9h8g+')):
        if re.match(pattern, Wboard.w9h) and dest == '':
            moves = mv
            kaihimore(moves)
            if oute.oute == 0:
                depth1.append(moves)
    # Sliding moves: (pattern, destination snapshot, board path attribute
    # names, move string).  Path squares are re-read via getattr on every
    # test because kaihimore() may touch board state between tests.
    # BUG FIX: the 9h->9f rook moves previously tested board.s9e; the square
    # actually crossed is 9g (compare 9h->9e, which tests s9g+s9f).
    for pattern, dest, path, mv in (
            (r'\+r', w9f, ('s9g',), '9h9f'),
            ('r', w9f, ('s9g',), '9h9f+'),
            (r'\+r', w9e, ('s9g', 's9f'), '9h9e'),
            ('r', w9e, ('s9g', 's9f'), '9h9e+'),
            (r'\+r', w9d, ('s9g', 's9f', 's9e'), '9h9d'),
            ('r', w9d, ('s9g', 's9f', 's9e'), '9h9d+'),
            (r'\+r', w9c, ('s9g', 's9f', 's9e', 's9d'), '9h9c'),
            ('r', w9c, ('s9g', 's9f', 's9e', 's9d'), '9h9c+'),
            (r'\+r', w9b, ('s9g', 's9f', 's9e', 's9d', 's9c'), '9h9b'),
            ('r', w9b, ('s9g', 's9f', 's9e', 's9d', 's9c'), '9h9b+'),
            (r'\+r', w9a, ('s9g', 's9f', 's9e', 's9d', 's9c', 's9b'), '9h9a'),
            ('r', w9a, ('s9g', 's9f', 's9e', 's9d', 's9c', 's9b'), '9h9a+'),
            (r'\+r', w7h, ('s8h',), '9h7h'),
            ('r', w7h, ('s8h',), '9h7h+'),
            (r'\+r', w6h, ('s8h', 's7h'), '9h6h'),
            ('r', w6h, ('s8h', 's7h'), '9h6h+'),
            (r'\+r', w5h, ('s8h', 's7h', 's6h'), '9h5h'),
            ('r', w5h, ('s8h', 's7h', 's6h'), '9h5h+'),
            (r'\+r', w4h, ('s8h', 's7h', 's6h', 's5h'), '9h4h'),
            ('r', w4h, ('s8h', 's7h', 's6h', 's5h'), '9h4h+'),
            (r'\+r', w3h, ('s8h', 's7h', 's6h', 's5h', 's4h'), '9h3h'),
            ('r', w3h, ('s8h', 's7h', 's6h', 's5h', 's4h'), '9h3h+'),
            (r'\+r', w2h, ('s8h', 's7h', 's6h', 's5h', 's4h', 's3h'), '9h2h'),
            ('r', w2h, ('s8h', 's7h', 's6h', 's5h', 's4h', 's3h'), '9h2h+'),
            (r'\+r', w1h, ('s8h', 's7h', 's6h', 's5h', 's4h', 's3h', 's2h'), '9h1h'),
            ('r', w1h, ('s8h', 's7h', 's6h', 's5h', 's4h', 's3h', 's2h'), '9h1h+'),
            ('b', w7f, ('s8g',), '9h7f+'),
            ('b', w6e, ('s8g', 's7f'), '9h6e+'),
            ('b', w5d, ('s8g', 's7f', 's6e'), '9h5d+'),
            ('b', w4c, ('s8g', 's7f', 's6e', 's5d'), '9h4c+'),
            ('b', w3b, ('s8g', 's7f', 's6e', 's5d', 's4c'), '9h3b+'),
            ('b', w2a, ('s8g', 's7f', 's6e', 's5d', 's4c', 's3b'), '9h2a+'),
            (r'\+b', w7f, ('s8g',), '9h7f'),
            (r'\+b', w6e, ('s8g', 's7f'), '9h6e'),
            (r'\+b', w5d, ('s8g', 's7f', 's6e'), '9h5d'),
            (r'\+b', w4c, ('s8g', 's7f', 's6e', 's5d'), '9h4c'),
            (r'\+b', w3b, ('s8g', 's7f', 's6e', 's5d', 's4c'), '9h3b'),
            (r'\+b', w2a, ('s8g', 's7f', 's6e', 's5d', 's4c', 's3b'), '9h2a')):
        if (re.match(pattern, Wboard.w9h) and dest == ''
                and all(getattr(board, sq) == '' for sq in path)):
            moves = mv
            kaihimore(moves)
            if oute.oute == 0:
                depth1.append(moves)
# Candidate moves for the white piece standing on 1g.
# Scheme: piece-kind regex must match Wboard.w1g, the destination snapshot
# must show no white piece, every square crossed by a slider must be free of
# black pieces, and kaihimore() must leave oute.oute == 0 for the move to
# be recorded in depth1.  Table order reproduces the original test order.
if Wboard.w1g != '':
    # One-square moves (includes the knight jump 1g2i+).
    # NOTE(review): the w?? snapshots are plain variables that nothing in
    # this block rebinds, so capturing their values here is equivalent to
    # re-reading them at each test.
    for pattern, dest, mv in (
            (r'[sgk+]', w1h, '1g1h'),
            (r'[sgk+]', w2h, '1g2h'),
            (r'[gk+]', w2g, '1g2g'),
            (r'[gk+]', w1f, '1g1f'),
            (r'\+r|\+b|s|k', w2f, '1g2f'),
            (r'[plsr]', w1h, '1g1h+'),
            (r'[bs]', w2h, '1g2h+'),
            ('r', w2g, '1g2g+'),
            ('r', w1f, '1g1f+'),
            (r'[bs]', w2f, '1g2f+'),
            ('n', w2i, '1g2i+')):
        if re.match(pattern, Wboard.w1g) and dest == '':
            moves = mv
            kaihimore(moves)
            if oute.oute == 0:
                depth1.append(moves)
    # Sliding moves; path squares are re-read via getattr on every test
    # because kaihimore() may touch board state between tests.
    for pattern, dest, path, mv in (
            (r'\+r', w1i, ('s1h',), '1g1i'),
            (r'r|l', w1i, ('s1h',), '1g1i+'),
            (r'\+r', w1e, ('s1f',), '1g1e'),
            ('r', w1e, ('s1f',), '1g1e+'),
            (r'\+r', w1d, ('s1f', 's1e'), '1g1d'),
            ('r', w1d, ('s1f', 's1e'), '1g1d+'),
            (r'\+r', w1c, ('s1f', 's1e', 's1d'), '1g1c'),
            ('r', w1c, ('s1f', 's1e', 's1d'), '1g1c+'),
            (r'\+r', w1b, ('s1f', 's1e', 's1d', 's1c'), '1g1b'),
            ('r', w1b, ('s1f', 's1e', 's1d', 's1c'), '1g1b+'),
            (r'\+r', w1a, ('s1f', 's1e', 's1d', 's1c', 's1b'), '1g1a'),
            ('r', w1a, ('s1f', 's1e', 's1d', 's1c', 's1b'), '1g1a+'),
            (r'\+r', w3g, ('s2g',), '1g3g'),
            ('r', w3g, ('s2g',), '1g3g+'),
            (r'\+r', w4g, ('s2g', 's3g'), '1g4g'),
            ('r', w4g, ('s2g', 's3g'), '1g4g+'),
            (r'\+r', w5g, ('s2g', 's3g', 's4g'), '1g5g'),
            ('r', w5g, ('s2g', 's3g', 's4g'), '1g5g+'),
            (r'\+r', w6g, ('s2g', 's3g', 's4g', 's5g'), '1g6g'),
            ('r', w6g, ('s2g', 's3g', 's4g', 's5g'), '1g6g+'),
            (r'\+r', w7g, ('s2g', 's3g', 's4g', 's5g', 's6g'), '1g7g'),
            ('r', w7g, ('s2g', 's3g', 's4g', 's5g', 's6g'), '1g7g+'),
            (r'\+r', w8g, ('s2g', 's3g', 's4g', 's5g', 's6g', 's7g'), '1g8g'),
            ('r', w8g, ('s2g', 's3g', 's4g', 's5g', 's6g', 's7g'), '1g8g+'),
            (r'\+r', w9g, ('s2g', 's3g', 's4g', 's5g', 's6g', 's7g', 's8g'), '1g9g'),
            ('r', w9g, ('s2g', 's3g', 's4g', 's5g', 's6g', 's7g', 's8g'), '1g9g+'),
            ('b', w3i, ('s2h',), '1g3i+'),
            ('b', w3e, ('s2f',), '1g3e+'),
            ('b', w4d, ('s2f', 's3e'), '1g4d+'),
            ('b', w5c, ('s2f', 's3e', 's4d'), '1g5c+'),
            ('b', w6b, ('s2f', 's3e', 's4d', 's5c'), '1g6b+'),
            ('b', w7a, ('s2f', 's3e', 's4d', 's5c', 's6b'), '1g7a+'),
            (r'\+b', w3i, ('s2h',), '1g3i'),
            (r'\+b', w3e, ('s2f',), '1g3e'),
            (r'\+b', w4d, ('s2f', 's3e'), '1g4d'),
            (r'\+b', w5c, ('s2f', 's3e', 's4d'), '1g5c'),
            (r'\+b', w6b, ('s2f', 's3e', 's4d', 's5c'), '1g6b'),
            (r'\+b', w7a, ('s2f', 's3e', 's4d', 's5c', 's6b'), '1g7a')):
        if (re.match(pattern, Wboard.w1g) and dest == ''
                and all(getattr(board, sq) == '' for sq in path)):
            moves = mv
            kaihimore(moves)
            if oute.oute == 0:
                depth1.append(moves)
# Candidate moves for the white piece standing on 2g.
# Scheme: piece-kind regex must match Wboard.w2g, the destination snapshot
# must show no white piece, every square crossed by a slider must be free of
# black pieces, and kaihimore() must leave oute.oute == 0 for the move to
# be recorded in depth1.  Table order reproduces the original test order
# (the downward bishop moves to 4i come last, after the +b group, exactly
# as in the original).
if Wboard.w2g != '':
    # One-square moves (includes the knight jumps 2g1i+ and 2g3i+).
    # NOTE(review): the w?? snapshots are plain variables that nothing in
    # this block rebinds, so capturing their values here is equivalent to
    # re-reading them at each test.
    for pattern, dest, mv in (
            (r'[sgk+]', w2h, '2g2h'),
            (r'[sgk+]', w1h, '2g1h'),
            (r'[sgk+]', w3h, '2g3h'),
            (r'[gk+]', w1g, '2g1g'),
            (r'[gk+]', w3g, '2g3g'),
            (r'[gk+]', w2f, '2g2f'),
            (r'\+r|\+b|s|k', w1f, '2g1f'),
            (r'\+r|\+b|s|k', w3f, '2g3f'),
            (r'[plsr]', w2h, '2g2h+'),
            (r'[bs]', w1h, '2g1h+'),
            (r'[bs]', w3h, '2g3h+'),
            ('r', w1g, '2g1g+'),
            ('r', w3g, '2g3g+'),
            ('r', w2f, '2g2f+'),
            (r'[bs]', w1f, '2g1f+'),
            (r'[bs]', w3f, '2g3f+'),
            ('n', w1i, '2g1i+'),
            ('n', w3i, '2g3i+')):
        if re.match(pattern, Wboard.w2g) and dest == '':
            moves = mv
            kaihimore(moves)
            if oute.oute == 0:
                depth1.append(moves)
    # Sliding moves; path squares are re-read via getattr on every test
    # because kaihimore() may touch board state between tests.
    for pattern, dest, path, mv in (
            (r'\+r', w2i, ('s2h',), '2g2i'),
            (r'r|l', w2i, ('s2h',), '2g2i+'),
            (r'\+r', w2e, ('s2f',), '2g2e'),
            ('r', w2e, ('s2f',), '2g2e+'),
            (r'\+r', w2d, ('s2f', 's2e'), '2g2d'),
            ('r', w2d, ('s2f', 's2e'), '2g2d+'),
            (r'\+r', w2c, ('s2f', 's2e', 's2d'), '2g2c'),
            ('r', w2c, ('s2f', 's2e', 's2d'), '2g2c+'),
            (r'\+r', w2b, ('s2f', 's2e', 's2d', 's2c'), '2g2b'),
            ('r', w2b, ('s2f', 's2e', 's2d', 's2c'), '2g2b+'),
            (r'\+r', w2a, ('s2f', 's2e', 's2d', 's2c', 's2b'), '2g2a'),
            ('r', w2a, ('s2f', 's2e', 's2d', 's2c', 's2b'), '2g2a+'),
            (r'\+r', w4g, ('s3g',), '2g4g'),
            ('r', w4g, ('s3g',), '2g4g+'),
            (r'\+r', w5g, ('s3g', 's4g'), '2g5g'),
            ('r', w5g, ('s3g', 's4g'), '2g5g+'),
            (r'\+r', w6g, ('s3g', 's4g', 's5g'), '2g6g'),
            ('r', w6g, ('s3g', 's4g', 's5g'), '2g6g+'),
            (r'\+r', w7g, ('s3g', 's4g', 's5g', 's6g'), '2g7g'),
            ('r', w7g, ('s3g', 's4g', 's5g', 's6g'), '2g7g+'),
            (r'\+r', w8g, ('s3g', 's4g', 's5g', 's6g', 's7g'), '2g8g'),
            ('r', w8g, ('s3g', 's4g', 's5g', 's6g', 's7g'), '2g8g+'),
            (r'\+r', w9g, ('s3g', 's4g', 's5g', 's6g', 's7g', 's8g'), '2g9g'),
            ('r', w9g, ('s3g', 's4g', 's5g', 's6g', 's7g', 's8g'), '2g9g+'),
            ('b', w4e, ('s3f',), '2g4e+'),
            ('b', w5d, ('s3f', 's4e'), '2g5d+'),
            ('b', w6c, ('s3f', 's4e', 's5d'), '2g6c+'),
            ('b', w7b, ('s3f', 's4e', 's5d', 's6c'), '2g7b+'),
            ('b', w8a, ('s3f', 's4e', 's5d', 's6c', 's7b'), '2g8a+'),
            (r'\+b', w4e, ('s3f',), '2g4e'),
            (r'\+b', w5d, ('s3f', 's4e'), '2g5d'),
            (r'\+b', w6c, ('s3f', 's4e', 's5d'), '2g6c'),
            (r'\+b', w7b, ('s3f', 's4e', 's5d', 's6c'), '2g7b'),
            (r'\+b', w8a, ('s3f', 's4e', 's5d', 's6c', 's7b'), '2g8a'),
            ('b', w4i, ('s3h',), '2g4i+'),
            (r'\+b', w4i, ('s3h',), '2g4i')):
        if (re.match(pattern, Wboard.w2g) and dest == ''
                and all(getattr(board, sq) == '' for sq in path)):
            moves = mv
            kaihimore(moves)
            if oute.oute == 0:
                depth1.append(moves)
if Wboard.w3g !='':
if re.match(r'[sgk+]', Wboard.w3g)and w3h=='':
moves = '3g3h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[sgk+]', Wboard.w3g)and w2h=='':
moves = '3g2h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[sgk+]', Wboard.w3g)and w4h=='':
moves = '3g4h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[gk+]', Wboard.w3g)and w2g=='':
moves = '3g2g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[gk+]', Wboard.w3g)and w4g=='':
moves = '3g4g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[gk+]', Wboard.w3g)and w3f=='':
moves = '3g3f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|\+b|s|k',Wboard.w3g)and w2f=='':
moves = '3g2f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|\+b|s|k',Wboard.w3g)and w4f=='':
moves = '3g4f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[plsr]', Wboard.w3g)and w3h=='':
moves = '3g3h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[bs]', Wboard.w3g)and w2h=='':
moves = '3g2h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[bs]', Wboard.w3g)and w4h=='':
moves = '3g4h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w3g)and w2g=='':
moves = '3g2g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w3g)and w4g=='':
moves = '3g4g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w3g)and w3f=='':
moves = '3g3f+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[bs]', Wboard.w3g)and w2f=='':
moves = '3g2f+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[bs]', Wboard.w3g)and w4f=='':
moves = '3g4f+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('n', Wboard.w3g)and w2i=='':
moves = '3g2i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('n', Wboard.w3g)and w4i=='':
moves = '3g4i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w3g)and w3i==''\
and board.s3h=='':
moves = '3g3i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'r|l', Wboard.w3g)and w3i==''\
and board.s3h=='':
moves = '3g3i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w3g)and w3e==''\
and board.s3f=='':
moves = '3g3e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w3g)and w3e==''\
and board.s3f=='':
moves = '3g3e+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w3g)and w3d==''\
and board.s3f+board.s3e=='':
moves = '3g3d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w3g)and w3d==''\
and board.s3f+board.s3e=='':
moves = '3g3d+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w3g)and w3c==''\
and board.s3f+board.s3e+board.s3d=='':
moves = '3g3c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w3g)and w3c==''\
and board.s3f+board.s3e+board.s3d=='':
moves = '3g3c+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w3g)and w3b==''\
and board.s3f+board.s3e+board.s3d+board.s3c=='':
moves = '3g3b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w3g)and w3b==''\
and board.s3f+board.s3e+board.s3d+board.s3c=='':
moves = '3g3b+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w3g)and w3a==''\
and board.s3f+board.s3e+board.s3d+board.s3c+board.s3b=='':
moves = '3g3a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w3g)and w3a==''\
and board.s3f+board.s3e+board.s3d+board.s3c+board.s3b=='':
moves = '3g3a+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w3g)and w1g==''\
and board.s2g=='':
moves = '3g1g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w3g)and w1g==''\
and board.s2g=='':
moves = '3g1g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w3g)and w5g==''\
and board.s4g=='':
moves = '3g5g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w3g)and w5g==''\
and board.s4g=='':
moves = '3g5g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w3g)and w6g==''\
and board.s4g+board.s5g=='':
moves = '3g6g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w3g)and w6g==''\
and board.s4g+board.s5g=='':
moves = '3g6g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w3g)and w7g==''\
and board.s4g+board.s5g+board.s6g=='':
moves = '3g7g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w3g)and w7g==''\
and board.s4g+board.s5g+board.s6g=='':
moves = '3g7g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w3g)and w8g==''\
and board.s4g+board.s5g+board.s6g+board.s7g=='':
moves = '3g8g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w3g)and w8g==''\
and board.s4g+board.s5g+board.s6g+board.s7g=='':
moves = '3g8g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w3g)and w9g==''\
and board.s4g+board.s5g+board.s6g+board.s7g+board.s8g=='':
moves = '3g9g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w3g)and w9g==''\
and board.s4g+board.s5g+board.s6g+board.s7g+board.s8g=='':
moves = '3g9g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w3g)and w1i==''\
and board.s2h=='':
moves = '3g1i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w3g)and w5e==''\
and board.s4f=='':
moves = '3g5e+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w3g)and w6d==''\
and board.s4f+board.s5e=='':
moves = '3g6d+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w3g)and w7c==''\
and board.s4f+board.s5e+board.s6d=='':
moves = '3g7c+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w3g)and w8b==''\
and board.s4f+board.s5e+board.s6d+board.s7c=='':
moves = '3g8b+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w3g)and w9a==''\
and board.s4f+board.s5e+board.s6d+board.s7c+board.s8b=='':
moves = '3g9a+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w3g)and w1i==''\
and board.s2h=='':
moves = '3g1i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w3g)and w5e==''\
and board.s4f=='':
moves = '3g5e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w3g)and w6d==''\
and board.s4f+board.s5e=='':
moves = '3g6d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w3g)and w7c==''\
and board.s4f+board.s5e+board.s6d=='':
moves = '3g7c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w3g)and w8b==''\
and board.s4f+board.s5e+board.s6d+board.s7c=='':
moves = '3g8b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w3g)and w9a==''\
and board.s4f+board.s5e+board.s6d+board.s7c+board.s8b=='':
moves = '3g9a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w3g)and w5i==''\
and board.s4h=='':
moves = '3g5i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w3g)and w1e==''\
and board.s2f=='':
moves = '3g1e+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w3g)and w5i==''\
and board.s4h=='':
moves = '3g5i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w3g)and w1e==''\
and board.s2f=='':
moves = '3g1e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.w4g !='':
    # Pseudo-legal moves for the white piece standing on 4g.
    # Each entry is (piece_pattern, destination_square, path_squares, usi_move):
    #   piece_pattern — regex matched (anchored) against Wboard.w4g;
    #   destination_square — contents of the target square, must be empty;
    #   path_squares — concatenation of the sente squares crossed by a
    #     sliding move ('' for single-step moves), must all be empty;
    #   usi_move — the move string handed to kaihimore()/depth1.
    # The table preserves the original statement order, so kaihimore()
    # side effects and oute checks happen in exactly the same sequence.
    w4g_candidates = [
        (r'[sgk+]', w4h, '', '4g4h'),
        (r'[sgk+]', w3h, '', '4g3h'),
        (r'[sgk+]', w5h, '', '4g5h'),
        (r'[gk+]', w3g, '', '4g3g'),
        (r'[gk+]', w5g, '', '4g5g'),
        (r'[gk+]', w4f, '', '4g4f'),
        (r'\+r|\+b|s|k', w3f, '', '4g3f'),
        (r'\+r|\+b|s|k', w5f, '', '4g5f'),
        (r'[plsr]', w4h, '', '4g4h+'),
        (r'[bs]', w3h, '', '4g3h+'),
        (r'[bs]', w5h, '', '4g5h+'),
        ('r', w3g, '', '4g3g+'),
        ('r', w5g, '', '4g5g+'),
        ('r', w4f, '', '4g4f+'),
        (r'[bs]', w3f, '', '4g3f+'),
        (r'[bs]', w5f, '', '4g5f+'),
        ('n', w3i, '', '4g3i+'),
        ('n', w5i, '', '4g5i+'),
        (r'\+r', w4i, board.s4h, '4g4i'),
        (r'r|l', w4i, board.s4h, '4g4i+'),
        (r'\+r', w4e, board.s4f, '4g4e'),
        ('r', w4e, board.s4f, '4g4e+'),
        (r'\+r', w4d, board.s4f+board.s4e, '4g4d'),
        ('r', w4d, board.s4f+board.s4e, '4g4d+'),
        (r'\+r', w4c, board.s4f+board.s4e+board.s4d, '4g4c'),
        ('r', w4c, board.s4f+board.s4e+board.s4d, '4g4c+'),
        (r'\+r', w4b, board.s4f+board.s4e+board.s4d+board.s4c, '4g4b'),
        ('r', w4b, board.s4f+board.s4e+board.s4d+board.s4c, '4g4b+'),
        (r'\+r', w4a, board.s4f+board.s4e+board.s4d+board.s4c+board.s4b, '4g4a'),
        ('r', w4a, board.s4f+board.s4e+board.s4d+board.s4c+board.s4b, '4g4a+'),
        (r'\+r', w1g, board.s2g+board.s3g, '4g1g'),
        ('r', w1g, board.s2g+board.s3g, '4g1g+'),
        # BUG FIX: the original tested w5g here, but the move goes to 2g
        # (path via 3g), so the destination square to test is w2g.
        (r'\+r', w2g, board.s3g, '4g2g'),
        ('r', w2g, board.s3g, '4g2g+'),
        (r'\+r', w6g, board.s5g, '4g6g'),
        ('r', w6g, board.s5g, '4g6g+'),
        (r'\+r', w7g, board.s5g+board.s6g, '4g7g'),
        ('r', w7g, board.s5g+board.s6g, '4g7g+'),
        (r'\+r', w8g, board.s5g+board.s6g+board.s7g, '4g8g'),
        ('r', w8g, board.s5g+board.s6g+board.s7g, '4g8g+'),
        (r'\+r', w9g, board.s5g+board.s6g+board.s7g+board.s8g, '4g9g'),
        ('r', w9g, board.s5g+board.s6g+board.s7g+board.s8g, '4g9g+'),
        (r'\+b', w6e, board.s5f, '4g6e'),
        (r'\+b', w7d, board.s5f+board.s6e, '4g7d'),
        (r'\+b', w8c, board.s5f+board.s6e+board.s7d, '4g8c'),
        (r'\+b', w9b, board.s5f+board.s6e+board.s7d+board.s8c, '4g9b'),
        # BUG FIX: the original pattern was 'B'; white pieces are lower
        # case everywhere else, so 'B' never matched and 4g6e+ was lost.
        ('b', w6e, board.s5f, '4g6e+'),
        ('b', w7d, board.s5f+board.s6e, '4g7d+'),
        ('b', w8c, board.s5f+board.s6e+board.s7d, '4g8c+'),
        ('b', w9b, board.s5f+board.s6e+board.s7d+board.s8c, '4g9b+'),
        ('b', w1d, board.s2e+board.s3f, '4g1d+'),
        ('b', w2e, board.s3f, '4g2e+'),
        (r'\+b', w1d, board.s2e+board.s3f, '4g1d'),
        (r'\+b', w2e, board.s3f, '4g2e'),
        ('b', w2i, board.s3h, '4g2i+'),
        (r'\+b', w2i, board.s3h, '4g2i'),
        ('b', w6i, board.s5h, '4g6i+'),
        (r'\+b', w6i, board.s5h, '4g6i'),
    ]
    for piece_pat, dest, path, mv in w4g_candidates:
        if re.match(piece_pat, Wboard.w4g) and dest == '' and path == '':
            # Assign before kaihimore(), like the original `moves = ...`,
            # so `moves` keeps its last-matched value after this section.
            moves = mv
            kaihimore(moves)
            if oute.oute == 0:
                depth1.append(moves)
if Wboard.w5g !='':
    # Pseudo-legal moves for the white piece standing on 5g.
    # Each entry is (piece_pattern, destination_square, path_squares, usi_move):
    #   piece_pattern — regex matched (anchored) against Wboard.w5g;
    #   destination_square — contents of the target square, must be empty;
    #   path_squares — concatenation of the sente squares crossed by a
    #     sliding move ('' for single-step moves), must all be empty;
    #   usi_move — the move string handed to kaihimore()/depth1.
    # The table preserves the original statement order, so kaihimore()
    # side effects and oute checks happen in exactly the same sequence.
    w5g_candidates = [
        (r'[sgk+]', w5h, '', '5g5h'),
        (r'[sgk+]', w4h, '', '5g4h'),
        (r'[sgk+]', w6h, '', '5g6h'),
        (r'[gk+]', w4g, '', '5g4g'),
        (r'[gk+]', w6g, '', '5g6g'),
        (r'[gk+]', w5f, '', '5g5f'),
        (r'\+r|\+b|s|k', w4f, '', '5g4f'),
        (r'\+r|\+b|s|k', w6f, '', '5g6f'),
        (r'[plsr]', w5h, '', '5g5h+'),
        (r'[bs]', w4h, '', '5g4h+'),
        (r'[bs]', w6h, '', '5g6h+'),
        ('r', w4g, '', '5g4g+'),
        ('r', w6g, '', '5g6g+'),
        ('r', w5f, '', '5g5f+'),
        (r'[bs]', w4f, '', '5g4f+'),
        (r'[bs]', w6f, '', '5g6f+'),
        ('n', w4i, '', '5g4i+'),
        ('n', w6i, '', '5g6i+'),
        (r'\+r', w5i, board.s5h, '5g5i'),
        (r'r|l', w5i, board.s5h, '5g5i+'),
        (r'\+r', w5e, board.s5f, '5g5e'),
        ('r', w5e, board.s5f, '5g5e+'),
        (r'\+r', w5d, board.s5f+board.s5e, '5g5d'),
        ('r', w5d, board.s5f+board.s5e, '5g5d+'),
        (r'\+r', w5c, board.s5f+board.s5e+board.s5d, '5g5c'),
        ('r', w5c, board.s5f+board.s5e+board.s5d, '5g5c+'),
        (r'\+r', w5b, board.s5f+board.s5e+board.s5d+board.s5c, '5g5b'),
        ('r', w5b, board.s5f+board.s5e+board.s5d+board.s5c, '5g5b+'),
        (r'\+r', w5a, board.s5f+board.s5e+board.s5d+board.s5c+board.s5b, '5g5a'),
        ('r', w5a, board.s5f+board.s5e+board.s5d+board.s5c+board.s5b, '5g5a+'),
        (r'\+r', w1g, board.s2g+board.s3g+board.s4g, '5g1g'),
        ('r', w1g, board.s2g+board.s3g+board.s4g, '5g1g+'),
        (r'\+r', w2g, board.s3g+board.s4g, '5g2g'),
        ('r', w2g, board.s3g+board.s4g, '5g2g+'),
        (r'\+r', w3g, board.s4g, '5g3g'),
        ('r', w3g, board.s4g, '5g3g+'),
        (r'\+r', w7g, board.s6g, '5g7g'),
        ('r', w7g, board.s6g, '5g7g+'),
        (r'\+r', w8g, board.s6g+board.s7g, '5g8g'),
        ('r', w8g, board.s6g+board.s7g, '5g8g+'),
        (r'\+r', w9g, board.s6g+board.s7g+board.s8g, '5g9g'),
        ('r', w9g, board.s6g+board.s7g+board.s8g, '5g9g+'),
        (r'\+b', w7e, board.s6f, '5g7e'),
        (r'\+b', w8d, board.s6f+board.s7e, '5g8d'),
        (r'\+b', w9c, board.s6f+board.s7e+board.s8d, '5g9c'),
        # BUG FIX: the original pattern was 'B'; white pieces are lower
        # case everywhere else, so 'B' never matched and 5g7e+ was lost.
        ('b', w7e, board.s6f, '5g7e+'),
        ('b', w8d, board.s6f+board.s7e, '5g8d+'),
        ('b', w9c, board.s6f+board.s7e+board.s8d, '5g9c+'),
        ('b', w2d, board.s3e+board.s4f, '5g2d+'),
        ('b', w3e, board.s4f, '5g3e+'),
        (r'\+b', w2d, board.s3e+board.s4f, '5g2d'),
        (r'\+b', w3e, board.s4f, '5g3e'),
        (r'\+b', w1c, board.s4f+board.s3e+board.s2d, '5g1c'),
        ('b', w1c, board.s4f+board.s3e+board.s2d, '5g1c+'),
        ('b', w3i, board.s4h, '5g3i+'),
        (r'\+b', w3i, board.s4h, '5g3i'),
        ('b', w7i, board.s6h, '5g7i+'),
        (r'\+b', w7i, board.s6h, '5g7i'),
    ]
    for piece_pat, dest, path, mv in w5g_candidates:
        if re.match(piece_pat, Wboard.w5g) and dest == '' and path == '':
            # Assign before kaihimore(), like the original `moves = ...`,
            # so `moves` keeps its last-matched value after this section.
            moves = mv
            kaihimore(moves)
            if oute.oute == 0:
                depth1.append(moves)
if Wboard.w6g !='':
    # Pseudo-legal moves for the white piece standing on 6g.
    # Each entry is (piece_pattern, destination_square, path_squares, usi_move):
    #   piece_pattern — regex matched (anchored) against Wboard.w6g;
    #   destination_square — contents of the target square, must be empty;
    #   path_squares — concatenation of the sente squares crossed by a
    #     sliding move ('' for single-step moves), must all be empty;
    #   usi_move — the move string handed to kaihimore()/depth1.
    # The table preserves the original statement order, so kaihimore()
    # side effects and oute checks happen in exactly the same sequence.
    w6g_candidates = [
        (r'[sgk+]', w6h, '', '6g6h'),
        (r'[sgk+]', w5h, '', '6g5h'),
        (r'[sgk+]', w7h, '', '6g7h'),
        (r'[gk+]', w5g, '', '6g5g'),
        (r'[gk+]', w7g, '', '6g7g'),
        (r'[gk+]', w6f, '', '6g6f'),
        (r'\+r|\+b|s|k', w5f, '', '6g5f'),
        (r'\+r|\+b|s|k', w7f, '', '6g7f'),
        (r'[plsr]', w6h, '', '6g6h+'),
        (r'[bs]', w5h, '', '6g5h+'),
        (r'[bs]', w7h, '', '6g7h+'),
        ('r', w5g, '', '6g5g+'),
        ('r', w7g, '', '6g7g+'),
        ('r', w6f, '', '6g6f+'),
        (r'[bs]', w5f, '', '6g5f+'),
        (r'[bs]', w7f, '', '6g7f+'),
        ('n', w5i, '', '6g5i+'),
        ('n', w7i, '', '6g7i+'),
        (r'\+r', w6i, board.s6h, '6g6i'),
        (r'r|l', w6i, board.s6h, '6g6i+'),
        (r'\+r', w6e, board.s6f, '6g6e'),
        ('r', w6e, board.s6f, '6g6e+'),
        (r'\+r', w6d, board.s6f+board.s6e, '6g6d'),
        ('r', w6d, board.s6f+board.s6e, '6g6d+'),
        (r'\+r', w6c, board.s6f+board.s6e+board.s6d, '6g6c'),
        ('r', w6c, board.s6f+board.s6e+board.s6d, '6g6c+'),
        (r'\+r', w6b, board.s6f+board.s6e+board.s6d+board.s6c, '6g6b'),
        ('r', w6b, board.s6f+board.s6e+board.s6d+board.s6c, '6g6b+'),
        (r'\+r', w6a, board.s6f+board.s6e+board.s6d+board.s6c+board.s6b, '6g6a'),
        ('r', w6a, board.s6f+board.s6e+board.s6d+board.s6c+board.s6b, '6g6a+'),
        (r'\+r', w9g, board.s8g+board.s7g, '6g9g'),
        ('r', w9g, board.s8g+board.s7g, '6g9g+'),
        # BUG FIX: the original tested w5g here, but the move goes to 8g
        # (path via 7g), so the destination square to test is w8g.
        (r'\+r', w8g, board.s7g, '6g8g'),
        ('r', w8g, board.s7g, '6g8g+'),
        (r'\+r', w4g, board.s5g, '6g4g'),
        ('r', w4g, board.s5g, '6g4g+'),
        (r'\+r', w3g, board.s5g+board.s4g, '6g3g'),
        ('r', w3g, board.s5g+board.s4g, '6g3g+'),
        (r'\+r', w2g, board.s5g+board.s4g+board.s3g, '6g2g'),
        ('r', w2g, board.s5g+board.s4g+board.s3g, '6g2g+'),
        (r'\+r', w1g, board.s5g+board.s4g+board.s3g+board.s2g, '6g1g'),
        ('r', w1g, board.s5g+board.s4g+board.s3g+board.s2g, '6g1g+'),
        (r'\+b', w4e, board.s5f, '6g4e'),
        (r'\+b', w3d, board.s5f+board.s4e, '6g3d'),
        (r'\+b', w2c, board.s5f+board.s4e+board.s3d, '6g2c'),
        (r'\+b', w1b, board.s5f+board.s4e+board.s3d+board.s2c, '6g1b'),
        # BUG FIX: the original pattern was 'B'; white pieces are lower
        # case everywhere else, so 'B' never matched and 6g4e+ was lost.
        ('b', w4e, board.s5f, '6g4e+'),
        ('b', w3d, board.s5f+board.s4e, '6g3d+'),
        ('b', w2c, board.s5f+board.s4e+board.s3d, '6g2c+'),
        ('b', w1b, board.s5f+board.s4e+board.s3d+board.s2c, '6g1b+'),
        ('b', w9d, board.s8e+board.s7f, '6g9d+'),
        ('b', w8e, board.s7f, '6g8e+'),
        (r'\+b', w9d, board.s8e+board.s7f, '6g9d'),
        (r'\+b', w8e, board.s7f, '6g8e'),
        ('b', w8i, board.s7h, '6g8i+'),
        (r'\+b', w8i, board.s7h, '6g8i'),
        ('b', w4i, board.s5h, '6g4i+'),
        (r'\+b', w4i, board.s5h, '6g4i'),
    ]
    for piece_pat, dest, path, mv in w6g_candidates:
        if re.match(piece_pat, Wboard.w6g) and dest == '' and path == '':
            # Assign before kaihimore(), like the original `moves = ...`,
            # so `moves` keeps its last-matched value after this section.
            moves = mv
            kaihimore(moves)
            if oute.oute == 0:
                depth1.append(moves)
if Wboard.w7g !='':
if re.match(r'[sgk+]', Wboard.w7g)and w7h=='':
moves = '7g7h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[sgk+]', Wboard.w7g)and w6h=='':
moves = '7g6h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[sgk+]', Wboard.w7g)and w8h=='':
moves = '7g8h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[gk+]', Wboard.w7g)and w6g=='':
moves = '7g6g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[gk+]', Wboard.w7g)and w8g=='':
moves = '7g8g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[gk+]', Wboard.w7g)and w7f=='':
moves = '7g7f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|\+b|s|k',Wboard.w7g)and w6f=='':
moves = '7g6f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|\+b|s|k',Wboard.w7g)and w8f=='':
moves = '7g8f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[plsr]', Wboard.w7g)and w7h=='':
moves = '7g7h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[bs]', Wboard.w7g)and w6h=='':
moves = '7g6h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[bs]', Wboard.w7g)and w8h=='':
moves = '7g8h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w7g)and w6g=='':
moves = '7g6g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w7g)and w8g=='':
moves = '7g8g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w7g)and w7f=='':
moves = '7g7f+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[bs]', Wboard.w7g)and w6f=='':
moves = '7g6f+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[bs]', Wboard.w7g)and w8f=='':
moves = '7g8f+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('n', Wboard.w7g)and w6i=='':
moves = '7g6i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('n', Wboard.w7g)and w8i=='':
moves = '7g8i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w7g)and w7i==''\
and board.s7h=='':
moves = '7g7i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'r|l', Wboard.w7g)and w7i==''\
and board.s7h=='':
moves = '7g7i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w7g)and w7e==''\
and board.s7f=='':
moves = '7g7e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w7g)and w7e==''\
and board.s7f=='':
moves = '7g7e+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w7g)and w7d==''\
and board.s7f+board.s7e=='':
moves = '7g7d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w7g)and w7d==''\
and board.s7f+board.s7e=='':
moves = '7g7d+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w7g)and w7c==''\
and board.s7f+board.s7e+board.s7d=='':
moves = '7g7c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w7g)and w7c==''\
and board.s7f+board.s7e+board.s7d=='':
moves = '7g7c+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w7g)and w7b==''\
and board.s7f+board.s7e+board.s7d+board.s7c=='':
moves = '7g7b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w7g)and w7b==''\
and board.s7f+board.s7e+board.s7d+board.s7c=='':
moves = '7g7b+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w7g)and w7a==''\
and board.s7f+board.s7e+board.s7d+board.s7c+board.s7b=='':
moves = '7g7a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w7g)and w7a==''\
and board.s7f+board.s7e+board.s7d+board.s7c+board.s7b=='':
moves = '7g7a+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w7g)and w9g==''\
and board.s8g=='':
moves = '7g9g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w7g)and w9g==''\
and board.s8g=='':
moves = '7g9g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w7g)and w5g==''\
and board.s6g=='':
moves = '7g5g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w7g)and w5g==''\
and board.s6g=='':
moves = '7g5g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w7g)and w4g==''\
and board.s6g+board.s5g=='':
moves = '7g4g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w7g)and w4g==''\
and board.s6g+board.s5g=='':
moves = '7g4g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w7g)and w3g==''\
and board.s6g+board.s5g+board.s4g=='':
moves = '7g3g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w7g)and w3g==''\
and board.s6g+board.s5g+board.s4g=='':
moves = '7g3g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w7g)and w2g==''\
and board.s6g+board.s5g+board.s4g+board.s3g=='':
moves = '7g2g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w7g)and w2g==''\
and board.s6g+board.s5g+board.s4g+board.s3g=='':
moves = '7g2g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w7g)and w1g==''\
and board.s6g+board.s5g+board.s4g+board.s3g+board.s2g=='':
moves = '7g1g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w7g)and w1g==''\
and board.s6g+board.s5g+board.s4g+board.s3g+board.s2g=='':
moves = '7g1g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w7g)and w9i==''\
and board.s8h=='':
moves = '7g9i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w7g)and w5e==''\
and board.s6f=='':
moves = '7g5e+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w7g)and w4d==''\
and board.s6f+board.s5e=='':
moves = '7g4d+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w7g)and w3c==''\
and board.s6f+board.s5e+board.s4d=='':
moves = '7g3c+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w7g)and w2b==''\
and board.s6f+board.s5e+board.s4d+board.s3c=='':
moves = '7g2b+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w7g)and w1a==''\
and board.s6f+board.s5e+board.s4d+board.s3c+board.s2b=='':
moves = '7g1a+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w7g)and w9i==''\
and board.s8h=='':
moves = '7g9i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w7g)and w5e==''\
and board.s6f=='':
moves = '7g5e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w7g)and w4d==''\
and board.s6f+board.s5e=='':
moves = '7g4d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w7g)and w3c==''\
and board.s6f+board.s5e+board.s4d=='':
moves = '7g3c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w7g)and w2b==''\
and board.s6f+board.s5e+board.s4d+board.s3c=='':
moves = '7g2b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w7g)and w1a==''\
and board.s6f+board.s5e+board.s4d+board.s3c+board.s2b=='':
moves = '7g1a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w7g)and w5i==''\
and board.s6h=='':
moves = '7g5i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w7g)and w9e==''\
and board.s8f=='':
moves = '7g9e+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w7g)and w5i==''\
and board.s6h=='':
moves = '7g5i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w7g)and w9e==''\
and board.s8f=='':
moves = '7g9e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.w8g != '':
    # Candidate moves for the white piece standing on 8g.
    # Each entry is (piece regex, destination square, intervening squares
    # that must all be empty, USI move string).  A trailing '+' on the move
    # string denotes promotion.  Entry order reproduces the original
    # hand-written sequence exactly, so depth1 is filled in the same order.
    # NOTE: patterns are raw strings; the original '\+r' literals were
    # invalid escape sequences (SyntaxWarning in modern Python).
    w8g_moves = [
        (r'[sgk+]', w8h, '', '8g8h'),
        (r'[sgk+]', w7h, '', '8g7h'),
        (r'[sgk+]', w9h, '', '8g9h'),
        (r'[gk+]', w7g, '', '8g7g'),
        (r'[gk+]', w9g, '', '8g9g'),
        (r'[gk+]', w8f, '', '8g8f'),
        (r'\+r|\+b|s|k', w7f, '', '8g7f'),
        (r'\+r|\+b|s|k', w9f, '', '8g9f'),
        (r'[plsr]', w8h, '', '8g8h+'),
        (r'[bs]', w7h, '', '8g7h+'),
        (r'[bs]', w9h, '', '8g9h+'),
        ('r', w7g, '', '8g7g+'),
        ('r', w9g, '', '8g9g+'),
        ('r', w8f, '', '8g8f+'),
        (r'[bs]', w7f, '', '8g7f+'),
        (r'[bs]', w9f, '', '8g9f+'),
        ('n', w7i, '', '8g7i+'),
        ('n', w9i, '', '8g9i+'),
        # Rook / promoted rook sliding down the 8 file.
        (r'\+r', w8i, board.s8h, '8g8i'),
        (r'r|l', w8i, board.s8h, '8g8i+'),
        # Rook / promoted rook sliding up the 8 file.
        (r'\+r', w8e, board.s8f, '8g8e'),
        ('r', w8e, board.s8f, '8g8e+'),
        (r'\+r', w8d, board.s8f+board.s8e, '8g8d'),
        ('r', w8d, board.s8f+board.s8e, '8g8d+'),
        (r'\+r', w8c, board.s8f+board.s8e+board.s8d, '8g8c'),
        ('r', w8c, board.s8f+board.s8e+board.s8d, '8g8c+'),
        (r'\+r', w8b, board.s8f+board.s8e+board.s8d+board.s8c, '8g8b'),
        ('r', w8b, board.s8f+board.s8e+board.s8d+board.s8c, '8g8b+'),
        (r'\+r', w8a, board.s8f+board.s8e+board.s8d+board.s8c+board.s8b, '8g8a'),
        ('r', w8a, board.s8f+board.s8e+board.s8d+board.s8c+board.s8b, '8g8a+'),
        # Rook / promoted rook sliding along rank g.
        (r'\+r', w6g, board.s7g, '8g6g'),
        ('r', w6g, board.s7g, '8g6g+'),
        (r'\+r', w5g, board.s7g+board.s6g, '8g5g'),
        ('r', w5g, board.s7g+board.s6g, '8g5g+'),
        (r'\+r', w4g, board.s7g+board.s6g+board.s5g, '8g4g'),
        ('r', w4g, board.s7g+board.s6g+board.s5g, '8g4g+'),
        (r'\+r', w3g, board.s7g+board.s6g+board.s5g+board.s4g, '8g3g'),
        ('r', w3g, board.s7g+board.s6g+board.s5g+board.s4g, '8g3g+'),
        (r'\+r', w2g, board.s7g+board.s6g+board.s5g+board.s4g+board.s3g, '8g2g'),
        ('r', w2g, board.s7g+board.s6g+board.s5g+board.s4g+board.s3g, '8g2g+'),
        (r'\+r', w1g, board.s7g+board.s6g+board.s5g+board.s4g+board.s3g+board.s2g, '8g1g'),
        ('r', w1g, board.s7g+board.s6g+board.s5g+board.s4g+board.s3g+board.s2g, '8g1g+'),
        # Bishop / promoted bishop diagonals.
        ('b', w6e, board.s7f, '8g6e+'),
        ('b', w5d, board.s7f+board.s6e, '8g5d+'),
        ('b', w4c, board.s7f+board.s6e+board.s5d, '8g4c+'),
        ('b', w3b, board.s7f+board.s6e+board.s5d+board.s4c, '8g3b+'),
        ('b', w2a, board.s7f+board.s6e+board.s5d+board.s4c+board.s3b, '8g2a+'),
        (r'\+b', w6e, board.s7f, '8g6e'),
        (r'\+b', w5d, board.s7f+board.s6e, '8g5d'),
        (r'\+b', w4c, board.s7f+board.s6e+board.s5d, '8g4c'),
        (r'\+b', w3b, board.s7f+board.s6e+board.s5d+board.s4c, '8g3b'),
        (r'\+b', w2a, board.s7f+board.s6e+board.s5d+board.s4c+board.s3b, '8g2a'),
        ('b', w6i, board.s7h, '8g6i+'),
        (r'\+b', w6i, board.s7h, '8g6i'),
    ]
    for piece_pat, dest, path, usi in w8g_moves:
        # Piece type must match, destination var must be empty, and every
        # square on the sliding path must be empty (path is '' for steps).
        if re.match(piece_pat, Wboard.w8g) and dest == '' and path == '':
            moves = usi
            kaihimore(moves)
            if oute.oute == 0:
                depth1.append(moves)
if Wboard.w9g != '':
    # Candidate moves for the white piece standing on 9g.
    # Each entry is (piece regex, destination square, intervening squares
    # that must all be empty, USI move string; trailing '+' = promotion).
    # Entry order reproduces the original hand-written sequence exactly.
    # NOTE: patterns are raw strings; the original '\+r' literals were
    # invalid escape sequences (SyntaxWarning in modern Python).
    w9g_moves = [
        (r'[sgk+]', w9h, '', '9g9h'),
        (r'[sgk+]', w8h, '', '9g8h'),
        (r'[gk+]', w8g, '', '9g8g'),
        (r'[gk+]', w9f, '', '9g9f'),
        (r'\+r|\+b|s|k', w8f, '', '9g8f'),
        (r'[plsr]', w9h, '', '9g9h+'),
        (r'[bs]', w8h, '', '9g8h+'),
        ('r', w8g, '', '9g8g+'),
        ('r', w9f, '', '9g9f+'),
        (r'[bs]', w8f, '', '9g8f+'),
        ('n', w8i, '', '9g8i+'),
        # Rook / lance / promoted rook sliding along the 9 file.
        (r'\+r', w9i, board.s9h, '9g9i'),
        (r'r|l', w9i, board.s9h, '9g9i+'),
        (r'\+r', w9e, board.s9f, '9g9e'),
        ('r', w9e, board.s9f, '9g9e+'),
        (r'\+r', w9d, board.s9f+board.s9e, '9g9d'),
        ('r', w9d, board.s9f+board.s9e, '9g9d+'),
        (r'\+r', w9c, board.s9f+board.s9e+board.s9d, '9g9c'),
        ('r', w9c, board.s9f+board.s9e+board.s9d, '9g9c+'),
        (r'\+r', w9b, board.s9f+board.s9e+board.s9d+board.s9c, '9g9b'),
        ('r', w9b, board.s9f+board.s9e+board.s9d+board.s9c, '9g9b+'),
        (r'\+r', w9a, board.s9f+board.s9e+board.s9d+board.s9c+board.s9b, '9g9a'),
        ('r', w9a, board.s9f+board.s9e+board.s9d+board.s9c+board.s9b, '9g9a+'),
        # Rook / promoted rook sliding along rank g.
        (r'\+r', w7g, board.s8g, '9g7g'),
        ('r', w7g, board.s8g, '9g7g+'),
        (r'\+r', w6g, board.s8g+board.s7g, '9g6g'),
        ('r', w6g, board.s8g+board.s7g, '9g6g+'),
        (r'\+r', w5g, board.s8g+board.s7g+board.s6g, '9g5g'),
        ('r', w5g, board.s8g+board.s7g+board.s6g, '9g5g+'),
        (r'\+r', w4g, board.s8g+board.s7g+board.s6g+board.s5g, '9g4g'),
        ('r', w4g, board.s8g+board.s7g+board.s6g+board.s5g, '9g4g+'),
        (r'\+r', w3g, board.s8g+board.s7g+board.s6g+board.s5g+board.s4g, '9g3g'),
        ('r', w3g, board.s8g+board.s7g+board.s6g+board.s5g+board.s4g, '9g3g+'),
        (r'\+r', w2g, board.s8g+board.s7g+board.s6g+board.s5g+board.s4g+board.s3g, '9g2g'),
        ('r', w2g, board.s8g+board.s7g+board.s6g+board.s5g+board.s4g+board.s3g, '9g2g+'),
        (r'\+r', w1g, board.s8g+board.s7g+board.s6g+board.s5g+board.s4g+board.s3g+board.s2g, '9g1g'),
        ('r', w1g, board.s8g+board.s7g+board.s6g+board.s5g+board.s4g+board.s3g+board.s2g, '9g1g+'),
        # Bishop / promoted bishop diagonals.
        ('b', w7i, board.s8h, '9g7i+'),
        ('b', w7e, board.s8f, '9g7e+'),
        ('b', w6d, board.s8f+board.s7e, '9g6d+'),
        ('b', w5c, board.s8f+board.s7e+board.s6d, '9g5c+'),
        ('b', w4b, board.s8f+board.s7e+board.s6d+board.s5c, '9g4b+'),
        ('b', w3a, board.s8f+board.s7e+board.s6d+board.s5c+board.s4b, '9g3a+'),
        (r'\+b', w7i, board.s8h, '9g7i'),
        (r'\+b', w7e, board.s8f, '9g7e'),
        (r'\+b', w6d, board.s8f+board.s7e, '9g6d'),
        (r'\+b', w5c, board.s8f+board.s7e+board.s6d, '9g5c'),
        (r'\+b', w4b, board.s8f+board.s7e+board.s6d+board.s5c, '9g4b'),
        (r'\+b', w3a, board.s8f+board.s7e+board.s6d+board.s5c+board.s4b, '9g3a'),
    ]
    for piece_pat, dest, path, usi in w9g_moves:
        # Piece type must match, destination var must be empty, and every
        # square on the sliding path must be empty (path is '' for steps).
        if re.match(piece_pat, Wboard.w9g) and dest == '' and path == '':
            moves = usi
            kaihimore(moves)
            if oute.oute == 0:
                depth1.append(moves)
if Wboard.w1f != '':
    # Candidate moves for the white piece standing on 1f.
    # Each entry is (piece regex, destination square, intervening squares
    # that must all be empty, USI move string; trailing '+' = promotion).
    # Entry order reproduces the original hand-written sequence exactly.
    # NOTE: patterns are raw strings; the original '\+r' literals were
    # invalid escape sequences (SyntaxWarning in modern Python).
    w1f_moves = [
        (r'[lsgk+]', w1g, '', '1f1g'),
        (r'[sgk+]', w2g, '', '1f2g'),
        (r'[gk+]', w2f, '', '1f2f'),
        (r'[gk+]', w1e, '', '1f1e'),
        (r'\+r|\+b|s|k', w2e, '', '1f2e'),
        (r'[plsr]', w1g, '', '1f1g+'),
        (r'[bs]', w2g, '', '1f2g+'),
        ('n', w2h, '', '1f2h+'),
        # Rook / lance / promoted rook sliding along the 1 file.
        (r'\+r', w1i, board.s1h+board.s1g, '1f1i'),
        (r'r|l', w1i, board.s1h+board.s1g, '1f1i+'),
        (r'\+r', w1h, board.s1g, '1f1h'),
        (r'r|l', w1h, board.s1g, '1f1h+'),
        (r'\+r|r', w1d, board.s1e, '1f1d'),
        (r'\+r|r', w1c, board.s1e+board.s1d, '1f1c'),
        (r'\+r|r', w1b, board.s1e+board.s1d+board.s1c, '1f1b'),
        (r'\+r|r', w1a, board.s1e+board.s1d+board.s1c+board.s1b, '1f1a'),
        # Rook / promoted rook sliding along rank f.
        (r'\+r|r', w3f, board.s2f, '1f3f'),
        (r'\+r|r', w4f, board.s2f+board.s3f, '1f4f'),
        (r'\+r|r', w5f, board.s2f+board.s3f+board.s4f, '1f5f'),
        (r'\+r|r', w6f, board.s2f+board.s3f+board.s4f+board.s5f, '1f6f'),
        (r'\+r|r', w7f, board.s2f+board.s3f+board.s4f+board.s5f+board.s6f, '1f7f'),
        (r'\+r|r', w8f, board.s2f+board.s3f+board.s4f+board.s5f+board.s6f+board.s7f, '1f8f'),
        (r'\+r|r', w9f, board.s2f+board.s3f+board.s4f+board.s5f+board.s6f+board.s7f+board.s8f, '1f9f'),
        # Bishop / promoted bishop diagonals.
        (r'\+b|b', w3d, board.s2e, '1f3d'),
        (r'\+b|b', w4c, board.s2e+board.s3d, '1f4c'),
        (r'\+b|b', w5b, board.s2e+board.s3d+board.s4c, '1f5b'),
        (r'\+b|b', w6a, board.s2e+board.s3d+board.s4c+board.s5b, '1f6a'),
        ('b', w4i, board.s3h+board.s2g, '1f4i+'),
        ('b', w3h, board.s2g, '1f3h+'),
        (r'\+b', w4i, board.s3h+board.s2g, '1f4i'),
        (r'\+b', w3h, board.s2g, '1f3h'),
    ]
    for piece_pat, dest, path, usi in w1f_moves:
        # Piece type must match, destination var must be empty, and every
        # square on the sliding path must be empty (path is '' for steps).
        if re.match(piece_pat, Wboard.w1f) and dest == '' and path == '':
            moves = usi
            kaihimore(moves)
            if oute.oute == 0:
                depth1.append(moves)
if Wboard.w2f != '':
    # Candidate moves for the white piece standing on 2f.
    # Each entry is (piece regex, destination square, intervening squares
    # that must all be empty, USI move string; trailing '+' = promotion).
    # Entry order reproduces the original hand-written sequence exactly.
    # NOTE: patterns are raw strings; the original '\+r' literals were
    # invalid escape sequences (SyntaxWarning in modern Python).
    w2f_moves = [
        (r'[lsgk+]', w2g, '', '2f2g'),
        (r'[sgk+]', w1g, '', '2f1g'),
        (r'[sgk+]', w3g, '', '2f3g'),
        (r'[gk+]', w1f, '', '2f1f'),
        (r'[gk+]', w3f, '', '2f3f'),
        (r'[gk+]', w2e, '', '2f2e'),
        (r'\+r|\+b|s|k', w1e, '', '2f1e'),
        (r'\+r|\+b|s|k', w3e, '', '2f3e'),
        (r'[plsr]', w2g, '', '2f2g+'),
        (r'[bs]', w1g, '', '2f1g+'),
        (r'[bs]', w3g, '', '2f3g+'),
        ('n', w1h, '', '2f1h+'),
        ('n', w3h, '', '2f3h+'),
        # Rook / lance / promoted rook sliding along the 2 file.
        (r'\+r', w2i, board.s2h+board.s2g, '2f2i'),
        (r'r|l', w2i, board.s2h+board.s2g, '2f2i+'),
        (r'\+r', w2h, board.s2g, '2f2h'),
        (r'r|l', w2h, board.s2g, '2f2h+'),
        (r'\+r|r', w2d, board.s2e, '2f2d'),
        (r'\+r|r', w2c, board.s2e+board.s2d, '2f2c'),
        (r'\+r|r', w2b, board.s2e+board.s2d+board.s2c, '2f2b'),
        (r'\+r|r', w2a, board.s2e+board.s2d+board.s2c+board.s2b, '2f2a'),
        # Rook / promoted rook sliding along rank f.
        (r'\+r|r', w4f, board.s3f, '2f4f'),
        (r'\+r|r', w5f, board.s3f+board.s4f, '2f5f'),
        (r'\+r|r', w6f, board.s3f+board.s4f+board.s5f, '2f6f'),
        (r'\+r|r', w7f, board.s3f+board.s4f+board.s5f+board.s6f, '2f7f'),
        (r'\+r|r', w8f, board.s3f+board.s4f+board.s5f+board.s6f+board.s7f, '2f8f'),
        (r'\+r|r', w9f, board.s3f+board.s4f+board.s5f+board.s6f+board.s7f+board.s8f, '2f9f'),
        # Bishop / promoted bishop diagonals.
        (r'\+b|b', w4d, board.s3e, '2f4d'),
        (r'\+b|b', w5c, board.s3e+board.s4d, '2f5c'),
        (r'\+b|b', w6b, board.s3e+board.s4d+board.s5c, '2f6b'),
        (r'\+b|b', w7a, board.s3e+board.s4d+board.s5c+board.s6b, '2f7a'),
        ('b', w5i, board.s4h+board.s3g, '2f5i+'),
        ('b', w4h, board.s3g, '2f4h+'),
        (r'\+b', w5i, board.s4h+board.s3g, '2f5i'),
        (r'\+b', w4h, board.s3g, '2f4h'),
    ]
    for piece_pat, dest, path, usi in w2f_moves:
        # Piece type must match, destination var must be empty, and every
        # square on the sliding path must be empty (path is '' for steps).
        if re.match(piece_pat, Wboard.w2f) and dest == '' and path == '':
            moves = usi
            kaihimore(moves)
            if oute.oute == 0:
                depth1.append(moves)
if Wboard.w3f != '':
    # Candidate moves for the white piece standing on 3f.
    # Each entry is (piece regex, destination square, intervening squares
    # that must all be empty, USI move string; trailing '+' = promotion).
    # Entry order reproduces the original hand-written sequence exactly.
    # NOTE: patterns are raw strings; the original '\+r' literals were
    # invalid escape sequences (SyntaxWarning in modern Python).
    w3f_moves = [
        (r'[lsgk+]', w3g, '', '3f3g'),
        (r'[sgk+]', w2g, '', '3f2g'),
        (r'[sgk+]', w4g, '', '3f4g'),
        (r'[gk+]', w2f, '', '3f2f'),
        (r'[gk+]', w4f, '', '3f4f'),
        (r'[gk+]', w3e, '', '3f3e'),
        (r'\+r|\+b|s|k', w2e, '', '3f2e'),
        (r'\+r|\+b|s|k', w4e, '', '3f4e'),
        (r'[plsr]', w3g, '', '3f3g+'),
        (r'[bs]', w2g, '', '3f2g+'),
        (r'[bs]', w4g, '', '3f4g+'),
        ('n', w2h, '', '3f2h+'),
        ('n', w4h, '', '3f4h+'),
        # Rook / lance / promoted rook sliding along the 3 file.
        (r'\+r', w3i, board.s3h+board.s3g, '3f3i'),
        (r'r|l', w3i, board.s3h+board.s3g, '3f3i+'),
        (r'\+r', w3h, board.s3g, '3f3h'),
        (r'r|l', w3h, board.s3g, '3f3h+'),
        (r'\+r|r', w3d, board.s3e, '3f3d'),
        (r'\+r|r', w3c, board.s3e+board.s3d, '3f3c'),
        (r'\+r|r', w3b, board.s3e+board.s3d+board.s3c, '3f3b'),
        (r'\+r|r', w3a, board.s3e+board.s3d+board.s3c+board.s3b, '3f3a'),
        # Rook / promoted rook sliding along rank f.
        (r'\+r|r', w1f, board.s2f, '3f1f'),
        (r'\+r|r', w5f, board.s4f, '3f5f'),
        (r'\+r|r', w6f, board.s4f+board.s5f, '3f6f'),
        (r'\+r|r', w7f, board.s4f+board.s5f+board.s6f, '3f7f'),
        (r'\+r|r', w8f, board.s4f+board.s5f+board.s6f+board.s7f, '3f8f'),
        (r'\+r|r', w9f, board.s4f+board.s5f+board.s6f+board.s7f+board.s8f, '3f9f'),
        # Bishop / promoted bishop diagonals.
        ('b', w1h, board.s2g, '3f1h+'),
        (r'\+b', w1h, board.s2g, '3f1h'),
        (r'\+b|b', w5d, board.s4e, '3f5d'),
        (r'\+b|b', w6c, board.s4e+board.s5d, '3f6c'),
        (r'\+b|b', w7b, board.s4e+board.s5d+board.s6c, '3f7b'),
        (r'\+b|b', w8a, board.s4e+board.s5d+board.s6c+board.s7b, '3f8a'),
        ('b', w6i, board.s5h+board.s4g, '3f6i+'),
        ('b', w5h, board.s4g, '3f5h+'),
        (r'\+b', w6i, board.s5h+board.s4g, '3f6i'),
        (r'\+b', w5h, board.s4g, '3f5h'),
        (r'\+b|b', w1d, board.s2e, '3f1d'),
    ]
    for piece_pat, dest, path, usi in w3f_moves:
        # Piece type must match, destination var must be empty, and every
        # square on the sliding path must be empty (path is '' for steps).
        if re.match(piece_pat, Wboard.w3f) and dest == '' and path == '':
            moves = usi
            kaihimore(moves)
            if oute.oute == 0:
                depth1.append(moves)
if Wboard.w4f != '':
    # Candidate moves for the white piece standing on 4f, in the original
    # generation order.  Each entry is:
    #   (piece-type regex, destination-square content, concatenation of the
    #    squares that must be empty along the path, move string).
    # Raw strings are used so that '\+' is a literal-plus regex escape and
    # not an invalid Python string escape (SyntaxWarning on modern Pythons).
    # NOTE(review): destination/path values are read eagerly when building
    # this list rather than behind the original short-circuiting `and`;
    # equivalent as long as they are plain locals/attributes — confirm.
    w4f_candidates = [
        (r'[lsgk+]', w4g, '', '4f4g'),
        (r'[sgk+]', w3g, '', '4f3g'),
        (r'[sgk+]', w5g, '', '4f5g'),
        (r'[gk+]', w3f, '', '4f3f'),
        (r'[gk+]', w5f, '', '4f5f'),
        (r'[gk+]', w4e, '', '4f4e'),
        (r'\+r|\+b|s|k', w3e, '', '4f3e'),
        (r'\+r|\+b|s|k', w5e, '', '4f5e'),
        (r'[plsr]', w4g, '', '4f4g+'),
        (r'[bs]', w3g, '', '4f3g+'),
        (r'[bs]', w5g, '', '4f5g+'),
        (r'n', w3h, '', '4f3h+'),
        (r'n', w5h, '', '4f5h+'),
        (r'\+r', w4i, board.s4h + board.s4g, '4f4i'),
        (r'r|l', w4i, board.s4h + board.s4g, '4f4i+'),
        (r'\+r', w4h, board.s4g, '4f4h'),
        (r'r|l', w4h, board.s4g, '4f4h+'),
        (r'\+r|r', w4d, board.s4e, '4f4d'),
        (r'\+r|r', w4c, board.s4e + board.s4d, '4f4c'),
        (r'\+r|r', w4b, board.s4e + board.s4d + board.s4c, '4f4b'),
        (r'\+r|r', w4a, board.s4e + board.s4d + board.s4c + board.s4b, '4f4a'),
        (r'\+r|r', w1f, board.s2f + board.s3f, '4f1f'),
        (r'\+r|r', w2f, board.s3f, '4f2f'),
        (r'\+r|r', w6f, board.s5f, '4f6f'),
        (r'\+r|r', w7f, board.s5f + board.s6f, '4f7f'),
        (r'\+r|r', w8f, board.s5f + board.s6f + board.s7f, '4f8f'),
        (r'\+r|r', w9f, board.s5f + board.s6f + board.s7f + board.s8f, '4f9f'),
        (r'b', w1i, board.s2h + board.s3g, '4f1i+'),
        (r'b', w2h, board.s3g, '4f2h+'),
        (r'\+b', w1i, board.s2h + board.s3g, '4f1i'),
        (r'\+b', w2h, board.s3g, '4f2h'),
        (r'\+b|b', w6d, board.s5e, '4f6d'),
        (r'\+b|b', w7c, board.s5e + board.s6d, '4f7c'),
        (r'\+b|b', w8b, board.s5e + board.s6d + board.s7c, '4f8b'),
        (r'\+b|b', w9a, board.s5e + board.s6d + board.s7c + board.s8b, '4f9a'),
        (r'b', w7i, board.s6h + board.s5g, '4f7i+'),
        (r'b', w6h, board.s5g, '4f6h+'),
        (r'\+b', w7i, board.s6h + board.s5g, '4f7i'),
        (r'\+b', w6h, board.s5g, '4f6h'),
        (r'\+b|b', w2d, board.s3e, '4f2d'),
        (r'\+b|b', w1c, board.s3e + board.s2d, '4f1c'),
    ]
    for pattern, destination, path, move in w4f_candidates:
        # Piece type must match, destination must hold no friendly piece,
        # and every intermediate square must be empty.
        if re.match(pattern, Wboard.w4f) and destination == '' and path == '':
            moves = move  # keep `moves` holding the last generated move, as before
            kaihimore(moves)  # sets oute.oute: would this move leave the king in check?
            if oute.oute == 0:
                depth1.append(moves)
if Wboard.w5f != '':
    # Candidate moves for the white piece standing on 5f, in the original
    # generation order.  Each entry is:
    #   (piece-type regex, destination-square content, concatenation of the
    #    squares that must be empty along the path, move string).
    # Raw strings are used so that '\+' is a literal-plus regex escape and
    # not an invalid Python string escape (SyntaxWarning on modern Pythons).
    # NOTE(review): destination/path values are read eagerly when building
    # this list rather than behind the original short-circuiting `and`;
    # equivalent as long as they are plain locals/attributes — confirm.
    w5f_candidates = [
        (r'[lsgk+]', w5g, '', '5f5g'),
        (r'[sgk+]', w4g, '', '5f4g'),
        (r'[sgk+]', w6g, '', '5f6g'),
        (r'[gk+]', w4f, '', '5f4f'),
        (r'[gk+]', w6f, '', '5f6f'),
        (r'[gk+]', w5e, '', '5f5e'),
        (r'\+r|\+b|s|k', w4e, '', '5f4e'),
        (r'\+r|\+b|s|k', w6e, '', '5f6e'),
        (r'[plsr]', w5g, '', '5f5g+'),
        (r'[bs]', w4g, '', '5f4g+'),
        (r'[bs]', w6g, '', '5f6g+'),
        (r'n', w4h, '', '5f4h+'),
        (r'n', w6h, '', '5f6h+'),
        (r'\+r', w5i, board.s5h + board.s5g, '5f5i'),
        (r'r|l', w5i, board.s5h + board.s5g, '5f5i+'),
        (r'\+r', w5h, board.s5g, '5f5h'),
        (r'r|l', w5h, board.s5g, '5f5h+'),
        (r'\+r|r', w5d, board.s5e, '5f5d'),
        (r'\+r|r', w5c, board.s5e + board.s5d, '5f5c'),
        (r'\+r|r', w5b, board.s5e + board.s5d + board.s5c, '5f5b'),
        (r'\+r|r', w5a, board.s5e + board.s5d + board.s5c + board.s5b, '5f5a'),
        (r'\+r|r', w1f, board.s2f + board.s3f + board.s4f, '5f1f'),
        (r'\+r|r', w2f, board.s3f + board.s4f, '5f2f'),
        (r'\+r|r', w3f, board.s4f, '5f3f'),
        (r'\+r|r', w7f, board.s6f, '5f7f'),
        (r'\+r|r', w8f, board.s6f + board.s7f, '5f8f'),
        (r'\+r|r', w9f, board.s6f + board.s7f + board.s8f, '5f9f'),
        (r'b', w2i, board.s3h + board.s4g, '5f2i+'),
        (r'b', w3h, board.s4g, '5f3h+'),
        (r'\+b', w2i, board.s3h + board.s4g, '5f2i'),
        (r'\+b', w3h, board.s4g, '5f3h'),
        (r'\+b|b', w7d, board.s6e, '5f7d'),
        (r'\+b|b', w8c, board.s6e + board.s7d, '5f8c'),
        (r'\+b|b', w9b, board.s6e + board.s7d + board.s8c, '5f9b'),
        (r'b', w8i, board.s7h + board.s6g, '5f8i+'),
        (r'b', w7h, board.s6g, '5f7h+'),
        (r'\+b', w8i, board.s7h + board.s6g, '5f8i'),
        (r'\+b', w7h, board.s6g, '5f7h'),
        (r'\+b|b', w3d, board.s4e, '5f3d'),
        (r'\+b|b', w2c, board.s4e + board.s3d, '5f2c'),
    ]
    for pattern, destination, path, move in w5f_candidates:
        # Piece type must match, destination must hold no friendly piece,
        # and every intermediate square must be empty.
        if re.match(pattern, Wboard.w5f) and destination == '' and path == '':
            moves = move  # keep `moves` holding the last generated move, as before
            kaihimore(moves)  # sets oute.oute: would this move leave the king in check?
            if oute.oute == 0:
                depth1.append(moves)
if Wboard.w6f != '':
    # Candidate moves for the white piece standing on 6f, in the original
    # generation order.  Each entry is:
    #   (piece-type regex, destination-square content, concatenation of the
    #    squares that must be empty along the path, move string).
    # Raw strings are used so that '\+' is a literal-plus regex escape and
    # not an invalid Python string escape (SyntaxWarning on modern Pythons).
    # NOTE(review): destination/path values are read eagerly when building
    # this list rather than behind the original short-circuiting `and`;
    # equivalent as long as they are plain locals/attributes — confirm.
    w6f_candidates = [
        (r'[lsgk+]', w6g, '', '6f6g'),
        (r'[sgk+]', w5g, '', '6f5g'),
        (r'[sgk+]', w7g, '', '6f7g'),
        (r'[gk+]', w5f, '', '6f5f'),
        (r'[gk+]', w7f, '', '6f7f'),
        (r'[gk+]', w6e, '', '6f6e'),
        (r'\+r|\+b|s|k', w5e, '', '6f5e'),
        (r'\+r|\+b|s|k', w7e, '', '6f7e'),
        (r'[plsr]', w6g, '', '6f6g+'),
        (r'[bs]', w5g, '', '6f5g+'),
        (r'[bs]', w7g, '', '6f7g+'),
        (r'n', w5h, '', '6f5h+'),
        (r'n', w7h, '', '6f7h+'),
        (r'\+r', w6i, board.s6h + board.s6g, '6f6i'),
        (r'r|l', w6i, board.s6h + board.s6g, '6f6i+'),
        (r'\+r', w6h, board.s6g, '6f6h'),
        (r'r|l', w6h, board.s6g, '6f6h+'),
        (r'\+r|r', w6d, board.s6e, '6f6d'),
        (r'\+r|r', w6c, board.s6e + board.s6d, '6f6c'),
        (r'\+r|r', w6b, board.s6e + board.s6d + board.s6c, '6f6b'),
        (r'\+r|r', w6a, board.s6e + board.s6d + board.s6c + board.s6b, '6f6a'),
        (r'\+r|r', w9f, board.s8f + board.s7f, '6f9f'),
        (r'\+r|r', w8f, board.s7f, '6f8f'),
        (r'\+r|r', w4f, board.s5f, '6f4f'),
        (r'\+r|r', w3f, board.s5f + board.s4f, '6f3f'),
        (r'\+r|r', w2f, board.s5f + board.s4f + board.s3f, '6f2f'),
        (r'\+r|r', w1f, board.s5f + board.s4f + board.s3f + board.s2f, '6f1f'),
        (r'b', w9i, board.s8h + board.s7g, '6f9i+'),
        (r'b', w8h, board.s7g, '6f8h+'),
        (r'\+b', w9i, board.s8h + board.s7g, '6f9i'),
        (r'\+b', w8h, board.s7g, '6f8h'),
        (r'\+b|b', w4d, board.s5e, '6f4d'),
        (r'\+b|b', w3c, board.s5e + board.s4d, '6f3c'),
        (r'\+b|b', w2b, board.s5e + board.s4d + board.s3c, '6f2b'),
        (r'\+b|b', w1a, board.s5e + board.s4d + board.s3c + board.s2b, '6f1a'),
        (r'b', w3i, board.s4h + board.s5g, '6f3i+'),
        (r'b', w4h, board.s5g, '6f4h+'),
        (r'\+b', w3i, board.s4h + board.s5g, '6f3i'),
        (r'\+b', w4h, board.s5g, '6f4h'),
        (r'\+b|b', w8d, board.s7e, '6f8d'),
        (r'\+b|b', w9c, board.s7e + board.s8d, '6f9c'),
    ]
    for pattern, destination, path, move in w6f_candidates:
        # Piece type must match, destination must hold no friendly piece,
        # and every intermediate square must be empty.
        if re.match(pattern, Wboard.w6f) and destination == '' and path == '':
            moves = move  # keep `moves` holding the last generated move, as before
            kaihimore(moves)  # sets oute.oute: would this move leave the king in check?
            if oute.oute == 0:
                depth1.append(moves)
if Wboard.w7f != '':
    # Candidate moves for the white piece standing on 7f, in the original
    # generation order.  Each entry is:
    #   (piece-type regex, destination-square content, concatenation of the
    #    squares that must be empty along the path, move string).
    # Raw strings are used so that '\+' is a literal-plus regex escape and
    # not an invalid Python string escape (SyntaxWarning on modern Pythons).
    # NOTE(review): destination/path values are read eagerly when building
    # this list rather than behind the original short-circuiting `and`;
    # equivalent as long as they are plain locals/attributes — confirm.
    w7f_candidates = [
        (r'[lsgk+]', w7g, '', '7f7g'),
        (r'[sgk+]', w6g, '', '7f6g'),
        (r'[sgk+]', w8g, '', '7f8g'),
        (r'[gk+]', w6f, '', '7f6f'),
        (r'[gk+]', w8f, '', '7f8f'),
        (r'[gk+]', w7e, '', '7f7e'),
        (r'\+r|\+b|s|k', w6e, '', '7f6e'),
        (r'\+r|\+b|s|k', w8e, '', '7f8e'),
        (r'[plsr]', w7g, '', '7f7g+'),
        (r'[bs]', w6g, '', '7f6g+'),
        (r'[bs]', w8g, '', '7f8g+'),
        (r'n', w6h, '', '7f6h+'),
        (r'n', w8h, '', '7f8h+'),
        (r'\+r', w7i, board.s7h + board.s7g, '7f7i'),
        (r'r|l', w7i, board.s7h + board.s7g, '7f7i+'),
        (r'\+r', w7h, board.s7g, '7f7h'),
        (r'r|l', w7h, board.s7g, '7f7h+'),
        (r'\+r|r', w7d, board.s7e, '7f7d'),
        (r'\+r|r', w7c, board.s7e + board.s7d, '7f7c'),
        (r'\+r|r', w7b, board.s7e + board.s7d + board.s7c, '7f7b'),
        (r'\+r|r', w7a, board.s7e + board.s7d + board.s7c + board.s7b, '7f7a'),
        (r'\+r|r', w9f, board.s8f, '7f9f'),
        (r'\+r|r', w5f, board.s6f, '7f5f'),
        (r'\+r|r', w4f, board.s6f + board.s5f, '7f4f'),
        (r'\+r|r', w3f, board.s6f + board.s5f + board.s4f, '7f3f'),
        (r'\+r|r', w2f, board.s6f + board.s5f + board.s4f + board.s3f, '7f2f'),
        (r'\+r|r', w1f, board.s6f + board.s5f + board.s4f + board.s3f + board.s2f, '7f1f'),
        (r'b', w9h, board.s8g, '7f9h+'),
        (r'\+b', w9h, board.s8g, '7f9h'),
        (r'\+b|b', w5d, board.s6e, '7f5d'),
        (r'\+b|b', w4c, board.s6e + board.s5d, '7f4c'),
        (r'\+b|b', w3b, board.s6e + board.s5d + board.s4c, '7f3b'),
        (r'\+b|b', w2a, board.s6e + board.s5d + board.s4c + board.s3b, '7f2a'),
        (r'b', w4i, board.s5h + board.s6g, '7f4i+'),
        (r'b', w5h, board.s6g, '7f5h+'),
        (r'\+b', w4i, board.s5h + board.s6g, '7f4i'),
        (r'\+b', w5h, board.s6g, '7f5h'),
        (r'\+b|b', w9d, board.s8e, '7f9d'),
    ]
    for pattern, destination, path, move in w7f_candidates:
        # Piece type must match, destination must hold no friendly piece,
        # and every intermediate square must be empty.
        if re.match(pattern, Wboard.w7f) and destination == '' and path == '':
            moves = move  # keep `moves` holding the last generated move, as before
            kaihimore(moves)  # sets oute.oute: would this move leave the king in check?
            if oute.oute == 0:
                depth1.append(moves)
if Wboard.w8f != '':
    # Candidate moves for the white piece standing on 8f, in the original
    # generation order.  Each entry is:
    #   (piece-type regex, destination-square content, concatenation of the
    #    squares that must be empty along the path, move string).
    # Raw strings are used so that '\+' is a literal-plus regex escape and
    # not an invalid Python string escape (SyntaxWarning on modern Pythons).
    # NOTE(review): destination/path values are read eagerly when building
    # this list rather than behind the original short-circuiting `and`;
    # equivalent as long as they are plain locals/attributes — confirm.
    w8f_candidates = [
        (r'[lsgk+]', w8g, '', '8f8g'),
        (r'[sgk+]', w7g, '', '8f7g'),
        (r'[sgk+]', w9g, '', '8f9g'),
        (r'[gk+]', w7f, '', '8f7f'),
        (r'[gk+]', w9f, '', '8f9f'),
        (r'[gk+]', w8e, '', '8f8e'),
        (r'\+r|\+b|s|k', w7e, '', '8f7e'),
        (r'\+r|\+b|s|k', w9e, '', '8f9e'),
        (r'[plsr]', w8g, '', '8f8g+'),
        (r'[bs]', w7g, '', '8f7g+'),
        (r'[bs]', w9g, '', '8f9g+'),
        (r'n', w7h, '', '8f7h+'),
        (r'n', w9h, '', '8f9h+'),
        (r'\+r', w8i, board.s8h + board.s8g, '8f8i'),
        (r'r|l', w8i, board.s8h + board.s8g, '8f8i+'),
        (r'\+r', w8h, board.s8g, '8f8h'),
        (r'r|l', w8h, board.s8g, '8f8h+'),
        (r'\+r|r', w8d, board.s8e, '8f8d'),
        (r'\+r|r', w8c, board.s8e + board.s8d, '8f8c'),
        (r'\+r|r', w8b, board.s8e + board.s8d + board.s8c, '8f8b'),
        (r'\+r|r', w8a, board.s8e + board.s8d + board.s8c + board.s8b, '8f8a'),
        (r'\+r|r', w6f, board.s7f, '8f6f'),
        (r'\+r|r', w5f, board.s7f + board.s6f, '8f5f'),
        (r'\+r|r', w4f, board.s7f + board.s6f + board.s5f, '8f4f'),
        (r'\+r|r', w3f, board.s7f + board.s6f + board.s5f + board.s4f, '8f3f'),
        (r'\+r|r', w2f, board.s7f + board.s6f + board.s5f + board.s4f + board.s3f, '8f2f'),
        (r'\+r|r', w1f, board.s7f + board.s6f + board.s5f + board.s4f + board.s3f + board.s2f, '8f1f'),
        (r'\+b|b', w6d, board.s7e, '8f6d'),
        (r'\+b|b', w5c, board.s7e + board.s6d, '8f5c'),
        (r'\+b|b', w4b, board.s7e + board.s6d + board.s5c, '8f4b'),
        (r'\+b|b', w3a, board.s7e + board.s6d + board.s5c + board.s4b, '8f3a'),
        (r'b', w5i, board.s6h + board.s7g, '8f5i+'),
        (r'b', w6h, board.s7g, '8f6h+'),
        (r'\+b', w5i, board.s6h + board.s7g, '8f5i'),
        (r'\+b', w6h, board.s7g, '8f6h'),
    ]
    for pattern, destination, path, move in w8f_candidates:
        # Piece type must match, destination must hold no friendly piece,
        # and every intermediate square must be empty.
        if re.match(pattern, Wboard.w8f) and destination == '' and path == '':
            moves = move  # keep `moves` holding the last generated move, as before
            kaihimore(moves)  # sets oute.oute: would this move leave the king in check?
            if oute.oute == 0:
                depth1.append(moves)
if Wboard.w9f != '':
    # Candidate moves for the white piece standing on 9f, in the original
    # generation order (edge file: no squares to the left of 9).  Each
    # entry is:
    #   (piece-type regex, destination-square content, concatenation of the
    #    squares that must be empty along the path, move string).
    # Raw strings are used so that '\+' is a literal-plus regex escape and
    # not an invalid Python string escape (SyntaxWarning on modern Pythons).
    # NOTE(review): destination/path values are read eagerly when building
    # this list rather than behind the original short-circuiting `and`;
    # equivalent as long as they are plain locals/attributes — confirm.
    w9f_candidates = [
        (r'[lsgk+]', w9g, '', '9f9g'),
        (r'[sgk+]', w8g, '', '9f8g'),
        (r'[gk+]', w8f, '', '9f8f'),
        (r'[gk+]', w9e, '', '9f9e'),
        (r'\+r|\+b|s|k', w8e, '', '9f8e'),
        (r'[plsr]', w9g, '', '9f9g+'),
        (r'[bs]', w8g, '', '9f8g+'),
        (r'n', w8h, '', '9f8h+'),
        (r'\+r', w9i, board.s9h + board.s9g, '9f9i'),
        (r'r|l', w9i, board.s9h + board.s9g, '9f9i+'),
        (r'\+r', w9h, board.s9g, '9f9h'),
        (r'r|l', w9h, board.s9g, '9f9h+'),
        (r'\+r|r', w9d, board.s9e, '9f9d'),
        (r'\+r|r', w9c, board.s9e + board.s9d, '9f9c'),
        (r'\+r|r', w9b, board.s9e + board.s9d + board.s9c, '9f9b'),
        (r'\+r|r', w9a, board.s9e + board.s9d + board.s9c + board.s9b, '9f9a'),
        (r'\+r|r', w7f, board.s8f, '9f7f'),
        (r'\+r|r', w6f, board.s8f + board.s7f, '9f6f'),
        (r'\+r|r', w5f, board.s8f + board.s7f + board.s6f, '9f5f'),
        (r'\+r|r', w4f, board.s8f + board.s7f + board.s6f + board.s5f, '9f4f'),
        (r'\+r|r', w3f, board.s8f + board.s7f + board.s6f + board.s5f + board.s4f, '9f3f'),
        (r'\+r|r', w2f, board.s8f + board.s7f + board.s6f + board.s5f + board.s4f + board.s3f, '9f2f'),
        (r'\+r|r', w1f, board.s8f + board.s7f + board.s6f + board.s5f + board.s4f + board.s3f + board.s2f, '9f1f'),
        (r'\+b|b', w7d, board.s8e, '9f7d'),
        (r'\+b|b', w6c, board.s8e + board.s7d, '9f6c'),
        (r'\+b|b', w5b, board.s8e + board.s7d + board.s6c, '9f5b'),
        (r'\+b|b', w4a, board.s8e + board.s7d + board.s6c + board.s5b, '9f4a'),
        (r'b', w6i, board.s7h + board.s8g, '9f6i+'),
        (r'b', w7h, board.s8g, '9f7h+'),
        (r'\+b', w6i, board.s7h + board.s8g, '9f6i'),
        (r'\+b', w7h, board.s8g, '9f7h'),
    ]
    for pattern, destination, path, move in w9f_candidates:
        # Piece type must match, destination must hold no friendly piece,
        # and every intermediate square must be empty.
        if re.match(pattern, Wboard.w9f) and destination == '' and path == '':
            moves = move  # keep `moves` holding the last generated move, as before
            kaihimore(moves)  # sets oute.oute: would this move leave the king in check?
            if oute.oute == 0:
                depth1.append(moves)
# --- Escape/move generation for the white piece standing on square 1e ---
# Each candidate move is validated by kaihimore() (does it resolve check?);
# it is kept only when oute.oute == 0 afterwards.
if Wboard.w1e !='':
if re.match(r'[plsgrk+]', Wboard.w1e)and w1f=='':
moves = '1e1f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[sgbk+]', Wboard.w1e)and w2f=='':
moves = '1e2f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[grk+]', Wboard.w1e)and w2e=='':
moves = '1e2e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[grk+]', Wboard.w1e)and w1d=='':
moves = '1e1d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|\+b|b|s|k',Wboard.w1e)and w2d=='':
moves = '1e2d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('n', Wboard.w1e)and w2g=='':
moves = '1e2g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('n', Wboard.w1e)and w2g=='':
moves = '1e2g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w1e)and w1i==''\
and board.s1h+board.s1g+board.s1f=='':
moves = '1e1i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'r|l', Wboard.w1e)and w1i==''\
and board.s1h+board.s1g+board.s1f=='':
moves = '1e1i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w1e)and w1h==''\
and board.s1g+board.s1f=='':
moves = '1e1h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'r|l', Wboard.w1e)and w1h==''\
and board.s1g+board.s1f=='':
moves = '1e1h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|l', Wboard.w1e)and w1g==''\
and board.s1f=='':
moves = '1e1g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'r|l', Wboard.w1e)and w1g==''\
and board.s1f=='':
moves = '1e1g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w1e)and w1c==''\
and board.s1d=='':
moves = '1e1c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w1e)and w1b==''\
and board.s1d+board.s1c=='':
moves = '1e1b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w1e)and w1a==''\
and board.s1d+board.s1c+board.s1b=='':
moves = '1e1a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w1e)and w3e==''\
and board.s2e=='':
moves = '1e3e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w1e)and w4e==''\
and board.s2e+board.s3e=='':
moves = '1e4e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w1e)and w5e==''\
and board.s2e+board.s3e+board.s4e=='':
moves = '1e5e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w1e)and w6e==''\
and board.s2e+board.s3e+board.s4e+board.s5e=='':
moves = '1e6e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w1e)and w7e==''\
and board.s2e+board.s3e+board.s4e+board.s5e+board.s6e=='':
moves = '1e7e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w1e)and w8e==''\
and board.s2e+board.s3e+board.s4e+board.s5e+board.s6e+board.s7e=='':
moves = '1e8e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w1e)and w9e==''\
and board.s2e+board.s3e+board.s4e+board.s5e+board.s6e+board.s7e+board.s8e=='':
moves = '1e9e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b|b', Wboard.w1e)and w3c==''\
and board.s2d=='':
moves = '1e3c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b|b', Wboard.w1e)and w4b==''\
and board.s2d+board.s3c=='':
moves = '1e4b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b|b', Wboard.w1e)and w5a==''\
and board.s2d+board.s3c+board.s4b=='':
moves = '1e5a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w1e)and w5i==''\
and board.s4h+board.s3g+board.s2f=='':
moves = '1e5i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w1e)and w4h==''\
and board.s3g+board.s2f=='':
moves = '1e4h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w1e)and w3g==''\
and board.s2f=='':
moves = '1e3g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w1e)and w5i==''\
and board.s4h+board.s3g+board.s2f=='':
moves = '1e5i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w1e)and w4h==''\
and board.s3g+board.s2f=='':
moves = '1e4h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b',Wboard.w1e)and w3g==''\
and board.s2f=='':
moves = '1e3g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
# --- Escape/move generation for the white piece standing on square 2e ---
# Sliding moves test intermediate squares via board.sXY concatenation
# (all must be empty); destination wXY must hold no friendly piece.
if Wboard.w2e !='':
if re.match(r'[plsgrk+]', Wboard.w2e)and w2f=='':
moves = '2e2f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[sgbk+]', Wboard.w2e)and w1f=='':
moves = '2e1f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[sgbk+]', Wboard.w2e)and w3f=='':
moves = '2e3f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[grk+]', Wboard.w2e)and w1e=='':
moves = '2e1e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[grk+]', Wboard.w2e)and w3e=='':
moves = '2e3e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[grk+]', Wboard.w2e)and w2d=='':
moves = '2e2d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|\+b|b|s|k',Wboard.w2e)and w1d=='':
moves = '2e1d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|\+b|b|s|k',Wboard.w2e)and w3d=='':
moves = '2e3d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('n', Wboard.w2e)and w1g=='':
moves = '2e1g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('n', Wboard.w2e)and w3g=='':
moves = '2e3g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('n', Wboard.w2e)and w1g=='':
moves = '2e1g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('n', Wboard.w2e)and w3g=='':
moves = '2e3g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w2e)and w2i==''\
and board.s2h+board.s2g+board.s2f=='':
moves = '2e2i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'r|l', Wboard.w2e)and w2i==''\
and board.s2h+board.s2g+board.s2f=='':
moves = '2e2i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w2e)and w2h==''\
and board.s2g+board.s2f=='':
moves = '2e2h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'r|l', Wboard.w2e)and w2h==''\
and board.s2g+board.s2f=='':
moves = '2e2h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|l', Wboard.w2e)and w2g==''\
and board.s2f=='':
moves = '2e2g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'r|l', Wboard.w2e)and w2g==''\
and board.s2f=='':
moves = '2e2g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w2e)and w2c==''\
and board.s2d=='':
moves = '2e2c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w2e)and w2b==''\
and board.s2d+board.s2c=='':
moves = '2e2b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w2e)and w2a==''\
and board.s2d+board.s2c+board.s2b=='':
moves = '2e2a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w2e)and w4e==''\
and board.s3e=='':
moves = '2e4e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w2e)and w5e==''\
and board.s3e+board.s4e=='':
moves = '2e5e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w2e)and w6e==''\
and board.s3e+board.s4e+board.s5e=='':
moves = '2e6e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w2e)and w7e==''\
and board.s3e+board.s4e+board.s5e+board.s6e=='':
moves = '2e7e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w2e)and w8e==''\
and board.s3e+board.s4e+board.s5e+board.s6e+board.s7e=='':
moves = '2e8e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w2e)and w9e==''\
and board.s3e+board.s4e+board.s5e+board.s6e+board.s7e+board.s8e=='':
moves = '2e9e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b|b', Wboard.w2e)and w4c==''\
and board.s3d=='':
moves = '2e4c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b|b', Wboard.w2e)and w5b==''\
and board.s3d+board.s4c=='':
moves = '2e5b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b|b', Wboard.w2e)and w6a==''\
and board.s3d+board.s4c+board.s5b=='':
moves = '2e6a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w2e)and w6i==''\
and board.s5h+board.s4g+board.s3f=='':
moves = '2e6i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w2e)and w5h==''\
and board.s4g+board.s3f=='':
moves = '2e5h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w2e)and w4g==''\
and board.s3f=='':
moves = '2e4g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w2e)and w6i==''\
and board.s5h+board.s4g+board.s3f=='':
moves = '2e6i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w2e)and w5h==''\
and board.s4g+board.s3f=='':
moves = '2e5h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b',Wboard.w2e)and w4g==''\
and board.s3f=='':
moves = '2e4g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
# --- Escape/move generation for the white piece standing on square 3e ---
if Wboard.w3e !='':
if re.match(r'[plsgrk+]', Wboard.w3e)and w3f=='':
moves = '3e3f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[sgbk+]', Wboard.w3e)and w2f=='':
moves = '3e2f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[sgbk+]', Wboard.w3e)and w4f=='':
moves = '3e4f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[grk+]', Wboard.w3e)and w2e=='':
moves = '3e2e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[grk+]', Wboard.w3e)and w4e=='':
moves = '3e4e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[grk+]', Wboard.w3e)and w3d=='':
moves = '3e3d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|\+b|b|s|k',Wboard.w3e)and w2d=='':
moves = '3e2d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|\+b|b|s|k',Wboard.w3e)and w4d=='':
moves = '3e4d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('n', Wboard.w3e)and w2g=='':
moves = '3e2g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('n', Wboard.w3e)and w4g=='':
moves = '3e4g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('n', Wboard.w3e)and w2g=='':
moves = '3e2g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('n', Wboard.w3e)and w4g=='':
moves = '3e4g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w3e)and w3i==''\
and board.s3h+board.s3g+board.s3f=='':
moves = '3e3i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'r|l', Wboard.w3e)and w3i==''\
and board.s3h+board.s3g+board.s3f=='':
moves = '3e3i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w3e)and w3h==''\
and board.s3g+board.s3f=='':
moves = '3e3h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'r|l', Wboard.w3e)and w3h==''\
and board.s3g+board.s3f=='':
moves = '3e3h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|l', Wboard.w3e)and w3g==''\
and board.s3f=='':
moves = '3e3g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'r|l', Wboard.w3e)and w3g==''\
and board.s3f=='':
moves = '3e3g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w3e)and w3c==''\
and board.s3d=='':
moves = '3e3c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w3e)and w3b==''\
and board.s3d+board.s3c=='':
moves = '3e3b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w3e)and w3a==''\
and board.s3d+board.s3c+board.s3b=='':
moves = '3e3a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w3e)and w1e==''\
and board.s2e=='':
moves = '3e1e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w3e)and w5e==''\
and board.s4e=='':
moves = '3e5e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w3e)and w6e==''\
and board.s4e+board.s5e=='':
moves = '3e6e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w3e)and w7e==''\
and board.s4e+board.s5e+board.s6e=='':
moves = '3e7e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w3e)and w8e==''\
and board.s4e+board.s5e+board.s6e+board.s7e=='':
moves = '3e8e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w3e)and w9e==''\
and board.s4e+board.s5e+board.s6e+board.s7e+board.s8e=='':
moves = '3e9e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w3e)and w1g==''\
and board.s2f=='':
moves = '3e1g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b',Wboard.w3e)and w1g==''\
and board.s2f=='':
moves = '3e1g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
# Bishop / promoted bishop on 3e sliding up-left: 3e -> 4d -> 5c -> 6b -> 7a.
# BUG FIX: the square between 3e and 5c on this diagonal is 4d, not 2d
# (2d lies on the opposite, up-right diagonal). The original tested
# board.s2d in all three path checks, so these moves were generated or
# blocked based on the wrong square. Compare the analogous (correct)
# 2e->4c/5b/6a checks, which use s3d, and 4e->6c/7b/8a, which use s5d.
if re.match(r'\+b|b', Wboard.w3e)and w5c==''\
and board.s4d=='':
    moves = '3e5c'
    kaihimore(moves)
    if oute.oute == 0:
        depth1.append(moves)
if re.match(r'\+b|b', Wboard.w3e)and w6b==''\
and board.s4d+board.s5c=='':
    moves = '3e6b'
    kaihimore(moves)
    if oute.oute == 0:
        depth1.append(moves)
if re.match(r'\+b|b', Wboard.w3e)and w7a==''\
and board.s4d+board.s5c+board.s6b=='':
    moves = '3e7a'
    kaihimore(moves)
    if oute.oute == 0:
        depth1.append(moves)
if re.match('b',Wboard.w3e)and w7i==''\
and board.s6h+board.s5g+board.s4f=='':
moves = '3e7i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w3e)and w6h==''\
and board.s5g+board.s4f=='':
moves = '3e6h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w3e)and w5g==''\
and board.s4f=='':
moves = '3e5g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w3e)and w7i==''\
and board.s6h+board.s5g+board.s4f=='':
moves = '3e7i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w3e)and w6h==''\
and board.s5g+board.s4f=='':
moves = '3e6h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b',Wboard.w3e)and w5g==''\
and board.s4f=='':
moves = '3e5g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b|b', Wboard.w3e)and w1c==''\
and board.s2d=='':
moves = '3e1c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
# --- Escape/move generation for the white piece standing on square 4e ---
if Wboard.w4e !='':
if re.match(r'[plsgrk+]', Wboard.w4e)and w4f=='':
moves = '4e4f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[sgbk+]', Wboard.w4e)and w3f=='':
moves = '4e3f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[sgbk+]', Wboard.w4e)and w5f=='':
moves = '4e5f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[grk+]', Wboard.w4e)and w3e=='':
moves = '4e3e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[grk+]', Wboard.w4e)and w5e=='':
moves = '4e5e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[grk+]', Wboard.w4e)and w4d=='':
moves = '4e4d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|\+b|b|s|k',Wboard.w4e)and w3d=='':
moves = '4e3d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|\+b|b|s|k',Wboard.w4e)and w5d=='':
moves = '4e5d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('n', Wboard.w4e)and w3g=='':
moves = '4e3g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('n', Wboard.w4e)and w5g=='':
moves = '4e5g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('n', Wboard.w4e)and w3g=='':
moves = '4e3g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('n', Wboard.w4e)and w5g=='':
moves = '4e5g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w4e)and w4i==''\
and board.s4h+board.s4g+board.s4f=='':
moves = '4e4i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'r|l', Wboard.w4e)and w4i==''\
and board.s4h+board.s4g+board.s4f=='':
moves = '4e4i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w4e)and w4h==''\
and board.s4g+board.s4f=='':
moves = '4e4h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'r|l', Wboard.w4e)and w4h==''\
and board.s4g+board.s4f=='':
moves = '4e4h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|l', Wboard.w4e)and w4g==''\
and board.s4f=='':
moves = '4e4g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'r|l', Wboard.w4e)and w4g==''\
and board.s4f=='':
moves = '4e4g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w4e)and w4c==''\
and board.s4d=='':
moves = '4e4c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w4e)and w4b==''\
and board.s4d+board.s4c=='':
moves = '4e4b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w4e)and w4a==''\
and board.s4d+board.s4c+board.s4b=='':
moves = '4e4a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w4e)and w1e==''\
and board.s2e+board.s3e=='':
moves = '4e1e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w4e)and w2e==''\
and board.s3e=='':
moves = '4e2e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w4e)and w6e==''\
and board.s5e=='':
moves = '4e6e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w4e)and w7e==''\
and board.s5e+board.s6e=='':
moves = '4e7e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w4e)and w8e==''\
and board.s5e+board.s6e+board.s7e=='':
moves = '4e8e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w4e)and w9e==''\
and board.s5e+board.s6e+board.s7e+board.s8e=='':
moves = '4e9e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w4e)and w1h==''\
and board.s2g+board.s3f=='':
moves = '4e1h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w4e)and w2g==''\
and board.s3f=='':
moves = '4e2g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w4e)and w1h==''\
and board.s2g+board.s3f=='':
moves = '4e1h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b',Wboard.w4e)and w2g==''\
and board.s3f=='':
moves = '4e2g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b|b', Wboard.w4e)and w6c==''\
and board.s5d=='':
moves = '4e6c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b|b', Wboard.w4e)and w7b==''\
and board.s5d+board.s6c=='':
moves = '4e7b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b|b', Wboard.w4e)and w8a==''\
and board.s5d+board.s6c+board.s7b=='':
moves = '4e8a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w4e)and w8i==''\
and board.s7h+board.s6g+board.s5f=='':
moves = '4e8i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w4e)and w7h==''\
and board.s6g+board.s5f=='':
moves = '4e7h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w4e)and w6g==''\
and board.s5f=='':
moves = '4e6g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w4e)and w8i==''\
and board.s7h+board.s6g+board.s5f=='':
moves = '4e8i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w4e)and w7h==''\
and board.s6g+board.s5f=='':
moves = '4e7h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b',Wboard.w4e)and w6g==''\
and board.s5f=='':
moves = '4e6g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b|b', Wboard.w4e)and w2c==''\
and board.s3d=='':
moves = '4e2c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b|b', Wboard.w4e)and w1b==''\
and board.s3d+board.s2c=='':
moves = '4e1b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
# --- Escape/move generation for the white piece standing on square 5e ---
if Wboard.w5e !='':
if re.match(r'[plsgrk+]', Wboard.w5e)and w5f=='':
moves = '5e5f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[sgbk+]', Wboard.w5e)and w4f=='':
moves = '5e4f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[sgbk+]', Wboard.w5e)and w6f=='':
moves = '5e6f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[grk+]', Wboard.w5e)and w4e=='':
moves = '5e4e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[grk+]', Wboard.w5e)and w6e=='':
moves = '5e6e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[grk+]', Wboard.w5e)and w5d=='':
moves = '5e5d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|\+b|b|s|k',Wboard.w5e)and w4d=='':
moves = '5e4d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|\+b|b|s|k',Wboard.w5e)and w6d=='':
moves = '5e6d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('n', Wboard.w5e)and w4g=='':
moves = '5e4g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('n', Wboard.w5e)and w6g=='':
moves = '5e6g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('n', Wboard.w5e)and w4g=='':
moves = '5e4g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('n', Wboard.w5e)and w6g=='':
moves = '5e6g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w5e)and w5i==''\
and board.s5h+board.s5g+board.s5f=='':
moves = '5e5i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'r|l', Wboard.w5e)and w5i==''\
and board.s5h+board.s5g+board.s5f=='':
moves = '5e5i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w5e)and w5h==''\
and board.s5g+board.s5f=='':
moves = '5e5h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'r|l', Wboard.w5e)and w5h==''\
and board.s5g+board.s5f=='':
moves = '5e5h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|l', Wboard.w5e)and w5g==''\
and board.s5f=='':
moves = '5e5g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'r|l', Wboard.w5e)and w5g==''\
and board.s5f=='':
moves = '5e5g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w5e)and w5c==''\
and board.s5d=='':
moves = '5e5c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w5e)and w5b==''\
and board.s5d+board.s5c=='':
moves = '5e5b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w5e)and w5a==''\
and board.s5d+board.s5c+board.s5b=='':
moves = '5e5a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w5e)and w1e==''\
and board.s2e+board.s3e+board.s4e=='':
moves = '5e1e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w5e)and w2e==''\
and board.s3e+board.s4e=='':
moves = '5e2e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w5e)and w3e==''\
and board.s4e=='':
moves = '5e3e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w5e)and w7e==''\
and board.s6e=='':
moves = '5e7e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w5e)and w8e==''\
and board.s6e+board.s7e=='':
moves = '5e8e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w5e)and w9e==''\
and board.s6e+board.s7e+board.s8e=='':
moves = '5e9e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w5e)and w1i==''\
and board.s2h+board.s3g+board.s4f=='':
moves = '5e1i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w5e)and w2h==''\
and board.s3g+board.s4f=='':
moves = '5e2h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w5e)and w3g==''\
and board.s4f=='':
moves = '5e3g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w5e)and w1i==''\
and board.s2h+board.s3g+board.s4f=='':
moves = '5e1i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w5e)and w2h==''\
and board.s3g+board.s4f=='':
moves = '5e2h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b',Wboard.w5e)and w3g==''\
and board.s4f=='':
moves = '5e3g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b|b', Wboard.w5e)and w7c==''\
and board.s6d=='':
moves = '5e7c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b|b', Wboard.w5e)and w8b==''\
and board.s6d+board.s7c=='':
moves = '5e8b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b|b', Wboard.w5e)and w9a==''\
and board.s6d+board.s7c+board.s8b=='':
moves = '5e9a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w5e)and w9i==''\
and board.s8h+board.s7g+board.s6f=='':
moves = '5e9i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w5e)and w8h==''\
and board.s7g+board.s6f=='':
moves = '5e8h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w5e)and w7g==''\
and board.s6f=='':
moves = '5e7g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w5e)and w9i==''\
and board.s8h+board.s7g+board.s6f=='':
moves = '5e9i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w5e)and w8h==''\
and board.s7g+board.s6f=='':
moves = '5e8h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b',Wboard.w5e)and w7g==''\
and board.s6f=='':
moves = '5e7g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b|b', Wboard.w5e)and w3c==''\
and board.s4d=='':
moves = '5e3c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b|b', Wboard.w5e)and w2b==''\
and board.s4d+board.s3c=='':
moves = '5e2b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b|b', Wboard.w5e)and w1a==''\
and board.s4d+board.s3c+board.s2b=='':
moves = '5e1a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
# --- Escape/move generation for the white piece standing on square 6e ---
if Wboard.w6e !='':
if re.match(r'[plsgrk+]', Wboard.w6e)and w6f=='':
moves = '6e6f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[sgbk+]', Wboard.w6e)and w5f=='':
moves = '6e5f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[sgbk+]', Wboard.w6e)and w7f=='':
moves = '6e7f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[grk+]', Wboard.w6e)and w5e=='':
moves = '6e5e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[grk+]', Wboard.w6e)and w7e=='':
moves = '6e7e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[grk+]', Wboard.w6e)and w6d=='':
moves = '6e6d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|\+b|b|s|k',Wboard.w6e)and w5d=='':
moves = '6e5d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|\+b|b|s|k',Wboard.w6e)and w7d=='':
moves = '6e7d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('n', Wboard.w6e)and w5g=='':
moves = '6e5g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('n', Wboard.w6e)and w7g=='':
moves = '6e7g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('n', Wboard.w6e)and w5g=='':
moves = '6e5g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('n', Wboard.w6e)and w7g=='':
moves = '6e7g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w6e)and w6i==''\
and board.s6h+board.s6g+board.s6f=='':
moves = '6e6i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'r|l', Wboard.w6e)and w6i==''\
and board.s6h+board.s6g+board.s6f=='':
moves = '6e6i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w6e)and w6h==''\
and board.s6g+board.s6f=='':
moves = '6e6h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'r|l', Wboard.w6e)and w6h==''\
and board.s6g+board.s6f=='':
moves = '6e6h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|l', Wboard.w6e)and w6g==''\
and board.s6f=='':
moves = '6e6g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'r|l', Wboard.w6e)and w6g==''\
and board.s6f=='':
moves = '6e6g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w6e)and w6c==''\
and board.s6d=='':
moves = '6e6c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w6e)and w6b==''\
and board.s6d+board.s6c=='':
moves = '6e6b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w6e)and w6a==''\
and board.s6d+board.s6c+board.s6b=='':
moves = '6e6a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w6e)and w9e==''\
and board.s8e+board.s7e=='':
moves = '6e9e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w6e)and w8e==''\
and board.s7e=='':
moves = '6e8e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w6e)and w4e==''\
and board.s5e=='':
moves = '6e4e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w6e)and w3e==''\
and board.s5e+board.s4e=='':
moves = '6e3e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w6e)and w2e==''\
and board.s5e+board.s4e+board.s3e=='':
moves = '6e2e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w6e)and w1e==''\
and board.s5e+board.s4e+board.s3e+board.s2e=='':
moves = '6e1e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w6e)and w9h==''\
and board.s8g+board.s7f=='':
moves = '6e9h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w6e)and w8g==''\
and board.s7f=='':
moves = '6e8g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w6e)and w9h==''\
and board.s8g+board.s7f=='':
moves = '6e9h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b',Wboard.w6e)and w8g==''\
and board.s7f=='':
moves = '6e8g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b|b', Wboard.w6e)and w4c==''\
and board.s5d=='':
moves = '6e4c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b|b', Wboard.w6e)and w3b==''\
and board.s5d+board.s4c=='':
moves = '6e3b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b|b', Wboard.w6e)and w2a==''\
and board.s5d+board.s4c+board.s3b=='':
moves = '6e2a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w6e)and w2i==''\
and board.s3h+board.s4g+board.s5f=='':
moves = '6e2i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w6e)and w3h==''\
and board.s4g+board.s5f=='':
moves = '6e3h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w6e)and w4g==''\
and board.s5f=='':
moves = '6e4g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w6e)and w2i==''\
and board.s3h+board.s4g+board.s5f=='':
moves = '6e2i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w6e)and w3h==''\
and board.s4g+board.s5f=='':
moves = '6e3h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b',Wboard.w6e)and w4g==''\
and board.s5f=='':
moves = '6e4g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b|b', Wboard.w6e)and w8c==''\
and board.s7d=='':
moves = '6e8c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b|b', Wboard.w6e)and w9b==''\
and board.s7d+board.s8c=='':
moves = '6e9b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.w7e !='':
if re.match(r'[plsgrk+]', Wboard.w7e)and w7f=='':
moves = '7e7f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[sgbk+]', Wboard.w7e)and w6f=='':
moves = '7e6f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[sgbk+]', Wboard.w7e)and w8f=='':
moves = '7e8f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[grk+]', Wboard.w7e)and w6e=='':
moves = '7e6e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[grk+]', Wboard.w7e)and w8e=='':
moves = '7e8e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[grk+]', Wboard.w7e)and w7d=='':
moves = '7e7d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|\+b|b|s|k',Wboard.w7e)and w6d=='':
moves = '7e6d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|\+b|b|s|k',Wboard.w7e)and w8d=='':
moves = '7e8d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('n', Wboard.w7e)and w6g=='':
moves = '7e6g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('n', Wboard.w7e)and w8g=='':
moves = '7e8g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('n', Wboard.w7e)and w6g=='':
moves = '7e6g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('n', Wboard.w7e)and w8g=='':
moves = '7e8g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w7e)and w7i==''\
and board.s7h+board.s7g+board.s7f=='':
moves = '7e7i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'r|l', Wboard.w7e)and w7i==''\
and board.s7h+board.s7g+board.s7f=='':
moves = '7e7i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w7e)and w7h==''\
and board.s7g+board.s7f=='':
moves = '7e7h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'r|l', Wboard.w7e)and w7h==''\
and board.s7g+board.s7f=='':
moves = '7e7h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|l', Wboard.w7e)and w7g==''\
and board.s7f=='':
moves = '7e7g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'r|l', Wboard.w7e)and w7g==''\
and board.s7f=='':
moves = '7e7g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w7e)and w7c==''\
and board.s7d=='':
moves = '7e7c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w7e)and w7b==''\
and board.s7d+board.s7c=='':
moves = '7e7b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w7e)and w7a==''\
and board.s7d+board.s7c+board.s7b=='':
moves = '7e7a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w7e)and w9e==''\
and board.s8e=='':
moves = '7e9e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w7e)and w5e==''\
and board.s6e=='':
moves = '7e5e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w7e)and w4e==''\
and board.s6e+board.s5e=='':
moves = '7e4e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w7e)and w3e==''\
and board.s6e+board.s5e+board.s4e=='':
moves = '7e3e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w7e)and w2e==''\
and board.s6e+board.s5e+board.s4e+board.s3e=='':
moves = '7e2e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w7e)and w1e==''\
and board.s6e+board.s5e+board.s4e+board.s3e+board.s2e=='':
moves = '7e1e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w7e)and w9g==''\
and board.s8f=='':
moves = '7e9g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b',Wboard.w7e)and w9g==''\
and board.s8f=='':
moves = '7e9g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
# Bishop (or promoted bishop) on 7e sliding up-right: 7e -> 5c -> 4b -> 3a.
# BUG FIX: the path-clear tests wrongly checked board.s8d (the first square
# of the up-LEFT diagonal, used by the 7e->9c branch below) instead of
# board.s6d, the square actually crossed on the way to 5c/4b/3a.  The
# sibling blocks follow the correct pattern: 6e->4c checks s5d, 8e->6c
# checks s7d, 9e->7c checks s8d.
if re.match(r'\+b|b', Wboard.w7e)and w5c==''\
and board.s6d=='':
    moves = '7e5c'
    kaihimore(moves)
    if oute.oute == 0:
        depth1.append(moves)
if re.match(r'\+b|b', Wboard.w7e)and w4b==''\
and board.s6d+board.s5c=='':
    moves = '7e4b'
    kaihimore(moves)
    if oute.oute == 0:
        depth1.append(moves)
if re.match(r'\+b|b', Wboard.w7e)and w3a==''\
and board.s6d+board.s5c+board.s4b=='':
    moves = '7e3a'
    kaihimore(moves)
    if oute.oute == 0:
        depth1.append(moves)
if re.match('b',Wboard.w7e)and w3i==''\
and board.s4h+board.s5g+board.s6f=='':
moves = '7e3i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w7e)and w4h==''\
and board.s5g+board.s6f=='':
moves = '7e4h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w7e)and w5g==''\
and board.s6f=='':
moves = '7e5g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w7e)and w3i==''\
and board.s4h+board.s5g+board.s6f=='':
moves = '7e3i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w7e)and w4h==''\
and board.s5g+board.s6f=='':
moves = '7e4h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b',Wboard.w7e)and w5g==''\
and board.s6f=='':
moves = '7e5g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b|b', Wboard.w7e)and w9c==''\
and board.s8d=='':
moves = '7e9c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.w8e !='':
if re.match(r'[plsgrk+]', Wboard.w8e)and w8f=='':
moves = '8e8f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[sgbk+]', Wboard.w8e)and w7f=='':
moves = '8e7f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[sgbk+]', Wboard.w8e)and w9f=='':
moves = '8e9f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[grk+]', Wboard.w8e)and w7e=='':
moves = '8e7e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[grk+]', Wboard.w8e)and w9e=='':
moves = '8e9e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[grk+]', Wboard.w8e)and w8d=='':
moves = '8e8d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|\+b|b|s|k',Wboard.w8e)and w7d=='':
moves = '8e7d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|\+b|b|s|k',Wboard.w8e)and w9d=='':
moves = '8e9d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('n', Wboard.w8e)and w7g=='':
moves = '8e7g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('n', Wboard.w8e)and w9g=='':
moves = '8e9g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('n', Wboard.w8e)and w7g=='':
moves = '8e7g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('n', Wboard.w8e)and w9g=='':
moves = '8e9g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w8e)and w8i==''\
and board.s8h+board.s8g+board.s8f=='':
moves = '8e8i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'r|l', Wboard.w8e)and w8i==''\
and board.s8h+board.s8g+board.s8f=='':
moves = '8e8i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w8e)and w8h==''\
and board.s8g+board.s8f=='':
moves = '8e8h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'r|l', Wboard.w8e)and w8h==''\
and board.s8g+board.s8f=='':
moves = '8e8h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|l', Wboard.w8e)and w8g==''\
and board.s8f=='':
moves = '8e8g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'r|l', Wboard.w8e)and w8g==''\
and board.s8f=='':
moves = '8e8g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w8e)and w8c==''\
and board.s8d=='':
moves = '8e8c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w8e)and w8b==''\
and board.s8d+board.s8c=='':
moves = '8e8b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w8e)and w8a==''\
and board.s8d+board.s8c+board.s8b=='':
moves = '8e8a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w8e)and w6e==''\
and board.s7e=='':
moves = '8e6e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w8e)and w5e==''\
and board.s7e+board.s6e=='':
moves = '8e5e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w8e)and w4e==''\
and board.s7e+board.s6e+board.s5e=='':
moves = '8e4e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w8e)and w3e==''\
and board.s7e+board.s6e+board.s5e+board.s4e=='':
moves = '8e3e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w8e)and w2e==''\
and board.s7e+board.s6e+board.s5e+board.s4e+board.s3e=='':
moves = '8e2e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w8e)and w1e==''\
and board.s7e+board.s6e+board.s5e+board.s4e+board.s3e+board.s2e=='':
moves = '8e1e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b|b', Wboard.w8e)and w6c==''\
and board.s7d=='':
moves = '8e6c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b|b', Wboard.w8e)and w5b==''\
and board.s7d+board.s6c=='':
moves = '8e5b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b|b', Wboard.w8e)and w4a==''\
and board.s7d+board.s6c+board.s5b=='':
moves = '8e4a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w8e)and w4i==''\
and board.s5h+board.s6g+board.s7f=='':
moves = '8e4i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w8e)and w5h==''\
and board.s6g+board.s7f=='':
moves = '8e5h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w8e)and w6g==''\
and board.s7f=='':
moves = '8e6g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w8e)and w4i==''\
and board.s5h+board.s6g+board.s7f=='':
moves = '8e4i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w8e)and w5h==''\
and board.s6g+board.s7f=='':
moves = '8e5h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b',Wboard.w8e)and w6g==''\
and board.s7f=='':
moves = '8e6g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.w9e !='':
if re.match(r'[plsgrk+]', Wboard.w9e)and w9f=='':
moves = '9e9f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[sgbk+]', Wboard.w9e)and w8f=='':
moves = '9e8f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[grk+]', Wboard.w9e)and w8e=='':
moves = '9e8e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[grk+]', Wboard.w9e)and w9d=='':
moves = '9e9d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|\+b|b|s|k',Wboard.w9e)and w8d=='':
moves = '9e8d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('n', Wboard.w9e)and w8g=='':
moves = '9e8g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('n', Wboard.w9e)and w8g=='':
moves = '9e8g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w9e)and w9i==''\
and board.s9h+board.s9g+board.s9f=='':
moves = '9e9i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'r|l', Wboard.w9e)and w9i==''\
and board.s9h+board.s9g+board.s9f=='':
moves = '9e9i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w9e)and w9h==''\
and board.s9g+board.s9f=='':
moves = '9e9h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'r|l', Wboard.w9e)and w9h==''\
and board.s9g+board.s9f=='':
moves = '9e9h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|l', Wboard.w9e)and w9g==''\
and board.s9f=='':
moves = '9e9g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'r|l', Wboard.w9e)and w9g==''\
and board.s9f=='':
moves = '9e9g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w9e)and w9c==''\
and board.s9d=='':
moves = '9e9c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w9e)and w9b==''\
and board.s9d+board.s9c=='':
moves = '9e9b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
# Rook (or promoted rook) on 9e sliding to 9a; intermediate squares
# 9d, 9c and 9b must all be empty of opposing pieces and 9a free of an
# own piece.
# BUG FIX: the original emitted this exact block twice in a row, so
# '9e9a' was appended to depth1 twice; the redundant second copy is
# removed.
if re.match(r'\+r|r', Wboard.w9e)and w9a==''\
and board.s9d+board.s9c+board.s9b=='':
    moves = '9e9a'
    kaihimore(moves)
    if oute.oute == 0:
        depth1.append(moves)
if re.match(r'\+r|r', Wboard.w9e)and w7e==''\
and board.s8e=='':
moves = '9e7e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w9e)and w6e==''\
and board.s8e+board.s7e=='':
moves = '9e6e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w9e)and w5e==''\
and board.s8e+board.s7e+board.s6e=='':
moves = '9e5e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w9e)and w4e==''\
and board.s8e+board.s7e+board.s6e+board.s5e=='':
moves = '9e4e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w9e)and w3e==''\
and board.s8e+board.s7e+board.s6e+board.s5e+board.s4e=='':
moves = '9e3e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w9e)and w2e==''\
and board.s8e+board.s7e+board.s6e+board.s5e+board.s4e+board.s3e=='':
moves = '9e2e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w9e)and w1e==''\
and board.s8e+board.s7e+board.s6e+board.s5e+board.s4e+board.s3e+board.s2e=='':
moves = '9e1e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b|b', Wboard.w9e)and w7c==''\
and board.s8d=='':
moves = '9e7c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b|b', Wboard.w9e)and w6b==''\
and board.s8d+board.s7c=='':
moves = '9e6b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b|b', Wboard.w9e)and w5a==''\
and board.s8d+board.s7c+board.s6b=='':
moves = '9e5a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w9e)and w5i==''\
and board.s6h+board.s7g+board.s8f=='':
moves = '9e5i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w9e)and w6h==''\
and board.s7g+board.s8f=='':
moves = '9e6h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w9e)and w7g==''\
and board.s8f=='':
moves = '9e7g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w9e)and w5i==''\
and board.s6h+board.s7g+board.s8f=='':
moves = '9e5i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w9e)and w6h==''\
and board.s7g+board.s8f=='':
moves = '9e6h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b',Wboard.w9e)and w7g==''\
and board.s8f=='':
moves = '9e7g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.w1d !='':
if re.match(r'[plsgrk+]', Wboard.w1d)and w1e=='':
moves = '1d1e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[sgbk+]', Wboard.w1d)and w2e=='':
moves = '1d2e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[grk+]', Wboard.w1d)and w2d=='':
moves = '1d2d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[grk+]', Wboard.w1d)and w1c=='':
moves = '1d1c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|\+b|b|s|k',Wboard.w1d)and w2c=='':
moves = '1d2c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('n', Wboard.w1d)and w2f=='':
moves = '1d2f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w1d)and w1i==''\
and board.s1h+board.s1g+board.s1f+board.s1e=='':
moves = '1d1i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'r|l', Wboard.w1d)and w1i==''\
and board.s1h+board.s1g+board.s1f+board.s1e=='':
moves = '1d1i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w1d)and w1h==''\
and board.s1g+board.s1f+board.s1e=='':
moves = '1d1h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'r|l', Wboard.w1d)and w1h==''\
and board.s1g+board.s1f+board.s1e=='':
moves = '1d1h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|l', Wboard.w1d)and w1g==''\
and board.s1f+board.s1e=='':
moves = '1d1g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'r|l', Wboard.w1d)and w1g==''\
and board.s1f+board.s1e=='':
moves = '1d1g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r|l', Wboard.w1d)and w1f==''\
and board.s1e=='':
moves = '1d1f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w1d)and w1b==''\
and board.s1c=='':
moves = '1d1b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w1d)and w1a==''\
and board.s1c+board.s1b=='':
moves = '1d1a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w1d)and w3d==''\
and board.s2d=='':
moves = '1d3d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w1d)and w4d==''\
and board.s2d+board.s3d=='':
moves = '1d4d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w1d)and w5d==''\
and board.s2d+board.s3d+board.s4d=='':
moves = '1d5d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w1d)and w6d==''\
and board.s2d+board.s3d+board.s4d+board.s5d=='':
moves = '1d6d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w1d)and w7d==''\
and board.s2d+board.s3d+board.s4d+board.s5d+board.s6d=='':
moves = '1d7d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w1d)and w8d==''\
and board.s2d+board.s3d+board.s4d+board.s5d+board.s6d+board.s7d=='':
moves = '1d8d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w1d)and w9d==''\
and board.s2d+board.s3d+board.s4d+board.s5d+board.s6d+board.s7d+board.s8d=='':
moves = '1d9d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b|b', Wboard.w1d)and w3f==''\
and board.s2e=='':
moves = '1d3f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w1d)and w4g==''\
and board.s2e+board.s3f=='':
moves = '1d4g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w1d)and w5h==''\
and board.s2e+board.s3f+board.s4g=='':
moves = '1d5h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w1d)and w6i==''\
and board.s2e+board.s3f+board.s4g+board.s5h=='':
moves = '1d6i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w1d)and w4a==''\
and board.s3b+board.s2c=='':
moves = '1d4a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w1d)and w3b==''\
and board.s2c=='':
moves = '1d3b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b', Wboard.w1d)and w4g==''\
and board.s2e+board.s3f=='':
moves = '1d4g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b', Wboard.w1d)and w5h==''\
and board.s2e+board.s3f+board.s4g=='':
moves = '1d5h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b', Wboard.w1d)and w6i==''\
and board.s2e+board.s3f+board.s4g+board.s5h=='':
moves = '1d6i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.w2d !='':
if re.match(r'[plsgrk+]', Wboard.w2d)and w2e=='':
moves = '2d2e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[sgbk+]', Wboard.w2d)and w1e=='':
moves = '2d1e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[sgbk+]', Wboard.w2d)and w3e=='':
moves = '2d3e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[grk+]', Wboard.w2d)and w1d=='':
moves = '2d1d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[grk+]', Wboard.w2d)and w3d=='':
moves = '2d3d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[grk+]', Wboard.w2d)and w2c=='':
moves = '2d2c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|\+b|b|s|k',Wboard.w2d)and w1c=='':
moves = '2d1c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|\+b|b|s|k',Wboard.w2d)and w3c=='':
moves = '2d3c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('n', Wboard.w2d)and w1f=='':
moves = '2d1f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('n', Wboard.w2d)and w3f=='':
moves = '2d3f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w2d)and w2i==''\
and board.s2h+board.s2g+board.s2f+board.s2e=='':
moves = '2d2i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'r|l', Wboard.w2d)and w2i==''\
and board.s2h+board.s2g+board.s2f+board.s2e=='':
moves = '2d2i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w2d)and w2h==''\
and board.s2g+board.s2f+board.s2e=='':
moves = '2d2h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'r|l', Wboard.w2d)and w2h==''\
and board.s2g+board.s2f+board.s2e=='':
moves = '2d2h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|l', Wboard.w2d)and w2g==''\
and board.s2f+board.s2e=='':
moves = '2d2g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'r|l', Wboard.w2d)and w2g==''\
and board.s2f+board.s2e=='':
moves = '2d2g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r|l', Wboard.w2d)and w2f==''\
and board.s2e=='':
moves = '2d2f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w2d)and w2b==''\
and board.s2c=='':
moves = '2d2b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w2d)and w2a==''\
and board.s2c+board.s2b=='':
moves = '2d2a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w2d)and w4d==''\
and board.s3d=='':
moves = '2d4d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w2d)and w5d==''\
and board.s3d+board.s4d=='':
moves = '2d5d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w2d)and w6d==''\
and board.s3d+board.s4d+board.s5d=='':
moves = '2d6d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w2d)and w7d==''\
and board.s3d+board.s4d+board.s5d+board.s6d=='':
moves = '2d7d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w2d)and w8d==''\
and board.s3d+board.s4d+board.s5d+board.s6d+board.s7d=='':
moves = '2d8d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w2d)and w9d==''\
and board.s3d+board.s4d+board.s5d+board.s6d+board.s7d+board.s8d=='':
moves = '2d9d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b|b', Wboard.w2d)and w4f==''\
and board.s3e=='':
moves = '2d4f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w2d)and w5g==''\
and board.s3e+board.s4f=='':
moves = '2d5g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w2d)and w6h==''\
and board.s3e+board.s4f+board.s5g=='':
moves = '2d6h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w2d)and w7i==''\
and board.s3e+board.s4f+board.s5g+board.s6h=='':
moves = '2d7i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b|b', Wboard.w2d)and w5a==''\
and board.s4b+board.s3c=='':
moves = '2d5a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b|b', Wboard.w2d)and w4b==''\
and board.s3c=='':
moves = '2d4b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b', Wboard.w2d)and w5g==''\
and board.s3e+board.s4f=='':
moves = '2d5g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b', Wboard.w2d)and w6h==''\
and board.s3e+board.s4f+board.s5g=='':
moves = '2d6h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b', Wboard.w2d)and w7i==''\
and board.s3e+board.s4f+board.s5g+board.s6h=='':
moves = '2d7i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.w3d !='':
if re.match(r'[plsgrk+]', Wboard.w3d)and w3e=='':
moves = '3d3e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[sgbk+]', Wboard.w3d)and w2e=='':
moves = '3d2e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[sgbk+]', Wboard.w3d)and w4e=='':
moves = '3d4e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[grk+]', Wboard.w3d)and w2d=='':
moves = '3d2d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[grk+]', Wboard.w3d)and w4d=='':
moves = '3d4d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[grk+]', Wboard.w3d)and w3c=='':
moves = '3d3c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|\+b|b|s|k',Wboard.w3d)and w2c=='':
moves = '3d2c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|\+b|b|s|k',Wboard.w3d)and w4c=='':
moves = '3d4c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('n', Wboard.w3d)and w2f=='':
moves = '3d2f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('n', Wboard.w3d)and w4f=='':
moves = '3d4f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w3d)and w3i==''\
and board.s3h+board.s3g+board.s3f+board.s3e=='':
moves = '3d3i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'r|l', Wboard.w3d)and w3i==''\
and board.s3h+board.s3g+board.s3f+board.s3e=='':
moves = '3d3i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w3d)and w3h==''\
and board.s3g+board.s3f+board.s3e=='':
moves = '3d3h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'r|l', Wboard.w3d)and w3h==''\
and board.s3g+board.s3f+board.s3e=='':
moves = '3d3h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|l', Wboard.w3d)and w3g==''\
and board.s3f+board.s3e=='':
moves = '3d3g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'r|l', Wboard.w3d)and w3g==''\
and board.s3f+board.s3e=='':
moves = '3d3g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r|l', Wboard.w3d)and w3f==''\
and board.s3e=='':
moves = '3d3f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w3d)and w3b==''\
and board.s3c=='':
moves = '3d3b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w3d)and w3a==''\
and board.s3c+board.s3b=='':
moves = '3d3a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w3d)and w1d==''\
and board.s2d=='':
moves = '3d1d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w3d)and w5d==''\
and board.s4d=='':
moves = '3d5d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w3d)and w6d==''\
and board.s4d+board.s5d=='':
moves = '3d6d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w3d)and w7d==''\
and board.s4d+board.s5d+board.s6d=='':
moves = '3d7d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w3d)and w8d==''\
and board.s4d+board.s5d+board.s6d+board.s7d=='':
moves = '3d8d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w3d)and w9d==''\
and board.s4d+board.s5d+board.s6d+board.s7d+board.s8d=='':
moves = '3d9d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b|b', Wboard.w3d)and w1b==''\
and board.s2c=='':
moves = '3d1b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b|b', Wboard.w3d)and w5f==''\
and board.s4e=='':
moves = '3d5f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w3d)and w6g==''\
and board.s4e+board.s5f=='':
moves = '3d6g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w3d)and w7h==''\
and board.s4e+board.s5f+board.s6g=='':
moves = '3d7h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w3d)and w8i==''\
and board.s4e+board.s5f+board.s6g+board.s7h=='':
moves = '3d8i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b|b', Wboard.w3d)and w6a==''\
and board.s5b+board.s4c=='':
moves = '3d6a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b|b', Wboard.w3d)and w5b==''\
and board.s4c=='':
moves = '3d5b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b|b', Wboard.w3d)and w1f==''\
and board.s2e=='':
moves = '3d1f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b', Wboard.w3d)and w6g==''\
and board.s4e+board.s5f=='':
moves = '3d6g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b', Wboard.w3d)and w7h==''\
and board.s4e+board.s5f+board.s6g=='':
moves = '3d7h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b', Wboard.w3d)and w8i==''\
and board.s4e+board.s5f+board.s6g+board.s7h=='':
moves = '3d8i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.w4d != '':
    # Candidate moves for the white piece standing on 4d.
    # Table entry: (piece regex, occupancy of the destination square,
    # intermediate board squares that must all be empty, move string);
    # a trailing '+' on the move string denotes promotion.
    # Raw strings fix the original '\+r'/'\+b' invalid escape sequences.
    # NOTE(review): destination occupancies (w4e, ...) are snapshotted when
    # this tuple is built; assumes kaihimore() never rebinds them — TODO
    # confirm.  Path squares ARE re-read from `board` before every probe.
    for _pat, _dst, _path, _mv in (
        (r'[plsgrk+]', w4e, (), '4d4e'),
        (r'[sgbk+]', w3e, (), '4d3e'),
        (r'[sgbk+]', w5e, (), '4d5e'),
        (r'[grk+]', w3d, (), '4d3d'),
        (r'[grk+]', w5d, (), '4d5d'),
        (r'[grk+]', w4c, (), '4d4c'),
        (r'\+r|\+b|b|s|k', w3c, (), '4d3c'),
        (r'\+r|\+b|b|s|k', w5c, (), '4d5c'),
        (r'n', w3f, (), '4d3f'),
        (r'n', w5f, (), '4d5f'),
        (r'\+r', w4i, ('s4h', 's4g', 's4f', 's4e'), '4d4i'),
        (r'r|l', w4i, ('s4h', 's4g', 's4f', 's4e'), '4d4i+'),
        (r'\+r', w4h, ('s4g', 's4f', 's4e'), '4d4h'),
        (r'r|l', w4h, ('s4g', 's4f', 's4e'), '4d4h+'),
        (r'\+r|l', w4g, ('s4f', 's4e'), '4d4g'),
        (r'r|l', w4g, ('s4f', 's4e'), '4d4g+'),
        (r'\+r|r|l', w4f, ('s4e',), '4d4f'),
        (r'\+r|r', w4b, ('s4c',), '4d4b'),
        (r'\+r|r', w4a, ('s4c', 's4b'), '4d4a'),
        (r'\+r|r', w1d, ('s2d', 's3d'), '4d1d'),
        (r'\+r|r', w2d, ('s3d',), '4d2d'),
        (r'\+r|r', w6d, ('s5d',), '4d6d'),
        (r'\+r|r', w7d, ('s5d', 's6d'), '4d7d'),
        (r'\+r|r', w8d, ('s5d', 's6d', 's7d'), '4d8d'),
        (r'\+r|r', w9d, ('s5d', 's6d', 's7d', 's8d'), '4d9d'),
        (r'\+b|b', w1a, ('s2b', 's3c'), '4d1a'),
        (r'\+b|b', w2b, ('s3c',), '4d2b'),
        (r'\+b|b', w6f, ('s5e',), '4d6f'),
        (r'\+b', w7g, ('s5e', 's6f'), '4d7g'),
        (r'\+b', w8h, ('s5e', 's6f', 's7g'), '4d8h'),
        (r'\+b', w9i, ('s5e', 's6f', 's7g', 's8h'), '4d9i'),
        (r'\+b|b', w7a, ('s6b', 's5c'), '4d7a'),
        (r'\+b|b', w6b, ('s5c',), '4d6b'),
        (r'\+b|b', w2f, ('s3e',), '4d2f'),
        (r'\+b', w1g, ('s3e', 's2f'), '4d1g'),
        (r'b', w7g, ('s5e', 's6f'), '4d7g+'),
        (r'b', w8h, ('s5e', 's6f', 's7g'), '4d8h+'),
        (r'b', w1g, ('s3e', 's2f'), '4d1g+'),
        (r'b', w9i, ('s5e', 's6f', 's7g', 's8h'), '4d9i+'),
    ):
        if re.match(_pat, Wboard.w4d) and _dst == '' \
                and all(getattr(board, _sq) == '' for _sq in _path):
            moves = _mv
            kaihimore(moves)        # probe: does this move resolve the check?
            if oute.oute == 0:      # 0 -> own king no longer in check
                depth1.append(moves)
if Wboard.w5d != '':
    # Candidate moves for the white piece standing on 5d.
    # Table entry: (piece regex, occupancy of the destination square,
    # intermediate board squares that must all be empty, move string);
    # a trailing '+' on the move string denotes promotion.
    # Raw strings fix the original '\+r'/'\+b' invalid escape sequences.
    # NOTE(review): destination occupancies (w5e, ...) are snapshotted when
    # this tuple is built; assumes kaihimore() never rebinds them — TODO
    # confirm.  Path squares ARE re-read from `board` before every probe.
    for _pat, _dst, _path, _mv in (
        (r'[plsgrk+]', w5e, (), '5d5e'),
        (r'[sgbk+]', w4e, (), '5d4e'),
        (r'[sgbk+]', w6e, (), '5d6e'),
        (r'[grk+]', w4d, (), '5d4d'),
        (r'[grk+]', w6d, (), '5d6d'),
        (r'[grk+]', w5c, (), '5d5c'),
        (r'\+r|\+b|b|s|k', w4c, (), '5d4c'),
        (r'\+r|\+b|b|s|k', w6c, (), '5d6c'),
        (r'n', w4f, (), '5d4f'),
        (r'n', w6f, (), '5d6f'),
        (r'\+r', w5i, ('s5h', 's5g', 's5f', 's5e'), '5d5i'),
        (r'r|l', w5i, ('s5h', 's5g', 's5f', 's5e'), '5d5i+'),
        (r'\+r', w5h, ('s5g', 's5f', 's5e'), '5d5h'),
        (r'r|l', w5h, ('s5g', 's5f', 's5e'), '5d5h+'),
        (r'\+r|l', w5g, ('s5f', 's5e'), '5d5g'),
        (r'r|l', w5g, ('s5f', 's5e'), '5d5g+'),
        (r'\+r|r|l', w5f, ('s5e',), '5d5f'),
        (r'\+r|r', w5b, ('s5c',), '5d5b'),
        (r'\+r|r', w5a, ('s5c', 's5b'), '5d5a'),
        (r'\+r|r', w1d, ('s2d', 's3d', 's4d'), '5d1d'),
        (r'\+r|r', w2d, ('s3d', 's4d'), '5d2d'),
        (r'\+r|r', w3d, ('s4d',), '5d3d'),
        (r'\+r|r', w7d, ('s6d',), '5d7d'),
        (r'\+r|r', w8d, ('s6d', 's7d'), '5d8d'),
        (r'\+r|r', w9d, ('s6d', 's7d', 's8d'), '5d9d'),
        (r'\+b|b', w2a, ('s3b', 's4c'), '5d2a'),
        (r'\+b|b', w3b, ('s4c',), '5d3b'),
        (r'\+b|b', w7f, ('s6e',), '5d7f'),
        (r'\+b', w8g, ('s6e', 's7f'), '5d8g'),
        (r'\+b', w9h, ('s6e', 's7f', 's8g'), '5d9h'),
        (r'\+b|b', w8a, ('s7b', 's6c'), '5d8a'),
        (r'\+b|b', w7b, ('s6c',), '5d7b'),
        (r'\+b|b', w3f, ('s4e',), '5d3f'),
        (r'\+b', w2g, ('s4e', 's3f'), '5d2g'),
        (r'b', w8g, ('s6e', 's7f'), '5d8g+'),
        (r'b', w2g, ('s4e', 's3f'), '5d2g+'),
    ):
        if re.match(_pat, Wboard.w5d) and _dst == '' \
                and all(getattr(board, _sq) == '' for _sq in _path):
            moves = _mv
            kaihimore(moves)        # probe: does this move resolve the check?
            if oute.oute == 0:      # 0 -> own king no longer in check
                depth1.append(moves)
if Wboard.w6d != '':
    # Candidate moves for the white piece standing on 6d.
    # Table entry: (piece regex, occupancy of the destination square,
    # intermediate board squares that must all be empty, move string);
    # a trailing '+' on the move string denotes promotion.
    # Raw strings fix the original '\+r'/'\+b' invalid escape sequences.
    # NOTE(review): destination occupancies (w6e, ...) are snapshotted when
    # this tuple is built; assumes kaihimore() never rebinds them — TODO
    # confirm.  Path squares ARE re-read from `board` before every probe.
    for _pat, _dst, _path, _mv in (
        (r'[plsgrk+]', w6e, (), '6d6e'),
        (r'[sgbk+]', w5e, (), '6d5e'),
        (r'[sgbk+]', w7e, (), '6d7e'),
        (r'[grk+]', w5d, (), '6d5d'),
        (r'[grk+]', w7d, (), '6d7d'),
        (r'[grk+]', w6c, (), '6d6c'),
        (r'\+r|\+b|b|s|k', w5c, (), '6d5c'),
        (r'\+r|\+b|b|s|k', w7c, (), '6d7c'),
        (r'n', w5f, (), '6d5f'),
        (r'n', w7f, (), '6d7f'),
        (r'\+r', w6i, ('s6h', 's6g', 's6f', 's6e'), '6d6i'),
        (r'r|l', w6i, ('s6h', 's6g', 's6f', 's6e'), '6d6i+'),
        (r'\+r', w6h, ('s6g', 's6f', 's6e'), '6d6h'),
        (r'r|l', w6h, ('s6g', 's6f', 's6e'), '6d6h+'),
        (r'\+r|l', w6g, ('s6f', 's6e'), '6d6g'),
        (r'r|l', w6g, ('s6f', 's6e'), '6d6g+'),
        (r'\+r|r|l', w6f, ('s6e',), '6d6f'),
        (r'\+r|r', w6b, ('s6c',), '6d6b'),
        (r'\+r|r', w6a, ('s6c', 's6b'), '6d6a'),
        (r'\+r|r', w9d, ('s8d', 's7d'), '6d9d'),
        (r'\+r|r', w8d, ('s7d',), '6d8d'),
        (r'\+r|r', w4d, ('s5d',), '6d4d'),
        (r'\+r|r', w3d, ('s5d', 's4d'), '6d3d'),
        (r'\+r|r', w2d, ('s5d', 's4d', 's3d'), '6d2d'),
        (r'\+r|r', w1d, ('s5d', 's4d', 's3d', 's2d'), '6d1d'),
        (r'\+b|b', w9a, ('s8b', 's7c'), '6d9a'),
        (r'\+b|b', w8b, ('s7c',), '6d8b'),
        (r'\+b|b', w4f, ('s5e',), '6d4f'),
        (r'\+b', w3g, ('s5e', 's4f'), '6d3g'),
        (r'\+b', w2h, ('s5e', 's4f', 's3g'), '6d2h'),
        (r'\+b', w1i, ('s5e', 's4f', 's3g', 's2h'), '6d1i'),
        (r'\+b|b', w3a, ('s4b', 's5c'), '6d3a'),
        (r'\+b|b', w4b, ('s5c',), '6d4b'),
        (r'\+b|b', w8f, ('s7e',), '6d8f'),
        (r'\+b', w9g, ('s7e', 's8f'), '6d9g'),
        (r'b', w3g, ('s5e', 's4f'), '6d3g+'),
        (r'b', w2h, ('s5e', 's4f', 's3g'), '6d2h+'),
        (r'b', w9g, ('s7e', 's8f'), '6d9g+'),
        (r'b', w1i, ('s5e', 's4f', 's3g', 's2h'), '6d1i+'),
    ):
        if re.match(_pat, Wboard.w6d) and _dst == '' \
                and all(getattr(board, _sq) == '' for _sq in _path):
            moves = _mv
            kaihimore(moves)        # probe: does this move resolve the check?
            if oute.oute == 0:      # 0 -> own king no longer in check
                depth1.append(moves)
if Wboard.w7d != '':
    # Candidate moves for the white piece standing on 7d.
    # Table entry: (piece regex, occupancy of the destination square,
    # intermediate board squares that must all be empty, move string);
    # a trailing '+' on the move string denotes promotion.
    # Raw strings fix the original '\+r'/'\+b' invalid escape sequences.
    # NOTE(review): destination occupancies (w7e, ...) are snapshotted when
    # this tuple is built; assumes kaihimore() never rebinds them — TODO
    # confirm.  Path squares ARE re-read from `board` before every probe.
    for _pat, _dst, _path, _mv in (
        (r'[plsgrk+]', w7e, (), '7d7e'),
        (r'[sgbk+]', w6e, (), '7d6e'),
        (r'[sgbk+]', w8e, (), '7d8e'),
        (r'[grk+]', w6d, (), '7d6d'),
        (r'[grk+]', w8d, (), '7d8d'),
        (r'[grk+]', w7c, (), '7d7c'),
        (r'\+r|\+b|b|s|k', w6c, (), '7d6c'),
        (r'\+r|\+b|b|s|k', w8c, (), '7d8c'),
        (r'n', w6f, (), '7d6f'),
        (r'n', w8f, (), '7d8f'),
        (r'\+r', w7i, ('s7h', 's7g', 's7f', 's7e'), '7d7i'),
        (r'r|l', w7i, ('s7h', 's7g', 's7f', 's7e'), '7d7i+'),
        (r'\+r', w7h, ('s7g', 's7f', 's7e'), '7d7h'),
        (r'r|l', w7h, ('s7g', 's7f', 's7e'), '7d7h+'),
        (r'\+r|l', w7g, ('s7f', 's7e'), '7d7g'),
        (r'r|l', w7g, ('s7f', 's7e'), '7d7g+'),
        (r'\+r|r|l', w7f, ('s7e',), '7d7f'),
        (r'\+r|r', w7b, ('s7c',), '7d7b'),
        (r'\+r|r', w7a, ('s7c', 's7b'), '7d7a'),
        (r'\+r|r', w9d, ('s8d',), '7d9d'),
        (r'\+r|r', w5d, ('s6d',), '7d5d'),
        (r'\+r|r', w4d, ('s6d', 's5d'), '7d4d'),
        (r'\+r|r', w3d, ('s6d', 's5d', 's4d'), '7d3d'),
        (r'\+r|r', w2d, ('s6d', 's5d', 's4d', 's3d'), '7d2d'),
        (r'\+r|r', w1d, ('s6d', 's5d', 's4d', 's3d', 's2d'), '7d1d'),
        (r'\+b|b', w9b, ('s8c',), '7d9b'),
        (r'\+b|b', w5f, ('s6e',), '7d5f'),
        (r'\+b', w4g, ('s6e', 's5f'), '7d4g'),
        (r'\+b', w3h, ('s6e', 's5f', 's4g'), '7d3h'),
        (r'\+b', w2i, ('s6e', 's5f', 's4g', 's3h'), '7d2i'),
        (r'\+b|b', w4a, ('s5b', 's6c'), '7d4a'),
        (r'\+b|b', w5b, ('s6c',), '7d5b'),
        (r'\+b|b', w9f, ('s8e',), '7d9f'),
        (r'b', w4g, ('s6e', 's5f'), '7d4g+'),
        (r'b', w3h, ('s6e', 's5f', 's4g'), '7d3h+'),
        (r'b', w2i, ('s6e', 's5f', 's4g', 's3h'), '7d2i+'),
    ):
        if re.match(_pat, Wboard.w7d) and _dst == '' \
                and all(getattr(board, _sq) == '' for _sq in _path):
            moves = _mv
            kaihimore(moves)        # probe: does this move resolve the check?
            if oute.oute == 0:      # 0 -> own king no longer in check
                depth1.append(moves)
if Wboard.w8d != '':
    # Candidate moves for the white piece standing on 8d.
    # Table entry: (piece regex, occupancy of the destination square,
    # intermediate board squares that must all be empty, move string);
    # a trailing '+' on the move string denotes promotion.
    # Raw strings fix the original '\+r'/'\+b' invalid escape sequences.
    # NOTE(review): destination occupancies (w8e, ...) are snapshotted when
    # this tuple is built; assumes kaihimore() never rebinds them — TODO
    # confirm.  Path squares ARE re-read from `board` before every probe.
    for _pat, _dst, _path, _mv in (
        (r'[plsgrk+]', w8e, (), '8d8e'),
        (r'[sgbk+]', w7e, (), '8d7e'),
        (r'[sgbk+]', w9e, (), '8d9e'),
        (r'[grk+]', w7d, (), '8d7d'),
        (r'[grk+]', w9d, (), '8d9d'),
        (r'[grk+]', w8c, (), '8d8c'),
        (r'\+r|\+b|b|s|k', w7c, (), '8d7c'),
        (r'\+r|\+b|b|s|k', w9c, (), '8d9c'),
        (r'n', w7f, (), '8d7f'),
        (r'n', w9f, (), '8d9f'),
        (r'\+r', w8i, ('s8h', 's8g', 's8f', 's8e'), '8d8i'),
        (r'r|l', w8i, ('s8h', 's8g', 's8f', 's8e'), '8d8i+'),
        (r'\+r', w8h, ('s8g', 's8f', 's8e'), '8d8h'),
        (r'r|l', w8h, ('s8g', 's8f', 's8e'), '8d8h+'),
        (r'\+r|l', w8g, ('s8f', 's8e'), '8d8g'),
        (r'r|l', w8g, ('s8f', 's8e'), '8d8g+'),
        (r'\+r|r|l', w8f, ('s8e',), '8d8f'),
        (r'\+r|r', w8b, ('s8c',), '8d8b'),
        (r'\+r|r', w8a, ('s8c', 's8b'), '8d8a'),
        (r'\+r|r', w6d, ('s7d',), '8d6d'),
        (r'\+r|r', w5d, ('s7d', 's6d'), '8d5d'),
        (r'\+r|r', w4d, ('s7d', 's6d', 's5d'), '8d4d'),
        (r'\+r|r', w3d, ('s7d', 's6d', 's5d', 's4d'), '8d3d'),
        (r'\+r|r', w2d, ('s7d', 's6d', 's5d', 's4d', 's3d'), '8d2d'),
        (r'\+r|r', w1d, ('s7d', 's6d', 's5d', 's4d', 's3d', 's2d'), '8d1d'),
        (r'\+b|b', w6f, ('s7e',), '8d6f'),
        (r'\+b', w5g, ('s7e', 's6f'), '8d5g'),
        (r'\+b', w4h, ('s7e', 's6f', 's5g'), '8d4h'),
        (r'\+b', w3i, ('s7e', 's6f', 's5g', 's4h'), '8d3i'),
        (r'\+b|b', w5a, ('s6b', 's7c'), '8d5a'),
        (r'\+b|b', w6b, ('s7c',), '8d6b'),
        (r'b', w5g, ('s7e', 's6f'), '8d5g+'),
        (r'b', w4h, ('s7e', 's6f', 's5g'), '8d4h+'),
        (r'b', w3i, ('s7e', 's6f', 's5g', 's4h'), '8d3i+'),
    ):
        if re.match(_pat, Wboard.w8d) and _dst == '' \
                and all(getattr(board, _sq) == '' for _sq in _path):
            moves = _mv
            kaihimore(moves)        # probe: does this move resolve the check?
            if oute.oute == 0:      # 0 -> own king no longer in check
                depth1.append(moves)
if Wboard.w9d != '':
    # Candidate moves for the white piece standing on 9d (edge file, so
    # no squares exist further left).
    # Table entry: (piece regex, occupancy of the destination square,
    # intermediate board squares that must all be empty, move string);
    # a trailing '+' on the move string denotes promotion.
    # Raw strings fix the original '\+r'/'\+b' invalid escape sequences.
    # NOTE(review): '9d6a'/'9d7b' accept only promoted bishops (r'\+b')
    # here, unlike sibling groups which also accept plain 'b' — possible
    # bug in the original, preserved as-is.
    # NOTE(review): destination occupancies (w9e, ...) are snapshotted when
    # this tuple is built; assumes kaihimore() never rebinds them — TODO
    # confirm.  Path squares ARE re-read from `board` before every probe.
    for _pat, _dst, _path, _mv in (
        (r'[plsgrk+]', w9e, (), '9d9e'),
        (r'[sgbk+]', w8e, (), '9d8e'),
        (r'[grk+]', w8d, (), '9d8d'),
        (r'[grk+]', w9c, (), '9d9c'),
        (r'\+r|\+b|b|s|k', w8c, (), '9d8c'),
        (r'n', w8f, (), '9d8f'),
        (r'\+r', w9i, ('s9h', 's9g', 's9f', 's9e'), '9d9i'),
        (r'r|l', w9i, ('s9h', 's9g', 's9f', 's9e'), '9d9i+'),
        (r'\+r', w9h, ('s9g', 's9f', 's9e'), '9d9h'),
        (r'r|l', w9h, ('s9g', 's9f', 's9e'), '9d9h+'),
        (r'\+r|l', w9g, ('s9f', 's9e'), '9d9g'),
        (r'r|l', w9g, ('s9f', 's9e'), '9d9g+'),
        (r'\+r|r|l', w9f, ('s9e',), '9d9f'),
        (r'\+r|r', w9b, ('s9c',), '9d9b'),
        (r'\+r|r', w9a, ('s9c', 's9b'), '9d9a'),
        (r'\+r|r', w7d, ('s8d',), '9d7d'),
        (r'\+r|r', w6d, ('s8d', 's7d'), '9d6d'),
        (r'\+r|r', w5d, ('s8d', 's7d', 's6d'), '9d5d'),
        (r'\+r|r', w4d, ('s8d', 's7d', 's6d', 's5d'), '9d4d'),
        (r'\+r|r', w3d, ('s8d', 's7d', 's6d', 's5d', 's4d'), '9d3d'),
        (r'\+r|r', w2d, ('s8d', 's7d', 's6d', 's5d', 's4d', 's3d'), '9d2d'),
        (r'\+r|r', w1d, ('s8d', 's7d', 's6d', 's5d', 's4d', 's3d', 's2d'), '9d1d'),
        (r'\+b|b', w7f, ('s8e',), '9d7f'),
        (r'\+b', w6g, ('s8e', 's7f'), '9d6g'),
        (r'\+b', w5h, ('s8e', 's7f', 's6g'), '9d5h'),
        (r'\+b', w4i, ('s8e', 's7f', 's6g', 's5h'), '9d4i'),
        (r'\+b', w6a, ('s7b', 's8c'), '9d6a'),
        (r'\+b', w7b, ('s8c',), '9d7b'),
        (r'b', w6g, ('s8e', 's7f'), '9d6g+'),
        (r'b', w5h, ('s8e', 's7f', 's6g'), '9d5h+'),
        (r'b', w4i, ('s8e', 's7f', 's6g', 's5h'), '9d4i+'),
    ):
        if re.match(_pat, Wboard.w9d) and _dst == '' \
                and all(getattr(board, _sq) == '' for _sq in _path):
            moves = _mv
            kaihimore(moves)        # probe: does this move resolve the check?
            if oute.oute == 0:      # 0 -> own king no longer in check
                depth1.append(moves)
if Wboard.w1c !='':
# Candidate moves for the white piece standing on 1c (edge file; the
# enclosing `if Wboard.w1c != '':` guard and further 1c moves live
# outside this span).
# Table entry: (piece regex, occupancy of the destination square,
# intermediate board squares that must all be empty, move string);
# a trailing '+' on the move string denotes promotion.
# Raw strings fix the original '\+r' invalid escape sequences.
# NOTE(review): destination occupancies (w1d, ...) are snapshotted when
# this tuple is built; assumes kaihimore() never rebinds them — TODO
# confirm.  Path squares ARE re-read from `board` before every probe.
for _pat, _dst, _path, _mv in (
    (r'[plsgrk+]', w1d, (), '1c1d'),
    (r'[sgbk+]', w2d, (), '1c2d'),
    (r'[grk+]', w2c, (), '1c2c'),
    (r'[grk+]', w1b, (), '1c1b'),
    (r'\+r|\+b|b|s|k', w2b, (), '1c2b'),
    (r'n', w2e, (), '1c2e'),
    (r'\+r', w1i, ('s1h', 's1g', 's1f', 's1e', 's1d'), '1c1i'),
    (r'r|l', w1i, ('s1h', 's1g', 's1f', 's1e', 's1d'), '1c1i+'),
    (r'\+r', w1h, ('s1g', 's1f', 's1e', 's1d'), '1c1h'),
    (r'r|l', w1h, ('s1g', 's1f', 's1e', 's1d'), '1c1h+'),
    (r'\+r|l', w1g, ('s1f', 's1e', 's1d'), '1c1g'),
    (r'r|l', w1g, ('s1f', 's1e', 's1d'), '1c1g+'),
    (r'\+r|r|l', w1f, ('s1e', 's1d'), '1c1f'),
    (r'\+r|r|l', w1e, ('s1d',), '1c1e'),
    (r'\+r|r', w1a, ('s1b',), '1c1a'),
    (r'\+r|r', w3c, ('s2c',), '1c3c'),
    (r'\+r|r', w4c, ('s2c', 's3c'), '1c4c'),
    (r'\+r|r', w5c, ('s2c', 's3c', 's4c'), '1c5c'),
    (r'\+r|r', w6c, ('s2c', 's3c', 's4c', 's5c'), '1c6c'),
    (r'\+r|r', w7c, ('s2c', 's3c', 's4c', 's5c', 's6c'), '1c7c'),
    (r'\+r|r', w8c, ('s2c', 's3c', 's4c', 's5c', 's6c', 's7c'), '1c8c'),
    (r'\+r|r', w9c, ('s2c', 's3c', 's4c', 's5c', 's6c', 's7c', 's8c'), '1c9c'),
    (r'b', w5g, ('s2d', 's3e', 's4f'), '1c5g+'),
    (r'b', w6h, ('s2d', 's3e', 's4f', 's5g'), '1c6h+'),
    (r'b', w7i, ('s2d', 's3e', 's4f', 's5g', 's6h'), '1c7i+'),
):
    if re.match(_pat, Wboard.w1c) and _dst == '' \
            and all(getattr(board, _sq) == '' for _sq in _path):
        moves = _mv
        kaihimore(moves)        # probe: does this move resolve the check?
        if oute.oute == 0:      # 0 -> own king no longer in check
            depth1.append(moves)
if re.match(r'\+b|b', Wboard.w1c)and w3a==''\
and board.s2b=='':
moves = '1c3a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b|b', Wboard.w1c)and w3e==''\
and board.s2d=='':
moves = '1c3e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b|b', Wboard.w1c)and w4f==''\
and board.s2d+board.s3e=='':
moves = '1c4f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w1c)and w5g==''\
and board.s2d+board.s3e+board.s4f=='':
moves = '1c5g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w1c)and w6h==''\
and board.s2d+board.s3e+board.s4f+board.s5g=='':
moves = '1c6h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w1c)and w7i==''\
and board.s2d+board.s3e+board.s4f+board.s5g+board.s6h=='':
moves = '1c7i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
# Candidate moves for the white piece standing on square 2c; same scheme
# as the neighbouring groups: kaihimore() simulates each pseudo-legal
# move and the move is kept only when oute.oute == 0 afterwards.
if Wboard.w2c != '':
    # single-step moves
    if re.match(r'[plsgrk+]', Wboard.w2c) and w2d == '':
        moves = '2c2d'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[sgbk+]', Wboard.w2c) and w1d == '':
        moves = '2c1d'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[sgbk+]', Wboard.w2c) and w3d == '':
        moves = '2c3d'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[grk+]', Wboard.w2c) and w1c == '':
        moves = '2c1c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[grk+]', Wboard.w2c) and w3c == '':
        moves = '2c3c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[grk+]', Wboard.w2c) and w2b == '':
        moves = '2c2b'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|\+b|b|s|k', Wboard.w2c) and w1b == '':
        moves = '2c1b'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|\+b|b|s|k', Wboard.w2c) and w3b == '':
        moves = '2c3b'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'n', Wboard.w2c) and w1e == '':
        moves = '2c1e'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'n', Wboard.w2c) and w3e == '':
        moves = '2c3e'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # sliding moves down file 2 (rook/dragon/lance)
    if (re.match(r'\+r', Wboard.w2c) and w2i == ''
            and board.s2h + board.s2g + board.s2f + board.s2e + board.s2d == ''):
        moves = '2c2i'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'r|l', Wboard.w2c) and w2i == ''
            and board.s2h + board.s2g + board.s2f + board.s2e + board.s2d == ''):
        moves = '2c2i+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r', Wboard.w2c) and w2h == ''
            and board.s2g + board.s2f + board.s2e + board.s2d == ''):
        moves = '2c2h'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'r|l', Wboard.w2c) and w2h == ''
            and board.s2g + board.s2f + board.s2e + board.s2d == ''):
        moves = '2c2h+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r|l', Wboard.w2c) and w2g == ''
            and board.s2f + board.s2e + board.s2d == ''):
        moves = '2c2g'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'r|l', Wboard.w2c) and w2g == ''
            and board.s2f + board.s2e + board.s2d == ''):
        moves = '2c2g+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r|r|l', Wboard.w2c) and w2f == ''
            and board.s2e + board.s2d == ''):
        moves = '2c2f'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r|r|l', Wboard.w2c) and w2e == ''
            and board.s2d == ''):
        moves = '2c2e'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r|r', Wboard.w2c) and w2a == ''
            and board.s2b == ''):
        moves = '2c2a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # sliding moves along rank c (rook/dragon)
    if (re.match(r'\+r|r', Wboard.w2c) and w4c == ''
            and board.s3c == ''):
        moves = '2c4c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r|r', Wboard.w2c) and w5c == ''
            and board.s3c + board.s4c == ''):
        moves = '2c5c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r|r', Wboard.w2c) and w6c == ''
            and board.s3c + board.s4c + board.s5c == ''):
        moves = '2c6c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r|r', Wboard.w2c) and w7c == ''
            and board.s3c + board.s4c + board.s5c + board.s6c == ''):
        moves = '2c7c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r|r', Wboard.w2c) and w8c == ''
            and board.s3c + board.s4c + board.s5c + board.s6c + board.s7c == ''):
        moves = '2c8c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r|r', Wboard.w2c) and w9c == ''
            and board.s3c + board.s4c + board.s5c + board.s6c + board.s7c
            + board.s8c == ''):
        moves = '2c9c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # bishop diagonals (promoting where the plain bishop may)
    if (re.match(r'b', Wboard.w2c) and w6g == ''
            and board.s3d + board.s4e + board.s5f == ''):
        moves = '2c6g+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'b', Wboard.w2c) and w7h == ''
            and board.s3d + board.s4e + board.s5f + board.s6g == ''):
        moves = '2c7h+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'b', Wboard.w2c) and w8i == ''
            and board.s3d + board.s4e + board.s5f + board.s6g + board.s7h == ''):
        moves = '2c8i+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+b|b', Wboard.w2c) and w4e == ''
            and board.s3d == ''):
        moves = '2c4e'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+b|b', Wboard.w2c) and w5f == ''
            and board.s3d + board.s4e == ''):
        moves = '2c5f'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+b', Wboard.w2c) and w6g == ''
            and board.s3d + board.s4e + board.s5f == ''):
        moves = '2c6g'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+b', Wboard.w2c) and w7h == ''
            and board.s3d + board.s4e + board.s5f + board.s6g == ''):
        moves = '2c7h'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+b', Wboard.w2c) and w8i == ''
            and board.s3d + board.s4e + board.s5f + board.s6g + board.s7h == ''):
        moves = '2c8i'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+b|b', Wboard.w2c) and w4a == ''
            and board.s3b == ''):
        moves = '2c4a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
# Candidate moves for the white piece standing on square 3c; same scheme
# as the neighbouring groups: kaihimore() simulates each pseudo-legal
# move and the move is kept only when oute.oute == 0 afterwards.
if Wboard.w3c != '':
    # single-step moves
    if re.match(r'[plsgrk+]', Wboard.w3c) and w3d == '':
        moves = '3c3d'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[sgbk+]', Wboard.w3c) and w2d == '':
        moves = '3c2d'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[sgbk+]', Wboard.w3c) and w4d == '':
        moves = '3c4d'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[grk+]', Wboard.w3c) and w2c == '':
        moves = '3c2c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[grk+]', Wboard.w3c) and w4c == '':
        moves = '3c4c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[grk+]', Wboard.w3c) and w3b == '':
        moves = '3c3b'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|\+b|b|s|k', Wboard.w3c) and w2b == '':
        moves = '3c2b'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|\+b|b|s|k', Wboard.w3c) and w4b == '':
        moves = '3c4b'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'n', Wboard.w3c) and w2e == '':
        moves = '3c2e'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'n', Wboard.w3c) and w4e == '':
        moves = '3c4e'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # sliding moves down file 3 (rook/dragon/lance)
    if (re.match(r'\+r', Wboard.w3c) and w3i == ''
            and board.s3h + board.s3g + board.s3f + board.s3e + board.s3d == ''):
        moves = '3c3i'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'r|l', Wboard.w3c) and w3i == ''
            and board.s3h + board.s3g + board.s3f + board.s3e + board.s3d == ''):
        moves = '3c3i+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r', Wboard.w3c) and w3h == ''
            and board.s3g + board.s3f + board.s3e + board.s3d == ''):
        moves = '3c3h'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'r|l', Wboard.w3c) and w3h == ''
            and board.s3g + board.s3f + board.s3e + board.s3d == ''):
        moves = '3c3h+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r|l', Wboard.w3c) and w3g == ''
            and board.s3f + board.s3e + board.s3d == ''):
        moves = '3c3g'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'r|l', Wboard.w3c) and w3g == ''
            and board.s3f + board.s3e + board.s3d == ''):
        moves = '3c3g+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r|r|l', Wboard.w3c) and w3f == ''
            and board.s3e + board.s3d == ''):
        moves = '3c3f'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r|r|l', Wboard.w3c) and w3e == ''
            and board.s3d == ''):
        moves = '3c3e'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r|r', Wboard.w3c) and w3a == ''
            and board.s3b == ''):
        moves = '3c3a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # sliding moves along rank c (rook/dragon)
    if (re.match(r'\+r|r', Wboard.w3c) and w1c == ''
            and board.s2c == ''):
        moves = '3c1c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r|r', Wboard.w3c) and w5c == ''
            and board.s4c == ''):
        moves = '3c5c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r|r', Wboard.w3c) and w6c == ''
            and board.s4c + board.s5c == ''):
        moves = '3c6c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r|r', Wboard.w3c) and w7c == ''
            and board.s4c + board.s5c + board.s6c == ''):
        moves = '3c7c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r|r', Wboard.w3c) and w8c == ''
            and board.s4c + board.s5c + board.s6c + board.s7c == ''):
        moves = '3c8c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r|r', Wboard.w3c) and w9c == ''
            and board.s4c + board.s5c + board.s6c + board.s7c + board.s8c == ''):
        moves = '3c9c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # bishop diagonals (promoting where the plain bishop may)
    if (re.match(r'b', Wboard.w3c) and w7g == ''
            and board.s4d + board.s5e + board.s6f == ''):
        moves = '3c7g+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'b', Wboard.w3c) and w8h == ''
            and board.s4d + board.s5e + board.s6f + board.s7g == ''):
        moves = '3c8h+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'b', Wboard.w3c) and w9i == ''
            and board.s4d + board.s5e + board.s6f + board.s7g + board.s8h == ''):
        moves = '3c9i+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+b|b', Wboard.w3c) and w1a == ''
            and board.s2b == ''):
        moves = '3c1a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+b|b', Wboard.w3c) and w5e == ''
            and board.s4d == ''):
        moves = '3c5e'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+b|b', Wboard.w3c) and w6f == ''
            and board.s4d + board.s5e == ''):
        moves = '3c6f'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+b', Wboard.w3c) and w7g == ''
            and board.s4d + board.s5e + board.s6f == ''):
        moves = '3c7g'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+b', Wboard.w3c) and w8h == ''
            and board.s4d + board.s5e + board.s6f + board.s7g == ''):
        moves = '3c8h'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+b', Wboard.w3c) and w9i == ''
            and board.s4d + board.s5e + board.s6f + board.s7g + board.s8h == ''):
        moves = '3c9i'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+b|b', Wboard.w3c) and w5a == ''
            and board.s4b == ''):
        moves = '3c5a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+b|b', Wboard.w3c) and w1e == ''
            and board.s2d == ''):
        moves = '3c1e'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
# Candidate moves for the white piece standing on square 4c; same scheme
# as the neighbouring groups: kaihimore() simulates each pseudo-legal
# move and the move is kept only when oute.oute == 0 afterwards.
if Wboard.w4c != '':
    # single-step moves
    if re.match(r'[plsgrk+]', Wboard.w4c) and w4d == '':
        moves = '4c4d'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[sgbk+]', Wboard.w4c) and w3d == '':
        moves = '4c3d'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[sgbk+]', Wboard.w4c) and w5d == '':
        moves = '4c5d'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[grk+]', Wboard.w4c) and w3c == '':
        moves = '4c3c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[grk+]', Wboard.w4c) and w5c == '':
        moves = '4c5c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[grk+]', Wboard.w4c) and w4b == '':
        moves = '4c4b'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|\+b|b|s|k', Wboard.w4c) and w3b == '':
        moves = '4c3b'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|\+b|b|s|k', Wboard.w4c) and w5b == '':
        moves = '4c5b'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'n', Wboard.w4c) and w3e == '':
        moves = '4c3e'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'n', Wboard.w4c) and w5e == '':
        moves = '4c5e'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # sliding moves down file 4 (rook/dragon/lance)
    if (re.match(r'\+r', Wboard.w4c) and w4i == ''
            and board.s4h + board.s4g + board.s4f + board.s4e + board.s4d == ''):
        moves = '4c4i'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'r|l', Wboard.w4c) and w4i == ''
            and board.s4h + board.s4g + board.s4f + board.s4e + board.s4d == ''):
        moves = '4c4i+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r', Wboard.w4c) and w4h == ''
            and board.s4g + board.s4f + board.s4e + board.s4d == ''):
        moves = '4c4h'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'r|l', Wboard.w4c) and w4h == ''
            and board.s4g + board.s4f + board.s4e + board.s4d == ''):
        moves = '4c4h+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r|l', Wboard.w4c) and w4g == ''
            and board.s4f + board.s4e + board.s4d == ''):
        moves = '4c4g'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'r|l', Wboard.w4c) and w4g == ''
            and board.s4f + board.s4e + board.s4d == ''):
        moves = '4c4g+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r|r|l', Wboard.w4c) and w4f == ''
            and board.s4e + board.s4d == ''):
        moves = '4c4f'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r|r|l', Wboard.w4c) and w4e == ''
            and board.s4d == ''):
        moves = '4c4e'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r|r', Wboard.w4c) and w4a == ''
            and board.s4b == ''):
        moves = '4c4a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # sliding moves along rank c (rook/dragon)
    if (re.match(r'\+r|r', Wboard.w4c) and w1c == ''
            and board.s2c + board.s3c == ''):
        moves = '4c1c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r|r', Wboard.w4c) and w2c == ''
            and board.s3c == ''):
        moves = '4c2c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r|r', Wboard.w4c) and w6c == ''
            and board.s5c == ''):
        moves = '4c6c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r|r', Wboard.w4c) and w7c == ''
            and board.s5c + board.s6c == ''):
        moves = '4c7c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r|r', Wboard.w4c) and w8c == ''
            and board.s5c + board.s6c + board.s7c == ''):
        moves = '4c8c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r|r', Wboard.w4c) and w9c == ''
            and board.s5c + board.s6c + board.s7c + board.s8c == ''):
        moves = '4c9c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # bishop diagonals (promoting where the plain bishop may)
    if (re.match(r'\+b|b', Wboard.w4c) and w6e == ''
            and board.s5d == ''):
        moves = '4c6e'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+b|b', Wboard.w4c) and w7f == ''
            and board.s5d + board.s6e == ''):
        moves = '4c7f'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+b', Wboard.w4c) and w8g == ''
            and board.s5d + board.s6e + board.s7f == ''):
        moves = '4c8g'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+b', Wboard.w4c) and w9h == ''
            and board.s5d + board.s6e + board.s7f + board.s8g == ''):
        moves = '4c9h'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'b', Wboard.w4c) and w8g == ''
            and board.s5d + board.s6e + board.s7f == ''):
        moves = '4c8g+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'b', Wboard.w4c) and w9h == ''
            and board.s5d + board.s6e + board.s7f + board.s8g == ''):
        moves = '4c9h+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+b|b', Wboard.w4c) and w1f == ''
            and board.s2e + board.s3d == ''):
        moves = '4c1f'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+b|b', Wboard.w4c) and w2e == ''
            and board.s3d == ''):
        moves = '4c2e'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+b|b', Wboard.w4c) and w2a == ''
            and board.s3b == ''):
        moves = '4c2a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+b|b', Wboard.w4c) and w6a == ''
            and board.s5b == ''):
        moves = '4c6a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
# Candidate moves for the white piece standing on square 5c; same scheme
# as the neighbouring groups: kaihimore() simulates each pseudo-legal
# move and the move is kept only when oute.oute == 0 afterwards.
if Wboard.w5c != '':
    # single-step moves
    if re.match(r'[plsgrk+]', Wboard.w5c) and w5d == '':
        moves = '5c5d'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[sgbk+]', Wboard.w5c) and w4d == '':
        moves = '5c4d'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[sgbk+]', Wboard.w5c) and w6d == '':
        moves = '5c6d'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[grk+]', Wboard.w5c) and w4c == '':
        moves = '5c4c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[grk+]', Wboard.w5c) and w6c == '':
        moves = '5c6c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[grk+]', Wboard.w5c) and w5b == '':
        moves = '5c5b'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|\+b|b|s|k', Wboard.w5c) and w4b == '':
        moves = '5c4b'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|\+b|b|s|k', Wboard.w5c) and w6b == '':
        moves = '5c6b'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'n', Wboard.w5c) and w4e == '':
        moves = '5c4e'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'n', Wboard.w5c) and w6e == '':
        moves = '5c6e'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # sliding moves down file 5 (rook/dragon/lance)
    if (re.match(r'\+r', Wboard.w5c) and w5i == ''
            and board.s5h + board.s5g + board.s5f + board.s5e + board.s5d == ''):
        moves = '5c5i'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'r|l', Wboard.w5c) and w5i == ''
            and board.s5h + board.s5g + board.s5f + board.s5e + board.s5d == ''):
        moves = '5c5i+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r', Wboard.w5c) and w5h == ''
            and board.s5g + board.s5f + board.s5e + board.s5d == ''):
        moves = '5c5h'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'r|l', Wboard.w5c) and w5h == ''
            and board.s5g + board.s5f + board.s5e + board.s5d == ''):
        moves = '5c5h+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r|l', Wboard.w5c) and w5g == ''
            and board.s5f + board.s5e + board.s5d == ''):
        moves = '5c5g'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'r|l', Wboard.w5c) and w5g == ''
            and board.s5f + board.s5e + board.s5d == ''):
        moves = '5c5g+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r|r|l', Wboard.w5c) and w5f == ''
            and board.s5e + board.s5d == ''):
        moves = '5c5f'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r|r|l', Wboard.w5c) and w5e == ''
            and board.s5d == ''):
        moves = '5c5e'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r|r', Wboard.w5c) and w5a == ''
            and board.s5b == ''):
        moves = '5c5a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # sliding moves along rank c (rook/dragon)
    if (re.match(r'\+r|r', Wboard.w5c) and w1c == ''
            and board.s2c + board.s3c + board.s4c == ''):
        moves = '5c1c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r|r', Wboard.w5c) and w2c == ''
            and board.s3c + board.s4c == ''):
        moves = '5c2c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r|r', Wboard.w5c) and w3c == ''
            and board.s4c == ''):
        moves = '5c3c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r|r', Wboard.w5c) and w7c == ''
            and board.s6c == ''):
        moves = '5c7c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r|r', Wboard.w5c) and w8c == ''
            and board.s6c + board.s7c == ''):
        moves = '5c8c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r|r', Wboard.w5c) and w9c == ''
            and board.s6c + board.s7c + board.s8c == ''):
        moves = '5c9c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # bishop diagonals (promoting where the plain bishop may)
    if (re.match(r'\+b|b', Wboard.w5c) and w7e == ''
            and board.s6d == ''):
        moves = '5c7e'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+b|b', Wboard.w5c) and w8f == ''
            and board.s6d + board.s7e == ''):
        moves = '5c8f'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+b', Wboard.w5c) and w9g == ''
            and board.s6d + board.s7e + board.s8f == ''):
        moves = '5c9g'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'b', Wboard.w5c) and w9g == ''
            and board.s6d + board.s7e + board.s8f == ''):
        moves = '5c9g+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+b|b', Wboard.w5c) and w2f == ''
            and board.s3e + board.s4d == ''):
        moves = '5c2f'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+b|b', Wboard.w5c) and w3e == ''
            and board.s4d == ''):
        moves = '5c3e'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+b', Wboard.w5c) and w1g == ''
            and board.s4d + board.s3e + board.s2f == ''):
        moves = '5c1g'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'b', Wboard.w5c) and w1g == ''
            and board.s4d + board.s3e + board.s2f == ''):
        moves = '5c1g+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+b|b', Wboard.w5c) and w3a == ''
            and board.s4b == ''):
        moves = '5c3a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+b|b', Wboard.w5c) and w7a == ''
            and board.s6b == ''):
        moves = '5c7a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
# Candidate moves for the white piece standing on square 6c; same scheme
# as the neighbouring groups: kaihimore() simulates each pseudo-legal
# move and the move is kept only when oute.oute == 0 afterwards.
if Wboard.w6c != '':
    # single-step moves
    if re.match(r'[plsgrk+]', Wboard.w6c) and w6d == '':
        moves = '6c6d'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[sgbk+]', Wboard.w6c) and w5d == '':
        moves = '6c5d'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[sgbk+]', Wboard.w6c) and w7d == '':
        moves = '6c7d'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[grk+]', Wboard.w6c) and w5c == '':
        moves = '6c5c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[grk+]', Wboard.w6c) and w7c == '':
        moves = '6c7c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[grk+]', Wboard.w6c) and w6b == '':
        moves = '6c6b'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|\+b|b|s|k', Wboard.w6c) and w5b == '':
        moves = '6c5b'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|\+b|b|s|k', Wboard.w6c) and w7b == '':
        moves = '6c7b'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'n', Wboard.w6c) and w5e == '':
        moves = '6c5e'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'n', Wboard.w6c) and w7e == '':
        moves = '6c7e'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # sliding moves down file 6 (rook/dragon/lance)
    if (re.match(r'\+r', Wboard.w6c) and w6i == ''
            and board.s6h + board.s6g + board.s6f + board.s6e + board.s6d == ''):
        moves = '6c6i'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'r|l', Wboard.w6c) and w6i == ''
            and board.s6h + board.s6g + board.s6f + board.s6e + board.s6d == ''):
        moves = '6c6i+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r', Wboard.w6c) and w6h == ''
            and board.s6g + board.s6f + board.s6e + board.s6d == ''):
        moves = '6c6h'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'r|l', Wboard.w6c) and w6h == ''
            and board.s6g + board.s6f + board.s6e + board.s6d == ''):
        moves = '6c6h+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r|l', Wboard.w6c) and w6g == ''
            and board.s6f + board.s6e + board.s6d == ''):
        moves = '6c6g'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'r|l', Wboard.w6c) and w6g == ''
            and board.s6f + board.s6e + board.s6d == ''):
        moves = '6c6g+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r|r|l', Wboard.w6c) and w6f == ''
            and board.s6e + board.s6d == ''):
        moves = '6c6f'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r|r|l', Wboard.w6c) and w6e == ''
            and board.s6d == ''):
        moves = '6c6e'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r|r', Wboard.w6c) and w6a == ''
            and board.s6b == ''):
        moves = '6c6a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # sliding moves along rank c (rook/dragon)
    if (re.match(r'\+r|r', Wboard.w6c) and w9c == ''
            and board.s8c + board.s7c == ''):
        moves = '6c9c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r|r', Wboard.w6c) and w8c == ''
            and board.s7c == ''):
        moves = '6c8c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r|r', Wboard.w6c) and w4c == ''
            and board.s5c == ''):
        moves = '6c4c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r|r', Wboard.w6c) and w3c == ''
            and board.s5c + board.s4c == ''):
        moves = '6c3c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r|r', Wboard.w6c) and w2c == ''
            and board.s5c + board.s4c + board.s3c == ''):
        moves = '6c2c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r|r', Wboard.w6c) and w1c == ''
            and board.s5c + board.s4c + board.s3c + board.s2c == ''):
        moves = '6c1c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # bishop diagonals (promoting where the plain bishop may)
    if (re.match(r'\+b|b', Wboard.w6c) and w4e == ''
            and board.s5d == ''):
        moves = '6c4e'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+b|b', Wboard.w6c) and w3f == ''
            and board.s5d + board.s4e == ''):
        moves = '6c3f'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+b', Wboard.w6c) and w2g == ''
            and board.s5d + board.s4e + board.s3f == ''):
        moves = '6c2g'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+b', Wboard.w6c) and w1h == ''
            and board.s5d + board.s4e + board.s3f + board.s2g == ''):
        moves = '6c1h'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'b', Wboard.w6c) and w2g == ''
            and board.s5d + board.s4e + board.s3f == ''):
        moves = '6c2g+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'b', Wboard.w6c) and w1h == ''
            and board.s5d + board.s4e + board.s3f + board.s2g == ''):
        moves = '6c1h+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+b|b', Wboard.w6c) and w9f == ''
            and board.s8e + board.s7d == ''):
        moves = '6c9f'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+b|b', Wboard.w6c) and w8e == ''
            and board.s7d == ''):
        moves = '6c8e'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+b|b', Wboard.w6c) and w8a == ''
            and board.s7b == ''):
        moves = '6c8a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+b|b', Wboard.w6c) and w4a == ''
            and board.s5b == ''):
        moves = '6c4a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
# Candidate moves for the white piece standing on square 7c; same scheme
# as the neighbouring groups: kaihimore() simulates each pseudo-legal
# move and the move is kept only when oute.oute == 0 afterwards.
if Wboard.w7c != '':
    # single-step moves
    if re.match(r'[plsgrk+]', Wboard.w7c) and w7d == '':
        moves = '7c7d'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[sgbk+]', Wboard.w7c) and w6d == '':
        moves = '7c6d'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[sgbk+]', Wboard.w7c) and w8d == '':
        moves = '7c8d'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[grk+]', Wboard.w7c) and w6c == '':
        moves = '7c6c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[grk+]', Wboard.w7c) and w8c == '':
        moves = '7c8c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[grk+]', Wboard.w7c) and w7b == '':
        moves = '7c7b'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|\+b|b|s|k', Wboard.w7c) and w6b == '':
        moves = '7c6b'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|\+b|b|s|k', Wboard.w7c) and w8b == '':
        moves = '7c8b'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'n', Wboard.w7c) and w6e == '':
        moves = '7c6e'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'n', Wboard.w7c) and w8e == '':
        moves = '7c8e'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # sliding moves down file 7 (rook/dragon/lance)
    if (re.match(r'\+r', Wboard.w7c) and w7i == ''
            and board.s7h + board.s7g + board.s7f + board.s7e + board.s7d == ''):
        moves = '7c7i'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'r|l', Wboard.w7c) and w7i == ''
            and board.s7h + board.s7g + board.s7f + board.s7e + board.s7d == ''):
        moves = '7c7i+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r', Wboard.w7c) and w7h == ''
            and board.s7g + board.s7f + board.s7e + board.s7d == ''):
        moves = '7c7h'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'r|l', Wboard.w7c) and w7h == ''
            and board.s7g + board.s7f + board.s7e + board.s7d == ''):
        moves = '7c7h+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r|l', Wboard.w7c) and w7g == ''
            and board.s7f + board.s7e + board.s7d == ''):
        moves = '7c7g'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'r|l', Wboard.w7c) and w7g == ''
            and board.s7f + board.s7e + board.s7d == ''):
        moves = '7c7g+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r|r|l', Wboard.w7c) and w7f == ''
            and board.s7e + board.s7d == ''):
        moves = '7c7f'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r|r|l', Wboard.w7c) and w7e == ''
            and board.s7d == ''):
        moves = '7c7e'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r|r', Wboard.w7c) and w7a == ''
            and board.s7b == ''):
        moves = '7c7a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # sliding moves along rank c (rook/dragon)
    if (re.match(r'\+r|r', Wboard.w7c) and w9c == ''
            and board.s8c == ''):
        moves = '7c9c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r|r', Wboard.w7c) and w5c == ''
            and board.s6c == ''):
        moves = '7c5c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r|r', Wboard.w7c) and w4c == ''
            and board.s6c + board.s5c == ''):
        moves = '7c4c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r|r', Wboard.w7c) and w3c == ''
            and board.s6c + board.s5c + board.s4c == ''):
        moves = '7c3c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r|r', Wboard.w7c) and w2c == ''
            and board.s6c + board.s5c + board.s4c + board.s3c == ''):
        moves = '7c2c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+r|r', Wboard.w7c) and w1c == ''
            and board.s6c + board.s5c + board.s4c + board.s3c + board.s2c == ''):
        moves = '7c1c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # bishop diagonals (promoting where the plain bishop may)
    if (re.match(r'b', Wboard.w7c) and w3g == ''
            and board.s6d + board.s5e + board.s4f == ''):
        moves = '7c3g+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'b', Wboard.w7c) and w2h == ''
            and board.s6d + board.s5e + board.s4f + board.s3g == ''):
        moves = '7c2h+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'b', Wboard.w7c) and w1i == ''
            and board.s6d + board.s5e + board.s4f + board.s3g + board.s2h == ''):
        moves = '7c1i+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+b|b', Wboard.w7c) and w9a == ''
            and board.s8b == ''):
        moves = '7c9a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+b|b', Wboard.w7c) and w5e == ''
            and board.s6d == ''):
        moves = '7c5e'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+b|b', Wboard.w7c) and w4f == ''
            and board.s6d + board.s5e == ''):
        moves = '7c4f'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+b', Wboard.w7c) and w3g == ''
            and board.s6d + board.s5e + board.s4f == ''):
        moves = '7c3g'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+b', Wboard.w7c) and w2h == ''
            and board.s6d + board.s5e + board.s4f + board.s3g == ''):
        moves = '7c2h'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+b', Wboard.w7c) and w1i == ''
            and board.s6d + board.s5e + board.s4f + board.s3g + board.s2h == ''):
        moves = '7c1i'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+b|b', Wboard.w7c) and w5a == ''
            and board.s6b == ''):
        moves = '7c5a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if (re.match(r'\+b|b', Wboard.w7c) and w9e == ''
            and board.s8d == ''):
        moves = '7c9e'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
# --- Escape moves for the white piece (if any) on 8c. Same stanza
# pattern as the neighbouring groups: regex = piece letters allowed for
# the step, wXX == '' = target free of own pieces, board.sXX sum == '' =
# clear ray; a move is kept in depth1 only when kaihimore(moves) leaves
# oute.oute == 0 (king apparently no longer in check -- TODO confirm). ---
if Wboard.w8c !='':
# Single-square steps (and the knight's jump, which may leap over pieces).
if re.match(r'[plsgrk+]', Wboard.w8c)and w8d=='':
moves = '8c8d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[sgbk+]', Wboard.w8c)and w7d=='':
moves = '8c7d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[sgbk+]', Wboard.w8c)and w9d=='':
moves = '8c9d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[grk+]', Wboard.w8c)and w7c=='':
moves = '8c7c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[grk+]', Wboard.w8c)and w9c=='':
moves = '8c9c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[grk+]', Wboard.w8c)and w8b=='':
moves = '8c8b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|\+b|b|s|k',Wboard.w8c)and w7b=='':
moves = '8c7b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|\+b|b|s|k',Wboard.w8c)and w9b=='':
moves = '8c9b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('n', Wboard.w8c)and w7e=='':
moves = '8c7e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('n', Wboard.w8c)and w9e=='':
moves = '8c9e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
# Rook/lance rays down the 8-file; from rank g on a trailing '+'
# (promotion) variant exists alongside the plain move.
if re.match('\+r', Wboard.w8c)and w8i==''\
and board.s8h+board.s8g+board.s8f+board.s8e+board.s8d=='':
moves = '8c8i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'r|l', Wboard.w8c)and w8i==''\
and board.s8h+board.s8g+board.s8f+board.s8e+board.s8d=='':
moves = '8c8i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w8c)and w8h==''\
and board.s8g+board.s8f+board.s8e+board.s8d=='':
moves = '8c8h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'r|l', Wboard.w8c)and w8h==''\
and board.s8g+board.s8f+board.s8e+board.s8d=='':
moves = '8c8h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|l', Wboard.w8c)and w8g==''\
and board.s8f+board.s8e+board.s8d=='':
moves = '8c8g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'r|l', Wboard.w8c)and w8g==''\
and board.s8f+board.s8e+board.s8d=='':
moves = '8c8g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r|l', Wboard.w8c)and w8f==''\
and board.s8e+board.s8d=='':
moves = '8c8f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r|l', Wboard.w8c)and w8e==''\
and board.s8d=='':
moves = '8c8e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
# Rook rays up the file and sideways along rank c.
if re.match(r'\+r|r', Wboard.w8c)and w8a==''\
and board.s8b=='':
moves = '8c8a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w8c)and w6c==''\
and board.s7c=='':
moves = '8c6c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w8c)and w5c==''\
and board.s7c+board.s6c=='':
moves = '8c5c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w8c)and w4c==''\
and board.s7c+board.s6c+board.s5c=='':
moves = '8c4c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w8c)and w3c==''\
and board.s7c+board.s6c+board.s5c+board.s4c=='':
moves = '8c3c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w8c)and w2c==''\
and board.s7c+board.s6c+board.s5c+board.s4c+board.s3c=='':
moves = '8c2c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w8c)and w1c==''\
and board.s7c+board.s6c+board.s5c+board.s4c+board.s3c+board.s2c=='':
moves = '8c1c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
# Bishop diagonals: promoting ('+' suffix) for the plain bishop, the
# same squares without promotion for the promoted bishop.
if re.match('b',Wboard.w8c)and w4g==''\
and board.s7d+board.s6e+board.s5f=='':
moves = '8c4g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w8c)and w3h==''\
and board.s7d+board.s6e+board.s5f+board.s4g=='':
moves = '8c3h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w8c)and w2i==''\
and board.s7d+board.s6e+board.s5f+board.s4g+board.s3h=='':
moves = '8c2i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b|b', Wboard.w8c)and w6e==''\
and board.s7d=='':
moves = '8c6e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b|b', Wboard.w8c)and w5f==''\
and board.s7d+board.s6e=='':
moves = '8c5f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w8c)and w4g==''\
and board.s7d+board.s6e+board.s5f=='':
moves = '8c4g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w8c)and w3h==''\
and board.s7d+board.s6e+board.s5f+board.s4g=='':
moves = '8c3h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w8c)and w2i==''\
and board.s7d+board.s6e+board.s5f+board.s4g+board.s3h=='':
moves = '8c2i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b|b', Wboard.w8c)and w6a==''\
and board.s7b=='':
moves = '8c6a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
# --- Escape moves for the white piece (if any) on 9c (edge file: no
# 'west' neighbours exist, hence fewer stanzas).  Same pattern as the
# other groups: regex = allowed piece letters, wXX == '' = target free,
# board.sXX sum == '' = clear ray; kept when kaihimore leaves
# oute.oute == 0. ---
if Wboard.w9c !='':
# Single-square steps and the knight jump.
if re.match(r'[plsgrk+]', Wboard.w9c)and w9d=='':
moves = '9c9d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[sgbk+]', Wboard.w9c)and w8d=='':
moves = '9c8d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[grk+]', Wboard.w9c)and w8c=='':
moves = '9c8c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[grk+]', Wboard.w9c)and w9b=='':
moves = '9c9b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|\+b|b|s|k',Wboard.w9c)and w8b=='':
moves = '9c8b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('n', Wboard.w9c)and w8e=='':
moves = '9c8e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
# Rook/lance rays down the 9-file (promotion variants from rank g).
if re.match('\+r', Wboard.w9c)and w9i==''\
and board.s9h+board.s9g+board.s9f+board.s9e+board.s9d=='':
moves = '9c9i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'r|l', Wboard.w9c)and w9i==''\
and board.s9h+board.s9g+board.s9f+board.s9e+board.s9d=='':
moves = '9c9i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w9c)and w9h==''\
and board.s9g+board.s9f+board.s9e+board.s9d=='':
moves = '9c9h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'r|l', Wboard.w9c)and w9h==''\
and board.s9g+board.s9f+board.s9e+board.s9d=='':
moves = '9c9h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|l', Wboard.w9c)and w9g==''\
and board.s9f+board.s9e+board.s9d=='':
moves = '9c9g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'r|l', Wboard.w9c)and w9g==''\
and board.s9f+board.s9e+board.s9d=='':
moves = '9c9g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r|l', Wboard.w9c)and w9f==''\
and board.s9e+board.s9d=='':
moves = '9c9f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r|l', Wboard.w9c)and w9e==''\
and board.s9d=='':
moves = '9c9e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
# Rook rays up the file and east along rank c.
if re.match(r'\+r|r', Wboard.w9c)and w9a==''\
and board.s9b=='':
moves = '9c9a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w9c)and w7c==''\
and board.s8c=='':
moves = '9c7c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w9c)and w6c==''\
and board.s8c+board.s7c=='':
moves = '9c6c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w9c)and w5c==''\
and board.s8c+board.s7c+board.s6c=='':
moves = '9c5c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w9c)and w4c==''\
and board.s8c+board.s7c+board.s6c+board.s5c=='':
moves = '9c4c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w9c)and w3c==''\
and board.s8c+board.s7c+board.s6c+board.s5c+board.s4c=='':
moves = '9c3c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w9c)and w2c==''\
and board.s8c+board.s7c+board.s6c+board.s5c+board.s4c+board.s3c=='':
moves = '9c2c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w9c)and w1c==''\
and board.s8c+board.s7c+board.s6c+board.s5c+board.s4c+board.s3c+board.s2c=='':
moves = '9c1c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
# Bishop diagonals (plain bishop promotes in the zone; '+b' does not).
if re.match('b',Wboard.w9c)and w5g==''\
and board.s8d+board.s7e+board.s6f=='':
moves = '9c5g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w9c)and w4h==''\
and board.s8d+board.s7e+board.s6f+board.s5g=='':
moves = '9c4h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w9c)and w3i==''\
and board.s8d+board.s7e+board.s6f+board.s5g+board.s4h=='':
moves = '9c3i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b|b', Wboard.w9c)and w7a==''\
and board.s8b=='':
moves = '9c7a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b|b', Wboard.w9c)and w7e==''\
and board.s8d=='':
moves = '9c7e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b|b', Wboard.w9c)and w6f==''\
and board.s8d+board.s7e=='':
moves = '9c6f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w9c)and w5g==''\
and board.s8d+board.s7e+board.s6f=='':
moves = '9c5g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w9c)and w4h==''\
and board.s8d+board.s7e+board.s6f+board.s5g=='':
moves = '9c4h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w9c)and w3i==''\
and board.s8d+board.s7e+board.s6f+board.s5g+board.s4h=='':
moves = '9c3i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
# --- Escape moves for the white piece (if any) on 1b (edge file: no
# 'east' neighbours).  Same stanza pattern: regex = allowed piece
# letters, wXX == '' = target free, board.sXX sum == '' = clear ray;
# kept when kaihimore leaves oute.oute == 0. ---
if Wboard.w1b !='':
# Single-square steps and the knight jump.
if re.match(r'[plsgrk+]', Wboard.w1b)and w1c=='':
moves = '1b1c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[sgbk+]', Wboard.w1b)and w2c=='':
moves = '1b2c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[grk+]', Wboard.w1b)and w2b=='':
moves = '1b2b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[grk+]', Wboard.w1b)and w1a=='':
moves = '1b1a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|\+b|b|s|k',Wboard.w1b)and w2a=='':
moves = '1b2a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('n', Wboard.w1b)and w2d=='':
moves = '1b2d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
# Rook/lance rays down the 1-file (promotion variants from rank g).
if re.match('\+r', Wboard.w1b)and w1i==''\
and board.s1h+board.s1g+board.s1f+board.s1e+board.s1d+board.s1c=='':
moves = '1b1i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'r|l', Wboard.w1b)and w1i==''\
and board.s1h+board.s1g+board.s1f+board.s1e+board.s1d+board.s1c=='':
moves = '1b1i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w1b)and w1h==''\
and board.s1g+board.s1f+board.s1e+board.s1d+board.s1c=='':
moves = '1b1h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'r|l', Wboard.w1b)and w1h==''\
and board.s1g+board.s1f+board.s1e+board.s1d+board.s1c=='':
moves = '1b1h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|l', Wboard.w1b)and w1g==''\
and board.s1f+board.s1e+board.s1d+board.s1c=='':
moves = '1b1g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'r|l', Wboard.w1b)and w1g==''\
and board.s1f+board.s1e+board.s1d+board.s1c=='':
moves = '1b1g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r|l', Wboard.w1b)and w1f==''\
and board.s1e+board.s1d+board.s1c=='':
moves = '1b1f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r|l', Wboard.w1b)and w1e==''\
and board.s1d+board.s1c=='':
moves = '1b1e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r|l', Wboard.w1b)and w1d==''\
and board.s1c=='':
moves = '1b1d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
# Rook rays west along rank b.
if re.match(r'\+r|r', Wboard.w1b)and w3b==''\
and board.s2b=='':
moves = '1b3b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w1b)and w4b==''\
and board.s2b+board.s3b=='':
moves = '1b4b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w1b)and w5b==''\
and board.s2b+board.s3b+board.s4b=='':
moves = '1b5b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w1b)and w6b==''\
and board.s2b+board.s3b+board.s4b+board.s5b=='':
moves = '1b6b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w1b)and w7b==''\
and board.s2b+board.s3b+board.s4b+board.s5b+board.s6b=='':
moves = '1b7b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w1b)and w8b==''\
and board.s2b+board.s3b+board.s4b+board.s5b+board.s6b+board.s7b=='':
moves = '1b8b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w1b)and w9b==''\
and board.s2b+board.s3b+board.s4b+board.s5b+board.s6b+board.s7b+board.s8b=='':
moves = '1b9b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
# Bishop diagonals (plain bishop promotes in the zone; '+b' does not).
if re.match('b',Wboard.w1b)and w6g==''\
and board.s2c+board.s3d+board.s4e+board.s5f=='':
moves = '1b6g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w1b)and w7h==''\
and board.s2c+board.s3d+board.s4e+board.s5f+board.s6g=='':
moves = '1b7h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w1b)and w8i==''\
and board.s2c+board.s3d+board.s4e+board.s5f+board.s6g+board.s7h=='':
moves = '1b8i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b|b', Wboard.w1b)and w3d==''\
and board.s2c=='':
moves = '1b3d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b|b', Wboard.w1b)and w4e==''\
and board.s2c+board.s3d=='':
moves = '1b4e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b|b', Wboard.w1b)and w5f==''\
and board.s2c+board.s3d+board.s4e=='':
moves = '1b5f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w1b)and w6g==''\
and board.s2c+board.s3d+board.s4e+board.s5f=='':
moves = '1b6g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w1b)and w7h==''\
and board.s2c+board.s3d+board.s4e+board.s5f+board.s6g=='':
moves = '1b7h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w1b)and w8i==''\
and board.s2c+board.s3d+board.s4e+board.s5f+board.s6g+board.s7h=='':
moves = '1b8i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
# --- Escape moves for the white piece (if any) on 2b.  Same stanza
# pattern: regex = allowed piece letters, wXX == '' = target free,
# board.sXX sum == '' = clear ray; kept when kaihimore leaves
# oute.oute == 0. ---
if Wboard.w2b !='':
# Single-square steps and knight jumps.
if re.match(r'[plsgrk+]', Wboard.w2b)and w2c=='':
moves = '2b2c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[sgbk+]', Wboard.w2b)and w1c=='':
moves = '2b1c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[sgbk+]', Wboard.w2b)and w3c=='':
moves = '2b3c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[grk+]', Wboard.w2b)and w1b=='':
moves = '2b1b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[grk+]', Wboard.w2b)and w3b=='':
moves = '2b3b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[grk+]', Wboard.w2b)and w2a=='':
moves = '2b2a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|\+b|b|s|k',Wboard.w2b)and w1a=='':
moves = '2b1a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|\+b|b|s|k',Wboard.w2b)and w3a=='':
moves = '2b3a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('n', Wboard.w2b)and w1d=='':
moves = '2b1d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('n', Wboard.w2b)and w3d=='':
moves = '2b3d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
# Rook/lance rays down the 2-file (promotion variants from rank g).
if re.match('\+r', Wboard.w2b)and w2i==''\
and board.s2h+board.s2g+board.s2f+board.s2e+board.s2d+board.s2c=='':
moves = '2b2i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'r|l', Wboard.w2b)and w2i==''\
and board.s2h+board.s2g+board.s2f+board.s2e+board.s2d+board.s2c=='':
moves = '2b2i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w2b)and w2h==''\
and board.s2g+board.s2f+board.s2e+board.s2d+board.s2c=='':
moves = '2b2h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'r|l', Wboard.w2b)and w2h==''\
and board.s2g+board.s2f+board.s2e+board.s2d+board.s2c=='':
moves = '2b2h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|l', Wboard.w2b)and w2g==''\
and board.s2f+board.s2e+board.s2d+board.s2c=='':
moves = '2b2g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'r|l', Wboard.w2b)and w2g==''\
and board.s2f+board.s2e+board.s2d+board.s2c=='':
moves = '2b2g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r|l', Wboard.w2b)and w2f==''\
and board.s2e+board.s2d+board.s2c=='':
moves = '2b2f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r|l', Wboard.w2b)and w2e==''\
and board.s2d+board.s2c=='':
moves = '2b2e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r|l', Wboard.w2b)and w2d==''\
and board.s2c=='':
moves = '2b2d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
# Rook rays west along rank b.
if re.match(r'\+r|r', Wboard.w2b)and w4b==''\
and board.s3b=='':
moves = '2b4b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w2b)and w5b==''\
and board.s3b+board.s4b=='':
moves = '2b5b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w2b)and w6b==''\
and board.s3b+board.s4b+board.s5b=='':
moves = '2b6b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w2b)and w7b==''\
and board.s3b+board.s4b+board.s5b+board.s6b=='':
moves = '2b7b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w2b)and w8b==''\
and board.s3b+board.s4b+board.s5b+board.s6b+board.s7b=='':
moves = '2b8b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w2b)and w9b==''\
and board.s3b+board.s4b+board.s5b+board.s6b+board.s7b+board.s8b=='':
moves = '2b9b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
# Bishop diagonals (plain bishop promotes in the zone; '+b' does not).
if re.match('b',Wboard.w2b)and w7g==''\
and board.s3c+board.s4d+board.s5e+board.s6f=='':
moves = '2b7g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w2b)and w8h==''\
and board.s3c+board.s4d+board.s5e+board.s6f+board.s7g=='':
moves = '2b8h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w2b)and w9i==''\
and board.s3c+board.s4d+board.s5e+board.s6f+board.s7g+board.s8h=='':
moves = '2b9i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b|b', Wboard.w2b)and w4d==''\
and board.s3c=='':
moves = '2b4d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b|b', Wboard.w2b)and w5e==''\
and board.s3c+board.s4d=='':
moves = '2b5e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b|b', Wboard.w2b)and w6f==''\
and board.s3c+board.s4d+board.s5e=='':
moves = '2b6f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w2b)and w7g==''\
and board.s3c+board.s4d+board.s5e+board.s6f=='':
moves = '2b7g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w2b)and w8h==''\
and board.s3c+board.s4d+board.s5e+board.s6f+board.s7g=='':
moves = '2b8h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w2b)and w9i==''\
and board.s3c+board.s4d+board.s5e+board.s6f+board.s7g+board.s8h=='':
moves = '2b9i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
# --- Escape moves for the white piece (if any) on 3b.  Same stanza
# pattern: regex = allowed piece letters, wXX == '' = target free,
# board.sXX sum == '' = clear ray; kept when kaihimore leaves
# oute.oute == 0. ---
if Wboard.w3b !='':
# Single-square steps and knight jumps.
if re.match(r'[plsgrk+]', Wboard.w3b)and w3c=='':
moves = '3b3c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[sgbk+]', Wboard.w3b)and w2c=='':
moves = '3b2c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[sgbk+]', Wboard.w3b)and w4c=='':
moves = '3b4c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[grk+]', Wboard.w3b)and w2b=='':
moves = '3b2b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[grk+]', Wboard.w3b)and w4b=='':
moves = '3b4b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[grk+]', Wboard.w3b)and w3a=='':
moves = '3b3a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|\+b|b|s|k',Wboard.w3b)and w2a=='':
moves = '3b2a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|\+b|b|s|k',Wboard.w3b)and w4a=='':
moves = '3b4a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('n', Wboard.w3b)and w2d=='':
moves = '3b2d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('n', Wboard.w3b)and w4d=='':
moves = '3b4d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
# Rook/lance rays down the 3-file (promotion variants from rank g).
if re.match('\+r', Wboard.w3b)and w3i==''\
and board.s3h+board.s3g+board.s3f+board.s3e+board.s3d+board.s3c=='':
moves = '3b3i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'r|l', Wboard.w3b)and w3i==''\
and board.s3h+board.s3g+board.s3f+board.s3e+board.s3d+board.s3c=='':
moves = '3b3i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w3b)and w3h==''\
and board.s3g+board.s3f+board.s3e+board.s3d+board.s3c=='':
moves = '3b3h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'r|l', Wboard.w3b)and w3h==''\
and board.s3g+board.s3f+board.s3e+board.s3d+board.s3c=='':
moves = '3b3h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|l', Wboard.w3b)and w3g==''\
and board.s3f+board.s3e+board.s3d+board.s3c=='':
moves = '3b3g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'r|l', Wboard.w3b)and w3g==''\
and board.s3f+board.s3e+board.s3d+board.s3c=='':
moves = '3b3g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r|l', Wboard.w3b)and w3f==''\
and board.s3e+board.s3d+board.s3c=='':
moves = '3b3f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r|l', Wboard.w3b)and w3e==''\
and board.s3d+board.s3c=='':
moves = '3b3e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r|l', Wboard.w3b)and w3d==''\
and board.s3c=='':
moves = '3b3d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
# Rook rays along rank b (east to 1b, west to 9b).
if re.match(r'\+r|r', Wboard.w3b)and w1b==''\
and board.s2b=='':
moves = '3b1b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w3b)and w5b==''\
and board.s4b=='':
moves = '3b5b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w3b)and w6b==''\
and board.s4b+board.s5b=='':
moves = '3b6b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w3b)and w7b==''\
and board.s4b+board.s5b+board.s6b=='':
moves = '3b7b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w3b)and w8b==''\
and board.s4b+board.s5b+board.s6b+board.s7b=='':
moves = '3b8b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w3b)and w9b==''\
and board.s4b+board.s5b+board.s6b+board.s7b+board.s8b=='':
moves = '3b9b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
# Bishop diagonals (plain bishop promotes in the zone; '+b' does not).
if re.match('b',Wboard.w3b)and w8g==''\
and board.s4c+board.s5d+board.s6e+board.s7f=='':
moves = '3b8g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w3b)and w9h==''\
and board.s4c+board.s5d+board.s6e+board.s7f+board.s8g=='':
moves = '3b9h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b|b', Wboard.w3b)and w5d==''\
and board.s4c=='':
moves = '3b5d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b|b', Wboard.w3b)and w6e==''\
and board.s4c+board.s5d=='':
moves = '3b6e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b|b', Wboard.w3b)and w7f==''\
and board.s4c+board.s5d+board.s6e=='':
moves = '3b7f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w3b)and w8g==''\
and board.s4c+board.s5d+board.s6e+board.s7f=='':
moves = '3b8g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w3b)and w9h==''\
and board.s4c+board.s5d+board.s6e+board.s7f+board.s8g=='':
moves = '3b9h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b|b', Wboard.w3b)and w1d==''\
and board.s2c=='':
moves = '3b1d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
# --- Escape moves for the white piece (if any) on 4b.  Same stanza
# pattern: regex = allowed piece letters, wXX == '' = target free,
# board.sXX sum == '' = clear ray; kept when kaihimore leaves
# oute.oute == 0. ---
if Wboard.w4b !='':
# Single-square steps and knight jumps.
if re.match(r'[plsgrk+]', Wboard.w4b)and w4c=='':
moves = '4b4c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[sgbk+]', Wboard.w4b)and w3c=='':
moves = '4b3c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[sgbk+]', Wboard.w4b)and w5c=='':
moves = '4b5c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[grk+]', Wboard.w4b)and w3b=='':
moves = '4b3b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[grk+]', Wboard.w4b)and w5b=='':
moves = '4b5b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[grk+]', Wboard.w4b)and w4a=='':
moves = '4b4a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|\+b|b|s|k',Wboard.w4b)and w3a=='':
moves = '4b3a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|\+b|b|s|k',Wboard.w4b)and w5a=='':
moves = '4b5a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('n', Wboard.w4b)and w3d=='':
moves = '4b3d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('n', Wboard.w4b)and w5d=='':
moves = '4b5d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
# Rook/lance rays down the 4-file (promotion variants from rank g).
if re.match('\+r', Wboard.w4b)and w4i==''\
and board.s4h+board.s4g+board.s4f+board.s4e+board.s4d+board.s4c=='':
moves = '4b4i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'r|l', Wboard.w4b)and w4i==''\
and board.s4h+board.s4g+board.s4f+board.s4e+board.s4d+board.s4c=='':
moves = '4b4i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w4b)and w4h==''\
and board.s4g+board.s4f+board.s4e+board.s4d+board.s4c=='':
moves = '4b4h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'r|l', Wboard.w4b)and w4h==''\
and board.s4g+board.s4f+board.s4e+board.s4d+board.s4c=='':
moves = '4b4h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|l', Wboard.w4b)and w4g==''\
and board.s4f+board.s4e+board.s4d+board.s4c=='':
moves = '4b4g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'r|l', Wboard.w4b)and w4g==''\
and board.s4f+board.s4e+board.s4d+board.s4c=='':
moves = '4b4g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r|l', Wboard.w4b)and w4f==''\
and board.s4e+board.s4d+board.s4c=='':
moves = '4b4f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r|l', Wboard.w4b)and w4e==''\
and board.s4d+board.s4c=='':
moves = '4b4e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r|l', Wboard.w4b)and w4d==''\
and board.s4c=='':
moves = '4b4d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
# Rook rays along rank b in both directions.
if re.match(r'\+r|r', Wboard.w4b)and w1b==''\
and board.s2b+board.s3b=='':
moves = '4b1b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w4b)and w2b==''\
and board.s3b=='':
moves = '4b2b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w4b)and w6b==''\
and board.s5b=='':
moves = '4b6b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w4b)and w7b==''\
and board.s5b+board.s6b=='':
moves = '4b7b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w4b)and w8b==''\
and board.s5b+board.s6b+board.s7b=='':
moves = '4b8b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w4b)and w9b==''\
and board.s5b+board.s6b+board.s7b+board.s8b=='':
moves = '4b9b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
# Bishop diagonals; only the 9g terminus lies in the promotion zone,
# hence the paired '4b9g' ('+b') and '4b9g+' ('b') stanzas.
if re.match(r'\+b|b', Wboard.w4b)and w6d==''\
and board.s5c=='':
moves = '4b6d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b|b', Wboard.w4b)and w7e==''\
and board.s5c+board.s6d=='':
moves = '4b7e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b|b', Wboard.w4b)and w8f==''\
and board.s5c+board.s6d+board.s7e=='':
moves = '4b8f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+b', Wboard.w4b)and w9g==''\
and board.s5c+board.s6d+board.s7e+board.s8f=='':
moves = '4b9g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b', Wboard.w4b)and w9g==''\
and board.s5c+board.s6d+board.s7e+board.s8f=='':
moves = '4b9g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b|b', Wboard.w4b)and w1e==''\
and board.s2d+board.s3c=='':
moves = '4b1e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b|b', Wboard.w4b)and w2d==''\
and board.s3c=='':
moves = '4b2d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
# --- Escape moves for the white piece (if any) on 5b (excerpt is cut
# off before this group finishes).  Same stanza pattern: regex = allowed
# piece letters, wXX == '' = target free, board.sXX sum == '' = clear
# ray; kept when kaihimore leaves oute.oute == 0. ---
if Wboard.w5b !='':
# Single-square steps and knight jumps.
if re.match(r'[plsgrk+]', Wboard.w5b)and w5c=='':
moves = '5b5c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[sgbk+]', Wboard.w5b)and w4c=='':
moves = '5b4c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[sgbk+]', Wboard.w5b)and w6c=='':
moves = '5b6c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[grk+]', Wboard.w5b)and w4b=='':
moves = '5b4b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[grk+]', Wboard.w5b)and w6b=='':
moves = '5b6b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[grk+]', Wboard.w5b)and w5a=='':
moves = '5b5a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|\+b|b|s|k',Wboard.w5b)and w4a=='':
moves = '5b4a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|\+b|b|s|k',Wboard.w5b)and w6a=='':
moves = '5b6a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('n', Wboard.w5b)and w4d=='':
moves = '5b4d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('n', Wboard.w5b)and w6d=='':
moves = '5b6d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
# Rook/lance rays down the 5-file (promotion variants from rank g).
if re.match('\+r', Wboard.w5b)and w5i==''\
and board.s5h+board.s5g+board.s5f+board.s5e+board.s5d+board.s5c=='':
moves = '5b5i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'r|l', Wboard.w5b)and w5i==''\
and board.s5h+board.s5g+board.s5f+board.s5e+board.s5d+board.s5c=='':
moves = '5b5i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('\+r', Wboard.w5b)and w5h==''\
and board.s5g+board.s5f+board.s5e+board.s5d+board.s5c=='':
moves = '5b5h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'r|l', Wboard.w5b)and w5h==''\
and board.s5g+board.s5f+board.s5e+board.s5d+board.s5c=='':
moves = '5b5h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|l', Wboard.w5b)and w5g==''\
and board.s5f+board.s5e+board.s5d+board.s5c=='':
moves = '5b5g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'r|l', Wboard.w5b)and w5g==''\
and board.s5f+board.s5e+board.s5d+board.s5c=='':
moves = '5b5g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r|l', Wboard.w5b)and w5f==''\
and board.s5e+board.s5d+board.s5c=='':
moves = '5b5f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r|l', Wboard.w5b)and w5e==''\
and board.s5d+board.s5c=='':
moves = '5b5e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r|l', Wboard.w5b)and w5d==''\
and board.s5c=='':
moves = '5b5d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
# Rook rays along rank b in both directions.
if re.match(r'\+r|r', Wboard.w5b)and w1b==''\
and board.s2b+board.s3b+board.s4b=='':
moves = '5b1b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w5b)and w2b==''\
and board.s3b+board.s4b=='':
moves = '5b2b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w5b)and w3b==''\
and board.s4b=='':
moves = '5b3b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w5b)and w7b==''\
and board.s6b=='':
moves = '5b7b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w5b)and w8b==''\
and board.s6b+board.s7b=='':
moves = '5b8b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|r', Wboard.w5b)and w9b==''\
and board.s6b+board.s7b+board.s8b=='':
moves = '5b9b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b|b', Wboard.w5b)and w7d==''\
and board.s6c=='':
moves = '5b7d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b|b', Wboard.w5b)and w8e==''\
and board.s6c+board.s7d=='':
moves = '5b8e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b|b', Wboard.w5b)and w9f==''\
and board.s6c+board.s7d+board.s8e=='':
moves = '5b9f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b|b', Wboard.w5b)and w2e==''\
and board.s3d+board.s4c=='':
moves = '5b2e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b|b', Wboard.w5b)and w3d==''\
and board.s4c=='':
moves = '5b3d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b|b', Wboard.w5b)and w1f==''\
and board.s4c+board.s3d+board.s2e=='':
moves = '5b1f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.w6b != '':
    # Check-evasion candidates for the white piece standing on 6b.
    # Each entry is (piece_pattern, destination, route, move) — see the
    # sibling per-square tables; destination and every route square must
    # be empty, a trailing '+' in the move promotes.  Raw strings fix
    # the invalid '\+' escapes of the original non-raw patterns.
    w6b_candidates = (
        (r'[plsgrk+]', w6c, '', '6b6c'),
        (r'[sgbk+]', w5c, '', '6b5c'),
        (r'[sgbk+]', w7c, '', '6b7c'),
        (r'[grk+]', w5b, '', '6b5b'),
        (r'[grk+]', w7b, '', '6b7b'),
        (r'[grk+]', w6a, '', '6b6a'),
        (r'\+r|\+b|b|s|k', w5a, '', '6b5a'),
        (r'\+r|\+b|b|s|k', w7a, '', '6b7a'),
        ('n', w5d, '', '6b5d'),
        ('n', w7d, '', '6b7d'),
        (r'\+r', w6i, board.s6h+board.s6g+board.s6f+board.s6e+board.s6d+board.s6c, '6b6i'),
        (r'r|l', w6i, board.s6h+board.s6g+board.s6f+board.s6e+board.s6d+board.s6c, '6b6i+'),
        (r'\+r', w6h, board.s6g+board.s6f+board.s6e+board.s6d+board.s6c, '6b6h'),
        (r'r|l', w6h, board.s6g+board.s6f+board.s6e+board.s6d+board.s6c, '6b6h+'),
        (r'\+r|l', w6g, board.s6f+board.s6e+board.s6d+board.s6c, '6b6g'),
        (r'r|l', w6g, board.s6f+board.s6e+board.s6d+board.s6c, '6b6g+'),
        (r'\+r|r|l', w6f, board.s6e+board.s6d+board.s6c, '6b6f'),
        (r'\+r|r|l', w6e, board.s6d+board.s6c, '6b6e'),
        (r'\+r|r|l', w6d, board.s6c, '6b6d'),
        (r'\+r|r', w9b, board.s8b+board.s7b, '6b9b'),
        (r'\+r|r', w8b, board.s7b, '6b8b'),
        (r'\+r|r', w4b, board.s5b, '6b4b'),
        (r'\+r|r', w3b, board.s5b+board.s4b, '6b3b'),
        (r'\+r|r', w2b, board.s5b+board.s4b+board.s3b, '6b2b'),
        (r'\+r|r', w1b, board.s5b+board.s4b+board.s3b+board.s2b, '6b1b'),
        (r'\+b|b', w4d, board.s5c, '6b4d'),
        (r'\+b|b', w3e, board.s5c+board.s4d, '6b3e'),
        (r'\+b|b', w2f, board.s5c+board.s4d+board.s3e, '6b2f'),
        (r'\+b', w1g, board.s5c+board.s4d+board.s3e+board.s2f, '6b1g'),
        ('b', w1g, board.s5c+board.s4d+board.s3e+board.s2f, '6b1g+'),
        (r'\+b|b', w9e, board.s8d+board.s7c, '6b9e'),
        (r'\+b|b', w8d, board.s7c, '6b8d'),
    )
    for cand_pat, cand_dst, cand_route, cand_mv in w6b_candidates:
        if re.match(cand_pat, Wboard.w6b) and cand_dst == '' and cand_route == '':
            moves = cand_mv
            kaihimore(moves)
            if oute.oute == 0:  # move resolves the check
                depth1.append(moves)
if Wboard.w7b != '':
    # Check-evasion candidates for the white piece standing on 7b.
    # Each entry is (piece_pattern, destination, route, move) — see the
    # sibling per-square tables; destination and every route square must
    # be empty, a trailing '+' in the move promotes.  Raw strings fix
    # the invalid '\+' escapes of the original non-raw patterns.
    w7b_candidates = (
        (r'[plsgrk+]', w7c, '', '7b7c'),
        (r'[sgbk+]', w6c, '', '7b6c'),
        (r'[sgbk+]', w8c, '', '7b8c'),
        (r'[grk+]', w6b, '', '7b6b'),
        (r'[grk+]', w8b, '', '7b8b'),
        (r'[grk+]', w7a, '', '7b7a'),
        (r'\+r|\+b|b|s|k', w6a, '', '7b6a'),
        (r'\+r|\+b|b|s|k', w8a, '', '7b8a'),
        ('n', w6d, '', '7b6d'),
        ('n', w8d, '', '7b8d'),
        (r'\+r', w7i, board.s7h+board.s7g+board.s7f+board.s7e+board.s7d+board.s7c, '7b7i'),
        (r'r|l', w7i, board.s7h+board.s7g+board.s7f+board.s7e+board.s7d+board.s7c, '7b7i+'),
        (r'\+r', w7h, board.s7g+board.s7f+board.s7e+board.s7d+board.s7c, '7b7h'),
        (r'r|l', w7h, board.s7g+board.s7f+board.s7e+board.s7d+board.s7c, '7b7h+'),
        (r'\+r|l', w7g, board.s7f+board.s7e+board.s7d+board.s7c, '7b7g'),
        (r'r|l', w7g, board.s7f+board.s7e+board.s7d+board.s7c, '7b7g+'),
        (r'\+r|r|l', w7f, board.s7e+board.s7d+board.s7c, '7b7f'),
        (r'\+r|r|l', w7e, board.s7d+board.s7c, '7b7e'),
        (r'\+r|r|l', w7d, board.s7c, '7b7d'),
        (r'\+r|r', w9b, board.s8b, '7b9b'),
        (r'\+r|r', w5b, board.s6b, '7b5b'),
        (r'\+r|r', w4b, board.s6b+board.s5b, '7b4b'),
        (r'\+r|r', w3b, board.s6b+board.s5b+board.s4b, '7b3b'),
        (r'\+r|r', w2b, board.s6b+board.s5b+board.s4b+board.s3b, '7b2b'),
        (r'\+r|r', w1b, board.s6b+board.s5b+board.s4b+board.s3b+board.s2b, '7b1b'),
        ('b', w2g, board.s6c+board.s5d+board.s4e+board.s3f, '7b2g+'),
        ('b', w1h, board.s6c+board.s5d+board.s4e+board.s3f+board.s2g, '7b1h+'),
        (r'\+b|b', w5d, board.s6c, '7b5d'),
        (r'\+b|b', w4e, board.s6c+board.s5d, '7b4e'),
        (r'\+b|b', w3f, board.s6c+board.s5d+board.s4e, '7b3f'),
        (r'\+b', w2g, board.s6c+board.s5d+board.s4e+board.s3f, '7b2g'),
        (r'\+b', w1h, board.s6c+board.s5d+board.s4e+board.s3f+board.s2g, '7b1h'),
        (r'\+b|b', w9d, board.s8c, '7b9d'),
    )
    for cand_pat, cand_dst, cand_route, cand_mv in w7b_candidates:
        if re.match(cand_pat, Wboard.w7b) and cand_dst == '' and cand_route == '':
            moves = cand_mv
            kaihimore(moves)
            if oute.oute == 0:  # move resolves the check
                depth1.append(moves)
if Wboard.w8b != '':
    # Check-evasion candidates for the white piece standing on 8b.
    # Each entry is (piece_pattern, destination, route, move) — see the
    # sibling per-square tables; destination and every route square must
    # be empty, a trailing '+' in the move promotes.  Raw strings fix
    # the invalid '\+' escapes of the original non-raw patterns.
    w8b_candidates = (
        (r'[plsgrk+]', w8c, '', '8b8c'),
        (r'[sgbk+]', w7c, '', '8b7c'),
        (r'[sgbk+]', w9c, '', '8b9c'),
        (r'[grk+]', w7b, '', '8b7b'),
        (r'[grk+]', w9b, '', '8b9b'),
        (r'[grk+]', w8a, '', '8b8a'),
        (r'\+r|\+b|b|s|k', w7a, '', '8b7a'),
        (r'\+r|\+b|b|s|k', w9a, '', '8b9a'),
        ('n', w7d, '', '8b7d'),
        ('n', w9d, '', '8b9d'),
        (r'\+r', w8i, board.s8h+board.s8g+board.s8f+board.s8e+board.s8d+board.s8c, '8b8i'),
        (r'r|l', w8i, board.s8h+board.s8g+board.s8f+board.s8e+board.s8d+board.s8c, '8b8i+'),
        (r'\+r', w8h, board.s8g+board.s8f+board.s8e+board.s8d+board.s8c, '8b8h'),
        (r'r|l', w8h, board.s8g+board.s8f+board.s8e+board.s8d+board.s8c, '8b8h+'),
        (r'\+r|l', w8g, board.s8f+board.s8e+board.s8d+board.s8c, '8b8g'),
        (r'r|l', w8g, board.s8f+board.s8e+board.s8d+board.s8c, '8b8g+'),
        (r'\+r|r|l', w8f, board.s8e+board.s8d+board.s8c, '8b8f'),
        (r'\+r|r|l', w8e, board.s8d+board.s8c, '8b8e'),
        (r'\+r|r|l', w8d, board.s8c, '8b8d'),
        (r'\+r|r', w6b, board.s7b, '8b6b'),
        (r'\+r|r', w5b, board.s7b+board.s6b, '8b5b'),
        (r'\+r|r', w4b, board.s7b+board.s6b+board.s5b, '8b4b'),
        (r'\+r|r', w3b, board.s7b+board.s6b+board.s5b+board.s4b, '8b3b'),
        (r'\+r|r', w2b, board.s7b+board.s6b+board.s5b+board.s4b+board.s3b, '8b2b'),
        (r'\+r|r', w1b, board.s7b+board.s6b+board.s5b+board.s4b+board.s3b+board.s2b, '8b1b'),
        ('b', w3g, board.s7c+board.s6d+board.s5e+board.s4f, '8b3g+'),
        ('b', w2h, board.s7c+board.s6d+board.s5e+board.s4f+board.s3g, '8b2h+'),
        ('b', w1i, board.s7c+board.s6d+board.s5e+board.s4f+board.s3g+board.s2h, '8b1i+'),
        (r'\+b|b', w6d, board.s7c, '8b6d'),
        (r'\+b|b', w5e, board.s7c+board.s6d, '8b5e'),
        (r'\+b|b', w4f, board.s7c+board.s6d+board.s5e, '8b4f'),
        (r'\+b', w3g, board.s7c+board.s6d+board.s5e+board.s4f, '8b3g'),
        (r'\+b', w2h, board.s7c+board.s6d+board.s5e+board.s4f+board.s3g, '8b2h'),
        (r'\+b', w1i, board.s7c+board.s6d+board.s5e+board.s4f+board.s3g+board.s2h, '8b1i'),
    )
    for cand_pat, cand_dst, cand_route, cand_mv in w8b_candidates:
        if re.match(cand_pat, Wboard.w8b) and cand_dst == '' and cand_route == '':
            moves = cand_mv
            kaihimore(moves)
            if oute.oute == 0:  # move resolves the check
                depth1.append(moves)
if Wboard.w9b != '':
    # Check-evasion candidates for the white piece standing on 9b
    # (edge file, so only one sideways/diagonal direction exists).
    # Each entry is (piece_pattern, destination, route, move); the
    # destination and every route square must be empty, a trailing '+'
    # in the move promotes.  Raw strings fix the invalid '\+' escapes
    # of the original non-raw patterns.
    w9b_candidates = (
        (r'[plsgrk+]', w9c, '', '9b9c'),
        (r'[sgbk+]', w8c, '', '9b8c'),
        (r'[grk+]', w8b, '', '9b8b'),
        (r'[grk+]', w9a, '', '9b9a'),
        (r'\+r|\+b|b|s|k', w8a, '', '9b8a'),
        ('n', w8d, '', '9b8d'),
        (r'\+r', w9i, board.s9h+board.s9g+board.s9f+board.s9e+board.s9d+board.s9c, '9b9i'),
        (r'r|l', w9i, board.s9h+board.s9g+board.s9f+board.s9e+board.s9d+board.s9c, '9b9i+'),
        (r'\+r', w9h, board.s9g+board.s9f+board.s9e+board.s9d+board.s9c, '9b9h'),
        (r'r|l', w9h, board.s9g+board.s9f+board.s9e+board.s9d+board.s9c, '9b9h+'),
        (r'\+r|l', w9g, board.s9f+board.s9e+board.s9d+board.s9c, '9b9g'),
        (r'r|l', w9g, board.s9f+board.s9e+board.s9d+board.s9c, '9b9g+'),
        (r'\+r|r|l', w9f, board.s9e+board.s9d+board.s9c, '9b9f'),
        (r'\+r|r|l', w9e, board.s9d+board.s9c, '9b9e'),
        (r'\+r|r|l', w9d, board.s9c, '9b9d'),
        (r'\+r|r', w7b, board.s8b, '9b7b'),
        (r'\+r|r', w6b, board.s8b+board.s7b, '9b6b'),
        (r'\+r|r', w5b, board.s8b+board.s7b+board.s6b, '9b5b'),
        (r'\+r|r', w4b, board.s8b+board.s7b+board.s6b+board.s5b, '9b4b'),
        (r'\+r|r', w3b, board.s8b+board.s7b+board.s6b+board.s5b+board.s4b, '9b3b'),
        (r'\+r|r', w2b, board.s8b+board.s7b+board.s6b+board.s5b+board.s4b+board.s3b, '9b2b'),
        (r'\+r|r', w1b, board.s8b+board.s7b+board.s6b+board.s5b+board.s4b+board.s3b+board.s2b, '9b1b'),
        ('b', w4g, board.s8c+board.s7d+board.s6e+board.s5f, '9b4g+'),
        ('b', w3h, board.s8c+board.s7d+board.s6e+board.s5f+board.s4g, '9b3h+'),
        ('b', w2i, board.s8c+board.s7d+board.s6e+board.s5f+board.s4g+board.s3h, '9b2i+'),
        (r'\+b|b', w7d, board.s8c, '9b7d'),
        (r'\+b|b', w6e, board.s8c+board.s7d, '9b6e'),
        (r'\+b|b', w5f, board.s8c+board.s7d+board.s6e, '9b5f'),
        (r'\+b', w4g, board.s8c+board.s7d+board.s6e+board.s5f, '9b4g'),
        (r'\+b', w3h, board.s8c+board.s7d+board.s6e+board.s5f+board.s4g, '9b3h'),
        (r'\+b', w2i, board.s8c+board.s7d+board.s6e+board.s5f+board.s4g+board.s3h, '9b2i'),
    )
    for cand_pat, cand_dst, cand_route, cand_mv in w9b_candidates:
        if re.match(cand_pat, Wboard.w9b) and cand_dst == '' and cand_route == '':
            moves = cand_mv
            kaihimore(moves)
            if oute.oute == 0:  # move resolves the check
                depth1.append(moves)
if Wboard.w1a != '':
    # Check-evasion candidates for the white piece standing on 1a
    # (corner square, so only one sideways/diagonal direction exists).
    # Each entry is (piece_pattern, destination, route, move); the
    # destination and every route square must be empty, a trailing '+'
    # in the move promotes.  Raw strings fix the invalid '\+' escapes
    # of the original non-raw patterns.
    w1a_candidates = (
        (r'[plsgrk+]', w1b, '', '1a1b'),
        (r'[sgbk+]', w2b, '', '1a2b'),
        (r'[grk+]', w2a, '', '1a2a'),
        ('n', w2c, '', '1a2c'),
        (r'\+r', w1i, board.s1h+board.s1g+board.s1f+board.s1e+board.s1d+board.s1c+board.s1b, '1a1i'),
        (r'r|l', w1i, board.s1h+board.s1g+board.s1f+board.s1e+board.s1d+board.s1c+board.s1b, '1a1i+'),
        (r'\+r', w1h, board.s1g+board.s1f+board.s1e+board.s1d+board.s1c+board.s1b, '1a1h'),
        (r'r|l', w1h, board.s1g+board.s1f+board.s1e+board.s1d+board.s1c+board.s1b, '1a1h+'),
        (r'\+r|l', w1g, board.s1f+board.s1e+board.s1d+board.s1c+board.s1b, '1a1g'),
        (r'r|l', w1g, board.s1f+board.s1e+board.s1d+board.s1c+board.s1b, '1a1g+'),
        (r'\+r|r|l', w1f, board.s1e+board.s1d+board.s1c+board.s1b, '1a1f'),
        (r'\+r|r|l', w1e, board.s1d+board.s1c+board.s1b, '1a1e'),
        (r'\+r|r|l', w1d, board.s1c+board.s1b, '1a1d'),
        (r'\+r|r|l', w1c, board.s1b, '1a1c'),
        (r'\+r|r', w3a, board.s2a, '1a3a'),
        (r'\+r|r', w4a, board.s2a+board.s3a, '1a4a'),
        (r'\+r|r', w5a, board.s2a+board.s3a+board.s4a, '1a5a'),
        (r'\+r|r', w6a, board.s2a+board.s3a+board.s4a+board.s5a, '1a6a'),
        (r'\+r|r', w7a, board.s2a+board.s3a+board.s4a+board.s5a+board.s6a, '1a7a'),
        (r'\+r|r', w8a, board.s2a+board.s3a+board.s4a+board.s5a+board.s6a+board.s7a, '1a8a'),
        (r'\+r|r', w9a, board.s2a+board.s3a+board.s4a+board.s5a+board.s6a+board.s7a+board.s8a, '1a9a'),
        ('b', w7g, board.s2b+board.s3c+board.s4d+board.s5e+board.s6f, '1a7g+'),
        ('b', w8h, board.s2b+board.s3c+board.s4d+board.s5e+board.s6f+board.s7g, '1a8h+'),
        ('b', w9i, board.s2b+board.s3c+board.s4d+board.s5e+board.s6f+board.s7g+board.s8h, '1a9i+'),
        (r'\+b|b', w3c, board.s2b, '1a3c'),
        (r'\+b|b', w4d, board.s2b+board.s3c, '1a4d'),
        (r'\+b|b', w5e, board.s2b+board.s3c+board.s4d, '1a5e'),
        (r'\+b|b', w6f, board.s2b+board.s3c+board.s4d+board.s5e, '1a6f'),
        (r'\+b', w7g, board.s2b+board.s3c+board.s4d+board.s5e+board.s6f, '1a7g'),
        (r'\+b', w8h, board.s2b+board.s3c+board.s4d+board.s5e+board.s6f+board.s7g, '1a8h'),
        (r'\+b', w9i, board.s2b+board.s3c+board.s4d+board.s5e+board.s6f+board.s7g+board.s8h, '1a9i'),
    )
    for cand_pat, cand_dst, cand_route, cand_mv in w1a_candidates:
        if re.match(cand_pat, Wboard.w1a) and cand_dst == '' and cand_route == '':
            moves = cand_mv
            kaihimore(moves)
            if oute.oute == 0:  # move resolves the check
                depth1.append(moves)
if Wboard.w2a != '':
    # Check-evasion candidates for the white piece standing on 2a.
    # Each entry is (piece_pattern, destination, route, move); the
    # destination and every route square must be empty, a trailing '+'
    # in the move promotes.  Raw strings fix the invalid '\+' escapes
    # of the original non-raw patterns.
    w2a_candidates = (
        (r'[plsgrk+]', w2b, '', '2a2b'),
        (r'[sgbk+]', w1b, '', '2a1b'),
        (r'[sgbk+]', w3b, '', '2a3b'),
        (r'[grk+]', w1a, '', '2a1a'),
        (r'[grk+]', w3a, '', '2a3a'),
        ('n', w1c, '', '2a1c'),
        ('n', w3c, '', '2a3c'),
        (r'\+r', w2i, board.s2h+board.s2g+board.s2f+board.s2e+board.s2d+board.s2c+board.s2b, '2a2i'),
        (r'r|l', w2i, board.s2h+board.s2g+board.s2f+board.s2e+board.s2d+board.s2c+board.s2b, '2a2i+'),
        (r'\+r', w2h, board.s2g+board.s2f+board.s2e+board.s2d+board.s2c+board.s2b, '2a2h'),
        (r'r|l', w2h, board.s2g+board.s2f+board.s2e+board.s2d+board.s2c+board.s2b, '2a2h+'),
        (r'\+r|l', w2g, board.s2f+board.s2e+board.s2d+board.s2c+board.s2b, '2a2g'),
        (r'r|l', w2g, board.s2f+board.s2e+board.s2d+board.s2c+board.s2b, '2a2g+'),
        (r'\+r|r|l', w2f, board.s2e+board.s2d+board.s2c+board.s2b, '2a2f'),
        (r'\+r|r|l', w2e, board.s2d+board.s2c+board.s2b, '2a2e'),
        (r'\+r|r|l', w2d, board.s2c+board.s2b, '2a2d'),
        (r'\+r|r|l', w2c, board.s2b, '2a2c'),
        (r'\+r|r', w4a, board.s3a, '2a4a'),
        (r'\+r|r', w5a, board.s3a+board.s4a, '2a5a'),
        (r'\+r|r', w6a, board.s3a+board.s4a+board.s5a, '2a6a'),
        (r'\+r|r', w7a, board.s3a+board.s4a+board.s5a+board.s6a, '2a7a'),
        (r'\+r|r', w8a, board.s3a+board.s4a+board.s5a+board.s6a+board.s7a, '2a8a'),
        (r'\+r|r', w9a, board.s3a+board.s4a+board.s5a+board.s6a+board.s7a+board.s8a, '2a9a'),
        ('b', w8g, board.s3b+board.s4c+board.s5d+board.s6e+board.s7f, '2a8g+'),
        ('b', w9h, board.s3b+board.s4c+board.s5d+board.s6e+board.s7f+board.s8g, '2a9h+'),
        (r'\+b|b', w4c, board.s3b, '2a4c'),
        (r'\+b|b', w5d, board.s3b+board.s4c, '2a5d'),
        (r'\+b|b', w6e, board.s3b+board.s4c+board.s5d, '2a6e'),
        (r'\+b|b', w7f, board.s3b+board.s4c+board.s5d+board.s6e, '2a7f'),
        (r'\+b', w8g, board.s3b+board.s4c+board.s5d+board.s6e+board.s7f, '2a8g'),
        (r'\+b', w9h, board.s3b+board.s4c+board.s5d+board.s6e+board.s7f+board.s8g, '2a9h'),
    )
    for cand_pat, cand_dst, cand_route, cand_mv in w2a_candidates:
        if re.match(cand_pat, Wboard.w2a) and cand_dst == '' and cand_route == '':
            moves = cand_mv
            kaihimore(moves)
            if oute.oute == 0:  # move resolves the check
                depth1.append(moves)
# --- White piece on 3a: step moves, knight jumps, then rook/lance slides down
# --- file 3, rook slides along rank 'a', and bishop diagonals.
# Character classes select which piece letters may make each step
# (p=pawn, l=lance, n=knight, s=silver, g=gold, b=bishop, r=rook, k=king,
# '+' = promoted -- hedged from the regexes; confirm against the board setup).
# Every candidate goes through kaihimore() and is kept only if oute.oute == 0.
if Wboard.w3a !='':
    if re.match(r'[plsgrk+]', Wboard.w3a)and w3b=='':
        moves = '3a3b'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[sgbk+]', Wboard.w3a)and w2b=='':
        moves = '3a2b'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[sgbk+]', Wboard.w3a)and w4b=='':
        moves = '3a4b'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[grk+]', Wboard.w3a)and w2a=='':
        moves = '3a2a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[grk+]', Wboard.w3a)and w4a=='':
        moves = '3a4a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # Knight jumps.
    if re.match('n', Wboard.w3a)and w2c=='':
        moves = '3a2c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('n', Wboard.w3a)and w4c=='':
        moves = '3a4c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # Slides down file 3; promoted rook moves have no '+', plain rook/lance
    # moves into the promotion zone are generated with '+'.
    if re.match('\+r', Wboard.w3a)and w3i==''\
            and board.s3h+board.s3g+board.s3f+board.s3e+board.s3d+board.s3c+board.s3b=='':
        moves = '3a3i'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'r|l', Wboard.w3a)and w3i==''\
            and board.s3h+board.s3g+board.s3f+board.s3e+board.s3d+board.s3c+board.s3b=='':
        moves = '3a3i+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('\+r', Wboard.w3a)and w3h==''\
            and board.s3g+board.s3f+board.s3e+board.s3d+board.s3c+board.s3b=='':
        moves = '3a3h'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'r|l', Wboard.w3a)and w3h==''\
            and board.s3g+board.s3f+board.s3e+board.s3d+board.s3c+board.s3b=='':
        moves = '3a3h+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|l', Wboard.w3a)and w3g==''\
            and board.s3f+board.s3e+board.s3d+board.s3c+board.s3b=='':
        moves = '3a3g'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'r|l', Wboard.w3a)and w3g==''\
            and board.s3f+board.s3e+board.s3d+board.s3c+board.s3b=='':
        moves = '3a3g+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|r|l', Wboard.w3a)and w3f==''\
            and board.s3e+board.s3d+board.s3c+board.s3b=='':
        moves = '3a3f'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|r|l', Wboard.w3a)and w3e==''\
            and board.s3d+board.s3c+board.s3b=='':
        moves = '3a3e'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|r|l', Wboard.w3a)and w3d==''\
            and board.s3c+board.s3b=='':
        moves = '3a3d'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|r|l', Wboard.w3a)and w3c==''\
            and board.s3b=='':
        moves = '3a3c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # Rook slides along rank 'a'.
    if re.match(r'\+r|r', Wboard.w3a)and w1a==''\
            and board.s2a=='':
        moves = '3a1a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|r', Wboard.w3a)and w5a==''\
            and board.s4a=='':
        moves = '3a5a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|r', Wboard.w3a)and w6a==''\
            and board.s4a+board.s5a=='':
        moves = '3a6a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|r', Wboard.w3a)and w7a==''\
            and board.s4a+board.s5a+board.s6a=='':
        moves = '3a7a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|r', Wboard.w3a)and w8a==''\
            and board.s4a+board.s5a+board.s6a+board.s7a=='':
        moves = '3a8a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|r', Wboard.w3a)and w9a==''\
            and board.s4a+board.s5a+board.s6a+board.s7a+board.s8a=='':
        moves = '3a9a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # Bishop diagonals (3a -> 4b -> 5c -> ... and 3a -> 2b -> 1c).
    if re.match('b',Wboard.w3a)and w9g==''\
            and board.s4b+board.s5c+board.s6d+board.s7e+board.s8f=='':
        moves = '3a9g+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+b|b', Wboard.w3a)and w1c==''\
            and board.s2b=='':
        moves = '3a1c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+b|b', Wboard.w3a)and w5c==''\
            and board.s4b=='':
        moves = '3a5c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+b|b', Wboard.w3a)and w6d==''\
            and board.s4b+board.s5c=='':
        moves = '3a6d'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
if re.match(r'\+b|b', Wboard.w3a)and w6e==''\
and board.s4b+board.s5c+board.s6d=='':
moves = '3a7e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b|b', Wboard.w3a)and w7f==''\
and board.s4b+board.s5c+board.s6d+board.s7e=='':
moves = '3a8f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
    # Promoted bishop from 3a to 9g (already promoted, so no '+' suffix).
    if re.match('\+b', Wboard.w3a)and w9g==''\
            and board.s4b+board.s5c+board.s6d+board.s7e+board.s8f=='':
        moves = '3a9g'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
# --- White piece on 4a: same template as the other rank-'a' squares.
# Step moves / knight jumps, rook-lance slides down file 4, rook slides along
# rank 'a', and bishop diagonals.  kaihimore() + oute.oute == 0 gate as above.
if Wboard.w4a !='':
    if re.match(r'[plsgrk+]', Wboard.w4a)and w4b=='':
        moves = '4a4b'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[sgbk+]', Wboard.w4a)and w3b=='':
        moves = '4a3b'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[sgbk+]', Wboard.w4a)and w5b=='':
        moves = '4a5b'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[grk+]', Wboard.w4a)and w3a=='':
        moves = '4a3a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[grk+]', Wboard.w4a)and w5a=='':
        moves = '4a5a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('n', Wboard.w4a)and w3c=='':
        moves = '4a3c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('n', Wboard.w4a)and w5c=='':
        moves = '4a5c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # Slides down file 4.
    if re.match('\+r', Wboard.w4a)and w4i==''\
            and board.s4h+board.s4g+board.s4f+board.s4e+board.s4d+board.s4c+board.s4b=='':
        moves = '4a4i'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'r|l', Wboard.w4a)and w4i==''\
            and board.s4h+board.s4g+board.s4f+board.s4e+board.s4d+board.s4c+board.s4b=='':
        moves = '4a4i+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('\+r', Wboard.w4a)and w4h==''\
            and board.s4g+board.s4f+board.s4e+board.s4d+board.s4c+board.s4b=='':
        moves = '4a4h'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'r|l', Wboard.w4a)and w4h==''\
            and board.s4g+board.s4f+board.s4e+board.s4d+board.s4c+board.s4b=='':
        moves = '4a4h+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|l', Wboard.w4a)and w4g==''\
            and board.s4f+board.s4e+board.s4d+board.s4c+board.s4b=='':
        moves = '4a4g'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'r|l', Wboard.w4a)and w4g==''\
            and board.s4f+board.s4e+board.s4d+board.s4c+board.s4b=='':
        moves = '4a4g+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|r|l', Wboard.w4a)and w4f==''\
            and board.s4e+board.s4d+board.s4c+board.s4b=='':
        moves = '4a4f'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|r|l', Wboard.w4a)and w4e==''\
            and board.s4d+board.s4c+board.s4b=='':
        moves = '4a4e'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|r|l', Wboard.w4a)and w4d==''\
            and board.s4c+board.s4b=='':
        moves = '4a4d'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|r|l', Wboard.w4a)and w4c==''\
            and board.s4b=='':
        moves = '4a4c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # Rook slides along rank 'a'.
    if re.match(r'\+r|r', Wboard.w4a)and w1a==''\
            and board.s2a+board.s3a=='':
        moves = '4a1a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|r', Wboard.w4a)and w2a==''\
            and board.s3a=='':
        moves = '4a2a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|r', Wboard.w4a)and w6a==''\
            and board.s5a=='':
        moves = '4a6a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|r', Wboard.w4a)and w7a==''\
            and board.s5a+board.s6a=='':
        moves = '4a7a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|r', Wboard.w4a)and w8a==''\
            and board.s5a+board.s6a+board.s7a=='':
        moves = '4a8a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|r', Wboard.w4a)and w9a==''\
            and board.s5a+board.s6a+board.s7a+board.s8a=='':
        moves = '4a9a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # Bishop diagonals (4a -> 5b -> 6c ... and 4a -> 3b -> 2c -> 1d).
    if re.match(r'\+b|b', Wboard.w4a)and w6c==''\
            and board.s5b=='':
        moves = '4a6c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+b|b', Wboard.w4a)and w7d==''\
            and board.s5b+board.s6c=='':
        moves = '4a7d'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+b|b', Wboard.w4a)and w8e==''\
            and board.s5b+board.s6c+board.s7d=='':
        moves = '4a8e'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+b|b', Wboard.w4a)and w9f==''\
            and board.s5b+board.s6c+board.s7d+board.s8e=='':
        moves = '4a9f'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+b|b', Wboard.w4a)and w1d==''\
            and board.s2c+board.s3b=='':
        moves = '4a1d'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+b|b', Wboard.w4a)and w2c==''\
            and board.s3b=='':
        moves = '4a2c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
# --- White piece on 5a: same template (steps, knight jumps, file-5 slides,
# --- rank-'a' rook slides, bishop diagonals).  kaihimore()/oute gate as above.
if Wboard.w5a !='':
    if re.match(r'[plsgrk+]', Wboard.w5a)and w5b=='':
        moves = '5a5b'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[sgbk+]', Wboard.w5a)and w4b=='':
        moves = '5a4b'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[sgbk+]', Wboard.w5a)and w6b=='':
        moves = '5a6b'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[grk+]', Wboard.w5a)and w4a=='':
        moves = '5a4a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[grk+]', Wboard.w5a)and w6a=='':
        moves = '5a6a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('n', Wboard.w5a)and w4c=='':
        moves = '5a4c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('n', Wboard.w5a)and w6c=='':
        moves = '5a6c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # Slides down file 5.
    if re.match('\+r', Wboard.w5a)and w5i==''\
            and board.s5h+board.s5g+board.s5f+board.s5e+board.s5d+board.s5c+board.s5b=='':
        moves = '5a5i'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'r|l', Wboard.w5a)and w5i==''\
            and board.s5h+board.s5g+board.s5f+board.s5e+board.s5d+board.s5c+board.s5b=='':
        moves = '5a5i+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('\+r', Wboard.w5a)and w5h==''\
            and board.s5g+board.s5f+board.s5e+board.s5d+board.s5c+board.s5b=='':
        moves = '5a5h'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'r|l', Wboard.w5a)and w5h==''\
            and board.s5g+board.s5f+board.s5e+board.s5d+board.s5c+board.s5b=='':
        moves = '5a5h+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|l', Wboard.w5a)and w5g==''\
            and board.s5f+board.s5e+board.s5d+board.s5c+board.s5b=='':
        moves = '5a5g'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'r|l', Wboard.w5a)and w5g==''\
            and board.s5f+board.s5e+board.s5d+board.s5c+board.s5b=='':
        moves = '5a5g+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|r|l', Wboard.w5a)and w5f==''\
            and board.s5e+board.s5d+board.s5c+board.s5b=='':
        moves = '5a5f'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|r|l', Wboard.w5a)and w5e==''\
            and board.s5d+board.s5c+board.s5b=='':
        moves = '5a5e'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|r|l', Wboard.w5a)and w5d==''\
            and board.s5c+board.s5b=='':
        moves = '5a5d'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|r|l', Wboard.w5a)and w5c==''\
            and board.s5b=='':
        moves = '5a5c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # Rook slides along rank 'a'.
    if re.match(r'\+r|r', Wboard.w5a)and w1a==''\
            and board.s2a+board.s3a+board.s4a=='':
        moves = '5a1a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|r', Wboard.w5a)and w2a==''\
            and board.s3a+board.s4a=='':
        moves = '5a2a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|r', Wboard.w5a)and w3a==''\
            and board.s4a=='':
        moves = '5a3a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|r', Wboard.w5a)and w7a==''\
            and board.s6a=='':
        moves = '5a7a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|r', Wboard.w5a)and w8a==''\
            and board.s6a+board.s7a=='':
        moves = '5a8a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|r', Wboard.w5a)and w9a==''\
            and board.s6a+board.s7a+board.s8a=='':
        moves = '5a9a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # Bishop diagonals (5a -> 6b -> 7c ... and 5a -> 4b -> 3c -> 2d -> 1e).
    if re.match(r'\+b|b', Wboard.w5a)and w7c==''\
            and board.s6b=='':
        moves = '5a7c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+b|b', Wboard.w5a)and w8d==''\
            and board.s6b+board.s7c=='':
        moves = '5a8d'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+b|b', Wboard.w5a)and w9e==''\
            and board.s6b+board.s7c+board.s8d=='':
        moves = '5a9e'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+b|b', Wboard.w5a)and w2d==''\
            and board.s3c+board.s4b=='':
        moves = '5a2d'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+b|b', Wboard.w5a)and w3c==''\
            and board.s4b=='':
        moves = '5a3c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+b|b', Wboard.w5a)and w1e==''\
            and board.s4b+board.s3c+board.s2d=='':
        moves = '5a1e'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
# --- White piece on 6a: same template (steps, knight jumps, file-6 slides,
# --- rank-'a' rook slides, bishop diagonals).  kaihimore()/oute gate as above.
if Wboard.w6a !='':
    if re.match(r'[plsgrk+]', Wboard.w6a)and w6b=='':
        moves = '6a6b'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[sgbk+]', Wboard.w6a)and w5b=='':
        moves = '6a5b'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[sgbk+]', Wboard.w6a)and w7b=='':
        moves = '6a7b'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[grk+]', Wboard.w6a)and w5a=='':
        moves = '6a5a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[grk+]', Wboard.w6a)and w7a=='':
        moves = '6a7a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('n', Wboard.w6a)and w5c=='':
        moves = '6a5c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('n', Wboard.w6a)and w7c=='':
        moves = '6a7c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # Slides down file 6.
    if re.match('\+r', Wboard.w6a)and w6i==''\
            and board.s6h+board.s6g+board.s6f+board.s6e+board.s6d+board.s6c+board.s6b=='':
        moves = '6a6i'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'r|l', Wboard.w6a)and w6i==''\
            and board.s6h+board.s6g+board.s6f+board.s6e+board.s6d+board.s6c+board.s6b=='':
        moves = '6a6i+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('\+r', Wboard.w6a)and w6h==''\
            and board.s6g+board.s6f+board.s6e+board.s6d+board.s6c+board.s6b=='':
        moves = '6a6h'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'r|l', Wboard.w6a)and w6h==''\
            and board.s6g+board.s6f+board.s6e+board.s6d+board.s6c+board.s6b=='':
        moves = '6a6h+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|l', Wboard.w6a)and w6g==''\
            and board.s6f+board.s6e+board.s6d+board.s6c+board.s6b=='':
        moves = '6a6g'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'r|l', Wboard.w6a)and w6g==''\
            and board.s6f+board.s6e+board.s6d+board.s6c+board.s6b=='':
        moves = '6a6g+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|r|l', Wboard.w6a)and w6f==''\
            and board.s6e+board.s6d+board.s6c+board.s6b=='':
        moves = '6a6f'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|r|l', Wboard.w6a)and w6e==''\
            and board.s6d+board.s6c+board.s6b=='':
        moves = '6a6e'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|r|l', Wboard.w6a)and w6d==''\
            and board.s6c+board.s6b=='':
        moves = '6a6d'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|r|l', Wboard.w6a)and w6c==''\
            and board.s6b=='':
        moves = '6a6c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # Rook slides along rank 'a'.
    if re.match(r'\+r|r', Wboard.w6a)and w9a==''\
            and board.s8a+board.s7a=='':
        moves = '6a9a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|r', Wboard.w6a)and w8a==''\
            and board.s7a=='':
        moves = '6a8a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|r', Wboard.w6a)and w4a==''\
            and board.s5a=='':
        moves = '6a4a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|r', Wboard.w6a)and w3a==''\
            and board.s5a+board.s4a=='':
        moves = '6a3a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|r', Wboard.w6a)and w2a==''\
            and board.s5a+board.s4a+board.s3a=='':
        moves = '6a2a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|r', Wboard.w6a)and w1a==''\
            and board.s5a+board.s4a+board.s3a+board.s2a=='':
        moves = '6a1a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # Bishop diagonals (6a -> 5b -> 4c -> 3d -> 2e -> 1f and 6a -> 7b -> 8c -> 9d).
    if re.match(r'\+b|b', Wboard.w6a)and w4c==''\
            and board.s5b=='':
        moves = '6a4c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+b|b', Wboard.w6a)and w3d==''\
            and board.s5b+board.s4c=='':
        moves = '6a3d'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+b|b', Wboard.w6a)and w2e==''\
            and board.s5b+board.s4c+board.s3d=='':
        moves = '6a2e'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+b|b', Wboard.w6a)and w1f==''\
            and board.s5b+board.s4c+board.s3d+board.s2e=='':
        moves = '6a1f'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+b|b', Wboard.w6a)and w9d==''\
            and board.s8c+board.s7b=='':
        moves = '6a9d'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+b|b', Wboard.w6a)and w8c==''\
            and board.s7b=='':
        moves = '6a8c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
# --- White piece on 7a: same template (steps, knight jumps, file-7 slides,
# --- rank-'a' rook slides, bishop diagonals).  kaihimore()/oute gate as above.
if Wboard.w7a !='':
    if re.match(r'[plsgrk+]', Wboard.w7a)and w7b=='':
        moves = '7a7b'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[sgbk+]', Wboard.w7a)and w6b=='':
        moves = '7a6b'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[sgbk+]', Wboard.w7a)and w8b=='':
        moves = '7a8b'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[grk+]', Wboard.w7a)and w6a=='':
        moves = '7a6a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[grk+]', Wboard.w7a)and w8a=='':
        moves = '7a8a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('n', Wboard.w7a)and w6c=='':
        moves = '7a6c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('n', Wboard.w7a)and w8c=='':
        moves = '7a8c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # Slides down file 7.
    if re.match('\+r', Wboard.w7a)and w7i==''\
            and board.s7h+board.s7g+board.s7f+board.s7e+board.s7d+board.s7c+board.s7b=='':
        moves = '7a7i'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'r|l', Wboard.w7a)and w7i==''\
            and board.s7h+board.s7g+board.s7f+board.s7e+board.s7d+board.s7c+board.s7b=='':
        moves = '7a7i+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('\+r', Wboard.w7a)and w7h==''\
            and board.s7g+board.s7f+board.s7e+board.s7d+board.s7c+board.s7b=='':
        moves = '7a7h'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'r|l', Wboard.w7a)and w7h==''\
            and board.s7g+board.s7f+board.s7e+board.s7d+board.s7c+board.s7b=='':
        moves = '7a7h+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|l', Wboard.w7a)and w7g==''\
            and board.s7f+board.s7e+board.s7d+board.s7c+board.s7b=='':
        moves = '7a7g'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'r|l', Wboard.w7a)and w7g==''\
            and board.s7f+board.s7e+board.s7d+board.s7c+board.s7b=='':
        moves = '7a7g+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|r|l', Wboard.w7a)and w7f==''\
            and board.s7e+board.s7d+board.s7c+board.s7b=='':
        moves = '7a7f'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|r|l', Wboard.w7a)and w7e==''\
            and board.s7d+board.s7c+board.s7b=='':
        moves = '7a7e'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|r|l', Wboard.w7a)and w7d==''\
            and board.s7c+board.s7b=='':
        moves = '7a7d'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|r|l', Wboard.w7a)and w7c==''\
            and board.s7b=='':
        moves = '7a7c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # Rook slides along rank 'a'.
    if re.match(r'\+r|r', Wboard.w7a)and w9a==''\
            and board.s8a=='':
        moves = '7a9a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|r', Wboard.w7a)and w5a==''\
            and board.s6a=='':
        moves = '7a5a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|r', Wboard.w7a)and w4a==''\
            and board.s6a+board.s5a=='':
        moves = '7a4a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|r', Wboard.w7a)and w3a==''\
            and board.s6a+board.s5a+board.s4a=='':
        moves = '7a3a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|r', Wboard.w7a)and w2a==''\
            and board.s6a+board.s5a+board.s4a+board.s3a=='':
        moves = '7a2a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|r', Wboard.w7a)and w1a==''\
            and board.s6a+board.s5a+board.s4a+board.s3a+board.s2a=='':
        moves = '7a1a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # Bishop diagonals (7a -> 6b -> 5c -> 4d -> 3e -> 2f -> 1g and 7a -> 8b -> 9c).
    if re.match('b',Wboard.w7a)and w1g==''\
            and board.s6b+board.s5c+board.s4d+board.s3e+board.s2f=='':
        moves = '7a1g+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+b|b', Wboard.w7a)and w9c==''\
            and board.s8b=='':
        moves = '7a9c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+b|b', Wboard.w7a)and w5c==''\
            and board.s6b=='':
        moves = '7a5c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+b|b', Wboard.w7a)and w4d==''\
            and board.s6b+board.s5c=='':
        moves = '7a4d'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
if re.match(r'\+b|b', Wboard.w7a)and w4e==''\
and board.s6b+board.s5c+board.s4d=='':
moves = '7a3e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b|b', Wboard.w7a)and w3f==''\
and board.s6b+board.s5c+board.s4d+board.s3e=='':
moves = '7a2f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
    # Promoted bishop from 7a to 1g (already promoted, so no '+' suffix).
    if re.match('\+b', Wboard.w7a)and w1g==''\
            and board.s6b+board.s5c+board.s4d+board.s3e+board.s2f=='':
        moves = '7a1g'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
# --- White piece on 8a: same template (steps, knight jumps, file-8 slides,
# --- rank-'a' rook slides, bishop diagonals).  kaihimore()/oute gate as above.
if Wboard.w8a !='':
    if re.match(r'[plsgrk+]', Wboard.w8a)and w8b=='':
        moves = '8a8b'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[sgbk+]', Wboard.w8a)and w7b=='':
        moves = '8a7b'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[sgbk+]', Wboard.w8a)and w9b=='':
        moves = '8a9b'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[grk+]', Wboard.w8a)and w7a=='':
        moves = '8a7a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[grk+]', Wboard.w8a)and w9a=='':
        moves = '8a9a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('n', Wboard.w8a)and w7c=='':
        moves = '8a7c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('n', Wboard.w8a)and w9c=='':
        moves = '8a9c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # Slides down file 8.
    if re.match('\+r', Wboard.w8a)and w8i==''\
            and board.s8h+board.s8g+board.s8f+board.s8e+board.s8d+board.s8c+board.s8b=='':
        moves = '8a8i'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'r|l', Wboard.w8a)and w8i==''\
            and board.s8h+board.s8g+board.s8f+board.s8e+board.s8d+board.s8c+board.s8b=='':
        moves = '8a8i+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('\+r', Wboard.w8a)and w8h==''\
            and board.s8g+board.s8f+board.s8e+board.s8d+board.s8c+board.s8b=='':
        moves = '8a8h'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'r|l', Wboard.w8a)and w8h==''\
            and board.s8g+board.s8f+board.s8e+board.s8d+board.s8c+board.s8b=='':
        moves = '8a8h+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|l', Wboard.w8a)and w8g==''\
            and board.s8f+board.s8e+board.s8d+board.s8c+board.s8b=='':
        moves = '8a8g'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'r|l', Wboard.w8a)and w8g==''\
            and board.s8f+board.s8e+board.s8d+board.s8c+board.s8b=='':
        moves = '8a8g+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|r|l', Wboard.w8a)and w8f==''\
            and board.s8e+board.s8d+board.s8c+board.s8b=='':
        moves = '8a8f'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|r|l', Wboard.w8a)and w8e==''\
            and board.s8d+board.s8c+board.s8b=='':
        moves = '8a8e'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|r|l', Wboard.w8a)and w8d==''\
            and board.s8c+board.s8b=='':
        moves = '8a8d'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|r|l', Wboard.w8a)and w8c==''\
            and board.s8b=='':
        moves = '8a8c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # Rook slides along rank 'a'.
    if re.match(r'\+r|r', Wboard.w8a)and w6a==''\
            and board.s7a=='':
        moves = '8a6a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|r', Wboard.w8a)and w5a==''\
            and board.s7a+board.s6a=='':
        moves = '8a5a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|r', Wboard.w8a)and w4a==''\
            and board.s7a+board.s6a+board.s5a=='':
        moves = '8a4a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|r', Wboard.w8a)and w3a==''\
            and board.s7a+board.s6a+board.s5a+board.s4a=='':
        moves = '8a3a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|r', Wboard.w8a)and w2a==''\
            and board.s7a+board.s6a+board.s5a+board.s4a+board.s3a=='':
        moves = '8a2a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|r', Wboard.w8a)and w1a==''\
            and board.s7a+board.s6a+board.s5a+board.s4a+board.s3a+board.s2a=='':
        moves = '8a1a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # Bishop diagonal 8a -> 7b -> 6c -> 5d -> 4e -> 3f -> 2g -> 1h.
    # Unpromoted bishop promotes on the deep squares; promoted bishop moves plain.
    if re.match('b',Wboard.w8a)and w2g==''\
            and board.s7b+board.s6c+board.s5d+board.s4e+board.s3f=='':
        moves = '8a2g+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('b',Wboard.w8a)and w1h==''\
            and board.s7b+board.s6c+board.s5d+board.s4e+board.s3f+board.s2g=='':
        moves = '8a1h+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+b|b', Wboard.w8a)and w6c==''\
            and board.s7b=='':
        moves = '8a6c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+b|b', Wboard.w8a)and w5d==''\
            and board.s7b+board.s6c=='':
        moves = '8a5d'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+b|b', Wboard.w8a)and w4e==''\
            and board.s7b+board.s6c+board.s5d=='':
        moves = '8a4e'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+b|b', Wboard.w8a)and w3f==''\
            and board.s7b+board.s6c+board.s5d+board.s4e=='':
        moves = '8a3f'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('\+b', Wboard.w8a)and w2g==''\
            and board.s7b+board.s6c+board.s5d+board.s4e+board.s3f=='':
        moves = '8a2g'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('\+b', Wboard.w8a)and w1h==''\
            and board.s7b+board.s6c+board.s5d+board.s4e+board.s3f+board.s2g=='':
        moves = '8a1h'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
# --- White piece on 9a (edge file, so no moves toward a 10th file):
# --- steps, knight jump, file-9 slides, rank-'a' rook slides, and the single
# --- long bishop diagonal 9a -> 8b -> ... -> 1i.  kaihimore()/oute gate as above.
if Wboard.w9a !='':
    if re.match(r'[plsgrk+]', Wboard.w9a)and w9b=='':
        moves = '9a9b'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[sgbk+]', Wboard.w9a)and w8b=='':
        moves = '9a8b'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'[grk+]', Wboard.w9a)and w8a=='':
        moves = '9a8a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('n', Wboard.w9a)and w8c=='':
        moves = '9a8c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # Slides down file 9.
    if re.match('\+r', Wboard.w9a)and w9i==''\
            and board.s9h+board.s9g+board.s9f+board.s9e+board.s9d+board.s9c+board.s9b=='':
        moves = '9a9i'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'r|l', Wboard.w9a)and w9i==''\
            and board.s9h+board.s9g+board.s9f+board.s9e+board.s9d+board.s9c+board.s9b=='':
        moves = '9a9i+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('\+r', Wboard.w9a)and w9h==''\
            and board.s9g+board.s9f+board.s9e+board.s9d+board.s9c+board.s9b=='':
        moves = '9a9h'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'r|l', Wboard.w9a)and w9h==''\
            and board.s9g+board.s9f+board.s9e+board.s9d+board.s9c+board.s9b=='':
        moves = '9a9h+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|l', Wboard.w9a)and w9g==''\
            and board.s9f+board.s9e+board.s9d+board.s9c+board.s9b=='':
        moves = '9a9g'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'r|l', Wboard.w9a)and w9g==''\
            and board.s9f+board.s9e+board.s9d+board.s9c+board.s9b=='':
        moves = '9a9g+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|r|l', Wboard.w9a)and w9f==''\
            and board.s9e+board.s9d+board.s9c+board.s9b=='':
        moves = '9a9f'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|r|l', Wboard.w9a)and w9e==''\
            and board.s9d+board.s9c+board.s9b=='':
        moves = '9a9e'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|r|l', Wboard.w9a)and w9d==''\
            and board.s9c+board.s9b=='':
        moves = '9a9d'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|r|l', Wboard.w9a)and w9c==''\
            and board.s9b=='':
        moves = '9a9c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # Rook slides along rank 'a'.
    if re.match(r'\+r|r', Wboard.w9a)and w7a==''\
            and board.s8a=='':
        moves = '9a7a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|r', Wboard.w9a)and w6a==''\
            and board.s8a+board.s7a=='':
        moves = '9a6a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|r', Wboard.w9a)and w5a==''\
            and board.s8a+board.s7a+board.s6a=='':
        moves = '9a5a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|r', Wboard.w9a)and w4a==''\
            and board.s8a+board.s7a+board.s6a+board.s5a=='':
        moves = '9a4a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|r', Wboard.w9a)and w3a==''\
            and board.s8a+board.s7a+board.s6a+board.s5a+board.s4a=='':
        moves = '9a3a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|r', Wboard.w9a)and w2a==''\
            and board.s8a+board.s7a+board.s6a+board.s5a+board.s4a+board.s3a=='':
        moves = '9a2a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+r|r', Wboard.w9a)and w1a==''\
            and board.s8a+board.s7a+board.s6a+board.s5a+board.s4a+board.s3a+board.s2a=='':
        moves = '9a1a'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    # Bishop diagonal 9a -> 8b -> 7c -> 6d -> 5e -> 4f -> 3g -> 2h -> 1i.
    # Unpromoted bishop promotes on the deep squares; promoted bishop moves plain.
    if re.match('b',Wboard.w9a)and w3g==''\
            and board.s8b+board.s7c+board.s6d+board.s5e+board.s4f=='':
        moves = '9a3g+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('b',Wboard.w9a)and w2h==''\
            and board.s8b+board.s7c+board.s6d+board.s5e+board.s4f+board.s3g=='':
        moves = '9a2h+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('b',Wboard.w9a)and w1i==''\
            and board.s8b+board.s7c+board.s6d+board.s5e+board.s4f+board.s3g+board.s2h=='':
        moves = '9a1i+'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+b|b', Wboard.w9a)and w7c==''\
            and board.s8b=='':
        moves = '9a7c'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+b|b', Wboard.w9a)and w6d==''\
            and board.s8b+board.s7c=='':
        moves = '9a6d'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+b|b', Wboard.w9a)and w5e==''\
            and board.s8b+board.s7c+board.s6d=='':
        moves = '9a5e'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match(r'\+b|b', Wboard.w9a)and w4f==''\
            and board.s8b+board.s7c+board.s6d+board.s5e=='':
        moves = '9a4f'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('\+b', Wboard.w9a)and w3g==''\
            and board.s8b+board.s7c+board.s6d+board.s5e+board.s4f=='':
        moves = '9a3g'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('\+b', Wboard.w9a)and w2h==''\
            and board.s8b+board.s7c+board.s6d+board.s5e+board.s4f+board.s3g=='':
        moves = '9a2h'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
    if re.match('\+b', Wboard.w9a)and w1i==''\
            and board.s8b+board.s7c+board.s6d+board.s5e+board.s4f+board.s3g+board.s2h=='':
        moves = '9a1i'
        kaihimore(moves)
        if oute.oute == 0:
            depth1.append(moves)
# --- Drop escapes on files 1-4 and 9 (squares 1i .. 9b) ------------------
# For every empty square, try dropping each piece still in hand.
# Restrictions for White (moving toward rank 'i'): a pawn or lance may not
# be dropped on rank 'i' and a knight may not be dropped on 'h' or 'i'
# (the piece would have no onward move), and a pawn may not be dropped on
# a file that already holds an unpromoted white pawn (nifu).
# kaihimore(moves) simulates the candidate drop; it is appended only when
# it resolves the check (oute.oute == 0).
# The candidate order per square (P, L, N, S, G, B, R) and the square
# order (1i..1a, 2i..2a, 3i..3a, 4i..4a, 9i..9b) replicate the original
# unrolled code exactly.
# NOTE: square 9a is deliberately excluded -- it is generated by the
# pre-existing code that follows this loop.
for drop_file in ('1', '2', '3', '4', '9'):
    for drop_rank in 'ihgfedcba':
        if drop_file == '9' and drop_rank == 'a':
            continue
        if getattr(board, 's' + drop_file + drop_rank) != '':
            continue
        candidates = []
        if drop_rank != 'i':
            # Nifu rule: no second unpromoted pawn on this file.  Ranks
            # a-h are scanned; rank 'i' never holds an unpromoted pawn.
            if Wboard.p > 0 and all(
                    getattr(Wboard, 'w' + drop_file + r) != 'p'
                    for r in 'hgfedcba'):
                candidates.append('P')
            if Wboard.l > 0:
                candidates.append('L')
            if drop_rank != 'h' and Wboard.n > 0:
                candidates.append('N')
        if Wboard.s > 0:
            candidates.append('S')
        if Wboard.g > 0:
            candidates.append('G')
        if Wboard.b > 0:
            candidates.append('B')
        if Wboard.r > 0:
            candidates.append('R')
        for piece in candidates:
            moves = piece + '*' + drop_file + drop_rank
            kaihimore(moves)
            if oute.oute == 0:
                depth1.append(moves)
if board.s9a =='':
if Wboard.p>0 and (Wboard.w9h !='p' and Wboard.w9g !='p' and Wboard.w9f !='p' and Wboard.w9e !='p' and Wboard.w9d !='p' and Wboard.w9c !='p' and Wboard.w9b !='p' and Wboard.w9a !='p'):
moves = 'P*9a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.l>0:
moves = 'L*9a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.n>0:
moves = 'N*9a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.s>0:
moves = 'S*9a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.g>0:
moves = 'G*9a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.b>0:
moves = 'B*9a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.r>0:
moves = 'R*9a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if board.s8i =='':
if Wboard.s>0:
moves = 'S*8i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.g>0:
moves = 'G*8i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.b>0:
moves = 'B*8i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.r>0:
moves = 'R*8i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if board.s8h =='':
if Wboard.p>0 and (Wboard.w8h !='p' and Wboard.w8g !='p' and Wboard.w8f !='p' and Wboard.w8e !='p' and Wboard.w8d !='p' and Wboard.w8c !='p' and Wboard.w8b !='p' and Wboard.w8a !='p'):
moves = 'P*8h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.l>0:
moves = 'L*8h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.s>0:
moves = 'S*8h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.g>0:
moves = 'G*8h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.b>0:
moves = 'B*8h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.r>0:
moves = 'R*8h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if board.s8g =='':
if Wboard.p>0 and (Wboard.w8h !='p' and Wboard.w8g !='p' and Wboard.w8f !='p' and Wboard.w8e !='p' and Wboard.w8d !='p' and Wboard.w8c !='p' and Wboard.w8b !='p' and Wboard.w8a !='p'):
moves = 'P*8g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.l>0:
moves = 'L*8g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.n>0:
moves = 'N*8g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.s>0:
moves = 'S*8g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.g>0:
moves = 'G*8g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.b>0:
moves = 'B*8g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.r>0:
moves = 'R*8g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if board.s8f =='':
if Wboard.p>0 and (Wboard.w8h !='p' and Wboard.w8g !='p' and Wboard.w8f !='p' and Wboard.w8e !='p' and Wboard.w8d !='p' and Wboard.w8c !='p' and Wboard.w8b !='p' and Wboard.w8a !='p'):
moves = 'P*8f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.l>0:
moves = 'L*8f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.n>0:
moves = 'N*8f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.s>0:
moves = 'S*8f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.g>0:
moves = 'G*8f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.b>0:
moves = 'B*8f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.r>0:
moves = 'R*8f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if board.s8e =='':
if Wboard.p>0 and (Wboard.w8h !='p' and Wboard.w8g !='p' and Wboard.w8f !='p' and Wboard.w8e !='p' and Wboard.w8d !='p' and Wboard.w8c !='p' and Wboard.w8b !='p' and Wboard.w8a !='p'):
moves = 'P*8e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.l>0:
moves = 'L*8e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.n>0:
moves = 'N*8e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.s>0:
moves = 'S*8e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.g>0:
moves = 'G*8e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.b>0:
moves = 'B*8e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.r>0:
moves = 'R*8e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if board.s8d =='':
if Wboard.p>0 and (Wboard.w8h !='p' and Wboard.w8g !='p' and Wboard.w8f !='p' and Wboard.w8e !='p' and Wboard.w8d !='p' and Wboard.w8c !='p' and Wboard.w8b !='p' and Wboard.w8a !='p'):
moves = 'P*8d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.l>0:
moves = 'L*8d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.n>0:
moves = 'N*8d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.s>0:
moves = 'S*8d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.g>0:
moves = 'G*8d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.b>0:
moves = 'B*8d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.r>0:
moves = 'R*8d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if board.s8c =='':
if Wboard.p>0 and (Wboard.w8h !='p' and Wboard.w8g !='p' and Wboard.w8f !='p' and Wboard.w8e !='p' and Wboard.w8d !='p' and Wboard.w8c !='p' and Wboard.w8b !='p' and Wboard.w8a !='p'):
moves = 'P*8c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.l>0:
moves = 'L*8c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.n>0:
moves = 'N*8c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.s>0:
moves = 'S*8c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.g>0:
moves = 'G*8c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.b>0:
moves = 'B*8c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.r>0:
moves = 'R*8c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if board.s8b =='':
if Wboard.p>0 and (Wboard.w8h !='p' and Wboard.w8g !='p' and Wboard.w8f !='p' and Wboard.w8e !='p' and Wboard.w8d !='p' and Wboard.w8c !='p' and Wboard.w8b !='p' and Wboard.w8a !='p'):
moves = 'P*8b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.l>0:
moves = 'L*8b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.n>0:
moves = 'N*8b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.s>0:
moves = 'S*8b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.g>0:
moves = 'G*8b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.b>0:
moves = 'B*8b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.r>0:
moves = 'R*8b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if board.s8a =='':
if Wboard.p>0 and (Wboard.w8h !='p' and Wboard.w8g !='p' and Wboard.w8f !='p' and Wboard.w8e !='p' and Wboard.w8d !='p' and Wboard.w8c !='p' and Wboard.w8b !='p' and Wboard.w8a !='p'):
moves = 'P*8a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.l>0:
moves = 'L*8a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.n>0:
moves = 'N*8a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.s>0:
moves = 'S*8a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.g>0:
moves = 'G*8a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.b>0:
moves = 'B*8a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.r>0:
moves = 'R*8a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if board.s7i =='':
if Wboard.s>0:
moves = 'S*7i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.g>0:
moves = 'G*7i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.b>0:
moves = 'B*7i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.r>0:
moves = 'R*7i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if board.s7h =='':
if Wboard.p>0 and (Wboard.w7h !='p' and Wboard.w7g !='p' and Wboard.w7f !='p' and Wboard.w7e !='p' and Wboard.w7d !='p' and Wboard.w7c !='p' and Wboard.w7b !='p' and Wboard.w7a !='p'):
moves = 'P*7h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.l>0:
moves = 'L*7h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.s>0:
moves = 'S*7h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.g>0:
moves = 'G*7h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.b>0:
moves = 'B*7h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.r>0:
moves = 'R*7h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if board.s7g =='':
if Wboard.p>0 and (Wboard.w7h !='p' and Wboard.w7g !='p' and Wboard.w7f !='p' and Wboard.w7e !='p' and Wboard.w7d !='p' and Wboard.w7c !='p' and Wboard.w7b !='p' and Wboard.w7a !='p'):
moves = 'P*7g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.l>0:
moves = 'L*7g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.n>0:
moves = 'N*7g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.s>0:
moves = 'S*7g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.g>0:
moves = 'G*7g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.b>0:
moves = 'B*7g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.r>0:
moves = 'R*7g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if board.s7f =='':
if Wboard.p>0 and (Wboard.w7h !='p' and Wboard.w7g !='p' and Wboard.w7f !='p' and Wboard.w7e !='p' and Wboard.w7d !='p' and Wboard.w7c !='p' and Wboard.w7b !='p' and Wboard.w7a !='p'):
moves = 'P*7f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.l>0:
moves = 'L*7f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.n>0:
moves = 'N*7f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.s>0:
moves = 'S*7f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.g>0:
moves = 'G*7f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.b>0:
moves = 'B*7f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.r>0:
moves = 'R*7f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if board.s7e =='':
if Wboard.p>0 and (Wboard.w7h !='p' and Wboard.w7g !='p' and Wboard.w7f !='p' and Wboard.w7e !='p' and Wboard.w7d !='p' and Wboard.w7c !='p' and Wboard.w7b !='p' and Wboard.w7a !='p'):
moves = 'P*7e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.l>0:
moves = 'L*7e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.n>0:
moves = 'N*7e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.s>0:
moves = 'S*7e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.g>0:
moves = 'G*7e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.b>0:
moves = 'B*7e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.r>0:
moves = 'R*7e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if board.s7d =='':
if Wboard.p>0 and (Wboard.w7h !='p' and Wboard.w7g !='p' and Wboard.w7f !='p' and Wboard.w7e !='p' and Wboard.w7d !='p' and Wboard.w7c !='p' and Wboard.w7b !='p' and Wboard.w7a !='p'):
moves = 'P*7d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.l>0:
moves = 'L*7d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.n>0:
moves = 'N*7d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.s>0:
moves = 'S*7d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.g>0:
moves = 'G*7d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.b>0:
moves = 'B*7d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.r>0:
moves = 'R*7d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if board.s7c =='':
if Wboard.p>0 and (Wboard.w7h !='p' and Wboard.w7g !='p' and Wboard.w7f !='p' and Wboard.w7e !='p' and Wboard.w7d !='p' and Wboard.w7c !='p' and Wboard.w7b !='p' and Wboard.w7a !='p'):
moves = 'P*7c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.l>0:
moves = 'L*7c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.n>0:
moves = 'N*7c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.s>0:
moves = 'S*7c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.g>0:
moves = 'G*7c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.b>0:
moves = 'B*7c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.r>0:
moves = 'R*7c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if board.s7b =='':
if Wboard.p>0 and (Wboard.w7h !='p' and Wboard.w7g !='p' and Wboard.w7f !='p' and Wboard.w7e !='p' and Wboard.w7d !='p' and Wboard.w7c !='p' and Wboard.w7b !='p' and Wboard.w7a !='p'):
moves = 'P*7b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.l>0:
moves = 'L*7b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.n>0:
moves = 'N*7b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.s>0:
moves = 'S*7b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.g>0:
moves = 'G*7b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.b>0:
moves = 'B*7b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.r>0:
moves = 'R*7b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if board.s7a =='':
if Wboard.p>0 and (Wboard.w7h !='p' and Wboard.w7g !='p' and Wboard.w7f !='p' and Wboard.w7e !='p' and Wboard.w7d !='p' and Wboard.w7c !='p' and Wboard.w7b !='p' and Wboard.w7a !='p'):
moves = 'P*7a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.l>0:
moves = 'L*7a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.n>0:
moves = 'N*7a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.s>0:
moves = 'S*7a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.g>0:
moves = 'G*7a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.b>0:
moves = 'B*7a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.r>0:
moves = 'R*7a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if board.s6i =='':
if Wboard.s>0:
moves = 'S*6i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.g>0:
moves = 'G*6i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.b>0:
moves = 'B*6i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.r>0:
moves = 'R*6i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if board.s6h =='':
if Wboard.p>0 and (Wboard.w6h !='p' and Wboard.w6g !='p' and Wboard.w6f !='p' and Wboard.w6e !='p' and Wboard.w6d !='p' and Wboard.w6c !='p' and Wboard.w6b !='p' and Wboard.w6a !='p'):
moves = 'P*6h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.l>0:
moves = 'L*6h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.s>0:
moves = 'S*6h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.g>0:
moves = 'G*6h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.b>0:
moves = 'B*6h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.r>0:
moves = 'R*6h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if board.s6g =='':
if Wboard.p>0 and (Wboard.w6h !='p' and Wboard.w6g !='p' and Wboard.w6f !='p' and Wboard.w6e !='p' and Wboard.w6d !='p' and Wboard.w6c !='p' and Wboard.w6b !='p' and Wboard.w6a !='p'):
moves = 'P*6g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.l>0:
moves = 'L*6g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.n>0:
moves = 'N*6g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.s>0:
moves = 'S*6g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.g>0:
moves = 'G*6g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.b>0:
moves = 'B*6g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.r>0:
moves = 'R*6g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if board.s6f =='':
if Wboard.p>0 and (Wboard.w6h !='p' and Wboard.w6g !='p' and Wboard.w6f !='p' and Wboard.w6e !='p' and Wboard.w6d !='p' and Wboard.w6c !='p' and Wboard.w6b !='p' and Wboard.w6a !='p'):
moves = 'P*6f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.l>0:
moves = 'L*6f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.n>0:
moves = 'N*6f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.s>0:
moves = 'S*6f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.g>0:
moves = 'G*6f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.b>0:
moves = 'B*6f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.r>0:
moves = 'R*6f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if board.s6e =='':
if Wboard.p>0 and (Wboard.w6h !='p' and Wboard.w6g !='p' and Wboard.w6f !='p' and Wboard.w6e !='p' and Wboard.w6d !='p' and Wboard.w6c !='p' and Wboard.w6b !='p' and Wboard.w6a !='p'):
moves = 'P*6e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.l>0:
moves = 'L*6e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.n>0:
moves = 'N*6e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.s>0:
moves = 'S*6e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.g>0:
moves = 'G*6e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.b>0:
moves = 'B*6e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.r>0:
moves = 'R*6e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if board.s6d =='':
if Wboard.p>0 and (Wboard.w6h !='p' and Wboard.w6g !='p' and Wboard.w6f !='p' and Wboard.w6e !='p' and Wboard.w6d !='p' and Wboard.w6c !='p' and Wboard.w6b !='p' and Wboard.w6a !='p'):
moves = 'P*6d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.l>0:
moves = 'L*6d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.n>0:
moves = 'N*6d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.s>0:
moves = 'S*6d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.g>0:
moves = 'G*6d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.b>0:
moves = 'B*6d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.r>0:
moves = 'R*6d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if board.s6c =='':
if Wboard.p>0 and (Wboard.w6h !='p' and Wboard.w6g !='p' and Wboard.w6f !='p' and Wboard.w6e !='p' and Wboard.w6d !='p' and Wboard.w6c !='p' and Wboard.w6b !='p' and Wboard.w6a !='p'):
moves = 'P*6c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.l>0:
moves = 'L*6c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.n>0:
moves = 'N*6c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.s>0:
moves = 'S*6c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.g>0:
moves = 'G*6c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.b>0:
moves = 'B*6c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.r>0:
moves = 'R*6c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if board.s6b =='':
if Wboard.p>0 and (Wboard.w6h !='p' and Wboard.w6g !='p' and Wboard.w6f !='p' and Wboard.w6e !='p' and Wboard.w6d !='p' and Wboard.w6c !='p' and Wboard.w6b !='p' and Wboard.w6a !='p'):
moves = 'P*6b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.l>0:
moves = 'L*6b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.n>0:
moves = 'N*6b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.s>0:
moves = 'S*6b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.g>0:
moves = 'G*6b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.b>0:
moves = 'B*6b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.r>0:
moves = 'R*6b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if board.s6a =='':
if Wboard.p>0 and (Wboard.w6h !='p' and Wboard.w6g !='p' and Wboard.w6f !='p' and Wboard.w6e !='p' and Wboard.w6d !='p' and Wboard.w6c !='p' and Wboard.w6b !='p' and Wboard.w6a !='p'):
moves = 'P*6a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.l>0:
moves = 'L*6a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.n>0:
moves = 'N*6a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.s>0:
moves = 'S*6a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.g>0:
moves = 'G*6a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.b>0:
moves = 'B*6a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.r>0:
moves = 'R*6a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if board.s5i =='':
if Wboard.s>0:
moves = 'S*5i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.g>0:
moves = 'G*5i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.b>0:
moves = 'B*5i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.r>0:
moves = 'R*5i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if board.s5h =='':
if Wboard.p>0 and (Wboard.w5h !='p' and Wboard.w5g !='p' and Wboard.w5f !='p' and Wboard.w5e !='p' and Wboard.w5d !='p' and Wboard.w5c !='p' and Wboard.w5b !='p' and Wboard.w5a !='p'):
moves = 'P*5h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.l>0:
moves = 'L*5h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.s>0:
moves = 'S*5h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.g>0:
moves = 'G*5h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.b>0:
moves = 'B*5h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.r>0:
moves = 'R*5h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if board.s5g =='':
if Wboard.p>0 and (Wboard.w5h !='p' and Wboard.w5g !='p' and Wboard.w5f !='p' and Wboard.w5e !='p' and Wboard.w5d !='p' and Wboard.w5c !='p' and Wboard.w5b !='p' and Wboard.w5a !='p'):
moves = 'P*5g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.l>0:
moves = 'L*5g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.n>0:
moves = 'N*5g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.s>0:
moves = 'S*5g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.g>0:
moves = 'G*5g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.b>0:
moves = 'B*5g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.r>0:
moves = 'R*5g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if board.s5f =='':
if Wboard.p>0 and (Wboard.w5h !='p' and Wboard.w5g !='p' and Wboard.w5f !='p' and Wboard.w5e !='p' and Wboard.w5d !='p' and Wboard.w5c !='p' and Wboard.w5b !='p' and Wboard.w5a !='p'):
moves = 'P*5f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.l>0:
moves = 'L*5f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.n>0:
moves = 'N*5f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.s>0:
moves = 'S*5f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.g>0:
moves = 'G*5f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.b>0:
moves = 'B*5f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.r>0:
moves = 'R*5f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if board.s5e =='':
if Wboard.p>0 and (Wboard.w5h !='p' and Wboard.w5g !='p' and Wboard.w5f !='p' and Wboard.w5e !='p' and Wboard.w5d !='p' and Wboard.w5c !='p' and Wboard.w5b !='p' and Wboard.w5a !='p'):
moves = 'P*5e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.l>0:
moves = 'L*5e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.n>0:
moves = 'N*5e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.s>0:
moves = 'S*5e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.g>0:
moves = 'G*5e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.b>0:
moves = 'B*5e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.r>0:
moves = 'R*5e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if board.s5d =='':
if Wboard.p>0 and (Wboard.w5h !='p' and Wboard.w5g !='p' and Wboard.w5f !='p' and Wboard.w5e !='p' and Wboard.w5d !='p' and Wboard.w5c !='p' and Wboard.w5b !='p' and Wboard.w5a !='p'):
moves = 'P*5d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.l>0:
moves = 'L*5d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.n>0:
moves = 'N*5d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.s>0:
moves = 'S*5d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.g>0:
moves = 'G*5d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.b>0:
moves = 'B*5d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.r>0:
moves = 'R*5d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if board.s5c =='':
if Wboard.p>0 and (Wboard.w5h !='p' and Wboard.w5g !='p' and Wboard.w5f !='p' and Wboard.w5e !='p' and Wboard.w5d !='p' and Wboard.w5c !='p' and Wboard.w5b !='p' and Wboard.w5a !='p'):
moves = 'P*5c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.l>0:
moves = 'L*5c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.n>0:
moves = 'N*5c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.s>0:
moves = 'S*5c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.g>0:
moves = 'G*5c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.b>0:
moves = 'B*5c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.r>0:
moves = 'R*5c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if board.s5b =='':
if Wboard.p>0 and (Wboard.w5h !='p' and Wboard.w5g !='p' and Wboard.w5f !='p' and Wboard.w5e !='p' and Wboard.w5d !='p' and Wboard.w5c !='p' and Wboard.w5b !='p' and Wboard.w5a !='p'):
moves = 'P*5b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.l>0:
moves = 'L*5b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.n>0:
moves = 'N*5b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.s>0:
moves = 'S*5b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.g>0:
moves = 'G*5b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.b>0:
moves = 'B*5b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.r>0:
moves = 'R*5b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if board.s5a =='':
if Wboard.p>0 and (Wboard.w5h !='p' and Wboard.w5g !='p' and Wboard.w5f !='p' and Wboard.w5e !='p' and Wboard.w5d !='p' and Wboard.w5c !='p' and Wboard.w5b !='p' and Wboard.w5a !='p'):
moves = 'P*5a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.l>0:
moves = 'L*5a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.n>0:
moves = 'N*5a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.s>0:
moves = 'S*5a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.g>0:
moves = 'G*5a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.b>0:
moves = 'B*5a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.r>0:
moves = 'R*5a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
| 36.778965 | 192 | 0.446465 | 92,432 | 795,198 | 3.840964 | 0.027112 | 0.14896 | 0.095798 | 0.212828 | 0.957257 | 0.948728 | 0.937718 | 0.928245 | 0.92373 | 0.916269 | 0 | 0.061557 | 0.400369 | 795,198 | 21,620 | 193 | 36.780666 | 0.683009 | 0.000052 | 0 | 0.651471 | 0 | 0 | 0.036795 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.000093 | false | 0 | 0.000326 | 0 | 0.00042 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
4af3450d206672b2ab02bd9bda1a4f1e13b4f202 | 53,196 | py | Python | azure-mgmt-resource/azure/mgmt/resource/locks/v2016_09_01/operations/management_locks_operations.py | SUSE/azure-sdk-for-python | 324f99d26dd6f4ee9793b9bf1d4d5f928e4b6c2f | [
"MIT"
] | 2 | 2020-07-29T14:22:17.000Z | 2020-11-06T18:47:40.000Z | azure-mgmt-resource/azure/mgmt/resource/locks/v2016_09_01/operations/management_locks_operations.py | SUSE/azure-sdk-for-python | 324f99d26dd6f4ee9793b9bf1d4d5f928e4b6c2f | [
"MIT"
] | 1 | 2016-08-01T07:37:04.000Z | 2016-08-01T07:37:04.000Z | azure-mgmt-resource/azure/mgmt/resource/locks/v2016_09_01/operations/management_locks_operations.py | SUSE/azure-sdk-for-python | 324f99d26dd6f4ee9793b9bf1d4d5f928e4b6c2f | [
"MIT"
] | 1 | 2020-12-12T21:04:41.000Z | 2020-12-12T21:04:41.000Z | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.pipeline import ClientRawResponse
from msrestazure.azure_exceptions import CloudError
import uuid
from .. import models
class ManagementLocksOperations(object):
"""ManagementLocksOperations operations.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
:ivar api_version: The API version to use for the operation. Constant value: "2016-09-01".
"""
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self.api_version = "2016-09-01"
self.config = config
def create_or_update_at_resource_group_level(
        self, resource_group_name, lock_name, parameters, custom_headers=None, raw=False, **operation_config):
    """Creates or updates a management lock at the resource group level.

    When you apply a lock at a parent scope, all child resources inherit
    the same lock. To create management locks, you must have access to
    Microsoft.Authorization/* or Microsoft.Authorization/locks/* actions.
    Of the built-in roles, only Owner and User Access Administrator are
    granted those actions.

    :param resource_group_name: The name of the resource group to lock.
    :type resource_group_name: str
    :param lock_name: The lock name. The lock name can be a maximum of 260
     characters. It cannot contain <, > %, &, :, \\, ?, /, or any control
     characters.
    :type lock_name: str
    :param parameters: The management lock parameters.
    :type parameters: :class:`ManagementLockObject
     <azure.mgmt.resource.locks.v2016_09_01.models.ManagementLockObject>`
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :rtype: :class:`ManagementLockObject
     <azure.mgmt.resource.locks.v2016_09_01.models.ManagementLockObject>`
     or :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
     if raw=true
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Construct URL
    url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Authorization/locks/{lockName}'
    path_format_arguments = {
        # Raw string for the validation pattern: '\w', '\.', '\(' are
        # invalid escapes in a plain literal (DeprecationWarning now,
        # SyntaxError in future Python versions).
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        'lockName': self._serialize.url("lock_name", lock_name, 'str'),
        'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct query parameters
    query_parameters = {}
    query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

    # Construct headers
    header_parameters = {}
    header_parameters['Content-Type'] = 'application/json; charset=utf-8'
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Construct body
    body_content = self._serialize.body(parameters, 'ManagementLockObject')

    # Construct and send request
    request = self._client.put(url, query_parameters)
    response = self._client.send(
        request, header_parameters, body_content, **operation_config)

    if response.status_code not in [200, 201]:
        exp = CloudError(response)
        exp.request_id = response.headers.get('x-ms-request-id')
        raise exp

    # Both 200 (updated) and 201 (created) carry the same payload.
    deserialized = None
    if response.status_code in (200, 201):
        deserialized = self._deserialize('ManagementLockObject', response)

    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
def delete_at_resource_group_level(
        self, resource_group_name, lock_name, custom_headers=None, raw=False, **operation_config):
    """Deletes a management lock at the resource group level.

    To delete management locks, you must have access to
    Microsoft.Authorization/* or Microsoft.Authorization/locks/* actions.
    Of the built-in roles, only Owner and User Access Administrator are
    granted those actions.

    :param resource_group_name: The name of the resource group containing
     the lock.
    :type resource_group_name: str
    :param lock_name: The name of lock to delete.
    :type lock_name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :rtype: None or
     :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
     if raw=true
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Construct URL
    url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Authorization/locks/{lockName}'
    path_format_arguments = {
        # Raw string: '\w', '\.', '\(' are invalid escapes in a plain literal.
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        'lockName': self._serialize.url("lock_name", lock_name, 'str'),
        'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct query parameters
    query_parameters = {}
    query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

    # Construct headers
    header_parameters = {}
    header_parameters['Content-Type'] = 'application/json; charset=utf-8'
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Construct and send request
    request = self._client.delete(url, query_parameters)
    response = self._client.send(request, header_parameters, **operation_config)

    # Service signals a successful delete with 200 or 204 (no body either way).
    if response.status_code not in [204, 200]:
        exp = CloudError(response)
        exp.request_id = response.headers.get('x-ms-request-id')
        raise exp

    if raw:
        return ClientRawResponse(None, response)
def get_at_resource_group_level(
        self, resource_group_name, lock_name, custom_headers=None, raw=False, **operation_config):
    """Gets a management lock at the resource group level.

    :param resource_group_name: The name of the locked resource group.
    :type resource_group_name: str
    :param lock_name: The name of the lock to get.
    :type lock_name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :rtype: :class:`ManagementLockObject
     <azure.mgmt.resource.locks.v2016_09_01.models.ManagementLockObject>`
     or :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
     if raw=true
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Construct URL
    url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Authorization/locks/{lockName}'
    path_format_arguments = {
        # Raw string: '\w', '\.', '\(' are invalid escapes in a plain literal.
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        'lockName': self._serialize.url("lock_name", lock_name, 'str'),
        'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct query parameters
    query_parameters = {}
    query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

    # Construct headers
    header_parameters = {}
    header_parameters['Content-Type'] = 'application/json; charset=utf-8'
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Construct and send request
    request = self._client.get(url, query_parameters)
    response = self._client.send(request, header_parameters, **operation_config)

    if response.status_code not in [200]:
        exp = CloudError(response)
        exp.request_id = response.headers.get('x-ms-request-id')
        raise exp

    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize('ManagementLockObject', response)

    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
def create_or_update_by_scope(
        self, scope, lock_name, parameters, custom_headers=None, raw=False, **operation_config):
    """Create or update a management lock by scope.

    :param scope: The scope for the lock. When providing a scope for the
     assignment, use '/subscriptions/{subscriptionId}' for subscriptions,
     '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}'
     for resource groups, and
     '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePathIfPresent}/{resourceType}/{resourceName}'
     for resources.
    :type scope: str
    :param lock_name: The name of lock.
    :type lock_name: str
    :param parameters: Create or update management lock parameters.
    :type parameters: :class:`ManagementLockObject
     <azure.mgmt.resource.locks.v2016_09_01.models.ManagementLockObject>`
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :rtype: :class:`ManagementLockObject
     <azure.mgmt.resource.locks.v2016_09_01.models.ManagementLockObject>`
    :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
     if raw=true
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Build the request URL from the caller-supplied scope and lock name.
    url = self._client.format_url(
        '/{scope}/providers/Microsoft.Authorization/locks/{lockName}',
        scope=self._serialize.url("scope", scope, 'str'),
        lockName=self._serialize.url("lock_name", lock_name, 'str'))

    # Query string carries only the API version.
    query_parameters = {
        'api-version': self._serialize.query("self.api_version", self.api_version, 'str'),
    }

    # Headers: JSON content type, optional client request id, caller
    # overrides, then the configured accept-language.
    header_parameters = {'Content-Type': 'application/json; charset=utf-8'}
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header(
            "self.config.accept_language", self.config.accept_language, 'str')

    # Serialize the lock payload and issue the PUT.
    body_content = self._serialize.body(parameters, 'ManagementLockObject')
    request = self._client.put(url, query_parameters)
    response = self._client.send(
        request, header_parameters, body_content, **operation_config)

    if response.status_code not in [200, 201]:
        error = CloudError(response)
        error.request_id = response.headers.get('x-ms-request-id')
        raise error

    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize('ManagementLockObject', response)
    if response.status_code == 201:
        deserialized = self._deserialize('ManagementLockObject', response)

    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
def delete_by_scope(
        self, scope, lock_name, custom_headers=None, raw=False, **operation_config):
    """Delete a management lock by scope.

    :param scope: The scope for the lock.
    :type scope: str
    :param lock_name: The name of lock.
    :type lock_name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :rtype: None
    :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
     if raw=true
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Build the request URL from the caller-supplied scope and lock name.
    url = self._client.format_url(
        '/{scope}/providers/Microsoft.Authorization/locks/{lockName}',
        scope=self._serialize.url("scope", scope, 'str'),
        lockName=self._serialize.url("lock_name", lock_name, 'str'))

    # Query string carries only the API version.
    query_parameters = {
        'api-version': self._serialize.query("self.api_version", self.api_version, 'str'),
    }

    # Headers: JSON content type, optional client request id, caller
    # overrides, then the configured accept-language.
    header_parameters = {'Content-Type': 'application/json; charset=utf-8'}
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header(
            "self.config.accept_language", self.config.accept_language, 'str')

    # Issue the DELETE and validate the status code.
    request = self._client.delete(url, query_parameters)
    response = self._client.send(request, header_parameters, **operation_config)
    if response.status_code not in [204, 200]:
        error = CloudError(response)
        error.request_id = response.headers.get('x-ms-request-id')
        raise error

    if raw:
        return ClientRawResponse(None, response)
def get_by_scope(
        self, scope, lock_name, custom_headers=None, raw=False, **operation_config):
    """Get a management lock by scope.

    :param scope: The scope for the lock.
    :type scope: str
    :param lock_name: The name of lock.
    :type lock_name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :rtype: :class:`ManagementLockObject
     <azure.mgmt.resource.locks.v2016_09_01.models.ManagementLockObject>`
    :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
     if raw=true
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Build the request URL from the caller-supplied scope and lock name.
    url = self._client.format_url(
        '/{scope}/providers/Microsoft.Authorization/locks/{lockName}',
        scope=self._serialize.url("scope", scope, 'str'),
        lockName=self._serialize.url("lock_name", lock_name, 'str'))

    # Query string carries only the API version.
    query_parameters = {
        'api-version': self._serialize.query("self.api_version", self.api_version, 'str'),
    }

    # Headers: JSON content type, optional client request id, caller
    # overrides, then the configured accept-language.
    header_parameters = {'Content-Type': 'application/json; charset=utf-8'}
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header(
            "self.config.accept_language", self.config.accept_language, 'str')

    # Issue the GET and validate the status code.
    request = self._client.get(url, query_parameters)
    response = self._client.send(request, header_parameters, **operation_config)
    if response.status_code not in [200]:
        error = CloudError(response)
        error.request_id = response.headers.get('x-ms-request-id')
        raise error

    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize('ManagementLockObject', response)

    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
def create_or_update_at_resource_level(
        self, resource_group_name, resource_provider_namespace, parent_resource_path, resource_type, resource_name, lock_name, parameters, custom_headers=None, raw=False, **operation_config):
    """Creates or updates a management lock at the resource level or any level
    below the resource.

    When you apply a lock at a parent scope, all child resources inherit
    the same lock. To create management locks, you must have access to
    Microsoft.Authorization/* or Microsoft.Authorization/locks/* actions.
    Of the built-in roles, only Owner and User Access Administrator are
    granted those actions.

    :param resource_group_name: The name of the resource group containing
     the resource to lock.
    :type resource_group_name: str
    :param resource_provider_namespace: The resource provider namespace of
     the resource to lock.
    :type resource_provider_namespace: str
    :param parent_resource_path: The parent resource identity.
    :type parent_resource_path: str
    :param resource_type: The resource type of the resource to lock.
    :type resource_type: str
    :param resource_name: The name of the resource to lock.
    :type resource_name: str
    :param lock_name: The name of lock. The lock name can be a maximum of
     260 characters. It cannot contain <, > %, &, :, \\, ?, /, or any
     control characters.
    :type lock_name: str
    :param parameters: Parameters for creating or updating a management
     lock.
    :type parameters: :class:`ManagementLockObject
     <azure.mgmt.resource.locks.v2016_09_01.models.ManagementLockObject>`
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :rtype: :class:`ManagementLockObject
     <azure.mgmt.resource.locks.v2016_09_01.models.ManagementLockObject>`
     or :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
     if raw=true
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Construct URL
    url = '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePath}/{resourceType}/{resourceName}/providers/Microsoft.Authorization/locks/{lockName}'
    path_format_arguments = {
        # Raw string for the validation pattern: '\w', '\.', '\(' are
        # invalid escapes in a plain literal (DeprecationWarning now,
        # SyntaxError in future Python versions).
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        'resourceProviderNamespace': self._serialize.url("resource_provider_namespace", resource_provider_namespace, 'str'),
        # skip_quote: these segments may legitimately contain '/' separators.
        'parentResourcePath': self._serialize.url("parent_resource_path", parent_resource_path, 'str', skip_quote=True),
        'resourceType': self._serialize.url("resource_type", resource_type, 'str', skip_quote=True),
        'resourceName': self._serialize.url("resource_name", resource_name, 'str'),
        'lockName': self._serialize.url("lock_name", lock_name, 'str'),
        'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct query parameters
    query_parameters = {}
    query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

    # Construct headers
    header_parameters = {}
    header_parameters['Content-Type'] = 'application/json; charset=utf-8'
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Construct body
    body_content = self._serialize.body(parameters, 'ManagementLockObject')

    # Construct and send request
    request = self._client.put(url, query_parameters)
    response = self._client.send(
        request, header_parameters, body_content, **operation_config)

    if response.status_code not in [200, 201]:
        exp = CloudError(response)
        exp.request_id = response.headers.get('x-ms-request-id')
        raise exp

    # Both 200 (updated) and 201 (created) carry the same payload.
    deserialized = None
    if response.status_code in (200, 201):
        deserialized = self._deserialize('ManagementLockObject', response)

    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
def delete_at_resource_level(
        self, resource_group_name, resource_provider_namespace, parent_resource_path, resource_type, resource_name, lock_name, custom_headers=None, raw=False, **operation_config):
    """Deletes the management lock of a resource or any level below the
    resource.

    To delete management locks, you must have access to
    Microsoft.Authorization/* or Microsoft.Authorization/locks/* actions.
    Of the built-in roles, only Owner and User Access Administrator are
    granted those actions.

    :param resource_group_name: The name of the resource group containing
     the resource with the lock to delete.
    :type resource_group_name: str
    :param resource_provider_namespace: The resource provider namespace of
     the resource with the lock to delete.
    :type resource_provider_namespace: str
    :param parent_resource_path: The parent resource identity.
    :type parent_resource_path: str
    :param resource_type: The resource type of the resource with the lock
     to delete.
    :type resource_type: str
    :param resource_name: The name of the resource with the lock to
     delete.
    :type resource_name: str
    :param lock_name: The name of the lock to delete.
    :type lock_name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :rtype: None or
     :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
     if raw=true
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Construct URL
    url = '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePath}/{resourceType}/{resourceName}/providers/Microsoft.Authorization/locks/{lockName}'
    path_format_arguments = {
        # Raw string: '\w', '\.', '\(' are invalid escapes in a plain literal.
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        'resourceProviderNamespace': self._serialize.url("resource_provider_namespace", resource_provider_namespace, 'str'),
        # skip_quote: these segments may legitimately contain '/' separators.
        'parentResourcePath': self._serialize.url("parent_resource_path", parent_resource_path, 'str', skip_quote=True),
        'resourceType': self._serialize.url("resource_type", resource_type, 'str', skip_quote=True),
        'resourceName': self._serialize.url("resource_name", resource_name, 'str'),
        'lockName': self._serialize.url("lock_name", lock_name, 'str'),
        'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct query parameters
    query_parameters = {}
    query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

    # Construct headers
    header_parameters = {}
    header_parameters['Content-Type'] = 'application/json; charset=utf-8'
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Construct and send request
    request = self._client.delete(url, query_parameters)
    response = self._client.send(request, header_parameters, **operation_config)

    # Service signals a successful delete with 200 or 204 (no body either way).
    if response.status_code not in [204, 200]:
        exp = CloudError(response)
        exp.request_id = response.headers.get('x-ms-request-id')
        raise exp

    if raw:
        return ClientRawResponse(None, response)
def get_at_resource_level(
        self, resource_group_name, resource_provider_namespace, parent_resource_path, resource_type, resource_name, lock_name, custom_headers=None, raw=False, **operation_config):
    """Get the management lock of a resource or any level below resource.

    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :param resource_provider_namespace: The namespace of the resource
     provider.
    :type resource_provider_namespace: str
    :param parent_resource_path: An extra path parameter needed in some
     services, like SQL Databases.
    :type parent_resource_path: str
    :param resource_type: The type of the resource.
    :type resource_type: str
    :param resource_name: The name of the resource.
    :type resource_name: str
    :param lock_name: The name of lock.
    :type lock_name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :rtype: :class:`ManagementLockObject
     <azure.mgmt.resource.locks.v2016_09_01.models.ManagementLockObject>`
     or :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
     if raw=true
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Construct URL
    url = '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePath}/{resourceType}/{resourceName}/providers/Microsoft.Authorization/locks/{lockName}'
    path_format_arguments = {
        # Raw string: '\w', '\.', '\(' are invalid escapes in a plain literal.
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        'resourceProviderNamespace': self._serialize.url("resource_provider_namespace", resource_provider_namespace, 'str'),
        # skip_quote: these segments may legitimately contain '/' separators.
        'parentResourcePath': self._serialize.url("parent_resource_path", parent_resource_path, 'str', skip_quote=True),
        'resourceType': self._serialize.url("resource_type", resource_type, 'str', skip_quote=True),
        'resourceName': self._serialize.url("resource_name", resource_name, 'str'),
        'lockName': self._serialize.url("lock_name", lock_name, 'str'),
        'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct query parameters
    query_parameters = {}
    query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

    # Construct headers
    header_parameters = {}
    header_parameters['Content-Type'] = 'application/json; charset=utf-8'
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Construct and send request
    request = self._client.get(url, query_parameters)
    response = self._client.send(request, header_parameters, **operation_config)

    if response.status_code not in [200]:
        exp = CloudError(response)
        exp.request_id = response.headers.get('x-ms-request-id')
        raise exp

    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize('ManagementLockObject', response)

    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
def create_or_update_at_subscription_level(
        self, lock_name, parameters, custom_headers=None, raw=False, **operation_config):
    """Creates or updates a management lock at the subscription level.

    When you apply a lock at a parent scope, all child resources inherit
    the same lock. To create management locks, you must have access to
    Microsoft.Authorization/* or Microsoft.Authorization/locks/* actions.
    Of the built-in roles, only Owner and User Access Administrator are
    granted those actions.

    :param lock_name: The name of lock. The lock name can be a maximum of
     260 characters. It cannot contain <, > %, &, :, \\, ?, /, or any
     control characters.
    :type lock_name: str
    :param parameters: The management lock parameters.
    :type parameters: :class:`ManagementLockObject
     <azure.mgmt.resource.locks.v2016_09_01.models.ManagementLockObject>`
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :rtype: :class:`ManagementLockObject
     <azure.mgmt.resource.locks.v2016_09_01.models.ManagementLockObject>`
    :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
     if raw=true
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Build the request URL from the configured subscription and lock name.
    url = self._client.format_url(
        '/subscriptions/{subscriptionId}/providers/Microsoft.Authorization/locks/{lockName}',
        lockName=self._serialize.url("lock_name", lock_name, 'str'),
        subscriptionId=self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'))

    # Query string carries only the API version.
    query_parameters = {
        'api-version': self._serialize.query("self.api_version", self.api_version, 'str'),
    }

    # Headers: JSON content type, optional client request id, caller
    # overrides, then the configured accept-language.
    header_parameters = {'Content-Type': 'application/json; charset=utf-8'}
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header(
            "self.config.accept_language", self.config.accept_language, 'str')

    # Serialize the lock payload and issue the PUT.
    body_content = self._serialize.body(parameters, 'ManagementLockObject')
    request = self._client.put(url, query_parameters)
    response = self._client.send(
        request, header_parameters, body_content, **operation_config)

    if response.status_code not in [201, 200]:
        error = CloudError(response)
        error.request_id = response.headers.get('x-ms-request-id')
        raise error

    deserialized = None
    if response.status_code == 201:
        deserialized = self._deserialize('ManagementLockObject', response)
    if response.status_code == 200:
        deserialized = self._deserialize('ManagementLockObject', response)

    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
def delete_at_subscription_level(
        self, lock_name, custom_headers=None, raw=False, **operation_config):
    """Deletes the management lock at the subscription level.

    To delete management locks, you must have access to
    Microsoft.Authorization/* or Microsoft.Authorization/locks/* actions.
    Of the built-in roles, only Owner and User Access Administrator are
    granted those actions.

    :param lock_name: The name of lock to delete.
    :type lock_name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :rtype: None
    :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
     if raw=true
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Build the request URL from the configured subscription and lock name.
    url = self._client.format_url(
        '/subscriptions/{subscriptionId}/providers/Microsoft.Authorization/locks/{lockName}',
        lockName=self._serialize.url("lock_name", lock_name, 'str'),
        subscriptionId=self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'))

    # Query string carries only the API version.
    query_parameters = {
        'api-version': self._serialize.query("self.api_version", self.api_version, 'str'),
    }

    # Headers: JSON content type, optional client request id, caller
    # overrides, then the configured accept-language.
    header_parameters = {'Content-Type': 'application/json; charset=utf-8'}
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header(
            "self.config.accept_language", self.config.accept_language, 'str')

    # Issue the DELETE and validate the status code.
    request = self._client.delete(url, query_parameters)
    response = self._client.send(request, header_parameters, **operation_config)
    if response.status_code not in [204, 200]:
        error = CloudError(response)
        error.request_id = response.headers.get('x-ms-request-id')
        raise error

    if raw:
        return ClientRawResponse(None, response)
    def get_at_subscription_level(
            self, lock_name, custom_headers=None, raw=False, **operation_config):
        """Gets a management lock at the subscription level.

        :param lock_name: The name of the lock to get.
        :type lock_name: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :rtype: :class:`ManagementLockObject
         <azure.mgmt.resource.locks.v2016_09_01.models.ManagementLockObject>`
        :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
         if raw=true
        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
        """
        # Construct URL
        url = '/subscriptions/{subscriptionId}/providers/Microsoft.Authorization/locks/{lockName}'
        path_format_arguments = {
            'lockName': self._serialize.url("lock_name", lock_name, 'str'),
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            # Correlation id so the request can be traced server side.
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

        # Construct and send request
        request = self._client.get(url, query_parameters)
        response = self._client.send(request, header_parameters, **operation_config)

        if response.status_code not in [200]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp

        deserialized = None

        if response.status_code == 200:
            deserialized = self._deserialize('ManagementLockObject', response)

        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response

        return deserialized
    def list_at_resource_group_level(
            self, resource_group_name, filter=None, custom_headers=None, raw=False, **operation_config):
        """Gets all the management locks for a resource group.

        :param resource_group_name: The name of the resource group containing
         the locks to get.
        :type resource_group_name: str
        :param filter: The filter to apply on the operation.
        :type filter: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :rtype: :class:`ManagementLockObjectPaged
         <azure.mgmt.resource.locks.v2016_09_01.models.ManagementLockObjectPaged>`
        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
        """
        def internal_paging(next_link=None, raw=False):
            # Called lazily by the paged iterator: first call builds the URL,
            # subsequent calls follow the server-provided next_link verbatim.
            if not next_link:
                # Construct URL
                url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Authorization/locks'
                path_format_arguments = {
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern='^[-\w\._\(\)]+$'),
                    'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
                }
                url = self._client.format_url(url, **path_format_arguments)

                # Construct parameters
                query_parameters = {}
                if filter is not None:
                    query_parameters['$filter'] = self._serialize.query("filter", filter, 'str')
                query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

            else:
                # next_link already embeds every query parameter.
                url = next_link
                query_parameters = {}

            # Construct headers
            header_parameters = {}
            header_parameters['Content-Type'] = 'application/json; charset=utf-8'
            if self.config.generate_client_request_id:
                header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
            if custom_headers:
                header_parameters.update(custom_headers)
            if self.config.accept_language is not None:
                header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

            # Construct and send request
            request = self._client.get(url, query_parameters)
            response = self._client.send(
                request, header_parameters, **operation_config)

            if response.status_code not in [200]:
                exp = CloudError(response)
                exp.request_id = response.headers.get('x-ms-request-id')
                raise exp

            return response

        # Deserialize response
        deserialized = models.ManagementLockObjectPaged(internal_paging, self._deserialize.dependencies)

        if raw:
            header_dict = {}
            client_raw_response = models.ManagementLockObjectPaged(internal_paging, self._deserialize.dependencies, header_dict)
            return client_raw_response

        return deserialized
    def list_at_resource_level(
            self, resource_group_name, resource_provider_namespace, parent_resource_path, resource_type, resource_name, filter=None, custom_headers=None, raw=False, **operation_config):
        """Gets all the management locks for a resource or any level below
        resource.

        :param resource_group_name: The name of the resource group containing
         the locked resource. The name is case insensitive.
        :type resource_group_name: str
        :param resource_provider_namespace: The namespace of the resource
         provider.
        :type resource_provider_namespace: str
        :param parent_resource_path: The parent resource identity.
        :type parent_resource_path: str
        :param resource_type: The resource type of the locked resource.
        :type resource_type: str
        :param resource_name: The name of the locked resource.
        :type resource_name: str
        :param filter: The filter to apply on the operation.
        :type filter: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :rtype: :class:`ManagementLockObjectPaged
         <azure.mgmt.resource.locks.v2016_09_01.models.ManagementLockObjectPaged>`
        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
        """
        def internal_paging(next_link=None, raw=False):
            # First call builds the full resource-scoped URL; follow-up pages
            # reuse the server-provided next_link untouched.
            if not next_link:
                # Construct URL
                url = '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePath}/{resourceType}/{resourceName}/providers/Microsoft.Authorization/locks'
                path_format_arguments = {
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern='^[-\w\._\(\)]+$'),
                    'resourceProviderNamespace': self._serialize.url("resource_provider_namespace", resource_provider_namespace, 'str'),
                    'parentResourcePath': self._serialize.url("parent_resource_path", parent_resource_path, 'str', skip_quote=True),
                    'resourceType': self._serialize.url("resource_type", resource_type, 'str', skip_quote=True),
                    'resourceName': self._serialize.url("resource_name", resource_name, 'str'),
                    'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
                }
                url = self._client.format_url(url, **path_format_arguments)

                # Construct parameters
                query_parameters = {}
                if filter is not None:
                    query_parameters['$filter'] = self._serialize.query("filter", filter, 'str')
                query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

            else:
                url = next_link
                query_parameters = {}

            # Construct headers
            header_parameters = {}
            header_parameters['Content-Type'] = 'application/json; charset=utf-8'
            if self.config.generate_client_request_id:
                header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
            if custom_headers:
                header_parameters.update(custom_headers)
            if self.config.accept_language is not None:
                header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

            # Construct and send request
            request = self._client.get(url, query_parameters)
            response = self._client.send(
                request, header_parameters, **operation_config)

            if response.status_code not in [200]:
                exp = CloudError(response)
                exp.request_id = response.headers.get('x-ms-request-id')
                raise exp

            return response

        # Deserialize response
        deserialized = models.ManagementLockObjectPaged(internal_paging, self._deserialize.dependencies)

        if raw:
            header_dict = {}
            client_raw_response = models.ManagementLockObjectPaged(internal_paging, self._deserialize.dependencies, header_dict)
            return client_raw_response

        return deserialized
    def list_at_subscription_level(
            self, filter=None, custom_headers=None, raw=False, **operation_config):
        """Gets all the management locks for a subscription.

        :param filter: The filter to apply on the operation.
        :type filter: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :rtype: :class:`ManagementLockObjectPaged
         <azure.mgmt.resource.locks.v2016_09_01.models.ManagementLockObjectPaged>`
        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
        """
        def internal_paging(next_link=None, raw=False):
            # Page fetcher used by the paged iterator below.
            if not next_link:
                # Construct URL
                url = '/subscriptions/{subscriptionId}/providers/Microsoft.Authorization/locks'
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
                }
                url = self._client.format_url(url, **path_format_arguments)

                # Construct parameters
                query_parameters = {}
                if filter is not None:
                    query_parameters['$filter'] = self._serialize.query("filter", filter, 'str')
                query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

            else:
                url = next_link
                query_parameters = {}

            # Construct headers
            header_parameters = {}
            header_parameters['Content-Type'] = 'application/json; charset=utf-8'
            if self.config.generate_client_request_id:
                header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
            if custom_headers:
                header_parameters.update(custom_headers)
            if self.config.accept_language is not None:
                header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

            # Construct and send request
            request = self._client.get(url, query_parameters)
            response = self._client.send(
                request, header_parameters, **operation_config)

            if response.status_code not in [200]:
                exp = CloudError(response)
                exp.request_id = response.headers.get('x-ms-request-id')
                raise exp

            return response

        # Deserialize response
        deserialized = models.ManagementLockObjectPaged(internal_paging, self._deserialize.dependencies)

        if raw:
            header_dict = {}
            client_raw_response = models.ManagementLockObjectPaged(internal_paging, self._deserialize.dependencies, header_dict)
            return client_raw_response

        return deserialized
| 48.893382 | 222 | 0.667212 | 5,778 | 53,196 | 5.943579 | 0.041883 | 0.041931 | 0.023761 | 0.031448 | 0.963427 | 0.963048 | 0.955914 | 0.95341 | 0.949391 | 0.94648 | 0 | 0.007501 | 0.235638 | 53,196 | 1,087 | 223 | 48.938362 | 0.837092 | 0.313219 | 0 | 0.900192 | 0 | 0.007678 | 0.186287 | 0.089426 | 0 | 0 | 0 | 0 | 0 | 1 | 0.036468 | false | 0 | 0.007678 | 0 | 0.101727 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
ab0cede82f126f4e9f59c2038cdc24b9ccf3ae87 | 6,624 | py | Python | tools/model.py | PKSingh0017/MSCG-Net | b7e79d68f14984fe460eff72bcbb8049e4d2bc9f | [
"MIT"
] | 59 | 2020-04-23T05:27:22.000Z | 2022-03-04T13:09:01.000Z | tools/model.py | PKSingh0017/MSCG-Net | b7e79d68f14984fe460eff72bcbb8049e4d2bc9f | [
"MIT"
] | 11 | 2020-04-29T07:11:58.000Z | 2022-03-12T00:28:29.000Z | tools/model.py | PKSingh0017/MSCG-Net | b7e79d68f14984fe460eff72bcbb8049e4d2bc9f | [
"MIT"
] | 20 | 2020-05-13T07:34:04.000Z | 2022-03-22T11:51:48.000Z | import os
import torch.nn.functional as F
from collections import OrderedDict
from pretrainedmodels import se_resnext50_32x4d, se_resnext101_32x4d
from lib.net.scg_gcn import *
def load_model(name='MSCG-Rx50', classes=7, node_size=(32, 32)):
    """Construct an MSCG-Net model by name.

    :param name: model identifier, either ``'MSCG-Rx50'`` or ``'MSCG-Rx101'``
    :param classes: number of output segmentation classes
    :param node_size: graph node grid size forwarded to the network.
        Previously this argument was accepted but silently ignored; it is now
        passed through as ``nodes=`` (the default matches the constructors'
        own default, so existing callers are unaffected).
    :return: the constructed network, or ``-1`` when ``name`` is unknown
        (kept for backward compatibility with callers that check for -1)
    """
    if name == 'MSCG-Rx50':
        net = rx50_gcn_3head_4channel(out_channels=classes, nodes=node_size)
    elif name == 'MSCG-Rx101':
        net = rx101_gcn_3head_4channel(out_channels=classes, nodes=node_size)
    else:
        print('not found the net')
        return -1

    return net
class rx50_gcn_3head_4channel(nn.Module):
    """MSCG-Net with an SE-ResNeXt50 backbone, a 4-channel input stem and a
    3-head (0/90/180-degree augmented) self-constructing-graph GCN decoder.
    """

    def __init__(self, out_channels=7, pretrained=True,
                 nodes=(32, 32), dropout=0,
                 enhance_diag=True, aux_pred=True):
        super(rx50_gcn_3head_4channel, self).__init__()  # same with res_fdcs_v5
        self.aux_pred = aux_pred
        self.node_size = nodes
        self.num_cluster = out_channels

        resnet = se_resnext50_32x4d()
        self.layer0, self.layer1, self.layer2, self.layer3 = \
            resnet.layer0, resnet.layer1, resnet.layer2, resnet.layer3

        # Replace the 3-channel stem conv with a 4-channel one and initialise
        # it from the pretrained weights (4th channel duplicated from channel 0).
        self.conv0 = torch.nn.Conv2d(4, 64, kernel_size=(7, 7), stride=(2, 2), padding=(3, 3), bias=False)
        # Grab the first parameter tensor of layer0 (the pretrained stem
        # conv's weight, shape (64, 3, 7, 7)).
        for child in self.layer0.children():
            for param in child.parameters():
                par = param
                break
            break

        # BUGFIX: the original wrote ``self.conv0.parameters = torch.cat(...)``
        # which merely shadows nn.Module.parameters with a tensor attribute and
        # never touches the actual weights, leaving conv0 randomly initialised.
        # Copy the expanded pretrained kernel into the weight tensor instead.
        with torch.no_grad():
            self.conv0.weight.copy_(torch.cat([par[:, 0, :, :].unsqueeze(1), par], 1))
        self.layer0 = torch.nn.Sequential(self.conv0, *list(self.layer0)[1:4])

        self.graph_layers1 = GCN_Layer(1024, 128, bnorm=True, activation=nn.ReLU(True), dropout=dropout)
        self.graph_layers2 = GCN_Layer(128, out_channels, bnorm=False, activation=None)

        self.scg = SCG_block(in_ch=1024,
                             hidden_ch=out_channels,
                             node_size=nodes,
                             add_diag=enhance_diag,
                             dropout=dropout)

        weight_xavier_init(self.graph_layers1, self.graph_layers2, self.scg)

    def forward(self, x):
        """Run three SCG-GCN heads (identity, 90-degree rotation, horizontal
        flip), fuse their logits and upsample to the input resolution.

        Returns (logits, scg_loss_sum) in training mode, logits otherwise.
        """
        x_size = x.size()

        gx = self.layer3(self.layer2(self.layer1(self.layer0(x))))
        gx90 = gx.permute(0, 1, 3, 2)   # swap H/W axes (90-degree view)
        gx180 = gx.flip(3)              # flip along W (mirrored view)
        B, C, H, W = gx.size()

        # Head 1: original orientation.
        A, gx, loss, z_hat = self.scg(gx)
        gx, _ = self.graph_layers2(
            self.graph_layers1((gx.reshape(B, -1, C), A)))  # + gx.reshape(B, -1, C)
        if self.aux_pred:
            gx += z_hat
        gx = gx.reshape(B, self.num_cluster, self.node_size[0], self.node_size[1])

        # Head 2: 90-degree view, undone by permuting back before fusion.
        A, gx90, loss2, z_hat = self.scg(gx90)
        gx90, _ = self.graph_layers2(
            self.graph_layers1((gx90.reshape(B, -1, C), A)))  # + gx.reshape(B, -1, C)
        if self.aux_pred:
            gx90 += z_hat
        gx90 = gx90.reshape(B, self.num_cluster, self.node_size[1], self.node_size[0])
        gx90 = gx90.permute(0, 1, 3, 2)
        gx += gx90

        # Head 3: mirrored view, flipped back before fusion.
        A, gx180, loss3, z_hat = self.scg(gx180)
        gx180, _ = self.graph_layers2(
            self.graph_layers1((gx180.reshape(B, -1, C), A)))  # + gx.reshape(B, -1, C)
        if self.aux_pred:
            gx180 += z_hat
        gx180 = gx180.reshape(B, self.num_cluster, self.node_size[0], self.node_size[1])
        gx180 = gx180.flip(3)
        gx += gx180

        gx = F.interpolate(gx, (H, W), mode='bilinear', align_corners=False)
        if self.training:
            return F.interpolate(gx, x_size[2:], mode='bilinear', align_corners=False), loss + loss2 + loss3
        else:
            return F.interpolate(gx, x_size[2:], mode='bilinear', align_corners=False)
class rx101_gcn_3head_4channel(nn.Module):
    """MSCG-Net with an SE-ResNeXt101 backbone, a 4-channel input stem and a
    3-head (0/90/180-degree augmented) self-constructing-graph GCN decoder.
    """

    def __init__(self, out_channels=7, pretrained=True,
                 nodes=(32, 32), dropout=0,
                 enhance_diag=True, aux_pred=True):
        super(rx101_gcn_3head_4channel, self).__init__()  # same with res_fdcs_v5
        self.aux_pred = aux_pred
        self.node_size = nodes
        self.num_cluster = out_channels

        resnet = se_resnext101_32x4d()
        self.layer0, self.layer1, self.layer2, self.layer3 = \
            resnet.layer0, resnet.layer1, resnet.layer2, resnet.layer3

        # Replace the 3-channel stem conv with a 4-channel one and initialise
        # it from the pretrained weights (4th channel duplicated from channel 0).
        self.conv0 = torch.nn.Conv2d(4, 64, kernel_size=(7, 7), stride=(2, 2), padding=(3, 3), bias=False)
        for child in self.layer0.children():
            for param in child.parameters():
                par = param
                break
            break

        # BUGFIX: the original wrote ``self.conv0.parameters = torch.cat(...)``
        # which only shadows nn.Module.parameters with a tensor attribute and
        # never copies the pretrained weights.  Write into the weight tensor.
        with torch.no_grad():
            self.conv0.weight.copy_(torch.cat([par[:, 0, :, :].unsqueeze(1), par], 1))
        self.layer0 = torch.nn.Sequential(self.conv0, *list(self.layer0)[1:4])

        self.graph_layers1 = GCN_Layer(1024, 128, bnorm=True, activation=nn.ReLU(True), dropout=dropout)
        self.graph_layers2 = GCN_Layer(128, out_channels, bnorm=False, activation=None)

        self.scg = SCG_block(in_ch=1024,
                             hidden_ch=out_channels,
                             node_size=nodes,
                             add_diag=enhance_diag,
                             dropout=dropout)

        weight_xavier_init(self.graph_layers1, self.graph_layers2, self.scg)

    def forward(self, x):
        """Run three SCG-GCN heads (identity, 90-degree rotation, horizontal
        flip), fuse their logits and upsample to the input resolution.

        Returns (logits, scg_loss_sum) in training mode, logits otherwise.
        """
        x_size = x.size()

        gx = self.layer3(self.layer2(self.layer1(self.layer0(x))))
        gx90 = gx.permute(0, 1, 3, 2)   # 90-degree view
        gx180 = gx.flip(3)              # mirrored view
        B, C, H, W = gx.size()

        # Head 1: original orientation.
        A, gx, loss, z_hat = self.scg(gx)
        gx, _ = self.graph_layers2(
            self.graph_layers1((gx.view(B, -1, C), A)))  # + gx.reshape(B, -1, C)
        if self.aux_pred:
            gx += z_hat
        gx = gx.view(B, self.num_cluster, self.node_size[0], self.node_size[1])

        # Head 2: rotated view, permuted back before fusion.
        A, gx90, loss2, z_hat = self.scg(gx90)
        gx90, _ = self.graph_layers2(
            self.graph_layers1((gx90.view(B, -1, C), A)))  # + gx.reshape(B, -1, C)
        if self.aux_pred:
            gx90 += z_hat
        gx90 = gx90.view(B, self.num_cluster, self.node_size[1], self.node_size[0])
        gx90 = gx90.permute(0, 1, 3, 2)
        gx += gx90

        # Head 3: mirrored view, flipped back before fusion.
        A, gx180, loss3, z_hat = self.scg(gx180)
        gx180, _ = self.graph_layers2(
            self.graph_layers1((gx180.view(B, -1, C), A)))  # + gx.reshape(B, -1, C)
        if self.aux_pred:
            gx180 += z_hat
        gx180 = gx180.view(B, self.num_cluster, self.node_size[0], self.node_size[1])
        gx180 = gx180.flip(3)
        gx += gx180

        gx = F.interpolate(gx, (H, W), mode='bilinear', align_corners=False)
        if self.training:
            return F.interpolate(gx, x_size[2:], mode='bilinear', align_corners=False), loss + loss2 + loss3
        else:
            return F.interpolate(gx, x_size[2:], mode='bilinear', align_corners=False)
| 37.423729 | 108 | 0.57971 | 919 | 6,624 | 3.996736 | 0.145811 | 0.049006 | 0.045739 | 0.024503 | 0.912061 | 0.912061 | 0.893548 | 0.893548 | 0.890553 | 0.890553 | 0 | 0.075392 | 0.287138 | 6,624 | 176 | 109 | 37.636364 | 0.702457 | 0.027627 | 0 | 0.768657 | 0 | 0 | 0.014461 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.037313 | false | 0 | 0.037313 | 0 | 0.134328 | 0.007463 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
ab3fda7366d4220d5726f5abee49756e45bc3673 | 32,593 | py | Python | tests/unit_test/action/action_test.py | ash-pramila/chiron | ed207d52766fcce48ebc884ac97185b2901161d4 | [
"Apache-2.0"
] | null | null | null | tests/unit_test/action/action_test.py | ash-pramila/chiron | ed207d52766fcce48ebc884ac97185b2901161d4 | [
"Apache-2.0"
] | 1 | 2021-01-29T22:20:59.000Z | 2021-01-29T22:20:59.000Z | tests/unit_test/action/action_test.py | ash-pramila/chiron | ed207d52766fcce48ebc884ac97185b2901161d4 | [
"Apache-2.0"
] | null | null | null | import json
import os
from typing import Dict, Text, Any, List
import pytest
import responses
from mongoengine import connect, disconnect
from rasa_sdk import Tracker
from rasa_sdk.executor import CollectingDispatcher
from kairon.action_server.data_objects import HttpActionRequestBody, HttpActionConfig
from kairon.action_server.actions import ActionUtility, HttpAction
from kairon.action_server.exception import HttpActionFailure
from kairon.utils import Utility
def pytest_configure():
    """Seed the pytest namespace; ``db_url`` is populated later by the setup fixture."""
    defaults = {'db_url': None}
    return defaults
class TestActions:
    @pytest.fixture(autouse=True)
    def setup(self):
        """Point config at the test system.yaml and open the Mongo connection."""
        os.environ["system_file"] = "./tests/testing_data/system.yaml"
        Utility.load_evironment()
        db_url = Utility.environment['database']["url"]
        # Stash the url on the pytest namespace so tests can reuse it.
        pytest.db_url = db_url
        connect(host=db_url)
    @responses.activate
    def test_execute_http_request_getWith_auth_token(self):
        """GET with an auth token: JSON body is parsed and Authorization header is forwarded."""
        http_url = 'http://localhost:8080/mock'
        # file deepcode ignore HardcodedNonCryptoSecret: Random string for testing
        auth_token = "bearer jkhfhkujsfsfslfhjsfhkjsfhskhfksj"
        responses.add(
            method=responses.GET,
            url=http_url,
            json={'data': 'test_data', 'test_class': [{'key': 'value'}, {'key2': 'value2'}]},
            status=200
        )

        response = ActionUtility.execute_http_request(auth_token=auth_token, http_url=http_url,
                                                      request_method=responses.GET)
        assert response
        assert response['data'] == 'test_data'
        assert len(response['test_class']) == 2
        assert response['test_class'][1]['key2'] == 'value2'
        assert responses.calls[0].request.headers['Authorization'] == auth_token
    @responses.activate
    def test_execute_http_request_get_no_auth_token(self):
        """GET without an auth token: no Authorization header is sent."""
        http_url = 'http://localhost:8080/mock'
        responses.add(
            method=responses.GET,
            url=http_url,
            json={'data': 'test_data', 'test_class': [{'key': 'value'}, {'key2': 'value2'}]},
            status=200
        )

        response = ActionUtility.execute_http_request(auth_token=None, http_url=http_url,
                                                      request_method=responses.GET)
        assert response
        assert response['data'] == 'test_data'
        assert len(response['test_class']) == 2
        assert response['test_class'][1]['key2'] == 'value2'
        assert 'Authorization' not in responses.calls[0].request.headers
    @responses.activate
    def test_execute_http_request_post_with_auth_token(self):
        """POST with auth token: body must match exactly and Authorization is forwarded."""
        http_url = 'http://localhost:8080/mock'
        auth_token = "bearer jkhfhkujsfsfslfhjsfhkjsfhskhfksj"
        resp_msg = "Data added successfully"
        request_params = {'data': 'test_data', 'test_class': [{'key': 'value'}, {'key2': 'value2'}]}
        responses.add(
            method=responses.POST,
            url=http_url,
            body=resp_msg,
            status=200,
            # Fails the mock if the serialized request body differs.
            match=[responses.json_params_matcher(request_params)]
        )

        response = ActionUtility.execute_http_request(auth_token=auth_token, http_url=http_url,
                                                      request_method=responses.POST, request_body=request_params)
        assert response
        assert response == resp_msg
        assert responses.calls[0].request.headers['Authorization'] == auth_token
    @responses.activate
    def test_execute_http_request_post_no_auth_token(self):
        """POST without auth token: body matched, no Authorization header sent."""
        http_url = 'http://localhost:8080/mock'
        resp_msg = "Data added successfully"
        request_params = {'data': 'test_data', 'test_class': [{'key': 'value'}, {'key2': 'value2'}]}
        responses.add(
            method=responses.POST,
            url=http_url,
            body=resp_msg,
            status=200,
            match=[responses.json_params_matcher(request_params)]
        )

        response = ActionUtility.execute_http_request(auth_token=None, http_url=http_url,
                                                      request_method=responses.POST, request_body=request_params)
        assert response
        assert response == resp_msg
        assert 'Authorization' not in responses.calls[0].request.headers
    @responses.activate
    def test_execute_http_request_put_with_auth_token(self):
        """PUT with auth token: body matched and Authorization forwarded."""
        http_url = 'http://localhost:8080/mock'
        auth_token = "bearer jkhfhkujsfsfslfhjsfhkjsfhskhfksj"
        resp_msg = "Data updated successfully"
        request_params = {'data': 'test_data', 'test_class': [{'key': 'value'}, {'key2': 'value2'}]}
        responses.add(
            method=responses.PUT,
            url=http_url,
            body=resp_msg,
            status=200,
            match=[responses.json_params_matcher(request_params)]
        )

        response = ActionUtility.execute_http_request(auth_token=auth_token, http_url=http_url,
                                                      request_method=responses.PUT, request_body=request_params)
        assert response
        assert response == resp_msg
        assert responses.calls[0].request.headers['Authorization'] == auth_token
    @responses.activate
    def test_execute_http_request_put_no_auth_token(self):
        """PUT without auth token: body matched, no Authorization header sent."""
        http_url = 'http://localhost:8080/mock'
        resp_msg = "Data updated successfully"
        request_params = {'data': 'test_data', 'test_class': [{'key': 'value'}, {'key2': 'value2'}]}
        responses.add(
            method=responses.PUT,
            url=http_url,
            body=resp_msg,
            status=200,
            match=[responses.json_params_matcher(request_params)]
        )

        response = ActionUtility.execute_http_request(auth_token=None, http_url=http_url,
                                                      request_method=responses.PUT, request_body=request_params)
        assert response
        assert response == resp_msg
        assert 'Authorization' not in responses.calls[0].request.headers
    @responses.activate
    def test_execute_http_request_delete_with_request_body_auth_token(self):
        """DELETE with a request body and auth token: body matched, header forwarded."""
        http_url = 'http://localhost:8080/mock'
        auth_token = "bearer jkhfhkujsfsfslfhjsfhkjsfhskhfksj"
        resp_msg = "Data deleted successfully"
        request_params = {'data': 'test_data', 'test_class': [{'key': 'value'}, {'key2': 'value2'}]}
        responses.add(
            method=responses.DELETE,
            url=http_url,
            body=resp_msg,
            status=200,
            match=[responses.json_params_matcher(request_params)]
        )

        response = ActionUtility.execute_http_request(auth_token=auth_token, http_url=http_url,
                                                      request_method=responses.DELETE, request_body=request_params)
        assert response
        assert response == resp_msg
        assert responses.calls[0].request.headers['Authorization'] == auth_token
    @responses.activate
    def test_execute_http_request_delete_with_auth_token_no_request_body(self):
        """DELETE with auth token but request_body=None still succeeds."""
        http_url = 'http://localhost:8080/mock'
        auth_token = "bearer jkhfhkujsfsfslfhjsfhkjsfhskhfksj"
        resp_msg = "Data deleted successfully"
        responses.add(
            method=responses.DELETE,
            url=http_url,
            body=resp_msg,
            status=200,
        )

        response = ActionUtility.execute_http_request(auth_token=auth_token, http_url=http_url,
                                                      request_method=responses.DELETE, request_body=None)
        assert response
        assert response == resp_msg
        assert responses.calls[0].request.headers['Authorization'] == auth_token
    @responses.activate
    def test_execute_http_request_delete_no_auth_token(self):
        """DELETE without auth token: body matched, no Authorization header sent."""
        http_url = 'http://localhost:8080/mock'
        resp_msg = "Data updated successfully"
        request_params = {'data': 'test_data', 'test_class': [{'key': 'value'}, {'key2': 'value2'}]}
        responses.add(
            method=responses.DELETE,
            url=http_url,
            body=resp_msg,
            status=200,
            match=[
                responses.json_params_matcher(request_params)
            ]
        )

        response = ActionUtility.execute_http_request(auth_token=None, http_url=http_url,
                                                      request_method=responses.DELETE, request_body=request_params)
        assert response
        assert response == resp_msg
        assert 'Authorization' not in responses.calls[0].request.headers
    def test_get_http_action_config(self):
        """Saved HttpActionConfig is retrieved field-for-field by bot + action name."""
        http_params = [HttpActionRequestBody(key="key1", value="value1", parameter_type="slot"),
                       HttpActionRequestBody(key="key2", value="value2")]
        expected = HttpActionConfig(
            auth_token="bearer kjflksjflksajfljsdflinlsufisnflisjbjsdalibvs",
            action_name="http_action",
            response="json",
            http_url="http://test.com",
            request_method="GET",
            params_list=http_params,
            bot="bot",
            user="user"
        ).save().to_mongo().to_dict()

        actual = ActionUtility.get_http_action_config(pytest.db_url, "bot", "http_action")
        assert actual is not None
        assert expected['auth_token'] == actual['auth_token']
        assert expected['action_name'] == actual['action_name']
        assert expected['response'] == actual['response']
        assert expected['http_url'] == actual['http_url']
        assert expected['request_method'] == actual['request_method']
        assert expected['params_list'] is not None
        assert expected['params_list'][0]['key'] == actual['params_list'][0]['key']
        assert expected['params_list'][0]['value'] == actual['params_list'][0]['value']
        assert expected['params_list'][0]['parameter_type'] == actual['params_list'][0]['parameter_type']
        assert expected['params_list'][1]['key'] == actual['params_list'][1]['key']
        assert expected['params_list'][1]['value'] == actual['params_list'][1]['value']
        assert expected['params_list'][1]['parameter_type'] == actual['params_list'][1]['parameter_type']
        assert actual['status']
    def test_get_http_action_invalid_db_url(self):
        """An unreachable database url surfaces as HttpActionFailure."""
        # Drop the fixture's connection first so the bad url is actually used.
        disconnect()
        try:
            ActionUtility.get_http_action_config("mongodb://localhost:8000/test", "bot", "http_action")
            assert False
        except HttpActionFailure:
            assert True
    def test_get_http_action_no_db_url(self):
        """Missing db_url raises HttpActionFailure with the required-args message."""
        try:
            ActionUtility.get_http_action_config(db_url=None, bot="bot", action_name="http_action")
            assert False
        except HttpActionFailure as ex:
            assert str(ex) == "Database url, bot name and action name are required"
    def test_get_http_action_no_bot(self):
        """Missing bot name raises HttpActionFailure with the required-args message."""
        try:
            ActionUtility.get_http_action_config(db_url=pytest.db_url, bot=None, action_name="http_action")
            assert False
        except HttpActionFailure as ex:
            assert str(ex) == "Database url, bot name and action name are required"
    def test_get_http_action_no_http_action(self):
        """Missing action name raises HttpActionFailure with the required-args message."""
        try:
            ActionUtility.get_http_action_config(db_url=pytest.db_url, bot="bot", action_name=None)
            assert False
        except HttpActionFailure as ex:
            assert str(ex) == "Database url, bot name and action name are required"
    def test_get_http_action_invalid_bot(self):
        """Looking up a config under the wrong bot id raises a not-found failure."""
        http_params = [HttpActionRequestBody(key="key1", value="value1", parameter_type="slot"),
                       HttpActionRequestBody(key="key2", value="value2")]
        HttpActionConfig(
            auth_token="bearer kjflksjflksajfljsdflinlsufisnflisjbjsdalibvs",
            action_name="http_action",
            response="json",
            http_url="http://test.com",
            request_method="GET",
            params_list=http_params,
            bot="bot",
            user="user"
        ).save().to_mongo().to_dict()

        try:
            ActionUtility.get_http_action_config(pytest.db_url, "bot1", "http_action")
            assert False
        except HttpActionFailure as ex:
            assert str(ex).__contains__("No HTTP action found for bot")
    def test_get_http_action_invalid_http_action(self):
        """Looking up a non-existent action name raises a not-found failure."""
        http_params = [HttpActionRequestBody(key="key1", value="value1", parameter_type="slot"),
                       HttpActionRequestBody(key="key2", value="value2")]
        HttpActionConfig(
            auth_token="bearer kjflksjflksajfljsdflinlsufisnflisjbjsdalibvs",
            action_name="http_action",
            response="json",
            http_url="http://test.com",
            request_method="GET",
            params_list=http_params,
            bot="bot",
            user="user"
        ).save().to_mongo().to_dict()

        try:
            ActionUtility.get_http_action_config(pytest.db_url, "bot", "http_action1")
            assert False
        except HttpActionFailure as ex:
            assert str(ex).__contains__("No HTTP action found for bot")
    def test_get_http_action_no_request_body(self):
        """Config saved with an empty params_list; lookup of a different action still fails."""
        # NOTE(review): despite the test name, the lookup below queries
        # "http_action1" (which was never saved), so this effectively repeats
        # the not-found scenario rather than exercising the empty-params path
        # — confirm intent with the original author.
        http_params = []
        HttpActionConfig(
            auth_token="bearer kjflksjflksajfljsdflinlsufisnflisjbjsdalibvs",
            action_name="http_action",
            response="json",
            http_url="http://test.com",
            request_method="GET",
            params_list=http_params,
            bot="bot",
            user="user"
        ).save().to_mongo().to_dict()

        try:
            ActionUtility.get_http_action_config(pytest.db_url, "bot", "http_action1")
            assert False
        except HttpActionFailure as ex:
            assert str(ex).__contains__("No HTTP action found for bot")
    def test_prepare_request(self):
        """Static values are copied through and slot-typed params are filled from tracker slots."""
        slots = {"bot": "demo_bot", "http_action_config": "http_action_name", "param2": "param2value"}
        events = [{"event1": "hello"}, {"event2": "how are you"}]
        http_action_config_params = [HttpActionRequestBody(key="param1", value="value1"),
                                     HttpActionRequestBody(key="param2", value="", parameter_type="slot")]
        tracker = Tracker(sender_id="sender1", slots=slots, events=events, paused=False, latest_message=None,
                          followup_action=None, active_loop=None, latest_action_name=None)

        actual_request_body = ActionUtility.prepare_request(tracker=tracker,
                                                            http_action_config_params=http_action_config_params)
        assert actual_request_body
        assert actual_request_body['param1'] == 'value1'
        assert actual_request_body['param2'] == 'param2value'
    def test_prepare_request_empty_slot(self):
        """A slot-typed param whose slot is absent raises HttpActionFailure."""
        slots = {"bot": "demo_bot", "http_action_config": "http_action_name", "param2": "param2value"}
        events = [{"event1": "hello"}, {"event2": "how are you"}]
        http_action_config_params = [HttpActionRequestBody(key="param1", value="value1"),
                                     HttpActionRequestBody(key="param3", value="", parameter_type="slot")]
        tracker = Tracker(sender_id="sender1", slots=slots, events=events, paused=False, latest_message=None,
                          followup_action=None, active_loop=None, latest_action_name=None)

        try:
            ActionUtility.prepare_request(tracker=tracker, http_action_config_params=http_action_config_params)
            assert False
        except HttpActionFailure as ex:
            assert str(ex) == ("Coudn't find value for key param3 from slot")
    def test_prepare_request_no_request_params(self):
        """No configured params yields an empty request body."""
        slots = {"bot": "demo_bot", "http_action_config": "http_action_name", "param2": "param2value"}
        events: List[Dict] = None
        http_action_config_params: List[HttpActionRequestBody] = None
        tracker = Tracker(sender_id="sender1", slots=slots, events=events, paused=False, latest_message=None,
                          followup_action=None, active_loop=None, latest_action_name=None)

        actual_request_body = ActionUtility.prepare_request(tracker=tracker,
                                                            http_action_config_params=http_action_config_params)
        # deepcode ignore C1801: empty request body for http request with no request body params
        assert len(actual_request_body) == 0
    def test_name(self):
        """The custom action registers under the fixed name 'kairon_http_action'."""
        assert HttpAction().name() == "kairon_http_action"
    def test_is_empty(self):
        """is_empty treats '', whitespace-only and None as empty; real text is not."""
        assert ActionUtility.is_empty("")
        assert ActionUtility.is_empty("  ")
        assert ActionUtility.is_empty(None)
        assert not ActionUtility.is_empty("None")
    def test_prepare_response(self):
        """${dot.path} placeholders in the response template resolve into the JSON payload."""
        json1 = json.dumps({
            "a": {
                "b": {
                    "3": 2,
                    "43": 30,
                    "c": [],
                    "d": ['red', 'buggy', 'bumpers'],
                }
            }
        })
        response = ActionUtility.prepare_response("The value of ${a.b.3} in ${a.b.d.0} is ${a.b.c}", json1)
        assert response == 'The value of 2 in red is []'

        # A placeholder resolving to an object/list substitutes its JSON form.
        json2 = json.dumps({
            "data": [
                {"a": {
                    "b": {
                        "43": 30,
                        "c": [],
                        "d": ['red', 'buggy', 'bumpers'],
                    }}},
                {"a": {
                    "b": {
                        "43": 5,
                        "c": [1, 2],
                        "d": ['buggy', 'bumpers'],
                    }}}
            ]
        })
        response = ActionUtility.prepare_response("The value of ${data.0.a} in ${data.0.a.b} is ${data.0.a.b.d}", json2)
        assert response == 'The value of {"b": {"43": 30, "c": [], "d": ["red", "buggy", "bumpers"]}} in {"43": 30, "c": [], "d": ["red", "buggy", "bumpers"]} is [\'red\', \'buggy\', \'bumpers\']'
def test_prepare_response_key_not_present(self):
    """Referencing a key absent from the response must raise HttpActionFailure."""
    payload = json.dumps({
        "a": {
            "b": {
                "3": 2,
                "43": 30,
                "c": [],
                "d": ['red', 'buggy', 'bumpers'],
            }
        }
    })
    raised = False
    try:
        ActionUtility.prepare_response("The value of ${a.b.3} in ${a.b.d.0} is ${a.b.e}", payload)
    except HttpActionFailure:
        raised = True
    assert raised
def test_prepare_response_string_response(self):
    """A template without placeholders is returned untouched."""
    payload = json.dumps({
        "a": {
            "b": {
                "3": 2,
                "43": 30,
                "c": [],
                "d": ['red', 'buggy', 'bumpers'],
            }
        }
    })
    template = "The value of red is 0"
    assert ActionUtility.prepare_response(template, payload) == template
def test_prepare_response_string_empty_response_string(self):
    """An empty template falls back to returning the whole HTTP response as JSON."""
    payload = json.dumps({
        "a": {
            "b": {
                "3": 2,
                "43": 30,
                "c": [],
                "d": ['red', 'buggy', 'bumpers'],
            }
        }
    })
    result = ActionUtility.prepare_response("", payload)
    assert result == '{"a": {"b": {"3": 2, "43": 30, "c": [], "d": ["red", "buggy", "bumpers"]}}}'
def test_prepare_response_string_empty_request_output(self):
    """Placeholders against an effectively empty response must raise HttpActionFailure."""
    raised = False
    try:
        ActionUtility.prepare_response("The value of ${a.b.3} in ${a.b.d.0} is ${a.b.e}", json.dumps("{}"))
    except HttpActionFailure:
        raised = True
    assert raised
def test_prepare_response_invalid_response_json(self):
    """A non-JSON response combined with placeholders must raise with a clear message."""
    error = None
    try:
        ActionUtility.prepare_response("The value of ${a.b.3} in ${a.b.d.0} is ${a.b.c}", "Not a json string")
    except HttpActionFailure as e:
        error = str(e)
    assert error == 'Could not find value for keys in response'
def test_prepare_response_as_json_and_expected_as_plain_string(self):
    """A placeholder-free template passes through even when the response is not JSON."""
    result = ActionUtility.prepare_response("The value of 2 in red is []", "Not a json string")
    assert result == 'The value of 2 in red is []'
def test_prepare_response_as_string_and_expected_as_none(self):
    """A placeholder-free template passes through even when the response is None."""
    result = ActionUtility.prepare_response("The value of 2 in red is []", None)
    assert result == 'The value of 2 in red is []'
def test_run_invalid_http_action(self):
    """A slot pointing at a non-existent action config must dispatch a failure message.

    Bug fix: the original computed ``str(...).__contains__(...)`` and discarded
    the result, so the test asserted nothing; the check is now a real assert.
    """
    slots = {"bot": "5f50fd0a56b698ca10d35d2e", "http_action_config": "test_run_invalid_http_action",
             "param2": "param2value"}
    events = [{"event1": "hello"}, {"event2": "how are you"}]
    # Saved under a *different* action name so the lookup for the slot value fails.
    HttpActionConfig(
        auth_token="bearer kjflksjflksajfljsdflinlsufisnflisjbjsdalibvs",
        action_name="test_run_invalid_http_action1",
        response="json",
        http_url="http://www.google.com",
        request_method="GET",
        params_list=None,
        bot="5f50fd0a56b698ca10d35d2e",
        user="user"
    ).save()
    dispatcher: CollectingDispatcher = CollectingDispatcher()
    tracker = Tracker(sender_id="sender1", slots=slots, events=events, paused=False, latest_message=None,
                      followup_action=None, active_loop=None, latest_action_name=None)
    domain: Dict[Text, Any] = None
    HttpAction().run(dispatcher, tracker, domain)
    # Previously the boolean from __contains__ was silently dropped.
    assert "I have failed to process your request: No HTTP action found for bot" \
           in str(dispatcher.messages[0]['text'])
def test_run_no_bot(self):
    """A missing 'bot' slot makes the action answer with a generic failure message."""
    tracker = Tracker(
        sender_id="sender1",
        slots={"bot": None, "http_action_config_http_action": "new_http_action", "param2": "param2value"},
        events=[{"event1": "hello"}, {"event2": "how are you"}],
        paused=False,
        latest_message={'text': 'get intents', 'intent_ranking': [{'name': 'http_action'}]},
        followup_action=None, active_loop=None, latest_action_name=None)
    dispatcher = CollectingDispatcher()
    actual = HttpAction().run(dispatcher, tracker, None)
    assert actual is not None
    assert actual[0]['name'] is not None
    assert str(actual[0]['name']) == 'I have failed to process your request'
def test_run_no_http_action(self):
    """A missing http-action-config slot makes the action answer with a failure message."""
    tracker = Tracker(
        sender_id="sender1",
        slots={"bot": "jhgfsjgfausyfgus", "http_action_config_http_action": None, "param2": "param2value"},
        events=[{"event1": "hello"}, {"event2": "how are you"}],
        paused=False,
        latest_message={'text': 'get intents', 'intent_ranking': [{'name': 'http_action'}]},
        followup_action=None, active_loop=None, latest_action_name=None)
    dispatcher = CollectingDispatcher()
    actual = HttpAction().run(dispatcher, tracker, None)
    assert actual is not None
    assert actual[0]['name'] is not None
    assert str(actual[0]['name']) == 'I have failed to process your request'
def test_run(self):
    """Happy path: the saved config's response text comes back as the slot value."""
    tracker = Tracker(
        sender_id="sender1",
        slots={"bot": "5f50fd0a56b698ca10d35d2e", "http_action_config_test_run": "http_action",
               "param2": "param2value"},
        events=[{"event1": "hello"}, {"event2": "how are you"}],
        paused=False,
        latest_message={'text': 'get intents', 'intent_ranking': [{'name': 'test_run'}]},
        followup_action=None, active_loop=None, latest_action_name=None)
    dispatcher = CollectingDispatcher()
    HttpActionConfig(
        auth_token="bearer kjflksjflksajfljsdflinlsufisnflisjbjsdalibvs",
        action_name="http_action",
        response="This should be response",
        http_url="http://www.google.com",
        request_method="GET",
        params_list=None,
        bot="5f50fd0a56b698ca10d35d2e",
        user="user"
    ).save().to_mongo().to_dict()
    actual = HttpAction().run(dispatcher, tracker, None)
    assert actual is not None
    assert actual[0]['name'] == 'This should be response'
@responses.activate
def test_run_with_post(self):
    """A POST action substitutes the raw HTTP body via the ${RESPONSE} placeholder."""
    mock_url = 'http://localhost:8080/mock'
    responses.add(
        method=responses.POST,
        url=mock_url,
        body="5000",
        status=200,
    )
    tracker = Tracker(
        sender_id="sender1",
        slots={"bot": "5f50fd0a56b698ca10d35d2e", "http_action_config_test_run": "test_run_with_post"},
        events=[{"event1": "hello"}, {"event2": "how are you"}],
        paused=False,
        latest_message={'text': 'get intents', 'intent_ranking': [{'name': 'test_run'}]},
        followup_action=None, active_loop=None, latest_action_name=None)
    dispatcher = CollectingDispatcher()
    HttpActionConfig(
        auth_token="",
        action_name="test_run_with_post",
        response="Data added successfully, id:${RESPONSE}",
        http_url=mock_url,
        request_method="POST",
        params_list=None,
        bot="5f50fd0a56b698ca10d35d2e",
        user="user"
    ).save().to_mongo().to_dict()
    actual = HttpAction().run(dispatcher, tracker, None)
    assert actual is not None
    assert actual[0]['name'] == 'Data added successfully, id:5000'
@responses.activate
def test_run_with_get(self):
    """A GET action resolves dotted placeholders against the JSON response body."""
    mock_url = 'http://localhost:8080/mock'
    responses.add(
        method=responses.GET,
        url=mock_url,
        body=json.dumps({
            "a": {
                "b": {
                    "3": 2,
                    "43": 30,
                    "c": [],
                    "d": ['red', 'buggy', 'bumpers'],
                }
            }
        }),
        status=200,
    )
    tracker = Tracker(
        sender_id="sender1",
        slots={"bot": "5f50fd0a56b698ca10d35d2e", "http_action_config_test_run": "test_run_with_post"},
        events=[{"event1": "hello"}, {"event2": "how are you"}],
        paused=False,
        latest_message={'text': 'get intents', 'intent_ranking': [{'name': 'test_run'}]},
        followup_action=None, active_loop=None, latest_action_name=None)
    dispatcher = CollectingDispatcher()
    HttpActionConfig(
        auth_token="",
        action_name="test_run_with_post",
        response="The value of ${a.b.3} in ${a.b.d.0} is ${a.b.d}",
        http_url=mock_url,
        request_method="GET",
        params_list=None,
        bot="5f50fd0a56b698ca10d35d2e",
        user="user"
    ).save().to_mongo().to_dict()
    actual = HttpAction().run(dispatcher, tracker, None)
    assert actual is not None
    assert str(actual[0]['name']) == "The value of 2 in red is ['red', 'buggy', 'bumpers']"
def test_run_no_connection(self):
    """When the configured endpoint is unreachable the action reports failure.

    No ``responses`` mock is registered here, so the GET genuinely fails to
    connect and the action must fall back to its failure message.
    """
    slots = {"bot": "5f50fd0a56b698ca10d35d2e", "http_action_config_test_run": "test_run_with_post"}
    events = [{"event1": "hello"}, {"event2": "how are you"}]
    dispatcher: CollectingDispatcher = CollectingDispatcher()
    latest_message = {'text': 'get intents', 'intent_ranking': [{'name': 'test_run'}]}
    tracker = Tracker(sender_id="sender1", slots=slots, events=events, paused=False, latest_message=latest_message,
                      followup_action=None, active_loop=None, latest_action_name=None)
    domain: Dict[Text, Any] = None
    HttpActionConfig(
        auth_token="",
        action_name="test_run_with_post",
        response="This should be response",
        http_url="http://localhost:8080/mock",
        request_method="GET",
        params_list=None,
        bot="5f50fd0a56b698ca10d35d2e",
        user="user"
    ).save()
    actual: List[Dict[Text, Any]] = HttpAction().run(dispatcher, tracker, domain)
    assert actual is not None
    # idiomatic membership test instead of calling __contains__ directly
    assert 'I have failed to process your request' in str(actual[0]['name'])
@responses.activate
def test_run_with_get_placeholder_vs_string_response(self):
    """Placeholders cannot be resolved against a plain-string body, so the action fails."""
    mock_url = 'http://localhost:8080/mock'
    responses.add(
        method=responses.GET,
        url=mock_url,
        body="This is string http response",
        status=200,
    )
    tracker = Tracker(
        sender_id="sender1",
        slots={"bot": "5f50fd0a56b698ca10d35d2e",
               "http_action_config_test_run": "test_run_with_get_string_http_response_placeholder_required"},
        events=[{"event1": "hello"}, {"event2": "how are you"}],
        paused=False,
        latest_message={'text': 'get intents', 'intent_ranking': [{'name': 'test_run'}]},
        followup_action=None, active_loop=None, latest_action_name=None)
    dispatcher = CollectingDispatcher()
    HttpActionConfig(
        auth_token="",
        action_name="test_run_with_get_string_http_response_placeholder_required",
        response="The value of ${a.b.3} in ${a.b.d.0} is ${a.b.d}",
        http_url=mock_url,
        request_method="GET",
        params_list=None,
        bot="5f50fd0a56b698ca10d35d2e",
        user="user"
    ).save().to_mongo().to_dict()
    actual = HttpAction().run(dispatcher, tracker, None)
    assert actual is not None
    assert str(actual[0]['name']) == 'I have failed to process your request'
def test_attach_response_no_placeholder(self):
    """Text without ${RESPONSE} is returned unchanged."""
    text = "This has no placeholder"
    assert ActionUtility.attach_response(text, {"a": "b"}) == text
def test_attach_response(self):
    """${RESPONSE} is replaced by the dict's repr; the literal extra '$' survives."""
    result = ActionUtility.attach_response("I want $${RESPONSE}", {"dollars": "51"})
    assert result == "I want ${'dollars': '51'}"
def test_attach_response_int(self):
    """${RESPONSE} substitution also works for non-dict payloads such as ints."""
    result = ActionUtility.attach_response("I want $${RESPONSE}", 51)
    assert result == 'I want $51'
def test_retrieve_value_from_response(self):
    """Dotted keys map to ${...} entries holding the extracted values."""
    payload = {
        "a": {
            "b": {
                "3": 2,
                "43": 30,
                "c": [],
                "d": ['red', 'buggy', 'bumpers'],
            }
        }
    }
    key_values = ActionUtility.retrieve_value_from_response(["a.b.3", 'a.b'], payload)
    assert key_values is not None
    assert key_values['${a.b.3}'] == 2
    nested = key_values['${a.b}']
    assert nested is not None
    assert nested['3'] == 2
    assert nested['d'][0] == 'red'
def test_retrieve_value_from_response_invalid_key(self):
    """Unknown dotted keys surface as an HttpActionFailure naming the missing key."""
    payload = {
        "a": {
            "b": {
                "3": 2,
                "43": 30,
                "c": [],
                "d": ['red', 'buggy', 'bumpers'],
            }
        }
    }
    error = None
    try:
        ActionUtility.retrieve_value_from_response(["d.e.f", 'g.h'], payload)
    except HttpActionFailure as e:
        error = str(e)
    assert error == "Unable to retrieve value for key from HTTP response: 'd'"
| 44.404632 | 196 | 0.590587 | 3,525 | 32,593 | 5.217589 | 0.064965 | 0.035341 | 0.018541 | 0.017399 | 0.856405 | 0.822912 | 0.806111 | 0.788549 | 0.759841 | 0.718845 | 0 | 0.02277 | 0.285859 | 32,593 | 733 | 197 | 44.465211 | 0.7674 | 0.013193 | 0 | 0.703988 | 0 | 0.013804 | 0.196007 | 0.035544 | 0 | 0 | 0 | 0 | 0.165644 | 1 | 0.069018 | false | 0 | 0.018405 | 0.001534 | 0.090491 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
db96f671289f5d5ebfa643540eeefaad78fd620b | 8,100 | py | Python | pypower/t/t_modcost.py | Bengt/PYPOWER | 78a0f8d4765d147f8237e9a905ef871508ecfee7 | [
"BSD-3-Clause"
] | 221 | 2015-01-03T23:18:11.000Z | 2022-03-27T10:21:40.000Z | pypower/t/t_modcost.py | Bengt/PYPOWER | 78a0f8d4765d147f8237e9a905ef871508ecfee7 | [
"BSD-3-Clause"
] | 33 | 2015-05-12T08:48:02.000Z | 2021-11-23T10:35:21.000Z | pypower/t/t_modcost.py | Bengt/PYPOWER | 78a0f8d4765d147f8237e9a905ef871508ecfee7 | [
"BSD-3-Clause"
] | 114 | 2015-02-02T15:07:38.000Z | 2022-03-22T17:01:55.000Z | # Copyright (c) 1996-2015 PSERC. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
"""Tests for code in C{modcost}.
"""
from numpy import array
from pypower.totcost import totcost
from pypower.modcost import modcost
from pypower.t.t_begin import t_begin
from pypower.t.t_is import t_is
from pypower.t.t_end import t_end
def t_modcost(quiet=False):
    """Tests for code in C{modcost}.

    Exercises all four modification types (SCALE_F, SCALE_X, SHIFT_F, SHIFT_X)
    against quadratic, 4th-order polynomial and piecewise-linear (generator
    and load) cost rows: totcost of the modified cost table, with the
    modification undone on the outside, must match the original costs.

    @param quiet: if True, suppress test-harness progress output
    @author: Ray Zimmerman (PSERC Cornell)
    """
    n_tests = 80

    t_begin(n_tests, quiet)

    ## generator cost data
    #    1    startup    shutdown    n    x1    y1    ...    xn    yn
    #    2    startup    shutdown    n    c(n-1)    ...    c0
    gencost0 = array([
        [2, 0, 0, 3, 0.01, 0.1, 1, 0, 0, 0, 0, 0],
        [2, 0, 0, 5, 0.0006, 0.005, 0.04, 0.3, 2, 0, 0, 0],
        [1, 0, 0, 4, 0, 0, 10, 200, 20, 600, 30, 1200],
        [1, 0, 0, 4, -30, -2400, -20, -1800, -10, -1000, 0, 0]
    ])

    # scale cost (f) by 5: totcost / 5 must reproduce the original costs
    gencost = modcost(gencost0, 5, 'SCALE_F')

    ##-----  POLYSHIFT  -----
    t = 'modcost SCALE_F - quadratic'
    t_is(totcost(gencost, array([0, 0, 0, 0])) / 5, [1, 2, 0, 0], 8, t)
    t_is(totcost(gencost, array([1, 0, 0, 0])) / 5, [1.11, 2, 0, 0], 8, t)
    t_is(totcost(gencost, array([2, 0, 0, 0])) / 5, [1.24, 2, 0, 0], 8, t)

    t = 'modcost SCALE_F - 4th order polynomial'
    t_is(totcost(gencost, array([0, 0, 0, 0])) / 5, [1, 2, 0, 0], 8, t)
    t_is(totcost(gencost, array([0, 1, 0, 0])) / 5, [1, 2.3456, 0, 0], 8, t)
    t_is(totcost(gencost, array([0, 2, 0, 0])) / 5, [1, 2.8096, 0, 0], 8, t)

    t = 'modcost SCALE_F - pwl (gen)'
    t_is(totcost(gencost, array([0, 0, 5, 0 ])) / 5, [1, 2, 100, 0], 8, t)
    t_is(totcost(gencost, array([0, 0, 10, 0])) / 5, [1, 2, 200, 0], 8, t)
    t_is(totcost(gencost, array([0, 0, 15, 0])) / 5, [1, 2, 400, 0], 8, t)
    t_is(totcost(gencost, array([0, 0, 20, 0])) / 5, [1, 2, 600, 0], 8, t)
    t_is(totcost(gencost, array([0, 0, 25, 0])) / 5, [1, 2, 900, 0], 8, t)
    t_is(totcost(gencost, array([0, 0, 30, 0])) / 5, [1, 2, 1200, 0], 8, t)
    t_is(totcost(gencost, array([0, 0, 35, 0])) / 5, [1, 2, 1500, 0], 8, t)

    t = 'modcost SCALE_F - pwl (load)'
    t_is(totcost(gencost, array([0, 0, 0, -5 ])) / 5, [1, 2, 0, -500], 8, t)
    t_is(totcost(gencost, array([0, 0, 0, -10])) / 5, [1, 2, 0, -1000], 8, t)
    t_is(totcost(gencost, array([0, 0, 0, -15])) / 5, [1, 2, 0, -1400], 8, t)
    t_is(totcost(gencost, array([0, 0, 0, -20])) / 5, [1, 2, 0, -1800], 8, t)
    t_is(totcost(gencost, array([0, 0, 0, -25])) / 5, [1, 2, 0, -2100], 8, t)
    t_is(totcost(gencost, array([0, 0, 0, -30])) / 5, [1, 2, 0, -2400], 8, t)
    t_is(totcost(gencost, array([0, 0, 0, -35])) / 5, [1, 2, 0, -2700], 8, t)

    # scale quantity (x) by 2: evaluating at 2*x must reproduce the original costs
    gencost = modcost(gencost0, 2, 'SCALE_X')

    t = 'modcost SCALE_X - quadratic'
    t_is(totcost(gencost, array([0, 0, 0, 0]) * 2), [1, 2, 0, 0], 8, t)
    t_is(totcost(gencost, array([1, 0, 0, 0]) * 2), [1.11, 2, 0, 0], 8, t)
    t_is(totcost(gencost, array([2, 0, 0, 0]) * 2), [1.24, 2, 0, 0], 8, t)

    t = 'modcost SCALE_X - 4th order polynomial'
    t_is(totcost(gencost, array([0, 0, 0, 0]) * 2), [1, 2, 0, 0], 8, t)
    t_is(totcost(gencost, array([0, 1, 0, 0]) * 2), [1, 2.3456, 0, 0], 8, t)
    t_is(totcost(gencost, array([0, 2, 0, 0]) * 2), [1, 2.8096, 0, 0], 8, t)

    t = 'modcost SCALE_X - pwl (gen)'
    t_is(totcost(gencost, array([0, 0, 5, 0 ]) * 2), [1, 2, 100, 0], 8, t)
    t_is(totcost(gencost, array([0, 0, 10, 0]) * 2), [1, 2, 200, 0], 8, t)
    t_is(totcost(gencost, array([0, 0, 15, 0]) * 2), [1, 2, 400, 0], 8, t)
    t_is(totcost(gencost, array([0, 0, 20, 0]) * 2), [1, 2, 600, 0], 8, t)
    t_is(totcost(gencost, array([0, 0, 25, 0]) * 2), [1, 2, 900, 0], 8, t)
    t_is(totcost(gencost, array([0, 0, 30, 0]) * 2), [1, 2, 1200, 0], 8, t)
    t_is(totcost(gencost, array([0, 0, 35, 0]) * 2), [1, 2, 1500, 0], 8, t)

    t = 'modcost SCALE_X - pwl (load)'
    t_is(totcost(gencost, array([0, 0, 0, -5 ]) * 2), [1, 2, 0, -500], 8, t)
    t_is(totcost(gencost, array([0, 0, 0, -10]) * 2), [1, 2, 0, -1000], 8, t)
    t_is(totcost(gencost, array([0, 0, 0, -15]) * 2), [1, 2, 0, -1400], 8, t)
    t_is(totcost(gencost, array([0, 0, 0, -20]) * 2), [1, 2, 0, -1800], 8, t)
    t_is(totcost(gencost, array([0, 0, 0, -25]) * 2), [1, 2, 0, -2100], 8, t)
    t_is(totcost(gencost, array([0, 0, 0, -30]) * 2), [1, 2, 0, -2400], 8, t)
    t_is(totcost(gencost, array([0, 0, 0, -35]) * 2), [1, 2, 0, -2700], 8, t)

    # shift cost (f) by 3: totcost - 3 must reproduce the original costs
    gencost = modcost(gencost0, 3, 'SHIFT_F')

    t = 'modcost SHIFT_F - quadratic'
    t_is(totcost(gencost, array([0, 0, 0, 0])) - 3, [1, 2, 0, 0], 8, t)
    t_is(totcost(gencost, array([1, 0, 0, 0])) - 3, [1.11, 2, 0, 0], 8, t)
    t_is(totcost(gencost, array([2, 0, 0, 0])) - 3, [1.24, 2, 0, 0], 8, t)

    t = 'modcost SHIFT_F - 4th order polynomial'
    t_is(totcost(gencost, array([0, 0, 0, 0])) - 3, [1, 2, 0, 0], 8, t)
    t_is(totcost(gencost, array([0, 1, 0, 0])) - 3, [1, 2.3456, 0, 0], 8, t)
    t_is(totcost(gencost, array([0, 2, 0, 0])) - 3, [1, 2.8096, 0, 0], 8, t)

    t = 'modcost SHIFT_F - pwl (gen)'
    t_is(totcost(gencost, array([0, 0, 5, 0 ])) - 3, [1, 2, 100, 0], 8, t)
    t_is(totcost(gencost, array([0, 0, 10, 0])) - 3, [1, 2, 200, 0], 8, t)
    t_is(totcost(gencost, array([0, 0, 15, 0])) - 3, [1, 2, 400, 0], 8, t)
    t_is(totcost(gencost, array([0, 0, 20, 0])) - 3, [1, 2, 600, 0], 8, t)
    t_is(totcost(gencost, array([0, 0, 25, 0])) - 3, [1, 2, 900, 0], 8, t)
    t_is(totcost(gencost, array([0, 0, 30, 0])) - 3, [1, 2, 1200, 0], 8, t)
    t_is(totcost(gencost, array([0, 0, 35, 0])) - 3, [1, 2, 1500, 0], 8, t)

    t = 'modcost SHIFT_F - pwl (load)'
    t_is(totcost(gencost, array([0, 0, 0, -5 ])) - 3, [1, 2, 0, -500], 8, t)
    t_is(totcost(gencost, array([0, 0, 0, -10])) - 3, [1, 2, 0, -1000], 8, t)
    t_is(totcost(gencost, array([0, 0, 0, -15])) - 3, [1, 2, 0, -1400], 8, t)
    t_is(totcost(gencost, array([0, 0, 0, -20])) - 3, [1, 2, 0, -1800], 8, t)
    t_is(totcost(gencost, array([0, 0, 0, -25])) - 3, [1, 2, 0, -2100], 8, t)
    t_is(totcost(gencost, array([0, 0, 0, -30])) - 3, [1, 2, 0, -2400], 8, t)
    t_is(totcost(gencost, array([0, 0, 0, -35])) - 3, [1, 2, 0, -2700], 8, t)

    # shift quantity (x) by -4: evaluating at x - 4 must reproduce the original costs
    gencost = modcost(gencost0, -4, 'SHIFT_X')

    t = 'modcost SHIFT_X - quadratic'
    t_is(totcost(gencost, array([0, 0, 0, 0]) - 4), [1, 2, 0, 0], 8, t)
    t_is(totcost(gencost, array([1, 0, 0, 0]) - 4), [1.11, 2, 0, 0], 8, t)
    t_is(totcost(gencost, array([2, 0, 0, 0]) - 4), [1.24, 2, 0, 0], 8, t)

    t = 'modcost SHIFT_X - 4th order polynomial'
    t_is(totcost(gencost, array([0, 0, 0, 0]) - 4), [1, 2, 0, 0], 8, t)
    t_is(totcost(gencost, array([0, 1, 0, 0]) - 4), [1, 2.3456, 0, 0], 8, t)
    t_is(totcost(gencost, array([0, 2, 0, 0]) - 4), [1, 2.8096, 0, 0], 8, t)

    t = 'modcost SHIFT_X - pwl (gen)'
    t_is(totcost(gencost, array([0, 0, 5, 0 ]) - 4), [1, 2, 100, 0], 8, t)
    t_is(totcost(gencost, array([0, 0, 10, 0]) - 4), [1, 2, 200, 0], 8, t)
    t_is(totcost(gencost, array([0, 0, 15, 0]) - 4), [1, 2, 400, 0], 8, t)
    t_is(totcost(gencost, array([0, 0, 20, 0]) - 4), [1, 2, 600, 0], 8, t)
    t_is(totcost(gencost, array([0, 0, 25, 0]) - 4), [1, 2, 900, 0], 8, t)
    t_is(totcost(gencost, array([0, 0, 30, 0]) - 4), [1, 2, 1200, 0], 8, t)
    t_is(totcost(gencost, array([0, 0, 35, 0]) - 4), [1, 2, 1500, 0], 8, t)

    t = 'modcost SHIFT_X - pwl (load)'
    t_is(totcost(gencost, array([0, 0, 0, -5 ]) - 4), [1, 2, 0, -500], 8, t)
    t_is(totcost(gencost, array([0, 0, 0, -10]) - 4), [1, 2, 0, -1000], 8, t)
    t_is(totcost(gencost, array([0, 0, 0, -15]) - 4), [1, 2, 0, -1400], 8, t)
    t_is(totcost(gencost, array([0, 0, 0, -20]) - 4), [1, 2, 0, -1800], 8, t)
    t_is(totcost(gencost, array([0, 0, 0, -25]) - 4), [1, 2, 0, -2100], 8, t)
    t_is(totcost(gencost, array([0, 0, 0, -30]) - 4), [1, 2, 0, -2400], 8, t)
    t_is(totcost(gencost, array([0, 0, 0, -35]) - 4), [1, 2, 0, -2700], 8, t)

    t_end()
if __name__ == '__main__':
    # run the modcost test suite with harness output enabled
    t_modcost(quiet=False)
| 48.795181 | 77 | 0.506173 | 1,597 | 8,100 | 2.492173 | 0.068879 | 0.084422 | 0.201005 | 0.341709 | 0.823869 | 0.816583 | 0.803266 | 0.803266 | 0.803266 | 0.757789 | 0 | 0.188358 | 0.244938 | 8,100 | 165 | 78 | 49.090909 | 0.462394 | 0.050988 | 0 | 0.067797 | 0 | 0 | 0.067381 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.008475 | false | 0 | 0.050847 | 0 | 0.059322 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
dbb416d1e99f9f7ad21d8ae0e6abc700a6062667 | 2,089 | py | Python | tests/duration/test_arithmetic.py | Sn3akyP3t3/pendulum | 7ce170bdc64199d74e09e347402983f1bb015f63 | [
"MIT"
] | 2 | 2021-11-08T02:45:29.000Z | 2021-11-08T09:41:03.000Z | tests/duration/test_arithmetic.py | Sn3akyP3t3/pendulum | 7ce170bdc64199d74e09e347402983f1bb015f63 | [
"MIT"
] | null | null | null | tests/duration/test_arithmetic.py | Sn3akyP3t3/pendulum | 7ce170bdc64199d74e09e347402983f1bb015f63 | [
"MIT"
] | 1 | 2019-01-26T17:42:15.000Z | 2019-01-26T17:42:15.000Z | import pendulum
from ..conftest import assert_duration
def test_multiply():
    """Multiplication scales a duration from either side and normalizes carries."""
    base = pendulum.duration(days=6, seconds=34, microseconds=522222)
    for doubled in (base * 2, 2 * base):
        assert isinstance(doubled, pendulum.Duration)
        assert_duration(doubled, 0, 0, 1, 5, 0, 1, 9, 44444)

    with_calendar = pendulum.duration(years=2, months=3, weeks=4, days=6,
                                      seconds=34, microseconds=522222)
    doubled = 2 * with_calendar
    assert isinstance(doubled, pendulum.Duration)
    assert_duration(doubled, 4, 6, 9, 5, 0, 1, 9, 44444)
def test_divide():
    """True division halves a duration while keeping microsecond precision."""
    cases = [
        (pendulum.duration(days=2, seconds=34, microseconds=522222),
         (0, 0, 0, 1, 0, 0, 17, 261111)),
        (pendulum.duration(days=2, seconds=35, microseconds=522222),
         (0, 0, 0, 1, 0, 0, 17, 761111)),
        (pendulum.duration(years=2, months=4, days=2, seconds=35, microseconds=522222),
         (1, 2, 0, 1, 0, 0, 17, 761111)),
    ]
    for duration, expected in cases:
        half = duration / 2
        assert isinstance(half, pendulum.Duration)
        assert_duration(half, *expected)
def test_floor_divide():
    """Floor division truncates the result for each divisor."""
    cases = [
        (pendulum.duration(days=2, seconds=34, microseconds=522222), 2,
         (0, 0, 0, 1, 0, 0, 17, 261111)),
        (pendulum.duration(days=2, seconds=35, microseconds=522222), 3,
         (0, 0, 0, 0, 16, 0, 11, 840740)),
        (pendulum.duration(years=2, months=4, days=2, seconds=34, microseconds=522222), 2,
         (1, 2, 0, 1, 0, 0, 17, 261111)),
        (pendulum.duration(years=2, months=4, days=2, seconds=35, microseconds=522222), 3,
         (0, 1, 0, 0, 16, 0, 11, 840740)),
    ]
    for duration, divisor, expected in cases:
        result = duration // divisor
        assert isinstance(result, pendulum.Duration)
        assert_duration(result, *expected)
| 29.842857 | 95 | 0.661082 | 315 | 2,089 | 4.336508 | 0.120635 | 0.234261 | 0.131772 | 0.197657 | 0.933382 | 0.926794 | 0.890922 | 0.890922 | 0.890922 | 0.88653 | 0 | 0.147236 | 0.203447 | 2,089 | 69 | 96 | 30.275362 | 0.673678 | 0 | 0 | 0.688889 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.466667 | 1 | 0.066667 | false | 0 | 0.044444 | 0 | 0.111111 | 0 | 0 | 0 | 0 | null | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
917560fe29651f4150838ba0e7a288830a7b2667 | 14,378 | py | Python | fabricate_bak/Archived/A4 size/baffles.py | wyolum/ClockFOUR | b41ed8ab55633fedfdef0fe9ce99d7dc2fd69f7d | [
"MIT"
] | 3 | 2015-01-09T16:40:27.000Z | 2019-11-11T20:27:32.000Z | fabricate_bak/Archived/A4 size/baffles.py | wyolum/ClockFOUR | b41ed8ab55633fedfdef0fe9ce99d7dc2fd69f7d | [
"MIT"
] | null | null | null | fabricate_bak/Archived/A4 size/baffles.py | wyolum/ClockFOUR | b41ed8ab55633fedfdef0fe9ce99d7dc2fd69f7d | [
"MIT"
] | null | null | null | import os.path
from reportlab.pdfbase import pdfmetrics
from reportlab.pdfbase.ttfonts import TTFont
import glob
import os.path
import PIL.Image
from numpy import array, sin, cos, dot, arange
from constants import *
from copy import deepcopy
from reportlab.pdfgen import canvas
from reportlab.graphics import renderPDF
from reportlab.graphics.shapes import Drawing, Group, String, Circle, Rect
from reportlab.platypus import Paragraph, SimpleDocTemplate, Table, TableStyle
from cnc import MyPath
def create_baffle(baffle_height,
                  baffle_thickness,
                  n_notch,
                  delta,
                  overhang=0,
                  overhang_height=None,
                  overhang_taper=False,
                  margin=MARGIN):
    '''
    Build a symmetric baffle outline by delegating to asym_baffle with the
    same overhang settings applied to both ends.

    delta = DX/DY
    overhang = amount of extra plastic from center of last notch
    overhang_height = height of overhang. if None, baffle_height
    overhang_taper = straight~False, tapered~True
    margin = extra gap for slots
    '''
    return asym_baffle(baffle_height,
                       baffle_thickness,
                       n_notch,
                       delta,
                       overhangs=(overhang, overhang),
                       overhang_heights=(overhang_height, overhang_height),
                       overhang_tapers=(overhang_taper, overhang_taper),
                       # bug fix: previously passed margin=MARGIN (the module
                       # constant), silently ignoring the caller's margin value
                       margin=margin)
def asym_baffle(baffle_height,
                baffle_thickness,
                n_notch,
                delta,
                overhangs=(0,0),
                overhang_heights=(None,None),
                overhang_tapers=(False, False),
                board_catches=(False, False),
                margin=MARGIN):
    '''
    Build a CNC outline (a MyPath) for a baffle strip with possibly
    different overhangs on the two ends.

    delta = DX/DY
    overhangs = amount of extra plastic from center of last notch
    overhang_heights = height of overhang. if None, baffle_height
    overhang_tapers = straight~False, tapered~True,
    board_catches = hooks to grab the board and hold the baffle in place.
    margin = extra gap for slots
    '''
    # NOTE(review): board_catches is accepted but never referenced in this
    # function body — confirm whether it was meant to be used (peggy_baffle
    # implements the equivalent feature as board_hooks).
    # Default each overhang height to the full baffle height.
    overhang_heights = list(overhang_heights)
    for i in range(2):
        if overhang_heights[i] is None:
            overhang_heights[i] = baffle_height

    p = MyPath()
    p.moveTo(0, 0)
    # Left-hand overhang tab (only drawn when requested).
    if overhangs[0] > 0:
        p.lineTo(-overhangs[0], 0)
        if overhang_tapers[0]:
            # tapered: outer edge rises only to half the overhang height
            p.lineTo(-overhangs[0], overhang_heights[0]/2.)
        else:
            p.lineTo(-overhangs[0], overhang_heights[0])
        p.lineTo(-baffle_thickness / 2. - margin, overhang_heights[0])
        p.lineTo(-baffle_thickness / 2. - margin,
                 baffle_height / 2 - margin)
        p.lineTo(0, baffle_height / 2 - margin)
    # Interlocking notches: each notch is a slot of width
    # baffle_thickness + 2*margin cut to half the baffle height.
    for i in range(n_notch):
        p.lineTo(i * delta + baffle_thickness / 2. + margin,
                 baffle_height / 2 - margin)
        p.lineTo(i * delta + baffle_thickness / 2. + margin,
                 baffle_height)
        p.lineTo((i + 1) * delta - baffle_thickness / 2. - margin,
                 baffle_height)
        p.lineTo((i + 1) * delta - baffle_thickness / 2. - margin,
                 baffle_height / 2 - margin)
        p.lineTo((i + 1) * delta,
                 baffle_height / 2 - margin)
    # Right-hand overhang tab, then close the outline back to the origin.
    if overhangs[1] > 0:
        p.lineTo(n_notch * delta + baffle_thickness / 2 + margin,
                 baffle_height / 2 - margin)
        p.lineTo(n_notch * delta + baffle_thickness / 2 + margin,
                 overhang_heights[1])
        if overhang_tapers[1]:
            p.lineTo(n_notch * delta + overhangs[1], overhang_heights[1]/2)
        else:
            p.lineTo(n_notch * delta + overhangs[1], overhang_heights[1])
        p.lineTo(n_notch * delta + overhangs[1], 0)
    p.lineTo(n_notch * delta, 0)
    p.lineTo(0, 0)
    return p
BOARD_THICKNESS = 0.06 * inch  # nominal PCB thickness used by the board-hook features

def peggy_baffle(baffle_height,
                 baffle_thickness,
                 n_notch,
                 delta,
                 overhangs=(0,0),
                 overhang_heights=(None,None),
                 overhang_tapers=(False, False),
                 board_hooks=(False, False),
                 board_hooks_up=False,
                 margin=MARGIN,
                 skip_notches=()):
    '''
    Build a CNC outline (a MyPath) for a Peggy-style baffle strip with
    optional board hooks and skippable notches.

    delta = DX/DY
    overhangs = amount of extra plastic from center of last notch
    overhang_heights = height of overhang. if None, baffle_height
    overhang_tapers = straight~False, tapered~True,
    board_hooks = hooks to grab the board and hold the baffle in place.
    board_hooks_up = hook orientation (True for "h" baffles, False for "v"
        baffles, per the inline comments below)
    margin = extra gap for slots
    skip_notches = indices of notches to leave uncut
    '''
    # Default each overhang height to the full baffle height.
    overhang_heights = list(overhang_heights)
    for i in range(2):
        if overhang_heights[i] is None:
            overhang_heights[i] = baffle_height
    p = MyPath()
    p.moveTo(0, 0)
    # Left-hand overhang tab / board hook.
    if overhangs[0] > 0:
        p.lineTo(-overhangs[0], 0)
        if overhang_tapers[0]:
            p.lineTo(-overhangs[0], overhang_heights[0]/2.)
        elif board_hooks[0]:
            if board_hooks_up: ## h baffles
                # hook wraps over the top of the board by BOARD_THICKNESS
                p.lineTo(-overhangs[0] - board_hooks[0], 0)
                p.lineTo(-overhangs[0] - board_hooks[0],
                         overhang_heights[0] + BOARD_THICKNESS)
                p.lineTo(-overhangs[0],
                         overhang_heights[0] + BOARD_THICKNESS)
                p.lineTo(-overhangs[0],
                         overhang_heights[0])
            else: ## v baffles
                # hook wraps under the board by BOARD_THICKNESS
                p.lineTo(-overhangs[0], -BOARD_THICKNESS)
                p.lineTo(-overhangs[0] - board_hooks[0], -BOARD_THICKNESS)
                p.lineTo(-overhangs[0] - board_hooks[0], baffle_height)
                p.lineTo(-overhangs[0], baffle_height)
        else:
            p.lineTo(-overhangs[0], overhang_heights[0])
        p.lineTo(-baffle_thickness / 2. - margin, overhang_heights[0])
    # Interlocking notches along the top edge; indices in skip_notches are
    # bridged over without cutting a slot.
    for i in range(n_notch - 1):
        if i in skip_notches:
            p.lineTo((i + 0) * delta, baffle_height)
        else:
            p.lineTo(i * delta - baffle_thickness / 2. - margin,
                     baffle_height)
            p.lineTo(i * delta - baffle_thickness / 2. - margin,
                     baffle_height/2 - margin)
            p.lineTo(i * delta + baffle_thickness / 2. + margin,
                     baffle_height/2 - margin)
            p.lineTo(i * delta + baffle_thickness / 2. + margin,
                     baffle_height)
            p.lineTo((i + 1) * delta - baffle_thickness / 2. - margin,
                     baffle_height)
    if n_notch - 1 not in skip_notches:
        ## do last notch
        i = n_notch - 1
        p.lineTo(i * delta - baffle_thickness / 2. - margin,
                 baffle_height)
        p.lineTo(i * delta - baffle_thickness / 2. - margin,
                 baffle_height/2 - margin)
        p.lineTo(i * delta + baffle_thickness / 2. + margin,
                 baffle_height/2 - margin)
    # Right-hand overhang tab / board hook, then close back to the origin.
    if overhangs[1] > 0:
        p.lineTo((n_notch - 1) * delta + baffle_thickness / 2 + margin,
                 baffle_height / 2 - margin)
        p.lineTo((n_notch - 1) * delta + baffle_thickness / 2 + margin,
                 overhang_heights[1])
        if overhang_tapers[1]:
            p.lineTo((n_notch - 1) * delta + overhangs[1], overhang_heights[1]/2)
        elif board_hooks[1]:
            x0 = (n_notch - 1) * delta + overhangs[1]
            if board_hooks_up:
                p.lineTo(x0, overhang_heights[1])
                p.lineTo(x0, overhang_heights[1] + BOARD_THICKNESS)
                p.lineTo(x0 + board_hooks[1],
                         overhang_heights[1] + BOARD_THICKNESS)
                p.lineTo(x0 + board_hooks[1], 0)
                p.lineTo(x0, 0)
            else:
                p.lineTo(x0 + board_hooks[1], baffle_height)
                p.lineTo(x0 + board_hooks[1], -BOARD_THICKNESS)
                p.lineTo(x0, -BOARD_THICKNESS)
                p.lineTo(x0, 0)
        else:
            p.lineTo((n_notch - 1) * delta + overhangs[1], overhang_heights[1])
        p.lineTo((n_notch - 1) * delta + overhangs[1], 0)
    p.lineTo((n_notch - 1) * delta, 0)
    p.lineTo(0, 0)
    return p
def TiM_h_baffle(baffle_height,
                 baffle_thickness,
                 n_notch,
                 delta,
                 overhangs=(0,0),
                 overhang_heights=(None,None),
                 overhang_tapers=(False, False),
                 board_hooks=(False, False),
                 board_hooks_up=False,
                 margin=MARGIN,
                 skip_notches=()):
    '''
    Build a CNC outline (a MyPath) for a TiM horizontal baffle strip.

    delta = DX/DY
    overhangs = amount of extra plastic from center of last notch
    overhang_heights = height of overhang. if None, baffle_height
    overhang_tapers = straight~False, tapered~True,
    board_hooks = hooks to grab the board and hold the baffle in place.
    board_hooks_up = hook orientation; the hook geometry below is only
        drawn for the not-board_hooks_up ("h baffles") case
    margin = extra gap for slots
    skip_notches = indices of notches to leave uncut
    '''
    # Default each overhang height to the full baffle height.
    overhang_heights = list(overhang_heights)
    for i in range(2):
        if overhang_heights[i] is None:
            overhang_heights[i] = baffle_height
    p = MyPath()
    p.moveTo(0, 0)
    # Left-hand board hook / overhang. The 0.25 mm offsets leave a small
    # clearance next to the board edge.
    if overhangs[0] > 0:
        if board_hooks[0]:
            if not board_hooks_up: ## h baffles
                p.lineTo(-0.25 * mm, 0)
                p.lineTo(-0.25 * mm, -BOARD_THICKNESS)
                p.lineTo( - board_hooks[0], -BOARD_THICKNESS)
                p.lineTo( - board_hooks[0], baffle_height)
                p.lineTo(-overhangs[0], baffle_height)
        else:
            p.lineTo(-overhangs[0], overhang_heights[0])
        p.lineTo(-baffle_thickness / 2. - margin, overhang_heights[0])
    # Interlocking notches; indices in skip_notches are bridged over.
    for i in range(n_notch - 1):
        if i in skip_notches:
            p.lineTo((i + 0) * delta, baffle_height)
        else:
            p.lineTo(i * delta - baffle_thickness / 2. - margin,
                     baffle_height)
            p.lineTo(i * delta - baffle_thickness / 2. - margin,
                     baffle_height/2 - margin)
            p.lineTo(i * delta + baffle_thickness / 2. + margin,
                     baffle_height/2 - margin)
            p.lineTo(i * delta + baffle_thickness / 2. + margin,
                     baffle_height)
            p.lineTo((i + 1) * delta - baffle_thickness / 2. - margin,
                     baffle_height)
    if n_notch - 1 not in skip_notches:
        ## do last notch
        i = n_notch - 1
        p.lineTo(i * delta - baffle_thickness / 2. - margin,
                 baffle_height)
        p.lineTo(i * delta - baffle_thickness / 2. - margin,
                 baffle_height/2 - margin)
        p.lineTo(i * delta + baffle_thickness / 2. + margin,
                 baffle_height/2 - margin)
    # Right-hand overhang / board hook, then close back to the origin.
    if overhangs[1] > 0:
        p.lineTo((n_notch - 1) * delta + baffle_thickness / 2 + margin,
                 baffle_height / 2 - margin)
        p.lineTo((n_notch - 1) * delta + baffle_thickness / 2 + margin,
                 overhang_heights[1])
        if overhang_tapers[1]:
            p.lineTo((n_notch - 1) * delta + overhangs[1], overhang_heights[1]/2)
        elif board_hooks[1]:
            x0 = (n_notch - 1) * delta
            if not board_hooks_up:
                p.lineTo(x0 + board_hooks[1], baffle_height)
                p.lineTo(x0 + board_hooks[1], -BOARD_THICKNESS)
                p.lineTo(x0 + 0.25 * mm, -BOARD_THICKNESS)
                p.lineTo(x0 + 0.25 * mm, 0)
        p.lineTo((n_notch - 1) * delta + MARGIN, 0)
    p.lineTo((n_notch - 1) * delta + MARGIN, 0)
    p.lineTo(0, 0)
    return p
def TiM_v_baffle(baffle_height,
                 baffle_thickness,
                 n_notch,
                 delta,
                 overhangs=(0,0),
                 overhang_heights=(None,None),
                 overhang_tapers=(False, False),
                 margin=MARGIN,
                 skip_notches=()):
    '''
    Trace the closed outline of a vertical ("v") baffle with n_notch
    half-depth slots cut from the top edge so it can interlock with
    matching horizontal baffles.

    delta = DX/DY spacing between notch centers
    overhangs = amount of extra plastic from center of last notch
    overhang_heights = height of overhang. if None, baffle_height
    overhang_tapers = straight~False, tapered~True,
    margin = extra gap for slots
    skip_notches = NOTE(review): accepted for signature parity with the
        h-baffle builder but not consulted in this function — confirm.

    Returns the MyPath describing the outline.
    '''
    # Normalize: a None overhang height means "full baffle height".
    overhang_heights = list(overhang_heights)
    for i in range(2):
        if overhang_heights[i] is None:
            overhang_heights[i] = baffle_height
    p = MyPath()
    p.moveTo(0, 0)
    # Left side: out to the left overhang, then up past the board seat.
    # NOTE(review): only overhangs[0]/overhangs[1] and index [1] of
    # overhang_heights/overhang_tapers are used below — the [0] entries of
    # the latter two are currently ignored; confirm intended.
    p.lineTo(-overhangs[0], 0)
    p.lineTo(-overhangs[0], baffle_height + BOARD_THICKNESS)
    p.lineTo(-baffle_thickness / 2. - margin, baffle_height + BOARD_THICKNESS)
    # Top edge: cut one interlock slot per interior notch center.  Each slot
    # is baffle_thickness + 2*margin wide and reaches down to half the
    # baffle height (less margin) so two baffles mesh at mid-height.
    for i in range(n_notch - 1):
        p.lineTo(i * delta - baffle_thickness / 2. - margin,
                 baffle_height/2 - margin)
        p.lineTo(i * delta + baffle_thickness / 2. + margin,
                 baffle_height/2 - margin)
        p.lineTo(i * delta + baffle_thickness / 2. + margin,
                 baffle_height)
        p.lineTo((i + 1) * delta - baffle_thickness / 2. - margin,
                 baffle_height)
    ## do last notch
    i = n_notch - 1
    p.lineTo(i * delta - baffle_thickness / 2. - margin,
             baffle_height)
    p.lineTo(i * delta - baffle_thickness / 2. - margin,
             baffle_height/2 - margin)
    p.lineTo(i * delta + baffle_thickness / 2. + margin,
             baffle_height/2 - margin)
    # do last taper
    p.lineTo(i * delta + baffle_thickness / 2 + margin,
             baffle_height + BOARD_THICKNESS)
    # Right side: taper down to half the overhang height, or drop straight
    # from the overhang height, then close the outline back to the origin.
    if overhang_tapers[1]:
        p.lineTo((n_notch - 1) * delta + overhangs[1], overhang_heights[1]/2)
    else:
        p.lineTo((n_notch - 1) * delta + overhangs[1], overhang_heights[1] + BOARD_THICKNESS)
    p.lineTo((n_notch - 1) * delta + overhangs[1], 0)
    p.lineTo((n_notch - 1) * delta, 0)
    p.lineTo(0, 0)
    return p
def folded_h_baffle():
    """
    Build the 33-notch horizontal baffle for the folded design.

    Fixes a defect in the original: the generated path was assigned to a
    local and then discarded (the function implicitly returned None).  The
    path is now returned so callers can actually use it; callers that
    ignored the return value are unaffected.

    Returns the path object produced by c3jr_h_baffle.
    """
    BAFFLE_H = 20.00 * mm - 3.9 * mm   # overall baffle height
    BAFFLE_T = .076 * inch             # material thickness
    dx = 0.4 * inch                    # notch spacing
    dy = 0.7 * inch                    # NOTE(review): unused here — kept for reference
    h_baffle = c3jr_h_baffle(BAFFLE_H,
                             BAFFLE_T,
                             n_notch=33,
                             delta=dx,
                             overhangs=(BAFFLE_T/2, BAFFLE_T/2),
                             overhang_heights=(None, None),
                             overhang_tapers=(False, False),
                             board_hooks=(5*mm, 5*mm),
                             board_hooks_up=False,
                             margin=0.016
                             )
    return h_baffle
def test():
    """Smoke-test create_baffle on a small tapered five-notch baffle."""
    height = 1 * inch
    thickness = .06 * inch
    notch_count = 5
    spacing = .75 * inch
    create_baffle(height,
                  thickness,
                  notch_count,
                  spacing,
                  overhang=.5 * inch,
                  overhang_height=None,
                  overhang_taper=True)
| 38.96477 | 93 | 0.539853 | 1,709 | 14,378 | 4.366881 | 0.071972 | 0.090982 | 0.083612 | 0.114967 | 0.880075 | 0.864398 | 0.838671 | 0.802626 | 0.774353 | 0.771004 | 0 | 0.031796 | 0.35909 | 14,378 | 368 | 94 | 39.070652 | 0.778079 | 0.093337 | 0 | 0.729373 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.023102 | false | 0 | 0.046205 | 0 | 0.085809 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
9175dedef8412962dcbad763c0192f4d93e85c4e | 28,270 | py | Python | local_test/pick-up.py | andre-le/RouteOptimization | 6e16cb1fd857c9dc25f4c5a0c416cfff78544620 | [
"MIT"
] | 10 | 2018-07-01T14:33:13.000Z | 2022-02-27T15:00:44.000Z | local_test/pick-up.py | andre-le/RouteOptimization | 6e16cb1fd857c9dc25f4c5a0c416cfff78544620 | [
"MIT"
] | null | null | null | local_test/pick-up.py | andre-le/RouteOptimization | 6e16cb1fd857c9dc25f4c5a0c416cfff78544620 | [
"MIT"
] | 6 | 2019-08-01T21:50:07.000Z | 2022-01-14T17:26:47.000Z | """Vehicle Routing Problem"""
from __future__ import print_function
from six.moves import xrange
from ortools.constraint_solver import pywrapcp
from ortools.constraint_solver import routing_enums_pb2
#time for tester
import time
import csv
import random
import math
import json
import data_problem
import pickup_constraints
import printer
def return_lambda_gateway_response(code, body):
    """
    Wrap an endpoint response in the shape API Gateway expects from Lambda.

    :param code: HTTP response code (200 for OK), must be an int
    :param body: the actual content of the response (JSON-serializable)
    """
    serialized = json.dumps(body)
    return {"statusCode": code, "body": serialized}
###########################
# Problem Data Definition #
###########################
# test_300 = {
# "points": [[10.777996, 106.702069, 0], [10.760551, 106.674746, 1], [10.784789, 106.644823, 1], [10.798254, 106.751791, 1], [10.816109, 106.669295, 1], [10.772456, 106.664881, 1], [10.769162, 106.696997, 1], [10.864858, 106.683749, 1], [10.790107, 106.720561, 1], [10.757434, 106.694729, 1], [10.795161, 106.719594, 1], [10.798385, 106.695269, 1], [10.79227, 106.654518, 1], [10.795269, 106.701586, 1], [10.767407, 106.641027, 1], [10.806281, 106.571058, 1], [10.729387, 106.726431, 1], [10.840673, 106.650483, 1], [10.876962, 106.6429, 1], [10.768695, 106.68064, 1], [10.747914, 106.669036, 1], [10.77142, 106.652953, 1], [10.765462, 106.662946, 1], [10.771312, 106.70039, 1], [10.753656, 106.647321, 1], [10.762599, 106.687925, 1], [10.802423, 106.732668, 1], [10.784827, 106.688579, 1], [10.763318, 106.677517, 1], [10.835041, 106.687098, 1], [10.775041, 106.670828, 1], [10.752945, 106.662138, 1], [10.780728, 106.689865, 1], [10.730083, 106.705849, 1], [10.825847, 106.690042, 1], [10.754379, 106.691953, 1], [10.758714, 106.700882, 1], [10.761559, 106.656239, 1], [10.765606, 106.670859, 1], [10.795782, 106.692653, 1], [10.818343, 106.566965, 1], [10.776459, 106.677361, 1], [10.78856, 106.770657, 1], [10.774102, 106.688587, 1], [10.79838, 106.660199, 1], [10.777899, 106.680975, 1], [10.727517, 106.67675, 1], [10.759002, 106.688086, 1], [10.780731, 106.670705, 1], [10.782805, 106.660796, 1], [10.752199, 106.655333, 1], [10.808873, 106.670435, 1], [10.736463, 106.730862, 1], [10.767792, 106.691716, 1], [10.75418, 106.68125, 1], [10.757164, 106.670898, 1], [10.767186, 106.703489, 1], [10.773453, 106.703822, 1], [10.79219, 106.678196, 1], [10.750311, 106.643577, 1], [10.751667, 106.654305, 1], [10.768958, 106.652784, 1], [10.795999, 106.683155, 1], [10.834114, 106.728163, 1], [10.765867, 106.66387, 1], [10.740059, 106.617809, 1], [10.765898, 106.691126, 1], [10.791943, 106.644197, 1], [10.863217, 106.762197, 1], [10.797119, 106.710693, 1], [10.803279, 106.657625, 1], [10.794996, 
106.674872, 1], [10.769789, 106.682251, 1], [10.738243, 106.715297, 1], [10.770789, 106.6437, 1], [10.836675, 106.657606, 1], [10.7761, 106.684302, 1], [10.755461, 106.695683, 1], [10.826455, 106.610949, 1], [10.771988, 106.703706, 1], [10.740247, 106.671355, 1], [10.806196, 106.706869, 1], [10.805328, 106.715737, 1], [10.807707, 106.644944, 1], [10.780186, 106.651045, 1], [10.764514, 106.68528, 1], [10.778296, 106.689573, 1], [10.772457, 106.695947, 1], [10.783942, 106.665608, 1], [10.754981, 106.690938, 1], [10.71692, 106.703885, 1], [10.776641, 106.690048, 1], [10.786144, 106.671366, 1], [10.761241, 106.652083, 1], [10.797273, 106.680388, 1], [10.757109, 106.65839, 1], [10.793324, 106.687209, 1], [10.874298, 106.604144, 1], [10.774195, 106.678469, 1], [10.79838, 106.660199, 1], [10.794623, 106.629864, 1], [10.768803, 106.685824, 1], [10.762844, 106.647319, 1], [10.785155, 106.650821, 1], [10.760501, 106.679613, 1], [10.796401, 106.690539, 1], [10.771557, 106.705818, 1], [10.792164, 106.666947, 1], [10.772572, 106.702073, 1], [10.788539, 106.637063, 1], [10.772251, 106.68397, 1], [10.790563, 106.678237, 1], [10.791753, 106.642768, 1], [10.79459, 106.67974, 1], [10.729084, 106.718873, 1], [10.800173, 106.647716, 1], [10.809274, 106.608416, 1], [10.729346, 106.724008, 1], [10.759183, 106.705267, 1], [10.740992, 106.700638, 1], [10.759183, 106.705267, 1], [10.844338, 106.641599, 1], [10.787659, 106.687747, 1], [10.775041, 106.670828, 1], [10.758472, 106.700199, 1], [10.800509, 106.641609, 1], [10.786791, 106.681755, 1], [10.796743, 106.686803, 1], [10.755686, 106.645701, 1], [10.759411, 106.705006, 1], [10.757187, 106.684124, 1], [10.771988, 106.703706, 1], [10.751821, 106.647534, 1], [10.790147, 106.672119, 1], [10.756955, 106.668874, 1], [10.768089, 106.617655, 1], [10.763192, 106.689088, 1], [10.79972, 106.675467, 1], [10.80659, 106.694984, 1], [10.768287, 106.650966, 1], [10.789671, 106.680072, 1], [10.756275, 106.650534, 1], [10.665686, 106.571757, 1], 
[10.810857, 106.70297, 1], [10.737922, 106.614648, 1], [10.804705, 106.692109, 1], [10.865739, 106.727819, 1], [10.813935, 106.717072, 1], [10.771244, 106.659096, 1], [10.791255, 106.688097, 1], [10.800388, 106.718364, 1], [10.836433, 106.617512, 1], [10.775547, 106.701057, 1], [10.759518, 106.679808, 1], [10.827226, 106.600419, 1], [10.780822, 106.654069, 1], [10.784906, 106.621261, 1], [10.751393, 106.675979, 1], [10.796268, 106.696444, 1], [10.764406, 106.678308, 1], [10.796422, 106.629353, 1], [10.780669, 106.670646, 1], [10.771988, 106.703706, 1], [10.754371, 106.72146, 1], [10.812453, 106.689959, 1], [10.788077, 106.684008, 1], [10.768148, 106.615058, 1], [10.722491, 106.712463, 1], [10.82958, 106.637075, 1], [10.790565, 106.689401, 1], [10.766569, 106.667038, 1], [10.820423, 106.631091, 1], [10.726437, 106.711675, 1], [10.798385, 106.695269, 1], [10.797289, 106.65742, 1], [10.733725, 106.670989, 1], [10.788276, 106.682042, 1], [10.836603, 106.687871, 1], [10.765856, 106.704776, 1], [10.750311, 106.643577, 1], [10.747755, 106.634476, 1], [10.79231, 106.662411, 1], [10.787824, 106.602584, 1], [10.806619, 106.698824, 1], [10.757217, 106.651492, 1], [10.770789, 106.6437, 1], [10.772951, 106.687193, 1], [10.814686, 106.670272, 1], [10.822628, 106.632234, 1], [10.796495, 106.663395, 1], [10.755284, 106.626546, 1], [10.849091, 106.649975, 1], [10.767879, 106.693582, 1], [10.81642, 106.669803, 1], [10.815319, 106.674348, 1], [10.817428, 106.693855, 1], [10.779683, 106.695498, 1], [10.776078, 106.680435, 1], [10.766163, 106.617142, 1], [10.85171, 106.725464, 1], [10.707293, 106.727933, 1], [10.754749, 106.586389, 1], [10.806804, 106.646642, 1], [10.785334, 106.69559, 1], [10.751606, 106.663437, 1], [10.749803, 106.624618, 1], [10.814833, 106.672327, 1], [10.793234, 106.703527, 1], [10.834012, 106.66417, 1], [10.788914, 106.676312, 1], [10.808873, 106.670435, 1], [10.802363, 106.640181, 1], [10.867428, 106.621092, 1], [10.787103, 106.674398, 1], [10.795161, 
106.719594, 1], [10.813936, 106.717074, 1], [10.790778, 106.69658, 1], [10.798314, 106.692031, 1], [10.805103, 106.683018, 1], [10.715959, 106.740329, 1], [10.772151, 106.678618, 1], [10.808627, 106.674969, 1], [10.768084, 106.691693, 1], [10.79972, 106.675467, 1], [10.789042, 106.690896, 1], [10.795266, 106.640775, 1], [10.791572, 106.709008, 1], [10.760856, 106.642748, 1], [10.766822, 106.687481, 1], [10.797886, 106.68103, 1], [10.791998, 106.711469, 1], [10.841124, 106.677931, 1], [10.799503, 106.658702, 1], [10.799953, 106.730107, 1], [10.841941, 106.647452, 1], [10.772499, 106.686381, 1], [10.801666, 106.6391, 1], [10.802017, 106.649517, 1], [10.772296, 106.685161, 1], [10.79748, 106.659504, 1], [10.773581, 106.702345, 1], [10.750417, 106.674286, 1], [10.774343, 106.700402, 1], [10.828005, 106.675326, 1], [10.789917, 106.730063, 1], [10.769159, 106.634194, 1], [10.767407, 106.641027, 1], [10.781173, 106.675097, 1], [10.776842, 106.633905, 1], [10.771733, 106.676914, 1], [10.795478, 106.719256, 1], [10.768888, 106.666099, 1], [10.754124, 106.682345, 1], [10.79838, 106.660199, 1], [10.770273, 106.688425, 1], [10.756925, 106.684734, 1], [10.79576, 106.652097, 1], [10.859302, 106.690074, 1], [10.801583, 106.712186, 1], [10.796664, 106.640482, 1], [10.766334, 106.682275, 1], [10.790629, 106.657806, 1], [10.799399, 106.75031, 1], [10.782106, 106.672758, 1], [10.792454, 106.70155, 1], [10.846574, 106.65585, 1], [10.780281, 106.663659, 1], [10.756417, 106.669487, 1], [10.780728, 106.689865, 1], [10.816736, 106.707301, 1], [10.783942, 106.665608, 1], [10.788944, 106.682697, 1], [10.812686, 106.689901, 1], [10.797928, 106.673185, 1], [10.76309, 106.66918, 1], [10.764476, 106.661491, 1], [10.778649, 106.692574, 1], [10.796686, 106.630772, 1], [10.759788, 106.5966, 1], [10.769953, 106.684549, 1], [10.838845, 106.7887, 1], [10.786003, 106.675741, 1], [10.798667, 106.705152, 1], [10.737922, 106.614648, 1], [10.806857, 106.640889, 1], [10.802423, 106.732668, 1], [10.772596, 
106.668798, 1], [10.798241, 106.703955, 1], [10.792834, 106.696065, 1], [10.770302, 106.687757, 1], [10.770334, 106.687204, 1], [10.756529, 106.66713, 1], [10.79843, 106.719368, 1], [10.82269, 106.639492, 1], [10.779712, 106.635861, 1], [10.791073, 106.670851, 1], [10.74634, 106.674372, 1], [10.849445, 106.678617, 1], [10.803224, 106.715623, 1], [10.788438, 106.678386, 1]],
# "vehicle_nums": 25,
# "min_parcels": 15,
# "max_distance": 30000,
# "min_vehicles": False
# }
test_300 = {
"points": [[10.777996, 106.702069, 0], [10.760551, 106.674746, 1], [10.784789, 106.644823, 1], [10.798254, 106.751791, 1], [10.816109, 106.669295, 1], [10.772456, 106.664881, 1], [10.769162, 106.696997, 1], [10.864858, 106.683749, 1], [10.790107, 106.720561, 1], [10.757434, 106.694729, 1], [10.795161, 106.719594, 1], [10.798385, 106.695269, 1], [10.79227, 106.654518, 1], [10.795269, 106.701586, 1], [10.767407, 106.641027, 1], [10.806281, 106.571058, 1], [10.729387, 106.726431, 1], [10.840673, 106.650483, 1], [10.876962, 106.6429, 1], [10.768695, 106.68064, 1], [10.747914, 106.669036, 1], [10.77142, 106.652953, 1], [10.765462, 106.662946, 1], [10.771312, 106.70039, 1], [10.753656, 106.647321, 1], [10.762599, 106.687925, 1], [10.802423, 106.732668, 1], [10.784827, 106.688579, 1], [10.763318, 106.677517, 1], [10.835041, 106.687098, 1], [10.775041, 106.670828, 1], [10.752945, 106.662138, 1], [10.780728, 106.689865, 1], [10.730083, 106.705849, 1], [10.825847, 106.690042, 1], [10.754379, 106.691953, 1], [10.758714, 106.700882, 1], [10.761559, 106.656239, 1], [10.765606, 106.670859, 1], [10.795782, 106.692653, 1], [10.818343, 106.566965, 1], [10.776459, 106.677361, 1], [10.78856, 106.770657, 1], [10.774102, 106.688587, 1], [10.79838, 106.660199, 1], [10.777899, 106.680975, 1], [10.727517, 106.67675, 1], [10.759002, 106.688086, 1], [10.780731, 106.670705, 1], [10.782805, 106.660796, 1], [10.752199, 106.655333, 1], [10.808873, 106.670435, 1], [10.736463, 106.730862, 1], [10.767792, 106.691716, 1], [10.75418, 106.68125, 1], [10.757164, 106.670898, 1], [10.767186, 106.703489, 1], [10.773453, 106.703822, 1], [10.79219, 106.678196, 1], [10.750311, 106.643577, 1], [10.751667, 106.654305, 1], [10.768958, 106.652784, 1], [10.795999, 106.683155, 1], [10.834114, 106.728163, 1], [10.765867, 106.66387, 1], [10.740059, 106.617809, 1], [10.765898, 106.691126, 1], [10.791943, 106.644197, 1], [10.863217, 106.762197, 1], [10.797119, 106.710693, 1], [10.803279, 106.657625, 1], [10.794996, 
106.674872, 1], [10.769789, 106.682251, 1], [10.738243, 106.715297, 1], [10.770789, 106.6437, 1], [10.836675, 106.657606, 1], [10.7761, 106.684302, 1], [10.755461, 106.695683, 1], [10.826455, 106.610949, 1], [10.771988, 106.703706, 1], [10.740247, 106.671355, 1], [10.806196, 106.706869, 1], [10.805328, 106.715737, 1], [10.807707, 106.644944, 1], [10.780186, 106.651045, 1], [10.764514, 106.68528, 1], [10.778296, 106.689573, 1], [10.772457, 106.695947, 1], [10.783942, 106.665608, 1], [10.754981, 106.690938, 1], [10.71692, 106.703885, 1], [10.776641, 106.690048, 1], [10.786144, 106.671366, 1], [10.761241, 106.652083, 1], [10.797273, 106.680388, 1], [10.757109, 106.65839, 1], [10.793324, 106.687209, 1], [10.874298, 106.604144, 1], [10.774195, 106.678469, 1], [10.79838, 106.660199, 1], [10.794623, 106.629864, 1], [10.768803, 106.685824, 1], [10.762844, 106.647319, 1], [10.785155, 106.650821, 1], [10.760501, 106.679613, 1], [10.796401, 106.690539, 1], [10.771557, 106.705818, 1], [10.792164, 106.666947, 1], [10.772572, 106.702073, 1], [10.788539, 106.637063, 1], [10.772251, 106.68397, 1], [10.790563, 106.678237, 1], [10.791753, 106.642768, 1], [10.79459, 106.67974, 1], [10.729084, 106.718873, 1], [10.800173, 106.647716, 1], [10.809274, 106.608416, 1], [10.729346, 106.724008, 1], [10.759183, 106.705267, 1], [10.740992, 106.700638, 1], [10.759183, 106.705267, 1], [10.844338, 106.641599, 1], [10.787659, 106.687747, 1], [10.775041, 106.670828, 1], [10.758472, 106.700199, 1], [10.800509, 106.641609, 1], [10.786791, 106.681755, 1], [10.796743, 106.686803, 1], [10.755686, 106.645701, 1], [10.759411, 106.705006, 1], [10.757187, 106.684124, 1], [10.771988, 106.703706, 1], [10.751821, 106.647534, 1], [10.790147, 106.672119, 1], [10.756955, 106.668874, 1], [10.768089, 106.617655, 1], [10.763192, 106.689088, 1], [10.79972, 106.675467, 1], [10.80659, 106.694984, 1], [10.768287, 106.650966, 1], [10.789671, 106.680072, 1], [10.756275, 106.650534, 1], [10.665686, 106.571757, 1], 
[10.810857, 106.70297, 1], [10.737922, 106.614648, 1], [10.804705, 106.692109, 1], [10.865739, 106.727819, 1], [10.813935, 106.717072, 1], [10.771244, 106.659096, 1], [10.791255, 106.688097, 1], [10.800388, 106.718364, 1], [10.836433, 106.617512, 1], [10.775547, 106.701057, 1], [10.759518, 106.679808, 1], [10.827226, 106.600419, 1], [10.780822, 106.654069, 1], [10.784906, 106.621261, 1], [10.751393, 106.675979, 1], [10.796268, 106.696444, 1], [10.764406, 106.678308, 1], [10.796422, 106.629353, 1], [10.780669, 106.670646, 1], [10.771988, 106.703706, 1], [10.754371, 106.72146, 1], [10.812453, 106.689959, 1], [10.788077, 106.684008, 1], [10.768148, 106.615058, 1], [10.722491, 106.712463, 1], [10.82958, 106.637075, 1], [10.790565, 106.689401, 1], [10.766569, 106.667038, 1], [10.820423, 106.631091, 1], [10.726437, 106.711675, 1], [10.798385, 106.695269, 1], [10.797289, 106.65742, 1], [10.733725, 106.670989, 1], [10.788276, 106.682042, 1], [10.836603, 106.687871, 1], [10.765856, 106.704776, 1], [10.750311, 106.643577, 1], [10.747755, 106.634476, 1], [10.79231, 106.662411, 1], [10.787824, 106.602584, 1], [10.806619, 106.698824, 1], [10.757217, 106.651492, 1], [10.770789, 106.6437, 1], [10.772951, 106.687193, 1], [10.814686, 106.670272, 1], [10.822628, 106.632234, 1], [10.796495, 106.663395, 1], [10.755284, 106.626546, 1], [10.849091, 106.649975, 1], [10.767879, 106.693582, 1], [10.81642, 106.669803, 1], [10.815319, 106.674348, 1], [10.817428, 106.693855, 1], [10.779683, 106.695498, 1], [10.776078, 106.680435, 1], [10.766163, 106.617142, 1], [10.85171, 106.725464, 1], [10.707293, 106.727933, 1], [10.754749, 106.586389, 1], [10.806804, 106.646642, 1], [10.785334, 106.69559, 1], [10.751606, 106.663437, 1], [10.749803, 106.624618, 1], [10.814833, 106.672327, 1], [10.793234, 106.703527, 1], [10.834012, 106.66417, 1], [10.788914, 106.676312, 1], [10.808873, 106.670435, 1], [10.802363, 106.640181, 1], [10.867428, 106.621092, 1], [10.787103, 106.674398, 1], [10.795161, 
106.719594, 1], [10.813936, 106.717074, 1], [10.790778, 106.69658, 1], [10.798314, 106.692031, 1], [10.805103, 106.683018, 1], [10.715959, 106.740329, 1], [10.772151, 106.678618, 1], [10.808627, 106.674969, 1], [10.768084, 106.691693, 1], [10.79972, 106.675467, 1], [10.789042, 106.690896, 1], [10.795266, 106.640775, 1], [10.791572, 106.709008, 1], [10.760856, 106.642748, 1], [10.766822, 106.687481, 1], [10.797886, 106.68103, 1], [10.791998, 106.711469, 1], [10.841124, 106.677931, 1], [10.799503, 106.658702, 1], [10.799953, 106.730107, 1], [10.841941, 106.647452, 1], [10.772499, 106.686381, 1], [10.801666, 106.6391, 1], [10.802017, 106.649517, 1], [10.772296, 106.685161, 1], [10.79748, 106.659504, 1], [10.773581, 106.702345, 1], [10.750417, 106.674286, 1], [10.774343, 106.700402, 1], [10.828005, 106.675326, 1], [10.789917, 106.730063, 1], [10.769159, 106.634194, 1], [10.767407, 106.641027, 1], [10.781173, 106.675097, 1], [10.776842, 106.633905, 1], [10.771733, 106.676914, 1], [10.795478, 106.719256, 1], [10.768888, 106.666099, 1], [10.754124, 106.682345, 1], [10.79838, 106.660199, 1], [10.770273, 106.688425, 1], [10.756925, 106.684734, 1], [10.79576, 106.652097, 1], [10.859302, 106.690074, 1], [10.801583, 106.712186, 1], [10.796664, 106.640482, 1], [10.766334, 106.682275, 1], [10.790629, 106.657806, 1], [10.799399, 106.75031, 1], [10.782106, 106.672758, 1], [10.792454, 106.70155, 1], [10.846574, 106.65585, 1], [10.780281, 106.663659, 1], [10.756417, 106.669487, 1], [10.780728, 106.689865, 1], [10.816736, 106.707301, 1], [10.783942, 106.665608, 1], [10.788944, 106.682697, 1], [10.812686, 106.689901, 1], [10.797928, 106.673185, 1], [10.76309, 106.66918, 1], [10.764476, 106.661491, 1], [10.778649, 106.692574, 1], [10.796686, 106.630772, 1], [10.759788, 106.5966, 1], [10.769953, 106.684549, 1], [10.838845, 106.7887, 1], [10.786003, 106.675741, 1], [10.798667, 106.705152, 1], [10.737922, 106.614648, 1], [10.806857, 106.640889, 1], [10.802423, 106.732668, 1], [10.772596, 
106.668798, 1], [10.798241, 106.703955, 1], [10.792834, 106.696065, 1], [10.770302, 106.687757, 1], [10.770334, 106.687204, 1], [10.756529, 106.66713, 1], [10.79843, 106.719368, 1], [10.82269, 106.639492, 1], [10.779712, 106.635861, 1], [10.791073, 106.670851, 1], [10.74634, 106.674372, 1], [10.849445, 106.678617, 1], [10.803224, 106.715623, 1], [10.788438, 106.678386, 1]],
"vehicle_num": 20,
#"min_parcels": 7,
"max_distance": 50000,
"min_vehicles": True,
"transport_mode": "1N"
}
test_100 = {
"points": [[10.729461, 106.714041, 1], [10.823866, 106.5643, 1], [10.79944, 106.647973, 1], [10.814017, 106.620823, 1], [10.731643, 106.738308, 1], [10.762599, 106.687925, 1], [10.775522, 106.662882, 1], [10.802723, 106.692658, 1], [10.744354, 106.711855, 1], [10.850008, 106.758743, 1], [10.780519, 106.707268, 1], [10.748424, 106.636702, 1], [10.806455, 106.681468, 1], [10.754666, 106.689363, 1], [10.758743, 106.670432, 1], [10.820834, 106.689434, 1], [10.800979, 106.712649, 1], [10.800279, 106.609387, 1], [10.741001, 106.725929, 1], [10.750331, 106.679101, 1], [10.79668, 106.658022, 1], [10.762186, 106.668393, 1], [10.778365, 106.680896, 1], [10.771231, 106.689969, 1], [10.798319, 106.693211, 1], [10.767895, 106.688581, 1], [10.795009, 106.643572, 1], [10.788437, 106.765777, 1], [10.826968, 106.67886, 1], [10.779935, 106.69516, 1], [10.775864, 106.634517, 1], [10.772158, 106.6694, 1], [10.865188, 106.613091, 1], [10.729346, 106.724008, 1], [10.758868, 106.641151, 1], [10.804648, 106.638839, 1], [10.809129, 106.623127, 1], [10.808921, 106.693806, 1], [10.770099, 106.675264, 1], [10.778296, 106.689573, 1], [10.844042, 106.681619, 1], [10.768287, 106.650966, 1], [10.768056, 106.697908, 1], [10.768287, 106.650966, 1], [10.806934, 106.638023, 1], [10.816738, 106.707299, 1], [10.781411, 106.700665, 1], [10.795732, 106.629417, 1], [10.81447, 106.632585, 1], [10.771316, 106.674337, 1], [10.848241, 106.65811, 1], [10.798513, 106.661119, 1], [10.813909, 106.728877, 1], [10.81642, 106.669803, 1], [10.789646, 106.658173, 1], [10.856459, 106.656223, 1], [10.808417, 106.71568, 1], [10.756668, 106.662223, 1], [10.766861, 106.667155, 1], [10.746556, 106.650745, 1], [10.799058, 106.67915, 1], [10.790942, 106.66273, 1], [10.789196, 106.629526, 1], [10.805357, 106.627313, 1], [10.754124, 106.682345, 1], [10.754796, 106.681392, 1], [10.77827, 106.689555, 1], [10.804765, 106.679195, 1], [10.789317, 106.711883, 1], [10.810853, 106.70084, 1], [10.792975, 106.684736, 1], [10.871146, 
106.646462, 1], [10.810716, 106.712404, 1], [10.761701, 106.669916, 1], [10.784062, 106.684159, 1], [10.833191, 106.682354, 1], [10.751593, 106.698112, 1], [10.752842, 106.629677, 1], [10.727435, 106.624663, 1], [10.749648, 106.688812, 1], [10.796834, 106.691267, 1], [10.744594, 106.734553, 1], [10.767217, 106.676145, 1], [10.777651, 106.655916, 1], [10.797289, 106.65742, 1], [10.763466, 106.677676, 1], [10.809293, 106.696336, 1], [10.801942, 106.71812, 1], [10.732435, 106.646739, 1], [10.763993, 106.66923, 1], [10.789123, 106.67303, 1], [10.774195, 106.678469, 1], [10.794409, 106.709557, 1], [10.791761, 106.694682, 1], [10.793501, 106.690105, 1], [10.802234, 106.642131, 1], [10.760518, 106.701964, 1], [10.820097, 106.683179, 1], [10.806049, 106.632269, 1], [10.784069, 106.642595, 1]],
"vehicle_num": 10,
"min_parcels": 5,
"max_distance": 20000,
"min_vehicles": True
}
test_20 = {
"points": [[10.773687, 106.703263, 0], [10.731158, 106.716759, 1], [10.729461, 106.714041, 1], [10.768337, 106.700743, 1], [10.827278, 106.678072, 1], [10.772264, 106.681347, 1], [10.786769, 106.640134, 1], [10.875387, 106.755127, 1], [10.808667, 106.711705, 1], [10.774575, 106.705748, 1], [10.827971, 106.727006, 1], [10.770907, 106.6681, 1], [10.769285, 106.674728, 1], [10.737721, 106.675189, 1], [10.786519, 106.693997, 1], [10.798453, 106.667866, 1], [10.772691, 106.693676, 1], [10.783066, 106.695901, 1], [10.754833, 106.66052, 1], [10.770541, 106.703162, 1]],
"vehicle_num": 10,
"min_vehicles": True,
"max_parcels": 10,
"transport_mode": "1N",
"distance_calculation": "OSRM"
}
def get_routing_assignment(data, routing, assignment, distance_matrix, violated_points):
    """
    Extract per-vehicle routes from a solved assignment and split them into
    acceptable clusters and constraint-violating clusters.

    A route is "violated" when its total distance exceeds
    data.maximum_distance (if that limit is non-zero) or its total parcel
    load falls below data.min_parcels.
    """
    ok_clusters = []
    bad_clusters = []

    def successor(idx):
        # Solver index that follows idx on the chosen route.
        return assignment.Value(routing.NextVar(idx))

    def latlon(node):
        # [lat, lng] pair for a solver node index.
        loc = data.locations[node]
        return [loc[0], loc[1]]

    for vid in range(data.num_vehicles):
        idx = routing.Start(vid)
        if data.transport_mode == "N1":
            # N1 mode: skip the synthetic start node.
            idx = successor(idx)
        total_dist = 0
        total_load = 0
        stops = []
        while not routing.IsEnd(idx):
            node = routing.IndexToNode(idx)
            nxt = successor(idx)
            total_dist += distance_matrix[node][routing.IndexToNode(nxt)]
            total_load += data.parcels[node]
            stops.append(latlon(node))
            idx = nxt
        if data.transport_mode != "1N":
            # Include the terminal node unless routes are open-ended (1N).
            stops.append(latlon(routing.IndexToNode(idx)))
        too_far = data.maximum_distance != 0 and total_dist > data.maximum_distance
        too_light = total_load < data.min_parcels
        if too_far or too_light:
            bad_clusters.append(stops)
        else:
            ok_clusters.append(stops)
    return {
        "cluster": ok_clusters,
        "violated_points": violated_points,
        "violated_cluster": bad_clusters
    }
def vrp(event, context):
    """
    Lambda-style handler: cluster pickup points into vehicle routes.

    event keys (see the module-level test_* dicts for examples):
      points               - [[lat, lng, flag], ...] locations (required)
      vehicle_num          - number of available vehicles (required)
      transport_mode       - "1N", "N1" or "1N1" (required)
      min_parcels, max_parcels, max_distance, min_vehicles,
      distance_calculation - optional tuning parameters
    context is unused (kept for the Lambda handler signature).

    Returns an API-Gateway response dict: HTTP 400 on invalid input,
    HTTP 200 with the clustering result (or a failure message) otherwise.
    """
    start_time = time.time()
    """Entry point of the program"""
    # Instantiate the data problem.
    try:
        # Required keys raise KeyError; optional ones fall back to defaults.
        locations = event["points"]
        min_parcels = event.get("min_parcels", 0)
        maximum_distance = event.get("max_distance", 0)
        num_vehicles = event["vehicle_num"]
        min_vehicles = event.get("min_vehicles", False)
        max_parcels = event.get("max_parcels", 20)
        transport_mode = event["transport_mode"]
        distance_calculation = event.get("distance_calculation", "VINCENTY")
    except KeyError as e:
        print("Missing required input: " + str(e))
        cluster = {"title": "Missing required input: " + str(e)}
        return return_lambda_gateway_response(400, cluster)
    # ---- Input validation -------------------------------------------------
    if min_parcels < 0 or maximum_distance < 0 or num_vehicles < 0 or max_parcels < 0:
        cluster = {"title": "Numerical input cannot be negative"}
        return return_lambda_gateway_response(400, cluster)
    if transport_mode != "1N" and transport_mode != "N1" and transport_mode != "1N1":
        cluster = {"title": "Invalid transport_mode"}
        return return_lambda_gateway_response(400, cluster)
    if distance_calculation != "VINCENTY" and distance_calculation != "OSRM":
        cluster = {"title": "Invalid distance_calculation"}
        return return_lambda_gateway_response(400, cluster)
    if distance_calculation == "OSRM" and len(locations) > 100:
        cluster = {"title": "Bad request: OSRM cannot be used with more than 100 points"}
        return return_lambda_gateway_response(400, cluster)
    data = data_problem.DataProblem(locations, num_vehicles, min_parcels,
                                    max_parcels, maximum_distance, transport_mode, distance_calculation)
    # Define weight of each edge
    distance = pickup_constraints.CreateDistanceEvaluator(data)
    distance_matrix = distance.get_distance_matrix()
    distance_evaluator = distance.distance_evaluator
    # NOTE(review): get_violated_points is referenced without parentheses here
    # and below, so it is presumably a property/attribute — confirm.
    print("Violated points: " + str(distance.get_violated_points))
    if len(data.locations) <= 1:
        # Nothing (or only the depot) left to route after filtering.
        cluster = {
            "cluster": [],
            "violated_points": distance.get_violated_points,
            "violated_cluster": []
        }
        return return_lambda_gateway_response(200, cluster)
    # ---- First attempt: distance as a hard constraint ---------------------
    # Create Routing Model
    routing = pywrapcp.RoutingModel(data.num_locations, data.num_vehicles, data.depot)
    if data.num_locations > 100:
        # Large instances use the clustered arc-cost evaluator.
        routing.SetArcCostEvaluatorOfAllVehicles(distance.cluster_distance_evaluator)
    else:
        routing.SetArcCostEvaluatorOfAllVehicles(distance_evaluator)
    if maximum_distance != 0:
        pickup_constraints.add_distance_dimension(routing, data, distance_evaluator)
    # still need when min_parcels = 0 because we have max_parcels
    parcels_evaluator = pickup_constraints.CreateParcelsEvaluator(data).parcels_evaluator
    pickup_constraints.add_parcels_constraints(routing, data, parcels_evaluator)
    # Setting first solution heuristic (cheapest addition).
    search_parameters = pywrapcp.RoutingModel.DefaultSearchParameters()
    search_parameters.time_limit_ms = 25000
    search_parameters.first_solution_strategy = (
        routing_enums_pb2.FirstSolutionStrategy.PATH_MOST_CONSTRAINED_ARC)
    #minimize the total number of vehicle
    if min_vehicles:
        # A fixed per-vehicle cost steers the solver toward fewer routes; the
        # larger penalty is used only when min_parcels remains satisfiable.
        if data.num_vehicles*data.min_parcels >= data.num_locations:
            routing.SetFixedCostOfAllVehicles(1000000)
        else:
            routing.SetFixedCostOfAllVehicles(10000)
    # Solve the problem.
    assignment = routing.SolveWithParameters(search_parameters)
    # ---- Second attempt: relax distance to a soft constraint --------------
    if assignment is None:
        print("change distance to soft constraint")
        print("\nThe program took " + str(time.time() - start_time) + " seconds to run")
        start_time = time.time()
        # Rebuild the model from scratch with the soft distance penalty and a
        # longer search budget (60 s instead of 25 s).
        routing = pywrapcp.RoutingModel(data.num_locations, data.num_vehicles, data.depot)
        if data.num_locations > 100:
            routing.SetArcCostEvaluatorOfAllVehicles(distance.cluster_distance_evaluator)
        else:
            routing.SetArcCostEvaluatorOfAllVehicles(distance_evaluator)
        if maximum_distance != 0:
            pickup_constraints.add_distance_soft(routing, data, distance_evaluator)
        parcels_evaluator = pickup_constraints.CreateParcelsEvaluator(data).parcels_evaluator
        pickup_constraints.add_parcels_constraints(routing, data, parcels_evaluator)
        search_parameters = pywrapcp.RoutingModel.DefaultSearchParameters()
        search_parameters.time_limit_ms = 60000
        search_parameters.first_solution_strategy = (
            routing_enums_pb2.FirstSolutionStrategy.PATH_MOST_CONSTRAINED_ARC)
        if min_vehicles:
            if data.num_vehicles*data.min_parcels >= data.num_locations:
                routing.SetFixedCostOfAllVehicles(1000000)
            else:
                routing.SetFixedCostOfAllVehicles(100)
        assignment = routing.SolveWithParameters(search_parameters)
    if assignment is None:
        # NOTE(review): total failure still returns HTTP 200 with a plain
        # string body — confirm clients expect this rather than an error code.
        print("No solution found")
        cluster = "No solution found"
    else:
        cluster = get_routing_assignment(data, routing, assignment, distance_matrix, distance.get_violated_points)
        p = printer.ConsolePrinter(data, routing, assignment, distance_matrix)
        p.print()
    print("\nThe program took " + str(time.time() - start_time) + " seconds to run")
    return return_lambda_gateway_response(200, cluster)
########
# Main #
########
def main():
    """Run the 20-point sample request through the solver and print it."""
    response = vrp(test_20, "")
    print(response)


if __name__ == '__main__':
    main()
91916eee0f4be961fa63bb63edb7a4393f21b490 | 1,199 | py | Python | cmc_api/common/exceptions.py | 9nehS/mop_cmc_test | 2d6cabaf7e6fe3a6825961d5b8ddd69d7467542e | [
"Apache-2.0"
] | null | null | null | cmc_api/common/exceptions.py | 9nehS/mop_cmc_test | 2d6cabaf7e6fe3a6825961d5b8ddd69d7467542e | [
"Apache-2.0"
] | null | null | null | cmc_api/common/exceptions.py | 9nehS/mop_cmc_test | 2d6cabaf7e6fe3a6825961d5b8ddd69d7467542e | [
"Apache-2.0"
] | null | null | null |
class InvalidUserCountException(Exception):
def __init__(self, msg="UserCounts is invalid, please check the format", screen=None, stacktrace=None):
self.msg = msg
self.screen = screen
self.stacktrace = stacktrace
def __str__(self):
exception_msg = "Message: %s\n" % self.msg
if self.screen is not None:
exception_msg += "Screenshot: available via screen\n"
if self.stacktrace is not None:
stacktrace = "\n".join(self.stacktrace)
exception_msg += "Stacktrace:\n%s" % stacktrace
return exception_msg
class InvalidHasWapException(Exception):
    """Raised when a ``HasWap`` value is not one of 'Y', 'N' or None.

    Attributes:
        msg: Human-readable description of the problem.
        screen: Optional screenshot payload associated with the failure.
        stacktrace: Optional list of stack-trace lines.
    """

    def __init__(self, msg="HasWap is invalid, it should be 'Y/N' or None", screen=None, stacktrace=None):
        # Fix: forward the message to Exception.__init__ so ``e.args`` is
        # populated and the exception reprs/pickles correctly (the original
        # never called the base constructor).
        super().__init__(msg)
        self.msg = msg
        self.screen = screen
        self.stacktrace = stacktrace

    def __str__(self):
        """Build a multi-line report: message, then optional screenshot note and stacktrace."""
        exception_msg = "Message: %s\n" % self.msg
        if self.screen is not None:
            exception_msg += "Screenshot: available via screen\n"
        if self.stacktrace is not None:
            stacktrace = "\n".join(self.stacktrace)
            exception_msg += "Stacktrace:\n%s" % stacktrace
        return exception_msg
| 35.264706 | 107 | 0.633028 | 143 | 1,199 | 5.13986 | 0.244755 | 0.130612 | 0.04898 | 0.054422 | 0.82449 | 0.761905 | 0.761905 | 0.761905 | 0.761905 | 0.761905 | 0 | 0 | 0.267723 | 1,199 | 33 | 108 | 36.333333 | 0.83713 | 0 | 0 | 0.846154 | 0 | 0 | 0.18311 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.153846 | false | 0 | 0 | 0 | 0.307692 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
9194ced7df210b7d7030966b75de7ef888a5a372 | 277,954 | py | Python | examples/grids/grid_uri/cigre/cigre_eur_lv_res/bpu/cigre_eur_lv_res_bpu.py | pydae/pydae | 8076bcfeb2cdc865a5fc58561ff8d246d0ed7d9d | [
"MIT"
] | 1 | 2020-12-20T03:45:26.000Z | 2020-12-20T03:45:26.000Z | examples/grids/grid_uri/cigre/cigre_eur_lv_res/bpu/cigre_eur_lv_res_bpu.py | pydae/pydae | 8076bcfeb2cdc865a5fc58561ff8d246d0ed7d9d | [
"MIT"
] | null | null | null | examples/grids/grid_uri/cigre/cigre_eur_lv_res/bpu/cigre_eur_lv_res_bpu.py | pydae/pydae | 8076bcfeb2cdc865a5fc58561ff8d246d0ed7d9d | [
"MIT"
] | null | null | null | import numpy as np
import numba
import scipy.optimize as sopt
import json
# Short aliases so the auto-generated symbolic equations below read close
# to the math they were produced from.
sin = np.sin
cos = np.cos
atan2 = np.arctan2
sqrt = np.sqrt
sign = np.sign
class cigre_eur_lv_res_bpu_class:
    def __init__(self):
        """Set up the CIGRE European LV residential grid model (bpu form).

        Auto-generated by pydae: stores solver settings, the names/default
        values of parameters, inputs, states and outputs, the sparse-jacobian
        index lists, and finally builds the numba record array via
        ``self.update()``.
        """
        # --- solver / storage settings ---
        self.t_end = 10.000000
        self.Dt = 0.0010000
        self.decimation = 10.000000
        self.itol = 1e-6
        self.Dt_max = 0.001000
        self.Dt_min = 0.001000
        self.solvern = 5
        self.imax = 100
        # --- problem sizes: states, algebraic vars, outputs ---
        self.N_x = 15
        self.N_y = 51
        self.N_z = 18
        self.N_store = 10000
        # --- generated name/value lists (do not edit by hand) ---
        self.params_list = ['S_base', 'g_R01_R02', 'b_R01_R02', 'bs_R01_R02', 'g_R02_R03', 'b_R02_R03', 'bs_R02_R03', 'g_R03_R04', 'b_R03_R04', 'bs_R03_R04', 'g_R04_R05', 'b_R04_R05', 'bs_R04_R05', 'g_R05_R06', 'b_R05_R06', 'bs_R05_R06', 'g_R06_R07', 'b_R06_R07', 'bs_R06_R07', 'g_R07_R08', 'b_R07_R08', 'bs_R07_R08', 'g_R08_R09', 'b_R08_R09', 'bs_R08_R09', 'g_R09_R10', 'b_R09_R10', 'bs_R09_R10', 'g_R03_R11', 'b_R03_R11', 'bs_R03_R11', 'g_R04_R12', 'b_R04_R12', 'bs_R04_R12', 'g_R12_R13', 'b_R12_R13', 'bs_R12_R13', 'g_R13_R14', 'b_R13_R14', 'bs_R13_R14', 'g_R14_R15', 'b_R14_R15', 'bs_R14_R15', 'g_R06_R16', 'b_R06_R16', 'bs_R06_R16', 'g_R09_R17', 'b_R09_R17', 'bs_R09_R17', 'g_R10_R18', 'b_R10_R18', 'bs_R10_R18', 'U_R01_n', 'U_R02_n', 'U_R03_n', 'U_R04_n', 'U_R05_n', 'U_R06_n', 'U_R07_n', 'U_R08_n', 'U_R09_n', 'U_R10_n', 'U_R11_n', 'U_R12_n', 'U_R13_n', 'U_R14_n', 'U_R15_n', 'U_R16_n', 'U_R17_n', 'U_R18_n', 'S_n_R10', 'H_R10', 'Omega_b_R10', 'T1d0_R10', 'T1q0_R10', 'X_d_R10', 'X_q_R10', 'X1d_R10', 'X1q_R10', 'D_R10', 'R_a_R10', 'K_delta_R10', 'K_a_R10', 'K_ai_R10', 'T_r_R10', 'Droop_R10', 'T_m_R10', 'S_n_R14', 'H_R14', 'Omega_b_R14', 'T1d0_R14', 'T1q0_R14', 'X_d_R14', 'X_q_R14', 'X1d_R14', 'X1q_R14', 'D_R14', 'R_a_R14', 'K_delta_R14', 'K_a_R14', 'K_ai_R14', 'T_r_R14', 'Droop_R14', 'T_m_R14', 'K_sec_R10', 'K_sec_R14']
        self.params_values_list = [1000000.0, 644.4416190074766, -322.61846569219165, 0.0, 644.4416190074766, -322.61846569219165, 0.0, 644.4416190074766, -322.61846569219165, 0.0, 644.4416190074766, -322.61846569219165, 0.0, 644.4416190074766, -322.61846569219165, 0.0, 644.4416190074766, -322.61846569219165, 0.0, 644.4416190074766, -322.61846569219165, 0.0, 644.4416190074766, -322.61846569219165, 0.0, 644.4416190074766, -322.61846569219165, 0.0, 183.38422558864576, -19.0143186828563, 0.0, 157.1864790759821, -16.29798744244826, 0.0, 157.1864790759821, -16.29798744244826, 0.0, 157.1864790759821, -16.29798744244826, 0.0, 157.1864790759821, -16.29798744244826, 0.0, 183.38422558864576, -19.0143186828563, 0.0, 183.38422558864576, -19.0143186828563, 0.0, 183.38422558864576, -19.0143186828563, 0.0, 400.0, 400.0, 400.0, 400.0, 400.0, 400.0, 400.0, 400.0, 400.0, 400.0, 400.0, 400.0, 400.0, 400.0, 400.0, 400.0, 400.0, 400.0, 1000000.0, 6.5, 314.1592653589793, 8.0, 0.4, 1.8, 1.7, 0.3, 0.55, 0.0, 0.0025, 0.01, 100, 1e-06, 0.1, 0.05, 5.0, 1000000.0, 6.5, 314.1592653589793, 8.0, 0.4, 1.8, 1.7, 0.3, 0.55, 0.0, 0.0025, 0.01, 100, 1e-06, 0.1, 0.05, 5.0, 0.001, 0.001]
        self.inputs_ini_list = ['P_R01', 'Q_R01', 'P_R02', 'Q_R02', 'P_R03', 'Q_R03', 'P_R04', 'Q_R04', 'P_R05', 'Q_R05', 'P_R06', 'Q_R06', 'P_R07', 'Q_R07', 'P_R08', 'Q_R08', 'P_R09', 'Q_R09', 'P_R10', 'Q_R10', 'P_R11', 'Q_R11', 'P_R12', 'Q_R12', 'P_R13', 'Q_R13', 'P_R14', 'Q_R14', 'P_R15', 'Q_R15', 'P_R16', 'Q_R16', 'P_R17', 'Q_R17', 'P_R18', 'Q_R18', 'v_ref_R10', 'v_pss_R10', 'p_c_R10', 'v_ref_R14', 'v_pss_R14', 'p_c_R14']
        self.inputs_ini_values_list = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -14250.0, -4683.748, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -49400.0, -16236.995, -52250.0, -17173.744, -33250.0, -10928.746, -44650.0, -14675.745, 1.0, 0.0, 0.7, 1.0, 0.0, 0.7]
        self.inputs_run_list = ['P_R01', 'Q_R01', 'P_R02', 'Q_R02', 'P_R03', 'Q_R03', 'P_R04', 'Q_R04', 'P_R05', 'Q_R05', 'P_R06', 'Q_R06', 'P_R07', 'Q_R07', 'P_R08', 'Q_R08', 'P_R09', 'Q_R09', 'P_R10', 'Q_R10', 'P_R11', 'Q_R11', 'P_R12', 'Q_R12', 'P_R13', 'Q_R13', 'P_R14', 'Q_R14', 'P_R15', 'Q_R15', 'P_R16', 'Q_R16', 'P_R17', 'Q_R17', 'P_R18', 'Q_R18', 'v_ref_R10', 'v_pss_R10', 'p_c_R10', 'v_ref_R14', 'v_pss_R14', 'p_c_R14']
        self.inputs_run_values_list = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -14250.0, -4683.748, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -49400.0, -16236.995, -52250.0, -17173.744, -33250.0, -10928.746, -44650.0, -14675.745, 1.0, 0.0, 0.7, 1.0, 0.0, 0.7]
        self.outputs_list = ['V_R01', 'V_R02', 'V_R03', 'V_R04', 'V_R05', 'V_R06', 'V_R07', 'V_R08', 'V_R09', 'V_R10', 'V_R11', 'V_R12', 'V_R13', 'V_R14', 'V_R15', 'V_R16', 'V_R17', 'V_R18']
        self.x_list = ['delta_R10', 'omega_R10', 'e1q_R10', 'e1d_R10', 'v_c_R10', 'xi_v_R10', 'p_m_R10', 'delta_R14', 'omega_R14', 'e1q_R14', 'e1d_R14', 'v_c_R14', 'xi_v_R14', 'p_m_R14', 'xi_freq']
        self.y_run_list = ['V_R01', 'theta_R01', 'V_R02', 'theta_R02', 'V_R03', 'theta_R03', 'V_R04', 'theta_R04', 'V_R05', 'theta_R05', 'V_R06', 'theta_R06', 'V_R07', 'theta_R07', 'V_R08', 'theta_R08', 'V_R09', 'theta_R09', 'V_R10', 'theta_R10', 'V_R11', 'theta_R11', 'V_R12', 'theta_R12', 'V_R13', 'theta_R13', 'V_R14', 'theta_R14', 'V_R15', 'theta_R15', 'V_R16', 'theta_R16', 'V_R17', 'theta_R17', 'V_R18', 'theta_R18', 'i_d_R10', 'i_q_R10', 'p_g_R10_1', 'q_g_R10_1', 'v_f_R10', 'p_m_ref_R10', 'i_d_R14', 'i_q_R14', 'p_g_R14_1', 'q_g_R14_1', 'v_f_R14', 'p_m_ref_R14', 'omega_coi', 'p_r_R10', 'p_r_R14']
        self.xy_list = self.x_list + self.y_run_list
        self.y_ini_list = ['V_R01', 'theta_R01', 'V_R02', 'theta_R02', 'V_R03', 'theta_R03', 'V_R04', 'theta_R04', 'V_R05', 'theta_R05', 'V_R06', 'theta_R06', 'V_R07', 'theta_R07', 'V_R08', 'theta_R08', 'V_R09', 'theta_R09', 'V_R10', 'theta_R10', 'V_R11', 'theta_R11', 'V_R12', 'theta_R12', 'V_R13', 'theta_R13', 'V_R14', 'theta_R14', 'V_R15', 'theta_R15', 'V_R16', 'theta_R16', 'V_R17', 'theta_R17', 'V_R18', 'theta_R18', 'i_d_R10', 'i_q_R10', 'p_g_R10_1', 'q_g_R10_1', 'v_f_R10', 'p_m_ref_R10', 'i_d_R14', 'i_q_R14', 'p_g_R14_1', 'q_g_R14_1', 'v_f_R14', 'p_m_ref_R14', 'omega_coi', 'p_r_R10', 'p_r_R14']
        self.xy_ini_list = self.x_list + self.y_ini_list
        # --- runtime counters ---
        self.t = 0.0
        self.it = 0
        self.it_store = 0
        self.xy_prev = np.zeros((self.N_x+self.N_y,1))
        self.initialization_tol = 1e-6
        self.N_u = len(self.inputs_run_list)
        # scipy.optimize.root configuration used by initialize()
        self.sopt_root_method='hybr'
        self.sopt_root_jac=True
        self.u_ini_list = self.inputs_ini_list
        self.u_ini_values_list = self.inputs_ini_values_list
        self.u_run_list = self.inputs_run_list
        self.u_run_values_list = self.inputs_run_values_list
        self.N_u = len(self.u_run_list)
        # Sparse-jacobian index lists come from the generated nonzeros()
        # (defined elsewhere in this file).
        Fx_ini_rows,Fx_ini_cols,Fy_ini_rows,Fy_ini_cols,Gx_ini_rows,Gx_ini_cols,Gy_ini_rows,Gy_ini_cols = nonzeros()
        self.Fx_ini_rows = np.array(Fx_ini_rows)
        if len(Fx_ini_rows) == 1:
            # NOTE(review): this branch forces single-entry index lists into
            # 1-D arrays, but Fx_ini_cols is unconditionally overwritten two
            # lines below, so the second assignment here is dead — looks like
            # a generator bug; confirm against pydae upstream.
            self.Fx_ini_rows = np.array([[Fx_ini_rows]]).reshape(1,)
            self.Fx_ini_cols = np.array([[Fx_ini_cols]]).reshape(1,)
        self.Fx_ini_cols = np.array(Fx_ini_cols)
        self.Fy_ini_rows = np.array(Fy_ini_rows)
        self.Fy_ini_cols = np.array(Fy_ini_cols)
        self.Gx_ini_rows = np.array(Gx_ini_rows)
        self.Gx_ini_cols = np.array(Gx_ini_cols)
        self.Gy_ini_rows = np.array(Gy_ini_rows)
        self.Gy_ini_cols = np.array(Gy_ini_cols)
        # Names shared between run-inputs and ini-algebraic-vars (and vice
        # versa) that must be copied across after initialization.
        self.yini2urun = list(set(self.inputs_run_list).intersection(set(self.y_ini_list)))
        self.uini2yrun = list(set(self.y_run_list).intersection(set(self.inputs_ini_list)))
        # Build self.struct (the numba record array) from everything above.
        self.update()
    def update(self):
        """(Re)build ``self.struct``, the single-row numpy record array that
        the compiled ``ini``/``run`` kernels operate on.

        The dtype carries solver settings, work arrays, jacobians, the result
        store, the sparse index lists, and one float64 field per parameter
        and input.  Must be called again after changing any size setting.
        """
        self.N_steps = int(np.ceil(self.t_end/self.Dt))
        # Field layout of the record array (order matches `values` below).
        dt = [
              ('t_end', np.float64),
              ('Dt', np.float64),
              ('decimation', np.float64),
              ('itol', np.float64),
              ('Dt_max', np.float64),
              ('Dt_min', np.float64),
              ('solvern', np.int64),
              ('imax', np.int64),
              ('N_steps', np.int64),
              ('N_store', np.int64),
              ('N_x', np.int64),
              ('N_y', np.int64),
              ('N_z', np.int64),
              ('t', np.float64),
              ('it', np.int64),
              ('it_store', np.int64),
              ('idx', np.int64),
              ('idy', np.int64),
              ('f', np.float64, (self.N_x,1)),
              ('x', np.float64, (self.N_x,1)),
              ('x_0', np.float64, (self.N_x,1)),
              ('g', np.float64, (self.N_y,1)),
              ('y_run', np.float64, (self.N_y,1)),
              ('y_ini', np.float64, (self.N_y,1)),
              ('u_run', np.float64, (self.N_u,1)),
              ('y_0', np.float64, (self.N_y,1)),
              ('h', np.float64, (self.N_z,1)),
              ('Fx', np.float64, (self.N_x,self.N_x)),
              ('Fy', np.float64, (self.N_x,self.N_y)),
              ('Gx', np.float64, (self.N_y,self.N_x)),
              ('Gy', np.float64, (self.N_y,self.N_y)),
              ('Fu', np.float64, (self.N_x,self.N_u)),
              ('Gu', np.float64, (self.N_y,self.N_u)),
              ('Hx', np.float64, (self.N_z,self.N_x)),
              ('Hy', np.float64, (self.N_z,self.N_y)),
              ('Hu', np.float64, (self.N_z,self.N_u)),
              ('Fx_ini', np.float64, (self.N_x,self.N_x)),
              ('Fy_ini', np.float64, (self.N_x,self.N_y)),
              ('Gx_ini', np.float64, (self.N_y,self.N_x)),
              ('Gy_ini', np.float64, (self.N_y,self.N_y)),
              ('T', np.float64, (self.N_store+1,1)),
              ('X', np.float64, (self.N_store+1,self.N_x)),
              ('Y', np.float64, (self.N_store+1,self.N_y)),
              ('Z', np.float64, (self.N_store+1,self.N_z)),
              ('iters', np.float64, (self.N_store+1,1)),
              ('store', np.int64),
              ('Fx_ini_rows', np.int64, self.Fx_ini_rows.shape),
              ('Fx_ini_cols', np.int64, self.Fx_ini_cols.shape),
              ('Fy_ini_rows', np.int64, self.Fy_ini_rows.shape),
              ('Fy_ini_cols', np.int64, self.Fy_ini_cols.shape),
              ('Gx_ini_rows', np.int64, self.Gx_ini_rows.shape),
              ('Gx_ini_cols', np.int64, self.Gx_ini_cols.shape),
              ('Gy_ini_rows', np.int64, self.Gy_ini_rows.shape),
              ('Gy_ini_cols', np.int64, self.Gy_ini_cols.shape),
              ('Ac_ini', np.float64, ((self.N_x+self.N_y,self.N_x+self.N_y))),
              ('fg', np.float64, ((self.N_x+self.N_y,1))),
             ]
        # Initial values, one per field above, in the same order.
        values = [
                  self.t_end,
                  self.Dt,
                  self.decimation,
                  self.itol,
                  self.Dt_max,
                  self.Dt_min,
                  self.solvern,
                  self.imax,
                  self.N_steps,
                  self.N_store,
                  self.N_x,
                  self.N_y,
                  self.N_z,
                  self.t,
                  self.it,
                  self.it_store,
                  0,                                # idx
                  0,                                # idy
                  np.zeros((self.N_x,1)),          # f
                  np.zeros((self.N_x,1)),          # x
                  np.zeros((self.N_x,1)),          # x_0
                  np.zeros((self.N_y,1)),          # g
                  np.zeros((self.N_y,1)),          # y_run
                  np.zeros((self.N_y,1)),          # y_ini
                  np.zeros((self.N_u,1)),          # u_run
                  np.zeros((self.N_y,1)),          # y_0
                  np.zeros((self.N_z,1)),          # h
                  np.zeros((self.N_x,self.N_x)),   # Fx
                  np.zeros((self.N_x,self.N_y)),   # Fy
                  np.zeros((self.N_y,self.N_x)),   # Gx
                  np.zeros((self.N_y,self.N_y)),   # Gy
                  np.zeros((self.N_x,self.N_u)),   # Fu
                  np.zeros((self.N_y,self.N_u)),   # Gu
                  np.zeros((self.N_z,self.N_x)),   # Hx
                  np.zeros((self.N_z,self.N_y)),   # Hy
                  np.zeros((self.N_z,self.N_u)),   # Hu
                  np.zeros((self.N_x,self.N_x)),   # Fx_ini
                  np.zeros((self.N_x,self.N_y)),   # Fy_ini
                  np.zeros((self.N_y,self.N_x)),   # Gx_ini
                  np.zeros((self.N_y,self.N_y)),   # Gy_ini
                  np.zeros((self.N_store+1,1)),            # T
                  np.zeros((self.N_store+1,self.N_x)),     # X
                  np.zeros((self.N_store+1,self.N_y)),     # Y
                  np.zeros((self.N_store+1,self.N_z)),     # Z
                  np.zeros((self.N_store+1,1)),            # iters
                  1,                                        # store
                  self.Fx_ini_rows,
                  self.Fx_ini_cols,
                  self.Fy_ini_rows,
                  self.Fy_ini_cols,
                  self.Gx_ini_rows,
                  self.Gx_ini_cols,
                  self.Gy_ini_rows,
                  self.Gy_ini_cols,
                  np.zeros((self.N_x+self.N_y,self.N_x+self.N_y)),  # Ac_ini
                  np.zeros((self.N_x+self.N_y,1)),                  # fg
                 ]
        # One float64 field per parameter.
        dt += [(item,np.float64) for item in self.params_list]
        values += [item for item in self.params_values_list]
        # Ini-only inputs (names not also run inputs) get their own field.
        for item_id,item_val in zip(self.inputs_ini_list,self.inputs_ini_values_list):
            if item_id in self.inputs_run_list: continue
            dt += [(item_id,np.float64)]
            values += [item_val]
        # One float64 field per run input.
        dt += [(item,np.float64) for item in self.inputs_run_list]
        values += [item for item in self.inputs_run_values_list]
        self.struct = np.rec.array([tuple(values)], dtype=np.dtype(dt))
        # Warm up the uncompiled jacobian kernels once at xy = 0.
        xy0 = np.zeros((self.N_x+self.N_y,))
        self.ini_dae_jacobian_nn(xy0)
        self.run_dae_jacobian_nn(xy0)
def load_params(self,data_input):
if type(data_input) == str:
json_file = data_input
self.json_file = json_file
self.json_data = open(json_file).read().replace("'",'"')
data = json.loads(self.json_data)
elif type(data_input) == dict:
data = data_input
self.data = data
for item in self.data:
self.struct[0][item] = self.data[item]
self.params_values_list[self.params_list.index(item)] = self.data[item]
    def ini_problem(self,x):
        """Residual [f; g] of the initialization problem at the stacked vector x.

        x[:N_x] are dynamic states, x[N_x:] the ini-side algebraic variables.
        `ini` is the numba kernel defined later in this file: mode 2 fills f,
        mode 3 fills g; `self.compile` selects the jitted or pure-Python path.
        """
        self.struct[0].x[:,0] = x[0:self.N_x]
        self.struct[0].y_ini[:,0] = x[self.N_x:(self.N_x+self.N_y)]
        if self.compile:
            ini(self.struct,2)
            ini(self.struct,3)
        else:
            ini.py_func(self.struct,2)
            ini.py_func(self.struct,3)
        fg = np.vstack((self.struct[0].f,self.struct[0].g))[:,0]
        return fg
    def run_problem(self,x):
        """Residual [f; g] of the run-time problem at the stacked vector x.

        Like ini_problem but against y_run, at the current struct time, and
        also refreshing the run jacobians (modes 10-13) as a side effect.
        """
        t = self.struct[0].t
        self.struct[0].x[:,0] = x[0:self.N_x]
        self.struct[0].y_run[:,0] = x[self.N_x:(self.N_x+self.N_y)]
        if self.compile:
            run(t,self.struct,2)
            run(t,self.struct,3)
            run(t,self.struct,10)
            run(t,self.struct,11)
            run(t,self.struct,12)
            run(t,self.struct,13)
        else:
            run.py_func(t,self.struct,2)
            run.py_func(t,self.struct,3)
            run.py_func(t,self.struct,10)
            run.py_func(t,self.struct,11)
            run.py_func(t,self.struct,12)
            run.py_func(t,self.struct,13)
        fg = np.vstack((self.struct[0].f,self.struct[0].g))[:,0]
        return fg
    def run_dae_jacobian(self,x):
        """Dense DAE jacobian [[Fx,Fy],[Gx,Gy]] of the run system at x.

        Evaluated at t=0.0; modes 10-13 fill the four jacobian blocks.
        """
        self.struct[0].x[:,0] = x[0:self.N_x]
        self.struct[0].y_run[:,0] = x[self.N_x:(self.N_x+self.N_y)]
        run(0.0,self.struct,10)
        run(0.0,self.struct,11)
        run(0.0,self.struct,12)
        run(0.0,self.struct,13)
        A_c = np.block([[self.struct[0].Fx,self.struct[0].Fy],
                        [self.struct[0].Gx,self.struct[0].Gy]])
        return A_c
    def run_dae_jacobian_nn(self,x):
        """Evaluate the run jacobians with the non-numba (`_nn`) kernels; no return value."""
        self.struct[0].x[:,0] = x[0:self.N_x]
        self.struct[0].y_run[:,0] = x[self.N_x:(self.N_x+self.N_y)]
        run_nn(0.0,self.struct,10)
        run_nn(0.0,self.struct,11)
        run_nn(0.0,self.struct,12)
        run_nn(0.0,self.struct,13)
    def eval_jacobians(self):
        """Refresh the run jacobians at the current struct state; returns 1.

        NOTE(review): only modes 10-12 are evaluated here (Gy, mode 13, is
        not) — confirm against the generated `run` kernel's mode mapping.
        """
        run(0.0,self.struct,10)
        run(0.0,self.struct,11)
        run(0.0,self.struct,12)
        return 1
    def ini_dae_jacobian(self,x):
        """Dense DAE jacobian [[Fx_ini,Fy_ini],[Gx_ini,Gy_ini]] of the ini system at x."""
        self.struct[0].x[:,0] = x[0:self.N_x]
        self.struct[0].y_ini[:,0] = x[self.N_x:(self.N_x+self.N_y)]
        if self.compile:
            ini(self.struct,10)
            ini(self.struct,11)
        else:
            ini.py_func(self.struct,10)
            ini.py_func(self.struct,11)
        A_c = np.block([[self.struct[0].Fx_ini,self.struct[0].Fy_ini],
                        [self.struct[0].Gx_ini,self.struct[0].Gy_ini]])
        return A_c
    def ini_dae_jacobian_nn(self,x):
        """Evaluate the ini jacobians with the non-numba (`_nn`) kernels; no return value."""
        self.struct[0].x[:,0] = x[0:self.N_x]
        self.struct[0].y_ini[:,0] = x[self.N_x:(self.N_x+self.N_y)]
        ini_nn(self.struct,10)
        ini_nn(self.struct,11)
def f_ode(self,x):
self.struct[0].x[:,0] = x
run(self.struct,1)
return self.struct[0].f[:,0]
def f_odeint(self,x,t):
self.struct[0].x[:,0] = x
run(self.struct,1)
return self.struct[0].f[:,0]
def f_ivp(self,t,x):
self.struct[0].x[:,0] = x
run(self.struct,1)
return self.struct[0].f[:,0]
def Fx_ode(self,x):
self.struct[0].x[:,0] = x
run(self.struct,10)
return self.struct[0].Fx
def eval_A(self):
Fx = self.struct[0].Fx
Fy = self.struct[0].Fy
Gx = self.struct[0].Gx
Gy = self.struct[0].Gy
A = Fx - Fy @ np.linalg.solve(Gy,Gx)
self.A = A
return A
def eval_A_ini(self):
Fx = self.struct[0].Fx_ini
Fy = self.struct[0].Fy_ini
Gx = self.struct[0].Gx_ini
Gy = self.struct[0].Gy_ini
A = Fx - Fy @ np.linalg.solve(Gy,Gx)
return A
def reset(self):
for param,param_value in zip(self.params_list,self.params_values_list):
self.struct[0][param] = param_value
for input_name,input_value in zip(self.inputs_ini_list,self.inputs_ini_values_list):
self.struct[0][input_name] = input_value
for input_name,input_value in zip(self.inputs_run_list,self.inputs_run_values_list):
self.struct[0][input_name] = input_value
    def simulate(self,events,xy0=0):
        """Convenience wrapper: initialize, run every event, post-process.

        events: list of dicts of struct-field changes (first one is also used
        during initialization); xy0: initial guess, see initialize().
        Returns (T, X, Y, Z): time, states, algebraic vars, outputs.
        """
        # initialize both the ini and the run system
        self.initialize(events,xy0=xy0)
        # simulation run
        for event in events:
            # make all the desired changes
            self.run([event])
        # post process
        T,X,Y,Z = self.post()
        return T,X,Y,Z
    def run(self,events):
        """Advance the simulation through each event dict in `events`.

        Each event's key/value pairs are written into the record array and the
        compiled `daesolver` (defined elsewhere in this file) integrates until
        the next event.  Returns 1.
        """
        # simulation run
        for event in events:
            # make all the desired changes
            for item in event:
                self.struct[0][item] = event[item]
            daesolver(self.struct)    # run until next event
        return 1
    def rtrun(self,events):
        """Real-time variant of run(): same event loop, but pins the store
        index to the last slot before each solve — presumably so continuous
        runs keep overwriting instead of overflowing the store (confirm
        against `daesolver`).  Returns 1.
        """
        # simulation run
        for event in events:
            # make all the desired changes
            for item in event:
                self.struct[0][item] = event[item]
            self.struct[0].it_store = self.struct[0].N_store-1
            daesolver(self.struct)    # run until next event
        return 1
def post(self):
# post process result
T = self.struct[0]['T'][:self.struct[0].it_store]
X = self.struct[0]['X'][:self.struct[0].it_store,:]
Y = self.struct[0]['Y'][:self.struct[0].it_store,:]
Z = self.struct[0]['Z'][:self.struct[0].it_store,:]
iters = self.struct[0]['iters'][:self.struct[0].it_store,:]
self.T = T
self.X = X
self.Y = Y
self.Z = Z
self.iters = iters
return T,X,Y,Z
    def save_0(self,file_name = 'xy_0.json'):
        """Dump the current dynamic states and ini algebraic values to a JSON
        file, for later reuse as an initialization guess via load_0()."""
        xy_0_dict = {}
        for item in self.x_list:
            xy_0_dict.update({item:self.get_value(item)})
        for item in self.y_ini_list:
            xy_0_dict.update({item:self.get_value(item)})
        xy_0_str = json.dumps(xy_0_dict, indent=4)
        with open(file_name,'w') as fobj:
            fobj.write(xy_0_str)
    def load_0(self,file_name = 'xy_0.json'):
        """Load a previously saved xy snapshot into ``self.xy_prev``.

        Names found in x_list land in the first N_x slots; names found in
        y_ini_list land after them.  Unknown names are silently ignored.
        """
        with open(file_name) as fobj:
            xy_0_str = fobj.read()
        xy_0_dict = json.loads(xy_0_str)
        for item in xy_0_dict:
            if item in self.x_list:
                self.xy_prev[self.x_list.index(item)] = xy_0_dict[item]
            if item in self.y_ini_list:
                self.xy_prev[self.y_ini_list.index(item)+self.N_x] = xy_0_dict[item]
    def initialize(self,events=[{}],xy0=0,compile=True):
        '''
        Solve the initialization problem and seed the run-time system.

        Parameters
        ----------
        events : list of dict
            Only the first event is applied here; its key/value pairs are
            written into the record array before solving.
        xy0 : float, str or dict, optional
            0 -> all-zero initial guess; 1 -> all-ones; other float -> that
            constant; 'prev' -> last known solution; any other string -> path
            of a saved xy JSON file; dict -> values to use (written to
            'xy_0.json' and loaded back).
        compile : bool, optional
            Select the numba-jitted kernels (True) or their .py_func
            fallbacks (False).

        Returns
        -------
        bool
            ``self.initialization_ok`` — True when the root solver converged.

        NOTE(review): the mutable default ``events=[{}]`` is shared across
        calls; it is only read here, but confirm no caller mutates it.
        '''
        self.compile = compile
        # simulation parameters
        self.struct[0].it = 0       # set time step to zero
        self.struct[0].it_store = 0 # set storage to zero
        self.struct[0].t = 0.0      # set time to zero
        # initialization: apply the first event before solving
        it_event = 0
        event = events[it_event]
        for item in event:
            self.struct[0][item] = event[item]
        ## compute initial conditions using x and y_ini
        if type(xy0) == str:
            if xy0 == 'prev':
                xy0 = self.xy_prev
            else:
                self.load_0(xy0)
                xy0 = self.xy_prev
        elif type(xy0) == dict:
            with open('xy_0.json','w') as fobj:
                fobj.write(json.dumps(xy0))
            self.load_0('xy_0.json')
            xy0 = self.xy_prev
        else:
            if xy0 == 0:
                xy0 = np.zeros(self.N_x+self.N_y)
            elif xy0 == 1:
                xy0 = np.ones(self.N_x+self.N_y)
            else:
                xy0 = xy0*np.ones(self.N_x+self.N_y)
        # Solve the ini system F(x, y_ini) = 0.
        #xy = sopt.fsolve(self.ini_problem,xy0, jac=self.ini_dae_jacobian )
        if self.sopt_root_jac:
            sol = sopt.root(self.ini_problem, xy0,
                            jac=self.ini_dae_jacobian,
                            method=self.sopt_root_method, tol=self.initialization_tol)
        else:
            sol = sopt.root(self.ini_problem, xy0, method=self.sopt_root_method)
        self.initialization_ok = True
        if sol.success == False:
            print('initialization not found!')
            self.initialization_ok = False
            # NOTE(review): these views of an empty store are computed but
            # never returned or stored on failure — looks like dead code.
            T = self.struct[0]['T'][:self.struct[0].it_store]
            X = self.struct[0]['X'][:self.struct[0].it_store,:]
            Y = self.struct[0]['Y'][:self.struct[0].it_store,:]
            Z = self.struct[0]['Z'][:self.struct[0].it_store,:]
            iters = self.struct[0]['iters'][:self.struct[0].it_store,:]
        if self.initialization_ok:
            xy = sol.x
            self.xy_prev = xy
            # Seed the run-time system from the converged ini solution.
            self.struct[0].x[:,0] = xy[0:self.N_x]
            self.struct[0].y_run[:,0] = xy[self.N_x:]
            ## y_ini to u_run
            for item in self.inputs_run_list:
                if item in self.y_ini_list:
                    self.struct[0][item] = self.struct[0].y_ini[self.y_ini_list.index(item)]
            ## u_ini to y_run
            for item in self.inputs_ini_list:
                if item in self.y_run_list:
                    self.struct[0].y_run[self.y_run_list.index(item)] = self.struct[0][item]
            # Re-solve on the run system so y_run is consistent.
            #xy = sopt.fsolve(self.ini_problem,xy0, jac=self.ini_dae_jacobian )
            if self.sopt_root_jac:
                sol = sopt.root(self.run_problem, xy0,
                                jac=self.run_dae_jacobian,
                                method=self.sopt_root_method, tol=self.initialization_tol)
            else:
                sol = sopt.root(self.run_problem, xy0, method=self.sopt_root_method)
            if self.compile:
                # evaluate f and g
                run(0.0,self.struct,2)
                run(0.0,self.struct,3)
                # evaluate run jacobians
                run(0.0,self.struct,10)
                run(0.0,self.struct,11)
                run(0.0,self.struct,12)
                run(0.0,self.struct,14)
            else:
                # evaluate f and g
                run.py_func(0.0,self.struct,2)
                run.py_func(0.0,self.struct,3)
                # evaluate run jacobians
                run.py_func(0.0,self.struct,10)
                run.py_func(0.0,self.struct,11)
                run.py_func(0.0,self.struct,12)
                run.py_func(0.0,self.struct,14)
            # post process result (store is empty at this point)
            T = self.struct[0]['T'][:self.struct[0].it_store]
            X = self.struct[0]['X'][:self.struct[0].it_store,:]
            Y = self.struct[0]['Y'][:self.struct[0].it_store,:]
            Z = self.struct[0]['Z'][:self.struct[0].it_store,:]
            iters = self.struct[0]['iters'][:self.struct[0].it_store,:]
            self.T = T
            self.X = X
            self.Y = Y
            self.Z = Z
            self.iters = iters
        return self.initialization_ok
    def get_value(self,name):
        """Return the current scalar value of `name` (input, state, algebraic
        variable, parameter or output).

        The categories are checked in sequence and each match overwrites
        `value`, so later categories take precedence for duplicated names.
        Raises UnboundLocalError if `name` is in none of the lists.
        """
        if name in self.inputs_run_list:
            value = self.struct[0][name]
        if name in self.x_list:
            idx = self.x_list.index(name)
            value = self.struct[0].x[idx,0]
        if name in self.y_run_list:
            idy = self.y_run_list.index(name)
            value = self.struct[0].y_run[idy,0]
        if name in self.params_list:
            value = self.struct[0][name]
        if name in self.outputs_list:
            value = self.struct[0].h[self.outputs_list.index(name),0]
        return value
    def get_values(self,name):
        """Return the stored time series of `name` from the post-processed
        arrays (X for states, Y for algebraic vars, Z for outputs).

        Requires post() to have run; raises UnboundLocalError for unknown
        names.  Later categories override earlier ones on duplicates.
        """
        if name in self.x_list:
            values = self.X[:,self.x_list.index(name)]
        if name in self.y_run_list:
            values = self.Y[:,self.y_run_list.index(name)]
        if name in self.outputs_list:
            values = self.Z[:,self.outputs_list.index(name)]
        return values
def get_mvalue(self,names):
'''
Parameters
----------
names : list
list of variables names to return each value.
Returns
-------
mvalue : TYPE
list of value of each variable.
'''
mvalue = []
for name in names:
mvalue += [self.get_value(name)]
return mvalue
def set_value(self,name_,value):
if name_ in self.inputs_run_list:
self.struct[0][name_] = value
return
elif name_ in self.params_list:
self.struct[0][name_] = value
return
elif name_ in self.inputs_ini_list:
self.struct[0][name_] = value
return
else:
print(f'Input or parameter {name_} not found.')
def set_values(self,dictionary):
for item in dictionary:
self.set_value(item,dictionary[item])
def report_x(self,value_format='5.2f'):
for item in self.x_list:
print(f'{item:5s} = {self.get_value(item):5.2f}')
def report_y(self,value_format='5.2f'):
for item in self.y_run_list:
print(f'{item:5s} = {self.get_value(item):5.2f}')
def report_u(self,value_format='5.2f'):
for item in self.inputs_run_list:
print(f'{item:5s} = {self.get_value(item):5.2f}')
def report_z(self,value_format='5.2f'):
for item in self.outputs_list:
print(f'{item:5s} = {self.get_value(item):5.2f}')
def report_params(self,value_format='5.2f'):
for item in self.params_list:
print(f'{item:5s} = {self.get_value(item):5.2f}')
    def get_x(self):
        """Return the dynamic-state column vector ``x`` (shape (N_x, 1)) from the record array."""
        return self.struct[0].x
    def ss(self):
        """Solve the steady state with the compiled `ssate` kernel (defined
        elsewhere in this file), seeded by ``self.xy_prev``, then copy the
        ini-side solution into the run-side views."""
        ssate(self.struct,self.xy_prev.reshape(len(self.xy_prev),1))
        ## y_ini to y_run
        self.struct[0].y_run = self.struct[0].y_ini
        ## y_ini to u_run
        for item in self.yini2urun:
            self.struct[0][item] = self.struct[0].y_ini[self.y_ini_list.index(item)]
        ## u_ini to y_run
        for item in self.uini2yrun:
            self.struct[0].y_run[self.y_run_list.index(item)] = self.struct[0][item]
@numba.njit(cache=True)
def ini(struct,mode):
# Parameters:
S_base = struct[0].S_base
g_R01_R02 = struct[0].g_R01_R02
b_R01_R02 = struct[0].b_R01_R02
bs_R01_R02 = struct[0].bs_R01_R02
g_R02_R03 = struct[0].g_R02_R03
b_R02_R03 = struct[0].b_R02_R03
bs_R02_R03 = struct[0].bs_R02_R03
g_R03_R04 = struct[0].g_R03_R04
b_R03_R04 = struct[0].b_R03_R04
bs_R03_R04 = struct[0].bs_R03_R04
g_R04_R05 = struct[0].g_R04_R05
b_R04_R05 = struct[0].b_R04_R05
bs_R04_R05 = struct[0].bs_R04_R05
g_R05_R06 = struct[0].g_R05_R06
b_R05_R06 = struct[0].b_R05_R06
bs_R05_R06 = struct[0].bs_R05_R06
g_R06_R07 = struct[0].g_R06_R07
b_R06_R07 = struct[0].b_R06_R07
bs_R06_R07 = struct[0].bs_R06_R07
g_R07_R08 = struct[0].g_R07_R08
b_R07_R08 = struct[0].b_R07_R08
bs_R07_R08 = struct[0].bs_R07_R08
g_R08_R09 = struct[0].g_R08_R09
b_R08_R09 = struct[0].b_R08_R09
bs_R08_R09 = struct[0].bs_R08_R09
g_R09_R10 = struct[0].g_R09_R10
b_R09_R10 = struct[0].b_R09_R10
bs_R09_R10 = struct[0].bs_R09_R10
g_R03_R11 = struct[0].g_R03_R11
b_R03_R11 = struct[0].b_R03_R11
bs_R03_R11 = struct[0].bs_R03_R11
g_R04_R12 = struct[0].g_R04_R12
b_R04_R12 = struct[0].b_R04_R12
bs_R04_R12 = struct[0].bs_R04_R12
g_R12_R13 = struct[0].g_R12_R13
b_R12_R13 = struct[0].b_R12_R13
bs_R12_R13 = struct[0].bs_R12_R13
g_R13_R14 = struct[0].g_R13_R14
b_R13_R14 = struct[0].b_R13_R14
bs_R13_R14 = struct[0].bs_R13_R14
g_R14_R15 = struct[0].g_R14_R15
b_R14_R15 = struct[0].b_R14_R15
bs_R14_R15 = struct[0].bs_R14_R15
g_R06_R16 = struct[0].g_R06_R16
b_R06_R16 = struct[0].b_R06_R16
bs_R06_R16 = struct[0].bs_R06_R16
g_R09_R17 = struct[0].g_R09_R17
b_R09_R17 = struct[0].b_R09_R17
bs_R09_R17 = struct[0].bs_R09_R17
g_R10_R18 = struct[0].g_R10_R18
b_R10_R18 = struct[0].b_R10_R18
bs_R10_R18 = struct[0].bs_R10_R18
U_R01_n = struct[0].U_R01_n
U_R02_n = struct[0].U_R02_n
U_R03_n = struct[0].U_R03_n
U_R04_n = struct[0].U_R04_n
U_R05_n = struct[0].U_R05_n
U_R06_n = struct[0].U_R06_n
U_R07_n = struct[0].U_R07_n
U_R08_n = struct[0].U_R08_n
U_R09_n = struct[0].U_R09_n
U_R10_n = struct[0].U_R10_n
U_R11_n = struct[0].U_R11_n
U_R12_n = struct[0].U_R12_n
U_R13_n = struct[0].U_R13_n
U_R14_n = struct[0].U_R14_n
U_R15_n = struct[0].U_R15_n
U_R16_n = struct[0].U_R16_n
U_R17_n = struct[0].U_R17_n
U_R18_n = struct[0].U_R18_n
S_n_R10 = struct[0].S_n_R10
H_R10 = struct[0].H_R10
Omega_b_R10 = struct[0].Omega_b_R10
T1d0_R10 = struct[0].T1d0_R10
T1q0_R10 = struct[0].T1q0_R10
X_d_R10 = struct[0].X_d_R10
X_q_R10 = struct[0].X_q_R10
X1d_R10 = struct[0].X1d_R10
X1q_R10 = struct[0].X1q_R10
D_R10 = struct[0].D_R10
R_a_R10 = struct[0].R_a_R10
K_delta_R10 = struct[0].K_delta_R10
K_a_R10 = struct[0].K_a_R10
K_ai_R10 = struct[0].K_ai_R10
T_r_R10 = struct[0].T_r_R10
Droop_R10 = struct[0].Droop_R10
T_m_R10 = struct[0].T_m_R10
S_n_R14 = struct[0].S_n_R14
H_R14 = struct[0].H_R14
Omega_b_R14 = struct[0].Omega_b_R14
T1d0_R14 = struct[0].T1d0_R14
T1q0_R14 = struct[0].T1q0_R14
X_d_R14 = struct[0].X_d_R14
X_q_R14 = struct[0].X_q_R14
X1d_R14 = struct[0].X1d_R14
X1q_R14 = struct[0].X1q_R14
D_R14 = struct[0].D_R14
R_a_R14 = struct[0].R_a_R14
K_delta_R14 = struct[0].K_delta_R14
K_a_R14 = struct[0].K_a_R14
K_ai_R14 = struct[0].K_ai_R14
T_r_R14 = struct[0].T_r_R14
Droop_R14 = struct[0].Droop_R14
T_m_R14 = struct[0].T_m_R14
K_sec_R10 = struct[0].K_sec_R10
K_sec_R14 = struct[0].K_sec_R14
# Inputs:
P_R01 = struct[0].P_R01
Q_R01 = struct[0].Q_R01
P_R02 = struct[0].P_R02
Q_R02 = struct[0].Q_R02
P_R03 = struct[0].P_R03
Q_R03 = struct[0].Q_R03
P_R04 = struct[0].P_R04
Q_R04 = struct[0].Q_R04
P_R05 = struct[0].P_R05
Q_R05 = struct[0].Q_R05
P_R06 = struct[0].P_R06
Q_R06 = struct[0].Q_R06
P_R07 = struct[0].P_R07
Q_R07 = struct[0].Q_R07
P_R08 = struct[0].P_R08
Q_R08 = struct[0].Q_R08
P_R09 = struct[0].P_R09
Q_R09 = struct[0].Q_R09
P_R10 = struct[0].P_R10
Q_R10 = struct[0].Q_R10
P_R11 = struct[0].P_R11
Q_R11 = struct[0].Q_R11
P_R12 = struct[0].P_R12
Q_R12 = struct[0].Q_R12
P_R13 = struct[0].P_R13
Q_R13 = struct[0].Q_R13
P_R14 = struct[0].P_R14
Q_R14 = struct[0].Q_R14
P_R15 = struct[0].P_R15
Q_R15 = struct[0].Q_R15
P_R16 = struct[0].P_R16
Q_R16 = struct[0].Q_R16
P_R17 = struct[0].P_R17
Q_R17 = struct[0].Q_R17
P_R18 = struct[0].P_R18
Q_R18 = struct[0].Q_R18
v_ref_R10 = struct[0].v_ref_R10
v_pss_R10 = struct[0].v_pss_R10
p_c_R10 = struct[0].p_c_R10
v_ref_R14 = struct[0].v_ref_R14
v_pss_R14 = struct[0].v_pss_R14
p_c_R14 = struct[0].p_c_R14
# Dynamical states:
delta_R10 = struct[0].x[0,0]
omega_R10 = struct[0].x[1,0]
e1q_R10 = struct[0].x[2,0]
e1d_R10 = struct[0].x[3,0]
v_c_R10 = struct[0].x[4,0]
xi_v_R10 = struct[0].x[5,0]
p_m_R10 = struct[0].x[6,0]
delta_R14 = struct[0].x[7,0]
omega_R14 = struct[0].x[8,0]
e1q_R14 = struct[0].x[9,0]
e1d_R14 = struct[0].x[10,0]
v_c_R14 = struct[0].x[11,0]
xi_v_R14 = struct[0].x[12,0]
p_m_R14 = struct[0].x[13,0]
xi_freq = struct[0].x[14,0]
# Algebraic states:
V_R01 = struct[0].y_ini[0,0]
theta_R01 = struct[0].y_ini[1,0]
V_R02 = struct[0].y_ini[2,0]
theta_R02 = struct[0].y_ini[3,0]
V_R03 = struct[0].y_ini[4,0]
theta_R03 = struct[0].y_ini[5,0]
V_R04 = struct[0].y_ini[6,0]
theta_R04 = struct[0].y_ini[7,0]
V_R05 = struct[0].y_ini[8,0]
theta_R05 = struct[0].y_ini[9,0]
V_R06 = struct[0].y_ini[10,0]
theta_R06 = struct[0].y_ini[11,0]
V_R07 = struct[0].y_ini[12,0]
theta_R07 = struct[0].y_ini[13,0]
V_R08 = struct[0].y_ini[14,0]
theta_R08 = struct[0].y_ini[15,0]
V_R09 = struct[0].y_ini[16,0]
theta_R09 = struct[0].y_ini[17,0]
V_R10 = struct[0].y_ini[18,0]
theta_R10 = struct[0].y_ini[19,0]
V_R11 = struct[0].y_ini[20,0]
theta_R11 = struct[0].y_ini[21,0]
V_R12 = struct[0].y_ini[22,0]
theta_R12 = struct[0].y_ini[23,0]
V_R13 = struct[0].y_ini[24,0]
theta_R13 = struct[0].y_ini[25,0]
V_R14 = struct[0].y_ini[26,0]
theta_R14 = struct[0].y_ini[27,0]
V_R15 = struct[0].y_ini[28,0]
theta_R15 = struct[0].y_ini[29,0]
V_R16 = struct[0].y_ini[30,0]
theta_R16 = struct[0].y_ini[31,0]
V_R17 = struct[0].y_ini[32,0]
theta_R17 = struct[0].y_ini[33,0]
V_R18 = struct[0].y_ini[34,0]
theta_R18 = struct[0].y_ini[35,0]
i_d_R10 = struct[0].y_ini[36,0]
i_q_R10 = struct[0].y_ini[37,0]
p_g_R10_1 = struct[0].y_ini[38,0]
q_g_R10_1 = struct[0].y_ini[39,0]
v_f_R10 = struct[0].y_ini[40,0]
p_m_ref_R10 = struct[0].y_ini[41,0]
i_d_R14 = struct[0].y_ini[42,0]
i_q_R14 = struct[0].y_ini[43,0]
p_g_R14_1 = struct[0].y_ini[44,0]
q_g_R14_1 = struct[0].y_ini[45,0]
v_f_R14 = struct[0].y_ini[46,0]
p_m_ref_R14 = struct[0].y_ini[47,0]
omega_coi = struct[0].y_ini[48,0]
p_r_R10 = struct[0].y_ini[49,0]
p_r_R14 = struct[0].y_ini[50,0]
# Differential equations:
if mode == 2:
struct[0].f[0,0] = -K_delta_R10*delta_R10 + Omega_b_R10*(omega_R10 - omega_coi)
struct[0].f[1,0] = (-D_R10*(omega_R10 - omega_coi) - i_d_R10*(R_a_R10*i_d_R10 + V_R10*sin(delta_R10 - theta_R10)) - i_q_R10*(R_a_R10*i_q_R10 + V_R10*cos(delta_R10 - theta_R10)) + p_m_R10)/(2*H_R10)
struct[0].f[2,0] = (-e1q_R10 - i_d_R10*(-X1d_R10 + X_d_R10) + v_f_R10)/T1d0_R10
struct[0].f[3,0] = (-e1d_R10 + i_q_R10*(-X1q_R10 + X_q_R10))/T1q0_R10
struct[0].f[4,0] = (V_R10 - v_c_R10)/T_r_R10
struct[0].f[5,0] = -V_R10 + v_ref_R10
struct[0].f[6,0] = (-p_m_R10 + p_m_ref_R10)/T_m_R10
struct[0].f[7,0] = -K_delta_R14*delta_R14 + Omega_b_R14*(omega_R14 - omega_coi)
struct[0].f[8,0] = (-D_R14*(omega_R14 - omega_coi) - i_d_R14*(R_a_R14*i_d_R14 + V_R14*sin(delta_R14 - theta_R14)) - i_q_R14*(R_a_R14*i_q_R14 + V_R14*cos(delta_R14 - theta_R14)) + p_m_R14)/(2*H_R14)
struct[0].f[9,0] = (-e1q_R14 - i_d_R14*(-X1d_R14 + X_d_R14) + v_f_R14)/T1d0_R14
struct[0].f[10,0] = (-e1d_R14 + i_q_R14*(-X1q_R14 + X_q_R14))/T1q0_R14
struct[0].f[11,0] = (V_R14 - v_c_R14)/T_r_R14
struct[0].f[12,0] = -V_R14 + v_ref_R14
struct[0].f[13,0] = (-p_m_R14 + p_m_ref_R14)/T_m_R14
struct[0].f[14,0] = 1 - omega_coi
# Algebraic equations:
if mode == 3:
struct[0].g[:,:] = np.ascontiguousarray(struct[0].Gy_ini) @ np.ascontiguousarray(struct[0].y_ini)
struct[0].g[0,0] = -P_R01/S_base + V_R01**2*g_R01_R02 + V_R01*V_R02*(-b_R01_R02*sin(theta_R01 - theta_R02) - g_R01_R02*cos(theta_R01 - theta_R02))
struct[0].g[1,0] = -Q_R01/S_base + V_R01**2*(-b_R01_R02 - bs_R01_R02/2) + V_R01*V_R02*(b_R01_R02*cos(theta_R01 - theta_R02) - g_R01_R02*sin(theta_R01 - theta_R02))
struct[0].g[2,0] = -P_R02/S_base + V_R01*V_R02*(b_R01_R02*sin(theta_R01 - theta_R02) - g_R01_R02*cos(theta_R01 - theta_R02)) + V_R02**2*(g_R01_R02 + g_R02_R03) + V_R02*V_R03*(-b_R02_R03*sin(theta_R02 - theta_R03) - g_R02_R03*cos(theta_R02 - theta_R03))
struct[0].g[3,0] = -Q_R02/S_base + V_R01*V_R02*(b_R01_R02*cos(theta_R01 - theta_R02) + g_R01_R02*sin(theta_R01 - theta_R02)) + V_R02**2*(-b_R01_R02 - b_R02_R03 - bs_R01_R02/2 - bs_R02_R03/2) + V_R02*V_R03*(b_R02_R03*cos(theta_R02 - theta_R03) - g_R02_R03*sin(theta_R02 - theta_R03))
struct[0].g[4,0] = -P_R03/S_base + V_R02*V_R03*(b_R02_R03*sin(theta_R02 - theta_R03) - g_R02_R03*cos(theta_R02 - theta_R03)) + V_R03**2*(g_R02_R03 + g_R03_R04 + g_R03_R11) + V_R03*V_R04*(-b_R03_R04*sin(theta_R03 - theta_R04) - g_R03_R04*cos(theta_R03 - theta_R04)) + V_R03*V_R11*(-b_R03_R11*sin(theta_R03 - theta_R11) - g_R03_R11*cos(theta_R03 - theta_R11))
struct[0].g[5,0] = -Q_R03/S_base + V_R02*V_R03*(b_R02_R03*cos(theta_R02 - theta_R03) + g_R02_R03*sin(theta_R02 - theta_R03)) + V_R03**2*(-b_R02_R03 - b_R03_R04 - b_R03_R11 - bs_R02_R03/2 - bs_R03_R04/2 - bs_R03_R11/2) + V_R03*V_R04*(b_R03_R04*cos(theta_R03 - theta_R04) - g_R03_R04*sin(theta_R03 - theta_R04)) + V_R03*V_R11*(b_R03_R11*cos(theta_R03 - theta_R11) - g_R03_R11*sin(theta_R03 - theta_R11))
struct[0].g[6,0] = -P_R04/S_base + V_R03*V_R04*(b_R03_R04*sin(theta_R03 - theta_R04) - g_R03_R04*cos(theta_R03 - theta_R04)) + V_R04**2*(g_R03_R04 + g_R04_R05 + g_R04_R12) + V_R04*V_R05*(-b_R04_R05*sin(theta_R04 - theta_R05) - g_R04_R05*cos(theta_R04 - theta_R05)) + V_R04*V_R12*(-b_R04_R12*sin(theta_R04 - theta_R12) - g_R04_R12*cos(theta_R04 - theta_R12))
struct[0].g[7,0] = -Q_R04/S_base + V_R03*V_R04*(b_R03_R04*cos(theta_R03 - theta_R04) + g_R03_R04*sin(theta_R03 - theta_R04)) + V_R04**2*(-b_R03_R04 - b_R04_R05 - b_R04_R12 - bs_R03_R04/2 - bs_R04_R05/2 - bs_R04_R12/2) + V_R04*V_R05*(b_R04_R05*cos(theta_R04 - theta_R05) - g_R04_R05*sin(theta_R04 - theta_R05)) + V_R04*V_R12*(b_R04_R12*cos(theta_R04 - theta_R12) - g_R04_R12*sin(theta_R04 - theta_R12))
struct[0].g[8,0] = -P_R05/S_base + V_R04*V_R05*(b_R04_R05*sin(theta_R04 - theta_R05) - g_R04_R05*cos(theta_R04 - theta_R05)) + V_R05**2*(g_R04_R05 + g_R05_R06) + V_R05*V_R06*(-b_R05_R06*sin(theta_R05 - theta_R06) - g_R05_R06*cos(theta_R05 - theta_R06))
struct[0].g[9,0] = -Q_R05/S_base + V_R04*V_R05*(b_R04_R05*cos(theta_R04 - theta_R05) + g_R04_R05*sin(theta_R04 - theta_R05)) + V_R05**2*(-b_R04_R05 - b_R05_R06 - bs_R04_R05/2 - bs_R05_R06/2) + V_R05*V_R06*(b_R05_R06*cos(theta_R05 - theta_R06) - g_R05_R06*sin(theta_R05 - theta_R06))
struct[0].g[10,0] = -P_R06/S_base + V_R05*V_R06*(b_R05_R06*sin(theta_R05 - theta_R06) - g_R05_R06*cos(theta_R05 - theta_R06)) + V_R06**2*(g_R05_R06 + g_R06_R07 + g_R06_R16) + V_R06*V_R07*(-b_R06_R07*sin(theta_R06 - theta_R07) - g_R06_R07*cos(theta_R06 - theta_R07)) + V_R06*V_R16*(-b_R06_R16*sin(theta_R06 - theta_R16) - g_R06_R16*cos(theta_R06 - theta_R16))
struct[0].g[11,0] = -Q_R06/S_base + V_R05*V_R06*(b_R05_R06*cos(theta_R05 - theta_R06) + g_R05_R06*sin(theta_R05 - theta_R06)) + V_R06**2*(-b_R05_R06 - b_R06_R07 - b_R06_R16 - bs_R05_R06/2 - bs_R06_R07/2 - bs_R06_R16/2) + V_R06*V_R07*(b_R06_R07*cos(theta_R06 - theta_R07) - g_R06_R07*sin(theta_R06 - theta_R07)) + V_R06*V_R16*(b_R06_R16*cos(theta_R06 - theta_R16) - g_R06_R16*sin(theta_R06 - theta_R16))
struct[0].g[12,0] = -P_R07/S_base + V_R06*V_R07*(b_R06_R07*sin(theta_R06 - theta_R07) - g_R06_R07*cos(theta_R06 - theta_R07)) + V_R07**2*(g_R06_R07 + g_R07_R08) + V_R07*V_R08*(-b_R07_R08*sin(theta_R07 - theta_R08) - g_R07_R08*cos(theta_R07 - theta_R08))
struct[0].g[13,0] = -Q_R07/S_base + V_R06*V_R07*(b_R06_R07*cos(theta_R06 - theta_R07) + g_R06_R07*sin(theta_R06 - theta_R07)) + V_R07**2*(-b_R06_R07 - b_R07_R08 - bs_R06_R07/2 - bs_R07_R08/2) + V_R07*V_R08*(b_R07_R08*cos(theta_R07 - theta_R08) - g_R07_R08*sin(theta_R07 - theta_R08))
struct[0].g[14,0] = -P_R08/S_base + V_R07*V_R08*(b_R07_R08*sin(theta_R07 - theta_R08) - g_R07_R08*cos(theta_R07 - theta_R08)) + V_R08**2*(g_R07_R08 + g_R08_R09) + V_R08*V_R09*(-b_R08_R09*sin(theta_R08 - theta_R09) - g_R08_R09*cos(theta_R08 - theta_R09))
struct[0].g[15,0] = -Q_R08/S_base + V_R07*V_R08*(b_R07_R08*cos(theta_R07 - theta_R08) + g_R07_R08*sin(theta_R07 - theta_R08)) + V_R08**2*(-b_R07_R08 - b_R08_R09 - bs_R07_R08/2 - bs_R08_R09/2) + V_R08*V_R09*(b_R08_R09*cos(theta_R08 - theta_R09) - g_R08_R09*sin(theta_R08 - theta_R09))
struct[0].g[16,0] = -P_R09/S_base + V_R08*V_R09*(b_R08_R09*sin(theta_R08 - theta_R09) - g_R08_R09*cos(theta_R08 - theta_R09)) + V_R09**2*(g_R08_R09 + g_R09_R10 + g_R09_R17) + V_R09*V_R10*(-b_R09_R10*sin(theta_R09 - theta_R10) - g_R09_R10*cos(theta_R09 - theta_R10)) + V_R09*V_R17*(-b_R09_R17*sin(theta_R09 - theta_R17) - g_R09_R17*cos(theta_R09 - theta_R17))
struct[0].g[17,0] = -Q_R09/S_base + V_R08*V_R09*(b_R08_R09*cos(theta_R08 - theta_R09) + g_R08_R09*sin(theta_R08 - theta_R09)) + V_R09**2*(-b_R08_R09 - b_R09_R10 - b_R09_R17 - bs_R08_R09/2 - bs_R09_R10/2 - bs_R09_R17/2) + V_R09*V_R10*(b_R09_R10*cos(theta_R09 - theta_R10) - g_R09_R10*sin(theta_R09 - theta_R10)) + V_R09*V_R17*(b_R09_R17*cos(theta_R09 - theta_R17) - g_R09_R17*sin(theta_R09 - theta_R17))
struct[0].g[18,0] = -P_R10/S_base + V_R09*V_R10*(b_R09_R10*sin(theta_R09 - theta_R10) - g_R09_R10*cos(theta_R09 - theta_R10)) + V_R10**2*(g_R09_R10 + g_R10_R18) + V_R10*V_R18*(-b_R10_R18*sin(theta_R10 - theta_R18) - g_R10_R18*cos(theta_R10 - theta_R18)) - S_n_R10*p_g_R10_1/S_base
struct[0].g[19,0] = -Q_R10/S_base + V_R09*V_R10*(b_R09_R10*cos(theta_R09 - theta_R10) + g_R09_R10*sin(theta_R09 - theta_R10)) + V_R10**2*(-b_R09_R10 - b_R10_R18 - bs_R09_R10/2 - bs_R10_R18/2) + V_R10*V_R18*(b_R10_R18*cos(theta_R10 - theta_R18) - g_R10_R18*sin(theta_R10 - theta_R18)) - S_n_R10*q_g_R10_1/S_base
struct[0].g[20,0] = -P_R11/S_base + V_R03*V_R11*(b_R03_R11*sin(theta_R03 - theta_R11) - g_R03_R11*cos(theta_R03 - theta_R11)) + V_R11**2*g_R03_R11
struct[0].g[21,0] = -Q_R11/S_base + V_R03*V_R11*(b_R03_R11*cos(theta_R03 - theta_R11) + g_R03_R11*sin(theta_R03 - theta_R11)) + V_R11**2*(-b_R03_R11 - bs_R03_R11/2)
struct[0].g[22,0] = -P_R12/S_base + V_R04*V_R12*(b_R04_R12*sin(theta_R04 - theta_R12) - g_R04_R12*cos(theta_R04 - theta_R12)) + V_R12**2*(g_R04_R12 + g_R12_R13) + V_R12*V_R13*(-b_R12_R13*sin(theta_R12 - theta_R13) - g_R12_R13*cos(theta_R12 - theta_R13))
struct[0].g[23,0] = -Q_R12/S_base + V_R04*V_R12*(b_R04_R12*cos(theta_R04 - theta_R12) + g_R04_R12*sin(theta_R04 - theta_R12)) + V_R12**2*(-b_R04_R12 - b_R12_R13 - bs_R04_R12/2 - bs_R12_R13/2) + V_R12*V_R13*(b_R12_R13*cos(theta_R12 - theta_R13) - g_R12_R13*sin(theta_R12 - theta_R13))
struct[0].g[24,0] = -P_R13/S_base + V_R12*V_R13*(b_R12_R13*sin(theta_R12 - theta_R13) - g_R12_R13*cos(theta_R12 - theta_R13)) + V_R13**2*(g_R12_R13 + g_R13_R14) + V_R13*V_R14*(-b_R13_R14*sin(theta_R13 - theta_R14) - g_R13_R14*cos(theta_R13 - theta_R14))
struct[0].g[25,0] = -Q_R13/S_base + V_R12*V_R13*(b_R12_R13*cos(theta_R12 - theta_R13) + g_R12_R13*sin(theta_R12 - theta_R13)) + V_R13**2*(-b_R12_R13 - b_R13_R14 - bs_R12_R13/2 - bs_R13_R14/2) + V_R13*V_R14*(b_R13_R14*cos(theta_R13 - theta_R14) - g_R13_R14*sin(theta_R13 - theta_R14))
struct[0].g[26,0] = -P_R14/S_base + V_R13*V_R14*(b_R13_R14*sin(theta_R13 - theta_R14) - g_R13_R14*cos(theta_R13 - theta_R14)) + V_R14**2*(g_R13_R14 + g_R14_R15) + V_R14*V_R15*(-b_R14_R15*sin(theta_R14 - theta_R15) - g_R14_R15*cos(theta_R14 - theta_R15)) - S_n_R14*p_g_R14_1/S_base
struct[0].g[27,0] = -Q_R14/S_base + V_R13*V_R14*(b_R13_R14*cos(theta_R13 - theta_R14) + g_R13_R14*sin(theta_R13 - theta_R14)) + V_R14**2*(-b_R13_R14 - b_R14_R15 - bs_R13_R14/2 - bs_R14_R15/2) + V_R14*V_R15*(b_R14_R15*cos(theta_R14 - theta_R15) - g_R14_R15*sin(theta_R14 - theta_R15)) - S_n_R14*q_g_R14_1/S_base
struct[0].g[28,0] = -P_R15/S_base + V_R14*V_R15*(b_R14_R15*sin(theta_R14 - theta_R15) - g_R14_R15*cos(theta_R14 - theta_R15)) + V_R15**2*g_R14_R15
struct[0].g[29,0] = -Q_R15/S_base + V_R14*V_R15*(b_R14_R15*cos(theta_R14 - theta_R15) + g_R14_R15*sin(theta_R14 - theta_R15)) + V_R15**2*(-b_R14_R15 - bs_R14_R15/2)
struct[0].g[30,0] = -P_R16/S_base + V_R06*V_R16*(b_R06_R16*sin(theta_R06 - theta_R16) - g_R06_R16*cos(theta_R06 - theta_R16)) + V_R16**2*g_R06_R16
struct[0].g[31,0] = -Q_R16/S_base + V_R06*V_R16*(b_R06_R16*cos(theta_R06 - theta_R16) + g_R06_R16*sin(theta_R06 - theta_R16)) + V_R16**2*(-b_R06_R16 - bs_R06_R16/2)
struct[0].g[32,0] = -P_R17/S_base + V_R09*V_R17*(b_R09_R17*sin(theta_R09 - theta_R17) - g_R09_R17*cos(theta_R09 - theta_R17)) + V_R17**2*g_R09_R17
struct[0].g[33,0] = -Q_R17/S_base + V_R09*V_R17*(b_R09_R17*cos(theta_R09 - theta_R17) + g_R09_R17*sin(theta_R09 - theta_R17)) + V_R17**2*(-b_R09_R17 - bs_R09_R17/2)
struct[0].g[34,0] = -P_R18/S_base + V_R10*V_R18*(b_R10_R18*sin(theta_R10 - theta_R18) - g_R10_R18*cos(theta_R10 - theta_R18)) + V_R18**2*g_R10_R18
struct[0].g[35,0] = -Q_R18/S_base + V_R10*V_R18*(b_R10_R18*cos(theta_R10 - theta_R18) + g_R10_R18*sin(theta_R10 - theta_R18)) + V_R18**2*(-b_R10_R18 - bs_R10_R18/2)
struct[0].g[36,0] = R_a_R10*i_q_R10 + V_R10*cos(delta_R10 - theta_R10) + X1d_R10*i_d_R10 - e1q_R10
struct[0].g[37,0] = R_a_R10*i_d_R10 + V_R10*sin(delta_R10 - theta_R10) - X1q_R10*i_q_R10 - e1d_R10
struct[0].g[38,0] = V_R10*i_d_R10*sin(delta_R10 - theta_R10) + V_R10*i_q_R10*cos(delta_R10 - theta_R10) - p_g_R10_1
struct[0].g[39,0] = V_R10*i_d_R10*cos(delta_R10 - theta_R10) - V_R10*i_q_R10*sin(delta_R10 - theta_R10) - q_g_R10_1
struct[0].g[40,0] = K_a_R10*(-v_c_R10 + v_pss_R10 + v_ref_R10) + K_ai_R10*xi_v_R10 - v_f_R10
struct[0].g[41,0] = p_c_R10 - p_m_ref_R10 + p_r_R10 - (omega_R10 - 1)/Droop_R10
struct[0].g[42,0] = R_a_R14*i_q_R14 + V_R14*cos(delta_R14 - theta_R14) + X1d_R14*i_d_R14 - e1q_R14
struct[0].g[43,0] = R_a_R14*i_d_R14 + V_R14*sin(delta_R14 - theta_R14) - X1q_R14*i_q_R14 - e1d_R14
struct[0].g[44,0] = V_R14*i_d_R14*sin(delta_R14 - theta_R14) + V_R14*i_q_R14*cos(delta_R14 - theta_R14) - p_g_R14_1
struct[0].g[45,0] = V_R14*i_d_R14*cos(delta_R14 - theta_R14) - V_R14*i_q_R14*sin(delta_R14 - theta_R14) - q_g_R14_1
struct[0].g[46,0] = K_a_R14*(-v_c_R14 + v_pss_R14 + v_ref_R14) + K_ai_R14*xi_v_R14 - v_f_R14
struct[0].g[47,0] = p_c_R14 - p_m_ref_R14 + p_r_R14 - (omega_R14 - 1)/Droop_R14
struct[0].g[48,0] = omega_R10/2 + omega_R14/2 - omega_coi
struct[0].g[49,0] = K_sec_R10*xi_freq/2 - p_r_R10
struct[0].g[50,0] = K_sec_R14*xi_freq/2 - p_r_R14
# Outputs:
if mode == 3:
struct[0].h[0,0] = V_R01
struct[0].h[1,0] = V_R02
struct[0].h[2,0] = V_R03
struct[0].h[3,0] = V_R04
struct[0].h[4,0] = V_R05
struct[0].h[5,0] = V_R06
struct[0].h[6,0] = V_R07
struct[0].h[7,0] = V_R08
struct[0].h[8,0] = V_R09
struct[0].h[9,0] = V_R10
struct[0].h[10,0] = V_R11
struct[0].h[11,0] = V_R12
struct[0].h[12,0] = V_R13
struct[0].h[13,0] = V_R14
struct[0].h[14,0] = V_R15
struct[0].h[15,0] = V_R16
struct[0].h[16,0] = V_R17
struct[0].h[17,0] = V_R18
if mode == 10:
struct[0].Fx_ini[0,0] = -K_delta_R10
struct[0].Fx_ini[0,1] = Omega_b_R10
struct[0].Fx_ini[1,0] = (-V_R10*i_d_R10*cos(delta_R10 - theta_R10) + V_R10*i_q_R10*sin(delta_R10 - theta_R10))/(2*H_R10)
struct[0].Fx_ini[1,1] = -D_R10/(2*H_R10)
struct[0].Fx_ini[1,6] = 1/(2*H_R10)
struct[0].Fx_ini[2,2] = -1/T1d0_R10
struct[0].Fx_ini[3,3] = -1/T1q0_R10
struct[0].Fx_ini[4,4] = -1/T_r_R10
struct[0].Fx_ini[6,6] = -1/T_m_R10
struct[0].Fx_ini[7,7] = -K_delta_R14
struct[0].Fx_ini[7,8] = Omega_b_R14
struct[0].Fx_ini[8,7] = (-V_R14*i_d_R14*cos(delta_R14 - theta_R14) + V_R14*i_q_R14*sin(delta_R14 - theta_R14))/(2*H_R14)
struct[0].Fx_ini[8,8] = -D_R14/(2*H_R14)
struct[0].Fx_ini[8,13] = 1/(2*H_R14)
struct[0].Fx_ini[9,9] = -1/T1d0_R14
struct[0].Fx_ini[10,10] = -1/T1q0_R14
struct[0].Fx_ini[11,11] = -1/T_r_R14
struct[0].Fx_ini[13,13] = -1/T_m_R14
if mode == 11:
struct[0].Fy_ini[0,48] = -Omega_b_R10
struct[0].Fy_ini[1,18] = (-i_d_R10*sin(delta_R10 - theta_R10) - i_q_R10*cos(delta_R10 - theta_R10))/(2*H_R10)
struct[0].Fy_ini[1,19] = (V_R10*i_d_R10*cos(delta_R10 - theta_R10) - V_R10*i_q_R10*sin(delta_R10 - theta_R10))/(2*H_R10)
struct[0].Fy_ini[1,36] = (-2*R_a_R10*i_d_R10 - V_R10*sin(delta_R10 - theta_R10))/(2*H_R10)
struct[0].Fy_ini[1,37] = (-2*R_a_R10*i_q_R10 - V_R10*cos(delta_R10 - theta_R10))/(2*H_R10)
struct[0].Fy_ini[1,48] = D_R10/(2*H_R10)
struct[0].Fy_ini[2,36] = (X1d_R10 - X_d_R10)/T1d0_R10
struct[0].Fy_ini[2,40] = 1/T1d0_R10
struct[0].Fy_ini[3,37] = (-X1q_R10 + X_q_R10)/T1q0_R10
struct[0].Fy_ini[4,18] = 1/T_r_R10
struct[0].Fy_ini[5,18] = -1
struct[0].Fy_ini[6,41] = 1/T_m_R10
struct[0].Fy_ini[7,48] = -Omega_b_R14
struct[0].Fy_ini[8,26] = (-i_d_R14*sin(delta_R14 - theta_R14) - i_q_R14*cos(delta_R14 - theta_R14))/(2*H_R14)
struct[0].Fy_ini[8,27] = (V_R14*i_d_R14*cos(delta_R14 - theta_R14) - V_R14*i_q_R14*sin(delta_R14 - theta_R14))/(2*H_R14)
struct[0].Fy_ini[8,42] = (-2*R_a_R14*i_d_R14 - V_R14*sin(delta_R14 - theta_R14))/(2*H_R14)
struct[0].Fy_ini[8,43] = (-2*R_a_R14*i_q_R14 - V_R14*cos(delta_R14 - theta_R14))/(2*H_R14)
struct[0].Fy_ini[8,48] = D_R14/(2*H_R14)
struct[0].Fy_ini[9,42] = (X1d_R14 - X_d_R14)/T1d0_R14
struct[0].Fy_ini[9,46] = 1/T1d0_R14
struct[0].Fy_ini[10,43] = (-X1q_R14 + X_q_R14)/T1q0_R14
struct[0].Fy_ini[11,26] = 1/T_r_R14
struct[0].Fy_ini[12,26] = -1
struct[0].Fy_ini[13,47] = 1/T_m_R14
struct[0].Fy_ini[14,48] = -1
struct[0].Gx_ini[36,0] = -V_R10*sin(delta_R10 - theta_R10)
struct[0].Gx_ini[36,2] = -1
struct[0].Gx_ini[37,0] = V_R10*cos(delta_R10 - theta_R10)
struct[0].Gx_ini[37,3] = -1
struct[0].Gx_ini[38,0] = V_R10*i_d_R10*cos(delta_R10 - theta_R10) - V_R10*i_q_R10*sin(delta_R10 - theta_R10)
struct[0].Gx_ini[39,0] = -V_R10*i_d_R10*sin(delta_R10 - theta_R10) - V_R10*i_q_R10*cos(delta_R10 - theta_R10)
struct[0].Gx_ini[40,4] = -K_a_R10
struct[0].Gx_ini[40,5] = K_ai_R10
struct[0].Gx_ini[41,1] = -1/Droop_R10
struct[0].Gx_ini[42,7] = -V_R14*sin(delta_R14 - theta_R14)
struct[0].Gx_ini[42,9] = -1
struct[0].Gx_ini[43,7] = V_R14*cos(delta_R14 - theta_R14)
struct[0].Gx_ini[43,10] = -1
struct[0].Gx_ini[44,7] = V_R14*i_d_R14*cos(delta_R14 - theta_R14) - V_R14*i_q_R14*sin(delta_R14 - theta_R14)
struct[0].Gx_ini[45,7] = -V_R14*i_d_R14*sin(delta_R14 - theta_R14) - V_R14*i_q_R14*cos(delta_R14 - theta_R14)
struct[0].Gx_ini[46,11] = -K_a_R14
struct[0].Gx_ini[46,12] = K_ai_R14
struct[0].Gx_ini[47,8] = -1/Droop_R14
struct[0].Gx_ini[48,1] = 1/2
struct[0].Gx_ini[48,8] = 1/2
struct[0].Gx_ini[49,14] = K_sec_R10/2
struct[0].Gx_ini[50,14] = K_sec_R14/2
struct[0].Gy_ini[0,0] = 2*V_R01*g_R01_R02 + V_R02*(-b_R01_R02*sin(theta_R01 - theta_R02) - g_R01_R02*cos(theta_R01 - theta_R02))
struct[0].Gy_ini[0,1] = V_R01*V_R02*(-b_R01_R02*cos(theta_R01 - theta_R02) + g_R01_R02*sin(theta_R01 - theta_R02))
struct[0].Gy_ini[0,2] = V_R01*(-b_R01_R02*sin(theta_R01 - theta_R02) - g_R01_R02*cos(theta_R01 - theta_R02))
struct[0].Gy_ini[0,3] = V_R01*V_R02*(b_R01_R02*cos(theta_R01 - theta_R02) - g_R01_R02*sin(theta_R01 - theta_R02))
struct[0].Gy_ini[1,0] = 2*V_R01*(-b_R01_R02 - bs_R01_R02/2) + V_R02*(b_R01_R02*cos(theta_R01 - theta_R02) - g_R01_R02*sin(theta_R01 - theta_R02))
struct[0].Gy_ini[1,1] = V_R01*V_R02*(-b_R01_R02*sin(theta_R01 - theta_R02) - g_R01_R02*cos(theta_R01 - theta_R02))
struct[0].Gy_ini[1,2] = V_R01*(b_R01_R02*cos(theta_R01 - theta_R02) - g_R01_R02*sin(theta_R01 - theta_R02))
struct[0].Gy_ini[1,3] = V_R01*V_R02*(b_R01_R02*sin(theta_R01 - theta_R02) + g_R01_R02*cos(theta_R01 - theta_R02))
struct[0].Gy_ini[2,0] = V_R02*(b_R01_R02*sin(theta_R01 - theta_R02) - g_R01_R02*cos(theta_R01 - theta_R02))
struct[0].Gy_ini[2,1] = V_R01*V_R02*(b_R01_R02*cos(theta_R01 - theta_R02) + g_R01_R02*sin(theta_R01 - theta_R02))
struct[0].Gy_ini[2,2] = V_R01*(b_R01_R02*sin(theta_R01 - theta_R02) - g_R01_R02*cos(theta_R01 - theta_R02)) + 2*V_R02*(g_R01_R02 + g_R02_R03) + V_R03*(-b_R02_R03*sin(theta_R02 - theta_R03) - g_R02_R03*cos(theta_R02 - theta_R03))
struct[0].Gy_ini[2,3] = V_R01*V_R02*(-b_R01_R02*cos(theta_R01 - theta_R02) - g_R01_R02*sin(theta_R01 - theta_R02)) + V_R02*V_R03*(-b_R02_R03*cos(theta_R02 - theta_R03) + g_R02_R03*sin(theta_R02 - theta_R03))
struct[0].Gy_ini[2,4] = V_R02*(-b_R02_R03*sin(theta_R02 - theta_R03) - g_R02_R03*cos(theta_R02 - theta_R03))
struct[0].Gy_ini[2,5] = V_R02*V_R03*(b_R02_R03*cos(theta_R02 - theta_R03) - g_R02_R03*sin(theta_R02 - theta_R03))
struct[0].Gy_ini[3,0] = V_R02*(b_R01_R02*cos(theta_R01 - theta_R02) + g_R01_R02*sin(theta_R01 - theta_R02))
struct[0].Gy_ini[3,1] = V_R01*V_R02*(-b_R01_R02*sin(theta_R01 - theta_R02) + g_R01_R02*cos(theta_R01 - theta_R02))
struct[0].Gy_ini[3,2] = V_R01*(b_R01_R02*cos(theta_R01 - theta_R02) + g_R01_R02*sin(theta_R01 - theta_R02)) + 2*V_R02*(-b_R01_R02 - b_R02_R03 - bs_R01_R02/2 - bs_R02_R03/2) + V_R03*(b_R02_R03*cos(theta_R02 - theta_R03) - g_R02_R03*sin(theta_R02 - theta_R03))
struct[0].Gy_ini[3,3] = V_R01*V_R02*(b_R01_R02*sin(theta_R01 - theta_R02) - g_R01_R02*cos(theta_R01 - theta_R02)) + V_R02*V_R03*(-b_R02_R03*sin(theta_R02 - theta_R03) - g_R02_R03*cos(theta_R02 - theta_R03))
struct[0].Gy_ini[3,4] = V_R02*(b_R02_R03*cos(theta_R02 - theta_R03) - g_R02_R03*sin(theta_R02 - theta_R03))
struct[0].Gy_ini[3,5] = V_R02*V_R03*(b_R02_R03*sin(theta_R02 - theta_R03) + g_R02_R03*cos(theta_R02 - theta_R03))
struct[0].Gy_ini[4,2] = V_R03*(b_R02_R03*sin(theta_R02 - theta_R03) - g_R02_R03*cos(theta_R02 - theta_R03))
struct[0].Gy_ini[4,3] = V_R02*V_R03*(b_R02_R03*cos(theta_R02 - theta_R03) + g_R02_R03*sin(theta_R02 - theta_R03))
struct[0].Gy_ini[4,4] = V_R02*(b_R02_R03*sin(theta_R02 - theta_R03) - g_R02_R03*cos(theta_R02 - theta_R03)) + 2*V_R03*(g_R02_R03 + g_R03_R04 + g_R03_R11) + V_R04*(-b_R03_R04*sin(theta_R03 - theta_R04) - g_R03_R04*cos(theta_R03 - theta_R04)) + V_R11*(-b_R03_R11*sin(theta_R03 - theta_R11) - g_R03_R11*cos(theta_R03 - theta_R11))
struct[0].Gy_ini[4,5] = V_R02*V_R03*(-b_R02_R03*cos(theta_R02 - theta_R03) - g_R02_R03*sin(theta_R02 - theta_R03)) + V_R03*V_R04*(-b_R03_R04*cos(theta_R03 - theta_R04) + g_R03_R04*sin(theta_R03 - theta_R04)) + V_R03*V_R11*(-b_R03_R11*cos(theta_R03 - theta_R11) + g_R03_R11*sin(theta_R03 - theta_R11))
struct[0].Gy_ini[4,6] = V_R03*(-b_R03_R04*sin(theta_R03 - theta_R04) - g_R03_R04*cos(theta_R03 - theta_R04))
struct[0].Gy_ini[4,7] = V_R03*V_R04*(b_R03_R04*cos(theta_R03 - theta_R04) - g_R03_R04*sin(theta_R03 - theta_R04))
struct[0].Gy_ini[4,20] = V_R03*(-b_R03_R11*sin(theta_R03 - theta_R11) - g_R03_R11*cos(theta_R03 - theta_R11))
struct[0].Gy_ini[4,21] = V_R03*V_R11*(b_R03_R11*cos(theta_R03 - theta_R11) - g_R03_R11*sin(theta_R03 - theta_R11))
struct[0].Gy_ini[5,2] = V_R03*(b_R02_R03*cos(theta_R02 - theta_R03) + g_R02_R03*sin(theta_R02 - theta_R03))
struct[0].Gy_ini[5,3] = V_R02*V_R03*(-b_R02_R03*sin(theta_R02 - theta_R03) + g_R02_R03*cos(theta_R02 - theta_R03))
struct[0].Gy_ini[5,4] = V_R02*(b_R02_R03*cos(theta_R02 - theta_R03) + g_R02_R03*sin(theta_R02 - theta_R03)) + 2*V_R03*(-b_R02_R03 - b_R03_R04 - b_R03_R11 - bs_R02_R03/2 - bs_R03_R04/2 - bs_R03_R11/2) + V_R04*(b_R03_R04*cos(theta_R03 - theta_R04) - g_R03_R04*sin(theta_R03 - theta_R04)) + V_R11*(b_R03_R11*cos(theta_R03 - theta_R11) - g_R03_R11*sin(theta_R03 - theta_R11))
struct[0].Gy_ini[5,5] = V_R02*V_R03*(b_R02_R03*sin(theta_R02 - theta_R03) - g_R02_R03*cos(theta_R02 - theta_R03)) + V_R03*V_R04*(-b_R03_R04*sin(theta_R03 - theta_R04) - g_R03_R04*cos(theta_R03 - theta_R04)) + V_R03*V_R11*(-b_R03_R11*sin(theta_R03 - theta_R11) - g_R03_R11*cos(theta_R03 - theta_R11))
struct[0].Gy_ini[5,6] = V_R03*(b_R03_R04*cos(theta_R03 - theta_R04) - g_R03_R04*sin(theta_R03 - theta_R04))
struct[0].Gy_ini[5,7] = V_R03*V_R04*(b_R03_R04*sin(theta_R03 - theta_R04) + g_R03_R04*cos(theta_R03 - theta_R04))
struct[0].Gy_ini[5,20] = V_R03*(b_R03_R11*cos(theta_R03 - theta_R11) - g_R03_R11*sin(theta_R03 - theta_R11))
struct[0].Gy_ini[5,21] = V_R03*V_R11*(b_R03_R11*sin(theta_R03 - theta_R11) + g_R03_R11*cos(theta_R03 - theta_R11))
struct[0].Gy_ini[6,4] = V_R04*(b_R03_R04*sin(theta_R03 - theta_R04) - g_R03_R04*cos(theta_R03 - theta_R04))
struct[0].Gy_ini[6,5] = V_R03*V_R04*(b_R03_R04*cos(theta_R03 - theta_R04) + g_R03_R04*sin(theta_R03 - theta_R04))
struct[0].Gy_ini[6,6] = V_R03*(b_R03_R04*sin(theta_R03 - theta_R04) - g_R03_R04*cos(theta_R03 - theta_R04)) + 2*V_R04*(g_R03_R04 + g_R04_R05 + g_R04_R12) + V_R05*(-b_R04_R05*sin(theta_R04 - theta_R05) - g_R04_R05*cos(theta_R04 - theta_R05)) + V_R12*(-b_R04_R12*sin(theta_R04 - theta_R12) - g_R04_R12*cos(theta_R04 - theta_R12))
struct[0].Gy_ini[6,7] = V_R03*V_R04*(-b_R03_R04*cos(theta_R03 - theta_R04) - g_R03_R04*sin(theta_R03 - theta_R04)) + V_R04*V_R05*(-b_R04_R05*cos(theta_R04 - theta_R05) + g_R04_R05*sin(theta_R04 - theta_R05)) + V_R04*V_R12*(-b_R04_R12*cos(theta_R04 - theta_R12) + g_R04_R12*sin(theta_R04 - theta_R12))
struct[0].Gy_ini[6,8] = V_R04*(-b_R04_R05*sin(theta_R04 - theta_R05) - g_R04_R05*cos(theta_R04 - theta_R05))
struct[0].Gy_ini[6,9] = V_R04*V_R05*(b_R04_R05*cos(theta_R04 - theta_R05) - g_R04_R05*sin(theta_R04 - theta_R05))
struct[0].Gy_ini[6,22] = V_R04*(-b_R04_R12*sin(theta_R04 - theta_R12) - g_R04_R12*cos(theta_R04 - theta_R12))
struct[0].Gy_ini[6,23] = V_R04*V_R12*(b_R04_R12*cos(theta_R04 - theta_R12) - g_R04_R12*sin(theta_R04 - theta_R12))
struct[0].Gy_ini[7,4] = V_R04*(b_R03_R04*cos(theta_R03 - theta_R04) + g_R03_R04*sin(theta_R03 - theta_R04))
struct[0].Gy_ini[7,5] = V_R03*V_R04*(-b_R03_R04*sin(theta_R03 - theta_R04) + g_R03_R04*cos(theta_R03 - theta_R04))
struct[0].Gy_ini[7,6] = V_R03*(b_R03_R04*cos(theta_R03 - theta_R04) + g_R03_R04*sin(theta_R03 - theta_R04)) + 2*V_R04*(-b_R03_R04 - b_R04_R05 - b_R04_R12 - bs_R03_R04/2 - bs_R04_R05/2 - bs_R04_R12/2) + V_R05*(b_R04_R05*cos(theta_R04 - theta_R05) - g_R04_R05*sin(theta_R04 - theta_R05)) + V_R12*(b_R04_R12*cos(theta_R04 - theta_R12) - g_R04_R12*sin(theta_R04 - theta_R12))
struct[0].Gy_ini[7,7] = V_R03*V_R04*(b_R03_R04*sin(theta_R03 - theta_R04) - g_R03_R04*cos(theta_R03 - theta_R04)) + V_R04*V_R05*(-b_R04_R05*sin(theta_R04 - theta_R05) - g_R04_R05*cos(theta_R04 - theta_R05)) + V_R04*V_R12*(-b_R04_R12*sin(theta_R04 - theta_R12) - g_R04_R12*cos(theta_R04 - theta_R12))
struct[0].Gy_ini[7,8] = V_R04*(b_R04_R05*cos(theta_R04 - theta_R05) - g_R04_R05*sin(theta_R04 - theta_R05))
struct[0].Gy_ini[7,9] = V_R04*V_R05*(b_R04_R05*sin(theta_R04 - theta_R05) + g_R04_R05*cos(theta_R04 - theta_R05))
struct[0].Gy_ini[7,22] = V_R04*(b_R04_R12*cos(theta_R04 - theta_R12) - g_R04_R12*sin(theta_R04 - theta_R12))
struct[0].Gy_ini[7,23] = V_R04*V_R12*(b_R04_R12*sin(theta_R04 - theta_R12) + g_R04_R12*cos(theta_R04 - theta_R12))
struct[0].Gy_ini[8,6] = V_R05*(b_R04_R05*sin(theta_R04 - theta_R05) - g_R04_R05*cos(theta_R04 - theta_R05))
struct[0].Gy_ini[8,7] = V_R04*V_R05*(b_R04_R05*cos(theta_R04 - theta_R05) + g_R04_R05*sin(theta_R04 - theta_R05))
struct[0].Gy_ini[8,8] = V_R04*(b_R04_R05*sin(theta_R04 - theta_R05) - g_R04_R05*cos(theta_R04 - theta_R05)) + 2*V_R05*(g_R04_R05 + g_R05_R06) + V_R06*(-b_R05_R06*sin(theta_R05 - theta_R06) - g_R05_R06*cos(theta_R05 - theta_R06))
struct[0].Gy_ini[8,9] = V_R04*V_R05*(-b_R04_R05*cos(theta_R04 - theta_R05) - g_R04_R05*sin(theta_R04 - theta_R05)) + V_R05*V_R06*(-b_R05_R06*cos(theta_R05 - theta_R06) + g_R05_R06*sin(theta_R05 - theta_R06))
struct[0].Gy_ini[8,10] = V_R05*(-b_R05_R06*sin(theta_R05 - theta_R06) - g_R05_R06*cos(theta_R05 - theta_R06))
struct[0].Gy_ini[8,11] = V_R05*V_R06*(b_R05_R06*cos(theta_R05 - theta_R06) - g_R05_R06*sin(theta_R05 - theta_R06))
struct[0].Gy_ini[9,6] = V_R05*(b_R04_R05*cos(theta_R04 - theta_R05) + g_R04_R05*sin(theta_R04 - theta_R05))
struct[0].Gy_ini[9,7] = V_R04*V_R05*(-b_R04_R05*sin(theta_R04 - theta_R05) + g_R04_R05*cos(theta_R04 - theta_R05))
struct[0].Gy_ini[9,8] = V_R04*(b_R04_R05*cos(theta_R04 - theta_R05) + g_R04_R05*sin(theta_R04 - theta_R05)) + 2*V_R05*(-b_R04_R05 - b_R05_R06 - bs_R04_R05/2 - bs_R05_R06/2) + V_R06*(b_R05_R06*cos(theta_R05 - theta_R06) - g_R05_R06*sin(theta_R05 - theta_R06))
struct[0].Gy_ini[9,9] = V_R04*V_R05*(b_R04_R05*sin(theta_R04 - theta_R05) - g_R04_R05*cos(theta_R04 - theta_R05)) + V_R05*V_R06*(-b_R05_R06*sin(theta_R05 - theta_R06) - g_R05_R06*cos(theta_R05 - theta_R06))
struct[0].Gy_ini[9,10] = V_R05*(b_R05_R06*cos(theta_R05 - theta_R06) - g_R05_R06*sin(theta_R05 - theta_R06))
struct[0].Gy_ini[9,11] = V_R05*V_R06*(b_R05_R06*sin(theta_R05 - theta_R06) + g_R05_R06*cos(theta_R05 - theta_R06))
struct[0].Gy_ini[10,8] = V_R06*(b_R05_R06*sin(theta_R05 - theta_R06) - g_R05_R06*cos(theta_R05 - theta_R06))
struct[0].Gy_ini[10,9] = V_R05*V_R06*(b_R05_R06*cos(theta_R05 - theta_R06) + g_R05_R06*sin(theta_R05 - theta_R06))
struct[0].Gy_ini[10,10] = V_R05*(b_R05_R06*sin(theta_R05 - theta_R06) - g_R05_R06*cos(theta_R05 - theta_R06)) + 2*V_R06*(g_R05_R06 + g_R06_R07 + g_R06_R16) + V_R07*(-b_R06_R07*sin(theta_R06 - theta_R07) - g_R06_R07*cos(theta_R06 - theta_R07)) + V_R16*(-b_R06_R16*sin(theta_R06 - theta_R16) - g_R06_R16*cos(theta_R06 - theta_R16))
struct[0].Gy_ini[10,11] = V_R05*V_R06*(-b_R05_R06*cos(theta_R05 - theta_R06) - g_R05_R06*sin(theta_R05 - theta_R06)) + V_R06*V_R07*(-b_R06_R07*cos(theta_R06 - theta_R07) + g_R06_R07*sin(theta_R06 - theta_R07)) + V_R06*V_R16*(-b_R06_R16*cos(theta_R06 - theta_R16) + g_R06_R16*sin(theta_R06 - theta_R16))
struct[0].Gy_ini[10,12] = V_R06*(-b_R06_R07*sin(theta_R06 - theta_R07) - g_R06_R07*cos(theta_R06 - theta_R07))
struct[0].Gy_ini[10,13] = V_R06*V_R07*(b_R06_R07*cos(theta_R06 - theta_R07) - g_R06_R07*sin(theta_R06 - theta_R07))
struct[0].Gy_ini[10,30] = V_R06*(-b_R06_R16*sin(theta_R06 - theta_R16) - g_R06_R16*cos(theta_R06 - theta_R16))
struct[0].Gy_ini[10,31] = V_R06*V_R16*(b_R06_R16*cos(theta_R06 - theta_R16) - g_R06_R16*sin(theta_R06 - theta_R16))
struct[0].Gy_ini[11,8] = V_R06*(b_R05_R06*cos(theta_R05 - theta_R06) + g_R05_R06*sin(theta_R05 - theta_R06))
struct[0].Gy_ini[11,9] = V_R05*V_R06*(-b_R05_R06*sin(theta_R05 - theta_R06) + g_R05_R06*cos(theta_R05 - theta_R06))
struct[0].Gy_ini[11,10] = V_R05*(b_R05_R06*cos(theta_R05 - theta_R06) + g_R05_R06*sin(theta_R05 - theta_R06)) + 2*V_R06*(-b_R05_R06 - b_R06_R07 - b_R06_R16 - bs_R05_R06/2 - bs_R06_R07/2 - bs_R06_R16/2) + V_R07*(b_R06_R07*cos(theta_R06 - theta_R07) - g_R06_R07*sin(theta_R06 - theta_R07)) + V_R16*(b_R06_R16*cos(theta_R06 - theta_R16) - g_R06_R16*sin(theta_R06 - theta_R16))
struct[0].Gy_ini[11,11] = V_R05*V_R06*(b_R05_R06*sin(theta_R05 - theta_R06) - g_R05_R06*cos(theta_R05 - theta_R06)) + V_R06*V_R07*(-b_R06_R07*sin(theta_R06 - theta_R07) - g_R06_R07*cos(theta_R06 - theta_R07)) + V_R06*V_R16*(-b_R06_R16*sin(theta_R06 - theta_R16) - g_R06_R16*cos(theta_R06 - theta_R16))
struct[0].Gy_ini[11,12] = V_R06*(b_R06_R07*cos(theta_R06 - theta_R07) - g_R06_R07*sin(theta_R06 - theta_R07))
struct[0].Gy_ini[11,13] = V_R06*V_R07*(b_R06_R07*sin(theta_R06 - theta_R07) + g_R06_R07*cos(theta_R06 - theta_R07))
struct[0].Gy_ini[11,30] = V_R06*(b_R06_R16*cos(theta_R06 - theta_R16) - g_R06_R16*sin(theta_R06 - theta_R16))
struct[0].Gy_ini[11,31] = V_R06*V_R16*(b_R06_R16*sin(theta_R06 - theta_R16) + g_R06_R16*cos(theta_R06 - theta_R16))
struct[0].Gy_ini[12,10] = V_R07*(b_R06_R07*sin(theta_R06 - theta_R07) - g_R06_R07*cos(theta_R06 - theta_R07))
struct[0].Gy_ini[12,11] = V_R06*V_R07*(b_R06_R07*cos(theta_R06 - theta_R07) + g_R06_R07*sin(theta_R06 - theta_R07))
struct[0].Gy_ini[12,12] = V_R06*(b_R06_R07*sin(theta_R06 - theta_R07) - g_R06_R07*cos(theta_R06 - theta_R07)) + 2*V_R07*(g_R06_R07 + g_R07_R08) + V_R08*(-b_R07_R08*sin(theta_R07 - theta_R08) - g_R07_R08*cos(theta_R07 - theta_R08))
struct[0].Gy_ini[12,13] = V_R06*V_R07*(-b_R06_R07*cos(theta_R06 - theta_R07) - g_R06_R07*sin(theta_R06 - theta_R07)) + V_R07*V_R08*(-b_R07_R08*cos(theta_R07 - theta_R08) + g_R07_R08*sin(theta_R07 - theta_R08))
struct[0].Gy_ini[12,14] = V_R07*(-b_R07_R08*sin(theta_R07 - theta_R08) - g_R07_R08*cos(theta_R07 - theta_R08))
struct[0].Gy_ini[12,15] = V_R07*V_R08*(b_R07_R08*cos(theta_R07 - theta_R08) - g_R07_R08*sin(theta_R07 - theta_R08))
struct[0].Gy_ini[13,10] = V_R07*(b_R06_R07*cos(theta_R06 - theta_R07) + g_R06_R07*sin(theta_R06 - theta_R07))
struct[0].Gy_ini[13,11] = V_R06*V_R07*(-b_R06_R07*sin(theta_R06 - theta_R07) + g_R06_R07*cos(theta_R06 - theta_R07))
struct[0].Gy_ini[13,12] = V_R06*(b_R06_R07*cos(theta_R06 - theta_R07) + g_R06_R07*sin(theta_R06 - theta_R07)) + 2*V_R07*(-b_R06_R07 - b_R07_R08 - bs_R06_R07/2 - bs_R07_R08/2) + V_R08*(b_R07_R08*cos(theta_R07 - theta_R08) - g_R07_R08*sin(theta_R07 - theta_R08))
struct[0].Gy_ini[13,13] = V_R06*V_R07*(b_R06_R07*sin(theta_R06 - theta_R07) - g_R06_R07*cos(theta_R06 - theta_R07)) + V_R07*V_R08*(-b_R07_R08*sin(theta_R07 - theta_R08) - g_R07_R08*cos(theta_R07 - theta_R08))
struct[0].Gy_ini[13,14] = V_R07*(b_R07_R08*cos(theta_R07 - theta_R08) - g_R07_R08*sin(theta_R07 - theta_R08))
struct[0].Gy_ini[13,15] = V_R07*V_R08*(b_R07_R08*sin(theta_R07 - theta_R08) + g_R07_R08*cos(theta_R07 - theta_R08))
struct[0].Gy_ini[14,12] = V_R08*(b_R07_R08*sin(theta_R07 - theta_R08) - g_R07_R08*cos(theta_R07 - theta_R08))
struct[0].Gy_ini[14,13] = V_R07*V_R08*(b_R07_R08*cos(theta_R07 - theta_R08) + g_R07_R08*sin(theta_R07 - theta_R08))
struct[0].Gy_ini[14,14] = V_R07*(b_R07_R08*sin(theta_R07 - theta_R08) - g_R07_R08*cos(theta_R07 - theta_R08)) + 2*V_R08*(g_R07_R08 + g_R08_R09) + V_R09*(-b_R08_R09*sin(theta_R08 - theta_R09) - g_R08_R09*cos(theta_R08 - theta_R09))
struct[0].Gy_ini[14,15] = V_R07*V_R08*(-b_R07_R08*cos(theta_R07 - theta_R08) - g_R07_R08*sin(theta_R07 - theta_R08)) + V_R08*V_R09*(-b_R08_R09*cos(theta_R08 - theta_R09) + g_R08_R09*sin(theta_R08 - theta_R09))
struct[0].Gy_ini[14,16] = V_R08*(-b_R08_R09*sin(theta_R08 - theta_R09) - g_R08_R09*cos(theta_R08 - theta_R09))
struct[0].Gy_ini[14,17] = V_R08*V_R09*(b_R08_R09*cos(theta_R08 - theta_R09) - g_R08_R09*sin(theta_R08 - theta_R09))
struct[0].Gy_ini[15,12] = V_R08*(b_R07_R08*cos(theta_R07 - theta_R08) + g_R07_R08*sin(theta_R07 - theta_R08))
struct[0].Gy_ini[15,13] = V_R07*V_R08*(-b_R07_R08*sin(theta_R07 - theta_R08) + g_R07_R08*cos(theta_R07 - theta_R08))
struct[0].Gy_ini[15,14] = V_R07*(b_R07_R08*cos(theta_R07 - theta_R08) + g_R07_R08*sin(theta_R07 - theta_R08)) + 2*V_R08*(-b_R07_R08 - b_R08_R09 - bs_R07_R08/2 - bs_R08_R09/2) + V_R09*(b_R08_R09*cos(theta_R08 - theta_R09) - g_R08_R09*sin(theta_R08 - theta_R09))
struct[0].Gy_ini[15,15] = V_R07*V_R08*(b_R07_R08*sin(theta_R07 - theta_R08) - g_R07_R08*cos(theta_R07 - theta_R08)) + V_R08*V_R09*(-b_R08_R09*sin(theta_R08 - theta_R09) - g_R08_R09*cos(theta_R08 - theta_R09))
struct[0].Gy_ini[15,16] = V_R08*(b_R08_R09*cos(theta_R08 - theta_R09) - g_R08_R09*sin(theta_R08 - theta_R09))
struct[0].Gy_ini[15,17] = V_R08*V_R09*(b_R08_R09*sin(theta_R08 - theta_R09) + g_R08_R09*cos(theta_R08 - theta_R09))
struct[0].Gy_ini[16,14] = V_R09*(b_R08_R09*sin(theta_R08 - theta_R09) - g_R08_R09*cos(theta_R08 - theta_R09))
struct[0].Gy_ini[16,15] = V_R08*V_R09*(b_R08_R09*cos(theta_R08 - theta_R09) + g_R08_R09*sin(theta_R08 - theta_R09))
struct[0].Gy_ini[16,16] = V_R08*(b_R08_R09*sin(theta_R08 - theta_R09) - g_R08_R09*cos(theta_R08 - theta_R09)) + 2*V_R09*(g_R08_R09 + g_R09_R10 + g_R09_R17) + V_R10*(-b_R09_R10*sin(theta_R09 - theta_R10) - g_R09_R10*cos(theta_R09 - theta_R10)) + V_R17*(-b_R09_R17*sin(theta_R09 - theta_R17) - g_R09_R17*cos(theta_R09 - theta_R17))
struct[0].Gy_ini[16,17] = V_R08*V_R09*(-b_R08_R09*cos(theta_R08 - theta_R09) - g_R08_R09*sin(theta_R08 - theta_R09)) + V_R09*V_R10*(-b_R09_R10*cos(theta_R09 - theta_R10) + g_R09_R10*sin(theta_R09 - theta_R10)) + V_R09*V_R17*(-b_R09_R17*cos(theta_R09 - theta_R17) + g_R09_R17*sin(theta_R09 - theta_R17))
struct[0].Gy_ini[16,18] = V_R09*(-b_R09_R10*sin(theta_R09 - theta_R10) - g_R09_R10*cos(theta_R09 - theta_R10))
struct[0].Gy_ini[16,19] = V_R09*V_R10*(b_R09_R10*cos(theta_R09 - theta_R10) - g_R09_R10*sin(theta_R09 - theta_R10))
struct[0].Gy_ini[16,32] = V_R09*(-b_R09_R17*sin(theta_R09 - theta_R17) - g_R09_R17*cos(theta_R09 - theta_R17))
struct[0].Gy_ini[16,33] = V_R09*V_R17*(b_R09_R17*cos(theta_R09 - theta_R17) - g_R09_R17*sin(theta_R09 - theta_R17))
struct[0].Gy_ini[17,14] = V_R09*(b_R08_R09*cos(theta_R08 - theta_R09) + g_R08_R09*sin(theta_R08 - theta_R09))
struct[0].Gy_ini[17,15] = V_R08*V_R09*(-b_R08_R09*sin(theta_R08 - theta_R09) + g_R08_R09*cos(theta_R08 - theta_R09))
struct[0].Gy_ini[17,16] = V_R08*(b_R08_R09*cos(theta_R08 - theta_R09) + g_R08_R09*sin(theta_R08 - theta_R09)) + 2*V_R09*(-b_R08_R09 - b_R09_R10 - b_R09_R17 - bs_R08_R09/2 - bs_R09_R10/2 - bs_R09_R17/2) + V_R10*(b_R09_R10*cos(theta_R09 - theta_R10) - g_R09_R10*sin(theta_R09 - theta_R10)) + V_R17*(b_R09_R17*cos(theta_R09 - theta_R17) - g_R09_R17*sin(theta_R09 - theta_R17))
struct[0].Gy_ini[17,17] = V_R08*V_R09*(b_R08_R09*sin(theta_R08 - theta_R09) - g_R08_R09*cos(theta_R08 - theta_R09)) + V_R09*V_R10*(-b_R09_R10*sin(theta_R09 - theta_R10) - g_R09_R10*cos(theta_R09 - theta_R10)) + V_R09*V_R17*(-b_R09_R17*sin(theta_R09 - theta_R17) - g_R09_R17*cos(theta_R09 - theta_R17))
struct[0].Gy_ini[17,18] = V_R09*(b_R09_R10*cos(theta_R09 - theta_R10) - g_R09_R10*sin(theta_R09 - theta_R10))
struct[0].Gy_ini[17,19] = V_R09*V_R10*(b_R09_R10*sin(theta_R09 - theta_R10) + g_R09_R10*cos(theta_R09 - theta_R10))
struct[0].Gy_ini[17,32] = V_R09*(b_R09_R17*cos(theta_R09 - theta_R17) - g_R09_R17*sin(theta_R09 - theta_R17))
struct[0].Gy_ini[17,33] = V_R09*V_R17*(b_R09_R17*sin(theta_R09 - theta_R17) + g_R09_R17*cos(theta_R09 - theta_R17))
struct[0].Gy_ini[18,16] = V_R10*(b_R09_R10*sin(theta_R09 - theta_R10) - g_R09_R10*cos(theta_R09 - theta_R10))
struct[0].Gy_ini[18,17] = V_R09*V_R10*(b_R09_R10*cos(theta_R09 - theta_R10) + g_R09_R10*sin(theta_R09 - theta_R10))
struct[0].Gy_ini[18,18] = V_R09*(b_R09_R10*sin(theta_R09 - theta_R10) - g_R09_R10*cos(theta_R09 - theta_R10)) + 2*V_R10*(g_R09_R10 + g_R10_R18) + V_R18*(-b_R10_R18*sin(theta_R10 - theta_R18) - g_R10_R18*cos(theta_R10 - theta_R18))
struct[0].Gy_ini[18,19] = V_R09*V_R10*(-b_R09_R10*cos(theta_R09 - theta_R10) - g_R09_R10*sin(theta_R09 - theta_R10)) + V_R10*V_R18*(-b_R10_R18*cos(theta_R10 - theta_R18) + g_R10_R18*sin(theta_R10 - theta_R18))
struct[0].Gy_ini[18,34] = V_R10*(-b_R10_R18*sin(theta_R10 - theta_R18) - g_R10_R18*cos(theta_R10 - theta_R18))
struct[0].Gy_ini[18,35] = V_R10*V_R18*(b_R10_R18*cos(theta_R10 - theta_R18) - g_R10_R18*sin(theta_R10 - theta_R18))
struct[0].Gy_ini[18,38] = -S_n_R10/S_base
struct[0].Gy_ini[19,16] = V_R10*(b_R09_R10*cos(theta_R09 - theta_R10) + g_R09_R10*sin(theta_R09 - theta_R10))
struct[0].Gy_ini[19,17] = V_R09*V_R10*(-b_R09_R10*sin(theta_R09 - theta_R10) + g_R09_R10*cos(theta_R09 - theta_R10))
struct[0].Gy_ini[19,18] = V_R09*(b_R09_R10*cos(theta_R09 - theta_R10) + g_R09_R10*sin(theta_R09 - theta_R10)) + 2*V_R10*(-b_R09_R10 - b_R10_R18 - bs_R09_R10/2 - bs_R10_R18/2) + V_R18*(b_R10_R18*cos(theta_R10 - theta_R18) - g_R10_R18*sin(theta_R10 - theta_R18))
struct[0].Gy_ini[19,19] = V_R09*V_R10*(b_R09_R10*sin(theta_R09 - theta_R10) - g_R09_R10*cos(theta_R09 - theta_R10)) + V_R10*V_R18*(-b_R10_R18*sin(theta_R10 - theta_R18) - g_R10_R18*cos(theta_R10 - theta_R18))
struct[0].Gy_ini[19,34] = V_R10*(b_R10_R18*cos(theta_R10 - theta_R18) - g_R10_R18*sin(theta_R10 - theta_R18))
struct[0].Gy_ini[19,35] = V_R10*V_R18*(b_R10_R18*sin(theta_R10 - theta_R18) + g_R10_R18*cos(theta_R10 - theta_R18))
struct[0].Gy_ini[19,39] = -S_n_R10/S_base
struct[0].Gy_ini[20,4] = V_R11*(b_R03_R11*sin(theta_R03 - theta_R11) - g_R03_R11*cos(theta_R03 - theta_R11))
struct[0].Gy_ini[20,5] = V_R03*V_R11*(b_R03_R11*cos(theta_R03 - theta_R11) + g_R03_R11*sin(theta_R03 - theta_R11))
struct[0].Gy_ini[20,20] = V_R03*(b_R03_R11*sin(theta_R03 - theta_R11) - g_R03_R11*cos(theta_R03 - theta_R11)) + 2*V_R11*g_R03_R11
struct[0].Gy_ini[20,21] = V_R03*V_R11*(-b_R03_R11*cos(theta_R03 - theta_R11) - g_R03_R11*sin(theta_R03 - theta_R11))
struct[0].Gy_ini[21,4] = V_R11*(b_R03_R11*cos(theta_R03 - theta_R11) + g_R03_R11*sin(theta_R03 - theta_R11))
struct[0].Gy_ini[21,5] = V_R03*V_R11*(-b_R03_R11*sin(theta_R03 - theta_R11) + g_R03_R11*cos(theta_R03 - theta_R11))
struct[0].Gy_ini[21,20] = V_R03*(b_R03_R11*cos(theta_R03 - theta_R11) + g_R03_R11*sin(theta_R03 - theta_R11)) + 2*V_R11*(-b_R03_R11 - bs_R03_R11/2)
struct[0].Gy_ini[21,21] = V_R03*V_R11*(b_R03_R11*sin(theta_R03 - theta_R11) - g_R03_R11*cos(theta_R03 - theta_R11))
struct[0].Gy_ini[22,6] = V_R12*(b_R04_R12*sin(theta_R04 - theta_R12) - g_R04_R12*cos(theta_R04 - theta_R12))
struct[0].Gy_ini[22,7] = V_R04*V_R12*(b_R04_R12*cos(theta_R04 - theta_R12) + g_R04_R12*sin(theta_R04 - theta_R12))
struct[0].Gy_ini[22,22] = V_R04*(b_R04_R12*sin(theta_R04 - theta_R12) - g_R04_R12*cos(theta_R04 - theta_R12)) + 2*V_R12*(g_R04_R12 + g_R12_R13) + V_R13*(-b_R12_R13*sin(theta_R12 - theta_R13) - g_R12_R13*cos(theta_R12 - theta_R13))
struct[0].Gy_ini[22,23] = V_R04*V_R12*(-b_R04_R12*cos(theta_R04 - theta_R12) - g_R04_R12*sin(theta_R04 - theta_R12)) + V_R12*V_R13*(-b_R12_R13*cos(theta_R12 - theta_R13) + g_R12_R13*sin(theta_R12 - theta_R13))
struct[0].Gy_ini[22,24] = V_R12*(-b_R12_R13*sin(theta_R12 - theta_R13) - g_R12_R13*cos(theta_R12 - theta_R13))
struct[0].Gy_ini[22,25] = V_R12*V_R13*(b_R12_R13*cos(theta_R12 - theta_R13) - g_R12_R13*sin(theta_R12 - theta_R13))
struct[0].Gy_ini[23,6] = V_R12*(b_R04_R12*cos(theta_R04 - theta_R12) + g_R04_R12*sin(theta_R04 - theta_R12))
struct[0].Gy_ini[23,7] = V_R04*V_R12*(-b_R04_R12*sin(theta_R04 - theta_R12) + g_R04_R12*cos(theta_R04 - theta_R12))
struct[0].Gy_ini[23,22] = V_R04*(b_R04_R12*cos(theta_R04 - theta_R12) + g_R04_R12*sin(theta_R04 - theta_R12)) + 2*V_R12*(-b_R04_R12 - b_R12_R13 - bs_R04_R12/2 - bs_R12_R13/2) + V_R13*(b_R12_R13*cos(theta_R12 - theta_R13) - g_R12_R13*sin(theta_R12 - theta_R13))
struct[0].Gy_ini[23,23] = V_R04*V_R12*(b_R04_R12*sin(theta_R04 - theta_R12) - g_R04_R12*cos(theta_R04 - theta_R12)) + V_R12*V_R13*(-b_R12_R13*sin(theta_R12 - theta_R13) - g_R12_R13*cos(theta_R12 - theta_R13))
struct[0].Gy_ini[23,24] = V_R12*(b_R12_R13*cos(theta_R12 - theta_R13) - g_R12_R13*sin(theta_R12 - theta_R13))
struct[0].Gy_ini[23,25] = V_R12*V_R13*(b_R12_R13*sin(theta_R12 - theta_R13) + g_R12_R13*cos(theta_R12 - theta_R13))
struct[0].Gy_ini[24,22] = V_R13*(b_R12_R13*sin(theta_R12 - theta_R13) - g_R12_R13*cos(theta_R12 - theta_R13))
struct[0].Gy_ini[24,23] = V_R12*V_R13*(b_R12_R13*cos(theta_R12 - theta_R13) + g_R12_R13*sin(theta_R12 - theta_R13))
struct[0].Gy_ini[24,24] = V_R12*(b_R12_R13*sin(theta_R12 - theta_R13) - g_R12_R13*cos(theta_R12 - theta_R13)) + 2*V_R13*(g_R12_R13 + g_R13_R14) + V_R14*(-b_R13_R14*sin(theta_R13 - theta_R14) - g_R13_R14*cos(theta_R13 - theta_R14))
struct[0].Gy_ini[24,25] = V_R12*V_R13*(-b_R12_R13*cos(theta_R12 - theta_R13) - g_R12_R13*sin(theta_R12 - theta_R13)) + V_R13*V_R14*(-b_R13_R14*cos(theta_R13 - theta_R14) + g_R13_R14*sin(theta_R13 - theta_R14))
struct[0].Gy_ini[24,26] = V_R13*(-b_R13_R14*sin(theta_R13 - theta_R14) - g_R13_R14*cos(theta_R13 - theta_R14))
struct[0].Gy_ini[24,27] = V_R13*V_R14*(b_R13_R14*cos(theta_R13 - theta_R14) - g_R13_R14*sin(theta_R13 - theta_R14))
struct[0].Gy_ini[25,22] = V_R13*(b_R12_R13*cos(theta_R12 - theta_R13) + g_R12_R13*sin(theta_R12 - theta_R13))
struct[0].Gy_ini[25,23] = V_R12*V_R13*(-b_R12_R13*sin(theta_R12 - theta_R13) + g_R12_R13*cos(theta_R12 - theta_R13))
struct[0].Gy_ini[25,24] = V_R12*(b_R12_R13*cos(theta_R12 - theta_R13) + g_R12_R13*sin(theta_R12 - theta_R13)) + 2*V_R13*(-b_R12_R13 - b_R13_R14 - bs_R12_R13/2 - bs_R13_R14/2) + V_R14*(b_R13_R14*cos(theta_R13 - theta_R14) - g_R13_R14*sin(theta_R13 - theta_R14))
struct[0].Gy_ini[25,25] = V_R12*V_R13*(b_R12_R13*sin(theta_R12 - theta_R13) - g_R12_R13*cos(theta_R12 - theta_R13)) + V_R13*V_R14*(-b_R13_R14*sin(theta_R13 - theta_R14) - g_R13_R14*cos(theta_R13 - theta_R14))
struct[0].Gy_ini[25,26] = V_R13*(b_R13_R14*cos(theta_R13 - theta_R14) - g_R13_R14*sin(theta_R13 - theta_R14))
struct[0].Gy_ini[25,27] = V_R13*V_R14*(b_R13_R14*sin(theta_R13 - theta_R14) + g_R13_R14*cos(theta_R13 - theta_R14))
struct[0].Gy_ini[26,24] = V_R14*(b_R13_R14*sin(theta_R13 - theta_R14) - g_R13_R14*cos(theta_R13 - theta_R14))
struct[0].Gy_ini[26,25] = V_R13*V_R14*(b_R13_R14*cos(theta_R13 - theta_R14) + g_R13_R14*sin(theta_R13 - theta_R14))
struct[0].Gy_ini[26,26] = V_R13*(b_R13_R14*sin(theta_R13 - theta_R14) - g_R13_R14*cos(theta_R13 - theta_R14)) + 2*V_R14*(g_R13_R14 + g_R14_R15) + V_R15*(-b_R14_R15*sin(theta_R14 - theta_R15) - g_R14_R15*cos(theta_R14 - theta_R15))
struct[0].Gy_ini[26,27] = V_R13*V_R14*(-b_R13_R14*cos(theta_R13 - theta_R14) - g_R13_R14*sin(theta_R13 - theta_R14)) + V_R14*V_R15*(-b_R14_R15*cos(theta_R14 - theta_R15) + g_R14_R15*sin(theta_R14 - theta_R15))
struct[0].Gy_ini[26,28] = V_R14*(-b_R14_R15*sin(theta_R14 - theta_R15) - g_R14_R15*cos(theta_R14 - theta_R15))
struct[0].Gy_ini[26,29] = V_R14*V_R15*(b_R14_R15*cos(theta_R14 - theta_R15) - g_R14_R15*sin(theta_R14 - theta_R15))
struct[0].Gy_ini[26,44] = -S_n_R14/S_base
struct[0].Gy_ini[27,24] = V_R14*(b_R13_R14*cos(theta_R13 - theta_R14) + g_R13_R14*sin(theta_R13 - theta_R14))
struct[0].Gy_ini[27,25] = V_R13*V_R14*(-b_R13_R14*sin(theta_R13 - theta_R14) + g_R13_R14*cos(theta_R13 - theta_R14))
struct[0].Gy_ini[27,26] = V_R13*(b_R13_R14*cos(theta_R13 - theta_R14) + g_R13_R14*sin(theta_R13 - theta_R14)) + 2*V_R14*(-b_R13_R14 - b_R14_R15 - bs_R13_R14/2 - bs_R14_R15/2) + V_R15*(b_R14_R15*cos(theta_R14 - theta_R15) - g_R14_R15*sin(theta_R14 - theta_R15))
struct[0].Gy_ini[27,27] = V_R13*V_R14*(b_R13_R14*sin(theta_R13 - theta_R14) - g_R13_R14*cos(theta_R13 - theta_R14)) + V_R14*V_R15*(-b_R14_R15*sin(theta_R14 - theta_R15) - g_R14_R15*cos(theta_R14 - theta_R15))
struct[0].Gy_ini[27,28] = V_R14*(b_R14_R15*cos(theta_R14 - theta_R15) - g_R14_R15*sin(theta_R14 - theta_R15))
struct[0].Gy_ini[27,29] = V_R14*V_R15*(b_R14_R15*sin(theta_R14 - theta_R15) + g_R14_R15*cos(theta_R14 - theta_R15))
struct[0].Gy_ini[27,45] = -S_n_R14/S_base
struct[0].Gy_ini[28,26] = V_R15*(b_R14_R15*sin(theta_R14 - theta_R15) - g_R14_R15*cos(theta_R14 - theta_R15))
struct[0].Gy_ini[28,27] = V_R14*V_R15*(b_R14_R15*cos(theta_R14 - theta_R15) + g_R14_R15*sin(theta_R14 - theta_R15))
struct[0].Gy_ini[28,28] = V_R14*(b_R14_R15*sin(theta_R14 - theta_R15) - g_R14_R15*cos(theta_R14 - theta_R15)) + 2*V_R15*g_R14_R15
struct[0].Gy_ini[28,29] = V_R14*V_R15*(-b_R14_R15*cos(theta_R14 - theta_R15) - g_R14_R15*sin(theta_R14 - theta_R15))
struct[0].Gy_ini[29,26] = V_R15*(b_R14_R15*cos(theta_R14 - theta_R15) + g_R14_R15*sin(theta_R14 - theta_R15))
struct[0].Gy_ini[29,27] = V_R14*V_R15*(-b_R14_R15*sin(theta_R14 - theta_R15) + g_R14_R15*cos(theta_R14 - theta_R15))
struct[0].Gy_ini[29,28] = V_R14*(b_R14_R15*cos(theta_R14 - theta_R15) + g_R14_R15*sin(theta_R14 - theta_R15)) + 2*V_R15*(-b_R14_R15 - bs_R14_R15/2)
struct[0].Gy_ini[29,29] = V_R14*V_R15*(b_R14_R15*sin(theta_R14 - theta_R15) - g_R14_R15*cos(theta_R14 - theta_R15))
struct[0].Gy_ini[30,10] = V_R16*(b_R06_R16*sin(theta_R06 - theta_R16) - g_R06_R16*cos(theta_R06 - theta_R16))
struct[0].Gy_ini[30,11] = V_R06*V_R16*(b_R06_R16*cos(theta_R06 - theta_R16) + g_R06_R16*sin(theta_R06 - theta_R16))
struct[0].Gy_ini[30,30] = V_R06*(b_R06_R16*sin(theta_R06 - theta_R16) - g_R06_R16*cos(theta_R06 - theta_R16)) + 2*V_R16*g_R06_R16
struct[0].Gy_ini[30,31] = V_R06*V_R16*(-b_R06_R16*cos(theta_R06 - theta_R16) - g_R06_R16*sin(theta_R06 - theta_R16))
struct[0].Gy_ini[31,10] = V_R16*(b_R06_R16*cos(theta_R06 - theta_R16) + g_R06_R16*sin(theta_R06 - theta_R16))
struct[0].Gy_ini[31,11] = V_R06*V_R16*(-b_R06_R16*sin(theta_R06 - theta_R16) + g_R06_R16*cos(theta_R06 - theta_R16))
struct[0].Gy_ini[31,30] = V_R06*(b_R06_R16*cos(theta_R06 - theta_R16) + g_R06_R16*sin(theta_R06 - theta_R16)) + 2*V_R16*(-b_R06_R16 - bs_R06_R16/2)
struct[0].Gy_ini[31,31] = V_R06*V_R16*(b_R06_R16*sin(theta_R06 - theta_R16) - g_R06_R16*cos(theta_R06 - theta_R16))
struct[0].Gy_ini[32,16] = V_R17*(b_R09_R17*sin(theta_R09 - theta_R17) - g_R09_R17*cos(theta_R09 - theta_R17))
struct[0].Gy_ini[32,17] = V_R09*V_R17*(b_R09_R17*cos(theta_R09 - theta_R17) + g_R09_R17*sin(theta_R09 - theta_R17))
struct[0].Gy_ini[32,32] = V_R09*(b_R09_R17*sin(theta_R09 - theta_R17) - g_R09_R17*cos(theta_R09 - theta_R17)) + 2*V_R17*g_R09_R17
struct[0].Gy_ini[32,33] = V_R09*V_R17*(-b_R09_R17*cos(theta_R09 - theta_R17) - g_R09_R17*sin(theta_R09 - theta_R17))
struct[0].Gy_ini[33,16] = V_R17*(b_R09_R17*cos(theta_R09 - theta_R17) + g_R09_R17*sin(theta_R09 - theta_R17))
struct[0].Gy_ini[33,17] = V_R09*V_R17*(-b_R09_R17*sin(theta_R09 - theta_R17) + g_R09_R17*cos(theta_R09 - theta_R17))
struct[0].Gy_ini[33,32] = V_R09*(b_R09_R17*cos(theta_R09 - theta_R17) + g_R09_R17*sin(theta_R09 - theta_R17)) + 2*V_R17*(-b_R09_R17 - bs_R09_R17/2)
struct[0].Gy_ini[33,33] = V_R09*V_R17*(b_R09_R17*sin(theta_R09 - theta_R17) - g_R09_R17*cos(theta_R09 - theta_R17))
struct[0].Gy_ini[34,18] = V_R18*(b_R10_R18*sin(theta_R10 - theta_R18) - g_R10_R18*cos(theta_R10 - theta_R18))
struct[0].Gy_ini[34,19] = V_R10*V_R18*(b_R10_R18*cos(theta_R10 - theta_R18) + g_R10_R18*sin(theta_R10 - theta_R18))
struct[0].Gy_ini[34,34] = V_R10*(b_R10_R18*sin(theta_R10 - theta_R18) - g_R10_R18*cos(theta_R10 - theta_R18)) + 2*V_R18*g_R10_R18
struct[0].Gy_ini[34,35] = V_R10*V_R18*(-b_R10_R18*cos(theta_R10 - theta_R18) - g_R10_R18*sin(theta_R10 - theta_R18))
struct[0].Gy_ini[35,18] = V_R18*(b_R10_R18*cos(theta_R10 - theta_R18) + g_R10_R18*sin(theta_R10 - theta_R18))
struct[0].Gy_ini[35,19] = V_R10*V_R18*(-b_R10_R18*sin(theta_R10 - theta_R18) + g_R10_R18*cos(theta_R10 - theta_R18))
struct[0].Gy_ini[35,34] = V_R10*(b_R10_R18*cos(theta_R10 - theta_R18) + g_R10_R18*sin(theta_R10 - theta_R18)) + 2*V_R18*(-b_R10_R18 - bs_R10_R18/2)
struct[0].Gy_ini[35,35] = V_R10*V_R18*(b_R10_R18*sin(theta_R10 - theta_R18) - g_R10_R18*cos(theta_R10 - theta_R18))
struct[0].Gy_ini[36,18] = cos(delta_R10 - theta_R10)
struct[0].Gy_ini[36,19] = V_R10*sin(delta_R10 - theta_R10)
struct[0].Gy_ini[36,36] = X1d_R10
struct[0].Gy_ini[36,37] = R_a_R10
struct[0].Gy_ini[37,18] = sin(delta_R10 - theta_R10)
struct[0].Gy_ini[37,19] = -V_R10*cos(delta_R10 - theta_R10)
struct[0].Gy_ini[37,36] = R_a_R10
struct[0].Gy_ini[37,37] = -X1q_R10
struct[0].Gy_ini[38,18] = i_d_R10*sin(delta_R10 - theta_R10) + i_q_R10*cos(delta_R10 - theta_R10)
struct[0].Gy_ini[38,19] = -V_R10*i_d_R10*cos(delta_R10 - theta_R10) + V_R10*i_q_R10*sin(delta_R10 - theta_R10)
struct[0].Gy_ini[38,36] = V_R10*sin(delta_R10 - theta_R10)
struct[0].Gy_ini[38,37] = V_R10*cos(delta_R10 - theta_R10)
struct[0].Gy_ini[39,18] = i_d_R10*cos(delta_R10 - theta_R10) - i_q_R10*sin(delta_R10 - theta_R10)
struct[0].Gy_ini[39,19] = V_R10*i_d_R10*sin(delta_R10 - theta_R10) + V_R10*i_q_R10*cos(delta_R10 - theta_R10)
struct[0].Gy_ini[39,36] = V_R10*cos(delta_R10 - theta_R10)
struct[0].Gy_ini[39,37] = -V_R10*sin(delta_R10 - theta_R10)
struct[0].Gy_ini[42,26] = cos(delta_R14 - theta_R14)
struct[0].Gy_ini[42,27] = V_R14*sin(delta_R14 - theta_R14)
struct[0].Gy_ini[42,42] = X1d_R14
struct[0].Gy_ini[42,43] = R_a_R14
struct[0].Gy_ini[43,26] = sin(delta_R14 - theta_R14)
struct[0].Gy_ini[43,27] = -V_R14*cos(delta_R14 - theta_R14)
struct[0].Gy_ini[43,42] = R_a_R14
struct[0].Gy_ini[43,43] = -X1q_R14
struct[0].Gy_ini[44,26] = i_d_R14*sin(delta_R14 - theta_R14) + i_q_R14*cos(delta_R14 - theta_R14)
struct[0].Gy_ini[44,27] = -V_R14*i_d_R14*cos(delta_R14 - theta_R14) + V_R14*i_q_R14*sin(delta_R14 - theta_R14)
struct[0].Gy_ini[44,42] = V_R14*sin(delta_R14 - theta_R14)
struct[0].Gy_ini[44,43] = V_R14*cos(delta_R14 - theta_R14)
struct[0].Gy_ini[45,26] = i_d_R14*cos(delta_R14 - theta_R14) - i_q_R14*sin(delta_R14 - theta_R14)
struct[0].Gy_ini[45,27] = V_R14*i_d_R14*sin(delta_R14 - theta_R14) + V_R14*i_q_R14*cos(delta_R14 - theta_R14)
struct[0].Gy_ini[45,42] = V_R14*cos(delta_R14 - theta_R14)
struct[0].Gy_ini[45,43] = -V_R14*sin(delta_R14 - theta_R14)
@numba.njit(cache=True)
def run(t,struct,mode):
# Parameters:
S_base = struct[0].S_base
g_R01_R02 = struct[0].g_R01_R02
b_R01_R02 = struct[0].b_R01_R02
bs_R01_R02 = struct[0].bs_R01_R02
g_R02_R03 = struct[0].g_R02_R03
b_R02_R03 = struct[0].b_R02_R03
bs_R02_R03 = struct[0].bs_R02_R03
g_R03_R04 = struct[0].g_R03_R04
b_R03_R04 = struct[0].b_R03_R04
bs_R03_R04 = struct[0].bs_R03_R04
g_R04_R05 = struct[0].g_R04_R05
b_R04_R05 = struct[0].b_R04_R05
bs_R04_R05 = struct[0].bs_R04_R05
g_R05_R06 = struct[0].g_R05_R06
b_R05_R06 = struct[0].b_R05_R06
bs_R05_R06 = struct[0].bs_R05_R06
g_R06_R07 = struct[0].g_R06_R07
b_R06_R07 = struct[0].b_R06_R07
bs_R06_R07 = struct[0].bs_R06_R07
g_R07_R08 = struct[0].g_R07_R08
b_R07_R08 = struct[0].b_R07_R08
bs_R07_R08 = struct[0].bs_R07_R08
g_R08_R09 = struct[0].g_R08_R09
b_R08_R09 = struct[0].b_R08_R09
bs_R08_R09 = struct[0].bs_R08_R09
g_R09_R10 = struct[0].g_R09_R10
b_R09_R10 = struct[0].b_R09_R10
bs_R09_R10 = struct[0].bs_R09_R10
g_R03_R11 = struct[0].g_R03_R11
b_R03_R11 = struct[0].b_R03_R11
bs_R03_R11 = struct[0].bs_R03_R11
g_R04_R12 = struct[0].g_R04_R12
b_R04_R12 = struct[0].b_R04_R12
bs_R04_R12 = struct[0].bs_R04_R12
g_R12_R13 = struct[0].g_R12_R13
b_R12_R13 = struct[0].b_R12_R13
bs_R12_R13 = struct[0].bs_R12_R13
g_R13_R14 = struct[0].g_R13_R14
b_R13_R14 = struct[0].b_R13_R14
bs_R13_R14 = struct[0].bs_R13_R14
g_R14_R15 = struct[0].g_R14_R15
b_R14_R15 = struct[0].b_R14_R15
bs_R14_R15 = struct[0].bs_R14_R15
g_R06_R16 = struct[0].g_R06_R16
b_R06_R16 = struct[0].b_R06_R16
bs_R06_R16 = struct[0].bs_R06_R16
g_R09_R17 = struct[0].g_R09_R17
b_R09_R17 = struct[0].b_R09_R17
bs_R09_R17 = struct[0].bs_R09_R17
g_R10_R18 = struct[0].g_R10_R18
b_R10_R18 = struct[0].b_R10_R18
bs_R10_R18 = struct[0].bs_R10_R18
U_R01_n = struct[0].U_R01_n
U_R02_n = struct[0].U_R02_n
U_R03_n = struct[0].U_R03_n
U_R04_n = struct[0].U_R04_n
U_R05_n = struct[0].U_R05_n
U_R06_n = struct[0].U_R06_n
U_R07_n = struct[0].U_R07_n
U_R08_n = struct[0].U_R08_n
U_R09_n = struct[0].U_R09_n
U_R10_n = struct[0].U_R10_n
U_R11_n = struct[0].U_R11_n
U_R12_n = struct[0].U_R12_n
U_R13_n = struct[0].U_R13_n
U_R14_n = struct[0].U_R14_n
U_R15_n = struct[0].U_R15_n
U_R16_n = struct[0].U_R16_n
U_R17_n = struct[0].U_R17_n
U_R18_n = struct[0].U_R18_n
S_n_R10 = struct[0].S_n_R10
H_R10 = struct[0].H_R10
Omega_b_R10 = struct[0].Omega_b_R10
T1d0_R10 = struct[0].T1d0_R10
T1q0_R10 = struct[0].T1q0_R10
X_d_R10 = struct[0].X_d_R10
X_q_R10 = struct[0].X_q_R10
X1d_R10 = struct[0].X1d_R10
X1q_R10 = struct[0].X1q_R10
D_R10 = struct[0].D_R10
R_a_R10 = struct[0].R_a_R10
K_delta_R10 = struct[0].K_delta_R10
K_a_R10 = struct[0].K_a_R10
K_ai_R10 = struct[0].K_ai_R10
T_r_R10 = struct[0].T_r_R10
Droop_R10 = struct[0].Droop_R10
T_m_R10 = struct[0].T_m_R10
S_n_R14 = struct[0].S_n_R14
H_R14 = struct[0].H_R14
Omega_b_R14 = struct[0].Omega_b_R14
T1d0_R14 = struct[0].T1d0_R14
T1q0_R14 = struct[0].T1q0_R14
X_d_R14 = struct[0].X_d_R14
X_q_R14 = struct[0].X_q_R14
X1d_R14 = struct[0].X1d_R14
X1q_R14 = struct[0].X1q_R14
D_R14 = struct[0].D_R14
R_a_R14 = struct[0].R_a_R14
K_delta_R14 = struct[0].K_delta_R14
K_a_R14 = struct[0].K_a_R14
K_ai_R14 = struct[0].K_ai_R14
T_r_R14 = struct[0].T_r_R14
Droop_R14 = struct[0].Droop_R14
T_m_R14 = struct[0].T_m_R14
K_sec_R10 = struct[0].K_sec_R10
K_sec_R14 = struct[0].K_sec_R14
# Inputs:
P_R01 = struct[0].P_R01
Q_R01 = struct[0].Q_R01
P_R02 = struct[0].P_R02
Q_R02 = struct[0].Q_R02
P_R03 = struct[0].P_R03
Q_R03 = struct[0].Q_R03
P_R04 = struct[0].P_R04
Q_R04 = struct[0].Q_R04
P_R05 = struct[0].P_R05
Q_R05 = struct[0].Q_R05
P_R06 = struct[0].P_R06
Q_R06 = struct[0].Q_R06
P_R07 = struct[0].P_R07
Q_R07 = struct[0].Q_R07
P_R08 = struct[0].P_R08
Q_R08 = struct[0].Q_R08
P_R09 = struct[0].P_R09
Q_R09 = struct[0].Q_R09
P_R10 = struct[0].P_R10
Q_R10 = struct[0].Q_R10
P_R11 = struct[0].P_R11
Q_R11 = struct[0].Q_R11
P_R12 = struct[0].P_R12
Q_R12 = struct[0].Q_R12
P_R13 = struct[0].P_R13
Q_R13 = struct[0].Q_R13
P_R14 = struct[0].P_R14
Q_R14 = struct[0].Q_R14
P_R15 = struct[0].P_R15
Q_R15 = struct[0].Q_R15
P_R16 = struct[0].P_R16
Q_R16 = struct[0].Q_R16
P_R17 = struct[0].P_R17
Q_R17 = struct[0].Q_R17
P_R18 = struct[0].P_R18
Q_R18 = struct[0].Q_R18
v_ref_R10 = struct[0].v_ref_R10
v_pss_R10 = struct[0].v_pss_R10
p_c_R10 = struct[0].p_c_R10
v_ref_R14 = struct[0].v_ref_R14
v_pss_R14 = struct[0].v_pss_R14
p_c_R14 = struct[0].p_c_R14
# Dynamical states:
delta_R10 = struct[0].x[0,0]
omega_R10 = struct[0].x[1,0]
e1q_R10 = struct[0].x[2,0]
e1d_R10 = struct[0].x[3,0]
v_c_R10 = struct[0].x[4,0]
xi_v_R10 = struct[0].x[5,0]
p_m_R10 = struct[0].x[6,0]
delta_R14 = struct[0].x[7,0]
omega_R14 = struct[0].x[8,0]
e1q_R14 = struct[0].x[9,0]
e1d_R14 = struct[0].x[10,0]
v_c_R14 = struct[0].x[11,0]
xi_v_R14 = struct[0].x[12,0]
p_m_R14 = struct[0].x[13,0]
xi_freq = struct[0].x[14,0]
# Algebraic states:
V_R01 = struct[0].y_run[0,0]
theta_R01 = struct[0].y_run[1,0]
V_R02 = struct[0].y_run[2,0]
theta_R02 = struct[0].y_run[3,0]
V_R03 = struct[0].y_run[4,0]
theta_R03 = struct[0].y_run[5,0]
V_R04 = struct[0].y_run[6,0]
theta_R04 = struct[0].y_run[7,0]
V_R05 = struct[0].y_run[8,0]
theta_R05 = struct[0].y_run[9,0]
V_R06 = struct[0].y_run[10,0]
theta_R06 = struct[0].y_run[11,0]
V_R07 = struct[0].y_run[12,0]
theta_R07 = struct[0].y_run[13,0]
V_R08 = struct[0].y_run[14,0]
theta_R08 = struct[0].y_run[15,0]
V_R09 = struct[0].y_run[16,0]
theta_R09 = struct[0].y_run[17,0]
V_R10 = struct[0].y_run[18,0]
theta_R10 = struct[0].y_run[19,0]
V_R11 = struct[0].y_run[20,0]
theta_R11 = struct[0].y_run[21,0]
V_R12 = struct[0].y_run[22,0]
theta_R12 = struct[0].y_run[23,0]
V_R13 = struct[0].y_run[24,0]
theta_R13 = struct[0].y_run[25,0]
V_R14 = struct[0].y_run[26,0]
theta_R14 = struct[0].y_run[27,0]
V_R15 = struct[0].y_run[28,0]
theta_R15 = struct[0].y_run[29,0]
V_R16 = struct[0].y_run[30,0]
theta_R16 = struct[0].y_run[31,0]
V_R17 = struct[0].y_run[32,0]
theta_R17 = struct[0].y_run[33,0]
V_R18 = struct[0].y_run[34,0]
theta_R18 = struct[0].y_run[35,0]
i_d_R10 = struct[0].y_run[36,0]
i_q_R10 = struct[0].y_run[37,0]
p_g_R10_1 = struct[0].y_run[38,0]
q_g_R10_1 = struct[0].y_run[39,0]
v_f_R10 = struct[0].y_run[40,0]
p_m_ref_R10 = struct[0].y_run[41,0]
i_d_R14 = struct[0].y_run[42,0]
i_q_R14 = struct[0].y_run[43,0]
p_g_R14_1 = struct[0].y_run[44,0]
q_g_R14_1 = struct[0].y_run[45,0]
v_f_R14 = struct[0].y_run[46,0]
p_m_ref_R14 = struct[0].y_run[47,0]
omega_coi = struct[0].y_run[48,0]
p_r_R10 = struct[0].y_run[49,0]
p_r_R14 = struct[0].y_run[50,0]
struct[0].u_run[0,0] = P_R01
struct[0].u_run[1,0] = Q_R01
struct[0].u_run[2,0] = P_R02
struct[0].u_run[3,0] = Q_R02
struct[0].u_run[4,0] = P_R03
struct[0].u_run[5,0] = Q_R03
struct[0].u_run[6,0] = P_R04
struct[0].u_run[7,0] = Q_R04
struct[0].u_run[8,0] = P_R05
struct[0].u_run[9,0] = Q_R05
struct[0].u_run[10,0] = P_R06
struct[0].u_run[11,0] = Q_R06
struct[0].u_run[12,0] = P_R07
struct[0].u_run[13,0] = Q_R07
struct[0].u_run[14,0] = P_R08
struct[0].u_run[15,0] = Q_R08
struct[0].u_run[16,0] = P_R09
struct[0].u_run[17,0] = Q_R09
struct[0].u_run[18,0] = P_R10
struct[0].u_run[19,0] = Q_R10
struct[0].u_run[20,0] = P_R11
struct[0].u_run[21,0] = Q_R11
struct[0].u_run[22,0] = P_R12
struct[0].u_run[23,0] = Q_R12
struct[0].u_run[24,0] = P_R13
struct[0].u_run[25,0] = Q_R13
struct[0].u_run[26,0] = P_R14
struct[0].u_run[27,0] = Q_R14
struct[0].u_run[28,0] = P_R15
struct[0].u_run[29,0] = Q_R15
struct[0].u_run[30,0] = P_R16
struct[0].u_run[31,0] = Q_R16
struct[0].u_run[32,0] = P_R17
struct[0].u_run[33,0] = Q_R17
struct[0].u_run[34,0] = P_R18
struct[0].u_run[35,0] = Q_R18
struct[0].u_run[36,0] = v_ref_R10
struct[0].u_run[37,0] = v_pss_R10
struct[0].u_run[38,0] = p_c_R10
struct[0].u_run[39,0] = v_ref_R14
struct[0].u_run[40,0] = v_pss_R14
struct[0].u_run[41,0] = p_c_R14
# Differential equations:
if mode == 2:
struct[0].f[0,0] = -K_delta_R10*delta_R10 + Omega_b_R10*(omega_R10 - omega_coi)
struct[0].f[1,0] = (-D_R10*(omega_R10 - omega_coi) - i_d_R10*(R_a_R10*i_d_R10 + V_R10*sin(delta_R10 - theta_R10)) - i_q_R10*(R_a_R10*i_q_R10 + V_R10*cos(delta_R10 - theta_R10)) + p_m_R10)/(2*H_R10)
struct[0].f[2,0] = (-e1q_R10 - i_d_R10*(-X1d_R10 + X_d_R10) + v_f_R10)/T1d0_R10
struct[0].f[3,0] = (-e1d_R10 + i_q_R10*(-X1q_R10 + X_q_R10))/T1q0_R10
struct[0].f[4,0] = (V_R10 - v_c_R10)/T_r_R10
struct[0].f[5,0] = -V_R10 + v_ref_R10
struct[0].f[6,0] = (-p_m_R10 + p_m_ref_R10)/T_m_R10
struct[0].f[7,0] = -K_delta_R14*delta_R14 + Omega_b_R14*(omega_R14 - omega_coi)
struct[0].f[8,0] = (-D_R14*(omega_R14 - omega_coi) - i_d_R14*(R_a_R14*i_d_R14 + V_R14*sin(delta_R14 - theta_R14)) - i_q_R14*(R_a_R14*i_q_R14 + V_R14*cos(delta_R14 - theta_R14)) + p_m_R14)/(2*H_R14)
struct[0].f[9,0] = (-e1q_R14 - i_d_R14*(-X1d_R14 + X_d_R14) + v_f_R14)/T1d0_R14
struct[0].f[10,0] = (-e1d_R14 + i_q_R14*(-X1q_R14 + X_q_R14))/T1q0_R14
struct[0].f[11,0] = (V_R14 - v_c_R14)/T_r_R14
struct[0].f[12,0] = -V_R14 + v_ref_R14
struct[0].f[13,0] = (-p_m_R14 + p_m_ref_R14)/T_m_R14
struct[0].f[14,0] = 1 - omega_coi
# Algebraic equations:
if mode == 3:
struct[0].g[:,:] = np.ascontiguousarray(struct[0].Gy) @ np.ascontiguousarray(struct[0].y_run) + np.ascontiguousarray(struct[0].Gu) @ np.ascontiguousarray(struct[0].u_run)
struct[0].g[0,0] = -P_R01/S_base + V_R01**2*g_R01_R02 + V_R01*V_R02*(-b_R01_R02*sin(theta_R01 - theta_R02) - g_R01_R02*cos(theta_R01 - theta_R02))
struct[0].g[1,0] = -Q_R01/S_base + V_R01**2*(-b_R01_R02 - bs_R01_R02/2) + V_R01*V_R02*(b_R01_R02*cos(theta_R01 - theta_R02) - g_R01_R02*sin(theta_R01 - theta_R02))
struct[0].g[2,0] = -P_R02/S_base + V_R01*V_R02*(b_R01_R02*sin(theta_R01 - theta_R02) - g_R01_R02*cos(theta_R01 - theta_R02)) + V_R02**2*(g_R01_R02 + g_R02_R03) + V_R02*V_R03*(-b_R02_R03*sin(theta_R02 - theta_R03) - g_R02_R03*cos(theta_R02 - theta_R03))
struct[0].g[3,0] = -Q_R02/S_base + V_R01*V_R02*(b_R01_R02*cos(theta_R01 - theta_R02) + g_R01_R02*sin(theta_R01 - theta_R02)) + V_R02**2*(-b_R01_R02 - b_R02_R03 - bs_R01_R02/2 - bs_R02_R03/2) + V_R02*V_R03*(b_R02_R03*cos(theta_R02 - theta_R03) - g_R02_R03*sin(theta_R02 - theta_R03))
struct[0].g[4,0] = -P_R03/S_base + V_R02*V_R03*(b_R02_R03*sin(theta_R02 - theta_R03) - g_R02_R03*cos(theta_R02 - theta_R03)) + V_R03**2*(g_R02_R03 + g_R03_R04 + g_R03_R11) + V_R03*V_R04*(-b_R03_R04*sin(theta_R03 - theta_R04) - g_R03_R04*cos(theta_R03 - theta_R04)) + V_R03*V_R11*(-b_R03_R11*sin(theta_R03 - theta_R11) - g_R03_R11*cos(theta_R03 - theta_R11))
struct[0].g[5,0] = -Q_R03/S_base + V_R02*V_R03*(b_R02_R03*cos(theta_R02 - theta_R03) + g_R02_R03*sin(theta_R02 - theta_R03)) + V_R03**2*(-b_R02_R03 - b_R03_R04 - b_R03_R11 - bs_R02_R03/2 - bs_R03_R04/2 - bs_R03_R11/2) + V_R03*V_R04*(b_R03_R04*cos(theta_R03 - theta_R04) - g_R03_R04*sin(theta_R03 - theta_R04)) + V_R03*V_R11*(b_R03_R11*cos(theta_R03 - theta_R11) - g_R03_R11*sin(theta_R03 - theta_R11))
struct[0].g[6,0] = -P_R04/S_base + V_R03*V_R04*(b_R03_R04*sin(theta_R03 - theta_R04) - g_R03_R04*cos(theta_R03 - theta_R04)) + V_R04**2*(g_R03_R04 + g_R04_R05 + g_R04_R12) + V_R04*V_R05*(-b_R04_R05*sin(theta_R04 - theta_R05) - g_R04_R05*cos(theta_R04 - theta_R05)) + V_R04*V_R12*(-b_R04_R12*sin(theta_R04 - theta_R12) - g_R04_R12*cos(theta_R04 - theta_R12))
struct[0].g[7,0] = -Q_R04/S_base + V_R03*V_R04*(b_R03_R04*cos(theta_R03 - theta_R04) + g_R03_R04*sin(theta_R03 - theta_R04)) + V_R04**2*(-b_R03_R04 - b_R04_R05 - b_R04_R12 - bs_R03_R04/2 - bs_R04_R05/2 - bs_R04_R12/2) + V_R04*V_R05*(b_R04_R05*cos(theta_R04 - theta_R05) - g_R04_R05*sin(theta_R04 - theta_R05)) + V_R04*V_R12*(b_R04_R12*cos(theta_R04 - theta_R12) - g_R04_R12*sin(theta_R04 - theta_R12))
struct[0].g[8,0] = -P_R05/S_base + V_R04*V_R05*(b_R04_R05*sin(theta_R04 - theta_R05) - g_R04_R05*cos(theta_R04 - theta_R05)) + V_R05**2*(g_R04_R05 + g_R05_R06) + V_R05*V_R06*(-b_R05_R06*sin(theta_R05 - theta_R06) - g_R05_R06*cos(theta_R05 - theta_R06))
struct[0].g[9,0] = -Q_R05/S_base + V_R04*V_R05*(b_R04_R05*cos(theta_R04 - theta_R05) + g_R04_R05*sin(theta_R04 - theta_R05)) + V_R05**2*(-b_R04_R05 - b_R05_R06 - bs_R04_R05/2 - bs_R05_R06/2) + V_R05*V_R06*(b_R05_R06*cos(theta_R05 - theta_R06) - g_R05_R06*sin(theta_R05 - theta_R06))
struct[0].g[10,0] = -P_R06/S_base + V_R05*V_R06*(b_R05_R06*sin(theta_R05 - theta_R06) - g_R05_R06*cos(theta_R05 - theta_R06)) + V_R06**2*(g_R05_R06 + g_R06_R07 + g_R06_R16) + V_R06*V_R07*(-b_R06_R07*sin(theta_R06 - theta_R07) - g_R06_R07*cos(theta_R06 - theta_R07)) + V_R06*V_R16*(-b_R06_R16*sin(theta_R06 - theta_R16) - g_R06_R16*cos(theta_R06 - theta_R16))
struct[0].g[11,0] = -Q_R06/S_base + V_R05*V_R06*(b_R05_R06*cos(theta_R05 - theta_R06) + g_R05_R06*sin(theta_R05 - theta_R06)) + V_R06**2*(-b_R05_R06 - b_R06_R07 - b_R06_R16 - bs_R05_R06/2 - bs_R06_R07/2 - bs_R06_R16/2) + V_R06*V_R07*(b_R06_R07*cos(theta_R06 - theta_R07) - g_R06_R07*sin(theta_R06 - theta_R07)) + V_R06*V_R16*(b_R06_R16*cos(theta_R06 - theta_R16) - g_R06_R16*sin(theta_R06 - theta_R16))
struct[0].g[12,0] = -P_R07/S_base + V_R06*V_R07*(b_R06_R07*sin(theta_R06 - theta_R07) - g_R06_R07*cos(theta_R06 - theta_R07)) + V_R07**2*(g_R06_R07 + g_R07_R08) + V_R07*V_R08*(-b_R07_R08*sin(theta_R07 - theta_R08) - g_R07_R08*cos(theta_R07 - theta_R08))
struct[0].g[13,0] = -Q_R07/S_base + V_R06*V_R07*(b_R06_R07*cos(theta_R06 - theta_R07) + g_R06_R07*sin(theta_R06 - theta_R07)) + V_R07**2*(-b_R06_R07 - b_R07_R08 - bs_R06_R07/2 - bs_R07_R08/2) + V_R07*V_R08*(b_R07_R08*cos(theta_R07 - theta_R08) - g_R07_R08*sin(theta_R07 - theta_R08))
struct[0].g[14,0] = -P_R08/S_base + V_R07*V_R08*(b_R07_R08*sin(theta_R07 - theta_R08) - g_R07_R08*cos(theta_R07 - theta_R08)) + V_R08**2*(g_R07_R08 + g_R08_R09) + V_R08*V_R09*(-b_R08_R09*sin(theta_R08 - theta_R09) - g_R08_R09*cos(theta_R08 - theta_R09))
struct[0].g[15,0] = -Q_R08/S_base + V_R07*V_R08*(b_R07_R08*cos(theta_R07 - theta_R08) + g_R07_R08*sin(theta_R07 - theta_R08)) + V_R08**2*(-b_R07_R08 - b_R08_R09 - bs_R07_R08/2 - bs_R08_R09/2) + V_R08*V_R09*(b_R08_R09*cos(theta_R08 - theta_R09) - g_R08_R09*sin(theta_R08 - theta_R09))
struct[0].g[16,0] = -P_R09/S_base + V_R08*V_R09*(b_R08_R09*sin(theta_R08 - theta_R09) - g_R08_R09*cos(theta_R08 - theta_R09)) + V_R09**2*(g_R08_R09 + g_R09_R10 + g_R09_R17) + V_R09*V_R10*(-b_R09_R10*sin(theta_R09 - theta_R10) - g_R09_R10*cos(theta_R09 - theta_R10)) + V_R09*V_R17*(-b_R09_R17*sin(theta_R09 - theta_R17) - g_R09_R17*cos(theta_R09 - theta_R17))
struct[0].g[17,0] = -Q_R09/S_base + V_R08*V_R09*(b_R08_R09*cos(theta_R08 - theta_R09) + g_R08_R09*sin(theta_R08 - theta_R09)) + V_R09**2*(-b_R08_R09 - b_R09_R10 - b_R09_R17 - bs_R08_R09/2 - bs_R09_R10/2 - bs_R09_R17/2) + V_R09*V_R10*(b_R09_R10*cos(theta_R09 - theta_R10) - g_R09_R10*sin(theta_R09 - theta_R10)) + V_R09*V_R17*(b_R09_R17*cos(theta_R09 - theta_R17) - g_R09_R17*sin(theta_R09 - theta_R17))
struct[0].g[18,0] = -P_R10/S_base + V_R09*V_R10*(b_R09_R10*sin(theta_R09 - theta_R10) - g_R09_R10*cos(theta_R09 - theta_R10)) + V_R10**2*(g_R09_R10 + g_R10_R18) + V_R10*V_R18*(-b_R10_R18*sin(theta_R10 - theta_R18) - g_R10_R18*cos(theta_R10 - theta_R18)) - S_n_R10*p_g_R10_1/S_base
struct[0].g[19,0] = -Q_R10/S_base + V_R09*V_R10*(b_R09_R10*cos(theta_R09 - theta_R10) + g_R09_R10*sin(theta_R09 - theta_R10)) + V_R10**2*(-b_R09_R10 - b_R10_R18 - bs_R09_R10/2 - bs_R10_R18/2) + V_R10*V_R18*(b_R10_R18*cos(theta_R10 - theta_R18) - g_R10_R18*sin(theta_R10 - theta_R18)) - S_n_R10*q_g_R10_1/S_base
struct[0].g[20,0] = -P_R11/S_base + V_R03*V_R11*(b_R03_R11*sin(theta_R03 - theta_R11) - g_R03_R11*cos(theta_R03 - theta_R11)) + V_R11**2*g_R03_R11
struct[0].g[21,0] = -Q_R11/S_base + V_R03*V_R11*(b_R03_R11*cos(theta_R03 - theta_R11) + g_R03_R11*sin(theta_R03 - theta_R11)) + V_R11**2*(-b_R03_R11 - bs_R03_R11/2)
struct[0].g[22,0] = -P_R12/S_base + V_R04*V_R12*(b_R04_R12*sin(theta_R04 - theta_R12) - g_R04_R12*cos(theta_R04 - theta_R12)) + V_R12**2*(g_R04_R12 + g_R12_R13) + V_R12*V_R13*(-b_R12_R13*sin(theta_R12 - theta_R13) - g_R12_R13*cos(theta_R12 - theta_R13))
struct[0].g[23,0] = -Q_R12/S_base + V_R04*V_R12*(b_R04_R12*cos(theta_R04 - theta_R12) + g_R04_R12*sin(theta_R04 - theta_R12)) + V_R12**2*(-b_R04_R12 - b_R12_R13 - bs_R04_R12/2 - bs_R12_R13/2) + V_R12*V_R13*(b_R12_R13*cos(theta_R12 - theta_R13) - g_R12_R13*sin(theta_R12 - theta_R13))
struct[0].g[24,0] = -P_R13/S_base + V_R12*V_R13*(b_R12_R13*sin(theta_R12 - theta_R13) - g_R12_R13*cos(theta_R12 - theta_R13)) + V_R13**2*(g_R12_R13 + g_R13_R14) + V_R13*V_R14*(-b_R13_R14*sin(theta_R13 - theta_R14) - g_R13_R14*cos(theta_R13 - theta_R14))
struct[0].g[25,0] = -Q_R13/S_base + V_R12*V_R13*(b_R12_R13*cos(theta_R12 - theta_R13) + g_R12_R13*sin(theta_R12 - theta_R13)) + V_R13**2*(-b_R12_R13 - b_R13_R14 - bs_R12_R13/2 - bs_R13_R14/2) + V_R13*V_R14*(b_R13_R14*cos(theta_R13 - theta_R14) - g_R13_R14*sin(theta_R13 - theta_R14))
struct[0].g[26,0] = -P_R14/S_base + V_R13*V_R14*(b_R13_R14*sin(theta_R13 - theta_R14) - g_R13_R14*cos(theta_R13 - theta_R14)) + V_R14**2*(g_R13_R14 + g_R14_R15) + V_R14*V_R15*(-b_R14_R15*sin(theta_R14 - theta_R15) - g_R14_R15*cos(theta_R14 - theta_R15)) - S_n_R14*p_g_R14_1/S_base
struct[0].g[27,0] = -Q_R14/S_base + V_R13*V_R14*(b_R13_R14*cos(theta_R13 - theta_R14) + g_R13_R14*sin(theta_R13 - theta_R14)) + V_R14**2*(-b_R13_R14 - b_R14_R15 - bs_R13_R14/2 - bs_R14_R15/2) + V_R14*V_R15*(b_R14_R15*cos(theta_R14 - theta_R15) - g_R14_R15*sin(theta_R14 - theta_R15)) - S_n_R14*q_g_R14_1/S_base
struct[0].g[28,0] = -P_R15/S_base + V_R14*V_R15*(b_R14_R15*sin(theta_R14 - theta_R15) - g_R14_R15*cos(theta_R14 - theta_R15)) + V_R15**2*g_R14_R15
struct[0].g[29,0] = -Q_R15/S_base + V_R14*V_R15*(b_R14_R15*cos(theta_R14 - theta_R15) + g_R14_R15*sin(theta_R14 - theta_R15)) + V_R15**2*(-b_R14_R15 - bs_R14_R15/2)
struct[0].g[30,0] = -P_R16/S_base + V_R06*V_R16*(b_R06_R16*sin(theta_R06 - theta_R16) - g_R06_R16*cos(theta_R06 - theta_R16)) + V_R16**2*g_R06_R16
struct[0].g[31,0] = -Q_R16/S_base + V_R06*V_R16*(b_R06_R16*cos(theta_R06 - theta_R16) + g_R06_R16*sin(theta_R06 - theta_R16)) + V_R16**2*(-b_R06_R16 - bs_R06_R16/2)
struct[0].g[32,0] = -P_R17/S_base + V_R09*V_R17*(b_R09_R17*sin(theta_R09 - theta_R17) - g_R09_R17*cos(theta_R09 - theta_R17)) + V_R17**2*g_R09_R17
struct[0].g[33,0] = -Q_R17/S_base + V_R09*V_R17*(b_R09_R17*cos(theta_R09 - theta_R17) + g_R09_R17*sin(theta_R09 - theta_R17)) + V_R17**2*(-b_R09_R17 - bs_R09_R17/2)
struct[0].g[34,0] = -P_R18/S_base + V_R10*V_R18*(b_R10_R18*sin(theta_R10 - theta_R18) - g_R10_R18*cos(theta_R10 - theta_R18)) + V_R18**2*g_R10_R18
struct[0].g[35,0] = -Q_R18/S_base + V_R10*V_R18*(b_R10_R18*cos(theta_R10 - theta_R18) + g_R10_R18*sin(theta_R10 - theta_R18)) + V_R18**2*(-b_R10_R18 - bs_R10_R18/2)
struct[0].g[36,0] = R_a_R10*i_q_R10 + V_R10*cos(delta_R10 - theta_R10) + X1d_R10*i_d_R10 - e1q_R10
struct[0].g[37,0] = R_a_R10*i_d_R10 + V_R10*sin(delta_R10 - theta_R10) - X1q_R10*i_q_R10 - e1d_R10
struct[0].g[38,0] = V_R10*i_d_R10*sin(delta_R10 - theta_R10) + V_R10*i_q_R10*cos(delta_R10 - theta_R10) - p_g_R10_1
struct[0].g[39,0] = V_R10*i_d_R10*cos(delta_R10 - theta_R10) - V_R10*i_q_R10*sin(delta_R10 - theta_R10) - q_g_R10_1
struct[0].g[40,0] = K_a_R10*(-v_c_R10 + v_pss_R10 + v_ref_R10) + K_ai_R10*xi_v_R10 - v_f_R10
struct[0].g[41,0] = p_c_R10 - p_m_ref_R10 + p_r_R10 - (omega_R10 - 1)/Droop_R10
struct[0].g[42,0] = R_a_R14*i_q_R14 + V_R14*cos(delta_R14 - theta_R14) + X1d_R14*i_d_R14 - e1q_R14
struct[0].g[43,0] = R_a_R14*i_d_R14 + V_R14*sin(delta_R14 - theta_R14) - X1q_R14*i_q_R14 - e1d_R14
struct[0].g[44,0] = V_R14*i_d_R14*sin(delta_R14 - theta_R14) + V_R14*i_q_R14*cos(delta_R14 - theta_R14) - p_g_R14_1
struct[0].g[45,0] = V_R14*i_d_R14*cos(delta_R14 - theta_R14) - V_R14*i_q_R14*sin(delta_R14 - theta_R14) - q_g_R14_1
struct[0].g[46,0] = K_a_R14*(-v_c_R14 + v_pss_R14 + v_ref_R14) + K_ai_R14*xi_v_R14 - v_f_R14
struct[0].g[47,0] = p_c_R14 - p_m_ref_R14 + p_r_R14 - (omega_R14 - 1)/Droop_R14
struct[0].g[48,0] = omega_R10/2 + omega_R14/2 - omega_coi
struct[0].g[49,0] = K_sec_R10*xi_freq/2 - p_r_R10
struct[0].g[50,0] = K_sec_R14*xi_freq/2 - p_r_R14
# Outputs:
if mode == 3:
struct[0].h[0,0] = V_R01
struct[0].h[1,0] = V_R02
struct[0].h[2,0] = V_R03
struct[0].h[3,0] = V_R04
struct[0].h[4,0] = V_R05
struct[0].h[5,0] = V_R06
struct[0].h[6,0] = V_R07
struct[0].h[7,0] = V_R08
struct[0].h[8,0] = V_R09
struct[0].h[9,0] = V_R10
struct[0].h[10,0] = V_R11
struct[0].h[11,0] = V_R12
struct[0].h[12,0] = V_R13
struct[0].h[13,0] = V_R14
struct[0].h[14,0] = V_R15
struct[0].h[15,0] = V_R16
struct[0].h[16,0] = V_R17
struct[0].h[17,0] = V_R18
if mode == 10:
struct[0].Fx[0,0] = -K_delta_R10
struct[0].Fx[0,1] = Omega_b_R10
struct[0].Fx[1,0] = (-V_R10*i_d_R10*cos(delta_R10 - theta_R10) + V_R10*i_q_R10*sin(delta_R10 - theta_R10))/(2*H_R10)
struct[0].Fx[1,1] = -D_R10/(2*H_R10)
struct[0].Fx[1,6] = 1/(2*H_R10)
struct[0].Fx[2,2] = -1/T1d0_R10
struct[0].Fx[3,3] = -1/T1q0_R10
struct[0].Fx[4,4] = -1/T_r_R10
struct[0].Fx[6,6] = -1/T_m_R10
struct[0].Fx[7,7] = -K_delta_R14
struct[0].Fx[7,8] = Omega_b_R14
struct[0].Fx[8,7] = (-V_R14*i_d_R14*cos(delta_R14 - theta_R14) + V_R14*i_q_R14*sin(delta_R14 - theta_R14))/(2*H_R14)
struct[0].Fx[8,8] = -D_R14/(2*H_R14)
struct[0].Fx[8,13] = 1/(2*H_R14)
struct[0].Fx[9,9] = -1/T1d0_R14
struct[0].Fx[10,10] = -1/T1q0_R14
struct[0].Fx[11,11] = -1/T_r_R14
struct[0].Fx[13,13] = -1/T_m_R14
if mode == 11:
struct[0].Fy[0,48] = -Omega_b_R10
struct[0].Fy[1,18] = (-i_d_R10*sin(delta_R10 - theta_R10) - i_q_R10*cos(delta_R10 - theta_R10))/(2*H_R10)
struct[0].Fy[1,19] = (V_R10*i_d_R10*cos(delta_R10 - theta_R10) - V_R10*i_q_R10*sin(delta_R10 - theta_R10))/(2*H_R10)
struct[0].Fy[1,36] = (-2*R_a_R10*i_d_R10 - V_R10*sin(delta_R10 - theta_R10))/(2*H_R10)
struct[0].Fy[1,37] = (-2*R_a_R10*i_q_R10 - V_R10*cos(delta_R10 - theta_R10))/(2*H_R10)
struct[0].Fy[1,48] = D_R10/(2*H_R10)
struct[0].Fy[2,36] = (X1d_R10 - X_d_R10)/T1d0_R10
struct[0].Fy[2,40] = 1/T1d0_R10
struct[0].Fy[3,37] = (-X1q_R10 + X_q_R10)/T1q0_R10
struct[0].Fy[4,18] = 1/T_r_R10
struct[0].Fy[5,18] = -1
struct[0].Fy[6,41] = 1/T_m_R10
struct[0].Fy[7,48] = -Omega_b_R14
struct[0].Fy[8,26] = (-i_d_R14*sin(delta_R14 - theta_R14) - i_q_R14*cos(delta_R14 - theta_R14))/(2*H_R14)
struct[0].Fy[8,27] = (V_R14*i_d_R14*cos(delta_R14 - theta_R14) - V_R14*i_q_R14*sin(delta_R14 - theta_R14))/(2*H_R14)
struct[0].Fy[8,42] = (-2*R_a_R14*i_d_R14 - V_R14*sin(delta_R14 - theta_R14))/(2*H_R14)
struct[0].Fy[8,43] = (-2*R_a_R14*i_q_R14 - V_R14*cos(delta_R14 - theta_R14))/(2*H_R14)
struct[0].Fy[8,48] = D_R14/(2*H_R14)
struct[0].Fy[9,42] = (X1d_R14 - X_d_R14)/T1d0_R14
struct[0].Fy[9,46] = 1/T1d0_R14
struct[0].Fy[10,43] = (-X1q_R14 + X_q_R14)/T1q0_R14
struct[0].Fy[11,26] = 1/T_r_R14
struct[0].Fy[12,26] = -1
struct[0].Fy[13,47] = 1/T_m_R14
struct[0].Fy[14,48] = -1
struct[0].Gx[36,0] = -V_R10*sin(delta_R10 - theta_R10)
struct[0].Gx[36,2] = -1
struct[0].Gx[37,0] = V_R10*cos(delta_R10 - theta_R10)
struct[0].Gx[37,3] = -1
struct[0].Gx[38,0] = V_R10*i_d_R10*cos(delta_R10 - theta_R10) - V_R10*i_q_R10*sin(delta_R10 - theta_R10)
struct[0].Gx[39,0] = -V_R10*i_d_R10*sin(delta_R10 - theta_R10) - V_R10*i_q_R10*cos(delta_R10 - theta_R10)
struct[0].Gx[40,4] = -K_a_R10
struct[0].Gx[40,5] = K_ai_R10
struct[0].Gx[41,1] = -1/Droop_R10
struct[0].Gx[42,7] = -V_R14*sin(delta_R14 - theta_R14)
struct[0].Gx[42,9] = -1
struct[0].Gx[43,7] = V_R14*cos(delta_R14 - theta_R14)
struct[0].Gx[43,10] = -1
struct[0].Gx[44,7] = V_R14*i_d_R14*cos(delta_R14 - theta_R14) - V_R14*i_q_R14*sin(delta_R14 - theta_R14)
struct[0].Gx[45,7] = -V_R14*i_d_R14*sin(delta_R14 - theta_R14) - V_R14*i_q_R14*cos(delta_R14 - theta_R14)
struct[0].Gx[46,11] = -K_a_R14
struct[0].Gx[46,12] = K_ai_R14
struct[0].Gx[47,8] = -1/Droop_R14
struct[0].Gx[48,1] = 1/2
struct[0].Gx[48,8] = 1/2
struct[0].Gx[49,14] = K_sec_R10/2
struct[0].Gx[50,14] = K_sec_R14/2
struct[0].Gy[0,0] = 2*V_R01*g_R01_R02 + V_R02*(-b_R01_R02*sin(theta_R01 - theta_R02) - g_R01_R02*cos(theta_R01 - theta_R02))
struct[0].Gy[0,1] = V_R01*V_R02*(-b_R01_R02*cos(theta_R01 - theta_R02) + g_R01_R02*sin(theta_R01 - theta_R02))
struct[0].Gy[0,2] = V_R01*(-b_R01_R02*sin(theta_R01 - theta_R02) - g_R01_R02*cos(theta_R01 - theta_R02))
struct[0].Gy[0,3] = V_R01*V_R02*(b_R01_R02*cos(theta_R01 - theta_R02) - g_R01_R02*sin(theta_R01 - theta_R02))
struct[0].Gy[1,0] = 2*V_R01*(-b_R01_R02 - bs_R01_R02/2) + V_R02*(b_R01_R02*cos(theta_R01 - theta_R02) - g_R01_R02*sin(theta_R01 - theta_R02))
struct[0].Gy[1,1] = V_R01*V_R02*(-b_R01_R02*sin(theta_R01 - theta_R02) - g_R01_R02*cos(theta_R01 - theta_R02))
struct[0].Gy[1,2] = V_R01*(b_R01_R02*cos(theta_R01 - theta_R02) - g_R01_R02*sin(theta_R01 - theta_R02))
struct[0].Gy[1,3] = V_R01*V_R02*(b_R01_R02*sin(theta_R01 - theta_R02) + g_R01_R02*cos(theta_R01 - theta_R02))
struct[0].Gy[2,0] = V_R02*(b_R01_R02*sin(theta_R01 - theta_R02) - g_R01_R02*cos(theta_R01 - theta_R02))
struct[0].Gy[2,1] = V_R01*V_R02*(b_R01_R02*cos(theta_R01 - theta_R02) + g_R01_R02*sin(theta_R01 - theta_R02))
struct[0].Gy[2,2] = V_R01*(b_R01_R02*sin(theta_R01 - theta_R02) - g_R01_R02*cos(theta_R01 - theta_R02)) + 2*V_R02*(g_R01_R02 + g_R02_R03) + V_R03*(-b_R02_R03*sin(theta_R02 - theta_R03) - g_R02_R03*cos(theta_R02 - theta_R03))
struct[0].Gy[2,3] = V_R01*V_R02*(-b_R01_R02*cos(theta_R01 - theta_R02) - g_R01_R02*sin(theta_R01 - theta_R02)) + V_R02*V_R03*(-b_R02_R03*cos(theta_R02 - theta_R03) + g_R02_R03*sin(theta_R02 - theta_R03))
struct[0].Gy[2,4] = V_R02*(-b_R02_R03*sin(theta_R02 - theta_R03) - g_R02_R03*cos(theta_R02 - theta_R03))
struct[0].Gy[2,5] = V_R02*V_R03*(b_R02_R03*cos(theta_R02 - theta_R03) - g_R02_R03*sin(theta_R02 - theta_R03))
struct[0].Gy[3,0] = V_R02*(b_R01_R02*cos(theta_R01 - theta_R02) + g_R01_R02*sin(theta_R01 - theta_R02))
struct[0].Gy[3,1] = V_R01*V_R02*(-b_R01_R02*sin(theta_R01 - theta_R02) + g_R01_R02*cos(theta_R01 - theta_R02))
struct[0].Gy[3,2] = V_R01*(b_R01_R02*cos(theta_R01 - theta_R02) + g_R01_R02*sin(theta_R01 - theta_R02)) + 2*V_R02*(-b_R01_R02 - b_R02_R03 - bs_R01_R02/2 - bs_R02_R03/2) + V_R03*(b_R02_R03*cos(theta_R02 - theta_R03) - g_R02_R03*sin(theta_R02 - theta_R03))
struct[0].Gy[3,3] = V_R01*V_R02*(b_R01_R02*sin(theta_R01 - theta_R02) - g_R01_R02*cos(theta_R01 - theta_R02)) + V_R02*V_R03*(-b_R02_R03*sin(theta_R02 - theta_R03) - g_R02_R03*cos(theta_R02 - theta_R03))
struct[0].Gy[3,4] = V_R02*(b_R02_R03*cos(theta_R02 - theta_R03) - g_R02_R03*sin(theta_R02 - theta_R03))
struct[0].Gy[3,5] = V_R02*V_R03*(b_R02_R03*sin(theta_R02 - theta_R03) + g_R02_R03*cos(theta_R02 - theta_R03))
struct[0].Gy[4,2] = V_R03*(b_R02_R03*sin(theta_R02 - theta_R03) - g_R02_R03*cos(theta_R02 - theta_R03))
struct[0].Gy[4,3] = V_R02*V_R03*(b_R02_R03*cos(theta_R02 - theta_R03) + g_R02_R03*sin(theta_R02 - theta_R03))
struct[0].Gy[4,4] = V_R02*(b_R02_R03*sin(theta_R02 - theta_R03) - g_R02_R03*cos(theta_R02 - theta_R03)) + 2*V_R03*(g_R02_R03 + g_R03_R04 + g_R03_R11) + V_R04*(-b_R03_R04*sin(theta_R03 - theta_R04) - g_R03_R04*cos(theta_R03 - theta_R04)) + V_R11*(-b_R03_R11*sin(theta_R03 - theta_R11) - g_R03_R11*cos(theta_R03 - theta_R11))
struct[0].Gy[4,5] = V_R02*V_R03*(-b_R02_R03*cos(theta_R02 - theta_R03) - g_R02_R03*sin(theta_R02 - theta_R03)) + V_R03*V_R04*(-b_R03_R04*cos(theta_R03 - theta_R04) + g_R03_R04*sin(theta_R03 - theta_R04)) + V_R03*V_R11*(-b_R03_R11*cos(theta_R03 - theta_R11) + g_R03_R11*sin(theta_R03 - theta_R11))
struct[0].Gy[4,6] = V_R03*(-b_R03_R04*sin(theta_R03 - theta_R04) - g_R03_R04*cos(theta_R03 - theta_R04))
struct[0].Gy[4,7] = V_R03*V_R04*(b_R03_R04*cos(theta_R03 - theta_R04) - g_R03_R04*sin(theta_R03 - theta_R04))
struct[0].Gy[4,20] = V_R03*(-b_R03_R11*sin(theta_R03 - theta_R11) - g_R03_R11*cos(theta_R03 - theta_R11))
struct[0].Gy[4,21] = V_R03*V_R11*(b_R03_R11*cos(theta_R03 - theta_R11) - g_R03_R11*sin(theta_R03 - theta_R11))
struct[0].Gy[5,2] = V_R03*(b_R02_R03*cos(theta_R02 - theta_R03) + g_R02_R03*sin(theta_R02 - theta_R03))
struct[0].Gy[5,3] = V_R02*V_R03*(-b_R02_R03*sin(theta_R02 - theta_R03) + g_R02_R03*cos(theta_R02 - theta_R03))
struct[0].Gy[5,4] = V_R02*(b_R02_R03*cos(theta_R02 - theta_R03) + g_R02_R03*sin(theta_R02 - theta_R03)) + 2*V_R03*(-b_R02_R03 - b_R03_R04 - b_R03_R11 - bs_R02_R03/2 - bs_R03_R04/2 - bs_R03_R11/2) + V_R04*(b_R03_R04*cos(theta_R03 - theta_R04) - g_R03_R04*sin(theta_R03 - theta_R04)) + V_R11*(b_R03_R11*cos(theta_R03 - theta_R11) - g_R03_R11*sin(theta_R03 - theta_R11))
struct[0].Gy[5,5] = V_R02*V_R03*(b_R02_R03*sin(theta_R02 - theta_R03) - g_R02_R03*cos(theta_R02 - theta_R03)) + V_R03*V_R04*(-b_R03_R04*sin(theta_R03 - theta_R04) - g_R03_R04*cos(theta_R03 - theta_R04)) + V_R03*V_R11*(-b_R03_R11*sin(theta_R03 - theta_R11) - g_R03_R11*cos(theta_R03 - theta_R11))
struct[0].Gy[5,6] = V_R03*(b_R03_R04*cos(theta_R03 - theta_R04) - g_R03_R04*sin(theta_R03 - theta_R04))
struct[0].Gy[5,7] = V_R03*V_R04*(b_R03_R04*sin(theta_R03 - theta_R04) + g_R03_R04*cos(theta_R03 - theta_R04))
struct[0].Gy[5,20] = V_R03*(b_R03_R11*cos(theta_R03 - theta_R11) - g_R03_R11*sin(theta_R03 - theta_R11))
struct[0].Gy[5,21] = V_R03*V_R11*(b_R03_R11*sin(theta_R03 - theta_R11) + g_R03_R11*cos(theta_R03 - theta_R11))
struct[0].Gy[6,4] = V_R04*(b_R03_R04*sin(theta_R03 - theta_R04) - g_R03_R04*cos(theta_R03 - theta_R04))
struct[0].Gy[6,5] = V_R03*V_R04*(b_R03_R04*cos(theta_R03 - theta_R04) + g_R03_R04*sin(theta_R03 - theta_R04))
struct[0].Gy[6,6] = V_R03*(b_R03_R04*sin(theta_R03 - theta_R04) - g_R03_R04*cos(theta_R03 - theta_R04)) + 2*V_R04*(g_R03_R04 + g_R04_R05 + g_R04_R12) + V_R05*(-b_R04_R05*sin(theta_R04 - theta_R05) - g_R04_R05*cos(theta_R04 - theta_R05)) + V_R12*(-b_R04_R12*sin(theta_R04 - theta_R12) - g_R04_R12*cos(theta_R04 - theta_R12))
struct[0].Gy[6,7] = V_R03*V_R04*(-b_R03_R04*cos(theta_R03 - theta_R04) - g_R03_R04*sin(theta_R03 - theta_R04)) + V_R04*V_R05*(-b_R04_R05*cos(theta_R04 - theta_R05) + g_R04_R05*sin(theta_R04 - theta_R05)) + V_R04*V_R12*(-b_R04_R12*cos(theta_R04 - theta_R12) + g_R04_R12*sin(theta_R04 - theta_R12))
struct[0].Gy[6,8] = V_R04*(-b_R04_R05*sin(theta_R04 - theta_R05) - g_R04_R05*cos(theta_R04 - theta_R05))
struct[0].Gy[6,9] = V_R04*V_R05*(b_R04_R05*cos(theta_R04 - theta_R05) - g_R04_R05*sin(theta_R04 - theta_R05))
struct[0].Gy[6,22] = V_R04*(-b_R04_R12*sin(theta_R04 - theta_R12) - g_R04_R12*cos(theta_R04 - theta_R12))
struct[0].Gy[6,23] = V_R04*V_R12*(b_R04_R12*cos(theta_R04 - theta_R12) - g_R04_R12*sin(theta_R04 - theta_R12))
struct[0].Gy[7,4] = V_R04*(b_R03_R04*cos(theta_R03 - theta_R04) + g_R03_R04*sin(theta_R03 - theta_R04))
struct[0].Gy[7,5] = V_R03*V_R04*(-b_R03_R04*sin(theta_R03 - theta_R04) + g_R03_R04*cos(theta_R03 - theta_R04))
struct[0].Gy[7,6] = V_R03*(b_R03_R04*cos(theta_R03 - theta_R04) + g_R03_R04*sin(theta_R03 - theta_R04)) + 2*V_R04*(-b_R03_R04 - b_R04_R05 - b_R04_R12 - bs_R03_R04/2 - bs_R04_R05/2 - bs_R04_R12/2) + V_R05*(b_R04_R05*cos(theta_R04 - theta_R05) - g_R04_R05*sin(theta_R04 - theta_R05)) + V_R12*(b_R04_R12*cos(theta_R04 - theta_R12) - g_R04_R12*sin(theta_R04 - theta_R12))
struct[0].Gy[7,7] = V_R03*V_R04*(b_R03_R04*sin(theta_R03 - theta_R04) - g_R03_R04*cos(theta_R03 - theta_R04)) + V_R04*V_R05*(-b_R04_R05*sin(theta_R04 - theta_R05) - g_R04_R05*cos(theta_R04 - theta_R05)) + V_R04*V_R12*(-b_R04_R12*sin(theta_R04 - theta_R12) - g_R04_R12*cos(theta_R04 - theta_R12))
struct[0].Gy[7,8] = V_R04*(b_R04_R05*cos(theta_R04 - theta_R05) - g_R04_R05*sin(theta_R04 - theta_R05))
struct[0].Gy[7,9] = V_R04*V_R05*(b_R04_R05*sin(theta_R04 - theta_R05) + g_R04_R05*cos(theta_R04 - theta_R05))
struct[0].Gy[7,22] = V_R04*(b_R04_R12*cos(theta_R04 - theta_R12) - g_R04_R12*sin(theta_R04 - theta_R12))
struct[0].Gy[7,23] = V_R04*V_R12*(b_R04_R12*sin(theta_R04 - theta_R12) + g_R04_R12*cos(theta_R04 - theta_R12))
struct[0].Gy[8,6] = V_R05*(b_R04_R05*sin(theta_R04 - theta_R05) - g_R04_R05*cos(theta_R04 - theta_R05))
struct[0].Gy[8,7] = V_R04*V_R05*(b_R04_R05*cos(theta_R04 - theta_R05) + g_R04_R05*sin(theta_R04 - theta_R05))
struct[0].Gy[8,8] = V_R04*(b_R04_R05*sin(theta_R04 - theta_R05) - g_R04_R05*cos(theta_R04 - theta_R05)) + 2*V_R05*(g_R04_R05 + g_R05_R06) + V_R06*(-b_R05_R06*sin(theta_R05 - theta_R06) - g_R05_R06*cos(theta_R05 - theta_R06))
struct[0].Gy[8,9] = V_R04*V_R05*(-b_R04_R05*cos(theta_R04 - theta_R05) - g_R04_R05*sin(theta_R04 - theta_R05)) + V_R05*V_R06*(-b_R05_R06*cos(theta_R05 - theta_R06) + g_R05_R06*sin(theta_R05 - theta_R06))
struct[0].Gy[8,10] = V_R05*(-b_R05_R06*sin(theta_R05 - theta_R06) - g_R05_R06*cos(theta_R05 - theta_R06))
struct[0].Gy[8,11] = V_R05*V_R06*(b_R05_R06*cos(theta_R05 - theta_R06) - g_R05_R06*sin(theta_R05 - theta_R06))
struct[0].Gy[9,6] = V_R05*(b_R04_R05*cos(theta_R04 - theta_R05) + g_R04_R05*sin(theta_R04 - theta_R05))
struct[0].Gy[9,7] = V_R04*V_R05*(-b_R04_R05*sin(theta_R04 - theta_R05) + g_R04_R05*cos(theta_R04 - theta_R05))
struct[0].Gy[9,8] = V_R04*(b_R04_R05*cos(theta_R04 - theta_R05) + g_R04_R05*sin(theta_R04 - theta_R05)) + 2*V_R05*(-b_R04_R05 - b_R05_R06 - bs_R04_R05/2 - bs_R05_R06/2) + V_R06*(b_R05_R06*cos(theta_R05 - theta_R06) - g_R05_R06*sin(theta_R05 - theta_R06))
struct[0].Gy[9,9] = V_R04*V_R05*(b_R04_R05*sin(theta_R04 - theta_R05) - g_R04_R05*cos(theta_R04 - theta_R05)) + V_R05*V_R06*(-b_R05_R06*sin(theta_R05 - theta_R06) - g_R05_R06*cos(theta_R05 - theta_R06))
struct[0].Gy[9,10] = V_R05*(b_R05_R06*cos(theta_R05 - theta_R06) - g_R05_R06*sin(theta_R05 - theta_R06))
struct[0].Gy[9,11] = V_R05*V_R06*(b_R05_R06*sin(theta_R05 - theta_R06) + g_R05_R06*cos(theta_R05 - theta_R06))
struct[0].Gy[10,8] = V_R06*(b_R05_R06*sin(theta_R05 - theta_R06) - g_R05_R06*cos(theta_R05 - theta_R06))
struct[0].Gy[10,9] = V_R05*V_R06*(b_R05_R06*cos(theta_R05 - theta_R06) + g_R05_R06*sin(theta_R05 - theta_R06))
struct[0].Gy[10,10] = V_R05*(b_R05_R06*sin(theta_R05 - theta_R06) - g_R05_R06*cos(theta_R05 - theta_R06)) + 2*V_R06*(g_R05_R06 + g_R06_R07 + g_R06_R16) + V_R07*(-b_R06_R07*sin(theta_R06 - theta_R07) - g_R06_R07*cos(theta_R06 - theta_R07)) + V_R16*(-b_R06_R16*sin(theta_R06 - theta_R16) - g_R06_R16*cos(theta_R06 - theta_R16))
struct[0].Gy[10,11] = V_R05*V_R06*(-b_R05_R06*cos(theta_R05 - theta_R06) - g_R05_R06*sin(theta_R05 - theta_R06)) + V_R06*V_R07*(-b_R06_R07*cos(theta_R06 - theta_R07) + g_R06_R07*sin(theta_R06 - theta_R07)) + V_R06*V_R16*(-b_R06_R16*cos(theta_R06 - theta_R16) + g_R06_R16*sin(theta_R06 - theta_R16))
struct[0].Gy[10,12] = V_R06*(-b_R06_R07*sin(theta_R06 - theta_R07) - g_R06_R07*cos(theta_R06 - theta_R07))
struct[0].Gy[10,13] = V_R06*V_R07*(b_R06_R07*cos(theta_R06 - theta_R07) - g_R06_R07*sin(theta_R06 - theta_R07))
struct[0].Gy[10,30] = V_R06*(-b_R06_R16*sin(theta_R06 - theta_R16) - g_R06_R16*cos(theta_R06 - theta_R16))
struct[0].Gy[10,31] = V_R06*V_R16*(b_R06_R16*cos(theta_R06 - theta_R16) - g_R06_R16*sin(theta_R06 - theta_R16))
struct[0].Gy[11,8] = V_R06*(b_R05_R06*cos(theta_R05 - theta_R06) + g_R05_R06*sin(theta_R05 - theta_R06))
struct[0].Gy[11,9] = V_R05*V_R06*(-b_R05_R06*sin(theta_R05 - theta_R06) + g_R05_R06*cos(theta_R05 - theta_R06))
struct[0].Gy[11,10] = V_R05*(b_R05_R06*cos(theta_R05 - theta_R06) + g_R05_R06*sin(theta_R05 - theta_R06)) + 2*V_R06*(-b_R05_R06 - b_R06_R07 - b_R06_R16 - bs_R05_R06/2 - bs_R06_R07/2 - bs_R06_R16/2) + V_R07*(b_R06_R07*cos(theta_R06 - theta_R07) - g_R06_R07*sin(theta_R06 - theta_R07)) + V_R16*(b_R06_R16*cos(theta_R06 - theta_R16) - g_R06_R16*sin(theta_R06 - theta_R16))
struct[0].Gy[11,11] = V_R05*V_R06*(b_R05_R06*sin(theta_R05 - theta_R06) - g_R05_R06*cos(theta_R05 - theta_R06)) + V_R06*V_R07*(-b_R06_R07*sin(theta_R06 - theta_R07) - g_R06_R07*cos(theta_R06 - theta_R07)) + V_R06*V_R16*(-b_R06_R16*sin(theta_R06 - theta_R16) - g_R06_R16*cos(theta_R06 - theta_R16))
struct[0].Gy[11,12] = V_R06*(b_R06_R07*cos(theta_R06 - theta_R07) - g_R06_R07*sin(theta_R06 - theta_R07))
struct[0].Gy[11,13] = V_R06*V_R07*(b_R06_R07*sin(theta_R06 - theta_R07) + g_R06_R07*cos(theta_R06 - theta_R07))
struct[0].Gy[11,30] = V_R06*(b_R06_R16*cos(theta_R06 - theta_R16) - g_R06_R16*sin(theta_R06 - theta_R16))
struct[0].Gy[11,31] = V_R06*V_R16*(b_R06_R16*sin(theta_R06 - theta_R16) + g_R06_R16*cos(theta_R06 - theta_R16))
struct[0].Gy[12,10] = V_R07*(b_R06_R07*sin(theta_R06 - theta_R07) - g_R06_R07*cos(theta_R06 - theta_R07))
struct[0].Gy[12,11] = V_R06*V_R07*(b_R06_R07*cos(theta_R06 - theta_R07) + g_R06_R07*sin(theta_R06 - theta_R07))
struct[0].Gy[12,12] = V_R06*(b_R06_R07*sin(theta_R06 - theta_R07) - g_R06_R07*cos(theta_R06 - theta_R07)) + 2*V_R07*(g_R06_R07 + g_R07_R08) + V_R08*(-b_R07_R08*sin(theta_R07 - theta_R08) - g_R07_R08*cos(theta_R07 - theta_R08))
struct[0].Gy[12,13] = V_R06*V_R07*(-b_R06_R07*cos(theta_R06 - theta_R07) - g_R06_R07*sin(theta_R06 - theta_R07)) + V_R07*V_R08*(-b_R07_R08*cos(theta_R07 - theta_R08) + g_R07_R08*sin(theta_R07 - theta_R08))
struct[0].Gy[12,14] = V_R07*(-b_R07_R08*sin(theta_R07 - theta_R08) - g_R07_R08*cos(theta_R07 - theta_R08))
struct[0].Gy[12,15] = V_R07*V_R08*(b_R07_R08*cos(theta_R07 - theta_R08) - g_R07_R08*sin(theta_R07 - theta_R08))
struct[0].Gy[13,10] = V_R07*(b_R06_R07*cos(theta_R06 - theta_R07) + g_R06_R07*sin(theta_R06 - theta_R07))
struct[0].Gy[13,11] = V_R06*V_R07*(-b_R06_R07*sin(theta_R06 - theta_R07) + g_R06_R07*cos(theta_R06 - theta_R07))
struct[0].Gy[13,12] = V_R06*(b_R06_R07*cos(theta_R06 - theta_R07) + g_R06_R07*sin(theta_R06 - theta_R07)) + 2*V_R07*(-b_R06_R07 - b_R07_R08 - bs_R06_R07/2 - bs_R07_R08/2) + V_R08*(b_R07_R08*cos(theta_R07 - theta_R08) - g_R07_R08*sin(theta_R07 - theta_R08))
struct[0].Gy[13,13] = V_R06*V_R07*(b_R06_R07*sin(theta_R06 - theta_R07) - g_R06_R07*cos(theta_R06 - theta_R07)) + V_R07*V_R08*(-b_R07_R08*sin(theta_R07 - theta_R08) - g_R07_R08*cos(theta_R07 - theta_R08))
struct[0].Gy[13,14] = V_R07*(b_R07_R08*cos(theta_R07 - theta_R08) - g_R07_R08*sin(theta_R07 - theta_R08))
struct[0].Gy[13,15] = V_R07*V_R08*(b_R07_R08*sin(theta_R07 - theta_R08) + g_R07_R08*cos(theta_R07 - theta_R08))
struct[0].Gy[14,12] = V_R08*(b_R07_R08*sin(theta_R07 - theta_R08) - g_R07_R08*cos(theta_R07 - theta_R08))
struct[0].Gy[14,13] = V_R07*V_R08*(b_R07_R08*cos(theta_R07 - theta_R08) + g_R07_R08*sin(theta_R07 - theta_R08))
struct[0].Gy[14,14] = V_R07*(b_R07_R08*sin(theta_R07 - theta_R08) - g_R07_R08*cos(theta_R07 - theta_R08)) + 2*V_R08*(g_R07_R08 + g_R08_R09) + V_R09*(-b_R08_R09*sin(theta_R08 - theta_R09) - g_R08_R09*cos(theta_R08 - theta_R09))
struct[0].Gy[14,15] = V_R07*V_R08*(-b_R07_R08*cos(theta_R07 - theta_R08) - g_R07_R08*sin(theta_R07 - theta_R08)) + V_R08*V_R09*(-b_R08_R09*cos(theta_R08 - theta_R09) + g_R08_R09*sin(theta_R08 - theta_R09))
struct[0].Gy[14,16] = V_R08*(-b_R08_R09*sin(theta_R08 - theta_R09) - g_R08_R09*cos(theta_R08 - theta_R09))
struct[0].Gy[14,17] = V_R08*V_R09*(b_R08_R09*cos(theta_R08 - theta_R09) - g_R08_R09*sin(theta_R08 - theta_R09))
struct[0].Gy[15,12] = V_R08*(b_R07_R08*cos(theta_R07 - theta_R08) + g_R07_R08*sin(theta_R07 - theta_R08))
struct[0].Gy[15,13] = V_R07*V_R08*(-b_R07_R08*sin(theta_R07 - theta_R08) + g_R07_R08*cos(theta_R07 - theta_R08))
struct[0].Gy[15,14] = V_R07*(b_R07_R08*cos(theta_R07 - theta_R08) + g_R07_R08*sin(theta_R07 - theta_R08)) + 2*V_R08*(-b_R07_R08 - b_R08_R09 - bs_R07_R08/2 - bs_R08_R09/2) + V_R09*(b_R08_R09*cos(theta_R08 - theta_R09) - g_R08_R09*sin(theta_R08 - theta_R09))
struct[0].Gy[15,15] = V_R07*V_R08*(b_R07_R08*sin(theta_R07 - theta_R08) - g_R07_R08*cos(theta_R07 - theta_R08)) + V_R08*V_R09*(-b_R08_R09*sin(theta_R08 - theta_R09) - g_R08_R09*cos(theta_R08 - theta_R09))
struct[0].Gy[15,16] = V_R08*(b_R08_R09*cos(theta_R08 - theta_R09) - g_R08_R09*sin(theta_R08 - theta_R09))
struct[0].Gy[15,17] = V_R08*V_R09*(b_R08_R09*sin(theta_R08 - theta_R09) + g_R08_R09*cos(theta_R08 - theta_R09))
struct[0].Gy[16,14] = V_R09*(b_R08_R09*sin(theta_R08 - theta_R09) - g_R08_R09*cos(theta_R08 - theta_R09))
struct[0].Gy[16,15] = V_R08*V_R09*(b_R08_R09*cos(theta_R08 - theta_R09) + g_R08_R09*sin(theta_R08 - theta_R09))
struct[0].Gy[16,16] = V_R08*(b_R08_R09*sin(theta_R08 - theta_R09) - g_R08_R09*cos(theta_R08 - theta_R09)) + 2*V_R09*(g_R08_R09 + g_R09_R10 + g_R09_R17) + V_R10*(-b_R09_R10*sin(theta_R09 - theta_R10) - g_R09_R10*cos(theta_R09 - theta_R10)) + V_R17*(-b_R09_R17*sin(theta_R09 - theta_R17) - g_R09_R17*cos(theta_R09 - theta_R17))
struct[0].Gy[16,17] = V_R08*V_R09*(-b_R08_R09*cos(theta_R08 - theta_R09) - g_R08_R09*sin(theta_R08 - theta_R09)) + V_R09*V_R10*(-b_R09_R10*cos(theta_R09 - theta_R10) + g_R09_R10*sin(theta_R09 - theta_R10)) + V_R09*V_R17*(-b_R09_R17*cos(theta_R09 - theta_R17) + g_R09_R17*sin(theta_R09 - theta_R17))
struct[0].Gy[16,18] = V_R09*(-b_R09_R10*sin(theta_R09 - theta_R10) - g_R09_R10*cos(theta_R09 - theta_R10))
struct[0].Gy[16,19] = V_R09*V_R10*(b_R09_R10*cos(theta_R09 - theta_R10) - g_R09_R10*sin(theta_R09 - theta_R10))
struct[0].Gy[16,32] = V_R09*(-b_R09_R17*sin(theta_R09 - theta_R17) - g_R09_R17*cos(theta_R09 - theta_R17))
struct[0].Gy[16,33] = V_R09*V_R17*(b_R09_R17*cos(theta_R09 - theta_R17) - g_R09_R17*sin(theta_R09 - theta_R17))
struct[0].Gy[17,14] = V_R09*(b_R08_R09*cos(theta_R08 - theta_R09) + g_R08_R09*sin(theta_R08 - theta_R09))
struct[0].Gy[17,15] = V_R08*V_R09*(-b_R08_R09*sin(theta_R08 - theta_R09) + g_R08_R09*cos(theta_R08 - theta_R09))
struct[0].Gy[17,16] = V_R08*(b_R08_R09*cos(theta_R08 - theta_R09) + g_R08_R09*sin(theta_R08 - theta_R09)) + 2*V_R09*(-b_R08_R09 - b_R09_R10 - b_R09_R17 - bs_R08_R09/2 - bs_R09_R10/2 - bs_R09_R17/2) + V_R10*(b_R09_R10*cos(theta_R09 - theta_R10) - g_R09_R10*sin(theta_R09 - theta_R10)) + V_R17*(b_R09_R17*cos(theta_R09 - theta_R17) - g_R09_R17*sin(theta_R09 - theta_R17))
struct[0].Gy[17,17] = V_R08*V_R09*(b_R08_R09*sin(theta_R08 - theta_R09) - g_R08_R09*cos(theta_R08 - theta_R09)) + V_R09*V_R10*(-b_R09_R10*sin(theta_R09 - theta_R10) - g_R09_R10*cos(theta_R09 - theta_R10)) + V_R09*V_R17*(-b_R09_R17*sin(theta_R09 - theta_R17) - g_R09_R17*cos(theta_R09 - theta_R17))
struct[0].Gy[17,18] = V_R09*(b_R09_R10*cos(theta_R09 - theta_R10) - g_R09_R10*sin(theta_R09 - theta_R10))
struct[0].Gy[17,19] = V_R09*V_R10*(b_R09_R10*sin(theta_R09 - theta_R10) + g_R09_R10*cos(theta_R09 - theta_R10))
struct[0].Gy[17,32] = V_R09*(b_R09_R17*cos(theta_R09 - theta_R17) - g_R09_R17*sin(theta_R09 - theta_R17))
struct[0].Gy[17,33] = V_R09*V_R17*(b_R09_R17*sin(theta_R09 - theta_R17) + g_R09_R17*cos(theta_R09 - theta_R17))
struct[0].Gy[18,16] = V_R10*(b_R09_R10*sin(theta_R09 - theta_R10) - g_R09_R10*cos(theta_R09 - theta_R10))
struct[0].Gy[18,17] = V_R09*V_R10*(b_R09_R10*cos(theta_R09 - theta_R10) + g_R09_R10*sin(theta_R09 - theta_R10))
struct[0].Gy[18,18] = V_R09*(b_R09_R10*sin(theta_R09 - theta_R10) - g_R09_R10*cos(theta_R09 - theta_R10)) + 2*V_R10*(g_R09_R10 + g_R10_R18) + V_R18*(-b_R10_R18*sin(theta_R10 - theta_R18) - g_R10_R18*cos(theta_R10 - theta_R18))
struct[0].Gy[18,19] = V_R09*V_R10*(-b_R09_R10*cos(theta_R09 - theta_R10) - g_R09_R10*sin(theta_R09 - theta_R10)) + V_R10*V_R18*(-b_R10_R18*cos(theta_R10 - theta_R18) + g_R10_R18*sin(theta_R10 - theta_R18))
struct[0].Gy[18,34] = V_R10*(-b_R10_R18*sin(theta_R10 - theta_R18) - g_R10_R18*cos(theta_R10 - theta_R18))
struct[0].Gy[18,35] = V_R10*V_R18*(b_R10_R18*cos(theta_R10 - theta_R18) - g_R10_R18*sin(theta_R10 - theta_R18))
struct[0].Gy[18,38] = -S_n_R10/S_base
struct[0].Gy[19,16] = V_R10*(b_R09_R10*cos(theta_R09 - theta_R10) + g_R09_R10*sin(theta_R09 - theta_R10))
struct[0].Gy[19,17] = V_R09*V_R10*(-b_R09_R10*sin(theta_R09 - theta_R10) + g_R09_R10*cos(theta_R09 - theta_R10))
struct[0].Gy[19,18] = V_R09*(b_R09_R10*cos(theta_R09 - theta_R10) + g_R09_R10*sin(theta_R09 - theta_R10)) + 2*V_R10*(-b_R09_R10 - b_R10_R18 - bs_R09_R10/2 - bs_R10_R18/2) + V_R18*(b_R10_R18*cos(theta_R10 - theta_R18) - g_R10_R18*sin(theta_R10 - theta_R18))
struct[0].Gy[19,19] = V_R09*V_R10*(b_R09_R10*sin(theta_R09 - theta_R10) - g_R09_R10*cos(theta_R09 - theta_R10)) + V_R10*V_R18*(-b_R10_R18*sin(theta_R10 - theta_R18) - g_R10_R18*cos(theta_R10 - theta_R18))
struct[0].Gy[19,34] = V_R10*(b_R10_R18*cos(theta_R10 - theta_R18) - g_R10_R18*sin(theta_R10 - theta_R18))
struct[0].Gy[19,35] = V_R10*V_R18*(b_R10_R18*sin(theta_R10 - theta_R18) + g_R10_R18*cos(theta_R10 - theta_R18))
struct[0].Gy[19,39] = -S_n_R10/S_base
struct[0].Gy[20,4] = V_R11*(b_R03_R11*sin(theta_R03 - theta_R11) - g_R03_R11*cos(theta_R03 - theta_R11))
struct[0].Gy[20,5] = V_R03*V_R11*(b_R03_R11*cos(theta_R03 - theta_R11) + g_R03_R11*sin(theta_R03 - theta_R11))
struct[0].Gy[20,20] = V_R03*(b_R03_R11*sin(theta_R03 - theta_R11) - g_R03_R11*cos(theta_R03 - theta_R11)) + 2*V_R11*g_R03_R11
struct[0].Gy[20,21] = V_R03*V_R11*(-b_R03_R11*cos(theta_R03 - theta_R11) - g_R03_R11*sin(theta_R03 - theta_R11))
struct[0].Gy[21,4] = V_R11*(b_R03_R11*cos(theta_R03 - theta_R11) + g_R03_R11*sin(theta_R03 - theta_R11))
struct[0].Gy[21,5] = V_R03*V_R11*(-b_R03_R11*sin(theta_R03 - theta_R11) + g_R03_R11*cos(theta_R03 - theta_R11))
struct[0].Gy[21,20] = V_R03*(b_R03_R11*cos(theta_R03 - theta_R11) + g_R03_R11*sin(theta_R03 - theta_R11)) + 2*V_R11*(-b_R03_R11 - bs_R03_R11/2)
struct[0].Gy[21,21] = V_R03*V_R11*(b_R03_R11*sin(theta_R03 - theta_R11) - g_R03_R11*cos(theta_R03 - theta_R11))
struct[0].Gy[22,6] = V_R12*(b_R04_R12*sin(theta_R04 - theta_R12) - g_R04_R12*cos(theta_R04 - theta_R12))
struct[0].Gy[22,7] = V_R04*V_R12*(b_R04_R12*cos(theta_R04 - theta_R12) + g_R04_R12*sin(theta_R04 - theta_R12))
struct[0].Gy[22,22] = V_R04*(b_R04_R12*sin(theta_R04 - theta_R12) - g_R04_R12*cos(theta_R04 - theta_R12)) + 2*V_R12*(g_R04_R12 + g_R12_R13) + V_R13*(-b_R12_R13*sin(theta_R12 - theta_R13) - g_R12_R13*cos(theta_R12 - theta_R13))
struct[0].Gy[22,23] = V_R04*V_R12*(-b_R04_R12*cos(theta_R04 - theta_R12) - g_R04_R12*sin(theta_R04 - theta_R12)) + V_R12*V_R13*(-b_R12_R13*cos(theta_R12 - theta_R13) + g_R12_R13*sin(theta_R12 - theta_R13))
struct[0].Gy[22,24] = V_R12*(-b_R12_R13*sin(theta_R12 - theta_R13) - g_R12_R13*cos(theta_R12 - theta_R13))
struct[0].Gy[22,25] = V_R12*V_R13*(b_R12_R13*cos(theta_R12 - theta_R13) - g_R12_R13*sin(theta_R12 - theta_R13))
struct[0].Gy[23,6] = V_R12*(b_R04_R12*cos(theta_R04 - theta_R12) + g_R04_R12*sin(theta_R04 - theta_R12))
struct[0].Gy[23,7] = V_R04*V_R12*(-b_R04_R12*sin(theta_R04 - theta_R12) + g_R04_R12*cos(theta_R04 - theta_R12))
struct[0].Gy[23,22] = V_R04*(b_R04_R12*cos(theta_R04 - theta_R12) + g_R04_R12*sin(theta_R04 - theta_R12)) + 2*V_R12*(-b_R04_R12 - b_R12_R13 - bs_R04_R12/2 - bs_R12_R13/2) + V_R13*(b_R12_R13*cos(theta_R12 - theta_R13) - g_R12_R13*sin(theta_R12 - theta_R13))
struct[0].Gy[23,23] = V_R04*V_R12*(b_R04_R12*sin(theta_R04 - theta_R12) - g_R04_R12*cos(theta_R04 - theta_R12)) + V_R12*V_R13*(-b_R12_R13*sin(theta_R12 - theta_R13) - g_R12_R13*cos(theta_R12 - theta_R13))
struct[0].Gy[23,24] = V_R12*(b_R12_R13*cos(theta_R12 - theta_R13) - g_R12_R13*sin(theta_R12 - theta_R13))
struct[0].Gy[23,25] = V_R12*V_R13*(b_R12_R13*sin(theta_R12 - theta_R13) + g_R12_R13*cos(theta_R12 - theta_R13))
struct[0].Gy[24,22] = V_R13*(b_R12_R13*sin(theta_R12 - theta_R13) - g_R12_R13*cos(theta_R12 - theta_R13))
struct[0].Gy[24,23] = V_R12*V_R13*(b_R12_R13*cos(theta_R12 - theta_R13) + g_R12_R13*sin(theta_R12 - theta_R13))
struct[0].Gy[24,24] = V_R12*(b_R12_R13*sin(theta_R12 - theta_R13) - g_R12_R13*cos(theta_R12 - theta_R13)) + 2*V_R13*(g_R12_R13 + g_R13_R14) + V_R14*(-b_R13_R14*sin(theta_R13 - theta_R14) - g_R13_R14*cos(theta_R13 - theta_R14))
struct[0].Gy[24,25] = V_R12*V_R13*(-b_R12_R13*cos(theta_R12 - theta_R13) - g_R12_R13*sin(theta_R12 - theta_R13)) + V_R13*V_R14*(-b_R13_R14*cos(theta_R13 - theta_R14) + g_R13_R14*sin(theta_R13 - theta_R14))
struct[0].Gy[24,26] = V_R13*(-b_R13_R14*sin(theta_R13 - theta_R14) - g_R13_R14*cos(theta_R13 - theta_R14))
struct[0].Gy[24,27] = V_R13*V_R14*(b_R13_R14*cos(theta_R13 - theta_R14) - g_R13_R14*sin(theta_R13 - theta_R14))
struct[0].Gy[25,22] = V_R13*(b_R12_R13*cos(theta_R12 - theta_R13) + g_R12_R13*sin(theta_R12 - theta_R13))
struct[0].Gy[25,23] = V_R12*V_R13*(-b_R12_R13*sin(theta_R12 - theta_R13) + g_R12_R13*cos(theta_R12 - theta_R13))
struct[0].Gy[25,24] = V_R12*(b_R12_R13*cos(theta_R12 - theta_R13) + g_R12_R13*sin(theta_R12 - theta_R13)) + 2*V_R13*(-b_R12_R13 - b_R13_R14 - bs_R12_R13/2 - bs_R13_R14/2) + V_R14*(b_R13_R14*cos(theta_R13 - theta_R14) - g_R13_R14*sin(theta_R13 - theta_R14))
struct[0].Gy[25,25] = V_R12*V_R13*(b_R12_R13*sin(theta_R12 - theta_R13) - g_R12_R13*cos(theta_R12 - theta_R13)) + V_R13*V_R14*(-b_R13_R14*sin(theta_R13 - theta_R14) - g_R13_R14*cos(theta_R13 - theta_R14))
struct[0].Gy[25,26] = V_R13*(b_R13_R14*cos(theta_R13 - theta_R14) - g_R13_R14*sin(theta_R13 - theta_R14))
struct[0].Gy[25,27] = V_R13*V_R14*(b_R13_R14*sin(theta_R13 - theta_R14) + g_R13_R14*cos(theta_R13 - theta_R14))
struct[0].Gy[26,24] = V_R14*(b_R13_R14*sin(theta_R13 - theta_R14) - g_R13_R14*cos(theta_R13 - theta_R14))
struct[0].Gy[26,25] = V_R13*V_R14*(b_R13_R14*cos(theta_R13 - theta_R14) + g_R13_R14*sin(theta_R13 - theta_R14))
struct[0].Gy[26,26] = V_R13*(b_R13_R14*sin(theta_R13 - theta_R14) - g_R13_R14*cos(theta_R13 - theta_R14)) + 2*V_R14*(g_R13_R14 + g_R14_R15) + V_R15*(-b_R14_R15*sin(theta_R14 - theta_R15) - g_R14_R15*cos(theta_R14 - theta_R15))
struct[0].Gy[26,27] = V_R13*V_R14*(-b_R13_R14*cos(theta_R13 - theta_R14) - g_R13_R14*sin(theta_R13 - theta_R14)) + V_R14*V_R15*(-b_R14_R15*cos(theta_R14 - theta_R15) + g_R14_R15*sin(theta_R14 - theta_R15))
struct[0].Gy[26,28] = V_R14*(-b_R14_R15*sin(theta_R14 - theta_R15) - g_R14_R15*cos(theta_R14 - theta_R15))
struct[0].Gy[26,29] = V_R14*V_R15*(b_R14_R15*cos(theta_R14 - theta_R15) - g_R14_R15*sin(theta_R14 - theta_R15))
struct[0].Gy[26,44] = -S_n_R14/S_base
struct[0].Gy[27,24] = V_R14*(b_R13_R14*cos(theta_R13 - theta_R14) + g_R13_R14*sin(theta_R13 - theta_R14))
struct[0].Gy[27,25] = V_R13*V_R14*(-b_R13_R14*sin(theta_R13 - theta_R14) + g_R13_R14*cos(theta_R13 - theta_R14))
struct[0].Gy[27,26] = V_R13*(b_R13_R14*cos(theta_R13 - theta_R14) + g_R13_R14*sin(theta_R13 - theta_R14)) + 2*V_R14*(-b_R13_R14 - b_R14_R15 - bs_R13_R14/2 - bs_R14_R15/2) + V_R15*(b_R14_R15*cos(theta_R14 - theta_R15) - g_R14_R15*sin(theta_R14 - theta_R15))
struct[0].Gy[27,27] = V_R13*V_R14*(b_R13_R14*sin(theta_R13 - theta_R14) - g_R13_R14*cos(theta_R13 - theta_R14)) + V_R14*V_R15*(-b_R14_R15*sin(theta_R14 - theta_R15) - g_R14_R15*cos(theta_R14 - theta_R15))
struct[0].Gy[27,28] = V_R14*(b_R14_R15*cos(theta_R14 - theta_R15) - g_R14_R15*sin(theta_R14 - theta_R15))
struct[0].Gy[27,29] = V_R14*V_R15*(b_R14_R15*sin(theta_R14 - theta_R15) + g_R14_R15*cos(theta_R14 - theta_R15))
struct[0].Gy[27,45] = -S_n_R14/S_base
struct[0].Gy[28,26] = V_R15*(b_R14_R15*sin(theta_R14 - theta_R15) - g_R14_R15*cos(theta_R14 - theta_R15))
struct[0].Gy[28,27] = V_R14*V_R15*(b_R14_R15*cos(theta_R14 - theta_R15) + g_R14_R15*sin(theta_R14 - theta_R15))
struct[0].Gy[28,28] = V_R14*(b_R14_R15*sin(theta_R14 - theta_R15) - g_R14_R15*cos(theta_R14 - theta_R15)) + 2*V_R15*g_R14_R15
struct[0].Gy[28,29] = V_R14*V_R15*(-b_R14_R15*cos(theta_R14 - theta_R15) - g_R14_R15*sin(theta_R14 - theta_R15))
struct[0].Gy[29,26] = V_R15*(b_R14_R15*cos(theta_R14 - theta_R15) + g_R14_R15*sin(theta_R14 - theta_R15))
struct[0].Gy[29,27] = V_R14*V_R15*(-b_R14_R15*sin(theta_R14 - theta_R15) + g_R14_R15*cos(theta_R14 - theta_R15))
struct[0].Gy[29,28] = V_R14*(b_R14_R15*cos(theta_R14 - theta_R15) + g_R14_R15*sin(theta_R14 - theta_R15)) + 2*V_R15*(-b_R14_R15 - bs_R14_R15/2)
struct[0].Gy[29,29] = V_R14*V_R15*(b_R14_R15*sin(theta_R14 - theta_R15) - g_R14_R15*cos(theta_R14 - theta_R15))
struct[0].Gy[30,10] = V_R16*(b_R06_R16*sin(theta_R06 - theta_R16) - g_R06_R16*cos(theta_R06 - theta_R16))
struct[0].Gy[30,11] = V_R06*V_R16*(b_R06_R16*cos(theta_R06 - theta_R16) + g_R06_R16*sin(theta_R06 - theta_R16))
struct[0].Gy[30,30] = V_R06*(b_R06_R16*sin(theta_R06 - theta_R16) - g_R06_R16*cos(theta_R06 - theta_R16)) + 2*V_R16*g_R06_R16
struct[0].Gy[30,31] = V_R06*V_R16*(-b_R06_R16*cos(theta_R06 - theta_R16) - g_R06_R16*sin(theta_R06 - theta_R16))
struct[0].Gy[31,10] = V_R16*(b_R06_R16*cos(theta_R06 - theta_R16) + g_R06_R16*sin(theta_R06 - theta_R16))
struct[0].Gy[31,11] = V_R06*V_R16*(-b_R06_R16*sin(theta_R06 - theta_R16) + g_R06_R16*cos(theta_R06 - theta_R16))
struct[0].Gy[31,30] = V_R06*(b_R06_R16*cos(theta_R06 - theta_R16) + g_R06_R16*sin(theta_R06 - theta_R16)) + 2*V_R16*(-b_R06_R16 - bs_R06_R16/2)
struct[0].Gy[31,31] = V_R06*V_R16*(b_R06_R16*sin(theta_R06 - theta_R16) - g_R06_R16*cos(theta_R06 - theta_R16))
struct[0].Gy[32,16] = V_R17*(b_R09_R17*sin(theta_R09 - theta_R17) - g_R09_R17*cos(theta_R09 - theta_R17))
struct[0].Gy[32,17] = V_R09*V_R17*(b_R09_R17*cos(theta_R09 - theta_R17) + g_R09_R17*sin(theta_R09 - theta_R17))
struct[0].Gy[32,32] = V_R09*(b_R09_R17*sin(theta_R09 - theta_R17) - g_R09_R17*cos(theta_R09 - theta_R17)) + 2*V_R17*g_R09_R17
struct[0].Gy[32,33] = V_R09*V_R17*(-b_R09_R17*cos(theta_R09 - theta_R17) - g_R09_R17*sin(theta_R09 - theta_R17))
struct[0].Gy[33,16] = V_R17*(b_R09_R17*cos(theta_R09 - theta_R17) + g_R09_R17*sin(theta_R09 - theta_R17))
struct[0].Gy[33,17] = V_R09*V_R17*(-b_R09_R17*sin(theta_R09 - theta_R17) + g_R09_R17*cos(theta_R09 - theta_R17))
struct[0].Gy[33,32] = V_R09*(b_R09_R17*cos(theta_R09 - theta_R17) + g_R09_R17*sin(theta_R09 - theta_R17)) + 2*V_R17*(-b_R09_R17 - bs_R09_R17/2)
struct[0].Gy[33,33] = V_R09*V_R17*(b_R09_R17*sin(theta_R09 - theta_R17) - g_R09_R17*cos(theta_R09 - theta_R17))
struct[0].Gy[34,18] = V_R18*(b_R10_R18*sin(theta_R10 - theta_R18) - g_R10_R18*cos(theta_R10 - theta_R18))
struct[0].Gy[34,19] = V_R10*V_R18*(b_R10_R18*cos(theta_R10 - theta_R18) + g_R10_R18*sin(theta_R10 - theta_R18))
struct[0].Gy[34,34] = V_R10*(b_R10_R18*sin(theta_R10 - theta_R18) - g_R10_R18*cos(theta_R10 - theta_R18)) + 2*V_R18*g_R10_R18
struct[0].Gy[34,35] = V_R10*V_R18*(-b_R10_R18*cos(theta_R10 - theta_R18) - g_R10_R18*sin(theta_R10 - theta_R18))
struct[0].Gy[35,18] = V_R18*(b_R10_R18*cos(theta_R10 - theta_R18) + g_R10_R18*sin(theta_R10 - theta_R18))
struct[0].Gy[35,19] = V_R10*V_R18*(-b_R10_R18*sin(theta_R10 - theta_R18) + g_R10_R18*cos(theta_R10 - theta_R18))
struct[0].Gy[35,34] = V_R10*(b_R10_R18*cos(theta_R10 - theta_R18) + g_R10_R18*sin(theta_R10 - theta_R18)) + 2*V_R18*(-b_R10_R18 - bs_R10_R18/2)
struct[0].Gy[35,35] = V_R10*V_R18*(b_R10_R18*sin(theta_R10 - theta_R18) - g_R10_R18*cos(theta_R10 - theta_R18))
struct[0].Gy[36,18] = cos(delta_R10 - theta_R10)
struct[0].Gy[36,19] = V_R10*sin(delta_R10 - theta_R10)
struct[0].Gy[36,36] = X1d_R10
struct[0].Gy[36,37] = R_a_R10
struct[0].Gy[37,18] = sin(delta_R10 - theta_R10)
struct[0].Gy[37,19] = -V_R10*cos(delta_R10 - theta_R10)
struct[0].Gy[37,36] = R_a_R10
struct[0].Gy[37,37] = -X1q_R10
struct[0].Gy[38,18] = i_d_R10*sin(delta_R10 - theta_R10) + i_q_R10*cos(delta_R10 - theta_R10)
struct[0].Gy[38,19] = -V_R10*i_d_R10*cos(delta_R10 - theta_R10) + V_R10*i_q_R10*sin(delta_R10 - theta_R10)
struct[0].Gy[38,36] = V_R10*sin(delta_R10 - theta_R10)
struct[0].Gy[38,37] = V_R10*cos(delta_R10 - theta_R10)
struct[0].Gy[39,18] = i_d_R10*cos(delta_R10 - theta_R10) - i_q_R10*sin(delta_R10 - theta_R10)
struct[0].Gy[39,19] = V_R10*i_d_R10*sin(delta_R10 - theta_R10) + V_R10*i_q_R10*cos(delta_R10 - theta_R10)
struct[0].Gy[39,36] = V_R10*cos(delta_R10 - theta_R10)
struct[0].Gy[39,37] = -V_R10*sin(delta_R10 - theta_R10)
struct[0].Gy[42,26] = cos(delta_R14 - theta_R14)
struct[0].Gy[42,27] = V_R14*sin(delta_R14 - theta_R14)
struct[0].Gy[42,42] = X1d_R14
struct[0].Gy[42,43] = R_a_R14
struct[0].Gy[43,26] = sin(delta_R14 - theta_R14)
struct[0].Gy[43,27] = -V_R14*cos(delta_R14 - theta_R14)
struct[0].Gy[43,42] = R_a_R14
struct[0].Gy[43,43] = -X1q_R14
struct[0].Gy[44,26] = i_d_R14*sin(delta_R14 - theta_R14) + i_q_R14*cos(delta_R14 - theta_R14)
struct[0].Gy[44,27] = -V_R14*i_d_R14*cos(delta_R14 - theta_R14) + V_R14*i_q_R14*sin(delta_R14 - theta_R14)
struct[0].Gy[44,42] = V_R14*sin(delta_R14 - theta_R14)
struct[0].Gy[44,43] = V_R14*cos(delta_R14 - theta_R14)
struct[0].Gy[45,26] = i_d_R14*cos(delta_R14 - theta_R14) - i_q_R14*sin(delta_R14 - theta_R14)
struct[0].Gy[45,27] = V_R14*i_d_R14*sin(delta_R14 - theta_R14) + V_R14*i_q_R14*cos(delta_R14 - theta_R14)
struct[0].Gy[45,42] = V_R14*cos(delta_R14 - theta_R14)
struct[0].Gy[45,43] = -V_R14*sin(delta_R14 - theta_R14)
if mode > 12:
struct[0].Fu[5,36] = 1
struct[0].Fu[12,39] = 1
struct[0].Gu[0,0] = -1/S_base
struct[0].Gu[1,1] = -1/S_base
struct[0].Gu[2,2] = -1/S_base
struct[0].Gu[3,3] = -1/S_base
struct[0].Gu[4,4] = -1/S_base
struct[0].Gu[5,5] = -1/S_base
struct[0].Gu[6,6] = -1/S_base
struct[0].Gu[7,7] = -1/S_base
struct[0].Gu[8,8] = -1/S_base
struct[0].Gu[9,9] = -1/S_base
struct[0].Gu[10,10] = -1/S_base
struct[0].Gu[11,11] = -1/S_base
struct[0].Gu[12,12] = -1/S_base
struct[0].Gu[13,13] = -1/S_base
struct[0].Gu[14,14] = -1/S_base
struct[0].Gu[15,15] = -1/S_base
struct[0].Gu[16,16] = -1/S_base
struct[0].Gu[17,17] = -1/S_base
struct[0].Gu[18,18] = -1/S_base
struct[0].Gu[19,19] = -1/S_base
struct[0].Gu[20,20] = -1/S_base
struct[0].Gu[21,21] = -1/S_base
struct[0].Gu[22,22] = -1/S_base
struct[0].Gu[23,23] = -1/S_base
struct[0].Gu[24,24] = -1/S_base
struct[0].Gu[25,25] = -1/S_base
struct[0].Gu[26,26] = -1/S_base
struct[0].Gu[27,27] = -1/S_base
struct[0].Gu[28,28] = -1/S_base
struct[0].Gu[29,29] = -1/S_base
struct[0].Gu[30,30] = -1/S_base
struct[0].Gu[31,31] = -1/S_base
struct[0].Gu[32,32] = -1/S_base
struct[0].Gu[33,33] = -1/S_base
struct[0].Gu[34,34] = -1/S_base
struct[0].Gu[35,35] = -1/S_base
struct[0].Gu[40,36] = K_a_R10
struct[0].Gu[40,37] = K_a_R10
struct[0].Gu[46,39] = K_a_R14
struct[0].Gu[46,40] = K_a_R14
struct[0].Hy[0,0] = 1
struct[0].Hy[1,2] = 1
struct[0].Hy[2,4] = 1
struct[0].Hy[3,6] = 1
struct[0].Hy[4,8] = 1
struct[0].Hy[5,10] = 1
struct[0].Hy[6,12] = 1
struct[0].Hy[7,14] = 1
struct[0].Hy[8,16] = 1
struct[0].Hy[9,18] = 1
struct[0].Hy[10,20] = 1
struct[0].Hy[11,22] = 1
struct[0].Hy[12,24] = 1
struct[0].Hy[13,26] = 1
struct[0].Hy[14,28] = 1
struct[0].Hy[15,30] = 1
struct[0].Hy[16,32] = 1
struct[0].Hy[17,34] = 1
def ini_nn(struct,mode):
# Parameters:
S_base = struct[0].S_base
g_R01_R02 = struct[0].g_R01_R02
b_R01_R02 = struct[0].b_R01_R02
bs_R01_R02 = struct[0].bs_R01_R02
g_R02_R03 = struct[0].g_R02_R03
b_R02_R03 = struct[0].b_R02_R03
bs_R02_R03 = struct[0].bs_R02_R03
g_R03_R04 = struct[0].g_R03_R04
b_R03_R04 = struct[0].b_R03_R04
bs_R03_R04 = struct[0].bs_R03_R04
g_R04_R05 = struct[0].g_R04_R05
b_R04_R05 = struct[0].b_R04_R05
bs_R04_R05 = struct[0].bs_R04_R05
g_R05_R06 = struct[0].g_R05_R06
b_R05_R06 = struct[0].b_R05_R06
bs_R05_R06 = struct[0].bs_R05_R06
g_R06_R07 = struct[0].g_R06_R07
b_R06_R07 = struct[0].b_R06_R07
bs_R06_R07 = struct[0].bs_R06_R07
g_R07_R08 = struct[0].g_R07_R08
b_R07_R08 = struct[0].b_R07_R08
bs_R07_R08 = struct[0].bs_R07_R08
g_R08_R09 = struct[0].g_R08_R09
b_R08_R09 = struct[0].b_R08_R09
bs_R08_R09 = struct[0].bs_R08_R09
g_R09_R10 = struct[0].g_R09_R10
b_R09_R10 = struct[0].b_R09_R10
bs_R09_R10 = struct[0].bs_R09_R10
g_R03_R11 = struct[0].g_R03_R11
b_R03_R11 = struct[0].b_R03_R11
bs_R03_R11 = struct[0].bs_R03_R11
g_R04_R12 = struct[0].g_R04_R12
b_R04_R12 = struct[0].b_R04_R12
bs_R04_R12 = struct[0].bs_R04_R12
g_R12_R13 = struct[0].g_R12_R13
b_R12_R13 = struct[0].b_R12_R13
bs_R12_R13 = struct[0].bs_R12_R13
g_R13_R14 = struct[0].g_R13_R14
b_R13_R14 = struct[0].b_R13_R14
bs_R13_R14 = struct[0].bs_R13_R14
g_R14_R15 = struct[0].g_R14_R15
b_R14_R15 = struct[0].b_R14_R15
bs_R14_R15 = struct[0].bs_R14_R15
g_R06_R16 = struct[0].g_R06_R16
b_R06_R16 = struct[0].b_R06_R16
bs_R06_R16 = struct[0].bs_R06_R16
g_R09_R17 = struct[0].g_R09_R17
b_R09_R17 = struct[0].b_R09_R17
bs_R09_R17 = struct[0].bs_R09_R17
g_R10_R18 = struct[0].g_R10_R18
b_R10_R18 = struct[0].b_R10_R18
bs_R10_R18 = struct[0].bs_R10_R18
U_R01_n = struct[0].U_R01_n
U_R02_n = struct[0].U_R02_n
U_R03_n = struct[0].U_R03_n
U_R04_n = struct[0].U_R04_n
U_R05_n = struct[0].U_R05_n
U_R06_n = struct[0].U_R06_n
U_R07_n = struct[0].U_R07_n
U_R08_n = struct[0].U_R08_n
U_R09_n = struct[0].U_R09_n
U_R10_n = struct[0].U_R10_n
U_R11_n = struct[0].U_R11_n
U_R12_n = struct[0].U_R12_n
U_R13_n = struct[0].U_R13_n
U_R14_n = struct[0].U_R14_n
U_R15_n = struct[0].U_R15_n
U_R16_n = struct[0].U_R16_n
U_R17_n = struct[0].U_R17_n
U_R18_n = struct[0].U_R18_n
S_n_R10 = struct[0].S_n_R10
H_R10 = struct[0].H_R10
Omega_b_R10 = struct[0].Omega_b_R10
T1d0_R10 = struct[0].T1d0_R10
T1q0_R10 = struct[0].T1q0_R10
X_d_R10 = struct[0].X_d_R10
X_q_R10 = struct[0].X_q_R10
X1d_R10 = struct[0].X1d_R10
X1q_R10 = struct[0].X1q_R10
D_R10 = struct[0].D_R10
R_a_R10 = struct[0].R_a_R10
K_delta_R10 = struct[0].K_delta_R10
K_a_R10 = struct[0].K_a_R10
K_ai_R10 = struct[0].K_ai_R10
T_r_R10 = struct[0].T_r_R10
Droop_R10 = struct[0].Droop_R10
T_m_R10 = struct[0].T_m_R10
S_n_R14 = struct[0].S_n_R14
H_R14 = struct[0].H_R14
Omega_b_R14 = struct[0].Omega_b_R14
T1d0_R14 = struct[0].T1d0_R14
T1q0_R14 = struct[0].T1q0_R14
X_d_R14 = struct[0].X_d_R14
X_q_R14 = struct[0].X_q_R14
X1d_R14 = struct[0].X1d_R14
X1q_R14 = struct[0].X1q_R14
D_R14 = struct[0].D_R14
R_a_R14 = struct[0].R_a_R14
K_delta_R14 = struct[0].K_delta_R14
K_a_R14 = struct[0].K_a_R14
K_ai_R14 = struct[0].K_ai_R14
T_r_R14 = struct[0].T_r_R14
Droop_R14 = struct[0].Droop_R14
T_m_R14 = struct[0].T_m_R14
K_sec_R10 = struct[0].K_sec_R10
K_sec_R14 = struct[0].K_sec_R14
# Inputs:
P_R01 = struct[0].P_R01
Q_R01 = struct[0].Q_R01
P_R02 = struct[0].P_R02
Q_R02 = struct[0].Q_R02
P_R03 = struct[0].P_R03
Q_R03 = struct[0].Q_R03
P_R04 = struct[0].P_R04
Q_R04 = struct[0].Q_R04
P_R05 = struct[0].P_R05
Q_R05 = struct[0].Q_R05
P_R06 = struct[0].P_R06
Q_R06 = struct[0].Q_R06
P_R07 = struct[0].P_R07
Q_R07 = struct[0].Q_R07
P_R08 = struct[0].P_R08
Q_R08 = struct[0].Q_R08
P_R09 = struct[0].P_R09
Q_R09 = struct[0].Q_R09
P_R10 = struct[0].P_R10
Q_R10 = struct[0].Q_R10
P_R11 = struct[0].P_R11
Q_R11 = struct[0].Q_R11
P_R12 = struct[0].P_R12
Q_R12 = struct[0].Q_R12
P_R13 = struct[0].P_R13
Q_R13 = struct[0].Q_R13
P_R14 = struct[0].P_R14
Q_R14 = struct[0].Q_R14
P_R15 = struct[0].P_R15
Q_R15 = struct[0].Q_R15
P_R16 = struct[0].P_R16
Q_R16 = struct[0].Q_R16
P_R17 = struct[0].P_R17
Q_R17 = struct[0].Q_R17
P_R18 = struct[0].P_R18
Q_R18 = struct[0].Q_R18
v_ref_R10 = struct[0].v_ref_R10
v_pss_R10 = struct[0].v_pss_R10
p_c_R10 = struct[0].p_c_R10
v_ref_R14 = struct[0].v_ref_R14
v_pss_R14 = struct[0].v_pss_R14
p_c_R14 = struct[0].p_c_R14
# Dynamical states:
delta_R10 = struct[0].x[0,0]
omega_R10 = struct[0].x[1,0]
e1q_R10 = struct[0].x[2,0]
e1d_R10 = struct[0].x[3,0]
v_c_R10 = struct[0].x[4,0]
xi_v_R10 = struct[0].x[5,0]
p_m_R10 = struct[0].x[6,0]
delta_R14 = struct[0].x[7,0]
omega_R14 = struct[0].x[8,0]
e1q_R14 = struct[0].x[9,0]
e1d_R14 = struct[0].x[10,0]
v_c_R14 = struct[0].x[11,0]
xi_v_R14 = struct[0].x[12,0]
p_m_R14 = struct[0].x[13,0]
xi_freq = struct[0].x[14,0]
# Algebraic states:
V_R01 = struct[0].y_ini[0,0]
theta_R01 = struct[0].y_ini[1,0]
V_R02 = struct[0].y_ini[2,0]
theta_R02 = struct[0].y_ini[3,0]
V_R03 = struct[0].y_ini[4,0]
theta_R03 = struct[0].y_ini[5,0]
V_R04 = struct[0].y_ini[6,0]
theta_R04 = struct[0].y_ini[7,0]
V_R05 = struct[0].y_ini[8,0]
theta_R05 = struct[0].y_ini[9,0]
V_R06 = struct[0].y_ini[10,0]
theta_R06 = struct[0].y_ini[11,0]
V_R07 = struct[0].y_ini[12,0]
theta_R07 = struct[0].y_ini[13,0]
V_R08 = struct[0].y_ini[14,0]
theta_R08 = struct[0].y_ini[15,0]
V_R09 = struct[0].y_ini[16,0]
theta_R09 = struct[0].y_ini[17,0]
V_R10 = struct[0].y_ini[18,0]
theta_R10 = struct[0].y_ini[19,0]
V_R11 = struct[0].y_ini[20,0]
theta_R11 = struct[0].y_ini[21,0]
V_R12 = struct[0].y_ini[22,0]
theta_R12 = struct[0].y_ini[23,0]
V_R13 = struct[0].y_ini[24,0]
theta_R13 = struct[0].y_ini[25,0]
V_R14 = struct[0].y_ini[26,0]
theta_R14 = struct[0].y_ini[27,0]
V_R15 = struct[0].y_ini[28,0]
theta_R15 = struct[0].y_ini[29,0]
V_R16 = struct[0].y_ini[30,0]
theta_R16 = struct[0].y_ini[31,0]
V_R17 = struct[0].y_ini[32,0]
theta_R17 = struct[0].y_ini[33,0]
V_R18 = struct[0].y_ini[34,0]
theta_R18 = struct[0].y_ini[35,0]
i_d_R10 = struct[0].y_ini[36,0]
i_q_R10 = struct[0].y_ini[37,0]
p_g_R10_1 = struct[0].y_ini[38,0]
q_g_R10_1 = struct[0].y_ini[39,0]
v_f_R10 = struct[0].y_ini[40,0]
p_m_ref_R10 = struct[0].y_ini[41,0]
i_d_R14 = struct[0].y_ini[42,0]
i_q_R14 = struct[0].y_ini[43,0]
p_g_R14_1 = struct[0].y_ini[44,0]
q_g_R14_1 = struct[0].y_ini[45,0]
v_f_R14 = struct[0].y_ini[46,0]
p_m_ref_R14 = struct[0].y_ini[47,0]
omega_coi = struct[0].y_ini[48,0]
p_r_R10 = struct[0].y_ini[49,0]
p_r_R14 = struct[0].y_ini[50,0]
# Differential equations:
if mode == 2:
struct[0].f[0,0] = -K_delta_R10*delta_R10 + Omega_b_R10*(omega_R10 - omega_coi)
struct[0].f[1,0] = (-D_R10*(omega_R10 - omega_coi) - i_d_R10*(R_a_R10*i_d_R10 + V_R10*sin(delta_R10 - theta_R10)) - i_q_R10*(R_a_R10*i_q_R10 + V_R10*cos(delta_R10 - theta_R10)) + p_m_R10)/(2*H_R10)
struct[0].f[2,0] = (-e1q_R10 - i_d_R10*(-X1d_R10 + X_d_R10) + v_f_R10)/T1d0_R10
struct[0].f[3,0] = (-e1d_R10 + i_q_R10*(-X1q_R10 + X_q_R10))/T1q0_R10
struct[0].f[4,0] = (V_R10 - v_c_R10)/T_r_R10
struct[0].f[5,0] = -V_R10 + v_ref_R10
struct[0].f[6,0] = (-p_m_R10 + p_m_ref_R10)/T_m_R10
struct[0].f[7,0] = -K_delta_R14*delta_R14 + Omega_b_R14*(omega_R14 - omega_coi)
struct[0].f[8,0] = (-D_R14*(omega_R14 - omega_coi) - i_d_R14*(R_a_R14*i_d_R14 + V_R14*sin(delta_R14 - theta_R14)) - i_q_R14*(R_a_R14*i_q_R14 + V_R14*cos(delta_R14 - theta_R14)) + p_m_R14)/(2*H_R14)
struct[0].f[9,0] = (-e1q_R14 - i_d_R14*(-X1d_R14 + X_d_R14) + v_f_R14)/T1d0_R14
struct[0].f[10,0] = (-e1d_R14 + i_q_R14*(-X1q_R14 + X_q_R14))/T1q0_R14
struct[0].f[11,0] = (V_R14 - v_c_R14)/T_r_R14
struct[0].f[12,0] = -V_R14 + v_ref_R14
struct[0].f[13,0] = (-p_m_R14 + p_m_ref_R14)/T_m_R14
struct[0].f[14,0] = 1 - omega_coi
# Algebraic equations:
if mode == 3:
struct[0].g[0,0] = -P_R01/S_base + V_R01**2*g_R01_R02 + V_R01*V_R02*(-b_R01_R02*sin(theta_R01 - theta_R02) - g_R01_R02*cos(theta_R01 - theta_R02))
struct[0].g[1,0] = -Q_R01/S_base + V_R01**2*(-b_R01_R02 - bs_R01_R02/2) + V_R01*V_R02*(b_R01_R02*cos(theta_R01 - theta_R02) - g_R01_R02*sin(theta_R01 - theta_R02))
struct[0].g[2,0] = -P_R02/S_base + V_R01*V_R02*(b_R01_R02*sin(theta_R01 - theta_R02) - g_R01_R02*cos(theta_R01 - theta_R02)) + V_R02**2*(g_R01_R02 + g_R02_R03) + V_R02*V_R03*(-b_R02_R03*sin(theta_R02 - theta_R03) - g_R02_R03*cos(theta_R02 - theta_R03))
struct[0].g[3,0] = -Q_R02/S_base + V_R01*V_R02*(b_R01_R02*cos(theta_R01 - theta_R02) + g_R01_R02*sin(theta_R01 - theta_R02)) + V_R02**2*(-b_R01_R02 - b_R02_R03 - bs_R01_R02/2 - bs_R02_R03/2) + V_R02*V_R03*(b_R02_R03*cos(theta_R02 - theta_R03) - g_R02_R03*sin(theta_R02 - theta_R03))
struct[0].g[4,0] = -P_R03/S_base + V_R02*V_R03*(b_R02_R03*sin(theta_R02 - theta_R03) - g_R02_R03*cos(theta_R02 - theta_R03)) + V_R03**2*(g_R02_R03 + g_R03_R04 + g_R03_R11) + V_R03*V_R04*(-b_R03_R04*sin(theta_R03 - theta_R04) - g_R03_R04*cos(theta_R03 - theta_R04)) + V_R03*V_R11*(-b_R03_R11*sin(theta_R03 - theta_R11) - g_R03_R11*cos(theta_R03 - theta_R11))
struct[0].g[5,0] = -Q_R03/S_base + V_R02*V_R03*(b_R02_R03*cos(theta_R02 - theta_R03) + g_R02_R03*sin(theta_R02 - theta_R03)) + V_R03**2*(-b_R02_R03 - b_R03_R04 - b_R03_R11 - bs_R02_R03/2 - bs_R03_R04/2 - bs_R03_R11/2) + V_R03*V_R04*(b_R03_R04*cos(theta_R03 - theta_R04) - g_R03_R04*sin(theta_R03 - theta_R04)) + V_R03*V_R11*(b_R03_R11*cos(theta_R03 - theta_R11) - g_R03_R11*sin(theta_R03 - theta_R11))
struct[0].g[6,0] = -P_R04/S_base + V_R03*V_R04*(b_R03_R04*sin(theta_R03 - theta_R04) - g_R03_R04*cos(theta_R03 - theta_R04)) + V_R04**2*(g_R03_R04 + g_R04_R05 + g_R04_R12) + V_R04*V_R05*(-b_R04_R05*sin(theta_R04 - theta_R05) - g_R04_R05*cos(theta_R04 - theta_R05)) + V_R04*V_R12*(-b_R04_R12*sin(theta_R04 - theta_R12) - g_R04_R12*cos(theta_R04 - theta_R12))
struct[0].g[7,0] = -Q_R04/S_base + V_R03*V_R04*(b_R03_R04*cos(theta_R03 - theta_R04) + g_R03_R04*sin(theta_R03 - theta_R04)) + V_R04**2*(-b_R03_R04 - b_R04_R05 - b_R04_R12 - bs_R03_R04/2 - bs_R04_R05/2 - bs_R04_R12/2) + V_R04*V_R05*(b_R04_R05*cos(theta_R04 - theta_R05) - g_R04_R05*sin(theta_R04 - theta_R05)) + V_R04*V_R12*(b_R04_R12*cos(theta_R04 - theta_R12) - g_R04_R12*sin(theta_R04 - theta_R12))
struct[0].g[8,0] = -P_R05/S_base + V_R04*V_R05*(b_R04_R05*sin(theta_R04 - theta_R05) - g_R04_R05*cos(theta_R04 - theta_R05)) + V_R05**2*(g_R04_R05 + g_R05_R06) + V_R05*V_R06*(-b_R05_R06*sin(theta_R05 - theta_R06) - g_R05_R06*cos(theta_R05 - theta_R06))
struct[0].g[9,0] = -Q_R05/S_base + V_R04*V_R05*(b_R04_R05*cos(theta_R04 - theta_R05) + g_R04_R05*sin(theta_R04 - theta_R05)) + V_R05**2*(-b_R04_R05 - b_R05_R06 - bs_R04_R05/2 - bs_R05_R06/2) + V_R05*V_R06*(b_R05_R06*cos(theta_R05 - theta_R06) - g_R05_R06*sin(theta_R05 - theta_R06))
struct[0].g[10,0] = -P_R06/S_base + V_R05*V_R06*(b_R05_R06*sin(theta_R05 - theta_R06) - g_R05_R06*cos(theta_R05 - theta_R06)) + V_R06**2*(g_R05_R06 + g_R06_R07 + g_R06_R16) + V_R06*V_R07*(-b_R06_R07*sin(theta_R06 - theta_R07) - g_R06_R07*cos(theta_R06 - theta_R07)) + V_R06*V_R16*(-b_R06_R16*sin(theta_R06 - theta_R16) - g_R06_R16*cos(theta_R06 - theta_R16))
struct[0].g[11,0] = -Q_R06/S_base + V_R05*V_R06*(b_R05_R06*cos(theta_R05 - theta_R06) + g_R05_R06*sin(theta_R05 - theta_R06)) + V_R06**2*(-b_R05_R06 - b_R06_R07 - b_R06_R16 - bs_R05_R06/2 - bs_R06_R07/2 - bs_R06_R16/2) + V_R06*V_R07*(b_R06_R07*cos(theta_R06 - theta_R07) - g_R06_R07*sin(theta_R06 - theta_R07)) + V_R06*V_R16*(b_R06_R16*cos(theta_R06 - theta_R16) - g_R06_R16*sin(theta_R06 - theta_R16))
struct[0].g[12,0] = -P_R07/S_base + V_R06*V_R07*(b_R06_R07*sin(theta_R06 - theta_R07) - g_R06_R07*cos(theta_R06 - theta_R07)) + V_R07**2*(g_R06_R07 + g_R07_R08) + V_R07*V_R08*(-b_R07_R08*sin(theta_R07 - theta_R08) - g_R07_R08*cos(theta_R07 - theta_R08))
struct[0].g[13,0] = -Q_R07/S_base + V_R06*V_R07*(b_R06_R07*cos(theta_R06 - theta_R07) + g_R06_R07*sin(theta_R06 - theta_R07)) + V_R07**2*(-b_R06_R07 - b_R07_R08 - bs_R06_R07/2 - bs_R07_R08/2) + V_R07*V_R08*(b_R07_R08*cos(theta_R07 - theta_R08) - g_R07_R08*sin(theta_R07 - theta_R08))
struct[0].g[14,0] = -P_R08/S_base + V_R07*V_R08*(b_R07_R08*sin(theta_R07 - theta_R08) - g_R07_R08*cos(theta_R07 - theta_R08)) + V_R08**2*(g_R07_R08 + g_R08_R09) + V_R08*V_R09*(-b_R08_R09*sin(theta_R08 - theta_R09) - g_R08_R09*cos(theta_R08 - theta_R09))
struct[0].g[15,0] = -Q_R08/S_base + V_R07*V_R08*(b_R07_R08*cos(theta_R07 - theta_R08) + g_R07_R08*sin(theta_R07 - theta_R08)) + V_R08**2*(-b_R07_R08 - b_R08_R09 - bs_R07_R08/2 - bs_R08_R09/2) + V_R08*V_R09*(b_R08_R09*cos(theta_R08 - theta_R09) - g_R08_R09*sin(theta_R08 - theta_R09))
struct[0].g[16,0] = -P_R09/S_base + V_R08*V_R09*(b_R08_R09*sin(theta_R08 - theta_R09) - g_R08_R09*cos(theta_R08 - theta_R09)) + V_R09**2*(g_R08_R09 + g_R09_R10 + g_R09_R17) + V_R09*V_R10*(-b_R09_R10*sin(theta_R09 - theta_R10) - g_R09_R10*cos(theta_R09 - theta_R10)) + V_R09*V_R17*(-b_R09_R17*sin(theta_R09 - theta_R17) - g_R09_R17*cos(theta_R09 - theta_R17))
struct[0].g[17,0] = -Q_R09/S_base + V_R08*V_R09*(b_R08_R09*cos(theta_R08 - theta_R09) + g_R08_R09*sin(theta_R08 - theta_R09)) + V_R09**2*(-b_R08_R09 - b_R09_R10 - b_R09_R17 - bs_R08_R09/2 - bs_R09_R10/2 - bs_R09_R17/2) + V_R09*V_R10*(b_R09_R10*cos(theta_R09 - theta_R10) - g_R09_R10*sin(theta_R09 - theta_R10)) + V_R09*V_R17*(b_R09_R17*cos(theta_R09 - theta_R17) - g_R09_R17*sin(theta_R09 - theta_R17))
struct[0].g[18,0] = -P_R10/S_base + V_R09*V_R10*(b_R09_R10*sin(theta_R09 - theta_R10) - g_R09_R10*cos(theta_R09 - theta_R10)) + V_R10**2*(g_R09_R10 + g_R10_R18) + V_R10*V_R18*(-b_R10_R18*sin(theta_R10 - theta_R18) - g_R10_R18*cos(theta_R10 - theta_R18)) - S_n_R10*p_g_R10_1/S_base
struct[0].g[19,0] = -Q_R10/S_base + V_R09*V_R10*(b_R09_R10*cos(theta_R09 - theta_R10) + g_R09_R10*sin(theta_R09 - theta_R10)) + V_R10**2*(-b_R09_R10 - b_R10_R18 - bs_R09_R10/2 - bs_R10_R18/2) + V_R10*V_R18*(b_R10_R18*cos(theta_R10 - theta_R18) - g_R10_R18*sin(theta_R10 - theta_R18)) - S_n_R10*q_g_R10_1/S_base
struct[0].g[20,0] = -P_R11/S_base + V_R03*V_R11*(b_R03_R11*sin(theta_R03 - theta_R11) - g_R03_R11*cos(theta_R03 - theta_R11)) + V_R11**2*g_R03_R11
struct[0].g[21,0] = -Q_R11/S_base + V_R03*V_R11*(b_R03_R11*cos(theta_R03 - theta_R11) + g_R03_R11*sin(theta_R03 - theta_R11)) + V_R11**2*(-b_R03_R11 - bs_R03_R11/2)
struct[0].g[22,0] = -P_R12/S_base + V_R04*V_R12*(b_R04_R12*sin(theta_R04 - theta_R12) - g_R04_R12*cos(theta_R04 - theta_R12)) + V_R12**2*(g_R04_R12 + g_R12_R13) + V_R12*V_R13*(-b_R12_R13*sin(theta_R12 - theta_R13) - g_R12_R13*cos(theta_R12 - theta_R13))
struct[0].g[23,0] = -Q_R12/S_base + V_R04*V_R12*(b_R04_R12*cos(theta_R04 - theta_R12) + g_R04_R12*sin(theta_R04 - theta_R12)) + V_R12**2*(-b_R04_R12 - b_R12_R13 - bs_R04_R12/2 - bs_R12_R13/2) + V_R12*V_R13*(b_R12_R13*cos(theta_R12 - theta_R13) - g_R12_R13*sin(theta_R12 - theta_R13))
struct[0].g[24,0] = -P_R13/S_base + V_R12*V_R13*(b_R12_R13*sin(theta_R12 - theta_R13) - g_R12_R13*cos(theta_R12 - theta_R13)) + V_R13**2*(g_R12_R13 + g_R13_R14) + V_R13*V_R14*(-b_R13_R14*sin(theta_R13 - theta_R14) - g_R13_R14*cos(theta_R13 - theta_R14))
struct[0].g[25,0] = -Q_R13/S_base + V_R12*V_R13*(b_R12_R13*cos(theta_R12 - theta_R13) + g_R12_R13*sin(theta_R12 - theta_R13)) + V_R13**2*(-b_R12_R13 - b_R13_R14 - bs_R12_R13/2 - bs_R13_R14/2) + V_R13*V_R14*(b_R13_R14*cos(theta_R13 - theta_R14) - g_R13_R14*sin(theta_R13 - theta_R14))
struct[0].g[26,0] = -P_R14/S_base + V_R13*V_R14*(b_R13_R14*sin(theta_R13 - theta_R14) - g_R13_R14*cos(theta_R13 - theta_R14)) + V_R14**2*(g_R13_R14 + g_R14_R15) + V_R14*V_R15*(-b_R14_R15*sin(theta_R14 - theta_R15) - g_R14_R15*cos(theta_R14 - theta_R15)) - S_n_R14*p_g_R14_1/S_base
struct[0].g[27,0] = -Q_R14/S_base + V_R13*V_R14*(b_R13_R14*cos(theta_R13 - theta_R14) + g_R13_R14*sin(theta_R13 - theta_R14)) + V_R14**2*(-b_R13_R14 - b_R14_R15 - bs_R13_R14/2 - bs_R14_R15/2) + V_R14*V_R15*(b_R14_R15*cos(theta_R14 - theta_R15) - g_R14_R15*sin(theta_R14 - theta_R15)) - S_n_R14*q_g_R14_1/S_base
struct[0].g[28,0] = -P_R15/S_base + V_R14*V_R15*(b_R14_R15*sin(theta_R14 - theta_R15) - g_R14_R15*cos(theta_R14 - theta_R15)) + V_R15**2*g_R14_R15
struct[0].g[29,0] = -Q_R15/S_base + V_R14*V_R15*(b_R14_R15*cos(theta_R14 - theta_R15) + g_R14_R15*sin(theta_R14 - theta_R15)) + V_R15**2*(-b_R14_R15 - bs_R14_R15/2)
struct[0].g[30,0] = -P_R16/S_base + V_R06*V_R16*(b_R06_R16*sin(theta_R06 - theta_R16) - g_R06_R16*cos(theta_R06 - theta_R16)) + V_R16**2*g_R06_R16
struct[0].g[31,0] = -Q_R16/S_base + V_R06*V_R16*(b_R06_R16*cos(theta_R06 - theta_R16) + g_R06_R16*sin(theta_R06 - theta_R16)) + V_R16**2*(-b_R06_R16 - bs_R06_R16/2)
struct[0].g[32,0] = -P_R17/S_base + V_R09*V_R17*(b_R09_R17*sin(theta_R09 - theta_R17) - g_R09_R17*cos(theta_R09 - theta_R17)) + V_R17**2*g_R09_R17
struct[0].g[33,0] = -Q_R17/S_base + V_R09*V_R17*(b_R09_R17*cos(theta_R09 - theta_R17) + g_R09_R17*sin(theta_R09 - theta_R17)) + V_R17**2*(-b_R09_R17 - bs_R09_R17/2)
struct[0].g[34,0] = -P_R18/S_base + V_R10*V_R18*(b_R10_R18*sin(theta_R10 - theta_R18) - g_R10_R18*cos(theta_R10 - theta_R18)) + V_R18**2*g_R10_R18
struct[0].g[35,0] = -Q_R18/S_base + V_R10*V_R18*(b_R10_R18*cos(theta_R10 - theta_R18) + g_R10_R18*sin(theta_R10 - theta_R18)) + V_R18**2*(-b_R10_R18 - bs_R10_R18/2)
struct[0].g[36,0] = R_a_R10*i_q_R10 + V_R10*cos(delta_R10 - theta_R10) + X1d_R10*i_d_R10 - e1q_R10
struct[0].g[37,0] = R_a_R10*i_d_R10 + V_R10*sin(delta_R10 - theta_R10) - X1q_R10*i_q_R10 - e1d_R10
struct[0].g[38,0] = V_R10*i_d_R10*sin(delta_R10 - theta_R10) + V_R10*i_q_R10*cos(delta_R10 - theta_R10) - p_g_R10_1
struct[0].g[39,0] = V_R10*i_d_R10*cos(delta_R10 - theta_R10) - V_R10*i_q_R10*sin(delta_R10 - theta_R10) - q_g_R10_1
struct[0].g[40,0] = K_a_R10*(-v_c_R10 + v_pss_R10 + v_ref_R10) + K_ai_R10*xi_v_R10 - v_f_R10
struct[0].g[41,0] = p_c_R10 - p_m_ref_R10 + p_r_R10 - (omega_R10 - 1)/Droop_R10
struct[0].g[42,0] = R_a_R14*i_q_R14 + V_R14*cos(delta_R14 - theta_R14) + X1d_R14*i_d_R14 - e1q_R14
struct[0].g[43,0] = R_a_R14*i_d_R14 + V_R14*sin(delta_R14 - theta_R14) - X1q_R14*i_q_R14 - e1d_R14
struct[0].g[44,0] = V_R14*i_d_R14*sin(delta_R14 - theta_R14) + V_R14*i_q_R14*cos(delta_R14 - theta_R14) - p_g_R14_1
struct[0].g[45,0] = V_R14*i_d_R14*cos(delta_R14 - theta_R14) - V_R14*i_q_R14*sin(delta_R14 - theta_R14) - q_g_R14_1
struct[0].g[46,0] = K_a_R14*(-v_c_R14 + v_pss_R14 + v_ref_R14) + K_ai_R14*xi_v_R14 - v_f_R14
struct[0].g[47,0] = p_c_R14 - p_m_ref_R14 + p_r_R14 - (omega_R14 - 1)/Droop_R14
struct[0].g[48,0] = omega_R10/2 + omega_R14/2 - omega_coi
struct[0].g[49,0] = K_sec_R10*xi_freq/2 - p_r_R10
struct[0].g[50,0] = K_sec_R14*xi_freq/2 - p_r_R14
# Outputs:
if mode == 3:
struct[0].h[0,0] = V_R01
struct[0].h[1,0] = V_R02
struct[0].h[2,0] = V_R03
struct[0].h[3,0] = V_R04
struct[0].h[4,0] = V_R05
struct[0].h[5,0] = V_R06
struct[0].h[6,0] = V_R07
struct[0].h[7,0] = V_R08
struct[0].h[8,0] = V_R09
struct[0].h[9,0] = V_R10
struct[0].h[10,0] = V_R11
struct[0].h[11,0] = V_R12
struct[0].h[12,0] = V_R13
struct[0].h[13,0] = V_R14
struct[0].h[14,0] = V_R15
struct[0].h[15,0] = V_R16
struct[0].h[16,0] = V_R17
struct[0].h[17,0] = V_R18
if mode == 10:
struct[0].Fx_ini[0,0] = -K_delta_R10
struct[0].Fx_ini[0,1] = Omega_b_R10
struct[0].Fx_ini[1,0] = (-V_R10*i_d_R10*cos(delta_R10 - theta_R10) + V_R10*i_q_R10*sin(delta_R10 - theta_R10))/(2*H_R10)
struct[0].Fx_ini[1,1] = -D_R10/(2*H_R10)
struct[0].Fx_ini[1,6] = 1/(2*H_R10)
struct[0].Fx_ini[2,2] = -1/T1d0_R10
struct[0].Fx_ini[3,3] = -1/T1q0_R10
struct[0].Fx_ini[4,4] = -1/T_r_R10
struct[0].Fx_ini[6,6] = -1/T_m_R10
struct[0].Fx_ini[7,7] = -K_delta_R14
struct[0].Fx_ini[7,8] = Omega_b_R14
struct[0].Fx_ini[8,7] = (-V_R14*i_d_R14*cos(delta_R14 - theta_R14) + V_R14*i_q_R14*sin(delta_R14 - theta_R14))/(2*H_R14)
struct[0].Fx_ini[8,8] = -D_R14/(2*H_R14)
struct[0].Fx_ini[8,13] = 1/(2*H_R14)
struct[0].Fx_ini[9,9] = -1/T1d0_R14
struct[0].Fx_ini[10,10] = -1/T1q0_R14
struct[0].Fx_ini[11,11] = -1/T_r_R14
struct[0].Fx_ini[13,13] = -1/T_m_R14
if mode == 11:
struct[0].Fy_ini[0,48] = -Omega_b_R10
struct[0].Fy_ini[1,18] = (-i_d_R10*sin(delta_R10 - theta_R10) - i_q_R10*cos(delta_R10 - theta_R10))/(2*H_R10)
struct[0].Fy_ini[1,19] = (V_R10*i_d_R10*cos(delta_R10 - theta_R10) - V_R10*i_q_R10*sin(delta_R10 - theta_R10))/(2*H_R10)
struct[0].Fy_ini[1,36] = (-2*R_a_R10*i_d_R10 - V_R10*sin(delta_R10 - theta_R10))/(2*H_R10)
struct[0].Fy_ini[1,37] = (-2*R_a_R10*i_q_R10 - V_R10*cos(delta_R10 - theta_R10))/(2*H_R10)
struct[0].Fy_ini[1,48] = D_R10/(2*H_R10)
struct[0].Fy_ini[2,36] = (X1d_R10 - X_d_R10)/T1d0_R10
struct[0].Fy_ini[2,40] = 1/T1d0_R10
struct[0].Fy_ini[3,37] = (-X1q_R10 + X_q_R10)/T1q0_R10
struct[0].Fy_ini[4,18] = 1/T_r_R10
struct[0].Fy_ini[5,18] = -1
struct[0].Fy_ini[6,41] = 1/T_m_R10
struct[0].Fy_ini[7,48] = -Omega_b_R14
struct[0].Fy_ini[8,26] = (-i_d_R14*sin(delta_R14 - theta_R14) - i_q_R14*cos(delta_R14 - theta_R14))/(2*H_R14)
struct[0].Fy_ini[8,27] = (V_R14*i_d_R14*cos(delta_R14 - theta_R14) - V_R14*i_q_R14*sin(delta_R14 - theta_R14))/(2*H_R14)
struct[0].Fy_ini[8,42] = (-2*R_a_R14*i_d_R14 - V_R14*sin(delta_R14 - theta_R14))/(2*H_R14)
struct[0].Fy_ini[8,43] = (-2*R_a_R14*i_q_R14 - V_R14*cos(delta_R14 - theta_R14))/(2*H_R14)
struct[0].Fy_ini[8,48] = D_R14/(2*H_R14)
struct[0].Fy_ini[9,42] = (X1d_R14 - X_d_R14)/T1d0_R14
struct[0].Fy_ini[9,46] = 1/T1d0_R14
struct[0].Fy_ini[10,43] = (-X1q_R14 + X_q_R14)/T1q0_R14
struct[0].Fy_ini[11,26] = 1/T_r_R14
struct[0].Fy_ini[12,26] = -1
struct[0].Fy_ini[13,47] = 1/T_m_R14
struct[0].Fy_ini[14,48] = -1
struct[0].Gy_ini[0,0] = 2*V_R01*g_R01_R02 + V_R02*(-b_R01_R02*sin(theta_R01 - theta_R02) - g_R01_R02*cos(theta_R01 - theta_R02))
struct[0].Gy_ini[0,1] = V_R01*V_R02*(-b_R01_R02*cos(theta_R01 - theta_R02) + g_R01_R02*sin(theta_R01 - theta_R02))
struct[0].Gy_ini[0,2] = V_R01*(-b_R01_R02*sin(theta_R01 - theta_R02) - g_R01_R02*cos(theta_R01 - theta_R02))
struct[0].Gy_ini[0,3] = V_R01*V_R02*(b_R01_R02*cos(theta_R01 - theta_R02) - g_R01_R02*sin(theta_R01 - theta_R02))
struct[0].Gy_ini[1,0] = 2*V_R01*(-b_R01_R02 - bs_R01_R02/2) + V_R02*(b_R01_R02*cos(theta_R01 - theta_R02) - g_R01_R02*sin(theta_R01 - theta_R02))
struct[0].Gy_ini[1,1] = V_R01*V_R02*(-b_R01_R02*sin(theta_R01 - theta_R02) - g_R01_R02*cos(theta_R01 - theta_R02))
struct[0].Gy_ini[1,2] = V_R01*(b_R01_R02*cos(theta_R01 - theta_R02) - g_R01_R02*sin(theta_R01 - theta_R02))
struct[0].Gy_ini[1,3] = V_R01*V_R02*(b_R01_R02*sin(theta_R01 - theta_R02) + g_R01_R02*cos(theta_R01 - theta_R02))
struct[0].Gy_ini[2,0] = V_R02*(b_R01_R02*sin(theta_R01 - theta_R02) - g_R01_R02*cos(theta_R01 - theta_R02))
struct[0].Gy_ini[2,1] = V_R01*V_R02*(b_R01_R02*cos(theta_R01 - theta_R02) + g_R01_R02*sin(theta_R01 - theta_R02))
struct[0].Gy_ini[2,2] = V_R01*(b_R01_R02*sin(theta_R01 - theta_R02) - g_R01_R02*cos(theta_R01 - theta_R02)) + 2*V_R02*(g_R01_R02 + g_R02_R03) + V_R03*(-b_R02_R03*sin(theta_R02 - theta_R03) - g_R02_R03*cos(theta_R02 - theta_R03))
struct[0].Gy_ini[2,3] = V_R01*V_R02*(-b_R01_R02*cos(theta_R01 - theta_R02) - g_R01_R02*sin(theta_R01 - theta_R02)) + V_R02*V_R03*(-b_R02_R03*cos(theta_R02 - theta_R03) + g_R02_R03*sin(theta_R02 - theta_R03))
struct[0].Gy_ini[2,4] = V_R02*(-b_R02_R03*sin(theta_R02 - theta_R03) - g_R02_R03*cos(theta_R02 - theta_R03))
struct[0].Gy_ini[2,5] = V_R02*V_R03*(b_R02_R03*cos(theta_R02 - theta_R03) - g_R02_R03*sin(theta_R02 - theta_R03))
struct[0].Gy_ini[3,0] = V_R02*(b_R01_R02*cos(theta_R01 - theta_R02) + g_R01_R02*sin(theta_R01 - theta_R02))
struct[0].Gy_ini[3,1] = V_R01*V_R02*(-b_R01_R02*sin(theta_R01 - theta_R02) + g_R01_R02*cos(theta_R01 - theta_R02))
struct[0].Gy_ini[3,2] = V_R01*(b_R01_R02*cos(theta_R01 - theta_R02) + g_R01_R02*sin(theta_R01 - theta_R02)) + 2*V_R02*(-b_R01_R02 - b_R02_R03 - bs_R01_R02/2 - bs_R02_R03/2) + V_R03*(b_R02_R03*cos(theta_R02 - theta_R03) - g_R02_R03*sin(theta_R02 - theta_R03))
struct[0].Gy_ini[3,3] = V_R01*V_R02*(b_R01_R02*sin(theta_R01 - theta_R02) - g_R01_R02*cos(theta_R01 - theta_R02)) + V_R02*V_R03*(-b_R02_R03*sin(theta_R02 - theta_R03) - g_R02_R03*cos(theta_R02 - theta_R03))
struct[0].Gy_ini[3,4] = V_R02*(b_R02_R03*cos(theta_R02 - theta_R03) - g_R02_R03*sin(theta_R02 - theta_R03))
struct[0].Gy_ini[3,5] = V_R02*V_R03*(b_R02_R03*sin(theta_R02 - theta_R03) + g_R02_R03*cos(theta_R02 - theta_R03))
struct[0].Gy_ini[4,2] = V_R03*(b_R02_R03*sin(theta_R02 - theta_R03) - g_R02_R03*cos(theta_R02 - theta_R03))
struct[0].Gy_ini[4,3] = V_R02*V_R03*(b_R02_R03*cos(theta_R02 - theta_R03) + g_R02_R03*sin(theta_R02 - theta_R03))
struct[0].Gy_ini[4,4] = V_R02*(b_R02_R03*sin(theta_R02 - theta_R03) - g_R02_R03*cos(theta_R02 - theta_R03)) + 2*V_R03*(g_R02_R03 + g_R03_R04 + g_R03_R11) + V_R04*(-b_R03_R04*sin(theta_R03 - theta_R04) - g_R03_R04*cos(theta_R03 - theta_R04)) + V_R11*(-b_R03_R11*sin(theta_R03 - theta_R11) - g_R03_R11*cos(theta_R03 - theta_R11))
struct[0].Gy_ini[4,5] = V_R02*V_R03*(-b_R02_R03*cos(theta_R02 - theta_R03) - g_R02_R03*sin(theta_R02 - theta_R03)) + V_R03*V_R04*(-b_R03_R04*cos(theta_R03 - theta_R04) + g_R03_R04*sin(theta_R03 - theta_R04)) + V_R03*V_R11*(-b_R03_R11*cos(theta_R03 - theta_R11) + g_R03_R11*sin(theta_R03 - theta_R11))
struct[0].Gy_ini[4,6] = V_R03*(-b_R03_R04*sin(theta_R03 - theta_R04) - g_R03_R04*cos(theta_R03 - theta_R04))
struct[0].Gy_ini[4,7] = V_R03*V_R04*(b_R03_R04*cos(theta_R03 - theta_R04) - g_R03_R04*sin(theta_R03 - theta_R04))
struct[0].Gy_ini[4,20] = V_R03*(-b_R03_R11*sin(theta_R03 - theta_R11) - g_R03_R11*cos(theta_R03 - theta_R11))
struct[0].Gy_ini[4,21] = V_R03*V_R11*(b_R03_R11*cos(theta_R03 - theta_R11) - g_R03_R11*sin(theta_R03 - theta_R11))
struct[0].Gy_ini[5,2] = V_R03*(b_R02_R03*cos(theta_R02 - theta_R03) + g_R02_R03*sin(theta_R02 - theta_R03))
struct[0].Gy_ini[5,3] = V_R02*V_R03*(-b_R02_R03*sin(theta_R02 - theta_R03) + g_R02_R03*cos(theta_R02 - theta_R03))
struct[0].Gy_ini[5,4] = V_R02*(b_R02_R03*cos(theta_R02 - theta_R03) + g_R02_R03*sin(theta_R02 - theta_R03)) + 2*V_R03*(-b_R02_R03 - b_R03_R04 - b_R03_R11 - bs_R02_R03/2 - bs_R03_R04/2 - bs_R03_R11/2) + V_R04*(b_R03_R04*cos(theta_R03 - theta_R04) - g_R03_R04*sin(theta_R03 - theta_R04)) + V_R11*(b_R03_R11*cos(theta_R03 - theta_R11) - g_R03_R11*sin(theta_R03 - theta_R11))
struct[0].Gy_ini[5,5] = V_R02*V_R03*(b_R02_R03*sin(theta_R02 - theta_R03) - g_R02_R03*cos(theta_R02 - theta_R03)) + V_R03*V_R04*(-b_R03_R04*sin(theta_R03 - theta_R04) - g_R03_R04*cos(theta_R03 - theta_R04)) + V_R03*V_R11*(-b_R03_R11*sin(theta_R03 - theta_R11) - g_R03_R11*cos(theta_R03 - theta_R11))
struct[0].Gy_ini[5,6] = V_R03*(b_R03_R04*cos(theta_R03 - theta_R04) - g_R03_R04*sin(theta_R03 - theta_R04))
struct[0].Gy_ini[5,7] = V_R03*V_R04*(b_R03_R04*sin(theta_R03 - theta_R04) + g_R03_R04*cos(theta_R03 - theta_R04))
struct[0].Gy_ini[5,20] = V_R03*(b_R03_R11*cos(theta_R03 - theta_R11) - g_R03_R11*sin(theta_R03 - theta_R11))
struct[0].Gy_ini[5,21] = V_R03*V_R11*(b_R03_R11*sin(theta_R03 - theta_R11) + g_R03_R11*cos(theta_R03 - theta_R11))
struct[0].Gy_ini[6,4] = V_R04*(b_R03_R04*sin(theta_R03 - theta_R04) - g_R03_R04*cos(theta_R03 - theta_R04))
struct[0].Gy_ini[6,5] = V_R03*V_R04*(b_R03_R04*cos(theta_R03 - theta_R04) + g_R03_R04*sin(theta_R03 - theta_R04))
struct[0].Gy_ini[6,6] = V_R03*(b_R03_R04*sin(theta_R03 - theta_R04) - g_R03_R04*cos(theta_R03 - theta_R04)) + 2*V_R04*(g_R03_R04 + g_R04_R05 + g_R04_R12) + V_R05*(-b_R04_R05*sin(theta_R04 - theta_R05) - g_R04_R05*cos(theta_R04 - theta_R05)) + V_R12*(-b_R04_R12*sin(theta_R04 - theta_R12) - g_R04_R12*cos(theta_R04 - theta_R12))
struct[0].Gy_ini[6,7] = V_R03*V_R04*(-b_R03_R04*cos(theta_R03 - theta_R04) - g_R03_R04*sin(theta_R03 - theta_R04)) + V_R04*V_R05*(-b_R04_R05*cos(theta_R04 - theta_R05) + g_R04_R05*sin(theta_R04 - theta_R05)) + V_R04*V_R12*(-b_R04_R12*cos(theta_R04 - theta_R12) + g_R04_R12*sin(theta_R04 - theta_R12))
struct[0].Gy_ini[6,8] = V_R04*(-b_R04_R05*sin(theta_R04 - theta_R05) - g_R04_R05*cos(theta_R04 - theta_R05))
struct[0].Gy_ini[6,9] = V_R04*V_R05*(b_R04_R05*cos(theta_R04 - theta_R05) - g_R04_R05*sin(theta_R04 - theta_R05))
struct[0].Gy_ini[6,22] = V_R04*(-b_R04_R12*sin(theta_R04 - theta_R12) - g_R04_R12*cos(theta_R04 - theta_R12))
struct[0].Gy_ini[6,23] = V_R04*V_R12*(b_R04_R12*cos(theta_R04 - theta_R12) - g_R04_R12*sin(theta_R04 - theta_R12))
struct[0].Gy_ini[7,4] = V_R04*(b_R03_R04*cos(theta_R03 - theta_R04) + g_R03_R04*sin(theta_R03 - theta_R04))
struct[0].Gy_ini[7,5] = V_R03*V_R04*(-b_R03_R04*sin(theta_R03 - theta_R04) + g_R03_R04*cos(theta_R03 - theta_R04))
struct[0].Gy_ini[7,6] = V_R03*(b_R03_R04*cos(theta_R03 - theta_R04) + g_R03_R04*sin(theta_R03 - theta_R04)) + 2*V_R04*(-b_R03_R04 - b_R04_R05 - b_R04_R12 - bs_R03_R04/2 - bs_R04_R05/2 - bs_R04_R12/2) + V_R05*(b_R04_R05*cos(theta_R04 - theta_R05) - g_R04_R05*sin(theta_R04 - theta_R05)) + V_R12*(b_R04_R12*cos(theta_R04 - theta_R12) - g_R04_R12*sin(theta_R04 - theta_R12))
struct[0].Gy_ini[7,7] = V_R03*V_R04*(b_R03_R04*sin(theta_R03 - theta_R04) - g_R03_R04*cos(theta_R03 - theta_R04)) + V_R04*V_R05*(-b_R04_R05*sin(theta_R04 - theta_R05) - g_R04_R05*cos(theta_R04 - theta_R05)) + V_R04*V_R12*(-b_R04_R12*sin(theta_R04 - theta_R12) - g_R04_R12*cos(theta_R04 - theta_R12))
struct[0].Gy_ini[7,8] = V_R04*(b_R04_R05*cos(theta_R04 - theta_R05) - g_R04_R05*sin(theta_R04 - theta_R05))
struct[0].Gy_ini[7,9] = V_R04*V_R05*(b_R04_R05*sin(theta_R04 - theta_R05) + g_R04_R05*cos(theta_R04 - theta_R05))
struct[0].Gy_ini[7,22] = V_R04*(b_R04_R12*cos(theta_R04 - theta_R12) - g_R04_R12*sin(theta_R04 - theta_R12))
struct[0].Gy_ini[7,23] = V_R04*V_R12*(b_R04_R12*sin(theta_R04 - theta_R12) + g_R04_R12*cos(theta_R04 - theta_R12))
struct[0].Gy_ini[8,6] = V_R05*(b_R04_R05*sin(theta_R04 - theta_R05) - g_R04_R05*cos(theta_R04 - theta_R05))
struct[0].Gy_ini[8,7] = V_R04*V_R05*(b_R04_R05*cos(theta_R04 - theta_R05) + g_R04_R05*sin(theta_R04 - theta_R05))
struct[0].Gy_ini[8,8] = V_R04*(b_R04_R05*sin(theta_R04 - theta_R05) - g_R04_R05*cos(theta_R04 - theta_R05)) + 2*V_R05*(g_R04_R05 + g_R05_R06) + V_R06*(-b_R05_R06*sin(theta_R05 - theta_R06) - g_R05_R06*cos(theta_R05 - theta_R06))
struct[0].Gy_ini[8,9] = V_R04*V_R05*(-b_R04_R05*cos(theta_R04 - theta_R05) - g_R04_R05*sin(theta_R04 - theta_R05)) + V_R05*V_R06*(-b_R05_R06*cos(theta_R05 - theta_R06) + g_R05_R06*sin(theta_R05 - theta_R06))
struct[0].Gy_ini[8,10] = V_R05*(-b_R05_R06*sin(theta_R05 - theta_R06) - g_R05_R06*cos(theta_R05 - theta_R06))
struct[0].Gy_ini[8,11] = V_R05*V_R06*(b_R05_R06*cos(theta_R05 - theta_R06) - g_R05_R06*sin(theta_R05 - theta_R06))
struct[0].Gy_ini[9,6] = V_R05*(b_R04_R05*cos(theta_R04 - theta_R05) + g_R04_R05*sin(theta_R04 - theta_R05))
struct[0].Gy_ini[9,7] = V_R04*V_R05*(-b_R04_R05*sin(theta_R04 - theta_R05) + g_R04_R05*cos(theta_R04 - theta_R05))
struct[0].Gy_ini[9,8] = V_R04*(b_R04_R05*cos(theta_R04 - theta_R05) + g_R04_R05*sin(theta_R04 - theta_R05)) + 2*V_R05*(-b_R04_R05 - b_R05_R06 - bs_R04_R05/2 - bs_R05_R06/2) + V_R06*(b_R05_R06*cos(theta_R05 - theta_R06) - g_R05_R06*sin(theta_R05 - theta_R06))
struct[0].Gy_ini[9,9] = V_R04*V_R05*(b_R04_R05*sin(theta_R04 - theta_R05) - g_R04_R05*cos(theta_R04 - theta_R05)) + V_R05*V_R06*(-b_R05_R06*sin(theta_R05 - theta_R06) - g_R05_R06*cos(theta_R05 - theta_R06))
struct[0].Gy_ini[9,10] = V_R05*(b_R05_R06*cos(theta_R05 - theta_R06) - g_R05_R06*sin(theta_R05 - theta_R06))
struct[0].Gy_ini[9,11] = V_R05*V_R06*(b_R05_R06*sin(theta_R05 - theta_R06) + g_R05_R06*cos(theta_R05 - theta_R06))
struct[0].Gy_ini[10,8] = V_R06*(b_R05_R06*sin(theta_R05 - theta_R06) - g_R05_R06*cos(theta_R05 - theta_R06))
struct[0].Gy_ini[10,9] = V_R05*V_R06*(b_R05_R06*cos(theta_R05 - theta_R06) + g_R05_R06*sin(theta_R05 - theta_R06))
struct[0].Gy_ini[10,10] = V_R05*(b_R05_R06*sin(theta_R05 - theta_R06) - g_R05_R06*cos(theta_R05 - theta_R06)) + 2*V_R06*(g_R05_R06 + g_R06_R07 + g_R06_R16) + V_R07*(-b_R06_R07*sin(theta_R06 - theta_R07) - g_R06_R07*cos(theta_R06 - theta_R07)) + V_R16*(-b_R06_R16*sin(theta_R06 - theta_R16) - g_R06_R16*cos(theta_R06 - theta_R16))
struct[0].Gy_ini[10,11] = V_R05*V_R06*(-b_R05_R06*cos(theta_R05 - theta_R06) - g_R05_R06*sin(theta_R05 - theta_R06)) + V_R06*V_R07*(-b_R06_R07*cos(theta_R06 - theta_R07) + g_R06_R07*sin(theta_R06 - theta_R07)) + V_R06*V_R16*(-b_R06_R16*cos(theta_R06 - theta_R16) + g_R06_R16*sin(theta_R06 - theta_R16))
struct[0].Gy_ini[10,12] = V_R06*(-b_R06_R07*sin(theta_R06 - theta_R07) - g_R06_R07*cos(theta_R06 - theta_R07))
struct[0].Gy_ini[10,13] = V_R06*V_R07*(b_R06_R07*cos(theta_R06 - theta_R07) - g_R06_R07*sin(theta_R06 - theta_R07))
struct[0].Gy_ini[10,30] = V_R06*(-b_R06_R16*sin(theta_R06 - theta_R16) - g_R06_R16*cos(theta_R06 - theta_R16))
struct[0].Gy_ini[10,31] = V_R06*V_R16*(b_R06_R16*cos(theta_R06 - theta_R16) - g_R06_R16*sin(theta_R06 - theta_R16))
struct[0].Gy_ini[11,8] = V_R06*(b_R05_R06*cos(theta_R05 - theta_R06) + g_R05_R06*sin(theta_R05 - theta_R06))
struct[0].Gy_ini[11,9] = V_R05*V_R06*(-b_R05_R06*sin(theta_R05 - theta_R06) + g_R05_R06*cos(theta_R05 - theta_R06))
struct[0].Gy_ini[11,10] = V_R05*(b_R05_R06*cos(theta_R05 - theta_R06) + g_R05_R06*sin(theta_R05 - theta_R06)) + 2*V_R06*(-b_R05_R06 - b_R06_R07 - b_R06_R16 - bs_R05_R06/2 - bs_R06_R07/2 - bs_R06_R16/2) + V_R07*(b_R06_R07*cos(theta_R06 - theta_R07) - g_R06_R07*sin(theta_R06 - theta_R07)) + V_R16*(b_R06_R16*cos(theta_R06 - theta_R16) - g_R06_R16*sin(theta_R06 - theta_R16))
struct[0].Gy_ini[11,11] = V_R05*V_R06*(b_R05_R06*sin(theta_R05 - theta_R06) - g_R05_R06*cos(theta_R05 - theta_R06)) + V_R06*V_R07*(-b_R06_R07*sin(theta_R06 - theta_R07) - g_R06_R07*cos(theta_R06 - theta_R07)) + V_R06*V_R16*(-b_R06_R16*sin(theta_R06 - theta_R16) - g_R06_R16*cos(theta_R06 - theta_R16))
struct[0].Gy_ini[11,12] = V_R06*(b_R06_R07*cos(theta_R06 - theta_R07) - g_R06_R07*sin(theta_R06 - theta_R07))
struct[0].Gy_ini[11,13] = V_R06*V_R07*(b_R06_R07*sin(theta_R06 - theta_R07) + g_R06_R07*cos(theta_R06 - theta_R07))
struct[0].Gy_ini[11,30] = V_R06*(b_R06_R16*cos(theta_R06 - theta_R16) - g_R06_R16*sin(theta_R06 - theta_R16))
struct[0].Gy_ini[11,31] = V_R06*V_R16*(b_R06_R16*sin(theta_R06 - theta_R16) + g_R06_R16*cos(theta_R06 - theta_R16))
struct[0].Gy_ini[12,10] = V_R07*(b_R06_R07*sin(theta_R06 - theta_R07) - g_R06_R07*cos(theta_R06 - theta_R07))
struct[0].Gy_ini[12,11] = V_R06*V_R07*(b_R06_R07*cos(theta_R06 - theta_R07) + g_R06_R07*sin(theta_R06 - theta_R07))
struct[0].Gy_ini[12,12] = V_R06*(b_R06_R07*sin(theta_R06 - theta_R07) - g_R06_R07*cos(theta_R06 - theta_R07)) + 2*V_R07*(g_R06_R07 + g_R07_R08) + V_R08*(-b_R07_R08*sin(theta_R07 - theta_R08) - g_R07_R08*cos(theta_R07 - theta_R08))
struct[0].Gy_ini[12,13] = V_R06*V_R07*(-b_R06_R07*cos(theta_R06 - theta_R07) - g_R06_R07*sin(theta_R06 - theta_R07)) + V_R07*V_R08*(-b_R07_R08*cos(theta_R07 - theta_R08) + g_R07_R08*sin(theta_R07 - theta_R08))
struct[0].Gy_ini[12,14] = V_R07*(-b_R07_R08*sin(theta_R07 - theta_R08) - g_R07_R08*cos(theta_R07 - theta_R08))
struct[0].Gy_ini[12,15] = V_R07*V_R08*(b_R07_R08*cos(theta_R07 - theta_R08) - g_R07_R08*sin(theta_R07 - theta_R08))
struct[0].Gy_ini[13,10] = V_R07*(b_R06_R07*cos(theta_R06 - theta_R07) + g_R06_R07*sin(theta_R06 - theta_R07))
struct[0].Gy_ini[13,11] = V_R06*V_R07*(-b_R06_R07*sin(theta_R06 - theta_R07) + g_R06_R07*cos(theta_R06 - theta_R07))
struct[0].Gy_ini[13,12] = V_R06*(b_R06_R07*cos(theta_R06 - theta_R07) + g_R06_R07*sin(theta_R06 - theta_R07)) + 2*V_R07*(-b_R06_R07 - b_R07_R08 - bs_R06_R07/2 - bs_R07_R08/2) + V_R08*(b_R07_R08*cos(theta_R07 - theta_R08) - g_R07_R08*sin(theta_R07 - theta_R08))
struct[0].Gy_ini[13,13] = V_R06*V_R07*(b_R06_R07*sin(theta_R06 - theta_R07) - g_R06_R07*cos(theta_R06 - theta_R07)) + V_R07*V_R08*(-b_R07_R08*sin(theta_R07 - theta_R08) - g_R07_R08*cos(theta_R07 - theta_R08))
struct[0].Gy_ini[13,14] = V_R07*(b_R07_R08*cos(theta_R07 - theta_R08) - g_R07_R08*sin(theta_R07 - theta_R08))
struct[0].Gy_ini[13,15] = V_R07*V_R08*(b_R07_R08*sin(theta_R07 - theta_R08) + g_R07_R08*cos(theta_R07 - theta_R08))
struct[0].Gy_ini[14,12] = V_R08*(b_R07_R08*sin(theta_R07 - theta_R08) - g_R07_R08*cos(theta_R07 - theta_R08))
struct[0].Gy_ini[14,13] = V_R07*V_R08*(b_R07_R08*cos(theta_R07 - theta_R08) + g_R07_R08*sin(theta_R07 - theta_R08))
struct[0].Gy_ini[14,14] = V_R07*(b_R07_R08*sin(theta_R07 - theta_R08) - g_R07_R08*cos(theta_R07 - theta_R08)) + 2*V_R08*(g_R07_R08 + g_R08_R09) + V_R09*(-b_R08_R09*sin(theta_R08 - theta_R09) - g_R08_R09*cos(theta_R08 - theta_R09))
struct[0].Gy_ini[14,15] = V_R07*V_R08*(-b_R07_R08*cos(theta_R07 - theta_R08) - g_R07_R08*sin(theta_R07 - theta_R08)) + V_R08*V_R09*(-b_R08_R09*cos(theta_R08 - theta_R09) + g_R08_R09*sin(theta_R08 - theta_R09))
struct[0].Gy_ini[14,16] = V_R08*(-b_R08_R09*sin(theta_R08 - theta_R09) - g_R08_R09*cos(theta_R08 - theta_R09))
struct[0].Gy_ini[14,17] = V_R08*V_R09*(b_R08_R09*cos(theta_R08 - theta_R09) - g_R08_R09*sin(theta_R08 - theta_R09))
struct[0].Gy_ini[15,12] = V_R08*(b_R07_R08*cos(theta_R07 - theta_R08) + g_R07_R08*sin(theta_R07 - theta_R08))
struct[0].Gy_ini[15,13] = V_R07*V_R08*(-b_R07_R08*sin(theta_R07 - theta_R08) + g_R07_R08*cos(theta_R07 - theta_R08))
struct[0].Gy_ini[15,14] = V_R07*(b_R07_R08*cos(theta_R07 - theta_R08) + g_R07_R08*sin(theta_R07 - theta_R08)) + 2*V_R08*(-b_R07_R08 - b_R08_R09 - bs_R07_R08/2 - bs_R08_R09/2) + V_R09*(b_R08_R09*cos(theta_R08 - theta_R09) - g_R08_R09*sin(theta_R08 - theta_R09))
struct[0].Gy_ini[15,15] = V_R07*V_R08*(b_R07_R08*sin(theta_R07 - theta_R08) - g_R07_R08*cos(theta_R07 - theta_R08)) + V_R08*V_R09*(-b_R08_R09*sin(theta_R08 - theta_R09) - g_R08_R09*cos(theta_R08 - theta_R09))
struct[0].Gy_ini[15,16] = V_R08*(b_R08_R09*cos(theta_R08 - theta_R09) - g_R08_R09*sin(theta_R08 - theta_R09))
struct[0].Gy_ini[15,17] = V_R08*V_R09*(b_R08_R09*sin(theta_R08 - theta_R09) + g_R08_R09*cos(theta_R08 - theta_R09))
struct[0].Gy_ini[16,14] = V_R09*(b_R08_R09*sin(theta_R08 - theta_R09) - g_R08_R09*cos(theta_R08 - theta_R09))
struct[0].Gy_ini[16,15] = V_R08*V_R09*(b_R08_R09*cos(theta_R08 - theta_R09) + g_R08_R09*sin(theta_R08 - theta_R09))
struct[0].Gy_ini[16,16] = V_R08*(b_R08_R09*sin(theta_R08 - theta_R09) - g_R08_R09*cos(theta_R08 - theta_R09)) + 2*V_R09*(g_R08_R09 + g_R09_R10 + g_R09_R17) + V_R10*(-b_R09_R10*sin(theta_R09 - theta_R10) - g_R09_R10*cos(theta_R09 - theta_R10)) + V_R17*(-b_R09_R17*sin(theta_R09 - theta_R17) - g_R09_R17*cos(theta_R09 - theta_R17))
struct[0].Gy_ini[16,17] = V_R08*V_R09*(-b_R08_R09*cos(theta_R08 - theta_R09) - g_R08_R09*sin(theta_R08 - theta_R09)) + V_R09*V_R10*(-b_R09_R10*cos(theta_R09 - theta_R10) + g_R09_R10*sin(theta_R09 - theta_R10)) + V_R09*V_R17*(-b_R09_R17*cos(theta_R09 - theta_R17) + g_R09_R17*sin(theta_R09 - theta_R17))
struct[0].Gy_ini[16,18] = V_R09*(-b_R09_R10*sin(theta_R09 - theta_R10) - g_R09_R10*cos(theta_R09 - theta_R10))
struct[0].Gy_ini[16,19] = V_R09*V_R10*(b_R09_R10*cos(theta_R09 - theta_R10) - g_R09_R10*sin(theta_R09 - theta_R10))
struct[0].Gy_ini[16,32] = V_R09*(-b_R09_R17*sin(theta_R09 - theta_R17) - g_R09_R17*cos(theta_R09 - theta_R17))
struct[0].Gy_ini[16,33] = V_R09*V_R17*(b_R09_R17*cos(theta_R09 - theta_R17) - g_R09_R17*sin(theta_R09 - theta_R17))
struct[0].Gy_ini[17,14] = V_R09*(b_R08_R09*cos(theta_R08 - theta_R09) + g_R08_R09*sin(theta_R08 - theta_R09))
struct[0].Gy_ini[17,15] = V_R08*V_R09*(-b_R08_R09*sin(theta_R08 - theta_R09) + g_R08_R09*cos(theta_R08 - theta_R09))
struct[0].Gy_ini[17,16] = V_R08*(b_R08_R09*cos(theta_R08 - theta_R09) + g_R08_R09*sin(theta_R08 - theta_R09)) + 2*V_R09*(-b_R08_R09 - b_R09_R10 - b_R09_R17 - bs_R08_R09/2 - bs_R09_R10/2 - bs_R09_R17/2) + V_R10*(b_R09_R10*cos(theta_R09 - theta_R10) - g_R09_R10*sin(theta_R09 - theta_R10)) + V_R17*(b_R09_R17*cos(theta_R09 - theta_R17) - g_R09_R17*sin(theta_R09 - theta_R17))
struct[0].Gy_ini[17,17] = V_R08*V_R09*(b_R08_R09*sin(theta_R08 - theta_R09) - g_R08_R09*cos(theta_R08 - theta_R09)) + V_R09*V_R10*(-b_R09_R10*sin(theta_R09 - theta_R10) - g_R09_R10*cos(theta_R09 - theta_R10)) + V_R09*V_R17*(-b_R09_R17*sin(theta_R09 - theta_R17) - g_R09_R17*cos(theta_R09 - theta_R17))
struct[0].Gy_ini[17,18] = V_R09*(b_R09_R10*cos(theta_R09 - theta_R10) - g_R09_R10*sin(theta_R09 - theta_R10))
struct[0].Gy_ini[17,19] = V_R09*V_R10*(b_R09_R10*sin(theta_R09 - theta_R10) + g_R09_R10*cos(theta_R09 - theta_R10))
struct[0].Gy_ini[17,32] = V_R09*(b_R09_R17*cos(theta_R09 - theta_R17) - g_R09_R17*sin(theta_R09 - theta_R17))
struct[0].Gy_ini[17,33] = V_R09*V_R17*(b_R09_R17*sin(theta_R09 - theta_R17) + g_R09_R17*cos(theta_R09 - theta_R17))
struct[0].Gy_ini[18,16] = V_R10*(b_R09_R10*sin(theta_R09 - theta_R10) - g_R09_R10*cos(theta_R09 - theta_R10))
struct[0].Gy_ini[18,17] = V_R09*V_R10*(b_R09_R10*cos(theta_R09 - theta_R10) + g_R09_R10*sin(theta_R09 - theta_R10))
struct[0].Gy_ini[18,18] = V_R09*(b_R09_R10*sin(theta_R09 - theta_R10) - g_R09_R10*cos(theta_R09 - theta_R10)) + 2*V_R10*(g_R09_R10 + g_R10_R18) + V_R18*(-b_R10_R18*sin(theta_R10 - theta_R18) - g_R10_R18*cos(theta_R10 - theta_R18))
struct[0].Gy_ini[18,19] = V_R09*V_R10*(-b_R09_R10*cos(theta_R09 - theta_R10) - g_R09_R10*sin(theta_R09 - theta_R10)) + V_R10*V_R18*(-b_R10_R18*cos(theta_R10 - theta_R18) + g_R10_R18*sin(theta_R10 - theta_R18))
struct[0].Gy_ini[18,34] = V_R10*(-b_R10_R18*sin(theta_R10 - theta_R18) - g_R10_R18*cos(theta_R10 - theta_R18))
struct[0].Gy_ini[18,35] = V_R10*V_R18*(b_R10_R18*cos(theta_R10 - theta_R18) - g_R10_R18*sin(theta_R10 - theta_R18))
struct[0].Gy_ini[18,38] = -S_n_R10/S_base
struct[0].Gy_ini[19,16] = V_R10*(b_R09_R10*cos(theta_R09 - theta_R10) + g_R09_R10*sin(theta_R09 - theta_R10))
struct[0].Gy_ini[19,17] = V_R09*V_R10*(-b_R09_R10*sin(theta_R09 - theta_R10) + g_R09_R10*cos(theta_R09 - theta_R10))
struct[0].Gy_ini[19,18] = V_R09*(b_R09_R10*cos(theta_R09 - theta_R10) + g_R09_R10*sin(theta_R09 - theta_R10)) + 2*V_R10*(-b_R09_R10 - b_R10_R18 - bs_R09_R10/2 - bs_R10_R18/2) + V_R18*(b_R10_R18*cos(theta_R10 - theta_R18) - g_R10_R18*sin(theta_R10 - theta_R18))
struct[0].Gy_ini[19,19] = V_R09*V_R10*(b_R09_R10*sin(theta_R09 - theta_R10) - g_R09_R10*cos(theta_R09 - theta_R10)) + V_R10*V_R18*(-b_R10_R18*sin(theta_R10 - theta_R18) - g_R10_R18*cos(theta_R10 - theta_R18))
struct[0].Gy_ini[19,34] = V_R10*(b_R10_R18*cos(theta_R10 - theta_R18) - g_R10_R18*sin(theta_R10 - theta_R18))
struct[0].Gy_ini[19,35] = V_R10*V_R18*(b_R10_R18*sin(theta_R10 - theta_R18) + g_R10_R18*cos(theta_R10 - theta_R18))
struct[0].Gy_ini[19,39] = -S_n_R10/S_base
struct[0].Gy_ini[20,4] = V_R11*(b_R03_R11*sin(theta_R03 - theta_R11) - g_R03_R11*cos(theta_R03 - theta_R11))
struct[0].Gy_ini[20,5] = V_R03*V_R11*(b_R03_R11*cos(theta_R03 - theta_R11) + g_R03_R11*sin(theta_R03 - theta_R11))
struct[0].Gy_ini[20,20] = V_R03*(b_R03_R11*sin(theta_R03 - theta_R11) - g_R03_R11*cos(theta_R03 - theta_R11)) + 2*V_R11*g_R03_R11
struct[0].Gy_ini[20,21] = V_R03*V_R11*(-b_R03_R11*cos(theta_R03 - theta_R11) - g_R03_R11*sin(theta_R03 - theta_R11))
struct[0].Gy_ini[21,4] = V_R11*(b_R03_R11*cos(theta_R03 - theta_R11) + g_R03_R11*sin(theta_R03 - theta_R11))
struct[0].Gy_ini[21,5] = V_R03*V_R11*(-b_R03_R11*sin(theta_R03 - theta_R11) + g_R03_R11*cos(theta_R03 - theta_R11))
struct[0].Gy_ini[21,20] = V_R03*(b_R03_R11*cos(theta_R03 - theta_R11) + g_R03_R11*sin(theta_R03 - theta_R11)) + 2*V_R11*(-b_R03_R11 - bs_R03_R11/2)
struct[0].Gy_ini[21,21] = V_R03*V_R11*(b_R03_R11*sin(theta_R03 - theta_R11) - g_R03_R11*cos(theta_R03 - theta_R11))
struct[0].Gy_ini[22,6] = V_R12*(b_R04_R12*sin(theta_R04 - theta_R12) - g_R04_R12*cos(theta_R04 - theta_R12))
struct[0].Gy_ini[22,7] = V_R04*V_R12*(b_R04_R12*cos(theta_R04 - theta_R12) + g_R04_R12*sin(theta_R04 - theta_R12))
struct[0].Gy_ini[22,22] = V_R04*(b_R04_R12*sin(theta_R04 - theta_R12) - g_R04_R12*cos(theta_R04 - theta_R12)) + 2*V_R12*(g_R04_R12 + g_R12_R13) + V_R13*(-b_R12_R13*sin(theta_R12 - theta_R13) - g_R12_R13*cos(theta_R12 - theta_R13))
struct[0].Gy_ini[22,23] = V_R04*V_R12*(-b_R04_R12*cos(theta_R04 - theta_R12) - g_R04_R12*sin(theta_R04 - theta_R12)) + V_R12*V_R13*(-b_R12_R13*cos(theta_R12 - theta_R13) + g_R12_R13*sin(theta_R12 - theta_R13))
struct[0].Gy_ini[22,24] = V_R12*(-b_R12_R13*sin(theta_R12 - theta_R13) - g_R12_R13*cos(theta_R12 - theta_R13))
struct[0].Gy_ini[22,25] = V_R12*V_R13*(b_R12_R13*cos(theta_R12 - theta_R13) - g_R12_R13*sin(theta_R12 - theta_R13))
struct[0].Gy_ini[23,6] = V_R12*(b_R04_R12*cos(theta_R04 - theta_R12) + g_R04_R12*sin(theta_R04 - theta_R12))
struct[0].Gy_ini[23,7] = V_R04*V_R12*(-b_R04_R12*sin(theta_R04 - theta_R12) + g_R04_R12*cos(theta_R04 - theta_R12))
struct[0].Gy_ini[23,22] = V_R04*(b_R04_R12*cos(theta_R04 - theta_R12) + g_R04_R12*sin(theta_R04 - theta_R12)) + 2*V_R12*(-b_R04_R12 - b_R12_R13 - bs_R04_R12/2 - bs_R12_R13/2) + V_R13*(b_R12_R13*cos(theta_R12 - theta_R13) - g_R12_R13*sin(theta_R12 - theta_R13))
struct[0].Gy_ini[23,23] = V_R04*V_R12*(b_R04_R12*sin(theta_R04 - theta_R12) - g_R04_R12*cos(theta_R04 - theta_R12)) + V_R12*V_R13*(-b_R12_R13*sin(theta_R12 - theta_R13) - g_R12_R13*cos(theta_R12 - theta_R13))
struct[0].Gy_ini[23,24] = V_R12*(b_R12_R13*cos(theta_R12 - theta_R13) - g_R12_R13*sin(theta_R12 - theta_R13))
struct[0].Gy_ini[23,25] = V_R12*V_R13*(b_R12_R13*sin(theta_R12 - theta_R13) + g_R12_R13*cos(theta_R12 - theta_R13))
struct[0].Gy_ini[24,22] = V_R13*(b_R12_R13*sin(theta_R12 - theta_R13) - g_R12_R13*cos(theta_R12 - theta_R13))
struct[0].Gy_ini[24,23] = V_R12*V_R13*(b_R12_R13*cos(theta_R12 - theta_R13) + g_R12_R13*sin(theta_R12 - theta_R13))
struct[0].Gy_ini[24,24] = V_R12*(b_R12_R13*sin(theta_R12 - theta_R13) - g_R12_R13*cos(theta_R12 - theta_R13)) + 2*V_R13*(g_R12_R13 + g_R13_R14) + V_R14*(-b_R13_R14*sin(theta_R13 - theta_R14) - g_R13_R14*cos(theta_R13 - theta_R14))
struct[0].Gy_ini[24,25] = V_R12*V_R13*(-b_R12_R13*cos(theta_R12 - theta_R13) - g_R12_R13*sin(theta_R12 - theta_R13)) + V_R13*V_R14*(-b_R13_R14*cos(theta_R13 - theta_R14) + g_R13_R14*sin(theta_R13 - theta_R14))
struct[0].Gy_ini[24,26] = V_R13*(-b_R13_R14*sin(theta_R13 - theta_R14) - g_R13_R14*cos(theta_R13 - theta_R14))
struct[0].Gy_ini[24,27] = V_R13*V_R14*(b_R13_R14*cos(theta_R13 - theta_R14) - g_R13_R14*sin(theta_R13 - theta_R14))
struct[0].Gy_ini[25,22] = V_R13*(b_R12_R13*cos(theta_R12 - theta_R13) + g_R12_R13*sin(theta_R12 - theta_R13))
struct[0].Gy_ini[25,23] = V_R12*V_R13*(-b_R12_R13*sin(theta_R12 - theta_R13) + g_R12_R13*cos(theta_R12 - theta_R13))
struct[0].Gy_ini[25,24] = V_R12*(b_R12_R13*cos(theta_R12 - theta_R13) + g_R12_R13*sin(theta_R12 - theta_R13)) + 2*V_R13*(-b_R12_R13 - b_R13_R14 - bs_R12_R13/2 - bs_R13_R14/2) + V_R14*(b_R13_R14*cos(theta_R13 - theta_R14) - g_R13_R14*sin(theta_R13 - theta_R14))
struct[0].Gy_ini[25,25] = V_R12*V_R13*(b_R12_R13*sin(theta_R12 - theta_R13) - g_R12_R13*cos(theta_R12 - theta_R13)) + V_R13*V_R14*(-b_R13_R14*sin(theta_R13 - theta_R14) - g_R13_R14*cos(theta_R13 - theta_R14))
struct[0].Gy_ini[25,26] = V_R13*(b_R13_R14*cos(theta_R13 - theta_R14) - g_R13_R14*sin(theta_R13 - theta_R14))
struct[0].Gy_ini[25,27] = V_R13*V_R14*(b_R13_R14*sin(theta_R13 - theta_R14) + g_R13_R14*cos(theta_R13 - theta_R14))
struct[0].Gy_ini[26,24] = V_R14*(b_R13_R14*sin(theta_R13 - theta_R14) - g_R13_R14*cos(theta_R13 - theta_R14))
struct[0].Gy_ini[26,25] = V_R13*V_R14*(b_R13_R14*cos(theta_R13 - theta_R14) + g_R13_R14*sin(theta_R13 - theta_R14))
struct[0].Gy_ini[26,26] = V_R13*(b_R13_R14*sin(theta_R13 - theta_R14) - g_R13_R14*cos(theta_R13 - theta_R14)) + 2*V_R14*(g_R13_R14 + g_R14_R15) + V_R15*(-b_R14_R15*sin(theta_R14 - theta_R15) - g_R14_R15*cos(theta_R14 - theta_R15))
struct[0].Gy_ini[26,27] = V_R13*V_R14*(-b_R13_R14*cos(theta_R13 - theta_R14) - g_R13_R14*sin(theta_R13 - theta_R14)) + V_R14*V_R15*(-b_R14_R15*cos(theta_R14 - theta_R15) + g_R14_R15*sin(theta_R14 - theta_R15))
struct[0].Gy_ini[26,28] = V_R14*(-b_R14_R15*sin(theta_R14 - theta_R15) - g_R14_R15*cos(theta_R14 - theta_R15))
struct[0].Gy_ini[26,29] = V_R14*V_R15*(b_R14_R15*cos(theta_R14 - theta_R15) - g_R14_R15*sin(theta_R14 - theta_R15))
struct[0].Gy_ini[26,44] = -S_n_R14/S_base
struct[0].Gy_ini[27,24] = V_R14*(b_R13_R14*cos(theta_R13 - theta_R14) + g_R13_R14*sin(theta_R13 - theta_R14))
struct[0].Gy_ini[27,25] = V_R13*V_R14*(-b_R13_R14*sin(theta_R13 - theta_R14) + g_R13_R14*cos(theta_R13 - theta_R14))
struct[0].Gy_ini[27,26] = V_R13*(b_R13_R14*cos(theta_R13 - theta_R14) + g_R13_R14*sin(theta_R13 - theta_R14)) + 2*V_R14*(-b_R13_R14 - b_R14_R15 - bs_R13_R14/2 - bs_R14_R15/2) + V_R15*(b_R14_R15*cos(theta_R14 - theta_R15) - g_R14_R15*sin(theta_R14 - theta_R15))
struct[0].Gy_ini[27,27] = V_R13*V_R14*(b_R13_R14*sin(theta_R13 - theta_R14) - g_R13_R14*cos(theta_R13 - theta_R14)) + V_R14*V_R15*(-b_R14_R15*sin(theta_R14 - theta_R15) - g_R14_R15*cos(theta_R14 - theta_R15))
struct[0].Gy_ini[27,28] = V_R14*(b_R14_R15*cos(theta_R14 - theta_R15) - g_R14_R15*sin(theta_R14 - theta_R15))
struct[0].Gy_ini[27,29] = V_R14*V_R15*(b_R14_R15*sin(theta_R14 - theta_R15) + g_R14_R15*cos(theta_R14 - theta_R15))
struct[0].Gy_ini[27,45] = -S_n_R14/S_base
struct[0].Gy_ini[28,26] = V_R15*(b_R14_R15*sin(theta_R14 - theta_R15) - g_R14_R15*cos(theta_R14 - theta_R15))
struct[0].Gy_ini[28,27] = V_R14*V_R15*(b_R14_R15*cos(theta_R14 - theta_R15) + g_R14_R15*sin(theta_R14 - theta_R15))
struct[0].Gy_ini[28,28] = V_R14*(b_R14_R15*sin(theta_R14 - theta_R15) - g_R14_R15*cos(theta_R14 - theta_R15)) + 2*V_R15*g_R14_R15
struct[0].Gy_ini[28,29] = V_R14*V_R15*(-b_R14_R15*cos(theta_R14 - theta_R15) - g_R14_R15*sin(theta_R14 - theta_R15))
struct[0].Gy_ini[29,26] = V_R15*(b_R14_R15*cos(theta_R14 - theta_R15) + g_R14_R15*sin(theta_R14 - theta_R15))
struct[0].Gy_ini[29,27] = V_R14*V_R15*(-b_R14_R15*sin(theta_R14 - theta_R15) + g_R14_R15*cos(theta_R14 - theta_R15))
struct[0].Gy_ini[29,28] = V_R14*(b_R14_R15*cos(theta_R14 - theta_R15) + g_R14_R15*sin(theta_R14 - theta_R15)) + 2*V_R15*(-b_R14_R15 - bs_R14_R15/2)
struct[0].Gy_ini[29,29] = V_R14*V_R15*(b_R14_R15*sin(theta_R14 - theta_R15) - g_R14_R15*cos(theta_R14 - theta_R15))
struct[0].Gy_ini[30,10] = V_R16*(b_R06_R16*sin(theta_R06 - theta_R16) - g_R06_R16*cos(theta_R06 - theta_R16))
struct[0].Gy_ini[30,11] = V_R06*V_R16*(b_R06_R16*cos(theta_R06 - theta_R16) + g_R06_R16*sin(theta_R06 - theta_R16))
struct[0].Gy_ini[30,30] = V_R06*(b_R06_R16*sin(theta_R06 - theta_R16) - g_R06_R16*cos(theta_R06 - theta_R16)) + 2*V_R16*g_R06_R16
struct[0].Gy_ini[30,31] = V_R06*V_R16*(-b_R06_R16*cos(theta_R06 - theta_R16) - g_R06_R16*sin(theta_R06 - theta_R16))
struct[0].Gy_ini[31,10] = V_R16*(b_R06_R16*cos(theta_R06 - theta_R16) + g_R06_R16*sin(theta_R06 - theta_R16))
struct[0].Gy_ini[31,11] = V_R06*V_R16*(-b_R06_R16*sin(theta_R06 - theta_R16) + g_R06_R16*cos(theta_R06 - theta_R16))
struct[0].Gy_ini[31,30] = V_R06*(b_R06_R16*cos(theta_R06 - theta_R16) + g_R06_R16*sin(theta_R06 - theta_R16)) + 2*V_R16*(-b_R06_R16 - bs_R06_R16/2)
struct[0].Gy_ini[31,31] = V_R06*V_R16*(b_R06_R16*sin(theta_R06 - theta_R16) - g_R06_R16*cos(theta_R06 - theta_R16))
struct[0].Gy_ini[32,16] = V_R17*(b_R09_R17*sin(theta_R09 - theta_R17) - g_R09_R17*cos(theta_R09 - theta_R17))
struct[0].Gy_ini[32,17] = V_R09*V_R17*(b_R09_R17*cos(theta_R09 - theta_R17) + g_R09_R17*sin(theta_R09 - theta_R17))
struct[0].Gy_ini[32,32] = V_R09*(b_R09_R17*sin(theta_R09 - theta_R17) - g_R09_R17*cos(theta_R09 - theta_R17)) + 2*V_R17*g_R09_R17
struct[0].Gy_ini[32,33] = V_R09*V_R17*(-b_R09_R17*cos(theta_R09 - theta_R17) - g_R09_R17*sin(theta_R09 - theta_R17))
struct[0].Gy_ini[33,16] = V_R17*(b_R09_R17*cos(theta_R09 - theta_R17) + g_R09_R17*sin(theta_R09 - theta_R17))
struct[0].Gy_ini[33,17] = V_R09*V_R17*(-b_R09_R17*sin(theta_R09 - theta_R17) + g_R09_R17*cos(theta_R09 - theta_R17))
struct[0].Gy_ini[33,32] = V_R09*(b_R09_R17*cos(theta_R09 - theta_R17) + g_R09_R17*sin(theta_R09 - theta_R17)) + 2*V_R17*(-b_R09_R17 - bs_R09_R17/2)
struct[0].Gy_ini[33,33] = V_R09*V_R17*(b_R09_R17*sin(theta_R09 - theta_R17) - g_R09_R17*cos(theta_R09 - theta_R17))
struct[0].Gy_ini[34,18] = V_R18*(b_R10_R18*sin(theta_R10 - theta_R18) - g_R10_R18*cos(theta_R10 - theta_R18))
struct[0].Gy_ini[34,19] = V_R10*V_R18*(b_R10_R18*cos(theta_R10 - theta_R18) + g_R10_R18*sin(theta_R10 - theta_R18))
struct[0].Gy_ini[34,34] = V_R10*(b_R10_R18*sin(theta_R10 - theta_R18) - g_R10_R18*cos(theta_R10 - theta_R18)) + 2*V_R18*g_R10_R18
struct[0].Gy_ini[34,35] = V_R10*V_R18*(-b_R10_R18*cos(theta_R10 - theta_R18) - g_R10_R18*sin(theta_R10 - theta_R18))
struct[0].Gy_ini[35,18] = V_R18*(b_R10_R18*cos(theta_R10 - theta_R18) + g_R10_R18*sin(theta_R10 - theta_R18))
struct[0].Gy_ini[35,19] = V_R10*V_R18*(-b_R10_R18*sin(theta_R10 - theta_R18) + g_R10_R18*cos(theta_R10 - theta_R18))
struct[0].Gy_ini[35,34] = V_R10*(b_R10_R18*cos(theta_R10 - theta_R18) + g_R10_R18*sin(theta_R10 - theta_R18)) + 2*V_R18*(-b_R10_R18 - bs_R10_R18/2)
struct[0].Gy_ini[35,35] = V_R10*V_R18*(b_R10_R18*sin(theta_R10 - theta_R18) - g_R10_R18*cos(theta_R10 - theta_R18))
struct[0].Gy_ini[36,18] = cos(delta_R10 - theta_R10)
struct[0].Gy_ini[36,19] = V_R10*sin(delta_R10 - theta_R10)
struct[0].Gy_ini[36,36] = X1d_R10
struct[0].Gy_ini[36,37] = R_a_R10
struct[0].Gy_ini[37,18] = sin(delta_R10 - theta_R10)
struct[0].Gy_ini[37,19] = -V_R10*cos(delta_R10 - theta_R10)
struct[0].Gy_ini[37,36] = R_a_R10
struct[0].Gy_ini[37,37] = -X1q_R10
struct[0].Gy_ini[38,18] = i_d_R10*sin(delta_R10 - theta_R10) + i_q_R10*cos(delta_R10 - theta_R10)
struct[0].Gy_ini[38,19] = -V_R10*i_d_R10*cos(delta_R10 - theta_R10) + V_R10*i_q_R10*sin(delta_R10 - theta_R10)
struct[0].Gy_ini[38,36] = V_R10*sin(delta_R10 - theta_R10)
struct[0].Gy_ini[38,37] = V_R10*cos(delta_R10 - theta_R10)
struct[0].Gy_ini[38,38] = -1
struct[0].Gy_ini[39,18] = i_d_R10*cos(delta_R10 - theta_R10) - i_q_R10*sin(delta_R10 - theta_R10)
struct[0].Gy_ini[39,19] = V_R10*i_d_R10*sin(delta_R10 - theta_R10) + V_R10*i_q_R10*cos(delta_R10 - theta_R10)
struct[0].Gy_ini[39,36] = V_R10*cos(delta_R10 - theta_R10)
struct[0].Gy_ini[39,37] = -V_R10*sin(delta_R10 - theta_R10)
struct[0].Gy_ini[39,39] = -1
struct[0].Gy_ini[40,40] = -1
struct[0].Gy_ini[41,41] = -1
struct[0].Gy_ini[41,49] = 1
struct[0].Gy_ini[42,26] = cos(delta_R14 - theta_R14)
struct[0].Gy_ini[42,27] = V_R14*sin(delta_R14 - theta_R14)
struct[0].Gy_ini[42,42] = X1d_R14
struct[0].Gy_ini[42,43] = R_a_R14
struct[0].Gy_ini[43,26] = sin(delta_R14 - theta_R14)
struct[0].Gy_ini[43,27] = -V_R14*cos(delta_R14 - theta_R14)
struct[0].Gy_ini[43,42] = R_a_R14
struct[0].Gy_ini[43,43] = -X1q_R14
struct[0].Gy_ini[44,26] = i_d_R14*sin(delta_R14 - theta_R14) + i_q_R14*cos(delta_R14 - theta_R14)
struct[0].Gy_ini[44,27] = -V_R14*i_d_R14*cos(delta_R14 - theta_R14) + V_R14*i_q_R14*sin(delta_R14 - theta_R14)
struct[0].Gy_ini[44,42] = V_R14*sin(delta_R14 - theta_R14)
struct[0].Gy_ini[44,43] = V_R14*cos(delta_R14 - theta_R14)
struct[0].Gy_ini[44,44] = -1
struct[0].Gy_ini[45,26] = i_d_R14*cos(delta_R14 - theta_R14) - i_q_R14*sin(delta_R14 - theta_R14)
struct[0].Gy_ini[45,27] = V_R14*i_d_R14*sin(delta_R14 - theta_R14) + V_R14*i_q_R14*cos(delta_R14 - theta_R14)
struct[0].Gy_ini[45,42] = V_R14*cos(delta_R14 - theta_R14)
struct[0].Gy_ini[45,43] = -V_R14*sin(delta_R14 - theta_R14)
struct[0].Gy_ini[45,45] = -1
struct[0].Gy_ini[46,46] = -1
struct[0].Gy_ini[47,47] = -1
struct[0].Gy_ini[47,50] = 1
struct[0].Gy_ini[48,48] = -1
struct[0].Gy_ini[49,49] = -1
struct[0].Gy_ini[50,50] = -1
def run_nn(t,struct,mode):
# Parameters:
S_base = struct[0].S_base
g_R01_R02 = struct[0].g_R01_R02
b_R01_R02 = struct[0].b_R01_R02
bs_R01_R02 = struct[0].bs_R01_R02
g_R02_R03 = struct[0].g_R02_R03
b_R02_R03 = struct[0].b_R02_R03
bs_R02_R03 = struct[0].bs_R02_R03
g_R03_R04 = struct[0].g_R03_R04
b_R03_R04 = struct[0].b_R03_R04
bs_R03_R04 = struct[0].bs_R03_R04
g_R04_R05 = struct[0].g_R04_R05
b_R04_R05 = struct[0].b_R04_R05
bs_R04_R05 = struct[0].bs_R04_R05
g_R05_R06 = struct[0].g_R05_R06
b_R05_R06 = struct[0].b_R05_R06
bs_R05_R06 = struct[0].bs_R05_R06
g_R06_R07 = struct[0].g_R06_R07
b_R06_R07 = struct[0].b_R06_R07
bs_R06_R07 = struct[0].bs_R06_R07
g_R07_R08 = struct[0].g_R07_R08
b_R07_R08 = struct[0].b_R07_R08
bs_R07_R08 = struct[0].bs_R07_R08
g_R08_R09 = struct[0].g_R08_R09
b_R08_R09 = struct[0].b_R08_R09
bs_R08_R09 = struct[0].bs_R08_R09
g_R09_R10 = struct[0].g_R09_R10
b_R09_R10 = struct[0].b_R09_R10
bs_R09_R10 = struct[0].bs_R09_R10
g_R03_R11 = struct[0].g_R03_R11
b_R03_R11 = struct[0].b_R03_R11
bs_R03_R11 = struct[0].bs_R03_R11
g_R04_R12 = struct[0].g_R04_R12
b_R04_R12 = struct[0].b_R04_R12
bs_R04_R12 = struct[0].bs_R04_R12
g_R12_R13 = struct[0].g_R12_R13
b_R12_R13 = struct[0].b_R12_R13
bs_R12_R13 = struct[0].bs_R12_R13
g_R13_R14 = struct[0].g_R13_R14
b_R13_R14 = struct[0].b_R13_R14
bs_R13_R14 = struct[0].bs_R13_R14
g_R14_R15 = struct[0].g_R14_R15
b_R14_R15 = struct[0].b_R14_R15
bs_R14_R15 = struct[0].bs_R14_R15
g_R06_R16 = struct[0].g_R06_R16
b_R06_R16 = struct[0].b_R06_R16
bs_R06_R16 = struct[0].bs_R06_R16
g_R09_R17 = struct[0].g_R09_R17
b_R09_R17 = struct[0].b_R09_R17
bs_R09_R17 = struct[0].bs_R09_R17
g_R10_R18 = struct[0].g_R10_R18
b_R10_R18 = struct[0].b_R10_R18
bs_R10_R18 = struct[0].bs_R10_R18
U_R01_n = struct[0].U_R01_n
U_R02_n = struct[0].U_R02_n
U_R03_n = struct[0].U_R03_n
U_R04_n = struct[0].U_R04_n
U_R05_n = struct[0].U_R05_n
U_R06_n = struct[0].U_R06_n
U_R07_n = struct[0].U_R07_n
U_R08_n = struct[0].U_R08_n
U_R09_n = struct[0].U_R09_n
U_R10_n = struct[0].U_R10_n
U_R11_n = struct[0].U_R11_n
U_R12_n = struct[0].U_R12_n
U_R13_n = struct[0].U_R13_n
U_R14_n = struct[0].U_R14_n
U_R15_n = struct[0].U_R15_n
U_R16_n = struct[0].U_R16_n
U_R17_n = struct[0].U_R17_n
U_R18_n = struct[0].U_R18_n
S_n_R10 = struct[0].S_n_R10
H_R10 = struct[0].H_R10
Omega_b_R10 = struct[0].Omega_b_R10
T1d0_R10 = struct[0].T1d0_R10
T1q0_R10 = struct[0].T1q0_R10
X_d_R10 = struct[0].X_d_R10
X_q_R10 = struct[0].X_q_R10
X1d_R10 = struct[0].X1d_R10
X1q_R10 = struct[0].X1q_R10
D_R10 = struct[0].D_R10
R_a_R10 = struct[0].R_a_R10
K_delta_R10 = struct[0].K_delta_R10
K_a_R10 = struct[0].K_a_R10
K_ai_R10 = struct[0].K_ai_R10
T_r_R10 = struct[0].T_r_R10
Droop_R10 = struct[0].Droop_R10
T_m_R10 = struct[0].T_m_R10
S_n_R14 = struct[0].S_n_R14
H_R14 = struct[0].H_R14
Omega_b_R14 = struct[0].Omega_b_R14
T1d0_R14 = struct[0].T1d0_R14
T1q0_R14 = struct[0].T1q0_R14
X_d_R14 = struct[0].X_d_R14
X_q_R14 = struct[0].X_q_R14
X1d_R14 = struct[0].X1d_R14
X1q_R14 = struct[0].X1q_R14
D_R14 = struct[0].D_R14
R_a_R14 = struct[0].R_a_R14
K_delta_R14 = struct[0].K_delta_R14
K_a_R14 = struct[0].K_a_R14
K_ai_R14 = struct[0].K_ai_R14
T_r_R14 = struct[0].T_r_R14
Droop_R14 = struct[0].Droop_R14
T_m_R14 = struct[0].T_m_R14
K_sec_R10 = struct[0].K_sec_R10
K_sec_R14 = struct[0].K_sec_R14
# Inputs:
P_R01 = struct[0].P_R01
Q_R01 = struct[0].Q_R01
P_R02 = struct[0].P_R02
Q_R02 = struct[0].Q_R02
P_R03 = struct[0].P_R03
Q_R03 = struct[0].Q_R03
P_R04 = struct[0].P_R04
Q_R04 = struct[0].Q_R04
P_R05 = struct[0].P_R05
Q_R05 = struct[0].Q_R05
P_R06 = struct[0].P_R06
Q_R06 = struct[0].Q_R06
P_R07 = struct[0].P_R07
Q_R07 = struct[0].Q_R07
P_R08 = struct[0].P_R08
Q_R08 = struct[0].Q_R08
P_R09 = struct[0].P_R09
Q_R09 = struct[0].Q_R09
P_R10 = struct[0].P_R10
Q_R10 = struct[0].Q_R10
P_R11 = struct[0].P_R11
Q_R11 = struct[0].Q_R11
P_R12 = struct[0].P_R12
Q_R12 = struct[0].Q_R12
P_R13 = struct[0].P_R13
Q_R13 = struct[0].Q_R13
P_R14 = struct[0].P_R14
Q_R14 = struct[0].Q_R14
P_R15 = struct[0].P_R15
Q_R15 = struct[0].Q_R15
P_R16 = struct[0].P_R16
Q_R16 = struct[0].Q_R16
P_R17 = struct[0].P_R17
Q_R17 = struct[0].Q_R17
P_R18 = struct[0].P_R18
Q_R18 = struct[0].Q_R18
v_ref_R10 = struct[0].v_ref_R10
v_pss_R10 = struct[0].v_pss_R10
p_c_R10 = struct[0].p_c_R10
v_ref_R14 = struct[0].v_ref_R14
v_pss_R14 = struct[0].v_pss_R14
p_c_R14 = struct[0].p_c_R14
# Dynamical states:
delta_R10 = struct[0].x[0,0]
omega_R10 = struct[0].x[1,0]
e1q_R10 = struct[0].x[2,0]
e1d_R10 = struct[0].x[3,0]
v_c_R10 = struct[0].x[4,0]
xi_v_R10 = struct[0].x[5,0]
p_m_R10 = struct[0].x[6,0]
delta_R14 = struct[0].x[7,0]
omega_R14 = struct[0].x[8,0]
e1q_R14 = struct[0].x[9,0]
e1d_R14 = struct[0].x[10,0]
v_c_R14 = struct[0].x[11,0]
xi_v_R14 = struct[0].x[12,0]
p_m_R14 = struct[0].x[13,0]
xi_freq = struct[0].x[14,0]
# Algebraic states:
V_R01 = struct[0].y_run[0,0]
theta_R01 = struct[0].y_run[1,0]
V_R02 = struct[0].y_run[2,0]
theta_R02 = struct[0].y_run[3,0]
V_R03 = struct[0].y_run[4,0]
theta_R03 = struct[0].y_run[5,0]
V_R04 = struct[0].y_run[6,0]
theta_R04 = struct[0].y_run[7,0]
V_R05 = struct[0].y_run[8,0]
theta_R05 = struct[0].y_run[9,0]
V_R06 = struct[0].y_run[10,0]
theta_R06 = struct[0].y_run[11,0]
V_R07 = struct[0].y_run[12,0]
theta_R07 = struct[0].y_run[13,0]
V_R08 = struct[0].y_run[14,0]
theta_R08 = struct[0].y_run[15,0]
V_R09 = struct[0].y_run[16,0]
theta_R09 = struct[0].y_run[17,0]
V_R10 = struct[0].y_run[18,0]
theta_R10 = struct[0].y_run[19,0]
V_R11 = struct[0].y_run[20,0]
theta_R11 = struct[0].y_run[21,0]
V_R12 = struct[0].y_run[22,0]
theta_R12 = struct[0].y_run[23,0]
V_R13 = struct[0].y_run[24,0]
theta_R13 = struct[0].y_run[25,0]
V_R14 = struct[0].y_run[26,0]
theta_R14 = struct[0].y_run[27,0]
V_R15 = struct[0].y_run[28,0]
theta_R15 = struct[0].y_run[29,0]
V_R16 = struct[0].y_run[30,0]
theta_R16 = struct[0].y_run[31,0]
V_R17 = struct[0].y_run[32,0]
theta_R17 = struct[0].y_run[33,0]
V_R18 = struct[0].y_run[34,0]
theta_R18 = struct[0].y_run[35,0]
i_d_R10 = struct[0].y_run[36,0]
i_q_R10 = struct[0].y_run[37,0]
p_g_R10_1 = struct[0].y_run[38,0]
q_g_R10_1 = struct[0].y_run[39,0]
v_f_R10 = struct[0].y_run[40,0]
p_m_ref_R10 = struct[0].y_run[41,0]
i_d_R14 = struct[0].y_run[42,0]
i_q_R14 = struct[0].y_run[43,0]
p_g_R14_1 = struct[0].y_run[44,0]
q_g_R14_1 = struct[0].y_run[45,0]
v_f_R14 = struct[0].y_run[46,0]
p_m_ref_R14 = struct[0].y_run[47,0]
omega_coi = struct[0].y_run[48,0]
p_r_R10 = struct[0].y_run[49,0]
p_r_R14 = struct[0].y_run[50,0]
# Differential equations:
if mode == 2:
struct[0].f[0,0] = -K_delta_R10*delta_R10 + Omega_b_R10*(omega_R10 - omega_coi)
struct[0].f[1,0] = (-D_R10*(omega_R10 - omega_coi) - i_d_R10*(R_a_R10*i_d_R10 + V_R10*sin(delta_R10 - theta_R10)) - i_q_R10*(R_a_R10*i_q_R10 + V_R10*cos(delta_R10 - theta_R10)) + p_m_R10)/(2*H_R10)
struct[0].f[2,0] = (-e1q_R10 - i_d_R10*(-X1d_R10 + X_d_R10) + v_f_R10)/T1d0_R10
struct[0].f[3,0] = (-e1d_R10 + i_q_R10*(-X1q_R10 + X_q_R10))/T1q0_R10
struct[0].f[4,0] = (V_R10 - v_c_R10)/T_r_R10
struct[0].f[5,0] = -V_R10 + v_ref_R10
struct[0].f[6,0] = (-p_m_R10 + p_m_ref_R10)/T_m_R10
struct[0].f[7,0] = -K_delta_R14*delta_R14 + Omega_b_R14*(omega_R14 - omega_coi)
struct[0].f[8,0] = (-D_R14*(omega_R14 - omega_coi) - i_d_R14*(R_a_R14*i_d_R14 + V_R14*sin(delta_R14 - theta_R14)) - i_q_R14*(R_a_R14*i_q_R14 + V_R14*cos(delta_R14 - theta_R14)) + p_m_R14)/(2*H_R14)
struct[0].f[9,0] = (-e1q_R14 - i_d_R14*(-X1d_R14 + X_d_R14) + v_f_R14)/T1d0_R14
struct[0].f[10,0] = (-e1d_R14 + i_q_R14*(-X1q_R14 + X_q_R14))/T1q0_R14
struct[0].f[11,0] = (V_R14 - v_c_R14)/T_r_R14
struct[0].f[12,0] = -V_R14 + v_ref_R14
struct[0].f[13,0] = (-p_m_R14 + p_m_ref_R14)/T_m_R14
struct[0].f[14,0] = 1 - omega_coi
# Algebraic equations:
if mode == 3:
struct[0].g[0,0] = -P_R01/S_base + V_R01**2*g_R01_R02 + V_R01*V_R02*(-b_R01_R02*sin(theta_R01 - theta_R02) - g_R01_R02*cos(theta_R01 - theta_R02))
struct[0].g[1,0] = -Q_R01/S_base + V_R01**2*(-b_R01_R02 - bs_R01_R02/2) + V_R01*V_R02*(b_R01_R02*cos(theta_R01 - theta_R02) - g_R01_R02*sin(theta_R01 - theta_R02))
struct[0].g[2,0] = -P_R02/S_base + V_R01*V_R02*(b_R01_R02*sin(theta_R01 - theta_R02) - g_R01_R02*cos(theta_R01 - theta_R02)) + V_R02**2*(g_R01_R02 + g_R02_R03) + V_R02*V_R03*(-b_R02_R03*sin(theta_R02 - theta_R03) - g_R02_R03*cos(theta_R02 - theta_R03))
struct[0].g[3,0] = -Q_R02/S_base + V_R01*V_R02*(b_R01_R02*cos(theta_R01 - theta_R02) + g_R01_R02*sin(theta_R01 - theta_R02)) + V_R02**2*(-b_R01_R02 - b_R02_R03 - bs_R01_R02/2 - bs_R02_R03/2) + V_R02*V_R03*(b_R02_R03*cos(theta_R02 - theta_R03) - g_R02_R03*sin(theta_R02 - theta_R03))
struct[0].g[4,0] = -P_R03/S_base + V_R02*V_R03*(b_R02_R03*sin(theta_R02 - theta_R03) - g_R02_R03*cos(theta_R02 - theta_R03)) + V_R03**2*(g_R02_R03 + g_R03_R04 + g_R03_R11) + V_R03*V_R04*(-b_R03_R04*sin(theta_R03 - theta_R04) - g_R03_R04*cos(theta_R03 - theta_R04)) + V_R03*V_R11*(-b_R03_R11*sin(theta_R03 - theta_R11) - g_R03_R11*cos(theta_R03 - theta_R11))
struct[0].g[5,0] = -Q_R03/S_base + V_R02*V_R03*(b_R02_R03*cos(theta_R02 - theta_R03) + g_R02_R03*sin(theta_R02 - theta_R03)) + V_R03**2*(-b_R02_R03 - b_R03_R04 - b_R03_R11 - bs_R02_R03/2 - bs_R03_R04/2 - bs_R03_R11/2) + V_R03*V_R04*(b_R03_R04*cos(theta_R03 - theta_R04) - g_R03_R04*sin(theta_R03 - theta_R04)) + V_R03*V_R11*(b_R03_R11*cos(theta_R03 - theta_R11) - g_R03_R11*sin(theta_R03 - theta_R11))
struct[0].g[6,0] = -P_R04/S_base + V_R03*V_R04*(b_R03_R04*sin(theta_R03 - theta_R04) - g_R03_R04*cos(theta_R03 - theta_R04)) + V_R04**2*(g_R03_R04 + g_R04_R05 + g_R04_R12) + V_R04*V_R05*(-b_R04_R05*sin(theta_R04 - theta_R05) - g_R04_R05*cos(theta_R04 - theta_R05)) + V_R04*V_R12*(-b_R04_R12*sin(theta_R04 - theta_R12) - g_R04_R12*cos(theta_R04 - theta_R12))
struct[0].g[7,0] = -Q_R04/S_base + V_R03*V_R04*(b_R03_R04*cos(theta_R03 - theta_R04) + g_R03_R04*sin(theta_R03 - theta_R04)) + V_R04**2*(-b_R03_R04 - b_R04_R05 - b_R04_R12 - bs_R03_R04/2 - bs_R04_R05/2 - bs_R04_R12/2) + V_R04*V_R05*(b_R04_R05*cos(theta_R04 - theta_R05) - g_R04_R05*sin(theta_R04 - theta_R05)) + V_R04*V_R12*(b_R04_R12*cos(theta_R04 - theta_R12) - g_R04_R12*sin(theta_R04 - theta_R12))
struct[0].g[8,0] = -P_R05/S_base + V_R04*V_R05*(b_R04_R05*sin(theta_R04 - theta_R05) - g_R04_R05*cos(theta_R04 - theta_R05)) + V_R05**2*(g_R04_R05 + g_R05_R06) + V_R05*V_R06*(-b_R05_R06*sin(theta_R05 - theta_R06) - g_R05_R06*cos(theta_R05 - theta_R06))
struct[0].g[9,0] = -Q_R05/S_base + V_R04*V_R05*(b_R04_R05*cos(theta_R04 - theta_R05) + g_R04_R05*sin(theta_R04 - theta_R05)) + V_R05**2*(-b_R04_R05 - b_R05_R06 - bs_R04_R05/2 - bs_R05_R06/2) + V_R05*V_R06*(b_R05_R06*cos(theta_R05 - theta_R06) - g_R05_R06*sin(theta_R05 - theta_R06))
struct[0].g[10,0] = -P_R06/S_base + V_R05*V_R06*(b_R05_R06*sin(theta_R05 - theta_R06) - g_R05_R06*cos(theta_R05 - theta_R06)) + V_R06**2*(g_R05_R06 + g_R06_R07 + g_R06_R16) + V_R06*V_R07*(-b_R06_R07*sin(theta_R06 - theta_R07) - g_R06_R07*cos(theta_R06 - theta_R07)) + V_R06*V_R16*(-b_R06_R16*sin(theta_R06 - theta_R16) - g_R06_R16*cos(theta_R06 - theta_R16))
struct[0].g[11,0] = -Q_R06/S_base + V_R05*V_R06*(b_R05_R06*cos(theta_R05 - theta_R06) + g_R05_R06*sin(theta_R05 - theta_R06)) + V_R06**2*(-b_R05_R06 - b_R06_R07 - b_R06_R16 - bs_R05_R06/2 - bs_R06_R07/2 - bs_R06_R16/2) + V_R06*V_R07*(b_R06_R07*cos(theta_R06 - theta_R07) - g_R06_R07*sin(theta_R06 - theta_R07)) + V_R06*V_R16*(b_R06_R16*cos(theta_R06 - theta_R16) - g_R06_R16*sin(theta_R06 - theta_R16))
struct[0].g[12,0] = -P_R07/S_base + V_R06*V_R07*(b_R06_R07*sin(theta_R06 - theta_R07) - g_R06_R07*cos(theta_R06 - theta_R07)) + V_R07**2*(g_R06_R07 + g_R07_R08) + V_R07*V_R08*(-b_R07_R08*sin(theta_R07 - theta_R08) - g_R07_R08*cos(theta_R07 - theta_R08))
struct[0].g[13,0] = -Q_R07/S_base + V_R06*V_R07*(b_R06_R07*cos(theta_R06 - theta_R07) + g_R06_R07*sin(theta_R06 - theta_R07)) + V_R07**2*(-b_R06_R07 - b_R07_R08 - bs_R06_R07/2 - bs_R07_R08/2) + V_R07*V_R08*(b_R07_R08*cos(theta_R07 - theta_R08) - g_R07_R08*sin(theta_R07 - theta_R08))
struct[0].g[14,0] = -P_R08/S_base + V_R07*V_R08*(b_R07_R08*sin(theta_R07 - theta_R08) - g_R07_R08*cos(theta_R07 - theta_R08)) + V_R08**2*(g_R07_R08 + g_R08_R09) + V_R08*V_R09*(-b_R08_R09*sin(theta_R08 - theta_R09) - g_R08_R09*cos(theta_R08 - theta_R09))
struct[0].g[15,0] = -Q_R08/S_base + V_R07*V_R08*(b_R07_R08*cos(theta_R07 - theta_R08) + g_R07_R08*sin(theta_R07 - theta_R08)) + V_R08**2*(-b_R07_R08 - b_R08_R09 - bs_R07_R08/2 - bs_R08_R09/2) + V_R08*V_R09*(b_R08_R09*cos(theta_R08 - theta_R09) - g_R08_R09*sin(theta_R08 - theta_R09))
struct[0].g[16,0] = -P_R09/S_base + V_R08*V_R09*(b_R08_R09*sin(theta_R08 - theta_R09) - g_R08_R09*cos(theta_R08 - theta_R09)) + V_R09**2*(g_R08_R09 + g_R09_R10 + g_R09_R17) + V_R09*V_R10*(-b_R09_R10*sin(theta_R09 - theta_R10) - g_R09_R10*cos(theta_R09 - theta_R10)) + V_R09*V_R17*(-b_R09_R17*sin(theta_R09 - theta_R17) - g_R09_R17*cos(theta_R09 - theta_R17))
struct[0].g[17,0] = -Q_R09/S_base + V_R08*V_R09*(b_R08_R09*cos(theta_R08 - theta_R09) + g_R08_R09*sin(theta_R08 - theta_R09)) + V_R09**2*(-b_R08_R09 - b_R09_R10 - b_R09_R17 - bs_R08_R09/2 - bs_R09_R10/2 - bs_R09_R17/2) + V_R09*V_R10*(b_R09_R10*cos(theta_R09 - theta_R10) - g_R09_R10*sin(theta_R09 - theta_R10)) + V_R09*V_R17*(b_R09_R17*cos(theta_R09 - theta_R17) - g_R09_R17*sin(theta_R09 - theta_R17))
struct[0].g[18,0] = -P_R10/S_base + V_R09*V_R10*(b_R09_R10*sin(theta_R09 - theta_R10) - g_R09_R10*cos(theta_R09 - theta_R10)) + V_R10**2*(g_R09_R10 + g_R10_R18) + V_R10*V_R18*(-b_R10_R18*sin(theta_R10 - theta_R18) - g_R10_R18*cos(theta_R10 - theta_R18)) - S_n_R10*p_g_R10_1/S_base
struct[0].g[19,0] = -Q_R10/S_base + V_R09*V_R10*(b_R09_R10*cos(theta_R09 - theta_R10) + g_R09_R10*sin(theta_R09 - theta_R10)) + V_R10**2*(-b_R09_R10 - b_R10_R18 - bs_R09_R10/2 - bs_R10_R18/2) + V_R10*V_R18*(b_R10_R18*cos(theta_R10 - theta_R18) - g_R10_R18*sin(theta_R10 - theta_R18)) - S_n_R10*q_g_R10_1/S_base
struct[0].g[20,0] = -P_R11/S_base + V_R03*V_R11*(b_R03_R11*sin(theta_R03 - theta_R11) - g_R03_R11*cos(theta_R03 - theta_R11)) + V_R11**2*g_R03_R11
struct[0].g[21,0] = -Q_R11/S_base + V_R03*V_R11*(b_R03_R11*cos(theta_R03 - theta_R11) + g_R03_R11*sin(theta_R03 - theta_R11)) + V_R11**2*(-b_R03_R11 - bs_R03_R11/2)
struct[0].g[22,0] = -P_R12/S_base + V_R04*V_R12*(b_R04_R12*sin(theta_R04 - theta_R12) - g_R04_R12*cos(theta_R04 - theta_R12)) + V_R12**2*(g_R04_R12 + g_R12_R13) + V_R12*V_R13*(-b_R12_R13*sin(theta_R12 - theta_R13) - g_R12_R13*cos(theta_R12 - theta_R13))
struct[0].g[23,0] = -Q_R12/S_base + V_R04*V_R12*(b_R04_R12*cos(theta_R04 - theta_R12) + g_R04_R12*sin(theta_R04 - theta_R12)) + V_R12**2*(-b_R04_R12 - b_R12_R13 - bs_R04_R12/2 - bs_R12_R13/2) + V_R12*V_R13*(b_R12_R13*cos(theta_R12 - theta_R13) - g_R12_R13*sin(theta_R12 - theta_R13))
struct[0].g[24,0] = -P_R13/S_base + V_R12*V_R13*(b_R12_R13*sin(theta_R12 - theta_R13) - g_R12_R13*cos(theta_R12 - theta_R13)) + V_R13**2*(g_R12_R13 + g_R13_R14) + V_R13*V_R14*(-b_R13_R14*sin(theta_R13 - theta_R14) - g_R13_R14*cos(theta_R13 - theta_R14))
struct[0].g[25,0] = -Q_R13/S_base + V_R12*V_R13*(b_R12_R13*cos(theta_R12 - theta_R13) + g_R12_R13*sin(theta_R12 - theta_R13)) + V_R13**2*(-b_R12_R13 - b_R13_R14 - bs_R12_R13/2 - bs_R13_R14/2) + V_R13*V_R14*(b_R13_R14*cos(theta_R13 - theta_R14) - g_R13_R14*sin(theta_R13 - theta_R14))
struct[0].g[26,0] = -P_R14/S_base + V_R13*V_R14*(b_R13_R14*sin(theta_R13 - theta_R14) - g_R13_R14*cos(theta_R13 - theta_R14)) + V_R14**2*(g_R13_R14 + g_R14_R15) + V_R14*V_R15*(-b_R14_R15*sin(theta_R14 - theta_R15) - g_R14_R15*cos(theta_R14 - theta_R15)) - S_n_R14*p_g_R14_1/S_base
struct[0].g[27,0] = -Q_R14/S_base + V_R13*V_R14*(b_R13_R14*cos(theta_R13 - theta_R14) + g_R13_R14*sin(theta_R13 - theta_R14)) + V_R14**2*(-b_R13_R14 - b_R14_R15 - bs_R13_R14/2 - bs_R14_R15/2) + V_R14*V_R15*(b_R14_R15*cos(theta_R14 - theta_R15) - g_R14_R15*sin(theta_R14 - theta_R15)) - S_n_R14*q_g_R14_1/S_base
struct[0].g[28,0] = -P_R15/S_base + V_R14*V_R15*(b_R14_R15*sin(theta_R14 - theta_R15) - g_R14_R15*cos(theta_R14 - theta_R15)) + V_R15**2*g_R14_R15
struct[0].g[29,0] = -Q_R15/S_base + V_R14*V_R15*(b_R14_R15*cos(theta_R14 - theta_R15) + g_R14_R15*sin(theta_R14 - theta_R15)) + V_R15**2*(-b_R14_R15 - bs_R14_R15/2)
struct[0].g[30,0] = -P_R16/S_base + V_R06*V_R16*(b_R06_R16*sin(theta_R06 - theta_R16) - g_R06_R16*cos(theta_R06 - theta_R16)) + V_R16**2*g_R06_R16
struct[0].g[31,0] = -Q_R16/S_base + V_R06*V_R16*(b_R06_R16*cos(theta_R06 - theta_R16) + g_R06_R16*sin(theta_R06 - theta_R16)) + V_R16**2*(-b_R06_R16 - bs_R06_R16/2)
struct[0].g[32,0] = -P_R17/S_base + V_R09*V_R17*(b_R09_R17*sin(theta_R09 - theta_R17) - g_R09_R17*cos(theta_R09 - theta_R17)) + V_R17**2*g_R09_R17
struct[0].g[33,0] = -Q_R17/S_base + V_R09*V_R17*(b_R09_R17*cos(theta_R09 - theta_R17) + g_R09_R17*sin(theta_R09 - theta_R17)) + V_R17**2*(-b_R09_R17 - bs_R09_R17/2)
struct[0].g[34,0] = -P_R18/S_base + V_R10*V_R18*(b_R10_R18*sin(theta_R10 - theta_R18) - g_R10_R18*cos(theta_R10 - theta_R18)) + V_R18**2*g_R10_R18
struct[0].g[35,0] = -Q_R18/S_base + V_R10*V_R18*(b_R10_R18*cos(theta_R10 - theta_R18) + g_R10_R18*sin(theta_R10 - theta_R18)) + V_R18**2*(-b_R10_R18 - bs_R10_R18/2)
struct[0].g[36,0] = R_a_R10*i_q_R10 + V_R10*cos(delta_R10 - theta_R10) + X1d_R10*i_d_R10 - e1q_R10
struct[0].g[37,0] = R_a_R10*i_d_R10 + V_R10*sin(delta_R10 - theta_R10) - X1q_R10*i_q_R10 - e1d_R10
struct[0].g[38,0] = V_R10*i_d_R10*sin(delta_R10 - theta_R10) + V_R10*i_q_R10*cos(delta_R10 - theta_R10) - p_g_R10_1
struct[0].g[39,0] = V_R10*i_d_R10*cos(delta_R10 - theta_R10) - V_R10*i_q_R10*sin(delta_R10 - theta_R10) - q_g_R10_1
struct[0].g[40,0] = K_a_R10*(-v_c_R10 + v_pss_R10 + v_ref_R10) + K_ai_R10*xi_v_R10 - v_f_R10
struct[0].g[41,0] = p_c_R10 - p_m_ref_R10 + p_r_R10 - (omega_R10 - 1)/Droop_R10
struct[0].g[42,0] = R_a_R14*i_q_R14 + V_R14*cos(delta_R14 - theta_R14) + X1d_R14*i_d_R14 - e1q_R14
struct[0].g[43,0] = R_a_R14*i_d_R14 + V_R14*sin(delta_R14 - theta_R14) - X1q_R14*i_q_R14 - e1d_R14
struct[0].g[44,0] = V_R14*i_d_R14*sin(delta_R14 - theta_R14) + V_R14*i_q_R14*cos(delta_R14 - theta_R14) - p_g_R14_1
struct[0].g[45,0] = V_R14*i_d_R14*cos(delta_R14 - theta_R14) - V_R14*i_q_R14*sin(delta_R14 - theta_R14) - q_g_R14_1
struct[0].g[46,0] = K_a_R14*(-v_c_R14 + v_pss_R14 + v_ref_R14) + K_ai_R14*xi_v_R14 - v_f_R14
struct[0].g[47,0] = p_c_R14 - p_m_ref_R14 + p_r_R14 - (omega_R14 - 1)/Droop_R14
struct[0].g[48,0] = omega_R10/2 + omega_R14/2 - omega_coi
struct[0].g[49,0] = K_sec_R10*xi_freq/2 - p_r_R10
struct[0].g[50,0] = K_sec_R14*xi_freq/2 - p_r_R14
# Outputs:
if mode == 3:
struct[0].h[0,0] = V_R01
struct[0].h[1,0] = V_R02
struct[0].h[2,0] = V_R03
struct[0].h[3,0] = V_R04
struct[0].h[4,0] = V_R05
struct[0].h[5,0] = V_R06
struct[0].h[6,0] = V_R07
struct[0].h[7,0] = V_R08
struct[0].h[8,0] = V_R09
struct[0].h[9,0] = V_R10
struct[0].h[10,0] = V_R11
struct[0].h[11,0] = V_R12
struct[0].h[12,0] = V_R13
struct[0].h[13,0] = V_R14
struct[0].h[14,0] = V_R15
struct[0].h[15,0] = V_R16
struct[0].h[16,0] = V_R17
struct[0].h[17,0] = V_R18
if mode == 10:
struct[0].Fx[0,0] = -K_delta_R10
struct[0].Fx[0,1] = Omega_b_R10
struct[0].Fx[1,0] = (-V_R10*i_d_R10*cos(delta_R10 - theta_R10) + V_R10*i_q_R10*sin(delta_R10 - theta_R10))/(2*H_R10)
struct[0].Fx[1,1] = -D_R10/(2*H_R10)
struct[0].Fx[1,6] = 1/(2*H_R10)
struct[0].Fx[2,2] = -1/T1d0_R10
struct[0].Fx[3,3] = -1/T1q0_R10
struct[0].Fx[4,4] = -1/T_r_R10
struct[0].Fx[6,6] = -1/T_m_R10
struct[0].Fx[7,7] = -K_delta_R14
struct[0].Fx[7,8] = Omega_b_R14
struct[0].Fx[8,7] = (-V_R14*i_d_R14*cos(delta_R14 - theta_R14) + V_R14*i_q_R14*sin(delta_R14 - theta_R14))/(2*H_R14)
struct[0].Fx[8,8] = -D_R14/(2*H_R14)
struct[0].Fx[8,13] = 1/(2*H_R14)
struct[0].Fx[9,9] = -1/T1d0_R14
struct[0].Fx[10,10] = -1/T1q0_R14
struct[0].Fx[11,11] = -1/T_r_R14
struct[0].Fx[13,13] = -1/T_m_R14
if mode == 11:
struct[0].Fy[0,48] = -Omega_b_R10
struct[0].Fy[1,18] = (-i_d_R10*sin(delta_R10 - theta_R10) - i_q_R10*cos(delta_R10 - theta_R10))/(2*H_R10)
struct[0].Fy[1,19] = (V_R10*i_d_R10*cos(delta_R10 - theta_R10) - V_R10*i_q_R10*sin(delta_R10 - theta_R10))/(2*H_R10)
struct[0].Fy[1,36] = (-2*R_a_R10*i_d_R10 - V_R10*sin(delta_R10 - theta_R10))/(2*H_R10)
struct[0].Fy[1,37] = (-2*R_a_R10*i_q_R10 - V_R10*cos(delta_R10 - theta_R10))/(2*H_R10)
struct[0].Fy[1,48] = D_R10/(2*H_R10)
struct[0].Fy[2,36] = (X1d_R10 - X_d_R10)/T1d0_R10
struct[0].Fy[2,40] = 1/T1d0_R10
struct[0].Fy[3,37] = (-X1q_R10 + X_q_R10)/T1q0_R10
struct[0].Fy[4,18] = 1/T_r_R10
struct[0].Fy[5,18] = -1
struct[0].Fy[6,41] = 1/T_m_R10
struct[0].Fy[7,48] = -Omega_b_R14
struct[0].Fy[8,26] = (-i_d_R14*sin(delta_R14 - theta_R14) - i_q_R14*cos(delta_R14 - theta_R14))/(2*H_R14)
struct[0].Fy[8,27] = (V_R14*i_d_R14*cos(delta_R14 - theta_R14) - V_R14*i_q_R14*sin(delta_R14 - theta_R14))/(2*H_R14)
struct[0].Fy[8,42] = (-2*R_a_R14*i_d_R14 - V_R14*sin(delta_R14 - theta_R14))/(2*H_R14)
struct[0].Fy[8,43] = (-2*R_a_R14*i_q_R14 - V_R14*cos(delta_R14 - theta_R14))/(2*H_R14)
struct[0].Fy[8,48] = D_R14/(2*H_R14)
struct[0].Fy[9,42] = (X1d_R14 - X_d_R14)/T1d0_R14
struct[0].Fy[9,46] = 1/T1d0_R14
struct[0].Fy[10,43] = (-X1q_R14 + X_q_R14)/T1q0_R14
struct[0].Fy[11,26] = 1/T_r_R14
struct[0].Fy[12,26] = -1
struct[0].Fy[13,47] = 1/T_m_R14
struct[0].Fy[14,48] = -1
struct[0].Gy[0,0] = 2*V_R01*g_R01_R02 + V_R02*(-b_R01_R02*sin(theta_R01 - theta_R02) - g_R01_R02*cos(theta_R01 - theta_R02))
struct[0].Gy[0,1] = V_R01*V_R02*(-b_R01_R02*cos(theta_R01 - theta_R02) + g_R01_R02*sin(theta_R01 - theta_R02))
struct[0].Gy[0,2] = V_R01*(-b_R01_R02*sin(theta_R01 - theta_R02) - g_R01_R02*cos(theta_R01 - theta_R02))
struct[0].Gy[0,3] = V_R01*V_R02*(b_R01_R02*cos(theta_R01 - theta_R02) - g_R01_R02*sin(theta_R01 - theta_R02))
struct[0].Gy[1,0] = 2*V_R01*(-b_R01_R02 - bs_R01_R02/2) + V_R02*(b_R01_R02*cos(theta_R01 - theta_R02) - g_R01_R02*sin(theta_R01 - theta_R02))
struct[0].Gy[1,1] = V_R01*V_R02*(-b_R01_R02*sin(theta_R01 - theta_R02) - g_R01_R02*cos(theta_R01 - theta_R02))
struct[0].Gy[1,2] = V_R01*(b_R01_R02*cos(theta_R01 - theta_R02) - g_R01_R02*sin(theta_R01 - theta_R02))
struct[0].Gy[1,3] = V_R01*V_R02*(b_R01_R02*sin(theta_R01 - theta_R02) + g_R01_R02*cos(theta_R01 - theta_R02))
struct[0].Gy[2,0] = V_R02*(b_R01_R02*sin(theta_R01 - theta_R02) - g_R01_R02*cos(theta_R01 - theta_R02))
struct[0].Gy[2,1] = V_R01*V_R02*(b_R01_R02*cos(theta_R01 - theta_R02) + g_R01_R02*sin(theta_R01 - theta_R02))
struct[0].Gy[2,2] = V_R01*(b_R01_R02*sin(theta_R01 - theta_R02) - g_R01_R02*cos(theta_R01 - theta_R02)) + 2*V_R02*(g_R01_R02 + g_R02_R03) + V_R03*(-b_R02_R03*sin(theta_R02 - theta_R03) - g_R02_R03*cos(theta_R02 - theta_R03))
struct[0].Gy[2,3] = V_R01*V_R02*(-b_R01_R02*cos(theta_R01 - theta_R02) - g_R01_R02*sin(theta_R01 - theta_R02)) + V_R02*V_R03*(-b_R02_R03*cos(theta_R02 - theta_R03) + g_R02_R03*sin(theta_R02 - theta_R03))
struct[0].Gy[2,4] = V_R02*(-b_R02_R03*sin(theta_R02 - theta_R03) - g_R02_R03*cos(theta_R02 - theta_R03))
struct[0].Gy[2,5] = V_R02*V_R03*(b_R02_R03*cos(theta_R02 - theta_R03) - g_R02_R03*sin(theta_R02 - theta_R03))
struct[0].Gy[3,0] = V_R02*(b_R01_R02*cos(theta_R01 - theta_R02) + g_R01_R02*sin(theta_R01 - theta_R02))
struct[0].Gy[3,1] = V_R01*V_R02*(-b_R01_R02*sin(theta_R01 - theta_R02) + g_R01_R02*cos(theta_R01 - theta_R02))
struct[0].Gy[3,2] = V_R01*(b_R01_R02*cos(theta_R01 - theta_R02) + g_R01_R02*sin(theta_R01 - theta_R02)) + 2*V_R02*(-b_R01_R02 - b_R02_R03 - bs_R01_R02/2 - bs_R02_R03/2) + V_R03*(b_R02_R03*cos(theta_R02 - theta_R03) - g_R02_R03*sin(theta_R02 - theta_R03))
struct[0].Gy[3,3] = V_R01*V_R02*(b_R01_R02*sin(theta_R01 - theta_R02) - g_R01_R02*cos(theta_R01 - theta_R02)) + V_R02*V_R03*(-b_R02_R03*sin(theta_R02 - theta_R03) - g_R02_R03*cos(theta_R02 - theta_R03))
struct[0].Gy[3,4] = V_R02*(b_R02_R03*cos(theta_R02 - theta_R03) - g_R02_R03*sin(theta_R02 - theta_R03))
struct[0].Gy[3,5] = V_R02*V_R03*(b_R02_R03*sin(theta_R02 - theta_R03) + g_R02_R03*cos(theta_R02 - theta_R03))
struct[0].Gy[4,2] = V_R03*(b_R02_R03*sin(theta_R02 - theta_R03) - g_R02_R03*cos(theta_R02 - theta_R03))
struct[0].Gy[4,3] = V_R02*V_R03*(b_R02_R03*cos(theta_R02 - theta_R03) + g_R02_R03*sin(theta_R02 - theta_R03))
struct[0].Gy[4,4] = V_R02*(b_R02_R03*sin(theta_R02 - theta_R03) - g_R02_R03*cos(theta_R02 - theta_R03)) + 2*V_R03*(g_R02_R03 + g_R03_R04 + g_R03_R11) + V_R04*(-b_R03_R04*sin(theta_R03 - theta_R04) - g_R03_R04*cos(theta_R03 - theta_R04)) + V_R11*(-b_R03_R11*sin(theta_R03 - theta_R11) - g_R03_R11*cos(theta_R03 - theta_R11))
struct[0].Gy[4,5] = V_R02*V_R03*(-b_R02_R03*cos(theta_R02 - theta_R03) - g_R02_R03*sin(theta_R02 - theta_R03)) + V_R03*V_R04*(-b_R03_R04*cos(theta_R03 - theta_R04) + g_R03_R04*sin(theta_R03 - theta_R04)) + V_R03*V_R11*(-b_R03_R11*cos(theta_R03 - theta_R11) + g_R03_R11*sin(theta_R03 - theta_R11))
struct[0].Gy[4,6] = V_R03*(-b_R03_R04*sin(theta_R03 - theta_R04) - g_R03_R04*cos(theta_R03 - theta_R04))
struct[0].Gy[4,7] = V_R03*V_R04*(b_R03_R04*cos(theta_R03 - theta_R04) - g_R03_R04*sin(theta_R03 - theta_R04))
struct[0].Gy[4,20] = V_R03*(-b_R03_R11*sin(theta_R03 - theta_R11) - g_R03_R11*cos(theta_R03 - theta_R11))
struct[0].Gy[4,21] = V_R03*V_R11*(b_R03_R11*cos(theta_R03 - theta_R11) - g_R03_R11*sin(theta_R03 - theta_R11))
struct[0].Gy[5,2] = V_R03*(b_R02_R03*cos(theta_R02 - theta_R03) + g_R02_R03*sin(theta_R02 - theta_R03))
struct[0].Gy[5,3] = V_R02*V_R03*(-b_R02_R03*sin(theta_R02 - theta_R03) + g_R02_R03*cos(theta_R02 - theta_R03))
struct[0].Gy[5,4] = V_R02*(b_R02_R03*cos(theta_R02 - theta_R03) + g_R02_R03*sin(theta_R02 - theta_R03)) + 2*V_R03*(-b_R02_R03 - b_R03_R04 - b_R03_R11 - bs_R02_R03/2 - bs_R03_R04/2 - bs_R03_R11/2) + V_R04*(b_R03_R04*cos(theta_R03 - theta_R04) - g_R03_R04*sin(theta_R03 - theta_R04)) + V_R11*(b_R03_R11*cos(theta_R03 - theta_R11) - g_R03_R11*sin(theta_R03 - theta_R11))
struct[0].Gy[5,5] = V_R02*V_R03*(b_R02_R03*sin(theta_R02 - theta_R03) - g_R02_R03*cos(theta_R02 - theta_R03)) + V_R03*V_R04*(-b_R03_R04*sin(theta_R03 - theta_R04) - g_R03_R04*cos(theta_R03 - theta_R04)) + V_R03*V_R11*(-b_R03_R11*sin(theta_R03 - theta_R11) - g_R03_R11*cos(theta_R03 - theta_R11))
struct[0].Gy[5,6] = V_R03*(b_R03_R04*cos(theta_R03 - theta_R04) - g_R03_R04*sin(theta_R03 - theta_R04))
struct[0].Gy[5,7] = V_R03*V_R04*(b_R03_R04*sin(theta_R03 - theta_R04) + g_R03_R04*cos(theta_R03 - theta_R04))
struct[0].Gy[5,20] = V_R03*(b_R03_R11*cos(theta_R03 - theta_R11) - g_R03_R11*sin(theta_R03 - theta_R11))
struct[0].Gy[5,21] = V_R03*V_R11*(b_R03_R11*sin(theta_R03 - theta_R11) + g_R03_R11*cos(theta_R03 - theta_R11))
struct[0].Gy[6,4] = V_R04*(b_R03_R04*sin(theta_R03 - theta_R04) - g_R03_R04*cos(theta_R03 - theta_R04))
struct[0].Gy[6,5] = V_R03*V_R04*(b_R03_R04*cos(theta_R03 - theta_R04) + g_R03_R04*sin(theta_R03 - theta_R04))
struct[0].Gy[6,6] = V_R03*(b_R03_R04*sin(theta_R03 - theta_R04) - g_R03_R04*cos(theta_R03 - theta_R04)) + 2*V_R04*(g_R03_R04 + g_R04_R05 + g_R04_R12) + V_R05*(-b_R04_R05*sin(theta_R04 - theta_R05) - g_R04_R05*cos(theta_R04 - theta_R05)) + V_R12*(-b_R04_R12*sin(theta_R04 - theta_R12) - g_R04_R12*cos(theta_R04 - theta_R12))
struct[0].Gy[6,7] = V_R03*V_R04*(-b_R03_R04*cos(theta_R03 - theta_R04) - g_R03_R04*sin(theta_R03 - theta_R04)) + V_R04*V_R05*(-b_R04_R05*cos(theta_R04 - theta_R05) + g_R04_R05*sin(theta_R04 - theta_R05)) + V_R04*V_R12*(-b_R04_R12*cos(theta_R04 - theta_R12) + g_R04_R12*sin(theta_R04 - theta_R12))
struct[0].Gy[6,8] = V_R04*(-b_R04_R05*sin(theta_R04 - theta_R05) - g_R04_R05*cos(theta_R04 - theta_R05))
struct[0].Gy[6,9] = V_R04*V_R05*(b_R04_R05*cos(theta_R04 - theta_R05) - g_R04_R05*sin(theta_R04 - theta_R05))
struct[0].Gy[6,22] = V_R04*(-b_R04_R12*sin(theta_R04 - theta_R12) - g_R04_R12*cos(theta_R04 - theta_R12))
struct[0].Gy[6,23] = V_R04*V_R12*(b_R04_R12*cos(theta_R04 - theta_R12) - g_R04_R12*sin(theta_R04 - theta_R12))
struct[0].Gy[7,4] = V_R04*(b_R03_R04*cos(theta_R03 - theta_R04) + g_R03_R04*sin(theta_R03 - theta_R04))
struct[0].Gy[7,5] = V_R03*V_R04*(-b_R03_R04*sin(theta_R03 - theta_R04) + g_R03_R04*cos(theta_R03 - theta_R04))
struct[0].Gy[7,6] = V_R03*(b_R03_R04*cos(theta_R03 - theta_R04) + g_R03_R04*sin(theta_R03 - theta_R04)) + 2*V_R04*(-b_R03_R04 - b_R04_R05 - b_R04_R12 - bs_R03_R04/2 - bs_R04_R05/2 - bs_R04_R12/2) + V_R05*(b_R04_R05*cos(theta_R04 - theta_R05) - g_R04_R05*sin(theta_R04 - theta_R05)) + V_R12*(b_R04_R12*cos(theta_R04 - theta_R12) - g_R04_R12*sin(theta_R04 - theta_R12))
struct[0].Gy[7,7] = V_R03*V_R04*(b_R03_R04*sin(theta_R03 - theta_R04) - g_R03_R04*cos(theta_R03 - theta_R04)) + V_R04*V_R05*(-b_R04_R05*sin(theta_R04 - theta_R05) - g_R04_R05*cos(theta_R04 - theta_R05)) + V_R04*V_R12*(-b_R04_R12*sin(theta_R04 - theta_R12) - g_R04_R12*cos(theta_R04 - theta_R12))
struct[0].Gy[7,8] = V_R04*(b_R04_R05*cos(theta_R04 - theta_R05) - g_R04_R05*sin(theta_R04 - theta_R05))
struct[0].Gy[7,9] = V_R04*V_R05*(b_R04_R05*sin(theta_R04 - theta_R05) + g_R04_R05*cos(theta_R04 - theta_R05))
struct[0].Gy[7,22] = V_R04*(b_R04_R12*cos(theta_R04 - theta_R12) - g_R04_R12*sin(theta_R04 - theta_R12))
struct[0].Gy[7,23] = V_R04*V_R12*(b_R04_R12*sin(theta_R04 - theta_R12) + g_R04_R12*cos(theta_R04 - theta_R12))
struct[0].Gy[8,6] = V_R05*(b_R04_R05*sin(theta_R04 - theta_R05) - g_R04_R05*cos(theta_R04 - theta_R05))
struct[0].Gy[8,7] = V_R04*V_R05*(b_R04_R05*cos(theta_R04 - theta_R05) + g_R04_R05*sin(theta_R04 - theta_R05))
struct[0].Gy[8,8] = V_R04*(b_R04_R05*sin(theta_R04 - theta_R05) - g_R04_R05*cos(theta_R04 - theta_R05)) + 2*V_R05*(g_R04_R05 + g_R05_R06) + V_R06*(-b_R05_R06*sin(theta_R05 - theta_R06) - g_R05_R06*cos(theta_R05 - theta_R06))
struct[0].Gy[8,9] = V_R04*V_R05*(-b_R04_R05*cos(theta_R04 - theta_R05) - g_R04_R05*sin(theta_R04 - theta_R05)) + V_R05*V_R06*(-b_R05_R06*cos(theta_R05 - theta_R06) + g_R05_R06*sin(theta_R05 - theta_R06))
struct[0].Gy[8,10] = V_R05*(-b_R05_R06*sin(theta_R05 - theta_R06) - g_R05_R06*cos(theta_R05 - theta_R06))
struct[0].Gy[8,11] = V_R05*V_R06*(b_R05_R06*cos(theta_R05 - theta_R06) - g_R05_R06*sin(theta_R05 - theta_R06))
struct[0].Gy[9,6] = V_R05*(b_R04_R05*cos(theta_R04 - theta_R05) + g_R04_R05*sin(theta_R04 - theta_R05))
struct[0].Gy[9,7] = V_R04*V_R05*(-b_R04_R05*sin(theta_R04 - theta_R05) + g_R04_R05*cos(theta_R04 - theta_R05))
struct[0].Gy[9,8] = V_R04*(b_R04_R05*cos(theta_R04 - theta_R05) + g_R04_R05*sin(theta_R04 - theta_R05)) + 2*V_R05*(-b_R04_R05 - b_R05_R06 - bs_R04_R05/2 - bs_R05_R06/2) + V_R06*(b_R05_R06*cos(theta_R05 - theta_R06) - g_R05_R06*sin(theta_R05 - theta_R06))
struct[0].Gy[9,9] = V_R04*V_R05*(b_R04_R05*sin(theta_R04 - theta_R05) - g_R04_R05*cos(theta_R04 - theta_R05)) + V_R05*V_R06*(-b_R05_R06*sin(theta_R05 - theta_R06) - g_R05_R06*cos(theta_R05 - theta_R06))
struct[0].Gy[9,10] = V_R05*(b_R05_R06*cos(theta_R05 - theta_R06) - g_R05_R06*sin(theta_R05 - theta_R06))
struct[0].Gy[9,11] = V_R05*V_R06*(b_R05_R06*sin(theta_R05 - theta_R06) + g_R05_R06*cos(theta_R05 - theta_R06))
struct[0].Gy[10,8] = V_R06*(b_R05_R06*sin(theta_R05 - theta_R06) - g_R05_R06*cos(theta_R05 - theta_R06))
struct[0].Gy[10,9] = V_R05*V_R06*(b_R05_R06*cos(theta_R05 - theta_R06) + g_R05_R06*sin(theta_R05 - theta_R06))
struct[0].Gy[10,10] = V_R05*(b_R05_R06*sin(theta_R05 - theta_R06) - g_R05_R06*cos(theta_R05 - theta_R06)) + 2*V_R06*(g_R05_R06 + g_R06_R07 + g_R06_R16) + V_R07*(-b_R06_R07*sin(theta_R06 - theta_R07) - g_R06_R07*cos(theta_R06 - theta_R07)) + V_R16*(-b_R06_R16*sin(theta_R06 - theta_R16) - g_R06_R16*cos(theta_R06 - theta_R16))
struct[0].Gy[10,11] = V_R05*V_R06*(-b_R05_R06*cos(theta_R05 - theta_R06) - g_R05_R06*sin(theta_R05 - theta_R06)) + V_R06*V_R07*(-b_R06_R07*cos(theta_R06 - theta_R07) + g_R06_R07*sin(theta_R06 - theta_R07)) + V_R06*V_R16*(-b_R06_R16*cos(theta_R06 - theta_R16) + g_R06_R16*sin(theta_R06 - theta_R16))
struct[0].Gy[10,12] = V_R06*(-b_R06_R07*sin(theta_R06 - theta_R07) - g_R06_R07*cos(theta_R06 - theta_R07))
struct[0].Gy[10,13] = V_R06*V_R07*(b_R06_R07*cos(theta_R06 - theta_R07) - g_R06_R07*sin(theta_R06 - theta_R07))
struct[0].Gy[10,30] = V_R06*(-b_R06_R16*sin(theta_R06 - theta_R16) - g_R06_R16*cos(theta_R06 - theta_R16))
struct[0].Gy[10,31] = V_R06*V_R16*(b_R06_R16*cos(theta_R06 - theta_R16) - g_R06_R16*sin(theta_R06 - theta_R16))
struct[0].Gy[11,8] = V_R06*(b_R05_R06*cos(theta_R05 - theta_R06) + g_R05_R06*sin(theta_R05 - theta_R06))
struct[0].Gy[11,9] = V_R05*V_R06*(-b_R05_R06*sin(theta_R05 - theta_R06) + g_R05_R06*cos(theta_R05 - theta_R06))
struct[0].Gy[11,10] = V_R05*(b_R05_R06*cos(theta_R05 - theta_R06) + g_R05_R06*sin(theta_R05 - theta_R06)) + 2*V_R06*(-b_R05_R06 - b_R06_R07 - b_R06_R16 - bs_R05_R06/2 - bs_R06_R07/2 - bs_R06_R16/2) + V_R07*(b_R06_R07*cos(theta_R06 - theta_R07) - g_R06_R07*sin(theta_R06 - theta_R07)) + V_R16*(b_R06_R16*cos(theta_R06 - theta_R16) - g_R06_R16*sin(theta_R06 - theta_R16))
struct[0].Gy[11,11] = V_R05*V_R06*(b_R05_R06*sin(theta_R05 - theta_R06) - g_R05_R06*cos(theta_R05 - theta_R06)) + V_R06*V_R07*(-b_R06_R07*sin(theta_R06 - theta_R07) - g_R06_R07*cos(theta_R06 - theta_R07)) + V_R06*V_R16*(-b_R06_R16*sin(theta_R06 - theta_R16) - g_R06_R16*cos(theta_R06 - theta_R16))
struct[0].Gy[11,12] = V_R06*(b_R06_R07*cos(theta_R06 - theta_R07) - g_R06_R07*sin(theta_R06 - theta_R07))
struct[0].Gy[11,13] = V_R06*V_R07*(b_R06_R07*sin(theta_R06 - theta_R07) + g_R06_R07*cos(theta_R06 - theta_R07))
struct[0].Gy[11,30] = V_R06*(b_R06_R16*cos(theta_R06 - theta_R16) - g_R06_R16*sin(theta_R06 - theta_R16))
struct[0].Gy[11,31] = V_R06*V_R16*(b_R06_R16*sin(theta_R06 - theta_R16) + g_R06_R16*cos(theta_R06 - theta_R16))
struct[0].Gy[12,10] = V_R07*(b_R06_R07*sin(theta_R06 - theta_R07) - g_R06_R07*cos(theta_R06 - theta_R07))
struct[0].Gy[12,11] = V_R06*V_R07*(b_R06_R07*cos(theta_R06 - theta_R07) + g_R06_R07*sin(theta_R06 - theta_R07))
struct[0].Gy[12,12] = V_R06*(b_R06_R07*sin(theta_R06 - theta_R07) - g_R06_R07*cos(theta_R06 - theta_R07)) + 2*V_R07*(g_R06_R07 + g_R07_R08) + V_R08*(-b_R07_R08*sin(theta_R07 - theta_R08) - g_R07_R08*cos(theta_R07 - theta_R08))
struct[0].Gy[12,13] = V_R06*V_R07*(-b_R06_R07*cos(theta_R06 - theta_R07) - g_R06_R07*sin(theta_R06 - theta_R07)) + V_R07*V_R08*(-b_R07_R08*cos(theta_R07 - theta_R08) + g_R07_R08*sin(theta_R07 - theta_R08))
struct[0].Gy[12,14] = V_R07*(-b_R07_R08*sin(theta_R07 - theta_R08) - g_R07_R08*cos(theta_R07 - theta_R08))
struct[0].Gy[12,15] = V_R07*V_R08*(b_R07_R08*cos(theta_R07 - theta_R08) - g_R07_R08*sin(theta_R07 - theta_R08))
struct[0].Gy[13,10] = V_R07*(b_R06_R07*cos(theta_R06 - theta_R07) + g_R06_R07*sin(theta_R06 - theta_R07))
struct[0].Gy[13,11] = V_R06*V_R07*(-b_R06_R07*sin(theta_R06 - theta_R07) + g_R06_R07*cos(theta_R06 - theta_R07))
struct[0].Gy[13,12] = V_R06*(b_R06_R07*cos(theta_R06 - theta_R07) + g_R06_R07*sin(theta_R06 - theta_R07)) + 2*V_R07*(-b_R06_R07 - b_R07_R08 - bs_R06_R07/2 - bs_R07_R08/2) + V_R08*(b_R07_R08*cos(theta_R07 - theta_R08) - g_R07_R08*sin(theta_R07 - theta_R08))
struct[0].Gy[13,13] = V_R06*V_R07*(b_R06_R07*sin(theta_R06 - theta_R07) - g_R06_R07*cos(theta_R06 - theta_R07)) + V_R07*V_R08*(-b_R07_R08*sin(theta_R07 - theta_R08) - g_R07_R08*cos(theta_R07 - theta_R08))
struct[0].Gy[13,14] = V_R07*(b_R07_R08*cos(theta_R07 - theta_R08) - g_R07_R08*sin(theta_R07 - theta_R08))
struct[0].Gy[13,15] = V_R07*V_R08*(b_R07_R08*sin(theta_R07 - theta_R08) + g_R07_R08*cos(theta_R07 - theta_R08))
struct[0].Gy[14,12] = V_R08*(b_R07_R08*sin(theta_R07 - theta_R08) - g_R07_R08*cos(theta_R07 - theta_R08))
struct[0].Gy[14,13] = V_R07*V_R08*(b_R07_R08*cos(theta_R07 - theta_R08) + g_R07_R08*sin(theta_R07 - theta_R08))
struct[0].Gy[14,14] = V_R07*(b_R07_R08*sin(theta_R07 - theta_R08) - g_R07_R08*cos(theta_R07 - theta_R08)) + 2*V_R08*(g_R07_R08 + g_R08_R09) + V_R09*(-b_R08_R09*sin(theta_R08 - theta_R09) - g_R08_R09*cos(theta_R08 - theta_R09))
struct[0].Gy[14,15] = V_R07*V_R08*(-b_R07_R08*cos(theta_R07 - theta_R08) - g_R07_R08*sin(theta_R07 - theta_R08)) + V_R08*V_R09*(-b_R08_R09*cos(theta_R08 - theta_R09) + g_R08_R09*sin(theta_R08 - theta_R09))
struct[0].Gy[14,16] = V_R08*(-b_R08_R09*sin(theta_R08 - theta_R09) - g_R08_R09*cos(theta_R08 - theta_R09))
struct[0].Gy[14,17] = V_R08*V_R09*(b_R08_R09*cos(theta_R08 - theta_R09) - g_R08_R09*sin(theta_R08 - theta_R09))
struct[0].Gy[15,12] = V_R08*(b_R07_R08*cos(theta_R07 - theta_R08) + g_R07_R08*sin(theta_R07 - theta_R08))
struct[0].Gy[15,13] = V_R07*V_R08*(-b_R07_R08*sin(theta_R07 - theta_R08) + g_R07_R08*cos(theta_R07 - theta_R08))
struct[0].Gy[15,14] = V_R07*(b_R07_R08*cos(theta_R07 - theta_R08) + g_R07_R08*sin(theta_R07 - theta_R08)) + 2*V_R08*(-b_R07_R08 - b_R08_R09 - bs_R07_R08/2 - bs_R08_R09/2) + V_R09*(b_R08_R09*cos(theta_R08 - theta_R09) - g_R08_R09*sin(theta_R08 - theta_R09))
struct[0].Gy[15,15] = V_R07*V_R08*(b_R07_R08*sin(theta_R07 - theta_R08) - g_R07_R08*cos(theta_R07 - theta_R08)) + V_R08*V_R09*(-b_R08_R09*sin(theta_R08 - theta_R09) - g_R08_R09*cos(theta_R08 - theta_R09))
struct[0].Gy[15,16] = V_R08*(b_R08_R09*cos(theta_R08 - theta_R09) - g_R08_R09*sin(theta_R08 - theta_R09))
struct[0].Gy[15,17] = V_R08*V_R09*(b_R08_R09*sin(theta_R08 - theta_R09) + g_R08_R09*cos(theta_R08 - theta_R09))
struct[0].Gy[16,14] = V_R09*(b_R08_R09*sin(theta_R08 - theta_R09) - g_R08_R09*cos(theta_R08 - theta_R09))
struct[0].Gy[16,15] = V_R08*V_R09*(b_R08_R09*cos(theta_R08 - theta_R09) + g_R08_R09*sin(theta_R08 - theta_R09))
struct[0].Gy[16,16] = V_R08*(b_R08_R09*sin(theta_R08 - theta_R09) - g_R08_R09*cos(theta_R08 - theta_R09)) + 2*V_R09*(g_R08_R09 + g_R09_R10 + g_R09_R17) + V_R10*(-b_R09_R10*sin(theta_R09 - theta_R10) - g_R09_R10*cos(theta_R09 - theta_R10)) + V_R17*(-b_R09_R17*sin(theta_R09 - theta_R17) - g_R09_R17*cos(theta_R09 - theta_R17))
struct[0].Gy[16,17] = V_R08*V_R09*(-b_R08_R09*cos(theta_R08 - theta_R09) - g_R08_R09*sin(theta_R08 - theta_R09)) + V_R09*V_R10*(-b_R09_R10*cos(theta_R09 - theta_R10) + g_R09_R10*sin(theta_R09 - theta_R10)) + V_R09*V_R17*(-b_R09_R17*cos(theta_R09 - theta_R17) + g_R09_R17*sin(theta_R09 - theta_R17))
struct[0].Gy[16,18] = V_R09*(-b_R09_R10*sin(theta_R09 - theta_R10) - g_R09_R10*cos(theta_R09 - theta_R10))
struct[0].Gy[16,19] = V_R09*V_R10*(b_R09_R10*cos(theta_R09 - theta_R10) - g_R09_R10*sin(theta_R09 - theta_R10))
struct[0].Gy[16,32] = V_R09*(-b_R09_R17*sin(theta_R09 - theta_R17) - g_R09_R17*cos(theta_R09 - theta_R17))
struct[0].Gy[16,33] = V_R09*V_R17*(b_R09_R17*cos(theta_R09 - theta_R17) - g_R09_R17*sin(theta_R09 - theta_R17))
struct[0].Gy[17,14] = V_R09*(b_R08_R09*cos(theta_R08 - theta_R09) + g_R08_R09*sin(theta_R08 - theta_R09))
struct[0].Gy[17,15] = V_R08*V_R09*(-b_R08_R09*sin(theta_R08 - theta_R09) + g_R08_R09*cos(theta_R08 - theta_R09))
struct[0].Gy[17,16] = V_R08*(b_R08_R09*cos(theta_R08 - theta_R09) + g_R08_R09*sin(theta_R08 - theta_R09)) + 2*V_R09*(-b_R08_R09 - b_R09_R10 - b_R09_R17 - bs_R08_R09/2 - bs_R09_R10/2 - bs_R09_R17/2) + V_R10*(b_R09_R10*cos(theta_R09 - theta_R10) - g_R09_R10*sin(theta_R09 - theta_R10)) + V_R17*(b_R09_R17*cos(theta_R09 - theta_R17) - g_R09_R17*sin(theta_R09 - theta_R17))
struct[0].Gy[17,17] = V_R08*V_R09*(b_R08_R09*sin(theta_R08 - theta_R09) - g_R08_R09*cos(theta_R08 - theta_R09)) + V_R09*V_R10*(-b_R09_R10*sin(theta_R09 - theta_R10) - g_R09_R10*cos(theta_R09 - theta_R10)) + V_R09*V_R17*(-b_R09_R17*sin(theta_R09 - theta_R17) - g_R09_R17*cos(theta_R09 - theta_R17))
struct[0].Gy[17,18] = V_R09*(b_R09_R10*cos(theta_R09 - theta_R10) - g_R09_R10*sin(theta_R09 - theta_R10))
struct[0].Gy[17,19] = V_R09*V_R10*(b_R09_R10*sin(theta_R09 - theta_R10) + g_R09_R10*cos(theta_R09 - theta_R10))
struct[0].Gy[17,32] = V_R09*(b_R09_R17*cos(theta_R09 - theta_R17) - g_R09_R17*sin(theta_R09 - theta_R17))
struct[0].Gy[17,33] = V_R09*V_R17*(b_R09_R17*sin(theta_R09 - theta_R17) + g_R09_R17*cos(theta_R09 - theta_R17))
struct[0].Gy[18,16] = V_R10*(b_R09_R10*sin(theta_R09 - theta_R10) - g_R09_R10*cos(theta_R09 - theta_R10))
struct[0].Gy[18,17] = V_R09*V_R10*(b_R09_R10*cos(theta_R09 - theta_R10) + g_R09_R10*sin(theta_R09 - theta_R10))
struct[0].Gy[18,18] = V_R09*(b_R09_R10*sin(theta_R09 - theta_R10) - g_R09_R10*cos(theta_R09 - theta_R10)) + 2*V_R10*(g_R09_R10 + g_R10_R18) + V_R18*(-b_R10_R18*sin(theta_R10 - theta_R18) - g_R10_R18*cos(theta_R10 - theta_R18))
struct[0].Gy[18,19] = V_R09*V_R10*(-b_R09_R10*cos(theta_R09 - theta_R10) - g_R09_R10*sin(theta_R09 - theta_R10)) + V_R10*V_R18*(-b_R10_R18*cos(theta_R10 - theta_R18) + g_R10_R18*sin(theta_R10 - theta_R18))
struct[0].Gy[18,34] = V_R10*(-b_R10_R18*sin(theta_R10 - theta_R18) - g_R10_R18*cos(theta_R10 - theta_R18))
struct[0].Gy[18,35] = V_R10*V_R18*(b_R10_R18*cos(theta_R10 - theta_R18) - g_R10_R18*sin(theta_R10 - theta_R18))
struct[0].Gy[18,38] = -S_n_R10/S_base
struct[0].Gy[19,16] = V_R10*(b_R09_R10*cos(theta_R09 - theta_R10) + g_R09_R10*sin(theta_R09 - theta_R10))
struct[0].Gy[19,17] = V_R09*V_R10*(-b_R09_R10*sin(theta_R09 - theta_R10) + g_R09_R10*cos(theta_R09 - theta_R10))
struct[0].Gy[19,18] = V_R09*(b_R09_R10*cos(theta_R09 - theta_R10) + g_R09_R10*sin(theta_R09 - theta_R10)) + 2*V_R10*(-b_R09_R10 - b_R10_R18 - bs_R09_R10/2 - bs_R10_R18/2) + V_R18*(b_R10_R18*cos(theta_R10 - theta_R18) - g_R10_R18*sin(theta_R10 - theta_R18))
struct[0].Gy[19,19] = V_R09*V_R10*(b_R09_R10*sin(theta_R09 - theta_R10) - g_R09_R10*cos(theta_R09 - theta_R10)) + V_R10*V_R18*(-b_R10_R18*sin(theta_R10 - theta_R18) - g_R10_R18*cos(theta_R10 - theta_R18))
struct[0].Gy[19,34] = V_R10*(b_R10_R18*cos(theta_R10 - theta_R18) - g_R10_R18*sin(theta_R10 - theta_R18))
struct[0].Gy[19,35] = V_R10*V_R18*(b_R10_R18*sin(theta_R10 - theta_R18) + g_R10_R18*cos(theta_R10 - theta_R18))
struct[0].Gy[19,39] = -S_n_R10/S_base
struct[0].Gy[20,4] = V_R11*(b_R03_R11*sin(theta_R03 - theta_R11) - g_R03_R11*cos(theta_R03 - theta_R11))
struct[0].Gy[20,5] = V_R03*V_R11*(b_R03_R11*cos(theta_R03 - theta_R11) + g_R03_R11*sin(theta_R03 - theta_R11))
struct[0].Gy[20,20] = V_R03*(b_R03_R11*sin(theta_R03 - theta_R11) - g_R03_R11*cos(theta_R03 - theta_R11)) + 2*V_R11*g_R03_R11
struct[0].Gy[20,21] = V_R03*V_R11*(-b_R03_R11*cos(theta_R03 - theta_R11) - g_R03_R11*sin(theta_R03 - theta_R11))
struct[0].Gy[21,4] = V_R11*(b_R03_R11*cos(theta_R03 - theta_R11) + g_R03_R11*sin(theta_R03 - theta_R11))
struct[0].Gy[21,5] = V_R03*V_R11*(-b_R03_R11*sin(theta_R03 - theta_R11) + g_R03_R11*cos(theta_R03 - theta_R11))
struct[0].Gy[21,20] = V_R03*(b_R03_R11*cos(theta_R03 - theta_R11) + g_R03_R11*sin(theta_R03 - theta_R11)) + 2*V_R11*(-b_R03_R11 - bs_R03_R11/2)
struct[0].Gy[21,21] = V_R03*V_R11*(b_R03_R11*sin(theta_R03 - theta_R11) - g_R03_R11*cos(theta_R03 - theta_R11))
struct[0].Gy[22,6] = V_R12*(b_R04_R12*sin(theta_R04 - theta_R12) - g_R04_R12*cos(theta_R04 - theta_R12))
struct[0].Gy[22,7] = V_R04*V_R12*(b_R04_R12*cos(theta_R04 - theta_R12) + g_R04_R12*sin(theta_R04 - theta_R12))
struct[0].Gy[22,22] = V_R04*(b_R04_R12*sin(theta_R04 - theta_R12) - g_R04_R12*cos(theta_R04 - theta_R12)) + 2*V_R12*(g_R04_R12 + g_R12_R13) + V_R13*(-b_R12_R13*sin(theta_R12 - theta_R13) - g_R12_R13*cos(theta_R12 - theta_R13))
struct[0].Gy[22,23] = V_R04*V_R12*(-b_R04_R12*cos(theta_R04 - theta_R12) - g_R04_R12*sin(theta_R04 - theta_R12)) + V_R12*V_R13*(-b_R12_R13*cos(theta_R12 - theta_R13) + g_R12_R13*sin(theta_R12 - theta_R13))
struct[0].Gy[22,24] = V_R12*(-b_R12_R13*sin(theta_R12 - theta_R13) - g_R12_R13*cos(theta_R12 - theta_R13))
struct[0].Gy[22,25] = V_R12*V_R13*(b_R12_R13*cos(theta_R12 - theta_R13) - g_R12_R13*sin(theta_R12 - theta_R13))
struct[0].Gy[23,6] = V_R12*(b_R04_R12*cos(theta_R04 - theta_R12) + g_R04_R12*sin(theta_R04 - theta_R12))
struct[0].Gy[23,7] = V_R04*V_R12*(-b_R04_R12*sin(theta_R04 - theta_R12) + g_R04_R12*cos(theta_R04 - theta_R12))
struct[0].Gy[23,22] = V_R04*(b_R04_R12*cos(theta_R04 - theta_R12) + g_R04_R12*sin(theta_R04 - theta_R12)) + 2*V_R12*(-b_R04_R12 - b_R12_R13 - bs_R04_R12/2 - bs_R12_R13/2) + V_R13*(b_R12_R13*cos(theta_R12 - theta_R13) - g_R12_R13*sin(theta_R12 - theta_R13))
struct[0].Gy[23,23] = V_R04*V_R12*(b_R04_R12*sin(theta_R04 - theta_R12) - g_R04_R12*cos(theta_R04 - theta_R12)) + V_R12*V_R13*(-b_R12_R13*sin(theta_R12 - theta_R13) - g_R12_R13*cos(theta_R12 - theta_R13))
struct[0].Gy[23,24] = V_R12*(b_R12_R13*cos(theta_R12 - theta_R13) - g_R12_R13*sin(theta_R12 - theta_R13))
struct[0].Gy[23,25] = V_R12*V_R13*(b_R12_R13*sin(theta_R12 - theta_R13) + g_R12_R13*cos(theta_R12 - theta_R13))
struct[0].Gy[24,22] = V_R13*(b_R12_R13*sin(theta_R12 - theta_R13) - g_R12_R13*cos(theta_R12 - theta_R13))
struct[0].Gy[24,23] = V_R12*V_R13*(b_R12_R13*cos(theta_R12 - theta_R13) + g_R12_R13*sin(theta_R12 - theta_R13))
struct[0].Gy[24,24] = V_R12*(b_R12_R13*sin(theta_R12 - theta_R13) - g_R12_R13*cos(theta_R12 - theta_R13)) + 2*V_R13*(g_R12_R13 + g_R13_R14) + V_R14*(-b_R13_R14*sin(theta_R13 - theta_R14) - g_R13_R14*cos(theta_R13 - theta_R14))
struct[0].Gy[24,25] = V_R12*V_R13*(-b_R12_R13*cos(theta_R12 - theta_R13) - g_R12_R13*sin(theta_R12 - theta_R13)) + V_R13*V_R14*(-b_R13_R14*cos(theta_R13 - theta_R14) + g_R13_R14*sin(theta_R13 - theta_R14))
struct[0].Gy[24,26] = V_R13*(-b_R13_R14*sin(theta_R13 - theta_R14) - g_R13_R14*cos(theta_R13 - theta_R14))
struct[0].Gy[24,27] = V_R13*V_R14*(b_R13_R14*cos(theta_R13 - theta_R14) - g_R13_R14*sin(theta_R13 - theta_R14))
struct[0].Gy[25,22] = V_R13*(b_R12_R13*cos(theta_R12 - theta_R13) + g_R12_R13*sin(theta_R12 - theta_R13))
struct[0].Gy[25,23] = V_R12*V_R13*(-b_R12_R13*sin(theta_R12 - theta_R13) + g_R12_R13*cos(theta_R12 - theta_R13))
struct[0].Gy[25,24] = V_R12*(b_R12_R13*cos(theta_R12 - theta_R13) + g_R12_R13*sin(theta_R12 - theta_R13)) + 2*V_R13*(-b_R12_R13 - b_R13_R14 - bs_R12_R13/2 - bs_R13_R14/2) + V_R14*(b_R13_R14*cos(theta_R13 - theta_R14) - g_R13_R14*sin(theta_R13 - theta_R14))
struct[0].Gy[25,25] = V_R12*V_R13*(b_R12_R13*sin(theta_R12 - theta_R13) - g_R12_R13*cos(theta_R12 - theta_R13)) + V_R13*V_R14*(-b_R13_R14*sin(theta_R13 - theta_R14) - g_R13_R14*cos(theta_R13 - theta_R14))
struct[0].Gy[25,26] = V_R13*(b_R13_R14*cos(theta_R13 - theta_R14) - g_R13_R14*sin(theta_R13 - theta_R14))
struct[0].Gy[25,27] = V_R13*V_R14*(b_R13_R14*sin(theta_R13 - theta_R14) + g_R13_R14*cos(theta_R13 - theta_R14))
struct[0].Gy[26,24] = V_R14*(b_R13_R14*sin(theta_R13 - theta_R14) - g_R13_R14*cos(theta_R13 - theta_R14))
struct[0].Gy[26,25] = V_R13*V_R14*(b_R13_R14*cos(theta_R13 - theta_R14) + g_R13_R14*sin(theta_R13 - theta_R14))
struct[0].Gy[26,26] = V_R13*(b_R13_R14*sin(theta_R13 - theta_R14) - g_R13_R14*cos(theta_R13 - theta_R14)) + 2*V_R14*(g_R13_R14 + g_R14_R15) + V_R15*(-b_R14_R15*sin(theta_R14 - theta_R15) - g_R14_R15*cos(theta_R14 - theta_R15))
struct[0].Gy[26,27] = V_R13*V_R14*(-b_R13_R14*cos(theta_R13 - theta_R14) - g_R13_R14*sin(theta_R13 - theta_R14)) + V_R14*V_R15*(-b_R14_R15*cos(theta_R14 - theta_R15) + g_R14_R15*sin(theta_R14 - theta_R15))
struct[0].Gy[26,28] = V_R14*(-b_R14_R15*sin(theta_R14 - theta_R15) - g_R14_R15*cos(theta_R14 - theta_R15))
struct[0].Gy[26,29] = V_R14*V_R15*(b_R14_R15*cos(theta_R14 - theta_R15) - g_R14_R15*sin(theta_R14 - theta_R15))
struct[0].Gy[26,44] = -S_n_R14/S_base
struct[0].Gy[27,24] = V_R14*(b_R13_R14*cos(theta_R13 - theta_R14) + g_R13_R14*sin(theta_R13 - theta_R14))
struct[0].Gy[27,25] = V_R13*V_R14*(-b_R13_R14*sin(theta_R13 - theta_R14) + g_R13_R14*cos(theta_R13 - theta_R14))
struct[0].Gy[27,26] = V_R13*(b_R13_R14*cos(theta_R13 - theta_R14) + g_R13_R14*sin(theta_R13 - theta_R14)) + 2*V_R14*(-b_R13_R14 - b_R14_R15 - bs_R13_R14/2 - bs_R14_R15/2) + V_R15*(b_R14_R15*cos(theta_R14 - theta_R15) - g_R14_R15*sin(theta_R14 - theta_R15))
struct[0].Gy[27,27] = V_R13*V_R14*(b_R13_R14*sin(theta_R13 - theta_R14) - g_R13_R14*cos(theta_R13 - theta_R14)) + V_R14*V_R15*(-b_R14_R15*sin(theta_R14 - theta_R15) - g_R14_R15*cos(theta_R14 - theta_R15))
struct[0].Gy[27,28] = V_R14*(b_R14_R15*cos(theta_R14 - theta_R15) - g_R14_R15*sin(theta_R14 - theta_R15))
struct[0].Gy[27,29] = V_R14*V_R15*(b_R14_R15*sin(theta_R14 - theta_R15) + g_R14_R15*cos(theta_R14 - theta_R15))
struct[0].Gy[27,45] = -S_n_R14/S_base
struct[0].Gy[28,26] = V_R15*(b_R14_R15*sin(theta_R14 - theta_R15) - g_R14_R15*cos(theta_R14 - theta_R15))
struct[0].Gy[28,27] = V_R14*V_R15*(b_R14_R15*cos(theta_R14 - theta_R15) + g_R14_R15*sin(theta_R14 - theta_R15))
struct[0].Gy[28,28] = V_R14*(b_R14_R15*sin(theta_R14 - theta_R15) - g_R14_R15*cos(theta_R14 - theta_R15)) + 2*V_R15*g_R14_R15
struct[0].Gy[28,29] = V_R14*V_R15*(-b_R14_R15*cos(theta_R14 - theta_R15) - g_R14_R15*sin(theta_R14 - theta_R15))
struct[0].Gy[29,26] = V_R15*(b_R14_R15*cos(theta_R14 - theta_R15) + g_R14_R15*sin(theta_R14 - theta_R15))
struct[0].Gy[29,27] = V_R14*V_R15*(-b_R14_R15*sin(theta_R14 - theta_R15) + g_R14_R15*cos(theta_R14 - theta_R15))
struct[0].Gy[29,28] = V_R14*(b_R14_R15*cos(theta_R14 - theta_R15) + g_R14_R15*sin(theta_R14 - theta_R15)) + 2*V_R15*(-b_R14_R15 - bs_R14_R15/2)
struct[0].Gy[29,29] = V_R14*V_R15*(b_R14_R15*sin(theta_R14 - theta_R15) - g_R14_R15*cos(theta_R14 - theta_R15))
struct[0].Gy[30,10] = V_R16*(b_R06_R16*sin(theta_R06 - theta_R16) - g_R06_R16*cos(theta_R06 - theta_R16))
struct[0].Gy[30,11] = V_R06*V_R16*(b_R06_R16*cos(theta_R06 - theta_R16) + g_R06_R16*sin(theta_R06 - theta_R16))
struct[0].Gy[30,30] = V_R06*(b_R06_R16*sin(theta_R06 - theta_R16) - g_R06_R16*cos(theta_R06 - theta_R16)) + 2*V_R16*g_R06_R16
struct[0].Gy[30,31] = V_R06*V_R16*(-b_R06_R16*cos(theta_R06 - theta_R16) - g_R06_R16*sin(theta_R06 - theta_R16))
struct[0].Gy[31,10] = V_R16*(b_R06_R16*cos(theta_R06 - theta_R16) + g_R06_R16*sin(theta_R06 - theta_R16))
struct[0].Gy[31,11] = V_R06*V_R16*(-b_R06_R16*sin(theta_R06 - theta_R16) + g_R06_R16*cos(theta_R06 - theta_R16))
struct[0].Gy[31,30] = V_R06*(b_R06_R16*cos(theta_R06 - theta_R16) + g_R06_R16*sin(theta_R06 - theta_R16)) + 2*V_R16*(-b_R06_R16 - bs_R06_R16/2)
struct[0].Gy[31,31] = V_R06*V_R16*(b_R06_R16*sin(theta_R06 - theta_R16) - g_R06_R16*cos(theta_R06 - theta_R16))
struct[0].Gy[32,16] = V_R17*(b_R09_R17*sin(theta_R09 - theta_R17) - g_R09_R17*cos(theta_R09 - theta_R17))
struct[0].Gy[32,17] = V_R09*V_R17*(b_R09_R17*cos(theta_R09 - theta_R17) + g_R09_R17*sin(theta_R09 - theta_R17))
struct[0].Gy[32,32] = V_R09*(b_R09_R17*sin(theta_R09 - theta_R17) - g_R09_R17*cos(theta_R09 - theta_R17)) + 2*V_R17*g_R09_R17
struct[0].Gy[32,33] = V_R09*V_R17*(-b_R09_R17*cos(theta_R09 - theta_R17) - g_R09_R17*sin(theta_R09 - theta_R17))
struct[0].Gy[33,16] = V_R17*(b_R09_R17*cos(theta_R09 - theta_R17) + g_R09_R17*sin(theta_R09 - theta_R17))
struct[0].Gy[33,17] = V_R09*V_R17*(-b_R09_R17*sin(theta_R09 - theta_R17) + g_R09_R17*cos(theta_R09 - theta_R17))
struct[0].Gy[33,32] = V_R09*(b_R09_R17*cos(theta_R09 - theta_R17) + g_R09_R17*sin(theta_R09 - theta_R17)) + 2*V_R17*(-b_R09_R17 - bs_R09_R17/2)
struct[0].Gy[33,33] = V_R09*V_R17*(b_R09_R17*sin(theta_R09 - theta_R17) - g_R09_R17*cos(theta_R09 - theta_R17))
struct[0].Gy[34,18] = V_R18*(b_R10_R18*sin(theta_R10 - theta_R18) - g_R10_R18*cos(theta_R10 - theta_R18))
struct[0].Gy[34,19] = V_R10*V_R18*(b_R10_R18*cos(theta_R10 - theta_R18) + g_R10_R18*sin(theta_R10 - theta_R18))
struct[0].Gy[34,34] = V_R10*(b_R10_R18*sin(theta_R10 - theta_R18) - g_R10_R18*cos(theta_R10 - theta_R18)) + 2*V_R18*g_R10_R18
struct[0].Gy[34,35] = V_R10*V_R18*(-b_R10_R18*cos(theta_R10 - theta_R18) - g_R10_R18*sin(theta_R10 - theta_R18))
struct[0].Gy[35,18] = V_R18*(b_R10_R18*cos(theta_R10 - theta_R18) + g_R10_R18*sin(theta_R10 - theta_R18))
struct[0].Gy[35,19] = V_R10*V_R18*(-b_R10_R18*sin(theta_R10 - theta_R18) + g_R10_R18*cos(theta_R10 - theta_R18))
struct[0].Gy[35,34] = V_R10*(b_R10_R18*cos(theta_R10 - theta_R18) + g_R10_R18*sin(theta_R10 - theta_R18)) + 2*V_R18*(-b_R10_R18 - bs_R10_R18/2)
struct[0].Gy[35,35] = V_R10*V_R18*(b_R10_R18*sin(theta_R10 - theta_R18) - g_R10_R18*cos(theta_R10 - theta_R18))
struct[0].Gy[36,18] = cos(delta_R10 - theta_R10)
struct[0].Gy[36,19] = V_R10*sin(delta_R10 - theta_R10)
struct[0].Gy[36,36] = X1d_R10
struct[0].Gy[36,37] = R_a_R10
struct[0].Gy[37,18] = sin(delta_R10 - theta_R10)
struct[0].Gy[37,19] = -V_R10*cos(delta_R10 - theta_R10)
struct[0].Gy[37,36] = R_a_R10
struct[0].Gy[37,37] = -X1q_R10
struct[0].Gy[38,18] = i_d_R10*sin(delta_R10 - theta_R10) + i_q_R10*cos(delta_R10 - theta_R10)
struct[0].Gy[38,19] = -V_R10*i_d_R10*cos(delta_R10 - theta_R10) + V_R10*i_q_R10*sin(delta_R10 - theta_R10)
struct[0].Gy[38,36] = V_R10*sin(delta_R10 - theta_R10)
struct[0].Gy[38,37] = V_R10*cos(delta_R10 - theta_R10)
struct[0].Gy[38,38] = -1
struct[0].Gy[39,18] = i_d_R10*cos(delta_R10 - theta_R10) - i_q_R10*sin(delta_R10 - theta_R10)
struct[0].Gy[39,19] = V_R10*i_d_R10*sin(delta_R10 - theta_R10) + V_R10*i_q_R10*cos(delta_R10 - theta_R10)
struct[0].Gy[39,36] = V_R10*cos(delta_R10 - theta_R10)
struct[0].Gy[39,37] = -V_R10*sin(delta_R10 - theta_R10)
struct[0].Gy[39,39] = -1
struct[0].Gy[40,40] = -1
struct[0].Gy[41,41] = -1
struct[0].Gy[41,49] = 1
struct[0].Gy[42,26] = cos(delta_R14 - theta_R14)
struct[0].Gy[42,27] = V_R14*sin(delta_R14 - theta_R14)
struct[0].Gy[42,42] = X1d_R14
struct[0].Gy[42,43] = R_a_R14
struct[0].Gy[43,26] = sin(delta_R14 - theta_R14)
struct[0].Gy[43,27] = -V_R14*cos(delta_R14 - theta_R14)
struct[0].Gy[43,42] = R_a_R14
struct[0].Gy[43,43] = -X1q_R14
struct[0].Gy[44,26] = i_d_R14*sin(delta_R14 - theta_R14) + i_q_R14*cos(delta_R14 - theta_R14)
struct[0].Gy[44,27] = -V_R14*i_d_R14*cos(delta_R14 - theta_R14) + V_R14*i_q_R14*sin(delta_R14 - theta_R14)
struct[0].Gy[44,42] = V_R14*sin(delta_R14 - theta_R14)
struct[0].Gy[44,43] = V_R14*cos(delta_R14 - theta_R14)
struct[0].Gy[44,44] = -1
struct[0].Gy[45,26] = i_d_R14*cos(delta_R14 - theta_R14) - i_q_R14*sin(delta_R14 - theta_R14)
struct[0].Gy[45,27] = V_R14*i_d_R14*sin(delta_R14 - theta_R14) + V_R14*i_q_R14*cos(delta_R14 - theta_R14)
struct[0].Gy[45,42] = V_R14*cos(delta_R14 - theta_R14)
struct[0].Gy[45,43] = -V_R14*sin(delta_R14 - theta_R14)
struct[0].Gy[45,45] = -1
struct[0].Gy[46,46] = -1
struct[0].Gy[47,47] = -1
struct[0].Gy[47,50] = 1
struct[0].Gy[48,48] = -1
struct[0].Gy[49,49] = -1
struct[0].Gy[50,50] = -1
struct[0].Gu[0,0] = -1/S_base
struct[0].Gu[1,1] = -1/S_base
struct[0].Gu[2,2] = -1/S_base
struct[0].Gu[3,3] = -1/S_base
struct[0].Gu[4,4] = -1/S_base
struct[0].Gu[5,5] = -1/S_base
struct[0].Gu[6,6] = -1/S_base
struct[0].Gu[7,7] = -1/S_base
struct[0].Gu[8,8] = -1/S_base
struct[0].Gu[9,9] = -1/S_base
struct[0].Gu[10,10] = -1/S_base
struct[0].Gu[11,11] = -1/S_base
struct[0].Gu[12,12] = -1/S_base
struct[0].Gu[13,13] = -1/S_base
struct[0].Gu[14,14] = -1/S_base
struct[0].Gu[15,15] = -1/S_base
struct[0].Gu[16,16] = -1/S_base
struct[0].Gu[17,17] = -1/S_base
struct[0].Gu[18,18] = -1/S_base
struct[0].Gu[19,19] = -1/S_base
struct[0].Gu[20,20] = -1/S_base
struct[0].Gu[21,21] = -1/S_base
struct[0].Gu[22,22] = -1/S_base
struct[0].Gu[23,23] = -1/S_base
struct[0].Gu[24,24] = -1/S_base
struct[0].Gu[25,25] = -1/S_base
struct[0].Gu[26,26] = -1/S_base
struct[0].Gu[27,27] = -1/S_base
struct[0].Gu[28,28] = -1/S_base
struct[0].Gu[29,29] = -1/S_base
struct[0].Gu[30,30] = -1/S_base
struct[0].Gu[31,31] = -1/S_base
struct[0].Gu[32,32] = -1/S_base
struct[0].Gu[33,33] = -1/S_base
struct[0].Gu[34,34] = -1/S_base
struct[0].Gu[35,35] = -1/S_base
struct[0].Gu[40,36] = K_a_R10
struct[0].Gu[40,37] = K_a_R10
struct[0].Gu[41,38] = 1
struct[0].Gu[46,39] = K_a_R14
struct[0].Gu[46,40] = K_a_R14
struct[0].Gu[47,41] = 1
@numba.njit(cache=True)
def Piecewise(arg):
    """Evaluate a SymPy-style Piecewise given as ((value, condition), ...).

    Pairs are scanned from last to first, so the earliest pair whose
    condition holds supplies the returned value. If no condition holds,
    the first pair's condition slot is returned unchanged (generator
    convention of this file).
    """
    result = arg[0][1]
    for idx in range(len(arg) - 1, -1, -1):
        value, condition = arg[idx]
        if condition:
            result = value
    return result
@numba.njit(cache=True)
def ITE(arg):
    """If-then-else over (value, condition) pairs.

    Same contract as Piecewise in this module: the earliest pair whose
    condition is true wins; the fallback is the first pair's condition
    slot as generated.
    """
    out = arg[0][1]
    idx = len(arg) - 1
    while idx >= 0:
        if arg[idx][1]:
            out = arg[idx][0]
        idx -= 1
    return out
@numba.njit(cache=True)
def Abs(x):
    """Absolute value of *x* (scalar or array), via numpy."""
    magnitude = np.abs(x)
    return magnitude
@numba.njit(cache=True)
def ini_dae_jacobian_numba(struct, x):
    """Assemble the initialization Jacobian Ac_ini = [[Fx, Fy], [Gx, Gy]].

    Unpacks the stacked vector x = [states; algebraic variables] into the
    struct, re-evaluates the ini-mode Jacobian blocks (modes 10 and 11 of
    the generated ini() — presumably Fx/Fy and Gx/Gy; confirm in ini),
    then scatters their nonzero entries into the dense Ac_ini matrix.
    """
    n_states = struct[0].N_x
    n_alg = struct[0].N_y
    struct[0].x[:, 0] = x[0:n_states]
    struct[0].y_ini[:, 0] = x[n_states:(n_states + n_alg)]
    ini(struct, 10)
    ini(struct, 11)
    # Scatter each sparse block into the dense matrix; algebraic rows and
    # columns are offset by the number of differential states.
    for r, c in zip(struct[0].Fx_ini_rows, struct[0].Fx_ini_cols):
        struct[0].Ac_ini[r, c] = struct[0].Fx_ini[r, c]
    for r, c in zip(struct[0].Fy_ini_rows, struct[0].Fy_ini_cols):
        struct[0].Ac_ini[r, c + n_states] = struct[0].Fy_ini[r, c]
    for r, c in zip(struct[0].Gx_ini_rows, struct[0].Gx_ini_cols):
        struct[0].Ac_ini[r + n_states, c] = struct[0].Gx_ini[r, c]
    for r, c in zip(struct[0].Gy_ini_rows, struct[0].Gy_ini_cols):
        struct[0].Ac_ini[r + n_states, c + n_states] = struct[0].Gy_ini[r, c]
@numba.njit(cache=True)
def ini_dae_problem(struct, x):
    """Evaluate the initialization residual fg = [f; g] at the stacked point x.

    Splits x into states and algebraic variables, re-runs the generated
    ini() in modes 2 and 3 (which refresh struct[0].f and struct[0].g),
    and packs both residuals into struct[0].fg.
    """
    n_states = struct[0].N_x
    n_alg = struct[0].N_y
    struct[0].x[:, 0] = x[:n_states]
    struct[0].y_ini[:, 0] = x[n_states:(n_states + n_alg)]
    ini(struct, 2)
    ini(struct, 3)
    struct[0].fg[:n_states, :] = struct[0].f[:]
    struct[0].fg[n_states:, :] = struct[0].g[:]
@numba.njit(cache=True)
def ssate(struct, xy):
    """Newton iteration for the DAE steady state.

    Repeatedly solves Ac_ini * step = -fg and updates xy in place until
    the residual (evaluated before the latest update) drops below 1e-8,
    or 100 iterations elapse. The converged point is copied back into
    struct[0].x and struct[0].y_ini.

    Returns (xy, index of the last iteration performed).
    """
    max_iterations = 100
    tolerance = 1e-8
    for iteration in range(max_iterations):
        ini_dae_jacobian_numba(struct, xy[:, 0])
        ini_dae_problem(struct, xy[:, 0])
        step = np.linalg.solve(struct[0].Ac_ini, -struct[0].fg)
        xy[:] += step
        # NOTE: fg here is the residual computed before this update.
        if np.max(np.abs(struct[0].fg[:, 0])) < tolerance:
            break
    n_states = struct[0].N_x
    struct[0].x[:, 0] = xy[:n_states, 0]
    struct[0].y_ini[:, 0] = xy[n_states:, 0]
    return xy, iteration
@numba.njit(cache=True)
def daesolver(struct):
    """Fixed-step time-domain DAE solver.

    Advances struct[0] from its current time t to t_end in steps of Dt.
    Only solver code 5 is implemented here: the implicit trapezoidal rule
    with a Newton iteration per step (as in Milano's book). When
    struct[i].store == 1, the trajectory is decimated into the T/X/Y/Z
    history arrays. Returns the final simulation time.
    """
    sin = np.sin
    cos = np.cos
    sqrt = np.sqrt
    i = 0
    Dt = struct[i].Dt
    N_x = struct[i].N_x
    N_y = struct[i].N_y
    N_z = struct[i].N_z
    decimation = struct[i].decimation
    eye = np.eye(N_x)
    t = struct[i].t
    t_end = struct[i].t_end
    if struct[i].it == 0:
        # First call: evaluate the model once (mode 1) and record the
        # initial point in slot 0 of the history arrays.
        run(t,struct, 1)
        struct[i].it_store = 0
        struct[i]['T'][0] = t
        struct[i].X[0,:] = struct[i].x[:,0]
        struct[i].Y[0,:] = struct[i].y_run[:,0]
        struct[i].Z[0,:] = struct[i].h[:,0]
    solver = struct[i].solvern
    while t<t_end:
        struct[i].it += 1
        struct[i].t += Dt
        t = struct[i].t
        if solver == 5: # Trapezoidal DAE as in Milano's book
            # Residuals at the start of the step (modes 2/3 refresh f and g).
            run(t,struct, 2)
            run(t,struct, 3)
            x = np.copy(struct[i].x[:])
            y = np.copy(struct[i].y_run[:])
            f = np.copy(struct[i].f[:])
            g = np.copy(struct[i].g[:])
            for iter in range(struct[i].imax):
                # Refresh residuals (2, 3) and Jacobians (10, 11) at the
                # current iterate before forming the Newton system.
                run(t,struct, 2)
                run(t,struct, 3)
                run(t,struct,10)
                run(t,struct,11)
                x_i = struct[i].x[:]
                y_i = struct[i].y_run[:]
                f_i = struct[i].f[:]
                g_i = struct[i].g[:]
                F_x_i = struct[i].Fx[:,:]
                F_y_i = struct[i].Fy[:,:]
                G_x_i = struct[i].Gx[:,:]
                G_y_i = struct[i].Gy[:,:]
                # Newton matrix of the trapezoidal step:
                # [[I - Dt/2*Fx, -Dt/2*Fy], [Gx, Gy]]
                A_c_i = np.vstack((np.hstack((eye-0.5*Dt*F_x_i, -0.5*Dt*F_y_i)),
                                   np.hstack((G_x_i, G_y_i))))
                # Trapezoidal residual of the differential states.
                f_n_i = x_i - x - 0.5*Dt*(f_i+f)
                # print(t,iter,g_i)
                Dxy_i = np.linalg.solve(-A_c_i,np.vstack((f_n_i,g_i)))
                x_i = x_i + Dxy_i[0:N_x]
                y_i = y_i + Dxy_i[N_x:(N_x+N_y)]
                struct[i].x[:] = x_i
                struct[i].y_run[:] = y_i
                # MATLAB reference implementation kept for comparison:
                # [f_i,g_i,F_x_i,F_y_i,G_x_i,G_y_i] = smib_transient(x_i,y_i,u);
                # A_c_i = [[eye(N_x)-0.5*Dt*F_x_i, -0.5*Dt*F_y_i],
                #          [ G_x_i, G_y_i]];
                # f_n_i = x_i - x - 0.5*Dt*(f_i+f);
                # Dxy_i = -A_c_i\[f_n_i.',g_i.'].';
                # x_i = x_i + Dxy_i(1:N_x);
                # y_i = y_i + Dxy_i(N_x+1:N_x+N_y);
                xy = np.vstack((x_i,y_i))
                # Convergence test: largest Newton update relative to each
                # variable's magnitude (floored at 0.001 to avoid division
                # by tiny values).
                max_relative = 0.0
                for it_var in range(N_x+N_y):
                    abs_value = np.abs(xy[it_var,0])
                    if abs_value < 0.001:
                        abs_value = 0.001
                    relative_error = np.abs(Dxy_i[it_var,0])/abs_value
                    if relative_error > max_relative: max_relative = relative_error
                if max_relative<struct[i].itol:
                    break
            # if iter>struct[i].imax-2:
            #     print('Convergence problem')
            struct[i].x[:] = x_i
            struct[i].y_run[:] = y_i
        # channels: store the accepted point, decimated by `decimation`.
        if struct[i].store == 1:
            it_store = struct[i].it_store
            if struct[i].it >= it_store*decimation:
                struct[i]['T'][it_store+1] = t
                struct[i].X[it_store+1,:] = struct[i].x[:,0]
                struct[i].Y[it_store+1,:] = struct[i].y_run[:,0]
                struct[i].Z[it_store+1,:] = struct[i].h[:,0]
                # `iter` leaks from the Newton loop: iterations used this step.
                struct[i].iters[it_store+1,0] = iter
                struct[i].it_store += 1
    struct[i].t = t
    return t
def nonzeros():
    """Return the constant sparsity pattern of the DAE Jacobian blocks.

    Returns eight parallel lists of row/column indices — the nonzero
    coordinates of Fx, Fy, Gx and Gy used when scattering into the dense
    initialization Jacobian:
    (Fx_ini_rows, Fx_ini_cols, Fy_ini_rows, Fy_ini_cols,
     Gx_ini_rows, Gx_ini_cols, Gy_ini_rows, Gy_ini_cols).

    NOTE: this table appears to be auto-generated from the symbolic model;
    do not edit the index lists by hand.
    """
    Fx_ini_rows = [0, 0, 1, 1, 1, 2, 3, 4, 6, 7, 7, 8, 8, 8, 9, 10, 11, 13]
    Fx_ini_cols = [0, 1, 0, 1, 6, 2, 3, 4, 6, 7, 8, 7, 8, 13, 9, 10, 11, 13]
    Fy_ini_rows = [0, 1, 1, 1, 1, 1, 2, 2, 3, 4, 5, 6, 7, 8, 8, 8, 8, 8, 9, 9, 10, 11, 12, 13, 14]
    Fy_ini_cols = [48, 18, 19, 36, 37, 48, 36, 40, 37, 18, 18, 41, 48, 26, 27, 42, 43, 48, 42, 46, 43, 26, 26, 47, 48]
    Gx_ini_rows = [36, 36, 37, 37, 38, 39, 40, 40, 41, 42, 42, 43, 43, 44, 45, 46, 46, 47, 48, 48, 49, 50]
    Gx_ini_cols = [0, 2, 0, 3, 0, 0, 4, 5, 1, 7, 9, 7, 10, 7, 7, 11, 12, 8, 1, 8, 14, 14]
    Gy_ini_rows = [0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 4, 4, 4, 4, 4, 4, 4, 4, 5, 5, 5, 5, 5, 5, 5, 5, 6, 6, 6, 6, 6, 6, 6, 6, 7, 7, 7, 7, 7, 7, 7, 7, 8, 8, 8, 8, 8, 8, 9, 9, 9, 9, 9, 9, 10, 10, 10, 10, 10, 10, 10, 10, 11, 11, 11, 11, 11, 11, 11, 11, 12, 12, 12, 12, 12, 12, 13, 13, 13, 13, 13, 13, 14, 14, 14, 14, 14, 14, 15, 15, 15, 15, 15, 15, 16, 16, 16, 16, 16, 16, 16, 16, 17, 17, 17, 17, 17, 17, 17, 17, 18, 18, 18, 18, 18, 18, 18, 19, 19, 19, 19, 19, 19, 19, 20, 20, 20, 20, 21, 21, 21, 21, 22, 22, 22, 22, 22, 22, 23, 23, 23, 23, 23, 23, 24, 24, 24, 24, 24, 24, 25, 25, 25, 25, 25, 25, 26, 26, 26, 26, 26, 26, 26, 27, 27, 27, 27, 27, 27, 27, 28, 28, 28, 28, 29, 29, 29, 29, 30, 30, 30, 30, 31, 31, 31, 31, 32, 32, 32, 32, 33, 33, 33, 33, 34, 34, 34, 34, 35, 35, 35, 35, 36, 36, 36, 36, 37, 37, 37, 37, 38, 38, 38, 38, 38, 39, 39, 39, 39, 39, 40, 41, 41, 42, 42, 42, 42, 43, 43, 43, 43, 44, 44, 44, 44, 44, 45, 45, 45, 45, 45, 46, 47, 47, 48, 49, 50]
    Gy_ini_cols = [0, 1, 2, 3, 0, 1, 2, 3, 0, 1, 2, 3, 4, 5, 0, 1, 2, 3, 4, 5, 2, 3, 4, 5, 6, 7, 20, 21, 2, 3, 4, 5, 6, 7, 20, 21, 4, 5, 6, 7, 8, 9, 22, 23, 4, 5, 6, 7, 8, 9, 22, 23, 6, 7, 8, 9, 10, 11, 6, 7, 8, 9, 10, 11, 8, 9, 10, 11, 12, 13, 30, 31, 8, 9, 10, 11, 12, 13, 30, 31, 10, 11, 12, 13, 14, 15, 10, 11, 12, 13, 14, 15, 12, 13, 14, 15, 16, 17, 12, 13, 14, 15, 16, 17, 14, 15, 16, 17, 18, 19, 32, 33, 14, 15, 16, 17, 18, 19, 32, 33, 16, 17, 18, 19, 34, 35, 38, 16, 17, 18, 19, 34, 35, 39, 4, 5, 20, 21, 4, 5, 20, 21, 6, 7, 22, 23, 24, 25, 6, 7, 22, 23, 24, 25, 22, 23, 24, 25, 26, 27, 22, 23, 24, 25, 26, 27, 24, 25, 26, 27, 28, 29, 44, 24, 25, 26, 27, 28, 29, 45, 26, 27, 28, 29, 26, 27, 28, 29, 10, 11, 30, 31, 10, 11, 30, 31, 16, 17, 32, 33, 16, 17, 32, 33, 18, 19, 34, 35, 18, 19, 34, 35, 18, 19, 36, 37, 18, 19, 36, 37, 18, 19, 36, 37, 38, 18, 19, 36, 37, 39, 40, 41, 49, 26, 27, 42, 43, 26, 27, 42, 43, 26, 27, 42, 43, 44, 26, 27, 42, 43, 45, 46, 47, 50, 48, 49, 50]
    return Fx_ini_rows,Fx_ini_cols,Fy_ini_rows,Fy_ini_cols,Gx_ini_rows,Gx_ini_cols,Gy_ini_rows,Gy_ini_cols
37d77c866060168ab11f428e6514767ac8a22e38 | 33,242 | py | Python | layint_api/apis/users_api.py | LayeredInsight/layint_api_python | a5c9a5b24098bd823c5102b7ab9e4745432f19b4 | [
"Apache-2.0"
] | null | null | null | layint_api/apis/users_api.py | LayeredInsight/layint_api_python | a5c9a5b24098bd823c5102b7ab9e4745432f19b4 | [
"Apache-2.0"
] | null | null | null | layint_api/apis/users_api.py | LayeredInsight/layint_api_python | a5c9a5b24098bd823c5102b7ab9e4745432f19b4 | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
"""
Layered Insight Assessment, Compliance, Witness & Control
LI Assessment & Compliance performs static vulnerability analysis, license and package compliance. LI Witness provides deep insight and analytics into containerized applications. Control provides dynamic runtime security and analytics for containerized applications. You can find out more about the Layered Insight Suite at [http://layeredinsight.com](http://layeredinsight.com).
OpenAPI spec version: 0.10
Contact: help@layeredinsight.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class UsersApi(object):
    """
    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    Ref: https://github.com/swagger-api/swagger-codegen

    Refactored review copy: every endpoint now routes through the shared
    helpers ``_sync_or_async`` and ``_invoke``.  Public method names,
    signatures, URLs, HTTP verbs, response types, headers and error-message
    strings are unchanged from the generated original.
    """

    # Framework-level keyword arguments accepted by every endpoint in
    # addition to its own spec-defined parameters.
    _COMMON_PARAMS = ('callback', '_return_http_data_only',
                      '_preload_content', '_request_timeout')

    def __init__(self, api_client=None):
        # Use the explicitly supplied client; otherwise fall back to (and
        # lazily create) the shared client held on the global Configuration.
        config = Configuration()
        if api_client:
            self.api_client = api_client
        else:
            if not config.api_client:
                config.api_client = ApiClient()
            self.api_client = config.api_client

    def _sync_or_async(self, http_info_method, *args, **kwargs):
        """
        Invoke `http_info_method` and unwrap its result.

        The request is synchronous by default.  When the caller supplies a
        `callback` keyword the request is made asynchronously and the
        request thread is returned instead of the response data.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('callback'):
            return http_info_method(*args, **kwargs)
        (data) = http_info_method(*args, **kwargs)
        return data

    def _invoke(self, method_name, resource_path, http_method, params,
                endpoint_params, required=(), path_param_map=None,
                body_key=None, response_type=None, send_json=False):
        """
        Validate `params` and perform the HTTP request for one endpoint.

        :param str method_name: public method name, used in error messages
        :param str resource_path: URL template, e.g. '/Users/{UserID}'
        :param str http_method: HTTP verb ('GET', 'POST', 'DELETE', ...)
        :param dict params: keyword arguments supplied by the caller
        :param list endpoint_params: spec-defined parameter names
        :param tuple required: parameter names that must be set and non-None
        :param dict path_param_map: URL placeholder -> parameter name
        :param str body_key: parameter whose value is sent as the body
        :param str response_type: swagger model name for deserialization
        :param bool send_json: send an 'application/json' Content-Type header
        :return: whatever ``ApiClient.call_api`` returns
        :raises TypeError: on an unexpected keyword argument
        :raises ValueError: when a required parameter is missing
        """
        all_params = list(endpoint_params) + list(self._COMMON_PARAMS)
        for key in params:
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method %s" % (key, method_name)
                )
        # verify the required parameters are set
        for name in required:
            if params.get(name) is None:
                raise ValueError(
                    "Missing the required parameter `%s` when calling `%s`"
                    % (name, method_name)
                )
        path_params = {}
        for placeholder, param_name in (path_param_map or {}).items():
            if param_name in params:
                path_params[placeholder] = params[param_name]
        header_params = {}
        if send_json:
            # HTTP header `Content-Type`
            header_params['Content-Type'] = self.api_client.\
                select_header_content_type(['application/json'])
        body_params = params.get(body_key) if body_key else None
        # Authentication setting
        auth_settings = ['ApiKey']
        return self.api_client.call_api(
            resource_path, http_method,
            path_params,
            [],    # query_params: none of the Users endpoints use any
            header_params,
            body=body_params,
            post_params=[],
            files={},
            response_type=response_type,
            auth_settings=auth_settings,
            callback=params.get('callback'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats={})

    def add_user(self, **kwargs):
        """
        Add User

        Synchronous by default; pass a `callback` function for an
        asynchronous request.

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param CommonUser common_user:
        :return: APIResponseUser
            If the method is called asynchronously,
            returns the request thread.
        """
        return self._sync_or_async(self.add_user_with_http_info, **kwargs)

    def add_user_with_http_info(self, **kwargs):
        """Add User.  See :meth:`add_user` for parameters."""
        return self._invoke(
            'add_user', '/Users', 'POST', kwargs,
            endpoint_params=['common_user'],
            body_key='common_user',
            response_type='APIResponseUser')

    def assign_groups_to_user(self, user_id, **kwargs):
        """
        Assign Group
        Assigns Group to UserID

        Synchronous by default; pass a `callback` function for an
        asynchronous request.

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str user_id: hexadecimal ID of the User (required)
        :param HoldGroup hold_group:
        :return: UserGroups
            If the method is called asynchronously,
            returns the request thread.
        """
        return self._sync_or_async(
            self.assign_groups_to_user_with_http_info, user_id, **kwargs)

    def assign_groups_to_user_with_http_info(self, user_id, **kwargs):
        """Assign Group.  See :meth:`assign_groups_to_user` for parameters."""
        kwargs['user_id'] = user_id
        return self._invoke(
            'assign_groups_to_user', '/Users/{UserID}/Groups', 'POST', kwargs,
            endpoint_params=['user_id', 'hold_group'],
            required=('user_id',),
            path_param_map={'UserID': 'user_id'},
            body_key='hold_group',
            response_type='UserGroups',
            send_json=True)

    def delete_user(self, user_id, **kwargs):
        """
        Delete the specified user
        Deletes the specified UserID

        Synchronous by default; pass a `callback` function for an
        asynchronous request.

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str user_id: hexadecimal ID of User (required)
        :return: None
            If the method is called asynchronously,
            returns the request thread.
        """
        return self._sync_or_async(
            self.delete_user_with_http_info, user_id, **kwargs)

    def delete_user_with_http_info(self, user_id, **kwargs):
        """Delete the specified user.  See :meth:`delete_user` for parameters."""
        kwargs['user_id'] = user_id
        return self._invoke(
            'delete_user', '/Users/{UserID}', 'DELETE', kwargs,
            endpoint_params=['user_id'],
            required=('user_id',),
            path_param_map={'UserID': 'user_id'},
            response_type=None)

    def list_user_by_id(self, user_id, **kwargs):
        """
        Get specified UserID
        Returns details about specified UserID.

        Synchronous by default; pass a `callback` function for an
        asynchronous request.

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str user_id: hexadecimal ID of the User (required)
        :return: APIResponseUser
            If the method is called asynchronously,
            returns the request thread.
        """
        return self._sync_or_async(
            self.list_user_by_id_with_http_info, user_id, **kwargs)

    def list_user_by_id_with_http_info(self, user_id, **kwargs):
        """Get specified UserID.  See :meth:`list_user_by_id` for parameters."""
        kwargs['user_id'] = user_id
        return self._invoke(
            'list_user_by_id', '/Users/{UserID}', 'GET', kwargs,
            endpoint_params=['user_id'],
            required=('user_id',),
            path_param_map={'UserID': 'user_id'},
            response_type='APIResponseUser')

    def list_users(self, **kwargs):
        """
        List Users

        Synchronous by default; pass a `callback` function for an
        asynchronous request.

        :param callback function: The callback function
            for asynchronous request. (optional)
        :return: APIResponseUser
            If the method is called asynchronously,
            returns the request thread.
        """
        return self._sync_or_async(self.list_users_with_http_info, **kwargs)

    def list_users_with_http_info(self, **kwargs):
        """List Users.  See :meth:`list_users` for parameters."""
        return self._invoke(
            'list_users', '/Users', 'GET', kwargs,
            endpoint_params=[],
            response_type='APIResponseUser')

    def modify_user(self, user_id, **kwargs):
        """
        Modify User
        Modifies an existing UserID.

        Synchronous by default; pass a `callback` function for an
        asynchronous request.

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str user_id: hexadecimal ID of the User (required)
        :param APIResponseUser api_response_user:
        :return: CommonUser
            If the method is called asynchronously,
            returns the request thread.
        """
        return self._sync_or_async(
            self.modify_user_with_http_info, user_id, **kwargs)

    def modify_user_with_http_info(self, user_id, **kwargs):
        """Modify User.  See :meth:`modify_user` for parameters."""
        kwargs['user_id'] = user_id
        return self._invoke(
            'modify_user', '/Users/{UserID}', 'POST', kwargs,
            endpoint_params=['user_id', 'api_response_user'],
            required=('user_id',),
            path_param_map={'UserID': 'user_id'},
            body_key='api_response_user',
            response_type='CommonUser',
            send_json=True)

    def search_user_by_email(self, **kwargs):
        """
        Search User
        Searches User by Email

        Synchronous by default; pass a `callback` function for an
        asynchronous request.

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param SearchBy search_by:
        :return: APIResponseUser
            If the method is called asynchronously,
            returns the request thread.
        """
        return self._sync_or_async(
            self.search_user_by_email_with_http_info, **kwargs)

    def search_user_by_email_with_http_info(self, **kwargs):
        """Search User.  See :meth:`search_user_by_email` for parameters."""
        return self._invoke(
            'search_user_by_email', '/UsersSearch', 'POST', kwargs,
            endpoint_params=['search_by'],
            body_key='search_by',
            response_type='APIResponseUser',
            send_json=True)

    def suspend_user(self, user_id, **kwargs):
        """
        Suspend User
        Suspends UserID

        Synchronous by default; pass a `callback` function for an
        asynchronous request.

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str user_id: hexadecimal ID of the User (required)
        :return: APIResponseUser
            If the method is called asynchronously,
            returns the request thread.
        """
        return self._sync_or_async(
            self.suspend_user_with_http_info, user_id, **kwargs)

    def suspend_user_with_http_info(self, user_id, **kwargs):
        """Suspend User.  See :meth:`suspend_user` for parameters."""
        kwargs['user_id'] = user_id
        # The generated original sends a JSON Content-Type header here even
        # though the request has no body; preserved for wire compatibility.
        return self._invoke(
            'suspend_user', '/Users/{UserID}/Suspend', 'POST', kwargs,
            endpoint_params=['user_id'],
            required=('user_id',),
            path_param_map={'UserID': 'user_id'},
            response_type='APIResponseUser',
            send_json=True)
| 39.954327 | 383 | 0.55472 | 3,321 | 33,242 | 5.297802 | 0.062331 | 0.072752 | 0.025463 | 0.032738 | 0.935376 | 0.92344 | 0.920257 | 0.90241 | 0.898545 | 0.890986 | 0 | 0.000286 | 0.369141 | 33,242 | 831 | 384 | 40.002407 | 0.838682 | 0.326996 | 0 | 0.772277 | 0 | 0 | 0.145493 | 0.030607 | 0 | 0 | 0 | 0 | 0 | 1 | 0.042079 | false | 0 | 0.017327 | 0 | 0.121287 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
37e72b6c6159e3adbad545f6928c474d678d19c9 | 9,592 | py | Python | tests/test_expand.py | eerohele/disjure | baa57b6f07e7f91984c294f97bba2aefdaaf0bb1 | [
"Apache-2.0"
] | 46 | 2020-09-07T10:34:38.000Z | 2022-03-01T08:28:11.000Z | tests/test_expand.py | eerohele/disjure | baa57b6f07e7f91984c294f97bba2aefdaaf0bb1 | [
"Apache-2.0"
] | 66 | 2020-09-25T17:07:37.000Z | 2022-03-31T17:56:07.000Z | tests/test_expand.py | eerohele/disjure | baa57b6f07e7f91984c294f97bba2aefdaaf0bb1 | [
"Apache-2.0"
] | 6 | 2020-10-06T06:43:53.000Z | 2021-08-10T13:16:13.000Z | from .util import ViewTestCase
class TestExpandSelectionCommand(ViewTestCase):
    """Exercises the `tutkain_expand_selection` command: from a caret
    position, each expansion should select the next-larger syntactic
    element, and soft undo should shrink the selection back again."""

    def expand(self):
        self.view.window().run_command("tutkain_expand_selection")

    def shrink(self):
        self.view.window().run_command("soft_undo")

    # -- helpers -------------------------------------------------------

    def place(self, content, point):
        """Fill the view with `content` and collapse the caret at `point`."""
        self.set_view_content(content)
        self.set_selections((point, point))

    def check(self, expected):
        """Expand once and assert the primary selection equals `expected`."""
        self.expand()
        self.assertEquals(expected, self.selection(0))

    def check_shrink(self, expected):
        """Shrink once and assert the primary selection equals `expected`."""
        self.shrink()
        self.assertEquals(expected, self.selection(0))

    # -- bracket pairs -------------------------------------------------

    def test_before_lparen(self):
        self.place("(foo)", 0)
        self.check("(foo)")

    def test_after_lparen(self):
        self.place("(foo)", 1)
        self.check("foo")

    def test_before_rparen(self):
        self.place("(foo)", 4)
        self.check("foo")

    def test_after_rparen(self):
        self.place("(foo)", 6)
        self.check("(foo)")

    def test_before_lbracket(self):
        self.place("[foo]", 0)
        self.check("[foo]")

    def test_after_lbracket(self):
        self.place("[foo]", 1)
        self.check("foo")

    def test_after_rbracket(self):
        self.place("[foo]", 6)
        self.check("[foo]")

    def test_before_lcurly(self):
        self.place("{:a 1}", 0)
        self.check("{:a 1}")

    def test_after_lcurly(self):
        self.place("{:a 1}", 1)
        self.check(":a")

    def test_after_rcurly(self):
        self.place("{:a 1}", 7)
        self.check("{:a 1}")

    def test_before_set(self):
        self.place("#{1}", 0)
        self.check("#{1}")

    def test_between_set_hash_and_bracket(self):
        self.place("#{1}", 1)
        self.check("#{1}")

    def test_between_on_symbol(self):
        self.place("(inc 1)", 2)
        self.check("inc")

    # -- reader prefixes -----------------------------------------------

    def test_before_at(self):
        self.place("@(foo)", 0)
        self.check("@(foo)")

    def test_after_at(self):
        self.place("@(foo)", 1)
        self.check("@(foo)")

    def test_after_at_rparen(self):
        self.place("@(foo)", 6)
        self.check("@(foo)")

    def test_before_quoted_list(self):
        self.place("'(foo)", 0)
        self.check("'(foo)")

    def test_after_quoted_list(self):
        self.place("'(foo)", 6)
        self.check("'(foo)")

    def test_nested(self):
        self.place("(foo (bar))", 5)
        self.check("(bar)")
        self.check("(foo (bar))")

    def test_before_string(self):
        self.place('(a "b" c)', 3)
        self.check('"b"')

    def test_meta(self):
        self.place("^{:foo true}", 0)
        self.check("^{:foo true}")
        self.set_selections((12, 12))
        self.check("^{:foo true}")
        self.place("^:foo", 0)
        self.check("^:foo")
        self.place("^:foo", 5)
        self.check("^:foo")

    def test_numbers(self):
        self.place("0.2", 0)
        self.check("0.2")
        self.place("0.2", 3)
        self.check("0.2")
        self.place("1/2", 0)
        self.check("1/2")
        self.set_selections((3, 3))
        self.check("1/2")

    def test_string_close_paren(self):
        self.place('(a "b")', 6)
        self.check('"b"')

    def test_qualified_map(self):
        self.place("#:foo{:bar 1} #:foo/bar{:baz 1} #::foo{:bar 1}", 0)
        self.check("#:foo{:bar 1}")
        self.set_selections((13, 13))
        self.check("#:foo{:bar 1}")
        self.set_selections((14, 14))
        self.check("#:foo/bar{:baz 1}")
        self.set_selections((32, 32))
        self.check("#::foo{:bar 1}")
        self.set_selections((46, 46))
        self.check("#::foo{:bar 1}")

    def test_list_head(self):
        self.place("(ns foo.bar)", 1)
        self.check("ns")

    def test_special_form(self):
        self.place("(fn [foo])", 1)
        self.check("fn")

    def test_empty_sexp(self):
        # Empty pairs expand straight to the whole pair.
        for content in ("[]", "()", "{}"):
            self.place(content, 1)
            self.check(content)
        # Whitespace-only interior: first the gap, then the pair, then stop.
        self.place("( )", 1)
        self.check(" ")
        self.check("( )")
        self.check("( )")
        self.place("[ ( a ) ]", 4)
        self.check(" a ")
        self.check("( a )")
        self.check("[ ( a ) ]")

    def test_shrink(self):
        self.place("(a (b (c) d) e)", 7)
        # Expansion walks outward and saturates at the top-level form.
        for expected in ("c", "(c)", "(b (c) d)",
                         "(a (b (c) d) e)", "(a (b (c) d) e)"):
            self.check(expected)
        # Shrinking retraces the steps back down to an empty selection.
        for expected in ("(b (c) d)", "(c)", "c", "", ""):
            self.check_shrink(expected)

    def test_comment_after_open(self):
        self.place("[;;foo\n]", 0)
        self.check("[;;foo\n]")
        self.set_selections((1, 1))
        self.check(";;")

    def test_issue_48(self):
        self.place("[state @state-ref]", 8)
        self.check("@state-ref")
        self.check("state @state-ref")
        self.check("[state @state-ref]")

    def test_tagged_literal(self):
        self.place("(foo #bar/baz [:quux 1])", 5)
        self.check("#bar/baz")
        self.check("foo #bar/baz [:quux 1]")
        self.check("(foo #bar/baz [:quux 1])")
| 33.538462 | 79 | 0.582152 | 1,194 | 9,592 | 4.512563 | 0.074539 | 0.110431 | 0.166295 | 0.284707 | 0.920007 | 0.885486 | 0.845212 | 0.80902 | 0.79974 | 0.733853 | 0 | 0.028077 | 0.246247 | 9,592 | 285 | 80 | 33.65614 | 0.717151 | 0 | 0 | 0.68 | 0 | 0 | 0.080379 | 0.002502 | 0 | 0 | 0 | 0 | 0.256 | 1 | 0.132 | false | 0 | 0.004 | 0 | 0.14 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
530003e42386923a44dd0fed1d47b831a8d45a78 | 11,581 | py | Python | tests/test_materials.py | julia-shenshina/LMS | 110364bb5c59055d584c314fd5c04158c637d0e6 | [
"MIT"
] | null | null | null | tests/test_materials.py | julia-shenshina/LMS | 110364bb5c59055d584c314fd5c04158c637d0e6 | [
"MIT"
] | 4 | 2018-12-18T19:03:36.000Z | 2018-12-20T21:07:02.000Z | tests/test_materials.py | julia-shenshina/LMS | 110364bb5c59055d584c314fd5c04158c637d0e6 | [
"MIT"
] | null | null | null | from django.urls import reverse
from rest_framework.test import APITestCase
from lms.models.models import Course, Faculty, Group, Professor, Student, Material
class TestMaterials(APITestCase):
    """Permission tests for the material API endpoints.

    Exercises list/create/update/delete access for three kinds of callers:
    professors attached to a course, plain students of a group, and headmen
    (students granted write access via ``course.headmen``). Callers
    authenticate through the ``X-SECRET-KEY`` request header.
    """

    def test_read_materials_professor(self):
        """A professor lists only materials of courses they teach."""
        professor = Professor.objects.create(first_name='first', last_name='last', secret_key="123123")
        courses = [
            Course.objects.create(name="Курс_1", description="Описание курса_1"),
            Course.objects.create(name="Курс_2", description="Описание курса_2")
        ]
        materials = [
            Material.objects.create(name="Учебник по курсу_1", text="text_1", course=courses[0]),
            Material.objects.create(name="Учебник по курсу_2", text="text_2", course=courses[1])
        ]
        # The professor teaches only the first course.
        courses[0].professor.set([professor])
        for course in courses:
            course.refresh_from_db()

        response = self.client.get(
            reverse('material-list'),
            **{"HTTP_X_SECRET_KEY": professor.secret_key}
        )
        assert response.status_code == 200
        assert response.json().get("count") == 1
        assert response.json().get("results")[0]["id"] == materials[0].id

    def test_read_materials_student(self):
        """A student lists only materials of their group's courses."""
        faculty = Faculty.objects.create(name="Факультет_1")
        group = Group.objects.create(name="Группа_1", faculty=faculty, level=1)
        student = Student.objects.create(
            first_name="first", last_name="last", group=group, start_year=2016, secret_key="123123"
        )
        courses = [
            Course.objects.create(name="Курс_1", description="Описание курса_1"),
            Course.objects.create(name="Курс_2", description="Описание курса_2")
        ]
        materials = [
            Material.objects.create(name="Учебник по курсу_1", text="text_1", course=courses[0]),
            Material.objects.create(name="Учебник по курсу_2", text="text_2", course=courses[1])
        ]
        # The student's group is enrolled in the first course only.
        group.courses.set([courses[0]])
        for course, material in zip(courses, materials):
            course.materials.set([material])
            course.refresh_from_db()
        group.refresh_from_db()
        student.refresh_from_db()

        response = self.client.get(
            reverse('material-list'),
            **{"HTTP_X_SECRET_KEY": student.secret_key}
        )
        assert response.status_code == 200
        assert response.json().get("count") == 1
        assert response.json().get("results")[0]["id"] == materials[0].id

    def test_update_materials_professor_ok(self):
        """A professor attached to the course may PATCH its materials."""
        professor = Professor.objects.create(
            first_name='first', last_name='last', secret_key="123123"
        )
        course = Course.objects.create(name="Курс", description="Описание курса")
        course.professor.set([professor])
        material = Material.objects.create(name="Учебник по курсу", text="text", course=course)
        material.refresh_from_db()
        course.refresh_from_db()
        professor.refresh_from_db()

        response = self.client.patch(
            reverse('material-detail', args=[material.id]),
            data={'text': 'Текст учебника по курсу'},
            **{'HTTP_X_SECRET_KEY': professor.secret_key}
        )
        assert response.status_code == 200

    def test_update_materials_professor_failed(self):
        """A professor not attached to the course gets 404 on PATCH."""
        professor = Professor.objects.create(
            first_name='first', last_name='last', secret_key="123123"
        )
        course = Course.objects.create(name="Курс", description="Описание курса")
        material = Material.objects.create(name="Учебник по курсу", text="text", course=course)
        material.refresh_from_db()
        course.refresh_from_db()
        professor.refresh_from_db()

        response = self.client.patch(
            reverse('material-detail', args=[material.id]),
            data={'text': 'Текст учебника по курсу'},
            **{'HTTP_X_SECRET_KEY': professor.secret_key}
        )
        assert response.status_code == 404

    def test_update_materials_student_failed(self):
        """A plain (non-headman) student gets 403 on PATCH."""
        faculty = Faculty.objects.create(name="Факультет_1")
        group = Group.objects.create(name="Группа_1", faculty=faculty, level=1)
        student = Student.objects.create(
            first_name="first", last_name="last", group=group, secret_key="123123", start_year=2017
        )
        course = Course.objects.create(name="Курс", description="Описание курса")
        course.groups.set([group])
        material = Material.objects.create(name="Учебник по курсу", text="text", course=course)
        material.refresh_from_db()
        course.refresh_from_db()
        group.refresh_from_db()
        student.refresh_from_db()

        response = self.client.patch(
            reverse('material-detail', args=[material.id]),
            data={'text': 'Текст учебника по курсу'},
            **{'HTTP_X_SECRET_KEY': student.secret_key}
        )
        assert response.status_code == 403

    def test_update_materials_student_ok(self):
        """A headman student may PATCH materials of their course."""
        faculty = Faculty.objects.create(name="Факультет_1")
        group = Group.objects.create(name="Группа_1", faculty=faculty, level=1)
        student = Student.objects.create(
            first_name="first", last_name="last", group=group, secret_key="123123", start_year=2017
        )
        course = Course.objects.create(name="Курс", description="Описание курса")
        course.groups.set([group])
        # Headman status is what grants write access.
        course.headmen.set([student])
        material = Material.objects.create(name="Учебник по курсу", text="text", course=course)
        material.refresh_from_db()
        course.refresh_from_db()
        group.refresh_from_db()
        student.refresh_from_db()

        response = self.client.patch(
            reverse('material-detail', args=[material.id]),
            data={'text': 'Текст учебника по курсу'},
            **{'HTTP_X_SECRET_KEY': student.secret_key}
        )
        assert response.status_code == 200

    def test_delete_materials_professor(self):
        """A professor attached to the course may DELETE its materials."""
        professor = Professor.objects.create(
            first_name='first', last_name='last', secret_key="123123"
        )
        course = Course.objects.create(name="Курс", description="Описание курса")
        course.professor.set([professor])
        material = Material.objects.create(name="Учебник по курсу", text="text", course=course)
        material.refresh_from_db()
        course.refresh_from_db()
        professor.refresh_from_db()

        response = self.client.delete(
            reverse('material-detail', args=[material.id]),
            data={'text': 'Текст учебника по курсу'},
            **{'HTTP_X_SECRET_KEY': professor.secret_key}
        )
        assert response.status_code == 204

    def test_delete_materials_student_ok(self):
        """A headman student may DELETE materials of their course."""
        faculty = Faculty.objects.create(name="Факультет_1")
        group = Group.objects.create(name="Группа_1", faculty=faculty, level=1)
        student = Student.objects.create(
            first_name="first", last_name="last", group=group, secret_key="123123", start_year=2017
        )
        course = Course.objects.create(name="Курс", description="Описание курса")
        course.groups.set([group])
        course.headmen.set([student])
        material = Material.objects.create(name="Учебник по курсу", text="text", course=course)
        material.refresh_from_db()
        course.refresh_from_db()
        group.refresh_from_db()
        student.refresh_from_db()

        response = self.client.delete(
            reverse('material-detail', args=[material.id]),
            data={'text': 'Текст учебника по курсу'},
            **{'HTTP_X_SECRET_KEY': student.secret_key}
        )
        assert response.status_code == 204

    def test_delete_materials_student_failed(self):
        """A plain (non-headman) student gets 403 on DELETE."""
        faculty = Faculty.objects.create(name="Факультет_1")
        group = Group.objects.create(name="Группа_1", faculty=faculty, level=1)
        student = Student.objects.create(
            first_name="first", last_name="last", group=group, secret_key="123123", start_year=2017
        )
        course = Course.objects.create(name="Курс", description="Описание курса")
        course.groups.set([group])
        material = Material.objects.create(name="Учебник по курсу", text="text", course=course)
        material.refresh_from_db()
        course.refresh_from_db()
        group.refresh_from_db()
        student.refresh_from_db()

        response = self.client.delete(
            reverse('material-detail', args=[material.id]),
            **{'HTTP_X_SECRET_KEY': student.secret_key}
        )
        assert response.status_code == 403

    def test_create_materials_student_ok(self):
        """A headman student may POST a new material to their course."""
        faculty = Faculty.objects.create(name="Факультет_1")
        group = Group.objects.create(name="Группа_1", faculty=faculty, level=1)
        student = Student.objects.create(
            first_name="first", last_name="last", group=group, secret_key="123123", start_year=2017
        )
        course = Course.objects.create(name="Курс", description="Описание курса")
        course.groups.set([group])
        course.headmen.set([student])
        course.refresh_from_db()
        group.refresh_from_db()
        student.refresh_from_db()

        response = self.client.post(
            reverse('material-list'),
            data={'name': 'Новые материалы', 'text': 'Текст учебника по курсу', 'course': course.id},
            **{'HTTP_X_SECRET_KEY': student.secret_key}
        )
        assert response.status_code == 201

    def test_create_materials_student_failed(self):
        """A plain (non-headman) student gets 403 on POST."""
        faculty = Faculty.objects.create(name="Факультет_1")
        group = Group.objects.create(name="Группа_1", faculty=faculty, level=1)
        student = Student.objects.create(
            first_name="first", last_name="last", group=group, secret_key="123123", start_year=2017
        )
        course = Course.objects.create(name="Курс", description="Описание курса")
        course.groups.set([group])
        course.refresh_from_db()
        group.refresh_from_db()
        student.refresh_from_db()

        response = self.client.post(
            reverse('material-list'),
            data={'name': 'Новые материалы', 'text': 'Текст учебника по курсу', 'course': course.id},
            **{'HTTP_X_SECRET_KEY': student.secret_key}
        )
        assert response.status_code == 403

    def test_create_materials_professor_ok(self):
        """A professor attached to the course may POST a new material."""
        professor = Professor.objects.create(
            first_name='first', last_name='last', secret_key="123123"
        )
        course = Course.objects.create(name="Курс", description="Описание курса")
        course.professor.set([professor])
        course.refresh_from_db()
        professor.refresh_from_db()

        response = self.client.post(
            reverse('material-list'),
            data={'name': 'Новые материалы', 'text': 'Текст учебника по курсу', 'course': course.id},
            **{'HTTP_X_SECRET_KEY': professor.secret_key}
        )
        assert response.status_code == 201

    def test_create_materials_professor_failed(self):
        """A professor not attached to the course gets 403 on POST.

        NOTE: this was originally named ``test_update_materials_professor_failed``,
        which duplicated (and therefore shadowed) the PATCH test above, so that
        test never ran. Renamed to match its create-test siblings, since the
        body performs a POST to ``material-list``.
        """
        professor = Professor.objects.create(
            first_name='first', last_name='last', secret_key="123123"
        )
        course = Course.objects.create(name="Курс", description="Описание курса")

        response = self.client.post(
            reverse('material-list'),
            data={'name': 'Новые материалы', 'text': 'Текст учебника по курсу', 'course': course.id},
            **{'HTTP_X_SECRET_KEY': professor.secret_key}
        )
        assert response.status_code == 403
| 36.882166 | 103 | 0.631724 | 1,331 | 11,581 | 5.297521 | 0.066867 | 0.097717 | 0.09644 | 0.048929 | 0.949794 | 0.945398 | 0.945398 | 0.944972 | 0.944972 | 0.942136 | 0 | 0.022003 | 0.238667 | 11,581 | 313 | 104 | 37 | 0.777702 | 0 | 0 | 0.770213 | 0 | 0 | 0.142647 | 0 | 0 | 0 | 0 | 0 | 0.07234 | 1 | 0.055319 | false | 0 | 0.012766 | 0 | 0.07234 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
5302e41716a78451c7f0f4c216785506d1d21d3f | 861 | bzl | Python | test/com/facebook/buck/skylark/parser/testdata/attr/int_list/defs.bzl | Unknoob/buck | 2dfc734354b326f2f66896dde7746a11965d5a13 | [
"Apache-2.0"
] | 8,027 | 2015-01-02T05:31:44.000Z | 2022-03-31T07:08:09.000Z | test/com/facebook/buck/skylark/parser/testdata/attr/int_list/defs.bzl | Unknoob/buck | 2dfc734354b326f2f66896dde7746a11965d5a13 | [
"Apache-2.0"
] | 2,355 | 2015-01-01T15:30:53.000Z | 2022-03-30T20:21:16.000Z | test/com/facebook/buck/skylark/parser/testdata/attr/int_list/defs.bzl | Unknoob/buck | 2dfc734354b326f2f66896dde7746a11965d5a13 | [
"Apache-2.0"
] | 1,280 | 2015-01-09T03:29:04.000Z | 2022-03-30T15:14:14.000Z | """ Module docstring """
def well_formed():
    """ Function docstring """
    # No-argument form: repr() of the returned attribute object must be
    # "<attr.int_list>"; fail() aborts evaluation otherwise. This file is
    # parser test data, so only comments may be changed here.
    a = attr.int_list()
    if repr(a) != "<attr.int_list>":
        fail("Expected attr.int_list instance")
    # Full keyword form with a list default.
    a = attr.int_list(mandatory = True, doc = "Some int_list", default = [1])
    if repr(a) != "<attr.int_list>":
        fail("Expected attr.int_list instance")
    # allow_empty accepts both True and False.
    a = attr.int_list(mandatory = True, doc = "Some int_list", default = [1], allow_empty = True)
    if repr(a) != "<attr.int_list>":
        fail("Expected attr.int_list instance")
    a = attr.int_list(mandatory = True, doc = "Some int_list", default = [1], allow_empty = False)
    if repr(a) != "<attr.int_list>":
        fail("Expected attr.int_list instance")
def malformed():
    """ Function docstring """
    # `default = 3` is deliberately the wrong type (int instead of a list of
    # ints); the enclosing test expects this call to be rejected.
    _a = attr.int_list(mandatory = True, doc = "Some int_list", default = 3, allow_empty = True)
| 41 | 98 | 0.62137 | 120 | 861 | 4.275 | 0.216667 | 0.231969 | 0.278752 | 0.210526 | 0.875244 | 0.875244 | 0.785575 | 0.785575 | 0.785575 | 0.785575 | 0 | 0.005891 | 0.211382 | 861 | 20 | 99 | 43.05 | 0.749632 | 0.065041 | 0 | 0.533333 | 0 | 0 | 0.300637 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.133333 | false | 0 | 0 | 0 | 0.133333 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
53468eab96690d6ab3bbba6397f02feda661b631 | 42 | py | Python | labtoys/Rigol/__init__.py | ppudo/labtoys_python | c8ca27637602b8aac0574e92da370a4a97e9fcad | [
"MIT"
] | null | null | null | labtoys/Rigol/__init__.py | ppudo/labtoys_python | c8ca27637602b8aac0574e92da370a4a97e9fcad | [
"MIT"
] | null | null | null | labtoys/Rigol/__init__.py | ppudo/labtoys_python | c8ca27637602b8aac0574e92da370a4a97e9fcad | [
"MIT"
] | null | null | null | #__init__.py
from .DS1000Z import DS1000Z | 14 | 28 | 0.809524 | 6 | 42 | 5 | 0.833333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.216216 | 0.119048 | 42 | 3 | 28 | 14 | 0.594595 | 0.261905 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
535dfc246c90564f451a229683cac3c7545164f7 | 9,225 | py | Python | tests/test_encoding/test_woe_encoder.py | janrito/feature_engine | 38fa0b8851f7bd67e842515df1c8edf37d504b23 | [
"BSD-3-Clause"
] | 650 | 2018-12-31T11:44:19.000Z | 2022-03-05T03:16:21.000Z | tests/test_encoding/test_woe_encoder.py | janrito/feature_engine | 38fa0b8851f7bd67e842515df1c8edf37d504b23 | [
"BSD-3-Clause"
] | 205 | 2019-03-21T02:17:53.000Z | 2021-09-29T08:41:38.000Z | tests/test_encoding/test_woe_encoder.py | janrito/feature_engine | 38fa0b8851f7bd67e842515df1c8edf37d504b23 | [
"BSD-3-Clause"
] | 193 | 2019-01-09T21:06:46.000Z | 2021-12-14T09:00:54.000Z | import pandas as pd
import pytest
from sklearn.exceptions import NotFittedError
from feature_engine.encoding import WoEEncoder
def test_automatically_select_variables(df_enc):
    # test case 1: with variables=None the encoder picks up both categorical
    # columns on its own and replaces each category with its WoE value.
    encoder = WoEEncoder(variables=None)
    encoder.fit(df_enc[["var_A", "var_B"]], df_enc["target"])
    X = encoder.transform(df_enc[["var_A", "var_B"]])

    # Expected WoE values for categories A, B and C respectively.
    woe_a = 0.15415067982725836
    woe_b = -0.5389965007326869
    woe_c = 0.8472978603872037

    # Build the expected transformed dataframe.
    transf_df = df_enc.copy()
    transf_df["var_A"] = [woe_a] * 6 + [woe_b] * 10 + [woe_c] * 4
    transf_df["var_B"] = [woe_b] * 10 + [woe_a] * 6 + [woe_c] * 4

    # init params
    assert encoder.variables is None
    # fit params
    assert encoder.variables_ == ["var_A", "var_B"]
    assert encoder.encoder_dict_ == {
        "var_A": {"A": woe_a, "B": woe_b, "C": woe_c},
        "var_B": {"A": woe_b, "B": woe_a, "C": woe_c},
    }
    assert encoder.n_features_in_ == 2
    # transform params
    pd.testing.assert_frame_equal(X, transf_df[["var_A", "var_B"]])
def test_error_target_is_not_passed(df_enc):
    # test case 2: fit() requires a target; omitting it must raise TypeError.
    with pytest.raises(TypeError):
        WoEEncoder(variables=None).fit(df_enc)
def test_warn_if_transform_df_contains_categories_not_seen_in_fit(df_enc, df_enc_rare):
    # test case 3: categories present at transform time but unseen during
    # fit should trigger a UserWarning.
    woe_encoder = WoEEncoder(variables=None)
    with pytest.warns(UserWarning):
        woe_encoder.fit(df_enc[["var_A", "var_B"]], df_enc["target"])
        woe_encoder.transform(df_enc_rare[["var_A", "var_B"]])
def test_error_if_target_not_binary():
    # test case 4: a target with more than two classes cannot be used to
    # compute weight of evidence.
    data = pd.DataFrame({
        "var_A": ["A"] * 6 + ["B"] * 10 + ["C"] * 4,
        "var_B": ["A"] * 10 + ["B"] * 6 + ["C"] * 4,
        "target": [1, 1, 2, 2, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0],
    })
    encoder = WoEEncoder(variables=None)
    with pytest.raises(ValueError):
        encoder.fit(data[["var_A", "var_B"]], data["target"])
def test_error_if_denominator_probability_is_zero():
    # test case 5: category "A" contains only positive targets, so the
    # denominator of the WoE ratio is zero and fit must raise.
    data = pd.DataFrame({
        "var_A": ["A"] * 6 + ["B"] * 10 + ["C"] * 4,
        "var_B": ["A"] * 10 + ["B"] * 6 + ["C"] * 4,
        "target": [1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0],
    })
    encoder = WoEEncoder(variables=None)
    with pytest.raises(ValueError):
        encoder.fit(data[["var_A", "var_B"]], data["target"])
# # # test case 6: when the numerator probability is zero, woe
# # with pytest.raises(ValueError):
# #     df = {'var_A': ['A'] * 6 + ['B'] * 10 + ['C'] * 4,
# #           'var_B': ['A'] * 10 + ['B'] * 6 + ['C'] * 4,
# #           'target': [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0]}
# #     df = pd.DataFrame(df)
# #     encoder.fit(df[['var_A', 'var_B']], df['target'])
#
# # # test case 7: when the denominator probability is zero, woe
# # with pytest.raises(ValueError):
# #     df = {'var_A': ['A'] * 6 + ['B'] * 10 + ['C'] * 4,
# #           'var_B': ['A'] * 10 + ['B'] * 6 + ['C'] * 4,
# #           'target': [1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0]}
# #     df = pd.DataFrame(df)
# #     encoder.fit(df[['var_A', 'var_B']], df['target'])
def test_non_fitted_error(df_enc):
    # test case 8: transform() before fit() must raise NotFittedError.
    with pytest.raises(NotFittedError):
        WoEEncoder().transform(df_enc)
def test_error_if_contains_na_in_fit(df_enc_na):
    # test case 9: NA values in the training data are rejected at fit time.
    woe = WoEEncoder(variables=None)
    with pytest.raises(ValueError):
        woe.fit(df_enc_na[["var_A", "var_B"]], df_enc_na["target"])
def test_error_if_df_contains_na_in_transform(df_enc, df_enc_na):
    # test case 10: NA values in the data to transform are also rejected,
    # even after a clean fit.
    woe = WoEEncoder(variables=None)
    with pytest.raises(ValueError):
        woe.fit(df_enc[["var_A", "var_B"]], df_enc["target"])
        woe.transform(df_enc_na)
def test_on_numerical_variables(df_enc_numeric):
    # ignore_format=True lets the encoder treat the numerical columns as
    # categorical variables.
    encoder = WoEEncoder(variables=None, ignore_format=True)
    encoder.fit(df_enc_numeric[["var_A", "var_B"]], df_enc_numeric["target"])
    X = encoder.transform(df_enc_numeric[["var_A", "var_B"]])

    # Expected WoE values for categories 1, 2 and 3 respectively.
    woe_1 = 0.15415067982725836
    woe_2 = -0.5389965007326869
    woe_3 = 0.8472978603872037

    # Build the expected transformed dataframe.
    transf_df = df_enc_numeric.copy()
    transf_df["var_A"] = [woe_1] * 6 + [woe_2] * 10 + [woe_3] * 4
    transf_df["var_B"] = [woe_2] * 10 + [woe_1] * 6 + [woe_3] * 4

    # init params
    assert encoder.variables is None
    # fit params
    assert encoder.variables_ == ["var_A", "var_B"]
    assert encoder.encoder_dict_ == {
        "var_A": {1: woe_1, 2: woe_2, 3: woe_3},
        "var_B": {1: woe_2, 2: woe_1, 3: woe_3},
    }
    assert encoder.n_features_in_ == 2
    # transform params
    pd.testing.assert_frame_equal(X, transf_df[["var_A", "var_B"]])
def test_variables_cast_as_category(df_enc_category_dtypes):
    # Columns with pandas "category" dtype are encoded the same way as
    # object columns.
    df = df_enc_category_dtypes.copy()
    encoder = WoEEncoder(variables=None)
    encoder.fit(df[["var_A", "var_B"]], df["target"])
    X = encoder.transform(df[["var_A", "var_B"]])

    # Expected WoE values for categories A, B and C respectively.
    woe_a = 0.15415067982725836
    woe_b = -0.5389965007326869
    woe_c = 0.8472978603872037

    # Build the expected transformed dataframe.
    transf_df = df.copy()
    transf_df["var_A"] = [woe_a] * 6 + [woe_b] * 10 + [woe_c] * 4
    transf_df["var_B"] = [woe_b] * 10 + [woe_a] * 6 + [woe_c] * 4

    # Dtypes may differ between the category-typed input and the float
    # expectations, hence check_dtype=False plus an explicit dtype assert.
    pd.testing.assert_frame_equal(X, transf_df[["var_A", "var_B"]], check_dtype=False)
    assert X["var_A"].dtypes == float
| 31.271186 | 87 | 0.58981 | 1,045 | 9,225 | 5.026794 | 0.101435 | 0.20712 | 0.205597 | 0.349515 | 0.771749 | 0.757282 | 0.727584 | 0.696554 | 0.696554 | 0.683229 | 0 | 0.362178 | 0.277182 | 9,225 | 294 | 88 | 31.377551 | 0.425615 | 0.144173 | 0 | 0.745614 | 0 | 0 | 0.041188 | 0 | 0 | 0 | 0 | 0 | 0.052632 | 1 | 0.04386 | false | 0.004386 | 0.017544 | 0 | 0.061404 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 1 | 1 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
5363bb000ed703c5c7858cb1b41e88b555166cb9 | 2,946 | py | Python | dj_bioinformatics_protein/migrations/0004_auto_20170621_1918.py | CyrusBiotechnology/dj-protein | 31e19b98bfa33cae6cb43fd27eec9bac43a9f844 | [
"MIT"
] | 4 | 2016-09-25T20:40:46.000Z | 2021-07-06T08:00:30.000Z | dj_bioinformatics_protein/migrations/0004_auto_20170621_1918.py | CyrusBiotechnology/dj-protein | 31e19b98bfa33cae6cb43fd27eec9bac43a9f844 | [
"MIT"
] | 3 | 2016-02-02T01:36:37.000Z | 2017-12-20T16:09:44.000Z | dj_bioinformatics_protein/migrations/0004_auto_20170621_1918.py | CyrusBiotechnology/dj-protein | 31e19b98bfa33cae6cb43fd27eec9bac43a9f844 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-06-21 19:18
from __future__ import unicode_literals
import dj_bioinformatics_protein.fields
import django.core.validators
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated schema migration (Django 1.11.2, 2017-06-21).

    Replaces plain fields on ``alignment`` and ``fasta`` with amino-acid-aware
    field types whose regex validators accept only uppercase amino acid
    one-letter codes plus X (non-canonical residues); the alignment-sequence
    fields additionally allow '-' gap characters.
    """

    dependencies = [
        ('dj_bioinformatics_protein', '0003_auto_20160227_0008'),
    ]

    operations = [
        # Restrict the alignment method to the supported single-letter codes.
        migrations.AlterField(
            model_name='alignment',
            name='alignment_method',
            field=models.CharField(choices=[('H', 'hhsearch'), ('S', 'sparksX'), ('U', 'user')], max_length=1),
        ),
        # Plain sequence: amino acid letters (and X) only, no gaps.
        migrations.AlterField(
            model_name='alignment',
            name='full_query_sequence',
            field=dj_bioinformatics_protein.fields.AminoAcidSequenceField(max_length=5000, validators=[django.core.validators.RegexValidator('^[ACDEFGHIKLMNPQRSTVWYX]*$', 'Only uppercase amino acid abbreviations are allowed, or X (non canonical residues).')]),
        ),
        # Alignment fields below also allow '-' gap characters; the two
        # "modified" variants are nullable.
        migrations.AlterField(
            model_name='alignment',
            name='modified_query_aln_seq',
            field=dj_bioinformatics_protein.fields.AminoAcidAlignmentField(max_length=5000, null=True, validators=[django.core.validators.RegexValidator('^[ACDEFGHIKLMNPQRSTVWYX\\-]*$', 'Only uppercase amino acid abbreviations or dashes are allowed, or X (non canonical residues).')]),
        ),
        migrations.AlterField(
            model_name='alignment',
            name='modified_target_aln_seq',
            field=dj_bioinformatics_protein.fields.AminoAcidAlignmentField(max_length=5000, null=True, validators=[django.core.validators.RegexValidator('^[ACDEFGHIKLMNPQRSTVWYX\\-]*$', 'Only uppercase amino acid abbreviations or dashes are allowed, or X (non canonical residues).')]),
        ),
        migrations.AlterField(
            model_name='alignment',
            name='query_aln_seq',
            field=dj_bioinformatics_protein.fields.AminoAcidAlignmentField(max_length=5000, validators=[django.core.validators.RegexValidator('^[ACDEFGHIKLMNPQRSTVWYX\\-]*$', 'Only uppercase amino acid abbreviations or dashes are allowed, or X (non canonical residues).')]),
        ),
        migrations.AlterField(
            model_name='alignment',
            name='target_aln_seq',
            field=dj_bioinformatics_protein.fields.AminoAcidAlignmentField(max_length=5000, validators=[django.core.validators.RegexValidator('^[ACDEFGHIKLMNPQRSTVWYX\\-]*$', 'Only uppercase amino acid abbreviations or dashes are allowed, or X (non canonical residues).')]),
        ),
        migrations.AlterField(
            model_name='fasta',
            name='sequence',
            field=dj_bioinformatics_protein.fields.AminoAcidSequenceField(max_length=5000, validators=[django.core.validators.RegexValidator('^[ACDEFGHIKLMNPQRSTVWYX]*$', 'Only uppercase amino acid abbreviations are allowed, or X (non canonical residues).')]),
        ),
    ]
| 55.584906 | 285 | 0.690767 | 297 | 2,946 | 6.680135 | 0.272727 | 0.064516 | 0.092742 | 0.102319 | 0.818044 | 0.818044 | 0.775706 | 0.775706 | 0.775706 | 0.775706 | 0 | 0.024308 | 0.190088 | 2,946 | 52 | 286 | 56.653846 | 0.807209 | 0.023082 | 0 | 0.577778 | 1 | 0 | 0.330435 | 0.090783 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.088889 | 0 | 0.155556 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
726285d0be08ba9b450fe0714fb5c31bb124f9e9 | 15,994 | py | Python | gcp_variant_transforms/transforms/merge_headers_test.py | allieychen/gcp-variant-transforms | b4cae16f1370fcd9ac1ab3f8fbb495e00b4a096f | [
"Apache-2.0"
] | null | null | null | gcp_variant_transforms/transforms/merge_headers_test.py | allieychen/gcp-variant-transforms | b4cae16f1370fcd9ac1ab3f8fbb495e00b4a096f | [
"Apache-2.0"
] | null | null | null | gcp_variant_transforms/transforms/merge_headers_test.py | allieychen/gcp-variant-transforms | b4cae16f1370fcd9ac1ab3f8fbb495e00b4a096f | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test cases for merge_headers module."""
from collections import OrderedDict
import unittest
import vcf
from apache_beam.testing.test_pipeline import TestPipeline
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import Create
from gcp_variant_transforms.beam_io import vcf_header_io
from gcp_variant_transforms.libs import vcf_field_conflict_resolver
from gcp_variant_transforms.libs.header_merger import HeaderMerger
from gcp_variant_transforms.transforms import merge_headers
# Minimal VCF header fixtures. The two files share INFO=AF and FORMAT=GT but
# differ in INFO NS/NS2 and FORMAT GQ/GQ2, so merging them exercises both
# identical and disjoint header definitions.
FILE_1_LINES = [
    '##fileformat=VCFv4.2\n',
    '##INFO=<ID=NS,Number=1,Type=Integer,Description="Number samples">\n',
    '##INFO=<ID=AF,Number=A,Type=Float,Description="Allele Frequency">\n',
    '##FORMAT=<ID=GT,Number=1,Type=String,Description="Genotype">\n',
    '##FORMAT=<ID=GQ,Number=1,Type=Integer,Description="GQ">\n',
    '#CHROM POS ID REF ALT QUAL FILTER INFO FORMAT Sample1 Sample2\n']
FILE_2_LINES = [
    '##fileformat=VCFv4.2\n',
    '##INFO=<ID=NS2,Number=1,Type=Integer,Description="Number samples">\n',
    '##INFO=<ID=AF,Number=A,Type=Float,Description="Allele Frequency">\n',
    '##FORMAT=<ID=GT,Number=1,Type=String,Description="Genotype">\n',
    '##FORMAT=<ID=GQ2,Number=1,Type=Integer,Description="GQ">\n',
    '#CHROM POS ID REF ALT QUAL FILTER INFO FORMAT Sample3\n']
class MergeHeadersTest(unittest.TestCase):
"""Test cases for GetMergeHeaders `PTransform`."""
def _get_header_from_reader(self, reader):
    """Builds a ``VcfHeader`` from the metadata parsed by a PyVCF reader."""
    header_fields = {
        'infos': reader.infos,
        'filters': reader.filters,
        'alts': reader.alts,
        'formats': reader.formats,
        'contigs': reader.contigs,
    }
    return vcf_header_io.VcfHeader(**header_fields)
def _get_combiner_fn(self, split_alternate_allele_info_fields=True):
    """Returns a ``_MergeHeadersFn`` wired with a fresh conflict resolver."""
    return merge_headers._MergeHeadersFn(
        HeaderMerger(
            vcf_field_conflict_resolver.FieldConflictResolver(
                split_alternate_allele_info_fields)))
def test_combine_single_header(self):
    # Combining a single header must simply reproduce its fields.
    header = self._get_header_from_reader(vcf.Reader(fsock=iter(FILE_1_LINES)))
    combiner_fn = self._get_combiner_fn()
    accumulator = combiner_fn.add_input(combiner_fn.create_accumulator(), header)
    merged = combiner_fn.extract_output(accumulator)

    self.assertItemsEqual(merged.infos.keys(), ['NS', 'AF'])
    self.assertItemsEqual(merged.formats.keys(), ['GT', 'GQ'])
def test_combine_multiple_headers_as_inputs(self):
    # Both headers are fed into the same accumulator, one add_input at a
    # time; the merged output carries the union of the fields.
    combiner_fn = self._get_combiner_fn()
    merged = combiner_fn.create_accumulator()
    for lines in (FILE_1_LINES, FILE_2_LINES):
        header = self._get_header_from_reader(vcf.Reader(fsock=iter(lines)))
        merged = combiner_fn.add_input(merged, header)
    merged = combiner_fn.extract_output(merged)

    self.assertItemsEqual(merged.infos.keys(), ['NS', 'AF', 'NS2'])
    self.assertItemsEqual(merged.formats.keys(), ['GT', 'GQ', 'GQ2'])
def test_combine_multiple_headers_as_accumulators(self):
    # Each header goes into its own accumulator and the accumulators are
    # then merged, mirroring how Beam combines per-worker partial results.
    combiner_fn = self._get_combiner_fn()
    accumulators = []
    for lines in (FILE_1_LINES, FILE_2_LINES):
        header = self._get_header_from_reader(vcf.Reader(fsock=iter(lines)))
        accumulators.append(
            combiner_fn.add_input(combiner_fn.create_accumulator(), header))
    merged = combiner_fn.extract_output(
        combiner_fn.merge_accumulators(accumulators))

    self.assertItemsEqual(merged.infos.keys(), ['NS', 'AF', 'NS2'])
    self.assertItemsEqual(merged.formats.keys(), ['GT', 'GQ', 'GQ2'])
def test_combine_two_type_conflicting_but_resolvable_headers(self):
    # These two headers have type conflict (Integer vs Float), however pipeline
    # doesn't raise error because the type conflict is resolvable.
    lines_1 = [
        '##fileformat=VCFv4.2\n',
        '##INFO=<ID=NS,Number=1,Type=Integer,Description="Number samples">\n',
        '#CHROM POS ID REF ALT QUAL FILTER INFO FORMAT Sample1 Sample2\n']
    lines_2 = [
        '##fileformat=VCFv4.2\n',
        '##INFO=<ID=NS,Number=1,Type=Float,Description="Number samples">\n',
        '#CHROM POS ID REF ALT QUAL FILTER INFO FORMAT Sample3\n']
    vcf_reader_1 = vcf.Reader(fsock=iter(lines_1))
    vcf_reader_2 = vcf.Reader(fsock=iter(lines_2))
    headers_1 = self._get_header_from_reader(vcf_reader_1)
    headers_2 = self._get_header_from_reader(vcf_reader_2)
    combiner_fn = self._get_combiner_fn()
    merged_headers = combiner_fn.create_accumulator()
    merged_headers = combiner_fn.add_input(merged_headers, headers_1)
    merged_headers = combiner_fn.add_input(merged_headers, headers_2)
    merged_headers = combiner_fn.extract_output(merged_headers)

    self.assertItemsEqual(merged_headers.infos.keys(), ['NS'])
    # The expected entry shows the Integer/Float conflict resolved to Float.
    # NOTE(review): assertItemsEqual compares the *iterated elements* of its
    # arguments, and iterating a dict-like yields only its keys — so if
    # infos['NS'] is a mapping, the values ('num', 'type', ...) may not
    # actually be compared here. Consider assertEqual — TODO confirm.
    self.assertItemsEqual(merged_headers.infos['NS'],
                          OrderedDict([('id', 'NS'),
                                       ('num', 1),
                                       ('type', 'Float'),
                                       ('desc', 'Number samples'),
                                       ('source', None),
                                       ('version', None)]))
def test_none_type_defaults_to_string(self):
    """An INFO field whose type is `None` merges as `String`."""
    header_lines = [
        '##fileformat=VCFv4.2\n',
        '##INFO=<ID=NS,Number=1,Type=Integer,Description="Number samples">\n',
        '#CHROM POS ID REF ALT QUAL FILTER INFO FORMAT Sample1 Sample2\n']
    headers = self._get_header_from_reader(
        vcf.Reader(fsock=iter(header_lines)))
    # Force the type to `None`; the merge must convert it to `String`.
    headers.infos['NS']['type'] = None
    combiner_fn = self._get_combiner_fn()
    accumulator = combiner_fn.create_accumulator()
    accumulator = combiner_fn.add_input(accumulator, headers)
    merged_headers = combiner_fn.extract_output(accumulator)
    self.assertItemsEqual(merged_headers.infos.keys(), ['NS'])
    self.assertItemsEqual(
        merged_headers.infos['NS'],
        OrderedDict([('id', 'NS'),
                     ('num', 1),
                     ('type', 'String'),
                     ('desc', 'Number samples'),
                     ('source', None),
                     ('version', None)]))
def test_combine_two_num_conflicting_but_resolvable_headers_1(self):
    """Number conflict (2 vs dot) resolves to dot.

    The two headers disagree on the Number field, but the pipeline does
    not raise because this conflict is resolvable.
    """
    file_a = [
        '##fileformat=VCFv4.2\n',
        '##INFO=<ID=NS,Number=2,Type=Integer,Description="Number samples">\n',
        '#CHROM POS ID REF ALT QUAL FILTER INFO FORMAT Sample1 Sample2\n']
    file_b = [
        '##fileformat=VCFv4.2\n',
        '##INFO=<ID=NS,Number=.,Type=Integer,Description="Number samples">\n',
        '#CHROM POS ID REF ALT QUAL FILTER INFO FORMAT Sample3\n']
    headers_1 = self._get_header_from_reader(vcf.Reader(fsock=iter(file_a)))
    headers_2 = self._get_header_from_reader(vcf.Reader(fsock=iter(file_b)))
    combiner_fn = self._get_combiner_fn()
    accumulator = combiner_fn.create_accumulator()
    accumulator = combiner_fn.add_input(accumulator, headers_1)
    accumulator = combiner_fn.add_input(accumulator, headers_2)
    merged_headers = combiner_fn.extract_output(accumulator)
    self.assertItemsEqual(merged_headers.infos.keys(), ['NS'])
    self.assertItemsEqual(
        merged_headers.infos['NS'],
        OrderedDict([('id', 'NS'),
                     ('num', '.'),
                     ('type', 'Integer'),
                     ('desc', 'Number samples'),
                     ('source', None),
                     ('version', None)]))
def test_combine_two_num_conflicting_but_resolvable_headers_2(self):
    """Number conflict (2 vs 3) resolves to dot.

    Two different fixed Number values are resolvable: the merged header
    falls back to unbounded ('.').
    """
    file_a = [
        '##fileformat=VCFv4.2\n',
        '##INFO=<ID=NS,Number=2,Type=Integer,Description="Number samples">\n',
        '#CHROM POS ID REF ALT QUAL FILTER INFO FORMAT Sample1 Sample2\n']
    file_b = [
        '##fileformat=VCFv4.2\n',
        '##INFO=<ID=NS,Number=3,Type=Integer,Description="Number samples">\n',
        '#CHROM POS ID REF ALT QUAL FILTER INFO FORMAT Sample3\n']
    headers_1 = self._get_header_from_reader(vcf.Reader(fsock=iter(file_a)))
    headers_2 = self._get_header_from_reader(vcf.Reader(fsock=iter(file_b)))
    combiner_fn = self._get_combiner_fn()
    accumulator = combiner_fn.create_accumulator()
    accumulator = combiner_fn.add_input(accumulator, headers_1)
    accumulator = combiner_fn.add_input(accumulator, headers_2)
    merged_headers = combiner_fn.extract_output(accumulator)
    self.assertItemsEqual(merged_headers.infos.keys(), ['NS'])
    self.assertItemsEqual(
        merged_headers.infos['NS'],
        OrderedDict([('id', 'NS'),
                     ('num', '.'),
                     ('type', 'Integer'),
                     ('desc', 'Number samples'),
                     ('source', None),
                     ('version', None)]))
def test_combine_two_num_conflicting_but_resolvable_headers_3(self):
    """`Number=A` vs dot merges when allele-splitting is off.

    The two headers have incompatible Number fields (A vs dot), but
    `Number=A` is compatible with dot when the
    split_alternate_allele_info_fields flag is off.
    """
    file_a = [
        '##fileformat=VCFv4.2\n',
        '##INFO=<ID=NS,Number=A,Type=Integer,Description="Number samples">\n',
        '#CHROM POS ID REF ALT QUAL FILTER INFO FORMAT Sample1 Sample2\n']
    file_b = [
        '##fileformat=VCFv4.2\n',
        '##INFO=<ID=NS,Number=.,Type=Integer,Description="Number samples">\n',
        '#CHROM POS ID REF ALT QUAL FILTER INFO FORMAT Sample3\n']
    headers_1 = self._get_header_from_reader(vcf.Reader(fsock=iter(file_a)))
    headers_2 = self._get_header_from_reader(vcf.Reader(fsock=iter(file_b)))
    combiner_fn = self._get_combiner_fn(
        split_alternate_allele_info_fields=False)
    accumulator = combiner_fn.create_accumulator()
    accumulator = combiner_fn.add_input(accumulator, headers_1)
    accumulator = combiner_fn.add_input(accumulator, headers_2)
    merged_headers = combiner_fn.extract_output(accumulator)
    self.assertItemsEqual(merged_headers.infos.keys(), ['NS'])
    self.assertItemsEqual(
        merged_headers.infos['NS'],
        OrderedDict([('id', 'NS'),
                     ('num', '.'),
                     ('type', 'Integer'),
                     ('desc', 'Number samples'),
                     ('source', None),
                     ('version', None)]))
def test_combine_two_num_conflicting_but_not_resolvable_headers(self):
    """`Number=A` vs dot raises when allele-splitting is on.

    The two headers have incompatible Number fields (A vs dot);
    `Number=A` is incompatible with dot when the
    split_alternate_allele_info_fields flag is set (the default combiner
    configuration here), so the merge must raise ValueError.
    """
    file_a = [
        '##fileformat=VCFv4.2\n',
        '##INFO=<ID=NS,Number=A,Type=Integer,Description="Number samples">\n',
        '#CHROM POS ID REF ALT QUAL FILTER INFO FORMAT Sample1 Sample2\n']
    file_b = [
        '##fileformat=VCFv4.2\n',
        '##INFO=<ID=NS,Number=.,Type=Integer,Description="Number samples">\n',
        '#CHROM POS ID REF ALT QUAL FILTER INFO FORMAT Sample3\n']
    headers_1 = self._get_header_from_reader(vcf.Reader(fsock=iter(file_a)))
    headers_2 = self._get_header_from_reader(vcf.Reader(fsock=iter(file_b)))
    combiner_fn = self._get_combiner_fn()
    with self.assertRaises(ValueError):
        accumulator = combiner_fn.create_accumulator()
        accumulator = combiner_fn.add_input(accumulator, headers_1)
        accumulator = combiner_fn.add_input(accumulator, headers_2)
        combiner_fn.extract_output(accumulator)
def test_combine_two_headers_with_bad_conflict(self):
    """A type mismatch (String vs Float) cannot be resolved and raises."""
    file_a = [
        '##fileformat=VCFv4.2\n',
        '##INFO=<ID=NS,Number=1,Type=String,Description="Number samples">\n',
        '#CHROM POS ID REF ALT QUAL FILTER INFO FORMAT Sample1 Sample2\n']
    file_b = [
        '##fileformat=VCFv4.2\n',
        '##INFO=<ID=NS,Number=1,Type=Float,Description="Number samples">\n',
        '#CHROM POS ID REF ALT QUAL FILTER INFO FORMAT Sample3\n']
    headers_1 = self._get_header_from_reader(vcf.Reader(fsock=iter(file_a)))
    headers_2 = self._get_header_from_reader(vcf.Reader(fsock=iter(file_b)))
    combiner_fn = self._get_combiner_fn()
    with self.assertRaises(ValueError):
        accumulator = combiner_fn.create_accumulator()
        accumulator = combiner_fn.add_input(accumulator, headers_1)
        accumulator = combiner_fn.add_input(accumulator, headers_2)
        combiner_fn.extract_output(accumulator)
def test_combine_pipeline(self):
    """End-to-end header merge through a Beam pipeline.

    The expected output is computed by applying HeaderMerger directly to
    the two headers; the pipeline's MergeHeaders transform must produce
    the same result.
    """
    vcf_reader_1 = vcf.Reader(fsock=iter(FILE_1_LINES))
    vcf_reader_2 = vcf.Reader(fsock=iter(FILE_2_LINES))
    headers_1 = self._get_header_from_reader(vcf_reader_1)
    headers_2 = self._get_header_from_reader(vcf_reader_2)
    # TODO(nmousavi): Either use TestPipeline or combiner_fn.* everywhere.
    # After moving out _HeaderMerger to its file, it makes sense to use
    # TestPipeline everywhere.
    header_merger = HeaderMerger(
        vcf_field_conflict_resolver.FieldConflictResolver(
            split_alternate_allele_info_fields=True))
    # Build the expected result by merging both headers into an empty one.
    expected = vcf_header_io.VcfHeader()
    header_merger.merge(expected, headers_1)
    header_merger.merge(expected, headers_2)
    pipeline = TestPipeline()
    merged_headers = (
        pipeline
        | Create([headers_1, headers_2])
        | 'MergeHeaders' >> merge_headers.MergeHeaders())
    # NOTE(review): no pipeline.run() is visible in this view — assert_that
    # only registers the check; confirm the pipeline is executed elsewhere.
    assert_that(merged_headers, equal_to([expected]))
| 45.4375 | 80 | 0.671002 | 2,055 | 15,994 | 4.928467 | 0.110462 | 0.112954 | 0.074645 | 0.081754 | 0.809735 | 0.791173 | 0.75158 | 0.746544 | 0.729463 | 0.716331 | 0 | 0.016315 | 0.218207 | 15,994 | 351 | 81 | 45.566952 | 0.793666 | 0.112292 | 0 | 0.721569 | 0 | 0.054902 | 0.211322 | 0.105732 | 0 | 0 | 0 | 0.002849 | 0.078431 | 1 | 0.05098 | false | 0 | 0.043137 | 0 | 0.105882 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
726b068c83fdc94b0a186580f588139e79fb6608 | 9,849 | py | Python | transform.py | NJUVISION/PCGCv1 | 1ad6e5e50823c5a57fb523db7853b6301e3c1d0c | [
"Apache-2.0"
] | 29 | 2020-10-19T15:23:22.000Z | 2022-03-12T09:01:26.000Z | transform.py | xtorker/PCGCv1 | 3f73a234f8706779a88e615150afca77c028ce1f | [
"Apache-2.0"
] | 4 | 2021-04-28T11:43:56.000Z | 2022-03-12T14:31:22.000Z | transform.py | xtorker/PCGCv1 | 3f73a234f8706779a88e615150afca77c028ce1f | [
"Apache-2.0"
] | 11 | 2020-10-10T06:04:12.000Z | 2022-02-16T13:51:15.000Z | # Copyright (c) Nanjing University, Vision Lab.
# Last update:
# 2020.11.26
# 2019.11.13
# 2019.10.27
# 2019.10.07
# 2019.10.08
import os
import argparse
import numpy as np
import tensorflow as tf
import time
import importlib
import subprocess
tf.enable_eager_execution()
import models.model_voxception as model
from models.entropy_model import EntropyBottleneck
from models.conditional_entropy_model import SymmetricConditional
################### Compression Network (with factorized entropy model) ###################
def compress_factorized(cubes, model, ckpt_dir):
    """Compress cubes to bitstream.
    Input: cubes with shape [batch size, length, width, height, channel(1)].
    Output: compressed bitstream.
    """
    print('===== Compress =====')
    # Build the analysis network and the factorized entropy model, then
    # restore their weights from the latest checkpoint in ckpt_dir.
    analysis_transform = model.AnalysisTransform()
    entropy_bottleneck = EntropyBottleneck()
    ckpt = tf.train.Checkpoint(analysis_transform=analysis_transform,
                               estimator=entropy_bottleneck)
    ckpt.restore(tf.train.latest_checkpoint(ckpt_dir))
    voxels = tf.convert_to_tensor(cubes, "float32")

    def _analyze_one(cube):
        # Add a batch axis for the network call, then strip it again.
        return tf.squeeze(analysis_transform(tf.expand_dims(cube, 0)))

    tic = time.time()
    ys = tf.map_fn(_analyze_one, voxels, dtype=tf.float32,
                   parallel_iterations=1, back_prop=False)
    print("Analysis Transform: {}s".format(round(time.time()-tic, 4)))

    tic = time.time()
    strings, min_v, max_v = entropy_bottleneck.compress(ys)
    shape = tf.shape(ys)[:]
    print("Entropy Encode: {}s".format(round(time.time()-tic, 4)))
    return strings, min_v, max_v, shape
def decompress_factorized(strings, min_v, max_v, shape, model, ckpt_dir):
    """Decompress bitstream to cubes.
    Input: compressed bitstream.
    Output: cubes with shape [batch size, length, width, height, channel(1)]
    """
    print('===== Decompress =====')
    # Build the synthesis network and the factorized entropy model, then
    # restore their weights from the latest checkpoint in ckpt_dir.
    synthesis_transform = model.SynthesisTransform()
    entropy_bottleneck = EntropyBottleneck()
    ckpt = tf.train.Checkpoint(synthesis_transform=synthesis_transform,
                               estimator=entropy_bottleneck)
    ckpt.restore(tf.train.latest_checkpoint(ckpt_dir))

    tic = time.time()
    ys = entropy_bottleneck.decompress(strings, min_v, max_v, shape, shape[-1])
    print("Entropy Decode: {}s".format(round(time.time()-tic, 4)))

    def _synthesize_one(latent):
        # Add a batch axis for the network call, then strip it again.
        return tf.squeeze(synthesis_transform(tf.expand_dims(latent, 0)), [0])

    tic = time.time()
    xs = tf.map_fn(_synthesize_one, ys, dtype=tf.float32,
                   parallel_iterations=1, back_prop=False)
    print("Synthesis Transform: {}s".format(round(time.time()-tic, 4)))
    return xs
################### Compression Network (with conditional entropy model) ###################
def compress_hyper(cubes, model, ckpt_dir, decompress=False):
    """Compress cubes to bitstream.
    Input: cubes with shape [batch size, length, width, height, channel(1)].
    Output: compressed bitstream.

    If `decompress` is True, also runs the decoder path on the freshly
    encoded latents and additionally returns the reconstructed cubes.
    """
    print('===== Compress =====')
    # load model.
    #model = importlib.import_module(model)
    analysis_transform = model.AnalysisTransform()
    synthesis_transform = model.SynthesisTransform()
    hyper_encoder = model.HyperEncoder()
    hyper_decoder = model.HyperDecoder()
    entropy_bottleneck = EntropyBottleneck()
    conditional_entropy_model = SymmetricConditional()
    # Restore every sub-network from the latest checkpoint in ckpt_dir.
    checkpoint = tf.train.Checkpoint(analysis_transform=analysis_transform,
                                     synthesis_transform=synthesis_transform,
                                     hyper_encoder=hyper_encoder,
                                     hyper_decoder=hyper_decoder,
                                     estimator=entropy_bottleneck)
    status = checkpoint.restore(tf.train.latest_checkpoint(ckpt_dir))
    x = tf.convert_to_tensor(cubes, "float32")

    def loop_analysis(x):
        # One cube at a time (batch of 1): add the batch axis, run the
        # analysis network, then strip the batch axis again.
        x = tf.expand_dims(x, 0)
        y = analysis_transform(x)
        return tf.squeeze(y)

    start = time.time()
    ys = tf.map_fn(loop_analysis, x, dtype=tf.float32, parallel_iterations=1, back_prop=False)
    print("Analysis Transform: {}s".format(round(time.time()-start, 4)))

    def loop_hyper_encoder(y):
        # Encode one latent into its hyperprior (batch of 1).
        y = tf.expand_dims(y, 0)
        z = hyper_encoder(y)
        return tf.squeeze(z)

    start = time.time()
    zs = tf.map_fn(loop_hyper_encoder, ys, dtype=tf.float32, parallel_iterations=1, back_prop=False)
    print("Hyper Encoder: {}s".format(round(time.time()-start, 4)))
    # Quantize the hyperprior (training=False path of the bottleneck).
    z_hats, _ = entropy_bottleneck(zs, False)
    print("Quantize hyperprior.")

    def loop_hyper_deocder(z):
        # Decode the hyperprior into per-element (loc, scale) parameters
        # of the conditional entropy model.
        z = tf.expand_dims(z, 0)
        loc, scale = hyper_decoder(z)
        return tf.squeeze(loc, [0]), tf.squeeze(scale, [0])

    start = time.time()
    locs, scales = tf.map_fn(loop_hyper_deocder, z_hats, dtype=(tf.float32, tf.float32),
                             parallel_iterations=1, back_prop=False)
    # Clamp scales away from zero for numerical stability.
    lower_bound = 1e-9  # TODO: make this configurable.
    scales = tf.maximum(scales, lower_bound)
    print("Hyper Decoder: {}s".format(round(time.time()-start, 4)))

    start = time.time()
    z_strings, z_min_v, z_max_v = entropy_bottleneck.compress(zs)
    z_shape = tf.shape(zs)[:]
    print("Entropy Encode (Hyper): {}s".format(round(time.time()-start, 4)))

    start = time.time()
    # y_strings, y_min_v, y_max_v = conditional_entropy_model.compress(ys, locs, scales)
    # y_shape = tf.shape(ys)[:]
    def loop_range_encode(args):
        # Range-encode one latent with its own (loc, scale) parameters.
        y, loc, scale = args
        y = tf.expand_dims(y, 0)
        loc = tf.expand_dims(loc, 0)
        scale = tf.expand_dims(scale, 0)
        y_string, y_min_v, y_max_v = conditional_entropy_model.compress(y, loc, scale)
        return y_string, y_min_v, y_max_v

    args = (ys, locs, scales)
    y_strings, y_min_vs, y_max_vs = tf.map_fn(loop_range_encode, args,
                                              dtype=(tf.string, tf.int32, tf.int32),
                                              parallel_iterations=1, back_prop=False)
    # Per-item shape: prepend a batch dim of 1 to the single-latent shape.
    y_shape = tf.convert_to_tensor(np.insert(tf.shape(ys)[1:].numpy(), 0, 1))
    print("Entropy Encode: {}s".format(round(time.time()-start, 4)))

    if decompress:
        # Optional round-trip: decode the just-encoded strings and run the
        # synthesis network, returning the reconstruction as well.
        start = time.time()

        def loop_range_decode(args):
            y_string, loc, scale, y_min_v, y_max_v = args
            loc = tf.expand_dims(loc, 0)
            scale = tf.expand_dims(scale, 0)
            y_decoded = conditional_entropy_model.decompress(y_string, loc, scale, y_min_v, y_max_v, y_shape)
            return tf.squeeze(y_decoded, 0)

        args = (y_strings, locs, scales, y_min_vs, y_max_vs)
        y_decodeds = tf.map_fn(loop_range_decode, args, dtype=tf.float32, parallel_iterations=1, back_prop=False)
        print("Entropy Decode: {}s".format(round(time.time()-start, 4)))

        def loop_synthesis(y):
            y = tf.expand_dims(y, 0)
            x = synthesis_transform(y)
            return tf.squeeze(x, [0])

        start = time.time()
        x_decodeds = tf.map_fn(loop_synthesis, y_decodeds, dtype=tf.float32, parallel_iterations=1, back_prop=False)
        print("Synthesis Transform: {}s".format(round(time.time()-start, 4)))
        return y_strings, y_min_vs, y_max_vs, y_shape, z_strings, z_min_v, z_max_v, z_shape, x_decodeds

    return y_strings, y_min_vs, y_max_vs, y_shape, z_strings, z_min_v, z_max_v, z_shape
def decompress_hyper(y_strings, y_min_vs, y_max_vs, y_shape, z_strings, z_min_v, z_max_v, z_shape, model, ckpt_dir):
    """Decompress bitstream to cubes.
    Input: compressed bitstream. latent representations (y) and hyper prior (z).
    Output: cubes with shape [batch size, length, width, height, channel(1)]
    """
    print('===== Decompress =====')
    # load model.
    #model = importlib.import_module(model)
    synthesis_transform = model.SynthesisTransform()
    hyper_encoder = model.HyperEncoder()
    hyper_decoder = model.HyperDecoder()
    entropy_bottleneck = EntropyBottleneck()
    conditional_entropy_model = SymmetricConditional()
    # Restore the decoder-side sub-networks from the latest checkpoint.
    # (hyper_encoder is included only so the checkpoint mapping matches.)
    checkpoint = tf.train.Checkpoint(synthesis_transform=synthesis_transform,
                                     hyper_encoder=hyper_encoder,
                                     hyper_decoder=hyper_decoder,
                                     estimator=entropy_bottleneck)
    status = checkpoint.restore(tf.train.latest_checkpoint(ckpt_dir))

    start = time.time()
    # First decode the hyperprior z from its factorized bitstream.
    zs = entropy_bottleneck.decompress(z_strings, z_min_v, z_max_v, z_shape, z_shape[-1])
    print("Entropy Decoder (Hyper): {}s".format(round(time.time()-start, 4)))

    def loop_hyper_deocder(z):
        # Decode the hyperprior into per-element (loc, scale) parameters
        # of the conditional entropy model (batch of 1 per call).
        z = tf.expand_dims(z, 0)
        loc, scale = hyper_decoder(z)
        return tf.squeeze(loc, [0]), tf.squeeze(scale, [0])

    start = time.time()
    locs, scales = tf.map_fn(loop_hyper_deocder, zs, dtype=(tf.float32, tf.float32),
                             parallel_iterations=1, back_prop=False)
    # Clamp scales away from zero for numerical stability.
    lower_bound = 1e-9  # TODO: make this configurable.
    scales = tf.maximum(scales, lower_bound)
    print("Hyper Decoder: {}s".format(round(time.time()-start, 4)))

    start = time.time()
    # ys = conditional_entropy_model.decompress(y_strings, locs, scales, y_min_v, y_max_v, y_shape)
    def loop_range_decode(args):
        # Range-decode one latent with its own (loc, scale) parameters.
        y_string, loc, scale, y_min_v, y_max_v = args
        loc = tf.expand_dims(loc, 0)
        scale = tf.expand_dims(scale, 0)
        y_decoded = conditional_entropy_model.decompress(y_string, loc, scale, y_min_v, y_max_v, y_shape)
        return tf.squeeze(y_decoded, 0)

    args = (y_strings, locs, scales, y_min_vs, y_max_vs)
    ys = tf.map_fn(loop_range_decode, args, dtype=tf.float32, parallel_iterations=1, back_prop=False)
    print("Entropy Decoder: {}s".format(round(time.time()-start, 4)))

    def loop_synthesis(y):
        # Reconstruct one cube from its latent (batch of 1 per call).
        y = tf.expand_dims(y, 0)
        x = synthesis_transform(y)
        return tf.squeeze(x, [0])

    start = time.time()
    xs = tf.map_fn(loop_synthesis, ys, dtype=tf.float32, parallel_iterations=1, back_prop=False)
    print("Synthesis Transform: {}s".format(round(time.time()-start, 4)))
    return xs
| 37.735632 | 116 | 0.68342 | 1,365 | 9,849 | 4.704029 | 0.104762 | 0.037377 | 0.028033 | 0.037377 | 0.829466 | 0.803302 | 0.784613 | 0.777293 | 0.748326 | 0.748326 | 0 | 0.016774 | 0.182861 | 9,849 | 260 | 117 | 37.880769 | 0.781064 | 0.133008 | 0 | 0.71345 | 0 | 0 | 0.05255 | 0 | 0 | 0 | 0 | 0.007692 | 0 | 1 | 0.087719 | false | 0 | 0.05848 | 0 | 0.239766 | 0.116959 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
72e1a6bef4599812c3144db92f43ed1120f3ebbb | 16,090 | py | Python | end_of_month/test/test_end_of_month_process.py | uktrade/fadmin2 | 0f774400fb816c9ca30e30b25ae542135966e185 | [
"MIT"
] | null | null | null | end_of_month/test/test_end_of_month_process.py | uktrade/fadmin2 | 0f774400fb816c9ca30e30b25ae542135966e185 | [
"MIT"
] | 6 | 2022-01-17T16:14:53.000Z | 2022-03-30T08:31:56.000Z | end_of_month/test/test_end_of_month_process.py | uktrade/fadmin2 | 0f774400fb816c9ca30e30b25ae542135966e185 | [
"MIT"
] | null | null | null | from django.db.models import F
from django.test import TestCase
from end_of_month.end_of_month_actions import (
ArchiveMonthAlreadyArchivedError,
ArchiveMonthArchivedPastError,
ArchiveMonthInvalidPeriodError,
end_of_month_archive,
)
from end_of_month.models import (
MonthlyTotalBudget,
forecast_budget_view_model,
)
from end_of_month.test.test_utils import (
MonthlyFigureSetup,
)
from forecast.models import (
BudgetMonthlyFigure,
ForecastMonthlyFigure,
)
class EndOfMonthForecastTest(TestCase):
    """Tests for archiving monthly forecast figures.

    Covers the period-validation errors and, month by month, that each
    archive stores only forecast figures (not actuals): the cumulative
    ForecastMonthlyFigure row count after each archive grows by one row
    per remaining forecast month.
    """

    def setUp(self):
        self.init_data = MonthlyFigureSetup()
        self.init_data.setup_forecast()

    def test_error_invalid_period(self):
        # Periods outside the valid range must be rejected.
        with self.assertRaises(ArchiveMonthInvalidPeriodError):
            end_of_month_archive(16)
        with self.assertRaises(ArchiveMonthInvalidPeriodError):
            end_of_month_archive(0)

    def test_error_already_archived_period(self):
        # Archiving the same period twice must be rejected.
        period = 5
        end_of_month_archive(period)
        with self.assertRaises(ArchiveMonthAlreadyArchivedError):
            end_of_month_archive(period)

    def test_error_early_archived_period(self):
        # Archiving a period earlier than one already archived is invalid.
        period = 5
        end_of_month_archive(period)
        with self.assertRaises(ArchiveMonthArchivedPastError):
            end_of_month_archive(period - 1)

    def _archive_and_check_count(self, period, expected_count):
        # Helper: archive `period` and assert the cumulative row count.
        end_of_month_archive(period)
        count = ForecastMonthlyFigure.objects.all().count()
        self.assertEqual(count, expected_count)

    # The following tests test_end_of_month_xxx check that only forecast is
    # saved, not actuals. This is tested by counting the records saved in
    # the period tested.
    def test_end_of_month_apr(self):
        # 15 figures exist before any archive (12 months + 3 adjustments).
        count = ForecastMonthlyFigure.objects.all().count()
        self.assertEqual(count, 15)
        self._archive_and_check_count(1, 30)

    def test_end_of_month_may(self):
        self.test_end_of_month_apr()
        self._archive_and_check_count(2, 44)

    def test_end_of_month_jun(self):
        self.test_end_of_month_may()
        self._archive_and_check_count(3, 57)

    def test_end_of_month_jul(self):
        self.test_end_of_month_jun()
        self._archive_and_check_count(4, 69)

    def test_end_of_month_aug(self):
        self.test_end_of_month_jul()
        self._archive_and_check_count(5, 80)

    def test_end_of_month_sep(self):
        self.test_end_of_month_aug()
        self._archive_and_check_count(6, 90)

    def test_end_of_month_oct(self):
        self.test_end_of_month_sep()
        self._archive_and_check_count(7, 99)

    def test_end_of_month_nov(self):
        self.test_end_of_month_oct()
        self._archive_and_check_count(8, 107)

    def test_end_of_month_dec(self):
        self.test_end_of_month_nov()
        self._archive_and_check_count(9, 114)

    def test_end_of_month_jan(self):
        self.test_end_of_month_dec()
        self._archive_and_check_count(10, 120)

    def test_end_of_month_feb(self):
        self.test_end_of_month_jan()
        self._archive_and_check_count(11, 125)

    def test_end_of_month_mar(self):
        self.test_end_of_month_feb()
        self._archive_and_check_count(12, 129)
class ReadArchivedForecastTest(TestCase):
    """Tests that archived forecast totals are immutable.

    Each test archives one more month than the previous one, mutates the
    current figures, and checks that no previously archived total changed.
    """

    def setUp(self):
        self.init_data = MonthlyFigureSetup()
        self.init_data.setup_forecast()
        # Use a per-instance list. The original class-level
        # `archived_figure = []` was shared between tests and grew by 16
        # entries on every setUp, keeping stale values across test runs.
        self.archived_figure = [0] * 16

    def get_period_total(self, period):
        # Sum all monthly columns of the view for the given archived
        # period; period 0 is the current (unarchived) data.
        data_model = forecast_budget_view_model[period]
        tot_q = data_model.objects.filter(
            financial_year=self.init_data.year_used
        ).annotate(
            total=F("apr")
            + F("may")
            + F("jun")
            + F("jul")
            + F("aug")
            + F("sep")
            + F("oct")
            + F("nov")
            + F("dec")
            + F("jan")
            + F("feb")
            + F("mar")
            + F("adj1")
            + F("adj2")
            + F("adj3")
        )
        return tot_q[0].total

    def get_current_total(self):
        return self.get_period_total(0)

    def check_archive_period(self, tested_period):
        total_before = self.get_current_total()
        end_of_month_archive(tested_period)
        # The archived total must equal the current total at archive time.
        archived_total = self.get_period_total(tested_period)
        self.assertEqual(total_before, archived_total)
        # Mutate the current figures; the archive must not change.
        change_amount = tested_period * 10000
        self.init_data.monthly_figure_update(tested_period + 1, change_amount)
        current_total = self.get_current_total()
        self.archived_figure[tested_period] = archived_total
        self.assertNotEqual(current_total, archived_total)
        self.assertEqual(current_total, (archived_total + change_amount))
        # Every earlier archive must also be untouched.
        for period in range(1, tested_period + 1):
            self.assertEqual(
                self.archived_figure[period], self.get_period_total(period)
            )

    # The following tests check that the archived figures are not changed
    # by changing the current figures.
    def test_read_archived_figure_apr(self):
        self.check_archive_period(1)

    def test_read_archived_figure_may(self):
        self.test_read_archived_figure_apr()
        self.check_archive_period(2)

    def test_read_archived_figure_jun(self):
        self.test_read_archived_figure_may()
        self.check_archive_period(3)

    def test_read_archived_figure_jul(self):
        self.test_read_archived_figure_jun()
        self.check_archive_period(4)

    def test_read_archived_figure_aug(self):
        self.test_read_archived_figure_jul()
        self.check_archive_period(5)

    def test_read_archived_figure_sep(self):
        self.test_read_archived_figure_aug()
        self.check_archive_period(6)

    def test_read_archived_figure_oct(self):
        self.test_read_archived_figure_sep()
        self.check_archive_period(7)

    def test_read_archived_figure_nov(self):
        self.test_read_archived_figure_oct()
        self.check_archive_period(8)

    def test_read_archived_figure_dec(self):
        self.test_read_archived_figure_nov()
        self.check_archive_period(9)

    def test_read_archived_figure_jan(self):
        self.test_read_archived_figure_dec()
        self.check_archive_period(10)

    def test_read_archived_figure_feb(self):
        self.test_read_archived_figure_jan()
        self.check_archive_period(11)

    def test_read_archived_figure_mar(self):
        self.test_read_archived_figure_feb()
        self.check_archive_period(12)
class EndOfMonthBudgetTest(TestCase):
    """Tests for archiving monthly budget figures.

    Month by month, checks that each archive stores only forecast-period
    budget figures and writes exactly one MonthlyTotalBudget row per
    archived period.
    """

    def setUp(self):
        self.init_data = MonthlyFigureSetup()
        self.init_data.setup_budget()

    def _archive_and_check_counts(self, period, expected_figure_count):
        # Helper: archive `period`, then assert the cumulative
        # BudgetMonthlyFigure count and that one MonthlyTotalBudget row
        # exists per archived period.
        end_of_month_archive(period)
        count = BudgetMonthlyFigure.objects.all().count()
        self.assertEqual(count, expected_figure_count)
        budget_total_count = MonthlyTotalBudget.objects.all().count()
        self.assertEqual(budget_total_count, period)

    # The following tests test_end_of_month_xxx check that only forecast is
    # saved, not actuals. This is tested by counting the records saved in
    # the period tested.
    def test_end_of_month_apr(self):
        # Before any archive: 15 budget figures and no archived totals.
        count = BudgetMonthlyFigure.objects.all().count()
        self.assertEqual(count, 15)
        budget_total_count = MonthlyTotalBudget.objects.all().count()
        self.assertEqual(budget_total_count, 0)
        self._archive_and_check_counts(1, 30)

    def test_end_of_month_may(self):
        self.test_end_of_month_apr()
        self._archive_and_check_counts(2, 44)

    def test_end_of_month_jun(self):
        self.test_end_of_month_may()
        self._archive_and_check_counts(3, 57)

    def test_end_of_month_jul(self):
        self.test_end_of_month_jun()
        self._archive_and_check_counts(4, 69)

    def test_end_of_month_aug(self):
        self.test_end_of_month_jul()
        self._archive_and_check_counts(5, 80)

    def test_end_of_month_sep(self):
        self.test_end_of_month_aug()
        self._archive_and_check_counts(6, 90)

    def test_end_of_month_oct(self):
        self.test_end_of_month_sep()
        self._archive_and_check_counts(7, 99)

    def test_end_of_month_nov(self):
        self.test_end_of_month_oct()
        self._archive_and_check_counts(8, 107)

    def test_end_of_month_dec(self):
        self.test_end_of_month_nov()
        self._archive_and_check_counts(9, 114)

    def test_end_of_month_jan(self):
        self.test_end_of_month_dec()
        self._archive_and_check_counts(10, 120)

    def test_end_of_month_feb(self):
        self.test_end_of_month_jan()
        self._archive_and_check_counts(11, 125)

    def test_end_of_month_mar(self):
        self.test_end_of_month_feb()
        self._archive_and_check_counts(12, 129)
class ReadArchivedBudgetTest(TestCase):
archived_figure = []
def setUp(self):
self.init_data = MonthlyFigureSetup()
self.init_data.setup_budget()
for period in range(0, 16):
self.archived_figure.append(0)
def get_period_budget_total(self, period):
data_model = forecast_budget_view_model[period]
tot_q = data_model.objects.filter(financial_year=self.init_data.year_used)
return tot_q[0].budget
def get_current_budget_total(self):
return self.get_period_budget_total(0)
def check_archive_period(self, tested_period):
total_before = self.get_current_budget_total()
end_of_month_archive(tested_period)
# run a query giving the full total
archived_total = self.get_period_budget_total(tested_period)
self.assertEqual(total_before, archived_total)
change_amount = tested_period * 10000
self.init_data.monthly_figure_update(tested_period + 1, change_amount, "budget")
current_total = self.get_current_budget_total()
self.archived_figure[tested_period] = archived_total
self.assertNotEqual(current_total, archived_total)
self.assertNotEqual(current_total, archived_total)
self.assertEqual(current_total, (archived_total + change_amount))
for period in range(1, tested_period + 1):
# Check the full total. It is saved in a different table, for convenience
monthly_budget = MonthlyTotalBudget.objects.get(
archived_period=period)
self.assertEqual(
self.archived_figure[period], monthly_budget.amount
)
# Check that nothig has corrupted the archived figures
self.assertEqual(
self.archived_figure[period], self.get_period_budget_total(period)
)
# The following tests check that the archived figures are not changed by
# changing the current figures.
def test_read_archived_figure_apr(self):
tested_period = 1
self.check_archive_period(tested_period)
def test_read_archived_figure_may(self):
tested_period = 2
self.test_read_archived_figure_apr()
self.check_archive_period(tested_period)
def test_read_archived_figure_jun(self):
tested_period = 3
self.test_read_archived_figure_may()
self.check_archive_period(tested_period)
def test_read_archived_figure_jul(self):
tested_period = 4
self.test_read_archived_figure_jun()
self.check_archive_period(tested_period)
def test_read_archived_figure_aug(self):
tested_period = 5
self.test_read_archived_figure_jul()
self.check_archive_period(tested_period)
def test_read_archived_figure_sep(self):
tested_period = 6
self.test_read_archived_figure_aug()
self.check_archive_period(tested_period)
def test_read_archived_figure_oct(self):
tested_period = 7
self.test_read_archived_figure_sep()
self.check_archive_period(tested_period)
def test_read_archived_figure_nov(self):
tested_period = 8
self.test_read_archived_figure_oct()
self.check_archive_period(tested_period)
def test_read_archived_figure_dec(self):
tested_period = 9
self.test_read_archived_figure_nov()
self.check_archive_period(tested_period)
def test_read_archived_figure_jan(self):
tested_period = 10
self.test_read_archived_figure_dec()
self.check_archive_period(tested_period)
def test_read_archived_figure_feb(self):
tested_period = 11
self.test_read_archived_figure_jan()
self.check_archive_period(tested_period)
def test_read_archived_figure_mar(self):
    """Archived figures for March (period 12) are intact after current-figure changes.

    Re-runs the checks for every earlier period first, mirroring the
    cumulative archive verification done month by month.
    """
    for period in range(1, 13):
        self.check_archive_period(period)
| 35.915179 | 88 | 0.690429 | 2,009 | 16,090 | 5.142359 | 0.072673 | 0.041138 | 0.082277 | 0.065047 | 0.912303 | 0.893815 | 0.881134 | 0.875133 | 0.722776 | 0.712709 | 0 | 0.01408 | 0.227533 | 16,090 | 447 | 89 | 35.995526 | 0.817121 | 0.044251 | 0 | 0.758242 | 0 | 0 | 0.003514 | 0 | 0 | 0 | 0 | 0 | 0.145604 | 1 | 0.167582 | false | 0 | 0.016484 | 0.005495 | 0.211538 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
f42a919b4700f979d2c37ea141ce49147e333eed | 36,180 | py | Python | post_optimization_studies/mad_analyses/four_cuts_eff_flow_chart/Output/Histos/MadAnalysis5job_0/selection_0.py | sheride/axion_pheno | 7d3fc08f5ae5b17a3500eba19a2e43f87f076ce5 | [
"MIT"
] | null | null | null | post_optimization_studies/mad_analyses/four_cuts_eff_flow_chart/Output/Histos/MadAnalysis5job_0/selection_0.py | sheride/axion_pheno | 7d3fc08f5ae5b17a3500eba19a2e43f87f076ce5 | [
"MIT"
] | null | null | null | post_optimization_studies/mad_analyses/four_cuts_eff_flow_chart/Output/Histos/MadAnalysis5job_0/selection_0.py | sheride/axion_pheno | 7d3fc08f5ae5b17a3500eba19a2e43f87f076ce5 | [
"MIT"
] | null | null | null | def selection_0():
# Library import
import numpy
import matplotlib
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
# Library version
matplotlib_version = matplotlib.__version__
numpy_version = numpy.__version__
# Histo binning
xBinning = numpy.linspace(0.0,2000.0,201,endpoint=True)
# Creating data sequence: middle of each bin
xData = numpy.array([5.0,15.0,25.0,35.0,45.0,55.0,65.0,75.0,85.0,95.0,105.0,115.0,125.0,135.0,145.0,155.0,165.0,175.0,185.0,195.0,205.0,215.0,225.0,235.0,245.0,255.0,265.0,275.0,285.0,295.0,305.0,315.0,325.0,335.0,345.0,355.0,365.0,375.0,385.0,395.0,405.0,415.0,425.0,435.0,445.0,455.0,465.0,475.0,485.0,495.0,505.0,515.0,525.0,535.0,545.0,555.0,565.0,575.0,585.0,595.0,605.0,615.0,625.0,635.0,645.0,655.0,665.0,675.0,685.0,695.0,705.0,715.0,725.0,735.0,745.0,755.0,765.0,775.0,785.0,795.0,805.0,815.0,825.0,835.0,845.0,855.0,865.0,875.0,885.0,895.0,905.0,915.0,925.0,935.0,945.0,955.0,965.0,975.0,985.0,995.0,1005.0,1015.0,1025.0,1035.0,1045.0,1055.0,1065.0,1075.0,1085.0,1095.0,1105.0,1115.0,1125.0,1135.0,1145.0,1155.0,1165.0,1175.0,1185.0,1195.0,1205.0,1215.0,1225.0,1235.0,1245.0,1255.0,1265.0,1275.0,1285.0,1295.0,1305.0,1315.0,1325.0,1335.0,1345.0,1355.0,1365.0,1375.0,1385.0,1395.0,1405.0,1415.0,1425.0,1435.0,1445.0,1455.0,1465.0,1475.0,1485.0,1495.0,1505.0,1515.0,1525.0,1535.0,1545.0,1555.0,1565.0,1575.0,1585.0,1595.0,1605.0,1615.0,1625.0,1635.0,1645.0,1655.0,1665.0,1675.0,1685.0,1695.0,1705.0,1715.0,1725.0,1735.0,1745.0,1755.0,1765.0,1775.0,1785.0,1795.0,1805.0,1815.0,1825.0,1835.0,1845.0,1855.0,1865.0,1875.0,1885.0,1895.0,1905.0,1915.0,1925.0,1935.0,1945.0,1955.0,1965.0,1975.0,1985.0,1995.0])
# Creating weights for histo: y1_PT_0
y1_PT_0_weights = numpy.array([0.0,0.0,2.53014383058,6.14931472739,7.8401692776,8.80637644914,10.3825950976,10.8943586588,11.8646538269,13.2197966649,13.7602162016,15.0907950607,15.3405308466,15.8768583867,16.9208494915,16.8021215933,17.0395773897,17.0027294213,16.5810417829,16.732521653,16.9085655021,16.5974177689,16.6588257162,15.4633547412,15.7253785166,15.0252871169,15.0744190747,14.4439276153,14.198283826,13.772500191,13.571888363,12.6670971388,12.1184896092,11.9015017953,11.7623019146,11.3979302271,11.2914823183,10.7183108098,10.1451393012,10.0182234101,9.55149981025,9.56377979972,8.65080058254,8.63032860009,8.42152877912,8.09400505995,7.54539753035,7.27928175852,7.05410595159,6.73476622541,6.74295821838,6.76342620083,6.28851460804,6.16569071335,5.81769501173,5.711247103,5.1994875418,5.2772754751,4.95793574891,4.70410396656,4.58946806485,4.5689960824,4.20052839834,4.1759644194,4.09408448961,3.64782887224,3.74608678799,3.55775894947,3.41446587233,3.32030195307,3.2711731952,3.11969212508,2.74713044453,2.71437767261,2.84538836028,2.75531843751,2.50967344813,2.54242622005,2.22718169035,2.11664138513,2.1289237746,2.12482977811,2.0470418448,1.98153670097,2.0879830097,1.89965477118,1.64172779233,1.67857456074,1.58031624499,1.46568194328,1.40017679945,1.4984347152,1.32238926614,1.26916611178,1.46158794679,1.1831901855,1.24869572933,1.12996703113,1.0235211224,1.25278972582,1.08493226975,0.966203971547,0.998956343464,1.05217949783,0.798346515473,0.753311354088,0.851569269838,0.757405350578,0.802440511963,0.700088199723,0.839287280369,0.601830283972,0.618206669931,0.573171908545,0.601830283972,0.597736287483,0.499478371732,0.593642290993,0.532230743649,0.560889519076,0.474913592795,0.503572368222,0.41759644194,0.51585475769,0.384843910023,0.442161220877,0.376655717044,0.347997141617,0.380749793533,0.401220055982,0.249739105866,0.245645029376,0.376655717044,0.245645029376,0.3152444497,0.286585874273,0.221080530439,0.266115451824,0.245645029376,0.188327878522,0.26611545182
4,0.241550952887,0.245645029376,0.184233762032,0.180139685543,0.196516031501,0.155575186605,0.167857456074,0.171951532564,0.167857456074,0.10235211224,0.151481110115,0.114634341709,0.155575186605,0.139198840647,0.167857456074,0.118728418199,0.122822534688,0.147387033626,0.10644618873,0.10644618873,0.110540265219,0.10235211224,0.0777876133025,0.10235211224,0.0982579957506,0.094163919261,0.0818816897921,0.0573171908545,0.118728418199,0.0614112673441,0.0859757662818,0.0491289978753,0.0614112673441,0.0573171908545,0.0614112673441,0.0655053438337,0.0573171908545,0.0736934968129,0.0532230743649,0.0573171908545,0.0532230743649,0.0450349213857,0.0736934968129,0.0368467564065,0.0286585874273,0.020470418448,0.0327526719169,0.0491289978753,0.0450349213857,0.0491289978753,0.0368467564065,0.0450349213857,0.0532230743649,0.0286585874273,0.0368467564065,0.0163763339584,0.0409408448961,0.0245645029376,0.00818816897921])
# Creating weights for histo: y1_PT_1
y1_PT_1_weights = numpy.array([0.0,0.0,0.0,0.0242945760233,0.0121313846429,0.0121753353338,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y1_PT_2
y1_PT_2_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.010029957153,0.03010776731,0.100374402715,0.140581268548,0.170818356211,0.200754011796,0.120572665874,0.140442803998,0.100357791927,0.0703121828455,0.0501908168896,0.0200707939587,0.0100369733513,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y1_PT_3
y1_PT_3_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0440265968494,0.0714190116583,0.192486820862,0.253033928839,0.241983881145,0.319022290321,0.40163918686,0.362923028526,0.423526118819,0.412535628508,0.38512907593,0.401549485154,0.363071596977,0.263951008379,0.225517564798,0.236505211305,0.192500227367,0.258517514398,0.214471945313,0.187048126782,0.181531634347,0.115527185059,0.0989753108345,0.0880075303303,0.0385025602086,0.0549945804825,0.0110086744071,0.00549237291348,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y1_PT_4
y1_PT_4_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0108488119682,0.0414613444492,0.075997968177,0.091765376085,0.0996830302501,0.114480552378,0.114485923486,0.134204061732,0.159878197324,0.172663397855,0.18849461773,0.199356039777,0.195396851948,0.226980769309,0.227964002692,0.232896603595,0.243748285499,0.195404387532,0.199324053627,0.212199761333,0.193421927691,0.16084900501,0.138161646245,0.129248854077,0.11251212155,0.0858471769082,0.089793417962,0.0858520269383,0.0720215847871,0.0552633678288,0.0542835815744,0.0276452319325,0.0286292870151,0.0315763737603,0.0177626862586,0.0118390798994,0.00394825762426,0.000986137789768,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y1_PT_5
y1_PT_5_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00478977179329,0.00831996620615,0.0141167125109,0.0186577745275,0.0259574864965,0.0257089567703,0.0262221007353,0.0340277266408,0.0352918212886,0.0403339558834,0.0471417458413,0.0395736625795,0.0420940896974,0.0385639993137,0.0521718770686,0.0499146437911,0.0526832605397,0.0569631412806,0.0574715239098,0.059993431443,0.0667985406488,0.0544471153976,0.06477245222,0.0695822416293,0.0615176190611,0.0665427488852,0.0579731846531,0.0615109371864,0.051165714785,0.0534466747192,0.0436081944867,0.0375581371143,0.0337797370661,0.0302496866937,0.0267210727242,0.0247083440436,0.0204178243179,0.0168934115271,0.0176496716995,0.0126061607184,0.0113438265646,0.00756322990017,0.00756247768914,0.00453618864933,0.00252197275147,0.00277323084312,0.00100787996553,0.000505220942142,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y1_PT_6
y1_PT_6_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000573579003039,0.00200662327204,0.00257215860296,0.00344123208825,0.00315184483854,0.00429612267815,0.00486300461936,0.00715697473162,0.00600797725789,0.00858253416677,0.00744785354967,0.00973314817267,0.0100305071153,0.0100147816664,0.0123025195823,0.0114516458299,0.00943981106606,0.00974558157437,0.011752188851,0.0134687820594,0.0128842312317,0.0143118480714,0.0131714181182,0.0111702772595,0.0140338685202,0.0154488190257,0.0180269029138,0.0168956103162,0.012880522305,0.0157410944395,0.0151750582536,0.0188999602462,0.0146132308495,0.0160299108544,0.0180317115222,0.0151741385197,0.0191822085619,0.0165962069652,0.0128887899124,0.0148764746653,0.0137316659793,0.0151739085863,0.0151776175129,0.012320934253,0.00859001900062,0.00945148068881,0.00915677597799,0.00858633506677,0.00630845729729,0.00744668688731,0.00429260869512,0.00830201435077,0.00457743826385,0.00429356341881,0.00428837392068,0.00371916065418,0.00429277764622,0.00286840786677,0.00286086405,0.00200826079813,0.00229235957836,0.00229599352668,0.002861893752,0.00200710713201,0.00143139474638,0.00315261461576,0.00114727597194,0.000572313169379,0.00171289627827,0.00114435681676,0.000571167600913,0.000284635424921,0.000574087955746,0.00028727636062,0.000572367653611,0.000574901520296,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y1_PT_7
y1_PT_7_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.16292026894e-05,6.46606050088e-05,0.000215987763585,0.000172595666753,0.000302585459767,0.000410488921713,0.00058326815228,0.000432052640736,0.00045366246026,0.000583116020626,0.000496722843031,0.000496784450064,0.000475076143106,0.000928686635528,0.000561443755877,0.000561805435265,0.00043197091712,0.000885511839695,0.000885781737175,0.000647966014876,0.000777603139056,0.0007561718542,0.00060495047532,0.000669244329886,0.00047505267376,0.000820968204231,0.000842683216717,0.000864210474434,0.000775788036592,0.000648136167635,0.000712779128725,0.000711047007163,0.000885850049736,0.00069116386096,0.000647840286236,0.00082125947558,0.000516401889742,0.000647782031966,0.000540199806695,0.000712720455359,0.00056146387246,0.000496739606849,0.000864018947806,0.000820526896705,0.000691443397635,0.000733915370372,0.000820578445447,0.000712683994054,0.00073434536232,0.000777705817445,0.000905774267295,0.0010350945553,0.000907091484345,0.000864042836247,0.000907186619016,0.00103534140253,0.00110183257463,0.00129600998151,0.00123124380635,0.00101528097895,0.000755820233104,0.00096924459927,0.00112289463638,0.000712650885512,0.000777729705886,0.000734693630653,0.000475116376271,0.000691294618745,0.000691382628793,0.00056157660914,0.000648053605828,0.000561732093558,0.000647983616885,0.000367377576934,0.000237534299694,0.000323978774574,0.000495408978744,0.000237640079389,0.000172726340719,0.000237452785626,0.000259231542532,0.000194353636524,0.000172729148659,0.000171217345583,0.000302476914042,0.000171216926488,0.000151355950435,6.48758105307e-05,0.000151223222901,6.47704499305e-05,0.000129665413124,0.000108078308574,4.31353170403e-05,4.32729899009e-05,8.64352128701e-05,6.47910694274e-05,4.31142784479e-05,0.0,
0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y1_PT_8
y1_PT_8_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.83498695369e-05,0.0,8.51632887852e-05,8.51476236096e-05,2.83684895655e-05,8.51446687566e-05,2.83684895655e-05,0.0,2.83973996098e-05,0.0,0.0,0.000113625442737,2.84292645072e-05,2.83498695369e-05,0.000113272434317,2.84489090828e-05,5.68266789655e-05,2.84292645072e-05,0.000113140000477,5.68463086926e-05,8.52755880483e-05,2.84489090828e-05,0.0,5.66865981446e-05,0.0,8.50971386837e-05,2.83684895655e-05,2.83684895655e-05,0.0,0.0,5.65917607265e-05,5.66654241728e-05,2.84489090828e-05,2.83973996098e-05,2.84080905353e-05,0.0,0.0,8.52029046035e-05,0.0,2.84292645072e-05,0.0,5.68781735899e-05,5.67977540727e-05,2.84292645072e-05,2.84489090828e-05,0.0,8.51872245794e-05,5.66076337811e-05,0.0,2.83684895655e-05,2.84080905353e-05,2.84080905353e-05,8.4819055833e-05,2.83684895655e-05,2.83498695369e-05,5.68978181655e-05,5.68266789655e-05,2.83684895655e-05,0.0,5.67658891753e-05,0.000113664034008,8.41628705843e-05,5.68585290144e-05,5.67183442539e-05,0.000113516098327,2.84292645072e-05,8.52240785754e-05,5.67658891753e-05,0.000113672987658,0.000113644062765,5.65917607265e-05,5.65506600574e-05,0.00019874935328,2.83973996098e-05,0.000141956528912,8.5176533654e-05,0.000141992907755,0.000113496928903,0.0,5.66997242254e-05,8.51450696663e-05,5.68585290144e-05,0.000112208152684,5.68978181655e-05,8.5266307731e-05,2.84080905353e-05,5.68569996181e-05,5.68585290144e-05,2.83684895655e-05,8.50777019872e-05,0.0,2.84489090828e-05,0.000113507619829,5.51798903795e-05,5.67948140682e-05,8.52347695008e-05,2.70012835294e-05,0.0,2.83684895655e-05,2.83684895655e-05,0.0,2.81995283973e-05,0.0,0.0,5.59464594338e-05,2.83498695369e-05,8.47994112574e-05,2
.83498695369e-05,2.83684895655e-05,0.0,2.81624962193e-05,2.83684895655e-05,0.0,0.0,0.0])
# Creating weights for histo: y1_PT_9
y1_PT_9_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y1_PT_10
y1_PT_10_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,1.0521138287,0.0,0.0,0.0,0.0,0.0,1.05462838872,1.0529581672,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y1_PT_11
y1_PT_11_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.229982513746,0.460549425191,0.230838551855,0.0,0.0,0.921324801098,1.15244932663,0.69053190051,0.690450435286,1.15118046734,0.921344783134,1.38299821582,1.15191672852,1.1514275529,1.61272861007,1.61297146866,0.230186253659,0.922069131941,1.1512346494,0.230597153489,0.921089627905,0.689799866305,1.15227909505,0.230186253659,0.0,0.229703303219,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y1_PT_12
y1_PT_12_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0277219316548,0.0,0.0553645355296,0.110809814445,0.138585000575,0.138469944135,0.138339731481,0.221242569973,0.110792388645,0.166061447718,0.332157939373,0.193816246047,0.249232135754,0.276601720046,0.387606718835,0.609408683908,0.664523449641,0.858733411053,0.885938046494,0.498350176758,0.747774919531,0.526090665158,0.636935677411,0.498603293231,0.387666728212,0.304814128339,0.526296466546,0.276857760053,0.47074509413,0.249254293063,0.166103146541,0.221562235307,0.387702503033,0.332391514331,0.166083605026,0.193702420569,0.0554578578039,0.0553318381128,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y1_PT_13
y1_PT_13_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0100733687095,0.0,0.0100592768141,0.0302481867488,0.0201801100861,0.0403623867556,0.070516888496,0.030278015201,0.0403628844023,0.0302480714404,0.0302438717886,0.0403513899794,0.0301896343772,0.0402718878993,0.0504762529751,0.0402373560798,0.0805748726428,0.0604512029646,0.0403772433276,0.0907916181952,0.13099393874,0.0403336506985,0.050426852446,0.080606794852,0.201683000825,0.201703210132,0.211685030072,0.151239416529,0.181553074151,0.20151719954,0.201738712969,0.161305532949,0.171361028863,0.171291783158,0.131079873813,0.0907660682901,0.0908030883426,0.0806759191796,0.030254820014,0.0403699910386,0.0302922830934,0.0503890252349,0.0605671546243,0.0201159925689,0.0302180912667,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y1_PT_14
y1_PT_14_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00283014097859,0.00283622802146,0.00848593643005,0.00566119995758,0.0,0.0,0.00849010706053,0.0,0.00283198082683,0.0113290483003,0.00849156139477,0.0169728808907,0.0,0.00848547473664,0.0339465312704,0.0113223537458,0.00283102935366,0.00566144234662,0.00565920313358,0.0141524958786,0.0113169134585,0.0254398725012,0.0141599252951,0.0226320801768,0.00848447440092,0.0113068716268,0.0254709675524,0.0254482599314,0.0282739467229,0.0141355902049,0.0282883938792,0.0283085391016,0.0311393660797,0.0311202789046,0.0226267937873,0.0198247610883,0.0650449451446,0.0622187658802,0.0395929031747,0.0424473611604,0.0509238598077,0.0735889279787,0.0537828962531,0.0367882849926,0.0254731413588,0.0509195506692,0.0424358572996,0.0424481691239,0.0282779865402,0.0226214304488,0.019815007815,0.00564715293556,0.0311185206222,0.022617005887,0.0169766552343,0.00848829106645,0.0311226720155,0.0141516455933,0.0198126878056,0.0169582759892,0.0169702415434,0.0226411216728,0.00848803328762,0.0141507991554,0.014143465925,0.00282513583727,0.00848746001831,0.0084809232091,0.0113224884064,0.00566001879194,0.0,0.00846523717547,0.00849566277124,0.00565887994819,0.00282347951216,0.00849061877072,0.00282879014062,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y1_PT_15
y1_PT_15_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0015333359899,0.00154541021822,0.0015333359899,0.0015216293306,0.0,0.00153629545228,0.0,0.00303761393583,0.0,0.00152094974159,0.0,0.00152192834976,0.0,0.0,0.0,0.0,0.0,0.0,0.00306079323943,0.0015172740515,0.0,0.0,0.00150849612533,0.0,0.0,0.00150849612533,0.0,0.0,0.00152094974159,0.00153219782606,0.0,0.0,0.00153565722957,0.0,0.00152644436644,0.0,0.0,0.0,0.0,0.0,0.00304930523051,0.0,0.0,0.0,0.0,0.00152192834976,0.0015216293306,0.00152449660526,0.0015216293306,0.00761467307124,0.00457006468718,0.00456544230005,0.00456263412009,0.00151265402815,0.00151881878685,0.0,0.00151265402815,0.00152094974159,0.00458007414681,0.0015172740515,0.00304287809136,0.00306990800537,0.0015172740515,0.00150849612533,0.00304601838351,0.00302115015348,0.0,0.0,0.00152449660526,0.0,0.0,0.00455032824057,0.00305984417861,0.0,0.00152094974159,0.0,0.0,0.00306899321947,0.0,0.0,0.0,0.00303715063341,0.0,0.0,0.0,0.0,0.00152260675687,0.00153821484801,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y1_PT_16
y1_PT_16_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000180553027149,0.000180503364172,0.0,0.0,0.0,0.000180626135672,0.0,0.0,0.000180626135672,0.000180003616023,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000180626135672,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000180734316266,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000180612776716,0.0,0.000180970234659,0.000180547136889,0.00036137146249,0.000360347288698,0.000360644342601,0.000541809610317,0.000180533816432,0.0,0.000541722218876,0.000541292576373,0.000360811695286,0.000541685260381,0.0,0.000361160491081,0.0,0.000180734316266,0.0,0.000360153295099,0.0,0.0,0.000180547136889,0.0,0.000180503364172,0.0,0.0,0.0,0.000180766962937,0.0,0.000180766962937,0.0,0.000180686039232,0.000180970234659,0.0,0.0,0.000181168424588,0.0,0.000180640765076,0.000180003616023,0.0,0.0,0.0,0.0,0.0,0.000180970234659,0.000361583011375])
# Creating a new Canvas
fig = plt.figure(figsize=(12,6),dpi=80)
frame = gridspec.GridSpec(1,1,right=0.7)
pad = fig.add_subplot(frame[0])
# Creating a new Stack
pad.hist(x=xData, bins=xBinning, weights=y1_PT_0_weights+y1_PT_1_weights+y1_PT_2_weights+y1_PT_3_weights+y1_PT_4_weights+y1_PT_5_weights+y1_PT_6_weights+y1_PT_7_weights+y1_PT_8_weights+y1_PT_9_weights+y1_PT_10_weights+y1_PT_11_weights+y1_PT_12_weights+y1_PT_13_weights+y1_PT_14_weights+y1_PT_15_weights+y1_PT_16_weights,\
label="$bg\_dip\_1600\_inf$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#e5e5e5", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y1_PT_0_weights+y1_PT_1_weights+y1_PT_2_weights+y1_PT_3_weights+y1_PT_4_weights+y1_PT_5_weights+y1_PT_6_weights+y1_PT_7_weights+y1_PT_8_weights+y1_PT_9_weights+y1_PT_10_weights+y1_PT_11_weights+y1_PT_12_weights+y1_PT_13_weights+y1_PT_14_weights+y1_PT_15_weights,\
label="$bg\_dip\_1200\_1600$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#f2f2f2", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y1_PT_0_weights+y1_PT_1_weights+y1_PT_2_weights+y1_PT_3_weights+y1_PT_4_weights+y1_PT_5_weights+y1_PT_6_weights+y1_PT_7_weights+y1_PT_8_weights+y1_PT_9_weights+y1_PT_10_weights+y1_PT_11_weights+y1_PT_12_weights+y1_PT_13_weights+y1_PT_14_weights,\
label="$bg\_dip\_800\_1200$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#ccc6aa", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y1_PT_0_weights+y1_PT_1_weights+y1_PT_2_weights+y1_PT_3_weights+y1_PT_4_weights+y1_PT_5_weights+y1_PT_6_weights+y1_PT_7_weights+y1_PT_8_weights+y1_PT_9_weights+y1_PT_10_weights+y1_PT_11_weights+y1_PT_12_weights+y1_PT_13_weights,\
label="$bg\_dip\_600\_800$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#ccc6aa", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y1_PT_0_weights+y1_PT_1_weights+y1_PT_2_weights+y1_PT_3_weights+y1_PT_4_weights+y1_PT_5_weights+y1_PT_6_weights+y1_PT_7_weights+y1_PT_8_weights+y1_PT_9_weights+y1_PT_10_weights+y1_PT_11_weights+y1_PT_12_weights,\
label="$bg\_dip\_400\_600$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#c1bfa8", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y1_PT_0_weights+y1_PT_1_weights+y1_PT_2_weights+y1_PT_3_weights+y1_PT_4_weights+y1_PT_5_weights+y1_PT_6_weights+y1_PT_7_weights+y1_PT_8_weights+y1_PT_9_weights+y1_PT_10_weights+y1_PT_11_weights,\
label="$bg\_dip\_200\_400$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#bab5a3", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y1_PT_0_weights+y1_PT_1_weights+y1_PT_2_weights+y1_PT_3_weights+y1_PT_4_weights+y1_PT_5_weights+y1_PT_6_weights+y1_PT_7_weights+y1_PT_8_weights+y1_PT_9_weights+y1_PT_10_weights,\
label="$bg\_dip\_100\_200$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#b2a596", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y1_PT_0_weights+y1_PT_1_weights+y1_PT_2_weights+y1_PT_3_weights+y1_PT_4_weights+y1_PT_5_weights+y1_PT_6_weights+y1_PT_7_weights+y1_PT_8_weights+y1_PT_9_weights,\
label="$bg\_dip\_0\_100$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#b7a39b", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y1_PT_0_weights+y1_PT_1_weights+y1_PT_2_weights+y1_PT_3_weights+y1_PT_4_weights+y1_PT_5_weights+y1_PT_6_weights+y1_PT_7_weights+y1_PT_8_weights,\
label="$bg\_vbf\_1600\_inf$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#ad998c", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y1_PT_0_weights+y1_PT_1_weights+y1_PT_2_weights+y1_PT_3_weights+y1_PT_4_weights+y1_PT_5_weights+y1_PT_6_weights+y1_PT_7_weights,\
label="$bg\_vbf\_1200\_1600$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#9b8e82", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y1_PT_0_weights+y1_PT_1_weights+y1_PT_2_weights+y1_PT_3_weights+y1_PT_4_weights+y1_PT_5_weights+y1_PT_6_weights,\
label="$bg\_vbf\_800\_1200$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#876656", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y1_PT_0_weights+y1_PT_1_weights+y1_PT_2_weights+y1_PT_3_weights+y1_PT_4_weights+y1_PT_5_weights,\
label="$bg\_vbf\_600\_800$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#afcec6", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y1_PT_0_weights+y1_PT_1_weights+y1_PT_2_weights+y1_PT_3_weights+y1_PT_4_weights,\
label="$bg\_vbf\_400\_600$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#84c1a3", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y1_PT_0_weights+y1_PT_1_weights+y1_PT_2_weights+y1_PT_3_weights,\
label="$bg\_vbf\_200\_400$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#89a8a0", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y1_PT_0_weights+y1_PT_1_weights+y1_PT_2_weights,\
label="$bg\_vbf\_100\_200$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#829e8c", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y1_PT_0_weights+y1_PT_1_weights,\
label="$bg\_vbf\_0\_100$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#adbcc6", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y1_PT_0_weights,\
label="$signal$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#7a8e99", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
# Axis
plt.rc('text',usetex=False)
plt.xlabel(r"p_{T} [ j_{1} ] ( GeV ) ",\
fontsize=16,color="black")
plt.ylabel(r"$\mathrm{Events}$ $(\mathcal{L}_{\mathrm{int}} = 40.0\ \mathrm{fb}^{-1})$ ",\
fontsize=16,color="black")
# Boundary of y-axis
ymax=(y1_PT_0_weights+y1_PT_1_weights+y1_PT_2_weights+y1_PT_3_weights+y1_PT_4_weights+y1_PT_5_weights+y1_PT_6_weights+y1_PT_7_weights+y1_PT_8_weights+y1_PT_9_weights+y1_PT_10_weights+y1_PT_11_weights+y1_PT_12_weights+y1_PT_13_weights+y1_PT_14_weights+y1_PT_15_weights+y1_PT_16_weights).max()*1.1
ymin=0 # linear scale
#ymin=min([x for x in (y1_PT_0_weights+y1_PT_1_weights+y1_PT_2_weights+y1_PT_3_weights+y1_PT_4_weights+y1_PT_5_weights+y1_PT_6_weights+y1_PT_7_weights+y1_PT_8_weights+y1_PT_9_weights+y1_PT_10_weights+y1_PT_11_weights+y1_PT_12_weights+y1_PT_13_weights+y1_PT_14_weights+y1_PT_15_weights+y1_PT_16_weights) if x])/100. # log scale
plt.gca().set_ylim(ymin,ymax)
# Log/Linear scale for X-axis
plt.gca().set_xscale("linear")
#plt.gca().set_xscale("log",nonposx="clip")
# Log/Linear scale for Y-axis
plt.gca().set_yscale("linear")
#plt.gca().set_yscale("log",nonposy="clip")
# Legend
plt.legend(bbox_to_anchor=(1.05,1), loc=2, borderaxespad=0.)
# Saving the image
plt.savefig('../../HTML/MadAnalysis5job_0/selection_0.png')
plt.savefig('../../PDF/MadAnalysis5job_0/selection_0.png')
plt.savefig('../../DVI/MadAnalysis5job_0/selection_0.eps')
# Running!
# Script entry point: render the selection_0 histogram images when executed directly.
if __name__ == '__main__':
    selection_0()
| 186.494845 | 2,921 | 0.7123 | 9,075 | 36,180 | 2.759229 | 0.124848 | 0.404792 | 0.595687 | 0.78131 | 0.462939 | 0.454473 | 0.450519 | 0.43742 | 0.432588 | 0.430312 | 0 | 0.536423 | 0.045744 | 36,180 | 193 | 2,922 | 187.46114 | 0.188849 | 0.035462 | 0 | 0.185841 | 0 | 0.00885 | 0.029804 | 0.005737 | 0 | 0 | 0 | 0 | 0 | 1 | 0.00885 | false | 0 | 0.035398 | 0 | 0.044248 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
f48c50e192d1b4a466efeb223e1a87fb07435bb7 | 6,948 | py | Python | v1.0/json-generator/consts.py | truwl/GGR-cwl | 8d02684ae0ff27e641f3704686e3bc8b1979b854 | [
"MIT"
] | 20 | 2016-07-08T15:43:03.000Z | 2021-09-09T09:24:44.000Z | v1.0/json-generator/consts.py | truwl/GGR-cwl | 8d02684ae0ff27e641f3704686e3bc8b1979b854 | [
"MIT"
] | 6 | 2016-01-28T15:18:51.000Z | 2020-07-08T16:29:12.000Z | v1.0/json-generator/consts.py | truwl/GGR-cwl | 8d02684ae0ff27e641f3704686e3bc8b1979b854 | [
"MIT"
] | 19 | 2016-01-27T17:12:59.000Z | 2021-02-15T17:26:56.000Z | # Programs
# Absolute paths to the Java tools invoked by the pipelines.
trimmomatic_jar = "/data/reddylab/software/Trimmomatic-0.32/trimmomatic-0.32.jar"
picard_jar = "/data/reddylab/software/picard-2.14.0/bin/picard.jar"
# Auxiliary reference files (species agnostic)
# ENCODE autoSql schema definitions for the narrowPeak/broadPeak BED formats.
as_narrowPeak = '/data/reddylab/Reference_Data/ENCODE/kent/src/hg/lib/encode/narrowPeak.as'
as_broadPeak = '/data/reddylab/Reference_Data/ENCODE/kent/src/hg/lib/encode/broadPeak.as'
# bamtools filter files used during quantification (names suggest strand-specific
# forward/reverse read filters -- confirm against the quantification workflow).
bamtools_forward_filter = '/data/reddylab/projects/GGR/auxiliary/quantification/forward_filter.duke_sequencing_core.txt'
bamtools_reverse_filter = '/data/reddylab/projects/GGR/auxiliary/quantification/reverse_filter.duke_sequencing_core.txt'
# Defaults
GENOME = 'hg38'
MEM = 24000
CPUS = 16
READ_LENGTH = 50


class ReferenceDataset(object):
    """Filesystem locations of reference data for one genome build.

    The constructor resolves, for the requested genome (several aliases are
    accepted per build), the paths to aligner indexes, annotation files,
    chromosome-size tables and related reference files under /data/reddylab.

    :param genome: genome build name or alias (e.g. 'hg38', 'GRCh38').
    :param read_length: sequencing read length; selects the STAR genome dir.
    :param with_ercc: use reference files that include the ERCC92 spike-ins.
    :param umis: use the UMI-specific Trimmomatic adapter file.
    """

    def __init__(self, genome=GENOME, read_length=READ_LENGTH, with_ercc=False, umis=False):
        # Adapter FASTA handed to Trimmomatic; UMI libraries use their own file.
        if umis:
            self.default_adapters = "/data/reddylab/Reference_Data/Adapters/trimmomatic_UMI.fa"
        else:
            self.default_adapters = "/data/reddylab/Reference_Data/Adapters/default_adapters.fasta"
        self.read_length = read_length
        # Suffix selecting the ERCC92-augmented variants of the reference files.
        ercc = ".with_ercc92" if with_ercc else ""
        build = genome.lower()
        if build in ('hg38', 'grch38'):
            self.bowtie_genome_ref_first_index = "/data/reddylab/Reference_Data/Genomes/hg38/GCA_000001405.15_GRCh38_no_alt_analysis_set.1.ebwt"
            self.bowtie2_genome_ref_first_index = "/data/reddylab/Reference_Data/Genomes/hg38/GCA_000001405.15_GRCh38_no_alt_analysis_set.fna.bowtie_index.1.bt2"  # TODO: Add ERCC92 Bowtie2 option
            self.star_genome_dir = "/data/reddylab/Reference_Data/Genomes/hg38/STAR_genome_sjdbOverhang_%d_novelSJDB" % (self.read_length - 1)
            self.encode_blacklist_bedfile = "/data/reddylab/Reference_Data/ENCODE/hg38.blacklist.v3.bed"
            self.genome_effective_size = "hs"
            self.annotation_file = "/data/reddylab/Reference_Data/Gencode/v22/gencode.v22.annotation%s.gtf" % ercc
            self.rsem_dir = "/data/reddylab/Reference_Data/Genomes/hg38/RSEM_genome%s" % ercc
            self.genome_sizes_file = "/data/reddylab/Reference_Data/Genomes/hg38/GCA_000001405.15_GRCh38_no_alt_analysis_set%s.sizes" % ercc
            self.genome_fasta_files = ["/data/reddylab/Reference_Data/Genomes/hg38/GCA_000001405.15_GRCh38_no_alt_analysis_set%s.fna" % ercc]
            self.regions_bed_file = "/data/reddylab/Reference_Data/Genomes/hg38/GCA_000001405.15_GRCh38_no_alt_analysis_set%s.bed" % ercc
        elif build in ('hg19', 'grch37'):
            self.bowtie_genome_ref_first_index = "/data/reddylab/Reference_Data/Genomes/hg19/hg19.1.ebwt"
            self.bowtie2_genome_ref_first_index = "/data/reddylab/Reference_Data/Bowtie2/hg19/hg19.1.bt2"
            self.star_genome_dir = "/data/reddylab/Reference_Data/Genomes/hg19/STAR_genome_sjdbOverhang_%d" % self.read_length
            self.genome_sizes_file = "/data/reddylab/Reference_Data/Genomes/hg19/hg19.chrom.sizes"
            self.encode_blacklist_bedfile = "/data/reddylab/Reference_Data/ENCODE/wgEncodeDacMapabilityConsensusExcludable.hg19.merged.bed"
            self.genome_effective_size = "hs"
            self.annotation_file = "/data/reddylab/Reference_Data/Gencode/v19/gencode.v19.annotation.gtf"
            self.rsem_dir = "/data/reddylab/Reference_Data/Genomes/hg19/RSEM_genome"
            self.genome_fasta_files = ["/data/reddylab/Reference_Data/Genomes/hg19/hg19.fa"]
            self.regions_bed_file = "/data/reddylab/Reference_Data/Genomes/hg19/hg19.chrom.bed"
        elif build in ('mm10', 'grcm38'):
            self.bowtie_genome_ref_first_index = "/data/reddylab/Reference_Data/Genomes/mm10/bowtie/GRCm38.1.ebwt"
            self.bowtie2_genome_ref_first_index = "/data/reddylab/Reference_Data/Genomes/mm10/bowtie2/GRCm38.1.bt2"
            self.star_genome_dir = "/data/reddylab/Reference_Data/Genomes/mm10/STAR_genome_sjdbOverhang_%d_novelSJDB" % (self.read_length - 1)
            self.encode_blacklist_bedfile = "/data/reddylab/Reference_Data/ENCODE/mm10.blacklist.merged.bed"
            self.genome_effective_size = "mm"
            self.annotation_file = "/data/reddylab/Reference_Data/Gencode/vM13/gencode.vM13.annotation%s.gtf" % ercc
            self.rsem_dir = "/data/reddylab/Reference_Data/Genomes/mm10/RSEM/RSEM_genome%s" % ercc
            self.genome_sizes_file = "/data/reddylab/Reference_Data/Genomes/GRCm38/GRCm38%s.sizes" % ercc
            self.genome_fasta_files = ["/data/reddylab/Reference_Data/Genomes/mm10/GRCm38.primary_assembly.genome%s.fa" % ercc]
            self.regions_bed_file = "/data/reddylab/Reference_Data/Genomes/GRCm38/GRCm38%s.bed" % ercc
        elif build in ('mm9', 'grcm37', 'ncbi37'):
            self.bowtie_genome_ref_first_index = "/data/reddylab/Reference_Data/Genomes/mm9/bowtie/mm9.1.ebwt"
            self.bowtie2_genome_ref_first_index = "/data/reddylab/Reference_Data/Genomes/mm9/bowtie2/mm9.1.bt2"
            self.star_genome_dir = "/data/reddylab/Reference_Data/Genomes/mm9/STAR_genome_sjdbOverhang_%d_novelSJDB" % self.read_length
            self.genome_sizes_file = "/data/reddylab/Reference_Data/Genomes/mm9/mm9.chrom.sizes"
            self.encode_blacklist_bedfile = "/data/reddylab/Reference_Data/ENCODE/mm9.blacklist.bed"
            self.genome_effective_size = "mm"
            self.annotation_file = "/data/reddylab/Reference_Data/Gencode/vM1/gencode.vM1.annotation.gtf"
            self.rsem_dir = "/data/reddylab/Reference_Data/Genomes/mm9/RSEM/RSEM_genome"
            self.genome_fasta_files = ["/data/reddylab/Reference_Data/Genomes/mm9/mm9.fa"]
            self.regions_bed_file = "/data/reddylab/Reference_Data/Genomes/mm9/mm9.chrom.bed"
        elif build in ('danrer10', 'grcz10'):
            # NOTE(review): this build sets no bowtie2 index and no
            # regions_bed_file -- callers using those attributes will get an
            # AttributeError for zebrafish; confirm whether that is intended.
            self.bowtie_genome_ref_first_index = "/data/reddylab/Reference_Data/Genomes/danRer10/bowtie/danRer10.1.ebwt"
            self.star_genome_dir = "/data/reddylab/Reference_Data/Genomes/danRer10/STAR_genome_sjdbOverhang_%d_novelSJDB" % self.read_length
            self.genome_sizes_file = "/data/reddylab/Reference_Data/Genomes/danRer10/danRer10.chrom.sizes"
            self.encode_blacklist_bedfile = None
            self.genome_effective_size = "1.04e9"
            self.annotation_file = "/data/reddylab/Reference_Data/Genomes/danRer10/Danio_rerio.GRCz10.88.chrom_cleaned.gtf"
            self.rsem_dir = "/data/reddylab/Reference_Data/Genomes/danRer10/RSEM/RSEM_genome"
            self.genome_fasta_files = ["/data/reddylab/Reference_Data/Genomes/danRer10/danRer10.fa"]
        else:
            raise Exception("Genome %s not recognized" % genome)
| 86.85 | 194 | 0.732009 | 917 | 6,948 | 5.248637 | 0.1494 | 0.124662 | 0.200706 | 0.238936 | 0.783087 | 0.771868 | 0.759402 | 0.704966 | 0.683358 | 0.640557 | 0 | 0.046315 | 0.148532 | 6,948 | 79 | 195 | 87.949367 | 0.767241 | 0.013385 | 0 | 0.082192 | 0 | 0.041096 | 0.531679 | 0.499416 | 0 | 0 | 0 | 0.012658 | 0 | 1 | 0.013699 | false | 0 | 0 | 0 | 0.027397 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
be3a4ea76bb7b0bc5c4d2afa711bcbb30d3eb4c2 | 1,782 | py | Python | PersonalWebApp/Blog/migrations/0004_auto_20200903_1747.py | CiganOliviu/personal_website | abedf67efc2e7e212c32815f645d3b3709f9f177 | [
"MIT"
] | 1 | 2021-04-02T16:45:56.000Z | 2021-04-02T16:45:56.000Z | PersonalWebApp/Blog/migrations/0004_auto_20200903_1747.py | CiganOliviu/personal_website | abedf67efc2e7e212c32815f645d3b3709f9f177 | [
"MIT"
] | null | null | null | PersonalWebApp/Blog/migrations/0004_auto_20200903_1747.py | CiganOliviu/personal_website | abedf67efc2e7e212c32815f645d3b3709f9f177 | [
"MIT"
] | null | null | null | # Generated by Django 3.0.8 on 2020-09-03 17:47
from django.db import migrations, models
class Migration(migrations.Migration):
    """Replace Post.content with four sub_content/sub_title field pairs."""

    dependencies = [
        ('Blog', '0003_auto_20200903_1706'),
    ]

    # The autogenerated migration listed the new fields in this (alphabetical)
    # suffix order; the comprehensions below reproduce it exactly.
    operations = [
        migrations.RemoveField(
            model_name='post',
            name='content',
        ),
    ] + [
        migrations.AddField(
            model_name='post',
            name='sub_content_' + ordinal,
            field=models.TextField(blank=True, default=''),
        )
        for ordinal in ('four', 'one', 'three', 'two')
    ] + [
        migrations.AddField(
            model_name='post',
            name='sub_title_' + ordinal,
            field=models.CharField(blank=True, default='', max_length=200, unique=True),
        )
        for ordinal in ('four', 'one', 'three', 'two')
    ]
| 30.724138 | 88 | 0.551066 | 176 | 1,782 | 5.397727 | 0.272727 | 0.085263 | 0.123158 | 0.161053 | 0.766316 | 0.766316 | 0.766316 | 0.766316 | 0.713684 | 0.713684 | 0 | 0.035479 | 0.319865 | 1,782 | 57 | 89 | 31.263158 | 0.74835 | 0.025253 | 0 | 0.666667 | 1 | 0 | 0.108357 | 0.013256 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.019608 | 0 | 0.078431 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
be7ebfbdfb2734875eb3bf263bab28fc64fa69dc | 7,525 | py | Python | venv/lib/python3.8/site-packages/ansible_collections/community/general/tests/unit/plugins/modules/database/misc/test_redis_data_incr.py | saeedya/docker-ansible | 6fb0cfc6bc4a5925b21380952a5a4502ec02119a | [
"Apache-2.0"
] | 22 | 2021-07-16T08:11:22.000Z | 2022-03-31T07:15:34.000Z | venv/lib/python3.8/site-packages/ansible_collections/community/general/tests/unit/plugins/modules/database/misc/test_redis_data_incr.py | saeedya/docker-ansible | 6fb0cfc6bc4a5925b21380952a5a4502ec02119a | [
"Apache-2.0"
] | 1 | 2022-03-12T02:25:26.000Z | 2022-03-12T02:25:26.000Z | venv/lib/python3.8/site-packages/ansible_collections/community/general/tests/unit/plugins/modules/database/misc/test_redis_data_incr.py | saeedya/docker-ansible | 6fb0cfc6bc4a5925b21380952a5a4502ec02119a | [
"Apache-2.0"
] | 39 | 2021-07-05T02:31:42.000Z | 2022-03-31T02:46:03.000Z | # -*- coding: utf-8 -*-
#
# Copyright: (c) 2021, Andreas Botzner <andreas at botzner dot com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import pytest
import json
import redis
from redis import __version__
from ansible_collections.community.general.plugins.modules.database.misc import redis_data_incr
from ansible_collections.community.general.tests.unit.plugins.modules.utils import set_module_args
# redis-py >= 3.4.0 added the `username` login option; tests that depend on it
# (and the exception classes they import) are skipped on older client versions.
# NOTE(review): int() would raise on pre-release version strings such as
# '4.0.0rc1' -- confirm the supported redis-py versions only use numeric parts.
HAS_REDIS_USERNAME_OPTION = True
if tuple(map(int, __version__.split('.'))) < (3, 4, 0):
    HAS_REDIS_USERNAME_OPTION = False
if HAS_REDIS_USERNAME_OPTION:
    from redis.exceptions import NoPermissionError, RedisError, ResponseError
def test_redis_data_incr_without_arguments(capfd):
    """Invoking the module with no arguments must fail (key is required)."""
    set_module_args({})
    # The `as results` binding in the original was never used; drop it.
    with pytest.raises(SystemExit):
        redis_data_incr.main()
    out, err = capfd.readouterr()
    assert not err
    assert json.loads(out)['failed']
@pytest.mark.skipif(not HAS_REDIS_USERNAME_OPTION, reason="Redis version < 3.4.0")
def test_redis_data_incr(capfd, mocker):
    """A plain increment (no amount given) reports the new value and a change."""
    module_args = {
        'login_host': 'localhost',
        'login_user': 'root',
        'login_password': 'secret',
        'key': 'foo',
    }
    set_module_args(module_args)
    mocker.patch('redis.Redis.incr', return_value=57)
    with pytest.raises(SystemExit):
        redis_data_incr.main()
    stdout, stderr = capfd.readouterr()
    print(stdout)
    assert not stderr
    result = json.loads(stdout)
    assert result['value'] == 57.0
    assert result['msg'] == 'Incremented key: foo to 57'
    assert result['changed']
@pytest.mark.skipif(not HAS_REDIS_USERNAME_OPTION, reason="Redis version < 3.4.0")
def test_redis_data_incr_int(capfd, mocker):
    """An integer increment amount is applied via INCRBY and reported."""
    module_args = {
        'login_host': 'localhost',
        'login_user': 'root',
        'login_password': 'secret',
        'key': 'foo',
        'increment_int': 10,
    }
    set_module_args(module_args)
    mocker.patch('redis.Redis.incrby', return_value=57)
    with pytest.raises(SystemExit):
        redis_data_incr.main()
    stdout, stderr = capfd.readouterr()
    print(stdout)
    assert not stderr
    result = json.loads(stdout)
    assert result['value'] == 57.0
    assert result['msg'] == 'Incremented key: foo by 10 to 57'
    assert result['changed']
@pytest.mark.skipif(not HAS_REDIS_USERNAME_OPTION, reason="Redis version < 3.4.0")
def test_redis_data_inc_float(capfd, mocker):
    """A float increment amount is applied via INCRBYFLOAT and reported."""
    module_args = {
        'login_host': 'localhost',
        'login_user': 'root',
        'login_password': 'secret',
        'key': 'foo',
        'increment_float': '5.5',
    }
    set_module_args(module_args)
    mocker.patch('redis.Redis.incrbyfloat', return_value=57.45)
    with pytest.raises(SystemExit):
        redis_data_incr.main()
    stdout, stderr = capfd.readouterr()
    print(stdout)
    assert not stderr
    result = json.loads(stdout)
    assert result['value'] == 57.45
    assert result['msg'] == 'Incremented key: foo by 5.5 to 57.45'
    assert result['changed']
@pytest.mark.skipif(not HAS_REDIS_USERNAME_OPTION, reason="Redis version < 3.4.0")
def test_redis_data_incr_float_wrong_value(capfd):
    """A non-numeric increment_float is rejected as a failure."""
    set_module_args({
        'login_host': 'localhost',
        'login_user': 'root',
        'login_password': 'secret',
        'key': 'foo',
        'increment_float': 'not_a_number',
    })
    with pytest.raises(SystemExit):
        redis_data_incr.main()
    stdout, stderr = capfd.readouterr()
    print(stdout)
    assert not stderr
    assert json.loads(stdout)['failed']
@pytest.mark.skipif(HAS_REDIS_USERNAME_OPTION, reason="Redis version > 3.4.0")
def test_redis_data_incr_fail_username(capfd, mocker):
    """On redis-py < 3.4.0, passing login_user must fail with a clear message."""
    set_module_args({
        'login_host': 'localhost',
        'login_user': 'root',
        'login_password': 'secret',
        'key': 'foo',
        '_ansible_check_mode': False,
    })
    with pytest.raises(SystemExit):
        redis_data_incr.main()
    stdout, stderr = capfd.readouterr()
    print(stdout)
    assert not stderr
    result = json.loads(stdout)
    assert result['failed']
    # Expected text (including its grammar) must match the module verbatim.
    assert result['msg'] == 'The option `username` in only supported with redis >= 3.4.0.'
def test_redis_data_incr_no_username(capfd, mocker):
    """Increment works without a login_user (default user)."""
    module_args = {
        'login_host': 'localhost',
        'login_password': 'secret',
        'key': 'foo',
    }
    set_module_args(module_args)
    mocker.patch('redis.Redis.incr', return_value=57)
    with pytest.raises(SystemExit):
        redis_data_incr.main()
    stdout, stderr = capfd.readouterr()
    print(stdout)
    assert not stderr
    result = json.loads(stdout)
    assert result['value'] == 57.0
    assert result['msg'] == 'Incremented key: foo to 57'
    assert result['changed']
def test_redis_data_incr_float_no_username(capfd, mocker):
    """Float increment works without a login_user (default user)."""
    module_args = {
        'login_host': 'localhost',
        'login_password': 'secret',
        'key': 'foo',
        'increment_float': '5.5',
    }
    set_module_args(module_args)
    mocker.patch('redis.Redis.incrbyfloat', return_value=57.45)
    with pytest.raises(SystemExit):
        redis_data_incr.main()
    stdout, stderr = capfd.readouterr()
    print(stdout)
    assert not stderr
    result = json.loads(stdout)
    assert result['value'] == 57.45
    assert result['msg'] == 'Incremented key: foo by 5.5 to 57.45'
    assert result['changed']
def test_redis_data_incr_check_mode(capfd, mocker):
    """Check mode predicts the incremented value without reporting a change."""
    set_module_args({
        'login_host': 'localhost',
        'login_password': 'secret',
        'key': 'foo',
        '_ansible_check_mode': True,
    })
    mocker.patch('redis.Redis.get', return_value=10)
    with pytest.raises(SystemExit):
        redis_data_incr.main()
    stdout, stderr = capfd.readouterr()
    print(stdout)
    assert not stderr
    result = json.loads(stdout)
    assert result['value'] == 11.0
    assert result['msg'] == 'Incremented key: foo by 1 to 11.0'
    assert not result['changed']
def test_redis_data_incr_check_mode_not_incrementable(capfd, mocker):
    """Check mode fails cleanly when the stored value cannot be incremented."""
    set_module_args({
        'login_host': 'localhost',
        'login_password': 'secret',
        'key': 'foo',
        '_ansible_check_mode': True,
    })
    mocker.patch('redis.Redis.get', return_value='bar')
    with pytest.raises(SystemExit):
        redis_data_incr.main()
    stdout, stderr = capfd.readouterr()
    print(stdout)
    assert not stderr
    result = json.loads(stdout)
    assert result['failed']
    assert result['msg'] == "Value: bar of key: foo is not incrementable(int or float)"
    assert 'value' not in result
    assert not result['changed']
@pytest.mark.skipif(not HAS_REDIS_USERNAME_OPTION, reason="Redis version < 3.4.0")
def test_redis_data_incr_check_mode_permissions(capfd, mocker):
    """Check mode surfaces a NoPermissionError from the GET as a failure."""
    set_module_args({
        'login_host': 'localhost',
        'login_password': 'secret',
        'key': 'foo',
        '_ansible_check_mode': True,
    })
    # Direct assignment (as in the original) rather than mocker.patch: the
    # mock is installed on the class for the rest of the test session.
    redis.Redis.get = mocker.Mock(side_effect=NoPermissionError(
        "this user has no permissions to run the 'get' command or its subcommand"))
    with pytest.raises(SystemExit):
        redis_data_incr.main()
    stdout, stderr = capfd.readouterr()
    print(stdout)
    assert not stderr
    result = json.loads(stdout)
    assert result['failed']
    assert result['msg'].startswith(
        'Failed to get value of key: foo with exception:')
    assert 'value' not in result
    assert not result['changed']
| 36.177885 | 98 | 0.62897 | 960 | 7,525 | 4.719792 | 0.153125 | 0.059589 | 0.079453 | 0.099316 | 0.793423 | 0.772236 | 0.768042 | 0.766056 | 0.749062 | 0.749062 | 0 | 0.017299 | 0.239468 | 7,525 | 207 | 99 | 36.352657 | 0.774419 | 0.023654 | 0 | 0.754286 | 0 | 0 | 0.204985 | 0.006265 | 0 | 0 | 0 | 0 | 0.234286 | 1 | 0.062857 | false | 0.057143 | 0.045714 | 0 | 0.108571 | 0.062857 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 7 |
beb8683e97f02f2ece77faf1ded3e45c63b48b63 | 7,954 | py | Python | tests/components/binary_sensor/test_trend.py | smilepc/Home-assistant | db3bfad0b5e0815ba1e255d4d646af7c99caef8b | [
"MIT"
] | null | null | null | tests/components/binary_sensor/test_trend.py | smilepc/Home-assistant | db3bfad0b5e0815ba1e255d4d646af7c99caef8b | [
"MIT"
] | null | null | null | tests/components/binary_sensor/test_trend.py | smilepc/Home-assistant | db3bfad0b5e0815ba1e255d4d646af7c99caef8b | [
"MIT"
] | null | null | null | """The test for the Trend sensor platform."""
import homeassistant.bootstrap as bootstrap
from tests.common import get_test_home_assistant
class TestTrendBinarySensor:
"""Test the Trend sensor."""
def setup_method(self, method):
"""Setup things to be run when tests are started."""
self.hass = get_test_home_assistant()
def teardown_method(self, method):
"""Stop everything that was started."""
self.hass.stop()
def test_up(self):
"""Test up trend."""
assert bootstrap.setup_component(self.hass, 'binary_sensor', {
'binary_sensor': {
'platform': 'trend',
'sensors': {
'test_trend_sensor': {
'entity_id':
"sensor.test_state"
}
}
}
})
self.hass.states.set('sensor.test_state', '1')
self.hass.pool.block_till_done()
self.hass.states.set('sensor.test_state', '2')
self.hass.pool.block_till_done()
state = self.hass.states.get('binary_sensor.test_trend_sensor')
assert state.state == 'on'
def test_down(self):
"""Test down trend."""
assert bootstrap.setup_component(self.hass, 'binary_sensor', {
'binary_sensor': {
'platform': 'trend',
'sensors': {
'test_trend_sensor': {
'entity_id':
"sensor.test_state"
}
}
}
})
self.hass.states.set('sensor.test_state', '2')
self.hass.pool.block_till_done()
self.hass.states.set('sensor.test_state', '1')
self.hass.pool.block_till_done()
state = self.hass.states.get('binary_sensor.test_trend_sensor')
assert state.state == 'off'
def test__invert_up(self):
"""Test up trend with custom message."""
assert bootstrap.setup_component(self.hass, 'binary_sensor', {
'binary_sensor': {
'platform': 'trend',
'sensors': {
'test_trend_sensor': {
'entity_id':
"sensor.test_state",
'invert': "Yes"
}
}
}
})
self.hass.states.set('sensor.test_state', '1')
self.hass.pool.block_till_done()
self.hass.states.set('sensor.test_state', '2')
self.hass.pool.block_till_done()
state = self.hass.states.get('binary_sensor.test_trend_sensor')
assert state.state == 'off'
def test_invert_down(self):
"""Test down trend with custom message."""
assert bootstrap.setup_component(self.hass, 'binary_sensor', {
'binary_sensor': {
'platform': 'trend',
'sensors': {
'test_trend_sensor': {
'entity_id':
"sensor.test_state",
'invert': "Yes"
}
}
}
})
self.hass.states.set('sensor.test_state', '2')
self.hass.pool.block_till_done()
self.hass.states.set('sensor.test_state', '1')
self.hass.pool.block_till_done()
state = self.hass.states.get('binary_sensor.test_trend_sensor')
assert state.state == 'on'
def test_attribute_up(self):
"""Test attribute up trend."""
assert bootstrap.setup_component(self.hass, 'binary_sensor', {
'binary_sensor': {
'platform': 'trend',
'sensors': {
'test_trend_sensor': {
'entity_id':
"sensor.test_state",
'attribute': 'attr'
}
}
}
})
self.hass.states.set('sensor.test_state', 'State', {'attr': '1'})
self.hass.pool.block_till_done()
self.hass.states.set('sensor.test_state', 'State', {'attr': '2'})
self.hass.pool.block_till_done()
state = self.hass.states.get('binary_sensor.test_trend_sensor')
assert state.state == 'on'
def test_attribute_down(self):
"""Test attribute down trend."""
assert bootstrap.setup_component(self.hass, 'binary_sensor', {
'binary_sensor': {
'platform': 'trend',
'sensors': {
'test_trend_sensor': {
'entity_id':
"sensor.test_state",
'attribute': 'attr'
}
}
}
})
self.hass.states.set('sensor.test_state', 'State', {'attr': '2'})
self.hass.pool.block_till_done()
self.hass.states.set('sensor.test_state', 'State', {'attr': '1'})
self.hass.pool.block_till_done()
state = self.hass.states.get('binary_sensor.test_trend_sensor')
assert state.state == 'off'
def test_non_numeric(self):
"""Test up trend."""
assert bootstrap.setup_component(self.hass, 'binary_sensor', {
'binary_sensor': {
'platform': 'trend',
'sensors': {
'test_trend_sensor': {
'entity_id':
"sensor.test_state"
}
}
}
})
self.hass.states.set('sensor.test_state', 'Non')
self.hass.pool.block_till_done()
self.hass.states.set('sensor.test_state', 'Numeric')
self.hass.pool.block_till_done()
state = self.hass.states.get('binary_sensor.test_trend_sensor')
assert state.state == 'off'
def test_missing_attribute(self):
"""Test attribute down trend."""
assert bootstrap.setup_component(self.hass, 'binary_sensor', {
'binary_sensor': {
'platform': 'trend',
'sensors': {
'test_trend_sensor': {
'entity_id':
"sensor.test_state",
'attribute': 'missing'
}
}
}
})
self.hass.states.set('sensor.test_state', 'State', {'attr': '2'})
self.hass.pool.block_till_done()
self.hass.states.set('sensor.test_state', 'State', {'attr': '1'})
self.hass.pool.block_till_done()
state = self.hass.states.get('binary_sensor.test_trend_sensor')
assert state.state == 'off'
def test_invalid_name_does_not_create(self):
"""Test invalid name."""
assert not bootstrap.setup_component(self.hass, 'binary_sensor', {
'binary_sensor': {
'platform': 'template',
'sensors': {
'test INVALID sensor': {
'entity_id':
"sensor.test_state"
}
}
}
})
assert self.hass.states.all() == []
def test_invalid_sensor_does_not_create(self):
"""Test invalid sensor."""
assert not bootstrap.setup_component(self.hass, 'binary_sensor', {
'binary_sensor': {
'platform': 'template',
'sensors': {
'test_trend_sensor': {
'not_entity_id':
"sensor.test_state"
}
}
}
})
assert self.hass.states.all() == []
def test_no_sensors_does_not_create(self):
"""Test no sensors."""
assert not bootstrap.setup_component(self.hass, 'binary_sensor', {
'binary_sensor': {
'platform': 'trend'
}
})
assert self.hass.states.all() == []
| 34.582609 | 74 | 0.491199 | 762 | 7,954 | 4.888451 | 0.094488 | 0.120268 | 0.101477 | 0.07302 | 0.877047 | 0.85396 | 0.837315 | 0.837315 | 0.837315 | 0.837315 | 0 | 0.002841 | 0.380563 | 7,954 | 229 | 75 | 34.733624 | 0.753197 | 0.050038 | 0 | 0.708995 | 0 | 0 | 0.216578 | 0.033155 | 0 | 0 | 0 | 0 | 0.116402 | 1 | 0.068783 | false | 0 | 0.010582 | 0 | 0.084656 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
fe2e3dd2b0949baea2d7ab1e5c71ed8c6e09c350 | 23,245 | py | Python | tests/test_cyclegan.py | drcut/mmediting | e7f13f16dc63f1698d819248ed045983b35c0dbe | [
"Apache-2.0"
] | 1 | 2021-04-30T23:08:16.000Z | 2021-04-30T23:08:16.000Z | tests/test_cyclegan.py | drcut/mmediting | e7f13f16dc63f1698d819248ed045983b35c0dbe | [
"Apache-2.0"
] | null | null | null | tests/test_cyclegan.py | drcut/mmediting | e7f13f16dc63f1698d819248ed045983b35c0dbe | [
"Apache-2.0"
] | null | null | null | import copy
from unittest.mock import patch
import mmcv
import pytest
import torch
from mmcv.parallel import DataContainer as DC
from mmcv.runner import obj_from_dict
from mmedit.models import build_model
from mmedit.models.backbones import ResnetGenerator
from mmedit.models.components import PatchDiscriminator
from mmedit.models.losses import GANLoss, L1Loss
def _make_optimizers(synthesizer, optim_cfg):
    """Build the Adam optimizers for the generator and discriminator
    parameter groups, mirroring how the runner constructs them."""
    return {
        'generators':
        obj_from_dict(
            optim_cfg, torch.optim,
            dict(params=getattr(synthesizer, 'generators').parameters())),
        'discriminators':
        obj_from_dict(
            optim_cfg, torch.optim,
            dict(params=getattr(synthesizer, 'discriminators').parameters()))
    }


def _check_fake_images(outputs):
    """Assert that both translated images are tensors of shape (1, 3, 64, 64)."""
    assert torch.is_tensor(outputs['fake_b'])
    assert torch.is_tensor(outputs['fake_a'])
    assert outputs['fake_b'].size() == (1, 3, 64, 64)
    assert outputs['fake_a'].size() == (1, 3, 64, 64)


def _check_train_step(outputs, real_a, real_b):
    """Assert the common structure of a ``train_step`` result dict and that
    the real images are passed through unchanged."""
    assert isinstance(outputs, dict)
    assert isinstance(outputs['log_vars'], dict)
    assert isinstance(outputs['results'], dict)
    assert outputs['num_samples'] == 1
    assert torch.equal(outputs['results']['real_a'], real_a)
    assert torch.equal(outputs['results']['real_b'], real_b)
    _check_fake_images(outputs['results'])


def test_cyclegan():
    """End-to-end test of the CycleGAN model wrapper.

    Covers config validation, forward_dummy/forward_test/forward_train,
    val_step and train_step, the optional CUDA path, disc_steps/
    disc_init_steps scheduling, training without the identity loss, the
    'b2a' direction, a zero-size GAN image buffer, and image saving.
    """
    model_cfg = dict(
        type='CycleGAN',
        generator=dict(
            type='ResnetGenerator',
            in_channels=3,
            out_channels=3,
            base_channels=64,
            norm_cfg=dict(type='IN'),
            use_dropout=False,
            num_blocks=9,
            padding_mode='reflect',
            init_cfg=dict(type='normal', gain=0.02)),
        discriminator=dict(
            type='PatchDiscriminator',
            in_channels=3,
            base_channels=64,
            num_conv=3,
            norm_cfg=dict(type='IN'),
            init_cfg=dict(type='normal', gain=0.02)),
        gan_loss=dict(
            type='GANLoss',
            gan_type='lsgan',
            real_label_val=1.0,
            fake_label_val=0,
            loss_weight=1.0),
        cycle_loss=dict(type='L1Loss', loss_weight=10.0, reduction='mean'),
        id_loss=dict(type='L1Loss', loss_weight=0.5, reduction='mean'))

    train_cfg = None
    test_cfg = None

    # loss keys logged by a full train_step, and the generator-side subset
    # that is skipped while only the discriminators are being updated
    all_loss_keys = [
        'loss_gan_d_a', 'loss_gan_d_b', 'loss_id_a', 'loss_id_b',
        'loss_gan_g_a', 'loss_gan_g_b', 'loss_cycle_a', 'loss_cycle_b'
    ]
    gen_loss_keys = [
        'loss_id_a', 'loss_id_b', 'loss_gan_g_a', 'loss_gan_g_b',
        'loss_cycle_a', 'loss_cycle_b'
    ]

    # build synthesizer
    synthesizer = build_model(
        model_cfg, train_cfg=train_cfg, test_cfg=test_cfg)

    # id loss > 0 requires in_channels == out_channels
    with pytest.raises(AssertionError):
        bad_model_cfg = copy.deepcopy(model_cfg)
        bad_model_cfg['generator']['out_channels'] = 1
        _ = build_model(bad_model_cfg, train_cfg=train_cfg, test_cfg=test_cfg)

    # gan loss cannot be None
    with pytest.raises(AssertionError):
        bad_model_cfg = copy.deepcopy(model_cfg)
        bad_model_cfg['gan_loss'] = None
        _ = build_model(bad_model_cfg, train_cfg=train_cfg, test_cfg=test_cfg)

    # cycle loss cannot be None
    with pytest.raises(AssertionError):
        bad_model_cfg = copy.deepcopy(model_cfg)
        bad_model_cfg['cycle_loss'] = None
        _ = build_model(bad_model_cfg, train_cfg=train_cfg, test_cfg=test_cfg)

    # test attributes
    assert synthesizer.__class__.__name__ == 'CycleGAN'
    assert isinstance(synthesizer.generators['a'], ResnetGenerator)
    assert isinstance(synthesizer.generators['b'], ResnetGenerator)
    assert isinstance(synthesizer.discriminators['a'], PatchDiscriminator)
    assert isinstance(synthesizer.discriminators['b'], PatchDiscriminator)
    assert isinstance(synthesizer.gan_loss, GANLoss)
    assert isinstance(synthesizer.cycle_loss, L1Loss)
    assert isinstance(synthesizer.id_loss, L1Loss)
    assert synthesizer.train_cfg is None
    assert synthesizer.test_cfg is None

    # prepare data
    inputs = torch.rand(1, 3, 64, 64)
    targets = torch.rand(1, 3, 64, 64)
    data_batch = {'img_a': inputs, 'img_b': targets}
    img_meta = {'img_a_path': 'img_a_path', 'img_b_path': 'img_b_path'}
    data_batch['meta'] = [img_meta]

    # prepare optimizer
    optim_cfg = dict(type='Adam', lr=2e-4, betas=(0.5, 0.999))
    optimizer = _make_optimizers(synthesizer, optim_cfg)

    # test forward_dummy
    with torch.no_grad():
        output = synthesizer.forward_dummy(data_batch['img_a'])
    assert torch.is_tensor(output)
    assert output.size() == (1, 3, 64, 64)

    # test forward_test
    with torch.no_grad():
        outputs = synthesizer(inputs, targets, [img_meta], test_mode=True)
    assert torch.equal(outputs['real_a'], data_batch['img_a'])
    assert torch.equal(outputs['real_b'], data_batch['img_b'])
    _check_fake_images(outputs)

    # val_step
    with torch.no_grad():
        outputs = synthesizer.val_step(data_batch)
    assert torch.equal(outputs['real_a'], data_batch['img_a'])
    assert torch.equal(outputs['real_b'], data_batch['img_b'])
    _check_fake_images(outputs)

    # test forward_train: additionally returns the cycle reconstructions
    outputs = synthesizer(inputs, targets, [img_meta], test_mode=False)
    assert torch.equal(outputs['real_a'], data_batch['img_a'])
    assert torch.equal(outputs['real_b'], data_batch['img_b'])
    _check_fake_images(outputs)
    assert torch.is_tensor(outputs['rec_a'])
    assert torch.is_tensor(outputs['rec_b'])
    assert outputs['rec_a'].size() == (1, 3, 64, 64)
    assert outputs['rec_b'].size() == (1, 3, 64, 64)

    # test train_step
    outputs = synthesizer.train_step(data_batch, optimizer)
    for v in all_loss_keys:
        assert isinstance(outputs['log_vars'][v], float)
    _check_train_step(outputs, data_batch['img_a'], data_batch['img_b'])

    # test train_step and forward_test (gpu)
    if torch.cuda.is_available():
        synthesizer = synthesizer.cuda()
        optimizer = _make_optimizers(synthesizer, optim_cfg)
        data_batch_cuda = copy.deepcopy(data_batch)
        data_batch_cuda['img_a'] = inputs.cuda()
        data_batch_cuda['img_b'] = targets.cuda()
        data_batch_cuda['meta'] = [DC(img_meta, cpu_only=True).data]

        # forward_test: results are moved back to cpu
        with torch.no_grad():
            outputs = synthesizer(
                data_batch_cuda['img_a'],
                data_batch_cuda['img_b'],
                data_batch_cuda['meta'],
                test_mode=True)
        assert torch.equal(outputs['real_a'], data_batch_cuda['img_a'].cpu())
        assert torch.equal(outputs['real_b'], data_batch_cuda['img_b'].cpu())
        _check_fake_images(outputs)

        # val_step
        with torch.no_grad():
            outputs = synthesizer.val_step(data_batch_cuda)
        assert torch.equal(outputs['real_a'], data_batch_cuda['img_a'].cpu())
        assert torch.equal(outputs['real_b'], data_batch_cuda['img_b'].cpu())
        _check_fake_images(outputs)

        # forward_train keeps tensors on the gpu
        outputs = synthesizer(
            data_batch_cuda['img_a'],
            data_batch_cuda['img_b'],
            data_batch_cuda['meta'],
            test_mode=False)
        assert torch.equal(outputs['real_a'], data_batch_cuda['img_a'])
        assert torch.equal(outputs['real_b'], data_batch_cuda['img_b'])
        _check_fake_images(outputs)
        assert torch.is_tensor(outputs['rec_a'])
        assert torch.is_tensor(outputs['rec_b'])
        assert outputs['rec_a'].size() == (1, 3, 64, 64)
        assert outputs['rec_b'].size() == (1, 3, 64, 64)

        # train_step
        outputs = synthesizer.train_step(data_batch_cuda, optimizer)
        for v in all_loss_keys:
            assert isinstance(outputs['log_vars'][v], float)
        _check_train_step(outputs, data_batch_cuda['img_a'].cpu(),
                          data_batch_cuda['img_b'].cpu())

    # test disc_steps and disc_init_steps
    data_batch['img_a'] = inputs.cpu()
    data_batch['img_b'] = targets.cpu()
    train_cfg = dict(disc_steps=2, disc_init_steps=2)
    synthesizer = build_model(
        model_cfg, train_cfg=train_cfg, test_cfg=test_cfg)
    optimizer = _make_optimizers(synthesizer, optim_cfg)

    # iter 0, 1: only the discriminators are updated (disc_init_steps)
    for i in range(2):
        assert synthesizer.step_counter == i
        outputs = synthesizer.train_step(data_batch, optimizer)
        for v in gen_loss_keys:
            assert outputs['log_vars'].get(v) is None
        assert isinstance(outputs['log_vars']['loss_gan_d_a'], float)
        assert isinstance(outputs['log_vars']['loss_gan_d_b'], float)
        _check_train_step(outputs, data_batch['img_a'], data_batch['img_b'])
        assert synthesizer.step_counter == i + 1

    # iter 2, 3, 4, 5: generators updated only every disc_steps iterations
    for i in range(2, 6):
        assert synthesizer.step_counter == i
        outputs = synthesizer.train_step(data_batch, optimizer)
        if i % 2 == 1:
            # generator step skipped on odd iterations
            for v in gen_loss_keys:
                assert outputs['log_vars'].get(v) is None
            logged_keys = ['loss_gan_d_a', 'loss_gan_d_b']
        else:
            logged_keys = all_loss_keys
        for v in logged_keys:
            assert isinstance(outputs['log_vars'][v], float)
        _check_train_step(outputs, data_batch['img_a'], data_batch['img_b'])
        assert synthesizer.step_counter == i + 1

    # test without id loss
    model_cfg_ = copy.deepcopy(model_cfg)
    model_cfg_.pop('id_loss')
    synthesizer = build_model(model_cfg_, train_cfg=None, test_cfg=None)
    optimizer = _make_optimizers(synthesizer, optim_cfg)
    data_batch['img_a'] = inputs.cpu()
    data_batch['img_b'] = targets.cpu()
    outputs = synthesizer.train_step(data_batch, optimizer)
    assert outputs['log_vars'].get('loss_id_a') is None
    assert outputs['log_vars'].get('loss_id_b') is None
    for v in [
            'loss_gan_d_a', 'loss_gan_d_b', 'loss_gan_g_a', 'loss_gan_g_b',
            'loss_cycle_a', 'loss_cycle_b'
    ]:
        assert isinstance(outputs['log_vars'][v], float)
    _check_train_step(outputs, data_batch['img_a'], data_batch['img_b'])

    # test b2a translation: real_a/real_b are swapped in the results
    data_batch['img_a'] = inputs.cpu()
    data_batch['img_b'] = targets.cpu()
    train_cfg = dict(direction='b2a')
    synthesizer = build_model(
        model_cfg, train_cfg=train_cfg, test_cfg=test_cfg)
    optimizer = _make_optimizers(synthesizer, optim_cfg)
    assert synthesizer.step_counter == 0
    outputs = synthesizer.train_step(data_batch, optimizer)
    for v in all_loss_keys:
        assert isinstance(outputs['log_vars'][v], float)
    _check_train_step(outputs, data_batch['img_b'], data_batch['img_a'])
    assert synthesizer.step_counter == 1

    # test GAN image buffer size = 0
    data_batch['img_a'] = inputs.cpu()
    data_batch['img_b'] = targets.cpu()
    train_cfg = dict(buffer_size=0)
    synthesizer = build_model(
        model_cfg, train_cfg=train_cfg, test_cfg=test_cfg)
    optimizer = _make_optimizers(synthesizer, optim_cfg)
    assert synthesizer.step_counter == 0
    outputs = synthesizer.train_step(data_batch, optimizer)
    for v in all_loss_keys:
        assert isinstance(outputs['log_vars'][v], float)
    _check_train_step(outputs, data_batch['img_a'], data_batch['img_b'])
    assert synthesizer.step_counter == 1

    # test saving images for every show_input / test_direction combination
    for save_test_cfg in (dict(show_input=True),
                          dict(show_input=False, test_direction='a2b'),
                          dict(show_input=False, test_direction='b2a')):
        synthesizer = build_model(
            model_cfg, train_cfg=None, test_cfg=save_test_cfg)
        with patch.object(mmcv, 'imwrite', return_value=True):
            # save_image=True without save_path must raise
            with pytest.raises(AssertionError):
                with torch.no_grad():
                    _ = synthesizer(
                        inputs,
                        targets, [img_meta],
                        test_mode=True,
                        save_image=True)
            # with and without an explicit iteration number
            for iteration in (None, 1000):
                kwargs = dict(
                    test_mode=True, save_image=True, save_path='save_path')
                if iteration is not None:
                    kwargs['iteration'] = iteration
                with torch.no_grad():
                    outputs = synthesizer(inputs, targets, [img_meta],
                                          **kwargs)
                assert torch.equal(outputs['real_a'], data_batch['img_a'])
                assert torch.equal(outputs['real_b'], data_batch['img_b'])
                _check_fake_images(outputs)
                assert outputs['saved_flag']
| 41.807554 | 78 | 0.60684 | 2,995 | 23,245 | 4.436394 | 0.057763 | 0.067058 | 0.013547 | 0.020321 | 0.868292 | 0.855122 | 0.843607 | 0.835403 | 0.807481 | 0.806954 | 0 | 0.020546 | 0.254592 | 23,245 | 555 | 79 | 41.882883 | 0.746292 | 0.035535 | 0 | 0.776639 | 0 | 0 | 0.130149 | 0 | 0 | 0 | 0 | 0 | 0.397541 | 1 | 0.002049 | false | 0 | 0.022541 | 0 | 0.02459 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
fe4f0f934112bcc6cdaf8a2af0d0125f735e8b34 | 190 | py | Python | app/gws/ext/search/provider/wfs/__init__.py | ewie/gbd-websuite | 6f2814c7bb64d11cb5a0deec712df751718fb3e1 | [
"Apache-2.0"
] | null | null | null | app/gws/ext/search/provider/wfs/__init__.py | ewie/gbd-websuite | 6f2814c7bb64d11cb5a0deec712df751718fb3e1 | [
"Apache-2.0"
] | null | null | null | app/gws/ext/search/provider/wfs/__init__.py | ewie/gbd-websuite | 6f2814c7bb64d11cb5a0deec712df751718fb3e1 | [
"Apache-2.0"
] | null | null | null | import gws.ext.ows.provider.wfs.search
class Config(gws.ext.ows.provider.wfs.search.Config):
    """WFS search"""
    # The docstring alone forms a valid class body; the redundant `pass` was
    # removed. No provider-specific configuration is added over the base.
class Object(gws.ext.ows.provider.wfs.search.Object):
    # Inherits all behavior from the generic WFS search provider object;
    # no overrides are needed for this extension point.
    pass
| 17.272727 | 53 | 0.710526 | 29 | 190 | 4.655172 | 0.37931 | 0.266667 | 0.2 | 0.377778 | 0.577778 | 0.577778 | 0 | 0 | 0 | 0 | 0 | 0 | 0.136842 | 190 | 10 | 54 | 19 | 0.823171 | 0.052632 | 0 | 0.4 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0.4 | 0.2 | 0 | 0.6 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 7 |
fe6f7ab0e84a02a41c199f8614c5b77b0edf108f | 171 | py | Python | sacramento/bills.py | jayktee/scrapers-us-municipal | ff52a331e91cb590a3eda7db6c688d75b77acacb | [
"MIT"
] | 67 | 2015-04-28T19:28:18.000Z | 2022-01-31T03:27:17.000Z | sacramento/bills.py | jayktee/scrapers-us-municipal | ff52a331e91cb590a3eda7db6c688d75b77acacb | [
"MIT"
] | 202 | 2015-01-15T18:43:12.000Z | 2021-11-23T15:09:10.000Z | sacramento/bills.py | jayktee/scrapers-us-municipal | ff52a331e91cb590a3eda7db6c688d75b77acacb | [
"MIT"
] | 54 | 2015-01-27T03:15:45.000Z | 2021-09-10T19:35:32.000Z | from pupa.scrape import Scraper
from pupa.scrape import Bill
class SacramentoBillScraper(Scraper):
    """Pupa scraper stub for Sacramento bills."""

    def scrape(self):
        # needs to be implemented
        # NOTE(review): pupa scrapers are expected to yield Bill objects from
        # scrape(); this stub currently produces nothing.
        pass
| 17.1 | 37 | 0.707602 | 21 | 171 | 5.761905 | 0.714286 | 0.132231 | 0.231405 | 0.330579 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.239766 | 171 | 9 | 38 | 19 | 0.930769 | 0.134503 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.2 | false | 0.2 | 0.4 | 0 | 0.8 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 0 | 0 | 7 |
fe8f991e1dbd1156cf3245b2234cbe70d00b9235 | 156 | py | Python | A_Star_Search/student_impl/__init__.py | JeremieMelo/EE382M-S22-A-Star-Search | f5a555b834ff3394be8581ec6bc2c664de75396a | [
"MIT"
] | null | null | null | A_Star_Search/student_impl/__init__.py | JeremieMelo/EE382M-S22-A-Star-Search | f5a555b834ff3394be8581ec6bc2c664de75396a | [
"MIT"
] | null | null | null | A_Star_Search/student_impl/__init__.py | JeremieMelo/EE382M-S22-A-Star-Search | f5a555b834ff3394be8581ec6bc2c664de75396a | [
"MIT"
] | null | null | null | '''
Description:
Author: Jiaqi Gu (jqgu@utexas.edu)
Date: 2022-03-07 16:03:49
LastEditors: Jiaqi Gu (jqgu@utexas.edu)
LastEditTime: 2022-03-07 16:03:50
'''
| 19.5 | 39 | 0.717949 | 27 | 156 | 4.148148 | 0.592593 | 0.125 | 0.196429 | 0.303571 | 0.571429 | 0 | 0 | 0 | 0 | 0 | 0 | 0.201439 | 0.108974 | 156 | 7 | 40 | 22.285714 | 0.604317 | 0.942308 | 0 | null | 0 | null | 0 | 0 | null | 0 | 0 | 0 | null | 1 | null | true | 0 | 0 | null | null | null | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
a3acd8750a4c1472451b70e2b010a8dcd60f961d | 23,376 | py | Python | messengerext/surveys/tests.py | groupsome/groupsome | 4edcf30d66ff458c4df37d3198ef187219a768d7 | [
"MIT"
] | 6 | 2016-10-07T13:43:17.000Z | 2017-10-07T22:34:44.000Z | messengerext/surveys/tests.py | groupsome/groupsome | 4edcf30d66ff458c4df37d3198ef187219a768d7 | [
"MIT"
] | null | null | null | messengerext/surveys/tests.py | groupsome/groupsome | 4edcf30d66ff458c4df37d3198ef187219a768d7 | [
"MIT"
] | 1 | 2020-07-15T04:29:31.000Z | 2020-07-15T04:29:31.000Z | from django.test import TestCase, RequestFactory
from django.core.urlresolvers import reverse
from django.test.client import Client
from surveys.models import Choice, Vote, Survey
from groups.tests import create_user, create_group
from home import models
from mock import patch
import mock
import telepot
import json
import bot
class TestSurveyModel(TestCase):
    """Unit tests for the Survey model."""

    def test_survey_create(self):
        """A saved survey keeps question, creation time, group and the
        default 'open' status."""
        user = create_user()
        group = models.Group.create_and_save(name="Soccer", picture="", description="", telegram_id=21)
        survey = Survey.create_and_save(question="When we meet for a soccer game?", creator=user,
                                        created="2016-06-04 8:00+01:00", group=group, multiselect=False)
        # assertEqual instead of the deprecated assertEquals alias
        self.assertEqual(len(Survey.objects.all()), 1)
        self.assertEqual(survey.question, "When we meet for a soccer game?")
        self.assertEqual(survey.created, "2016-06-04 8:00+01:00")
        self.assertEqual(survey.status, "open")
        self.assertEqual(survey.group, group)

    def test_string_representation(self):
        """str(survey) is the survey question."""
        user = create_user()
        group = models.Group.create_and_save(name="Soccer", picture="", description="", telegram_id=21)
        survey = Survey.create_and_save(question="When we meet for a soccer game?", creator=user,
                                        created="2016-06-04 8:00+01:00", group=group, multiselect=False)
        self.assertEqual(str(survey), "When we meet for a soccer game?")
class TestChoiceModel(TestCase):
    """Unit tests for the Choice model."""

    def test_choice_create(self):
        """A saved choice keeps its option text and its parent survey."""
        user = create_user()
        group = models.Group.create_and_save(name="Soccer", picture="", description="", telegram_id=21)
        survey = Survey.create_and_save(question="When we meet for a soccer game?", creator=user,
                                        created="2016-06-04 8:00+01:00", group=group, multiselect=False)
        choice = Choice.create_and_save(option="6 PM", survey=survey)
        # assertEqual instead of the deprecated assertEquals alias
        self.assertEqual(len(Choice.objects.all()), 1)
        self.assertEqual(choice.option, "6 PM")
        self.assertEqual(choice.survey, survey)

    def test_string_representation(self):
        """str(choice) is the option text."""
        user = create_user()
        group = models.Group.create_and_save(name="Soccer", picture="", description="", telegram_id=21)
        survey = Survey.create_and_save(question="When we meet for a soccer game?", creator=user,
                                        created="2016-06-04 8:00+01:00", group=group, multiselect=False)
        choice = Choice.create_and_save(option="6 PM", survey=survey)
        self.assertEqual(str(choice), "6 PM")
class TestVoteModel(TestCase):
    """Unit tests for the Vote model."""
    # Note: when voting, the choice-id string must always end with '-',
    # e.g. '-' (remove all), '23-' (single vote) or '22-23-24' (multi vote).

    def test_survey_create(self):
        """A saved vote links the user to the chosen option."""
        user = create_user()
        group = models.Group.create_and_save(name="Soccer", picture="", description="", telegram_id=22)
        survey = Survey.create_and_save(question="When we meet for a soccer game?", creator=user,
                                        created="2016-06-04 8:00+01:00", group=group, multiselect=False)
        choice = Choice.create_and_save(option="6 PM", survey=survey)
        vote = Vote.create_and_save(user=user, choice=choice)
        # assertEqual instead of the deprecated assertEquals alias
        self.assertEqual(len(Vote.objects.all()), 1)
        self.assertEqual(vote.user, user)
        self.assertEqual(vote.choice, choice)
        self.assertEqual(choice.survey, survey)
def send_message(arg1, arg2, arg3, **kwargs):
    """Stand-in for telepot.Bot.sendMessage used with mock.patch.

    All arguments are ignored; a canned Telegram message payload is returned.
    """
    sender = {"id": 123456789, "first_name": "Jon"}
    chat = {"title": "TestGroup", "id": -987654321, "type": "group"}
    entities = [{"offset": 0, "length": 24, "type": "bot_command"}]
    return {
        "text": "sometext",
        "from": sender,
        "date": 1473851534,
        "chat": chat,
        "entities": entities,
        "message_id": 894,
    }
def editMessageReplyMarkup(arg1, arg2, reply_markup=None):
    """Stand-in for telepot.Bot.editMessageReplyMarkup; intentionally a no-op."""
    pass
class TestASurveysView(TestCase):
    """Tests for the survey list view."""

    def setUp(self):
        self.factory = RequestFactory()
        self.client = Client()
        self.user = create_user()

    def test_not_authenticated_user_is_redirected_to_login(self):
        response = self.client.get(reverse('surveys:surveys'), follow=True)
        self.assertRedirects(response, reverse('accounts:login'))

    def test_surveys_mapping_works(self):
        self.client.force_login(user=self.user)
        response = self.client.get(reverse('surveys:surveys'))
        # assertEqual instead of the deprecated assertEquals alias
        self.assertEqual(response.status_code, 200)

    def test_surveys_shows_template(self):
        self.client.force_login(user=self.user)
        response = self.client.get(reverse('surveys:surveys'))
        self.assertTemplateUsed(response=response, template_name='surveys/surveys.html')
class TestCreateSurveyView(TestCase):
    """Tests for the survey creation view."""

    def setUp(self):
        self.factory = RequestFactory()
        self.client = Client()
        self.user = create_user()
        self.group = create_group(self.user)

    def test_not_authenticated_user_is_redirected_to_login(self):
        response = self.client.get(reverse('surveys:surveys'), follow=True)
        self.assertRedirects(response, reverse('accounts:login'))

    def test_create_survey_mapping_works(self):
        self.client.force_login(user=self.user)
        response = self.client.get(reverse('surveys:create_survey'))
        # assertEqual instead of the deprecated assertEquals alias
        self.assertEqual(response.status_code, 200)

    def test_create_survey_shows_template(self):
        self.client.force_login(user=self.user)
        response = self.client.get(reverse('surveys:create_survey'))
        self.assertTemplateUsed(response=response, template_name='surveys/create_survey.html')

    @mock.patch.object(telepot.Bot, 'sendMessage', send_message)
    def test_create_survey(self):
        """A valid POST creates the survey with both choices and redirects
        without error messages."""
        self.client.force_login(user=self.user)
        response = self.client.post(
            reverse('surveys:create_survey'),
            {'question': 'Testfrage', 'group': 'Test', 'option_1': '1', 'option_2': '2'}, follow=True)
        self.assertRedirects(response, reverse('surveys:surveys'))
        messages = list(response.context['messages'])
        self.assertEqual(len(messages), 0)
        survey = Survey.objects.get(group=self.group)
        choices = survey.choices.all().order_by('id')
        self.assertEqual(survey.question, 'Testfrage')
        self.assertEqual(survey.group.name, 'Test')
        self.assertEqual(choices[0].option, '1')
        self.assertEqual(choices[1].option, '2')
        self.assertEqual(len(choices.all()), 2)

    def test_create_survey_empty_question_failure(self):
        """An empty question is rejected with an error message."""
        self.client.force_login(user=self.user)
        response = self.client.post(
            reverse('surveys:create_survey'),
            {'question': '', 'group': 'Test', 'option_1': '1', 'option_2': '2'}, follow=True)
        self.assertRedirects(response, reverse('surveys:surveys'))
        messages = list(response.context['messages'])
        self.assertEqual(len(messages), 1)
        self.assertEqual(str(messages[0]), 'Survey question can`t be empty!')

    def test_create_survey_wrong_group_failure(self):
        """An unknown group is rejected with an error message."""
        self.client.force_login(user=self.user)
        response = self.client.post(
            reverse('surveys:create_survey'),
            {'question': 'What?', 'group': '', 'option_1': '1', 'option_2': '2'}, follow=True)
        self.assertRedirects(response, reverse('surveys:surveys'))
        messages = list(response.context['messages'])
        self.assertEqual(len(messages), 1)
        self.assertEqual(str(messages[0]), 'No group found!')
class TestDeleteSurveyView(TestCase):
    """Tests for survey deletion, including the creator-only permission."""

    def setUp(self):
        self.factory = RequestFactory()
        self.client = Client()
        self.user = create_user()
        self.group = create_group(self.user)
        self.new_survey = Survey.create_and_save(question="When we meet for a soccer game?", creator=self.user,
                                                 created="2016-06-04 8:00+01:00", group=self.group, multiselect=False)
        # pretend the survey was already announced in a Telegram chat
        self.new_survey.chat_id = "123"
        self.new_survey.msg_id = "123"
        self.new_option_1 = Choice.create_and_save("Optionstest 1", self.new_survey)
        self.new_option_2 = Choice.create_and_save("Optionstest 2", self.new_survey)

    @patch.object(telepot.Bot, 'editMessageText', send_message)
    def test_delete_survey_as_creator(self):
        """The creator may delete the survey."""
        self.client.force_login(user=self.user)
        # the response itself is irrelevant; only the DB state matters
        self.client.get(
            '/surveys/' + str(self.new_survey.id) + '/delete', follow=True)
        # assertEqual instead of the deprecated assertEquals alias
        self.assertEqual(len(Survey.objects.all()), 0)

    @patch.object(telepot.Bot, 'editMessageText', send_message)
    def test_delete_survey_not_as_creator(self):
        """A plain group member must not be able to delete the survey."""
        new_user = create_user(username="Sepp")
        self.group.users.add(new_user)
        self.client.force_login(user=new_user)
        response = self.client.get(
            '/surveys/' + str(self.new_survey.id) + '/delete', follow=True)
        data = json.loads(response.content.decode("utf-8"))
        self.assertEqual(data['message'], "Only the creator or group admin can delete a survey.")
class TestEditSurveyView(TestCase):
    """Tests for survey editing, including the creator-only permission."""

    def setUp(self):
        self.factory = RequestFactory()
        self.client = Client()
        self.user = create_user()
        self.group = create_group(self.user)
        self.new_survey = Survey.create_and_save(question="When we meet for a soccer game?", creator=self.user,
                                                 created="2016-06-04 8:00+01:00", group=self.group, multiselect=False)
        self.new_option_1 = Choice.create_and_save("Optionstest 1", self.new_survey)
        self.new_option_2 = Choice.create_and_save("Optionstest 2", self.new_survey)

    def test_not_authenticated_user_is_redirected_to_login(self):
        response = self.client.get('/surveys/'+str(self.new_survey.id)+'/edit_survey', follow=True)
        self.assertRedirects(response, reverse('accounts:login'))

    def test_edit_survey_mapping_works(self):
        self.client.force_login(user=self.user)
        response = self.client.get('/surveys/'+str(self.new_survey.id)+'/edit_survey')
        # assertEqual instead of the deprecated assertEquals alias
        self.assertEqual(response.status_code, 200)

    def test_edit_survey_shows_template(self):
        self.client.force_login(user=self.user)
        # use the real pk instead of a hard-coded '1' so the test does not
        # depend on the database resetting its id sequence
        response = self.client.get('/surveys/'+str(self.new_survey.id)+'/edit_survey')
        self.assertTemplateUsed(response=response, template_name='surveys/edit_survey.html')

    @patch.object(telepot.Bot, 'sendMessage', send_message)
    def test_edit_survey_as_creator(self):
        """The creator may change the question and the options."""
        self.client.force_login(user=self.user)
        response = self.client.post(
            '/surveys/'+str(self.new_survey.id)+'/edit_survey',
            {'question': 'When we meet for a football game?', 'group': self.group, 'option_1': '1 PM',
             'option_2': '2 PM'},
            follow=True)
        self.assertRedirects(response, reverse('surveys:surveys'))
        messages = list(response.context['messages'])
        self.assertEqual(len(messages), 0)
        survey = Survey.objects.get(group=self.group)
        self.assertEqual(survey.question, 'When we meet for a football game?')
        choices = survey.choices.all().order_by('id')
        self.assertEqual(choices[0].option, '1 PM')
        self.assertEqual(choices[1].option, '2 PM')

    @patch.object(telepot.Bot, 'sendMessage', send_message)
    def test_edit_survey_not_as_creator(self):
        """A plain group member must not be able to edit the survey."""
        new_user = create_user(username="Sepp")
        self.group.users.add(new_user)
        self.client.force_login(user=new_user)
        response = self.client.post(
            '/surveys/' + str(self.new_survey.id) + '/edit_survey',
            {'question': 'When we meet for a football game?', 'group': self.group, 'option_1': '1 PM',
             'option_2': '2 PM'},
            follow=True)
        data = json.loads(response.content.decode("utf-8"))
        self.assertEqual(data['message'], "Only the creator or group admin can edit a survey.")

    def test_edit_survey_empty_question_failure(self):
        """An empty question is rejected with an error message."""
        self.client.force_login(user=self.user)
        response = self.client.post(
            '/surveys/'+str(self.new_survey.id)+'/edit_survey',
            {'question': '', 'group': 'Test', 'option_1': '1', 'option_2': '2'}, follow=True)
        self.assertRedirects(response, reverse('surveys:surveys'))
        messages = list(response.context['messages'])
        self.assertEqual(len(messages), 1)
        self.assertEqual(str(messages[0]), 'Survey question can`t be empty!')
class TestVoteSurveyView(TestCase):
def setUp(self):
self.factory = RequestFactory()
self.client = Client()
self.user = create_user()
self.group = create_group(self.user)
self.new_survey = Survey.create_and_save(question="When we meet for a soccer game?", creator=self.user,
created="2016-06-04 8:00+01:00", group=self.group, multiselect=False)
self.new_option_1 = Choice.create_and_save("Optionstest 1", self.new_survey)
self.new_option_2 = Choice.create_and_save("Optionstest 2", self.new_survey)
def test_not_authenticated_user_is_redirected_to_login(self):
response = self.client.get(reverse('surveys:surveys'), follow=True)
self.assertRedirects(response, reverse('accounts:login'))
@patch.object(telepot.Bot, 'editMessageReplyMarkup', editMessageReplyMarkup)
def test_vote_single_select_survey(self):
self.client.force_login(user=self.user)
self.assertEquals(len(self.new_option_1.votes.all()), 0)
self.assertEquals(len(self.new_option_2.votes.all()), 0)
response = self.client.get(
'/surveys/' + str(self.new_survey.id) + '/' + str(self.new_option_1.id) + '-/vote_survey',
follow=True)
self.assertEquals(len(self.new_option_1.votes.all()), 1)
self.assertEquals(len(self.new_option_2.votes.all()), 0)
def test_vote_multi_select_on_single_survey_failure(self):
self.client.force_login(user=self.user)
self.assertEquals(len(self.new_option_1.votes.all()), 0)
self.assertEquals(len(self.new_option_2.votes.all()), 0)
response = self.client.get(
'/surveys/' + str(self.new_survey.id) + '/' + str(self.new_option_1.id) + '-' + str(self.new_option_2.id) +
'-/vote_survey', follow=True)
self.assertRedirects(response, reverse('surveys:surveys'))
messages = list(response.context['messages'])
self.assertEqual(len(messages), 1)
self.assertEqual(str(messages[0]), 'Something went wrong!')
self.assertEquals(len(self.new_option_1.votes.all()), 0)
self.assertEquals(len(self.new_option_2.votes.all()), 0)
@patch.object(telepot.Bot, 'editMessageReplyMarkup', editMessageReplyMarkup)
def test_vote_multi_select_survey_and_delete_votes(self):
self.client.force_login(user=self.user)
self.new_survey.multiselect = True
self.new_survey.save()
self.assertEquals(len(self.new_option_1.votes.all()), 0)
self.assertEquals(len(self.new_option_2.votes.all()), 0)
response = self.client.get(
'/surveys/' + str(self.new_survey.id) + '/' + str(self.new_option_1.id) + '-' + str(self.new_option_2.id) +
'-/vote_survey', follow=True)
self.assertEquals(len(self.new_option_1.votes.all()), 1)
self.assertEquals(len(self.new_option_2.votes.all()), 1)
response = self.client.get(
'/surveys/' + str(self.new_survey.id) + '/-/vote_survey', follow=True)
self.assertEquals(len(self.new_option_1.votes.all()), 0)
self.assertEquals(len(self.new_option_2.votes.all()), 0)
@patch.object(telepot.Bot, 'editMessageReplyMarkup', editMessageReplyMarkup)
def test_change_vote(self):
self.client.force_login(user=self.user)
self.assertEquals(len(self.new_option_1.votes.all()), 0)
self.assertEquals(len(self.new_option_2.votes.all()), 0)
response = self.client.get(
'/surveys/' + str(self.new_survey.id) + '/' + str(self.new_option_1.id) + '-/vote_survey', follow=True)
self.assertEquals(len(self.new_option_1.votes.all()), 1)
self.assertEquals(len(self.new_option_2.votes.all()), 0)
response = self.client.get(
'/surveys/' + str(self.new_survey.id) + '/' + str(self.new_option_2.id) + '-/vote_survey', follow=True)
self.assertEquals(len(self.new_option_1.votes.all()), 0)
self.assertEquals(len(self.new_option_2.votes.all()), 1)
class TestCloseSurveyView(TestCase):
def setUp(self):
self.factory = RequestFactory()
self.client = Client()
self.user = create_user()
self.group = create_group(self.user)
self.new_survey = Survey.create_and_save(question="When we meet for a soccer game?", creator=self.user,
created="2016-06-04 8:00+01:00", group=self.group, multiselect=False)
self.new_option_1 = Choice.create_and_save("Optionstest 1", self.new_survey)
self.new_option_2 = Choice.create_and_save("Optionstest 2", self.new_survey)
@mock.patch.object(telepot.Bot, 'sendMessage', send_message)
@mock.patch.object(telepot.Bot, 'editMessageText', send_message)
def test_close_survey_as_creator(self):
self.client.force_login(user=self.user)
self.assertEquals(self.new_survey.status, "open")
response = self.client.get(
'/surveys/' + str(self.new_survey.id) + '/close', follow=True)
self.assertEquals(Survey.objects.get(id=self.new_survey.id).status, "closed")
data = json.loads(response.content.decode("utf-8"))
self.assertEqual(data['message'], "Survey closed")
@mock.patch.object(telepot.Bot, 'sendMessage', send_message)
@mock.patch.object(telepot.Bot, 'editMessageText', send_message)
def test_close_survey_not_as_creator(self):
new_user = create_user(username="Sepp")
self.group.users.add(new_user)
self.client.force_login(user=new_user)
self.assertEquals(self.new_survey.status, "open")
response = self.client.get(
'/surveys/' + str(self.new_survey.id) + '/close', follow=True)
self.assertEquals(Survey.objects.get(id=self.new_survey.id).status, "open")
data = json.loads(response.content.decode("utf-8"))
self.assertEqual(data['message'], "Only the creator or group admin can close a survey.")
def test_close_survey_user_not_in_group(self):
new_user = create_user(username="Sepp")
self.client.force_login(user=new_user)
response = self.client.get(
'/surveys/' + str(self.new_survey.id) + '/close', follow=True)
self.assertRedirects(response, reverse('surveys:surveys'))
messages = list(response.context['messages'])
self.assertEqual(len(messages), 1)
self.assertEqual(str(messages[0]), 'Something went wrong!')
class TestSendResultSurveyView(TestCase):
def setUp(self):
self.factory = RequestFactory()
self.client = Client()
self.user = create_user()
self.group = create_group(self.user)
self.new_survey = Survey.create_and_save(question="When we meet for a soccer game?", creator=self.user,
created="2016-06-04 8:00+01:00", group=self.group, multiselect=False)
self.new_survey.status = "closed"
self.new_option_1 = Choice.create_and_save("Optionstest 1", self.new_survey)
self.new_option_2 = Choice.create_and_save("Optionstest 2", self.new_survey)
@mock.patch.object(telepot.Bot, 'sendMessage', send_message)
def test_send_result_survey_as_creator(self):
self.client.force_login(user=self.user)
self.assertEquals(self.new_survey.status, "closed")
response = self.client.post(
'/surveys/' + str(self.new_survey.id) + '/send', follow=True)
data = json.loads(response.content.decode("utf-8"))
self.assertEqual(data['message'], "Survey result posted")
@mock.patch.object(telepot.Bot, 'sendMessage', send_message)
def test_send_result_survey_not_as_creator(self):
new_user = create_user(username="Sepp")
self.group.users.add(new_user)
self.client.force_login(user=new_user)
self.assertEquals(self.new_survey.status, "closed")
response = self.client.post(
'/surveys/' + str(self.new_survey.id) + '/send', follow=True)
data = json.loads(response.content.decode("utf-8"))
self.assertEqual(data['message'], "Only the creator or group admin can send results.")
def test_send_result_survey_user_not_in_group(self):
new_user = create_user(username="Sepp")
self.client.force_login(user=new_user)
response = self.client.post(
'/surveys/' + str(self.new_survey.id) + '/send', follow=True)
self.assertRedirects(response, reverse('surveys:surveys'))
messages = list(response.context['messages'])
self.assertEqual(len(messages), 1)
self.assertEqual(str(messages[0]), 'Something went wrong!')
class TestResendSurveyView(TestCase):
def setUp(self):
self.factory = RequestFactory()
self.client = Client()
self.user = create_user()
self.group = create_group(self.user)
self.new_survey = Survey.create_and_save(question="When we meet for a soccer game?", creator=self.user,
created="2016-06-04 8:00+01:00", group=self.group, multiselect=False)
self.new_survey.status = "closed"
self.new_option_1 = Choice.create_and_save("Optionstest 1", self.new_survey)
self.new_option_2 = Choice.create_and_save("Optionstest 2", self.new_survey)
@mock.patch.object(telepot.Bot, 'sendMessage', send_message)
def test_resend_survey_as_creator(self):
self.client.force_login(user=self.user)
self.assertEquals(self.new_survey.status, "closed")
response = self.client.post(
'/surveys/' + str(self.new_survey.id) + '/resend', follow=True)
data = json.loads(response.content.decode("utf-8"))
self.assertEqual(data['message'], "Survey resent")
@mock.patch.object(telepot.Bot, 'sendMessage', send_message)
def test_resend_survey_not_as_creator(self):
new_user = create_user(username="Sepp")
self.group.users.add(new_user)
self.client.force_login(user=new_user)
self.assertEquals(self.new_survey.status, "closed")
response = self.client.post(
'/surveys/' + str(self.new_survey.id) + '/resend', follow=True)
data = json.loads(response.content.decode("utf-8"))
self.assertEqual(data['message'], "Only the creator or group admin can resend a survey.")
def test_resend_survey_user_not_in_group(self):
new_user = create_user(username="Sepp")
self.client.force_login(user=new_user)
response = self.client.post(
'/surveys/' + str(self.new_survey.id) + '/resend', follow=True)
self.assertRedirects(response, reverse('surveys:surveys'))
messages = list(response.context['messages'])
self.assertEqual(len(messages), 1)
self.assertEqual(str(messages[0]), 'Something went wrong!')
| 43.530726 | 119 | 0.658196 | 2,966 | 23,376 | 5.018543 | 0.070128 | 0.047497 | 0.047162 | 0.036278 | 0.881827 | 0.873094 | 0.864629 | 0.859053 | 0.83695 | 0.815586 | 0 | 0.021002 | 0.205595 | 23,376 | 536 | 120 | 43.61194 | 0.78056 | 0.005732 | 0 | 0.732997 | 0 | 0 | 0.135344 | 0.010457 | 0 | 0 | 0 | 0.001866 | 0.236776 | 1 | 0.115869 | false | 0.002519 | 0.027708 | 0.002519 | 0.173804 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
a3fb4371673a77b3887ae8a6e18cc9881f5db2c4 | 3,024 | py | Python | octoprint/tests/test_octoprint.py | divyamamgai/integrations-extras | 8c40a9cf870578687cc224ee91d3c70cd3a435a4 | [
"BSD-3-Clause"
] | 158 | 2016-06-02T16:25:31.000Z | 2022-03-16T15:55:14.000Z | octoprint/tests/test_octoprint.py | divyamamgai/integrations-extras | 8c40a9cf870578687cc224ee91d3c70cd3a435a4 | [
"BSD-3-Clause"
] | 554 | 2016-03-15T17:39:12.000Z | 2022-03-31T10:29:16.000Z | octoprint/tests/test_octoprint.py | divyamamgai/integrations-extras | 8c40a9cf870578687cc224ee91d3c70cd3a435a4 | [
"BSD-3-Clause"
] | 431 | 2016-05-13T15:33:13.000Z | 2022-03-31T10:06:46.000Z | import mock
import pytest
from datadog_checks.octoprint import OctoPrintCheck
@pytest.mark.skip
@pytest.mark.integration
@pytest.mark.usefixtures('dd_environment')
@mock.patch('datadog_checks.octoprint.OctoPrintCheck.get_rpi_core_temp')
def test_check(mock_rpi_temp, aggregator, mock_api_request, instance):
mock_rpi_temp.return_value = 49.0
check = OctoPrintCheck('octoprint', {}, [instance])
check.check(instance)
aggregator.assert_metric("octoprint.rpi_core_temp", 0.0, count=1)
aggregator.assert_metric("octoprint.printer_state", 1, count=1)
aggregator.assert_metric("octoprint.pct_completed", 0, count=1)
aggregator.assert_metric("octoprint.print_job_time", 1, count=1)
aggregator.assert_metric("octoprint.print_job_time_left", 9999999, count=1)
aggregator.assert_metric("octoprint.current_tool_temp", 50.0, count=1)
aggregator.assert_metric("octoprint.target_tool_temp", 190.0, count=1)
aggregator.assert_metric("octoprint.current_bed_temp", 68.0, count=1)
aggregator.assert_metric("octoprint.target_bed_temp", 70.0, count=1)
aggregator.assert_all_metrics_covered()
@pytest.mark.unit
@mock.patch('datadog_checks.octoprint.OctoPrintCheck.get_rpi_core_temp')
def test_empty_job(mock_rpi_temp, aggregator, mock_empty_api_request, instance):
mock_rpi_temp.return_value = 49.0
check = OctoPrintCheck('octoprint', {}, [instance])
check.check(instance)
aggregator.assert_metric("octoprint.rpi_core_temp", 49.0, count=1)
aggregator.assert_metric("octoprint.printer_state", 0, count=1)
aggregator.assert_metric("octoprint.current_tool_temp", 25.0, count=1)
aggregator.assert_metric("octoprint.target_tool_temp", 200.0, count=1)
aggregator.assert_metric("octoprint.current_bed_temp", 24.77, count=1)
aggregator.assert_metric("octoprint.target_bed_temp", 70.0, count=1)
aggregator.assert_all_metrics_covered()
@pytest.mark.unit
@mock.patch('datadog_checks.octoprint.OctoPrintCheck.get_rpi_core_temp')
def test_active_job(mock_rpi_temp, aggregator, mock_active_api_request, instance):
mock_rpi_temp.return_value = 49.0
check = OctoPrintCheck('octoprint', {}, [instance])
check.check(instance)
aggregator.assert_metric("octoprint.rpi_core_temp", 49.0, count=1)
aggregator.assert_metric("octoprint.printer_state", 2, count=1)
aggregator.assert_metric("octoprint.est_print_time", 146, count=1)
aggregator.assert_metric("octoprint.pct_completed", 0.22, count=1)
aggregator.assert_metric("octoprint.print_job_time", 4, count=1)
aggregator.assert_metric("octoprint.print_job_time_left", 15, count=1)
aggregator.assert_metric("octoprint.current_tool_temp", 25.0, count=1)
aggregator.assert_metric("octoprint.target_tool_temp", 200.0, count=1)
aggregator.assert_metric("octoprint.current_bed_temp", 24.77, count=1)
aggregator.assert_metric("octoprint.target_bed_temp", 70.0, count=1)
aggregator.assert_all_metrics_covered()
@pytest.mark.e2e
def test_e2e():
return True
| 42 | 82 | 0.774471 | 422 | 3,024 | 5.255924 | 0.163507 | 0.201984 | 0.247971 | 0.349414 | 0.899459 | 0.888188 | 0.846258 | 0.845356 | 0.844454 | 0.709197 | 0 | 0.039057 | 0.102513 | 3,024 | 71 | 83 | 42.591549 | 0.778187 | 0 | 0 | 0.518519 | 0 | 0 | 0.277116 | 0.263558 | 0 | 0 | 0 | 0 | 0.518519 | 1 | 0.074074 | false | 0 | 0.055556 | 0.018519 | 0.148148 | 0.592593 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 11 |
4ab5f6e7cdcc19446b76fb819d882d635f55b527 | 6,328 | py | Python | tests/test_server.py | larmoreg/fastmicro | 0ddd7ff814357196191612890102ce3bfed79580 | [
"MIT"
] | 1 | 2021-09-03T01:26:11.000Z | 2021-09-03T01:26:11.000Z | tests/test_server.py | larmoreg/fastmicro | 0ddd7ff814357196191612890102ce3bfed79580 | [
"MIT"
] | 9 | 2021-09-07T19:33:09.000Z | 2021-09-25T06:03:00.000Z | tests/test_server.py | larmoreg/fastmicro | 0ddd7ff814357196191612890102ce3bfed79580 | [
"MIT"
] | null | null | null | import logging
import pytest
from uuid import uuid4
from fastmicro.entrypoint import Entrypoint
from fastmicro.messaging import MessagingABC
from fastmicro.messaging.topic import Topic
from .conftest import User, Greeting
@pytest.mark.asyncio
async def test_process(
messaging: MessagingABC,
user_topic: Topic[User],
greeting_topic: Topic[Greeting],
_entrypoint: Entrypoint[User, Greeting],
) -> None:
input_message = User(name="Greg")
input_header = user_topic.header_type(correlation_id=uuid4())
input_header.message = input_message
async with messaging:
await user_topic.subscribe(_entrypoint.name)
await greeting_topic.subscribe("test", latest=True)
await user_topic.send([input_header])
await _entrypoint.process()
async with _entrypoint.reply_topic.receive("test", "test") as output_headers:
assert len(output_headers) == 1
output_header = output_headers[0]
assert output_header.correlation_id == input_header.correlation_id
assert not output_header.error
assert output_header.message
assert input_header.message
assert output_header.message.name == input_header.message.name
assert (
output_header.message.greeting == f"Hello, {input_header.message.name}!"
)
@pytest.mark.asyncio
async def test_timeout(
messaging: MessagingABC,
user_topic: Topic[User],
greeting_topic: Topic[Greeting],
_entrypoint: Entrypoint[User, Greeting],
caplog: pytest.LogCaptureFixture,
) -> None:
caplog.set_level(logging.CRITICAL, logger="fastmicro.entrypoint")
input_message = User(name="Greg", delay=1)
input_header = user_topic.header_type(correlation_id=uuid4())
input_header.message = input_message
async with messaging:
await user_topic.subscribe(_entrypoint.name)
await greeting_topic.subscribe("test", latest=True)
await user_topic.send([input_header])
await _entrypoint.process(processing_timeout=0.1)
async with _entrypoint.reply_topic.receive("test", "test") as output_headers:
assert len(output_headers) == 1
output_header = output_headers[0]
assert output_header.correlation_id == input_header.correlation_id
assert output_header.error == "Timed out after 0.1 sec"
assert not output_header.message
@pytest.mark.asyncio
async def test_exception(
messaging: MessagingABC,
user_topic: Topic[User],
greeting_topic: Topic[Greeting],
_invalid: Entrypoint[User, Greeting],
caplog: pytest.LogCaptureFixture,
) -> None:
caplog.set_level(logging.CRITICAL, logger="fastmicro.entrypoint")
input_message = User(name="Greg")
input_header = user_topic.header_type(correlation_id=uuid4())
input_header.message = input_message
async with messaging:
await user_topic.subscribe(_invalid.name)
await greeting_topic.subscribe("test", latest=True)
await user_topic.send([input_header])
await _invalid.process()
async with _invalid.reply_topic.receive("test", "test") as output_headers:
assert len(output_headers) == 1
output_header = output_headers[0]
assert output_header.correlation_id == input_header.correlation_id
assert output_header.error == "Test"
assert not output_header.message
@pytest.mark.asyncio
async def test_retries(
messaging: MessagingABC,
user_topic: Topic[User],
greeting_topic: Topic[Greeting],
_invalid: Entrypoint[User, Greeting],
caplog: pytest.LogCaptureFixture,
) -> None:
caplog.set_level(logging.CRITICAL, logger="fastmicro.entrypoint")
input_message = User(name="Greg")
input_header = user_topic.header_type(correlation_id=uuid4())
input_header.message = input_message
async with messaging:
await user_topic.subscribe(_invalid.name)
await greeting_topic.subscribe("test", latest=True)
await user_topic.send([input_header])
await _invalid.process(retries=1, sleep_time=0.1)
async with _invalid.reply_topic.receive("test", "test") as output_headers:
assert len(output_headers) == 1
output_header = output_headers[0]
assert output_header.correlation_id == input_header.correlation_id
assert output_header.error == "Test"
assert not output_header.message
@pytest.mark.asyncio
async def test_resends(
messaging: MessagingABC,
user_topic: Topic[User],
greeting_topic: Topic[Greeting],
_invalid: Entrypoint[User, Greeting],
caplog: pytest.LogCaptureFixture,
) -> None:
caplog.set_level(logging.CRITICAL, logger="fastmicro.entrypoint")
input_message = User(name="Greg")
input_header = user_topic.header_type(correlation_id=uuid4())
input_header.message = input_message
async with messaging:
await user_topic.subscribe(_invalid.name)
await greeting_topic.subscribe("test", latest=True)
await user_topic.send([input_header])
for i in range(2):
await _invalid.process(resends=1)
async with _invalid.reply_topic.receive("test", "test") as output_headers:
assert len(output_headers) == 1
output_header = output_headers[0]
assert output_header.correlation_id == input_header.correlation_id
assert output_header.error == "Test"
assert not output_header.message
@pytest.mark.asyncio
async def test_raises(
messaging: MessagingABC,
user_topic: Topic[User],
greeting_topic: Topic[Greeting],
_invalid: Entrypoint[User, Greeting],
caplog: pytest.LogCaptureFixture,
) -> None:
caplog.set_level(logging.CRITICAL, logger="fastmicro.entrypoint")
input_message = User(name="Greg")
input_header = user_topic.header_type(correlation_id=uuid4())
input_header.message = input_message
async with messaging:
await user_topic.subscribe(_invalid.name)
await greeting_topic.subscribe("test", latest=True)
await user_topic.send([input_header])
with pytest.raises(RuntimeError) as excinfo:
await _invalid.process(raises=True)
assert str(excinfo.value) == "Test"
| 34.579235 | 88 | 0.699115 | 740 | 6,328 | 5.743243 | 0.104054 | 0.067294 | 0.039529 | 0.031059 | 0.856706 | 0.856706 | 0.843059 | 0.843059 | 0.843059 | 0.843059 | 0 | 0.005384 | 0.207491 | 6,328 | 182 | 89 | 34.769231 | 0.842074 | 0 | 0 | 0.786207 | 0 | 0 | 0.041403 | 0.004425 | 0 | 0 | 0 | 0 | 0.165517 | 1 | 0 | false | 0 | 0.048276 | 0 | 0.048276 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
436e69b54bc3cc21255a45d1b629474ebc34ab45 | 168 | py | Python | main.py | oluwafenyi/comic-dl | 9d4497341a51a2089a6404c674b8a0567d0db83b | [
"MIT"
] | 4 | 2019-12-13T19:07:00.000Z | 2020-06-30T00:44:25.000Z | main.py | oluwafenyi/comic-dl | 9d4497341a51a2089a6404c674b8a0567d0db83b | [
"MIT"
] | null | null | null | main.py | oluwafenyi/comic-dl | 9d4497341a51a2089a6404c674b8a0567d0db83b | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
from comic_dl.core.commands import execute_from_command_line
# todo: comment code
if __name__ == '__main__':
execute_from_command_line()
| 16.8 | 60 | 0.767857 | 24 | 168 | 4.75 | 0.791667 | 0.192982 | 0.315789 | 0.385965 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.006897 | 0.136905 | 168 | 9 | 61 | 18.666667 | 0.77931 | 0.238095 | 0 | 0 | 0 | 0 | 0.063492 | 0 | 0 | 0 | 0 | 0.111111 | 0 | 1 | 0 | true | 0 | 0.333333 | 0 | 0.333333 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 7 |
438c3a7557811b65e5f97ec11425d2d033f455ab | 143 | py | Python | plugins/PRAplugin/__init__.py | alptezbasaran/raven | fd6fe8fe90b59d6dd3615cfea929722f3e04b2ca | [
"Apache-2.0"
] | 1 | 2018-07-02T21:12:48.000Z | 2018-07-02T21:12:48.000Z | plugins/PRAplugin/__init__.py | alptezbasaran/raven | fd6fe8fe90b59d6dd3615cfea929722f3e04b2ca | [
"Apache-2.0"
] | null | null | null | plugins/PRAplugin/__init__.py | alptezbasaran/raven | fd6fe8fe90b59d6dd3615cfea929722f3e04b2ca | [
"Apache-2.0"
] | null | null | null | from PRAplugin.src import ETModel
from PRAplugin.src import FTModel
from PRAplugin.src import GraphModel
from PRAplugin.src import MarkovModel
| 28.6 | 37 | 0.86014 | 20 | 143 | 6.15 | 0.4 | 0.422764 | 0.520325 | 0.715447 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.111888 | 143 | 4 | 38 | 35.75 | 0.968504 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
43cbb150c3cbd5abe888513655fcbc9db0862438 | 101,513 | py | Python | closed/Fujitsu/configs/dlrm/Server/__init__.py | ctuning/inference_results_v1.1 | d9176eca28fcf6d7a05ccb97994362a76a1eb5ab | [
"Apache-2.0"
] | 12 | 2021-09-23T08:05:57.000Z | 2022-03-21T03:52:11.000Z | closed/Fujitsu/configs/dlrm/Server/__init__.py | ctuning/inference_results_v1.1 | d9176eca28fcf6d7a05ccb97994362a76a1eb5ab | [
"Apache-2.0"
] | 11 | 2021-09-23T20:34:06.000Z | 2022-01-22T07:58:02.000Z | closed/Fujitsu/configs/dlrm/Server/__init__.py | ctuning/inference_results_v1.1 | d9176eca28fcf6d7a05ccb97994362a76a1eb5ab | [
"Apache-2.0"
] | 16 | 2021-09-23T20:26:38.000Z | 2022-03-09T12:59:56.000Z | # Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
sys.path.insert(0, os.getcwd())
from code.common.constants import Benchmark, Scenario
from code.common.system_list import System, Architecture, MIGConfiguration, MIGSlice
from configs.configuration import *
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99, PowerSetting.MaxP)
class A100_PCIex1(BenchmarkConfiguration):
    """DLRM Server settings for 1x A100-PCIe (custom harness, 99% accuracy target)."""

    # Target system and workload.
    system = System("A100-PCIe", Architecture.Ampere, 1)
    benchmark = Benchmark.DLRM
    scenario = Scenario.Server

    # Preprocessed Criteo inputs: int8 CHW4 numeric tensor plus int32 categorical tensor.
    sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
    tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"

    # Precision and tensor layout.
    precision = "int8"
    input_dtype = "int8"
    input_format = "chw4"
    output_padding_granularity = 128
    coalesced_tensor = True

    # Engine build options.
    use_small_tile_gemm_plugin = True
    gemm_plugin_fairshare_cache_size = 18
    enable_interleaved_top_mlp = False
    use_graphs = False

    # Harness runtime tuning.
    gpu_batch_size = 274000
    gpu_copy_streams = 1
    gpu_inference_streams = 1
    gpu_num_bundles = 2
    num_staging_batches = 4
    num_staging_threads = 4
    complete_threads = 1
    deque_timeout_usec = 1
    use_jemalloc = True

    # Offered load target for the Server scenario scheduler.
    server_target_qps = 180000
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A100_PCIex1_HighAccuracy(BenchmarkConfiguration):
    """DLRM Server settings for 1x A100-PCIe (custom harness, 99.9% accuracy target)."""

    # Target system and workload.
    system = System("A100-PCIe", Architecture.Ampere, 1)
    benchmark = Benchmark.DLRM
    scenario = Scenario.Server

    # Preprocessed Criteo inputs: int8 CHW4 numeric tensor plus int32 categorical tensor.
    sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
    tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"

    # Precision and tensor layout.
    precision = "int8"
    input_dtype = "int8"
    input_format = "chw4"
    output_padding_granularity = 128
    coalesced_tensor = True

    # Engine build options.
    use_small_tile_gemm_plugin = True
    gemm_plugin_fairshare_cache_size = 18
    enable_interleaved_top_mlp = False
    use_graphs = False

    # Harness runtime tuning.
    gpu_batch_size = 274000
    gpu_copy_streams = 1
    gpu_inference_streams = 1
    gpu_num_bundles = 2
    num_staging_batches = 4
    num_staging_threads = 4
    complete_threads = 1
    deque_timeout_usec = 1
    use_jemalloc = True

    # Offered load target for the Server scenario scheduler.
    server_target_qps = 180000
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99, PowerSetting.MaxP)
class A100_PCIex1_Triton(BenchmarkConfiguration):
    """DLRM Server settings for 1x A100-PCIe (Triton harness, 99% accuracy target)."""

    # Target system and workload.
    system = System("A100-PCIe", Architecture.Ampere, 1)
    benchmark = Benchmark.DLRM
    scenario = Scenario.Server

    # Preprocessed Criteo inputs: int8 CHW4 numeric tensor plus int32 categorical tensor.
    sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
    tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"

    # Precision and tensor layout.
    precision = "int8"
    input_dtype = "int8"
    input_format = "chw4"
    output_padding_granularity = 128
    coalesced_tensor = True

    # Engine build options.
    use_small_tile_gemm_plugin = True
    gemm_plugin_fairshare_cache_size = 18
    enable_interleaved_top_mlp = False
    use_graphs = False

    # Harness runtime tuning.
    gpu_batch_size = 274000
    gpu_copy_streams = 1
    gpu_inference_streams = 1
    gpu_num_bundles = 2
    num_staging_batches = 4
    num_staging_threads = 4
    complete_threads = 1
    deque_timeout_usec = 1
    use_jemalloc = True

    # Offered load target for the Server scenario scheduler.
    server_target_qps = 180000

    # Triton inference-server harness settings.
    use_triton = True
    batch_triton_requests = True
    buffer_manager_thread_count = 8
    max_queue_delay_usec = 10000
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A100_PCIex1_HighAccuracy_Triton(BenchmarkConfiguration):
    """DLRM Server settings for 1x A100-PCIe (Triton harness, 99.9% accuracy target)."""

    # Target system and workload.
    system = System("A100-PCIe", Architecture.Ampere, 1)
    benchmark = Benchmark.DLRM
    scenario = Scenario.Server

    # Preprocessed Criteo inputs: int8 CHW4 numeric tensor plus int32 categorical tensor.
    sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
    tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"

    # Precision and tensor layout.
    precision = "int8"
    input_dtype = "int8"
    input_format = "chw4"
    output_padding_granularity = 128
    coalesced_tensor = True

    # Engine build options.
    use_small_tile_gemm_plugin = True
    gemm_plugin_fairshare_cache_size = 18
    enable_interleaved_top_mlp = False
    use_graphs = False

    # Harness runtime tuning.
    gpu_batch_size = 274000
    gpu_copy_streams = 1
    gpu_inference_streams = 1
    gpu_num_bundles = 2
    num_staging_batches = 4
    num_staging_threads = 4
    complete_threads = 1
    deque_timeout_usec = 1
    use_jemalloc = True

    # Offered load target for the Server scenario scheduler.
    server_target_qps = 180000

    # Triton inference-server harness settings.
    use_triton = True
    batch_triton_requests = True
    buffer_manager_thread_count = 8
    max_queue_delay_usec = 10000
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99, PowerSetting.MaxP)
class A100_PCIex8(BenchmarkConfiguration):
    """DLRM Server settings for 8x A100-PCIe (custom harness, 99% accuracy target)."""

    # Target system and workload.
    system = System("A100-PCIe", Architecture.Ampere, 8)
    benchmark = Benchmark.DLRM
    scenario = Scenario.Server

    # Preprocessed Criteo inputs: int8 CHW4 numeric tensor plus int32 categorical tensor.
    sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
    tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"

    # Precision and tensor layout.
    precision = "int8"
    input_dtype = "int8"
    input_format = "chw4"
    output_padding_granularity = 128
    coalesced_tensor = True

    # Engine build options.
    use_small_tile_gemm_plugin = True
    gemm_plugin_fairshare_cache_size = 18
    enable_interleaved_top_mlp = False
    use_graphs = False

    # Harness runtime tuning.
    gpu_batch_size = 274000
    gpu_copy_streams = 1
    gpu_inference_streams = 1
    gpu_num_bundles = 2
    num_staging_batches = 4
    num_staging_threads = 4
    complete_threads = 1
    deque_timeout_usec = 1
    use_jemalloc = False

    # GPU-to-CPU-core affinity map for the 8-GPU NUMA topology.
    numa_config = "3:0-15&2:16-31&1:32-47&0:48-63&7:64-79&6:80-95&5:96-111&4:112-127"

    # Offered load target for the Server scenario scheduler.
    server_target_qps = 1200000
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A100_PCIex8_HighAccuracy(BenchmarkConfiguration):
    """DLRM Server settings for 8x A100-PCIe (custom harness, 99.9% accuracy target)."""

    # Target system and workload.
    system = System("A100-PCIe", Architecture.Ampere, 8)
    benchmark = Benchmark.DLRM
    scenario = Scenario.Server

    # Preprocessed Criteo inputs: int8 CHW4 numeric tensor plus int32 categorical tensor.
    sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
    tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"

    # Precision and tensor layout.
    precision = "int8"
    input_dtype = "int8"
    input_format = "chw4"
    output_padding_granularity = 128
    coalesced_tensor = True

    # Engine build options.
    use_small_tile_gemm_plugin = True
    gemm_plugin_fairshare_cache_size = 18
    enable_interleaved_top_mlp = False
    use_graphs = False

    # Harness runtime tuning.
    gpu_batch_size = 274000
    gpu_copy_streams = 1
    gpu_inference_streams = 1
    gpu_num_bundles = 2
    num_staging_batches = 4
    num_staging_threads = 4
    complete_threads = 1
    deque_timeout_usec = 1
    use_jemalloc = False

    # GPU-to-CPU-core affinity map for the 8-GPU NUMA topology.
    numa_config = "3:0-15&2:16-31&1:32-47&0:48-63&7:64-79&6:80-95&5:96-111&4:112-127"

    # Offered load target for the Server scenario scheduler.
    server_target_qps = 1200000
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99, PowerSetting.MaxP)
class A100_PCIex8_Triton(BenchmarkConfiguration):
    """DLRM Server settings for 8x A100-PCIe (Triton harness, 99% accuracy target)."""

    # Target system and workload.
    system = System("A100-PCIe", Architecture.Ampere, 8)
    benchmark = Benchmark.DLRM
    scenario = Scenario.Server

    # Preprocessed Criteo inputs: int8 CHW4 numeric tensor plus int32 categorical tensor.
    sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
    tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"

    # Precision and tensor layout.
    precision = "int8"
    input_dtype = "int8"
    input_format = "chw4"
    output_padding_granularity = 128
    coalesced_tensor = True

    # Engine build options.
    use_small_tile_gemm_plugin = True
    gemm_plugin_fairshare_cache_size = 18
    enable_interleaved_top_mlp = False
    use_graphs = False

    # Harness runtime tuning.
    gpu_batch_size = 274000
    gpu_copy_streams = 1
    gpu_inference_streams = 1
    gpu_num_bundles = 2
    num_staging_batches = 4
    num_staging_threads = 4
    complete_threads = 1
    deque_timeout_usec = 1
    use_jemalloc = False

    # GPU-to-CPU-core affinity map for the 8-GPU NUMA topology.
    numa_config = "3:0-15&2:16-31&1:32-47&0:48-63&7:64-79&6:80-95&5:96-111&4:112-127"

    # Offered load target for the Server scenario scheduler.
    server_target_qps = 600000

    # Triton inference-server harness settings.
    use_triton = True
    batch_triton_requests = True
    buffer_manager_thread_count = 0
    max_queue_delay_usec = 10000
    gather_kernel_buffer_threshold = 64
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A100_PCIex8_HighAccuracy_Triton(A100_PCIex8_Triton):
    """99.9%-accuracy Triton registration for 8x A100-PCIe.

    Every attribute was byte-identical to A100_PCIex8_Triton, so this now
    inherits from it instead of duplicating all 27 settings — the same
    pattern this file already uses (e.g. A100_PCIe_80GBx1_HighAccuracy).
    """
    pass
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99, PowerSetting.MaxQ)
class A100_PCIex8_MaxQ(BenchmarkConfiguration):
    """DLRM Server config: 8x A100-PCIe, Custom harness, 99% accuracy, MaxQ (power-limited)."""
    system = System("A100-PCIe", Architecture.Ampere, 8)
    # Input/precision settings (int8 inference on preprocessed Criteo data).
    coalesced_tensor = True
    enable_interleaved_top_mlp = False
    gpu_copy_streams = 1
    gpu_inference_streams = 1
    input_dtype = "int8"
    input_format = "chw4"
    output_padding_granularity = 128
    precision = "int8"
    sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
    tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
    use_graphs = False
    # Harness batching/staging tuning.
    complete_threads = 1
    deque_timeout_usec = 1
    gemm_plugin_fairshare_cache_size = 18
    gpu_batch_size = 274000
    gpu_num_bundles = 2
    num_staging_batches = 4
    num_staging_threads = 4
    # Lower QPS target than the MaxP config (950k vs 1.2M).
    server_target_qps = 950000
    use_jemalloc = False
    use_small_tile_gemm_plugin = True
    # Per-GPU CPU-core affinity string ("gpu:cores&..." — confirm format against harness docs).
    numa_config = "3:0-7&2:8-15&1:16-23&0:24-31&7:32-39&6:40-47&5:48-55&4:56-63"
    scenario = Scenario.Server
    benchmark = Benchmark.DLRM
    # Presumably watts — TODO confirm unit against harness docs.
    power_limit = 225
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99_9, PowerSetting.MaxQ)
class A100_PCIex8_HighAccuracy_MaxQ(A100_PCIex8_MaxQ):
    """99.9%-accuracy MaxQ registration for 8x A100-PCIe.

    Every attribute was byte-identical to A100_PCIex8_MaxQ, so this now
    inherits from it instead of duplicating all settings — the same pattern
    this file already uses (e.g. A100_PCIe_80GBx1_HighAccuracy).
    """
    pass
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99_9, PowerSetting.MaxQ)
class A100_PCIex8_HighAccuracy_Triton_MaxQ(BenchmarkConfiguration):
    """DLRM Server config: 8x A100-PCIe, Triton harness, 99.9% accuracy, MaxQ."""
    system = System("A100-PCIe", Architecture.Ampere, 8)
    # Input/precision settings (int8 inference on preprocessed Criteo data).
    coalesced_tensor = True
    enable_interleaved_top_mlp = False
    gpu_copy_streams = 1
    gpu_inference_streams = 1
    input_dtype = "int8"
    input_format = "chw4"
    output_padding_granularity = 128
    precision = "int8"
    sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
    tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
    use_graphs = False
    # Harness batching/staging tuning.
    complete_threads = 1
    deque_timeout_usec = 1
    gemm_plugin_fairshare_cache_size = 18
    gpu_batch_size = 274000
    gpu_num_bundles = 2
    num_staging_batches = 4
    num_staging_threads = 4
    server_target_qps = 700000
    use_jemalloc = False
    use_small_tile_gemm_plugin = True
    # Per-GPU CPU-core affinity string ("gpu:cores&..." — confirm format against harness docs).
    numa_config = "3:0-7&2:8-15&1:16-23&0:24-31&7:32-39&6:40-47&5:48-55&4:56-63"
    scenario = Scenario.Server
    benchmark = Benchmark.DLRM
    # Triton-specific options.
    buffer_manager_thread_count = 8
    max_queue_delay_usec = 10000
    # Presumably watts — TODO confirm unit against harness docs.
    power_limit = 225
    use_triton = True
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99, PowerSetting.MaxQ)
class A100_PCIex8_Triton_MaxQ(A100_PCIex8_HighAccuracy_Triton_MaxQ):
    """99%-accuracy Triton MaxQ registration for 8x A100-PCIe.

    Every attribute was byte-identical to A100_PCIex8_HighAccuracy_Triton_MaxQ,
    so this now inherits from it instead of duplicating all settings — the
    same pattern this file already uses (e.g. A100_SXM_80GBx4_Triton).
    """
    pass
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99, PowerSetting.MaxP)
class A100_PCIe_80GBx1(BenchmarkConfiguration):
    """DLRM Server config: 1x A100-PCIe-80GB, Custom harness, 99% accuracy, MaxP."""
    system = System("A100-PCIe-80GB", Architecture.Ampere, 1)
    # Input/precision settings (int8 inference on preprocessed Criteo data).
    coalesced_tensor = True
    enable_interleaved_top_mlp = False
    gpu_copy_streams = 1
    gpu_inference_streams = 1
    input_dtype = "int8"
    input_format = "chw4"
    output_padding_granularity = 128
    precision = "int8"
    sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
    tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
    use_graphs = False
    # Harness batching/staging tuning.
    complete_threads = 1
    deque_timeout_usec = 1
    gemm_plugin_fairshare_cache_size = 18
    gpu_batch_size = 274000
    gpu_num_bundles = 2
    num_staging_batches = 4
    num_staging_threads = 4
    server_target_qps = 180000
    # Single-GPU config enables jemalloc; the 8-GPU siblings in this file do not.
    use_jemalloc = True
    use_small_tile_gemm_plugin = True
    scenario = Scenario.Server
    benchmark = Benchmark.DLRM
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A100_PCIe_80GBx1_HighAccuracy(A100_PCIe_80GBx1):
    """99.9%-accuracy registration reusing A100_PCIe_80GBx1 settings unchanged."""
    pass
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99, PowerSetting.MaxP)
class A100_PCIe_80GBx1_Triton(BenchmarkConfiguration):
    """DLRM Server config: 1x A100-PCIe-80GB, Triton harness, 99% accuracy, MaxP."""
    system = System("A100-PCIe-80GB", Architecture.Ampere, 1)
    # Input/precision settings (int8 inference on preprocessed Criteo data).
    coalesced_tensor = True
    enable_interleaved_top_mlp = False
    gpu_copy_streams = 1
    gpu_inference_streams = 1
    input_dtype = "int8"
    input_format = "chw4"
    output_padding_granularity = 128
    precision = "int8"
    sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
    tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
    use_graphs = False
    # Harness batching/staging tuning.
    complete_threads = 1
    deque_timeout_usec = 1
    gemm_plugin_fairshare_cache_size = 18
    gpu_batch_size = 274000
    gpu_num_bundles = 2
    num_staging_batches = 4
    num_staging_threads = 4
    server_target_qps = 180000
    use_jemalloc = True
    use_small_tile_gemm_plugin = True
    scenario = Scenario.Server
    benchmark = Benchmark.DLRM
    # Triton-specific options.
    batch_triton_requests = True
    buffer_manager_thread_count = 8
    max_queue_delay_usec = 10000
    use_triton = True
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A100_PCIe_80GBx1_HighAccuracy_Triton(A100_PCIe_80GBx1_Triton):
    """99.9%-accuracy registration reusing A100_PCIe_80GBx1_Triton settings unchanged."""
    pass
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99, PowerSetting.MaxP)
class A100_PCIe_80GBx8(BenchmarkConfiguration):
    """DLRM Server config: 8x A100-PCIe-80GB, Custom harness, 99% accuracy, MaxP."""
    system = System("A100-PCIe-80GB", Architecture.Ampere, 8)
    # Input/precision settings (int8 inference on preprocessed Criteo data).
    coalesced_tensor = True
    enable_interleaved_top_mlp = False
    gpu_copy_streams = 1
    gpu_inference_streams = 1
    input_dtype = "int8"
    input_format = "chw4"
    output_padding_granularity = 128
    precision = "int8"
    sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
    tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
    use_graphs = False
    # Harness batching/staging tuning.
    complete_threads = 1
    deque_timeout_usec = 1
    gemm_plugin_fairshare_cache_size = 18
    gpu_batch_size = 274000
    gpu_num_bundles = 2
    num_staging_batches = 4
    num_staging_threads = 4
    server_target_qps = 1200000
    use_jemalloc = False
    use_small_tile_gemm_plugin = True
    # Per-GPU CPU-core affinity string ("gpu:cores&..." — confirm format against harness docs).
    numa_config = "3:0-15&2:16-31&1:32-47&0:48-63&7:64-79&6:80-95&5:96-111&4:112-127"
    scenario = Scenario.Server
    benchmark = Benchmark.DLRM
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A100_PCIe_80GBx8_HighAccuracy(A100_PCIe_80GBx8):
    """99.9%-accuracy registration reusing A100_PCIe_80GBx8 settings unchanged."""
    pass
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99, PowerSetting.MaxP)
class A100_PCIe_80GBx8_Triton(BenchmarkConfiguration):
    """DLRM Server config: 8x A100-PCIe-80GB, Triton harness, 99% accuracy, MaxP."""
    system = System("A100-PCIe-80GB", Architecture.Ampere, 8)
    # Input/precision settings (int8 inference on preprocessed Criteo data).
    coalesced_tensor = True
    enable_interleaved_top_mlp = False
    gpu_copy_streams = 1
    gpu_inference_streams = 1
    input_dtype = "int8"
    input_format = "chw4"
    output_padding_granularity = 128
    precision = "int8"
    sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
    tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
    use_graphs = False
    # Harness batching/staging tuning.
    complete_threads = 1
    deque_timeout_usec = 1
    gemm_plugin_fairshare_cache_size = 18
    gpu_batch_size = 274000
    gpu_num_bundles = 2
    num_staging_batches = 4
    num_staging_threads = 4
    server_target_qps = 600000
    use_jemalloc = False
    use_small_tile_gemm_plugin = True
    # Per-GPU CPU-core affinity string ("gpu:cores&..." — confirm format against harness docs).
    numa_config = "3:0-15&2:16-31&1:32-47&0:48-63&7:64-79&6:80-95&5:96-111&4:112-127"
    scenario = Scenario.Server
    benchmark = Benchmark.DLRM
    # Triton-specific options.
    batch_triton_requests = True
    buffer_manager_thread_count = 0
    max_queue_delay_usec = 10000
    use_triton = True
    gather_kernel_buffer_threshold = 64
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A100_PCIe_80GBx8_HighAccuracy_Triton(A100_PCIe_80GBx8_Triton):
    """99.9%-accuracy registration reusing A100_PCIe_80GBx8_Triton settings unchanged."""
    pass
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99, PowerSetting.MaxP)
class A100_PCIe_80GB_aarch64x1(BenchmarkConfiguration):
    """DLRM Server config: 1x A100-PCIe-80GB on an aarch64 host, Custom harness, 99%, MaxP."""
    system = System("A100-PCIe-80GB", Architecture.Ampere, 1, cpu_arch=CPUArch.aarch64)
    # Input/precision settings (int8 inference on preprocessed Criteo data).
    coalesced_tensor = True
    enable_interleaved_top_mlp = False
    gpu_copy_streams = 1
    gpu_inference_streams = 1
    input_dtype = "int8"
    input_format = "chw4"
    output_padding_granularity = 128
    precision = "int8"
    sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
    tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
    use_graphs = False
    # Harness batching/staging tuning.
    complete_threads = 1
    deque_timeout_usec = 1
    gemm_plugin_fairshare_cache_size = 18
    gpu_batch_size = 274000
    gpu_num_bundles = 2
    num_staging_batches = 4
    num_staging_threads = 4
    server_target_qps = 180000
    use_jemalloc = True
    use_small_tile_gemm_plugin = True
    scenario = Scenario.Server
    benchmark = Benchmark.DLRM
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A100_PCIe_80GB_aarch64x1_HighAccuracy(A100_PCIe_80GB_aarch64x1):
    """99.9%-accuracy registration reusing A100_PCIe_80GB_aarch64x1 settings unchanged."""
    pass
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99, PowerSetting.MaxP)
class A100_PCIe_80GB_aarch64x2(BenchmarkConfiguration):
    """DLRM Server config: 2x A100-PCIe-80GB on an aarch64 host, Custom harness, 99%, MaxP."""
    system = System("A100-PCIe-80GB", Architecture.Ampere, 2, cpu_arch=CPUArch.aarch64)
    # Input/precision settings (int8 inference on preprocessed Criteo data).
    coalesced_tensor = True
    enable_interleaved_top_mlp = False
    gpu_copy_streams = 1
    gpu_inference_streams = 1
    input_dtype = "int8"
    input_format = "chw4"
    output_padding_granularity = 128
    precision = "int8"
    sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
    tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
    use_graphs = False
    # Harness batching/staging tuning.
    complete_threads = 1
    deque_timeout_usec = 1
    gemm_plugin_fairshare_cache_size = 18
    gpu_batch_size = 274000
    gpu_num_bundles = 2
    num_staging_batches = 4
    num_staging_threads = 4
    server_target_qps = 300000
    use_jemalloc = False
    use_small_tile_gemm_plugin = True
    scenario = Scenario.Server
    benchmark = Benchmark.DLRM
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A100_PCIe_80GB_aarch64x2_HighAccuracy(A100_PCIe_80GB_aarch64x2):
    """99.9%-accuracy registration reusing A100_PCIe_80GB_aarch64x2 settings unchanged."""
    pass
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99, PowerSetting.MaxP)
class A100_PCIe_80GB_aarch64x4(BenchmarkConfiguration):
    """DLRM Server config: 4x A100-PCIe-80GB on an aarch64 host, Custom harness, 99%, MaxP."""
    system = System("A100-PCIe-80GB", Architecture.Ampere, 4, cpu_arch=CPUArch.aarch64)
    # Input/precision settings (int8 inference on preprocessed Criteo data).
    coalesced_tensor = True
    enable_interleaved_top_mlp = False
    gpu_copy_streams = 1
    gpu_inference_streams = 1
    input_dtype = "int8"
    input_format = "chw4"
    output_padding_granularity = 128
    precision = "int8"
    sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
    tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
    use_graphs = False
    # Harness batching/staging tuning.
    complete_threads = 1
    deque_timeout_usec = 1
    gemm_plugin_fairshare_cache_size = 18
    gpu_batch_size = 274000
    gpu_num_bundles = 2
    num_staging_batches = 4
    num_staging_threads = 4
    server_target_qps = 600000
    use_jemalloc = False
    use_small_tile_gemm_plugin = True
    # TODO: Set numa
    scenario = Scenario.Server
    benchmark = Benchmark.DLRM
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A100_PCIe_80GB_aarch64x4_HighAccuracy(A100_PCIe_80GB_aarch64x4):
    """99.9%-accuracy registration reusing A100_PCIe_80GB_aarch64x4 settings unchanged."""
    pass
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99, PowerSetting.MaxQ)
class A100_PCIe_80GB_aarch64x4_MaxQ(BenchmarkConfiguration):
    """DLRM Server config: 4x A100-PCIe-80GB on an aarch64 host, Custom harness, 99%, MaxQ."""
    system = System("A100-PCIe-80GB", Architecture.Ampere, 4, cpu_arch=CPUArch.aarch64)
    # Input/precision settings (int8 inference on preprocessed Criteo data).
    coalesced_tensor = True
    enable_interleaved_top_mlp = False
    gpu_copy_streams = 1
    gpu_inference_streams = 1
    input_dtype = "int8"
    input_format = "chw4"
    output_padding_granularity = 128
    precision = "int8"
    sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
    tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
    use_graphs = False
    # Harness batching/staging tuning.
    complete_threads = 1
    deque_timeout_usec = 1
    gemm_plugin_fairshare_cache_size = 18
    gpu_batch_size = 274000
    gpu_num_bundles = 2
    num_staging_batches = 4
    num_staging_threads = 4
    server_target_qps = 500000
    use_jemalloc = False
    use_small_tile_gemm_plugin = True
    # TODO: Set numa
    # NOTE(review): the original "TODO: Set power_limit" is stale — power_limit is
    # assigned below; the remaining open question is whether 200 is the tuned value.
    power_limit = 200
    scenario = Scenario.Server
    benchmark = Benchmark.DLRM
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99_9, PowerSetting.MaxQ)
class A100_PCIe_80GB_aarch64x4_HighAccuracy_MaxQ(A100_PCIe_80GB_aarch64x4_MaxQ):
    """99.9%-accuracy registration reusing A100_PCIe_80GB_aarch64x4_MaxQ settings unchanged."""
    pass
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99, PowerSetting.MaxP)
class A100_SXM_80GB_MIG_1x1g10gb(BenchmarkConfiguration):
    """DLRM Server config: single 1g.10gb MIG slice of an A100-SXM-80GB, Custom harness, 99%, MaxP."""
    # One 1g/10gb MIG slice on GPU 0.
    _mig_configuration = MIGConfiguration({0: {MIGSlice(1, 10): 1}})
    system = System("A100-SXM-80GB", Architecture.Ampere, 1, mig_conf=_mig_configuration)
    # Input/precision settings (int8 inference on preprocessed Criteo data).
    coalesced_tensor = True
    enable_interleaved_top_mlp = False
    gpu_copy_streams = 1
    gpu_inference_streams = 1
    input_dtype = "int8"
    input_format = "chw4"
    output_padding_granularity = 128
    precision = "int8"
    sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
    tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
    use_graphs = False
    # Harness batching/staging tuning.
    complete_threads = 1
    deque_timeout_usec = 1
    # Only a fraction of embedding weights fit in the 10 GB MIG slice —
    # presumably the remainder stays on the host; confirm against harness docs.
    embedding_weights_on_gpu_part = 0.3
    gemm_plugin_fairshare_cache_size = 18
    # Much smaller batch/QPS than full-GPU configs, sized for the MIG slice.
    gpu_batch_size = 65500
    gpu_num_bundles = 2
    num_staging_batches = 4
    num_staging_threads = 4
    server_target_qps = 36000
    use_jemalloc = True
    use_small_tile_gemm_plugin = True
    scenario = Scenario.Server
    benchmark = Benchmark.DLRM
@ConfigRegistry.register(HarnessType.HeteroMIG, AccuracyTarget.k_99, PowerSetting.MaxP)
class A100_SXM_80GB_MIG_1x1g10gb_Hetero(A100_SXM_80GB_MIG_1x1g10gb):
    """HeteroMIG-harness registration reusing A100_SXM_80GB_MIG_1x1g10gb settings unchanged."""
    pass
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A100_SXM_80GB_MIG_1x1g10gb_HighAccuracy(A100_SXM_80GB_MIG_1x1g10gb):
    """99.9%-accuracy registration for the 1g.10gb MIG config.

    Every attribute (including _mig_configuration) was byte-identical to
    A100_SXM_80GB_MIG_1x1g10gb, so this now inherits from it instead of
    duplicating all settings — the same pattern this file already uses
    (e.g. A100_SXM_80GB_MIG_1x1g10gb_Hetero).
    """
    pass
@ConfigRegistry.register(HarnessType.HeteroMIG, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A100_SXM_80GB_MIG_1x1g10gb_Hetero_HighAccuracy(A100_SXM_80GB_MIG_1x1g10gb_HighAccuracy):
    """HeteroMIG 99.9%-accuracy registration reusing the HighAccuracy MIG settings unchanged."""
    pass
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A100_SXM_80GB_MIG_1x1g10gb_HighAccuracy_Triton(BenchmarkConfiguration):
    """DLRM Server config: 1g.10gb MIG slice of an A100-SXM-80GB, Triton harness, 99.9%, MaxP."""
    # One 1g/10gb MIG slice on GPU 0.
    _mig_configuration = MIGConfiguration({0: {MIGSlice(1, 10): 1}})
    system = System("A100-SXM-80GB", Architecture.Ampere, 1, mig_conf=_mig_configuration)
    # Input/precision settings (int8 inference on preprocessed Criteo data).
    coalesced_tensor = True
    enable_interleaved_top_mlp = False
    gpu_copy_streams = 1
    gpu_inference_streams = 1
    input_dtype = "int8"
    input_format = "chw4"
    output_padding_granularity = 128
    precision = "int8"
    sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
    tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
    use_graphs = False
    # Harness batching/staging tuning.
    complete_threads = 1
    deque_timeout_usec = 1
    # Only a fraction of embedding weights kept on the 10 GB MIG slice.
    embedding_weights_on_gpu_part = 0.3
    gemm_plugin_fairshare_cache_size = 18
    gpu_batch_size = 65500
    gpu_num_bundles = 2
    num_staging_batches = 4
    num_staging_threads = 4
    # Lower target than the Custom-harness MIG config (20k vs 36k).
    server_target_qps = 20000
    use_jemalloc = True
    use_small_tile_gemm_plugin = True
    scenario = Scenario.Server
    benchmark = Benchmark.DLRM
    # Triton-specific options.
    buffer_manager_thread_count = 8
    use_triton = True
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99, PowerSetting.MaxP)
class A100_SXM_80GB_MIG_1x1g10gb_Triton(A100_SXM_80GB_MIG_1x1g10gb_HighAccuracy_Triton):
    """99%-accuracy Triton registration for the 1g.10gb MIG config.

    Every attribute was byte-identical to
    A100_SXM_80GB_MIG_1x1g10gb_HighAccuracy_Triton, so this now inherits
    from it instead of duplicating all settings — the same pattern this
    file already uses (e.g. A100_SXM_80GBx4_Triton).
    """
    pass
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99, PowerSetting.MaxP)
class A100_SXM_80GBx1(BenchmarkConfiguration):
    """DLRM Server config: 1x A100-SXM-80GB, Custom harness, 99% accuracy, MaxP."""
    system = System("A100-SXM-80GB", Architecture.Ampere, 1)
    # Input/precision settings (int8 inference on preprocessed Criteo data).
    coalesced_tensor = True
    enable_interleaved_top_mlp = False
    gpu_copy_streams = 1
    gpu_inference_streams = 1
    input_dtype = "int8"
    input_format = "chw4"
    output_padding_granularity = 128
    precision = "int8"
    sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
    tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
    use_graphs = False
    # Harness batching/staging tuning.
    complete_threads = 1
    deque_timeout_usec = 1
    gemm_plugin_fairshare_cache_size = 18
    gpu_batch_size = 262100
    gpu_num_bundles = 2
    num_staging_batches = 4
    num_staging_threads = 4
    server_target_qps = 286000
    # SXM configs load samples directly to device; the x4 sibling notes it cannot.
    start_from_device = True
    use_jemalloc = True
    use_small_tile_gemm_plugin = True
    scenario = Scenario.Server
    benchmark = Benchmark.DLRM
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A100_SXM_80GBx1_HighAccuracy(A100_SXM_80GBx1):
    """99.9%-accuracy registration for 1x A100-SXM-80GB.

    Every attribute was byte-identical to A100_SXM_80GBx1, so this now
    inherits from it instead of duplicating all settings — the same pattern
    this file already uses (e.g. A100_PCIe_80GBx1_HighAccuracy).
    """
    pass
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A100_SXM_80GBx1_HighAccuracy_Triton(BenchmarkConfiguration):
    """DLRM Server config: 1x A100-SXM-80GB, Triton harness, 99.9% accuracy, MaxP."""
    system = System("A100-SXM-80GB", Architecture.Ampere, 1)
    # Input/precision settings (int8 inference on preprocessed Criteo data).
    coalesced_tensor = True
    enable_interleaved_top_mlp = False
    gpu_copy_streams = 1
    gpu_inference_streams = 1
    input_dtype = "int8"
    input_format = "chw4"
    output_padding_granularity = 128
    precision = "int8"
    sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
    tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
    use_graphs = False
    # Harness batching/staging tuning.
    complete_threads = 1
    deque_timeout_usec = 1
    gemm_plugin_fairshare_cache_size = 18
    gpu_batch_size = 262100
    gpu_num_bundles = 2
    num_staging_batches = 4
    num_staging_threads = 4
    server_target_qps = 270000
    start_from_device = True
    use_jemalloc = True
    use_small_tile_gemm_plugin = True
    scenario = Scenario.Server
    benchmark = Benchmark.DLRM
    # Triton-specific options.
    batch_triton_requests = True
    buffer_manager_thread_count = 0
    gather_kernel_buffer_threshold = 10
    max_queue_delay_usec = 1
    use_triton = True
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99, PowerSetting.MaxP)
class A100_SXM_80GBx1_Triton(A100_SXM_80GBx1_HighAccuracy_Triton):
    """99%-accuracy Triton registration for 1x A100-SXM-80GB.

    Every attribute was byte-identical to A100_SXM_80GBx1_HighAccuracy_Triton,
    so this now inherits from it instead of duplicating all settings — the
    same pattern this file already uses (e.g. A100_SXM_80GBx4_Triton).
    """
    pass
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99, PowerSetting.MaxP)
class A100_SXM_80GBx4(BenchmarkConfiguration):
    """DLRM Server config: 4x A100-SXM-80GB (DGX Station A100), Custom harness, 99%, MaxP."""
    _system_alias = "DGX Station A100 - Red October"
    _notes = "This should not inherit from A100_SXM_80GB (DGX-A100), and cannot use start_from_device"
    system = System("A100-SXM-80GB", Architecture.Ampere, 4)
    # Input/precision settings (int8 inference on preprocessed Criteo data).
    coalesced_tensor = True
    enable_interleaved_top_mlp = False
    gpu_copy_streams = 1
    gpu_inference_streams = 1
    input_dtype = "int8"
    input_format = "chw4"
    output_padding_granularity = 128
    precision = "int8"
    sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
    tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
    use_graphs = False
    # Harness batching/staging tuning.
    complete_threads = 1
    deque_timeout_usec = 1
    gemm_plugin_fairshare_cache_size = 18
    gpu_batch_size = 274000
    gpu_num_bundles = 2
    num_staging_batches = 4
    num_staging_threads = 4
    server_target_qps = 950000
    use_jemalloc = True
    use_small_tile_gemm_plugin = True
    # Per-GPU CPU-core affinity string ("gpu:cores&..." — confirm format against harness docs).
    numa_config = "3:0-15,64-79&2:16-31,80-95&1:32-47,96-111&0:48-63,112-127"
    scenario = Scenario.Server
    benchmark = Benchmark.DLRM
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A100_SXM_80GBx4_HighAccuracy(A100_SXM_80GBx4):
    """99.9%-accuracy registration for 4x A100-SXM-80GB (DGX Station A100).

    Every attribute (including _system_alias and _notes) was byte-identical
    to A100_SXM_80GBx4, so this now inherits from it instead of duplicating
    all settings — the same pattern this file already uses
    (e.g. A100_SXM_80GBx4_Triton).
    """
    pass
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A100_SXM_80GBx4_HighAccuracy_Triton(BenchmarkConfiguration):
    """DLRM Server config: 4x A100-SXM-80GB (DGX Station A100), Triton harness, 99.9%, MaxP."""
    _system_alias = "DGX Station A100 - Red October"
    _notes = "This should not inherit from A100_SXM_80GB (DGX-A100), and cannot use start_from_device"
    system = System("A100-SXM-80GB", Architecture.Ampere, 4)
    # Input/precision settings (int8 inference on preprocessed Criteo data).
    coalesced_tensor = True
    enable_interleaved_top_mlp = False
    gpu_copy_streams = 1
    gpu_inference_streams = 1
    input_dtype = "int8"
    input_format = "chw4"
    output_padding_granularity = 128
    precision = "int8"
    sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
    tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
    use_graphs = False
    # Harness batching/staging tuning.
    complete_threads = 1
    deque_timeout_usec = 1
    gemm_plugin_fairshare_cache_size = 18
    gpu_batch_size = 274000
    gpu_num_bundles = 2
    num_staging_batches = 4
    num_staging_threads = 4
    server_target_qps = 750000
    use_jemalloc = True
    use_small_tile_gemm_plugin = True
    # Per-GPU CPU-core affinity string ("gpu:cores&..." — confirm format against harness docs).
    numa_config = "3:0-15,64-79&2:16-31,80-95&1:32-47,96-111&0:48-63,112-127"
    scenario = Scenario.Server
    benchmark = Benchmark.DLRM
    # Triton-specific options.
    batch_triton_requests = True
    buffer_manager_thread_count = 0
    max_queue_delay_usec = 1
    use_triton = True
    gather_kernel_buffer_threshold = 10
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99, PowerSetting.MaxP)
class A100_SXM_80GBx4_Triton(A100_SXM_80GBx4_HighAccuracy_Triton):
    """99%-accuracy registration reusing A100_SXM_80GBx4_HighAccuracy_Triton settings unchanged."""
    pass
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99, PowerSetting.MaxQ)
class A100_SXM_80GBx4_MaxQ(BenchmarkConfiguration):
    """DLRM Server config: 4x A100-SXM-80GB (DGX Station A100), Custom harness, 99%, MaxQ."""
    _system_alias = "DGX Station A100 - Red October"
    _notes = "This should not inherit from A100_SXM_80GB (DGX-A100), and cannot use start_from_device"
    system = System("A100-SXM-80GB", Architecture.Ampere, 4)
    # Input/precision settings (int8 inference on preprocessed Criteo data).
    coalesced_tensor = True
    enable_interleaved_top_mlp = False
    gpu_copy_streams = 1
    gpu_inference_streams = 1
    input_dtype = "int8"
    input_format = "chw4"
    output_padding_granularity = 128
    precision = "int8"
    sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
    tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
    use_graphs = False
    # Harness batching/staging tuning.
    complete_threads = 1
    deque_timeout_usec = 1
    gemm_plugin_fairshare_cache_size = 18
    # Smaller batch/QPS than the MaxP config (224k/890k vs 274k/950k).
    gpu_batch_size = 224000
    gpu_num_bundles = 2
    num_staging_batches = 4
    num_staging_threads = 4
    server_target_qps = 890000
    use_jemalloc = True
    use_small_tile_gemm_plugin = True
    # Per-GPU CPU-core affinity string ("gpu:cores&..." — confirm format against harness docs).
    numa_config = "3:0-7,32-39&2:8-15,40-47&1:16-23,48-55&0:24-31,56-63"
    scenario = Scenario.Server
    benchmark = Benchmark.DLRM
    # Presumably watts — TODO confirm unit against harness docs.
    power_limit = 250
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99_9, PowerSetting.MaxQ)
class A100_SXM_80GBx4_HighAccuracy_MaxQ(A100_SXM_80GBx4_MaxQ):
    """99.9%-accuracy MaxQ registration for 4x A100-SXM-80GB (DGX Station A100).

    Every attribute (including _system_alias and _notes) was byte-identical
    to A100_SXM_80GBx4_MaxQ, so this now inherits from it instead of
    duplicating all settings — the same pattern this file already uses
    (e.g. A100_SXM_80GBx4_Triton).
    """
    pass
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99_9, PowerSetting.MaxQ)
class A100_SXM_80GBx4_HighAccuracy_Triton_MaxQ(BenchmarkConfiguration):
    """DLRM Server config: 4x A100-SXM-80GB (DGX Station A100), Triton harness, 99.9%, MaxQ."""
    _system_alias = "DGX Station A100 - Red October"
    _notes = "This should not inherit from A100_SXM_80GB (DGX-A100), and cannot use start_from_device"
    system = System("A100-SXM-80GB", Architecture.Ampere, 4)
    # Input/precision settings (int8 inference on preprocessed Criteo data).
    coalesced_tensor = True
    enable_interleaved_top_mlp = False
    gpu_copy_streams = 1
    gpu_inference_streams = 1
    input_dtype = "int8"
    input_format = "chw4"
    output_padding_granularity = 128
    precision = "int8"
    sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
    tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
    use_graphs = False
    # Harness batching/staging tuning.
    complete_threads = 1
    deque_timeout_usec = 1
    gemm_plugin_fairshare_cache_size = 18
    gpu_batch_size = 274000
    gpu_num_bundles = 2
    num_staging_batches = 4
    num_staging_threads = 4
    server_target_qps = 270000
    use_jemalloc = True
    use_small_tile_gemm_plugin = True
    # Per-GPU CPU-core affinity string ("gpu:cores&..." — confirm format against harness docs).
    numa_config = "3:0-7,32-39&2:8-15,40-47&1:16-23,48-55&0:24-31,56-63"
    scenario = Scenario.Server
    benchmark = Benchmark.DLRM
    # Triton-specific options.
    batch_triton_requests = True
    buffer_manager_thread_count = 8
    max_queue_delay_usec = 10000
    # Presumably watts — TODO confirm unit against harness docs.
    power_limit = 250
    use_triton = True
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99, PowerSetting.MaxQ)
class A100_SXM_80GBx4_Triton_MaxQ(BenchmarkConfiguration):
    """DLRM Server config: 4x A100-SXM-80GB, Triton harness, k_99 accuracy, MaxQ power.

    Same values as the k_99_9 Triton MaxQ config above; registered separately
    for the lower accuracy target.
    """
    _system_alias = "DGX Station A100 - Red October"
    _notes = "This should not inherit from A100_SXM_80GB (DGX-A100), and cannot use start_from_device"
    system = System("A100-SXM-80GB", Architecture.Ampere, 4)
    coalesced_tensor = True
    enable_interleaved_top_mlp = False
    gpu_copy_streams = 1
    gpu_inference_streams = 1
    input_dtype = "int8"
    input_format = "chw4"
    output_padding_granularity = 128
    precision = "int8"
    sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
    tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
    use_graphs = False
    complete_threads = 1
    deque_timeout_usec = 1
    gemm_plugin_fairshare_cache_size = 18
    gpu_batch_size = 274000
    gpu_num_bundles = 2
    num_staging_batches = 4
    num_staging_threads = 4
    server_target_qps = 270000
    use_jemalloc = True
    use_small_tile_gemm_plugin = True
    numa_config = "3:0-7,32-39&2:8-15,40-47&1:16-23,48-55&0:24-31,56-63"
    scenario = Scenario.Server
    benchmark = Benchmark.DLRM
    # Triton-specific options.
    batch_triton_requests = True
    buffer_manager_thread_count = 8
    max_queue_delay_usec = 10000
    power_limit = 250
    use_triton = True
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99, PowerSetting.MaxP)
class A100_SXM_80GBx8(BenchmarkConfiguration):
    """DLRM Server config: 8x A100-SXM-80GB, custom harness, k_99 accuracy, MaxP power."""
    system = System("A100-SXM-80GB", Architecture.Ampere, 8)
    # Engine / input settings (int8-quantized Criteo preprocessed data).
    coalesced_tensor = True
    enable_interleaved_top_mlp = False
    gpu_copy_streams = 1
    gpu_inference_streams = 1
    input_dtype = "int8"
    input_format = "chw4"
    output_padding_granularity = 128
    precision = "int8"
    sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
    tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
    use_graphs = False
    # Harness tuning knobs.
    complete_threads = 1
    gemm_plugin_fairshare_cache_size = 18
    gpu_batch_size = 262100
    gpu_num_bundles = 2
    server_target_qps = 2300000
    start_from_device = True
    use_jemalloc = True
    use_small_tile_gemm_plugin = True
    scenario = Scenario.Server
    benchmark = Benchmark.DLRM
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A100_SXM_80GBx8_HighAccuracy(A100_SXM_80GBx8):
    """DLRM Server config: 8x A100-SXM-80GB, custom harness, k_99_9 accuracy, MaxP power.

    Every field is identical to the k_99 config A100_SXM_80GBx8 defined above,
    so inherit from it instead of duplicating the attribute list (the same
    inheritance pattern this file already uses for
    A30_MIG_1x1g6gb_Hetero_HighAccuracy). The registry decorator records this
    class under the k_99_9 target independently of the parent's registration.
    """
    pass
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A100_SXM_80GBx8_HighAccuracy_Triton(BenchmarkConfiguration):
    """DLRM Server config: 8x A100-SXM-80GB, Triton harness, k_99_9 accuracy, MaxP power."""
    system = System("A100-SXM-80GB", Architecture.Ampere, 8)
    # Engine / input settings (int8-quantized Criteo preprocessed data).
    coalesced_tensor = True
    enable_interleaved_top_mlp = False
    gpu_copy_streams = 1
    gpu_inference_streams = 1
    input_dtype = "int8"
    input_format = "chw4"
    output_padding_granularity = 128
    precision = "int8"
    sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
    tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
    use_graphs = False
    # Harness tuning knobs (no start_from_device here, unlike the custom harness).
    complete_threads = 1
    gemm_plugin_fairshare_cache_size = 18
    gpu_batch_size = 262100
    gpu_num_bundles = 2
    server_target_qps = 725000
    use_jemalloc = True
    use_small_tile_gemm_plugin = True
    scenario = Scenario.Server
    benchmark = Benchmark.DLRM
    # Triton-specific options.
    max_queue_delay_usec = 1000
    use_triton = True
    batch_triton_requests = True
    gather_kernel_buffer_threshold = 64
    request_timeout_usec = 2000
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99, PowerSetting.MaxP)
class A100_SXM_80GBx8_Triton(BenchmarkConfiguration):
    """DLRM Server config: 8x A100-SXM-80GB, Triton harness, k_99 accuracy, MaxP power.

    Same values as the k_99_9 Triton config above; registered separately for
    the lower accuracy target.
    """
    system = System("A100-SXM-80GB", Architecture.Ampere, 8)
    coalesced_tensor = True
    enable_interleaved_top_mlp = False
    gpu_copy_streams = 1
    gpu_inference_streams = 1
    input_dtype = "int8"
    input_format = "chw4"
    output_padding_granularity = 128
    precision = "int8"
    sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
    tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
    use_graphs = False
    complete_threads = 1
    gemm_plugin_fairshare_cache_size = 18
    gpu_batch_size = 262100
    gpu_num_bundles = 2
    server_target_qps = 725000
    use_jemalloc = True
    use_small_tile_gemm_plugin = True
    scenario = Scenario.Server
    benchmark = Benchmark.DLRM
    # Triton-specific options.
    max_queue_delay_usec = 1000
    use_triton = True
    batch_triton_requests = True
    gather_kernel_buffer_threshold = 64
    request_timeout_usec = 2000
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99, PowerSetting.MaxQ)
class A100_SXM_80GBx8_MaxQ(BenchmarkConfiguration):
    """DLRM Server config: 8x A100-SXM-80GB, custom harness, k_99 accuracy, MaxQ power."""
    system = System("A100-SXM-80GB", Architecture.Ampere, 8)
    # Engine / input settings (int8-quantized Criteo preprocessed data).
    coalesced_tensor = True
    enable_interleaved_top_mlp = False
    gpu_copy_streams = 1
    gpu_inference_streams = 1
    input_dtype = "int8"
    input_format = "chw4"
    output_padding_granularity = 128
    precision = "int8"
    sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
    tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
    use_graphs = False
    # Harness tuning knobs (QPS lowered vs. MaxP's 2.3M to fit the power cap).
    complete_threads = 1
    gemm_plugin_fairshare_cache_size = 18
    gpu_batch_size = 262100
    gpu_num_bundles = 2
    server_target_qps = 2000000
    start_from_device = True
    use_jemalloc = True
    use_small_tile_gemm_plugin = True
    scenario = Scenario.Server
    benchmark = Benchmark.DLRM
    # Power cap for the MaxQ run (units not shown here — presumably watts).
    power_limit = 275
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99_9, PowerSetting.MaxQ)
class A100_SXM_80GBx8_HighAccuracy_MaxQ(A100_SXM_80GBx8_MaxQ):
    """DLRM Server config: 8x A100-SXM-80GB, custom harness, k_99_9 accuracy, MaxQ power.

    Every field is identical to the k_99 config A100_SXM_80GBx8_MaxQ defined
    above, so inherit from it instead of duplicating the attribute list (the
    same inheritance pattern this file already uses for
    A30_MIG_1x1g6gb_Hetero_HighAccuracy). The registry decorator records this
    class under the k_99_9 target independently of the parent's registration.
    """
    pass
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99, PowerSetting.MaxP)
class A100_SXM4_40GBx1(BenchmarkConfiguration):
    """DLRM Server config: 1x A100-SXM4-40GB, custom harness, k_99 accuracy, MaxP power."""
    system = System("A100-SXM4-40GB", Architecture.Ampere, 1)
    # Engine / input settings (int8-quantized Criteo preprocessed data).
    coalesced_tensor = True
    enable_interleaved_top_mlp = False
    gpu_copy_streams = 1
    gpu_inference_streams = 1
    input_dtype = "int8"
    input_format = "chw4"
    output_padding_granularity = 128
    precision = "int8"
    sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
    tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
    use_graphs = False
    # Harness batching/staging knobs.
    complete_threads = 1
    deque_timeout_usec = 1
    gemm_plugin_fairshare_cache_size = 18
    gpu_batch_size = 262100
    gpu_num_bundles = 2
    num_staging_batches = 4
    num_staging_threads = 4
    server_target_qps = 255000
    start_from_device = True
    use_jemalloc = True
    use_small_tile_gemm_plugin = True
    scenario = Scenario.Server
    benchmark = Benchmark.DLRM
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A100_SXM4_40GBx1_HighAccuracy(A100_SXM4_40GBx1):
    """DLRM Server config: 1x A100-SXM4-40GB, custom harness, k_99_9 accuracy, MaxP power.

    Every field is identical to the k_99 config A100_SXM4_40GBx1 defined above,
    so inherit from it instead of duplicating the attribute list (the same
    inheritance pattern this file already uses for
    A30_MIG_1x1g6gb_Hetero_HighAccuracy). The registry decorator records this
    class under the k_99_9 target independently of the parent's registration.
    """
    pass
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A100_SXM4_40GBx1_HighAccuracy_Triton(BenchmarkConfiguration):
    """DLRM Server config: 1x A100-SXM4-40GB, Triton harness, k_99_9 accuracy, MaxP power."""
    system = System("A100-SXM4-40GB", Architecture.Ampere, 1)
    # Engine / input settings (int8-quantized Criteo preprocessed data).
    coalesced_tensor = True
    enable_interleaved_top_mlp = False
    gpu_copy_streams = 1
    gpu_inference_streams = 1
    input_dtype = "int8"
    input_format = "chw4"
    output_padding_granularity = 128
    precision = "int8"
    sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
    tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
    use_graphs = False
    # Harness batching/staging knobs (slightly lower QPS than the custom harness's 255000).
    complete_threads = 1
    deque_timeout_usec = 1
    gemm_plugin_fairshare_cache_size = 18
    gpu_batch_size = 262100
    gpu_num_bundles = 2
    num_staging_batches = 4
    num_staging_threads = 4
    server_target_qps = 245000
    start_from_device = True
    use_jemalloc = True
    use_small_tile_gemm_plugin = True
    scenario = Scenario.Server
    benchmark = Benchmark.DLRM
    # Triton-specific options.
    batch_triton_requests = True
    buffer_manager_thread_count = 0
    gather_kernel_buffer_threshold = 10
    max_queue_delay_usec = 1
    use_triton = True
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99, PowerSetting.MaxP)
class A100_SXM4_40GBx1_Triton(BenchmarkConfiguration):
    """DLRM Server config: 1x A100-SXM4-40GB, Triton harness, k_99 accuracy, MaxP power.

    Same values as the k_99_9 Triton config above; registered separately for
    the lower accuracy target.
    """
    system = System("A100-SXM4-40GB", Architecture.Ampere, 1)
    coalesced_tensor = True
    enable_interleaved_top_mlp = False
    gpu_copy_streams = 1
    gpu_inference_streams = 1
    input_dtype = "int8"
    input_format = "chw4"
    output_padding_granularity = 128
    precision = "int8"
    sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
    tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
    use_graphs = False
    complete_threads = 1
    deque_timeout_usec = 1
    gemm_plugin_fairshare_cache_size = 18
    gpu_batch_size = 262100
    gpu_num_bundles = 2
    num_staging_batches = 4
    num_staging_threads = 4
    server_target_qps = 245000
    start_from_device = True
    use_jemalloc = True
    use_small_tile_gemm_plugin = True
    scenario = Scenario.Server
    benchmark = Benchmark.DLRM
    # Triton-specific options.
    batch_triton_requests = True
    buffer_manager_thread_count = 0
    gather_kernel_buffer_threshold = 10
    max_queue_delay_usec = 1
    use_triton = True
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99, PowerSetting.MaxP)
class A100_SXM4_40GBx8(BenchmarkConfiguration):
    """DLRM Server config: 8x A100-SXM4-40GB, custom harness, k_99 accuracy, MaxP power."""
    system = System("A100-SXM4-40GB", Architecture.Ampere, 8)
    # Engine / input settings (int8-quantized Criteo preprocessed data).
    coalesced_tensor = True
    enable_interleaved_top_mlp = False
    gpu_copy_streams = 1
    gpu_inference_streams = 1
    input_dtype = "int8"
    input_format = "chw4"
    output_padding_granularity = 128
    precision = "int8"
    sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
    tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
    use_graphs = False
    # Harness tuning knobs.
    complete_threads = 1
    gemm_plugin_fairshare_cache_size = 18
    gpu_batch_size = 262100
    gpu_num_bundles = 2
    server_target_qps = 2100000
    start_from_device = True
    use_jemalloc = True
    use_small_tile_gemm_plugin = True
    scenario = Scenario.Server
    benchmark = Benchmark.DLRM
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A100_SXM4_40GBx8_HighAccuracy(A100_SXM4_40GBx8):
    """DLRM Server config: 8x A100-SXM4-40GB, custom harness, k_99_9 accuracy, MaxP power.

    Every field is identical to the k_99 config A100_SXM4_40GBx8 defined above,
    so inherit from it instead of duplicating the attribute list (the same
    inheritance pattern this file already uses for
    A30_MIG_1x1g6gb_Hetero_HighAccuracy). The registry decorator records this
    class under the k_99_9 target independently of the parent's registration.
    """
    pass
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A100_SXM4_40GBx8_HighAccuracy_Triton(BenchmarkConfiguration):
    """DLRM Server config: 8x A100-SXM4-40GB, Triton harness, k_99_9 accuracy, MaxP power."""
    system = System("A100-SXM4-40GB", Architecture.Ampere, 8)
    # Engine / input settings (int8-quantized Criteo preprocessed data).
    coalesced_tensor = True
    enable_interleaved_top_mlp = False
    gpu_copy_streams = 1
    gpu_inference_streams = 1
    input_dtype = "int8"
    input_format = "chw4"
    output_padding_granularity = 128
    precision = "int8"
    sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
    tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
    use_graphs = False
    # Harness tuning knobs (much lower QPS target than the custom harness's 2.1M).
    complete_threads = 1
    gemm_plugin_fairshare_cache_size = 18
    gpu_batch_size = 262100
    gpu_num_bundles = 2
    server_target_qps = 80000
    use_jemalloc = True
    use_small_tile_gemm_plugin = True
    scenario = Scenario.Server
    benchmark = Benchmark.DLRM
    # Triton-specific options.
    max_queue_delay_usec = 10000
    use_triton = True
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99, PowerSetting.MaxP)
class A100_SXM4_40GBx8_Triton(BenchmarkConfiguration):
    """DLRM Server config: 8x A100-SXM4-40GB, Triton harness, k_99 accuracy, MaxP power.

    Same values as the k_99_9 Triton config above; registered separately for
    the lower accuracy target.
    """
    system = System("A100-SXM4-40GB", Architecture.Ampere, 8)
    coalesced_tensor = True
    enable_interleaved_top_mlp = False
    gpu_copy_streams = 1
    gpu_inference_streams = 1
    input_dtype = "int8"
    input_format = "chw4"
    output_padding_granularity = 128
    precision = "int8"
    sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
    tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
    use_graphs = False
    complete_threads = 1
    gemm_plugin_fairshare_cache_size = 18
    gpu_batch_size = 262100
    gpu_num_bundles = 2
    server_target_qps = 80000
    use_jemalloc = True
    use_small_tile_gemm_plugin = True
    scenario = Scenario.Server
    benchmark = Benchmark.DLRM
    # Triton-specific options.
    max_queue_delay_usec = 10000
    use_triton = True
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99, PowerSetting.MaxP)
class A10x1(BenchmarkConfiguration):
    """DLRM Server config: 1x A10, custom harness, k_99 accuracy, MaxP power."""
    system = System("A10", Architecture.Ampere, 1)
    # Engine / input settings (int8-quantized Criteo preprocessed data).
    coalesced_tensor = True
    enable_interleaved_top_mlp = False
    gpu_copy_streams = 1
    gpu_inference_streams = 1
    input_dtype = "int8"
    input_format = "chw4"
    output_padding_granularity = 128
    precision = "int8"
    sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
    tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
    use_graphs = False
    # Harness batching/staging knobs; only a fraction of the embedding table
    # is kept on this smaller GPU (0.8 vs. 1.0 implied on the A100 configs).
    complete_threads = 1
    deque_timeout_usec = 1
    embedding_weights_on_gpu_part = 0.8
    gemm_plugin_fairshare_cache_size = 18
    gpu_batch_size = 65500
    gpu_num_bundles = 2
    num_staging_batches = 2
    num_staging_threads = 4
    server_target_qps = 68000
    use_jemalloc = True
    use_small_tile_gemm_plugin = True
    scenario = Scenario.Server
    benchmark = Benchmark.DLRM
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A10x1_HighAccuracy(A10x1):
    """DLRM Server config: 1x A10, custom harness, k_99_9 accuracy, MaxP power.

    Every field is identical to the k_99 config A10x1 defined above, so inherit
    from it instead of duplicating the attribute list (the same inheritance
    pattern this file already uses for A30_MIG_1x1g6gb_Hetero_HighAccuracy).
    The registry decorator records this class under the k_99_9 target
    independently of the parent's registration.
    """
    pass
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A10x1_HighAccuracy_Triton(BenchmarkConfiguration):
    """DLRM Server config: 1x A10, Triton harness, k_99_9 accuracy, MaxP power."""
    system = System("A10", Architecture.Ampere, 1)
    # Engine / input settings (int8-quantized Criteo preprocessed data).
    coalesced_tensor = True
    enable_interleaved_top_mlp = False
    gpu_copy_streams = 1
    gpu_inference_streams = 1
    input_dtype = "int8"
    input_format = "chw4"
    output_padding_granularity = 128
    precision = "int8"
    sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
    tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
    use_graphs = False
    # Harness batching/staging knobs; partial embedding table on this smaller GPU.
    complete_threads = 1
    deque_timeout_usec = 1
    embedding_weights_on_gpu_part = 0.8
    gemm_plugin_fairshare_cache_size = 18
    gpu_batch_size = 65500
    gpu_num_bundles = 2
    num_staging_batches = 2
    num_staging_threads = 4
    server_target_qps = 66000
    use_jemalloc = True
    use_small_tile_gemm_plugin = True
    scenario = Scenario.Server
    benchmark = Benchmark.DLRM
    # Triton-specific options.
    buffer_manager_thread_count = 0
    use_triton = True
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99, PowerSetting.MaxP)
class A10x1_Triton(BenchmarkConfiguration):
    """DLRM Server config: 1x A10, Triton harness, k_99 accuracy, MaxP power.

    Same values as the k_99_9 Triton config above; registered separately for
    the lower accuracy target.
    """
    system = System("A10", Architecture.Ampere, 1)
    coalesced_tensor = True
    enable_interleaved_top_mlp = False
    gpu_copy_streams = 1
    gpu_inference_streams = 1
    input_dtype = "int8"
    input_format = "chw4"
    output_padding_granularity = 128
    precision = "int8"
    sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
    tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
    use_graphs = False
    complete_threads = 1
    deque_timeout_usec = 1
    embedding_weights_on_gpu_part = 0.8
    gemm_plugin_fairshare_cache_size = 18
    gpu_batch_size = 65500
    gpu_num_bundles = 2
    num_staging_batches = 2
    num_staging_threads = 4
    server_target_qps = 66000
    use_jemalloc = True
    use_small_tile_gemm_plugin = True
    scenario = Scenario.Server
    benchmark = Benchmark.DLRM
    # Triton-specific options.
    buffer_manager_thread_count = 0
    use_triton = True
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99, PowerSetting.MaxP)
class A10x8(BenchmarkConfiguration):
    """DLRM Server config: 8x A10, custom harness, k_99 accuracy, MaxP power."""
    system = System("A10", Architecture.Ampere, 8)
    # Engine / input settings (int8-quantized Criteo preprocessed data).
    coalesced_tensor = True
    enable_interleaved_top_mlp = False
    gpu_copy_streams = 1
    gpu_inference_streams = 1
    input_dtype = "int8"
    input_format = "chw4"
    output_padding_granularity = 128
    precision = "int8"
    sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
    tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
    use_graphs = False
    # Harness batching/staging knobs; partial embedding table, jemalloc disabled
    # here unlike the single-GPU A10 config.
    complete_threads = 1
    deque_timeout_usec = 1
    embedding_weights_on_gpu_part = 0.8
    gemm_plugin_fairshare_cache_size = 18
    gpu_batch_size = 60000
    gpu_num_bundles = 2
    num_staging_batches = 8
    num_staging_threads = 8
    server_target_qps = 680000
    use_jemalloc = False
    use_small_tile_gemm_plugin = True
    # NOTE(review): presumably "GPU-range:cores" CPU pinning per NUMA node — confirm format.
    numa_config = "0-3:0-27,56-83&4-7:28-55,84-111"
    scenario = Scenario.Server
    benchmark = Benchmark.DLRM
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A10x8_HighAccuracy(A10x8):
    """DLRM Server config: 8x A10, custom harness, k_99_9 accuracy, MaxP power.

    Every field is identical to the k_99 config A10x8 defined above, so inherit
    from it instead of duplicating the attribute list (the same inheritance
    pattern this file already uses for A30_MIG_1x1g6gb_Hetero_HighAccuracy).
    The registry decorator records this class under the k_99_9 target
    independently of the parent's registration.
    """
    pass
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A10x8_HighAccuracy_Triton(BenchmarkConfiguration):
    """DLRM Server config: 8x A10, Triton harness, k_99_9 accuracy, MaxP power."""
    system = System("A10", Architecture.Ampere, 8)
    # Engine / input settings (int8-quantized Criteo preprocessed data).
    coalesced_tensor = True
    enable_interleaved_top_mlp = False
    gpu_copy_streams = 1
    gpu_inference_streams = 1
    input_dtype = "int8"
    input_format = "chw4"
    output_padding_granularity = 128
    precision = "int8"
    sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
    tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
    use_graphs = False
    # Harness batching/staging knobs (QPS lowered vs. the custom harness's 680000).
    complete_threads = 1
    deque_timeout_usec = 1
    embedding_weights_on_gpu_part = 0.8
    gemm_plugin_fairshare_cache_size = 18
    gpu_batch_size = 60000
    gpu_num_bundles = 2
    num_staging_batches = 8
    num_staging_threads = 8
    server_target_qps = 500000
    use_jemalloc = False
    use_small_tile_gemm_plugin = True
    # NOTE(review): presumably "GPU-range:cores" CPU pinning per NUMA node — confirm format.
    numa_config = "0-3:0-27,56-83&4-7:28-55,84-111"
    scenario = Scenario.Server
    benchmark = Benchmark.DLRM
    # Triton-specific options.
    buffer_manager_thread_count = 0
    use_triton = True
    batch_triton_requests = True
    gather_kernel_buffer_threshold = 64
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99, PowerSetting.MaxP)
class A10x8_Triton(BenchmarkConfiguration):
    """DLRM Server config: 8x A10, Triton harness, k_99 accuracy, MaxP power.

    Same values as the k_99_9 Triton config above; registered separately for
    the lower accuracy target.
    """
    system = System("A10", Architecture.Ampere, 8)
    coalesced_tensor = True
    enable_interleaved_top_mlp = False
    gpu_copy_streams = 1
    gpu_inference_streams = 1
    input_dtype = "int8"
    input_format = "chw4"
    output_padding_granularity = 128
    precision = "int8"
    sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
    tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
    use_graphs = False
    complete_threads = 1
    deque_timeout_usec = 1
    embedding_weights_on_gpu_part = 0.8
    gemm_plugin_fairshare_cache_size = 18
    gpu_batch_size = 60000
    gpu_num_bundles = 2
    num_staging_batches = 8
    num_staging_threads = 8
    server_target_qps = 500000
    use_jemalloc = False
    use_small_tile_gemm_plugin = True
    numa_config = "0-3:0-27,56-83&4-7:28-55,84-111"
    scenario = Scenario.Server
    benchmark = Benchmark.DLRM
    # Triton-specific options.
    buffer_manager_thread_count = 0
    use_triton = True
    batch_triton_requests = True
    gather_kernel_buffer_threshold = 64
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99, PowerSetting.MaxP)
class A30_MIG_1x1g6gb(BenchmarkConfiguration):
    """DLRM Server config: A30 with a single 1g.6gb MIG slice, custom harness, k_99, MaxP."""
    # One 1g/6gb MIG instance on GPU 0.
    _mig_configuration = MIGConfiguration({0: {MIGSlice(1, 6): 1}})
    system = System("A30", Architecture.Ampere, 1, mig_conf=_mig_configuration)
    # Engine / input settings (int8-quantized Criteo preprocessed data).
    coalesced_tensor = True
    enable_interleaved_top_mlp = False
    gpu_copy_streams = 1
    gpu_inference_streams = 1
    input_dtype = "int8"
    input_format = "chw4"
    output_padding_granularity = 128
    precision = "int8"
    sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
    tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
    use_graphs = False
    # Harness batching/staging knobs; only 2% of the embedding table fits on
    # the 6 GB MIG slice.
    complete_threads = 1
    deque_timeout_usec = 1
    embedding_weights_on_gpu_part = 0.02
    gemm_plugin_fairshare_cache_size = 18
    gpu_batch_size = 226000
    gpu_num_bundles = 2
    num_staging_batches = 2
    num_staging_threads = 4
    server_target_qps = 31000
    use_jemalloc = True
    use_small_tile_gemm_plugin = True
    scenario = Scenario.Server
    benchmark = Benchmark.DLRM
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A30_MIG_1x1g6gb_HighAccuracy(A30_MIG_1x1g6gb):
    """DLRM Server config: A30 1g.6gb MIG slice, custom harness, k_99_9, MaxP.

    Every field (including the MIG slice configuration) is identical to the
    k_99 config A30_MIG_1x1g6gb defined above, so inherit from it instead of
    duplicating the attribute list — the same inheritance pattern this file
    already uses for A30_MIG_1x1g6gb_Hetero_HighAccuracy. The registry
    decorator records this class under the k_99_9 target independently.
    """
    pass
@ConfigRegistry.register(HarnessType.HeteroMIG, AccuracyTarget.k_99, PowerSetting.MaxP)
class A30_MIG_1x1g6gb_Hetero(A30_MIG_1x1g6gb):
    """HeteroMIG-harness variant of A30_MIG_1x1g6gb; only the QPS target differs."""
    server_target_qps = 30000
@ConfigRegistry.register(HarnessType.HeteroMIG, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A30_MIG_1x1g6gb_Hetero_HighAccuracy(A30_MIG_1x1g6gb_Hetero):
    """k_99_9 registration of the HeteroMIG config; inherits every field unchanged."""
    pass
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A30_MIG_1x1g6gb_HighAccuracy_Triton(BenchmarkConfiguration):
    """DLRM Server config: A30 1g.6gb MIG slice, Triton harness, k_99_9, MaxP."""
    # One 1g/6gb MIG instance on GPU 0.
    _mig_configuration = MIGConfiguration({0: {MIGSlice(1, 6): 1}})
    system = System("A30", Architecture.Ampere, 1, mig_conf=_mig_configuration)
    # Engine / input settings (int8-quantized Criteo preprocessed data).
    coalesced_tensor = True
    enable_interleaved_top_mlp = False
    gpu_copy_streams = 1
    gpu_inference_streams = 1
    input_dtype = "int8"
    input_format = "chw4"
    output_padding_granularity = 128
    precision = "int8"
    sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
    tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
    use_graphs = False
    # Harness batching/staging knobs; only 2% of the embedding table on the slice.
    # NOTE(review): QPS is a float here (25000.0) unlike sibling configs — harmless
    # if the harness accepts floats, but inconsistent.
    complete_threads = 1
    deque_timeout_usec = 1
    embedding_weights_on_gpu_part = 0.02
    gemm_plugin_fairshare_cache_size = 18
    gpu_batch_size = 226000
    gpu_num_bundles = 2
    num_staging_batches = 2
    num_staging_threads = 4
    server_target_qps = 25000.0
    use_jemalloc = True
    use_small_tile_gemm_plugin = True
    scenario = Scenario.Server
    benchmark = Benchmark.DLRM
    # Triton-specific options.
    batch_triton_requests = True
    gather_kernel_buffer_threshold = 64
    use_triton = True
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99, PowerSetting.MaxP)
class A30_MIG_1x1g6gb_Triton(BenchmarkConfiguration):
    """DLRM Server config: A30 1g.6gb MIG slice, Triton harness, k_99, MaxP.

    Same values as the k_99_9 Triton config above; registered separately for
    the lower accuracy target.
    """
    _mig_configuration = MIGConfiguration({0: {MIGSlice(1, 6): 1}})
    system = System("A30", Architecture.Ampere, 1, mig_conf=_mig_configuration)
    coalesced_tensor = True
    enable_interleaved_top_mlp = False
    gpu_copy_streams = 1
    gpu_inference_streams = 1
    input_dtype = "int8"
    input_format = "chw4"
    output_padding_granularity = 128
    precision = "int8"
    sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
    tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
    use_graphs = False
    complete_threads = 1
    deque_timeout_usec = 1
    embedding_weights_on_gpu_part = 0.02
    gemm_plugin_fairshare_cache_size = 18
    gpu_batch_size = 226000
    gpu_num_bundles = 2
    num_staging_batches = 2
    num_staging_threads = 4
    server_target_qps = 25000.0
    use_jemalloc = True
    use_small_tile_gemm_plugin = True
    scenario = Scenario.Server
    benchmark = Benchmark.DLRM
    # Triton-specific options.
    batch_triton_requests = True
    gather_kernel_buffer_threshold = 64
    use_triton = True
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99, PowerSetting.MaxP)
class A30x1(BenchmarkConfiguration):
    """DLRM Server config: 1x A30 (no MIG), custom harness, k_99 accuracy, MaxP power."""
    system = System("A30", Architecture.Ampere, 1)
    # Engine / input settings (int8-quantized Criteo preprocessed data).
    coalesced_tensor = True
    enable_interleaved_top_mlp = False
    gpu_copy_streams = 1
    gpu_inference_streams = 1
    input_dtype = "int8"
    input_format = "chw4"
    output_padding_granularity = 128
    precision = "int8"
    sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
    tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
    use_graphs = False
    # Harness batching/staging knobs; 80% of the embedding table on GPU.
    complete_threads = 1
    deque_timeout_usec = 1
    embedding_weights_on_gpu_part = 0.8
    gemm_plugin_fairshare_cache_size = 18
    gpu_batch_size = 226000
    gpu_num_bundles = 2
    num_staging_batches = 2
    num_staging_threads = 4
    server_target_qps = 132000
    use_jemalloc = True
    use_small_tile_gemm_plugin = True
    scenario = Scenario.Server
    benchmark = Benchmark.DLRM
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99, PowerSetting.MaxP)
class A30x2(BenchmarkConfiguration):
    """DLRM Server config: 2x A30, custom harness, k_99 accuracy, MaxP power."""
    system = System("A30", Architecture.Ampere, 2)
    # Engine / input settings (int8-quantized Criteo preprocessed data).
    coalesced_tensor = True
    enable_interleaved_top_mlp = False
    gpu_copy_streams = 1
    gpu_inference_streams = 1
    input_dtype = "int8"
    input_format = "chw4"
    output_padding_granularity = 128
    precision = "int8"
    sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
    tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
    use_graphs = False
    # Harness batching/staging knobs; 80% of the embedding table on GPU.
    complete_threads = 1
    deque_timeout_usec = 1
    embedding_weights_on_gpu_part = 0.8
    gemm_plugin_fairshare_cache_size = 18
    gpu_batch_size = 226000
    gpu_num_bundles = 2
    num_staging_batches = 4
    num_staging_threads = 4
    server_target_qps = 190000
    # NOTE(review): presumably "GPU:cores" CPU pinning per NUMA node — confirm format.
    numa_config = "0:12-15,44-47&1:8-11,40-43"
    use_jemalloc = True
    use_small_tile_gemm_plugin = True
    scenario = Scenario.Server
    benchmark = Benchmark.DLRM
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99, PowerSetting.MaxP)
class A30x4(BenchmarkConfiguration):
    """DLRM Server config: 4x A30, custom harness, k_99 accuracy, MaxP power."""
    system = System("A30", Architecture.Ampere, 4)
    # Engine / input settings (int8-quantized Criteo preprocessed data).
    coalesced_tensor = True
    enable_interleaved_top_mlp = False
    gpu_copy_streams = 1
    gpu_inference_streams = 1
    input_dtype = "int8"
    input_format = "chw4"
    output_padding_granularity = 128
    precision = "int8"
    sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
    tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
    use_graphs = False
    # Harness batching/staging knobs; 80% of the embedding table on GPU.
    complete_threads = 1
    deque_timeout_usec = 1
    embedding_weights_on_gpu_part = 0.8
    gemm_plugin_fairshare_cache_size = 18
    gpu_batch_size = 226000
    gpu_num_bundles = 2
    num_staging_batches = 4
    num_staging_threads = 4
    server_target_qps = 375000
    # NOTE(review): presumably "GPU:cores" CPU pinning per NUMA node — confirm format.
    numa_config = "0:12-15,44-47&1:8-11,40-43&2:28-31,60-63&3:20-23,52-55"
    use_jemalloc = True
    use_small_tile_gemm_plugin = True
    scenario = Scenario.Server
    benchmark = Benchmark.DLRM
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A30x1_HighAccuracy(BenchmarkConfiguration):
    """DLRM Server-scenario settings for 1x A30 (custom harness, 99.9% accuracy target)."""
    system = System("A30", Architecture.Ampere, 1)
    coalesced_tensor = True
    enable_interleaved_top_mlp = False
    gpu_copy_streams = 1
    gpu_inference_streams = 1
    input_dtype = "int8"
    input_format = "chw4"
    output_padding_granularity = 128
    precision = "int8"
    sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
    tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
    use_graphs = False
    complete_threads = 1
    deque_timeout_usec = 1
    embedding_weights_on_gpu_part = 0.8
    gemm_plugin_fairshare_cache_size = 18
    gpu_batch_size = 226000
    gpu_num_bundles = 2
    num_staging_batches = 2
    num_staging_threads = 4
    server_target_qps = 132000
    use_jemalloc = True
    use_small_tile_gemm_plugin = True
    scenario = Scenario.Server
    benchmark = Benchmark.DLRM
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A30x2_HighAccuracy(BenchmarkConfiguration):
    """DLRM Server-scenario settings for 2x A30 (custom harness, 99.9% accuracy target)."""
    system = System("A30", Architecture.Ampere, 2)
    coalesced_tensor = True
    enable_interleaved_top_mlp = False
    gpu_copy_streams = 1
    gpu_inference_streams = 1
    input_dtype = "int8"
    input_format = "chw4"
    output_padding_granularity = 128
    precision = "int8"
    sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
    tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
    use_graphs = False
    complete_threads = 1
    deque_timeout_usec = 1
    embedding_weights_on_gpu_part = 0.8
    gemm_plugin_fairshare_cache_size = 18
    gpu_batch_size = 226000
    gpu_num_bundles = 2
    # NOTE(review): 2 staging batches here vs 4 in the 99% A30x2 config — presumably
    # intentional tuning, but worth confirming.
    num_staging_batches = 2
    num_staging_threads = 4
    server_target_qps = 190000
    numa_config = "0:12-15,44-47&1:8-11,40-43"
    use_jemalloc = True
    use_small_tile_gemm_plugin = True
    scenario = Scenario.Server
    benchmark = Benchmark.DLRM
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A30x4_HighAccuracy(BenchmarkConfiguration):
    """DLRM Server-scenario settings for 4x A30 (custom harness, 99.9% accuracy target)."""
    system = System("A30", Architecture.Ampere, 4)
    coalesced_tensor = True
    enable_interleaved_top_mlp = False
    gpu_copy_streams = 1
    gpu_inference_streams = 1
    input_dtype = "int8"
    input_format = "chw4"
    output_padding_granularity = 128
    precision = "int8"
    sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
    tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
    use_graphs = False
    complete_threads = 1
    deque_timeout_usec = 1
    embedding_weights_on_gpu_part = 0.8
    gemm_plugin_fairshare_cache_size = 18
    gpu_batch_size = 226000
    gpu_num_bundles = 2
    num_staging_batches = 2
    num_staging_threads = 4
    server_target_qps = 375000
    numa_config = "0:12-15,44-47&1:8-11,40-43&2:28-31,60-63&3:20-23,52-55"
    use_jemalloc = True
    use_small_tile_gemm_plugin = True
    scenario = Scenario.Server
    benchmark = Benchmark.DLRM
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A30x1_HighAccuracy_Triton(BenchmarkConfiguration):
    """DLRM Server-scenario settings for 1x A30 (Triton harness, 99.9% accuracy target)."""
    system = System("A30", Architecture.Ampere, 1)
    coalesced_tensor = True
    enable_interleaved_top_mlp = False
    gpu_copy_streams = 1
    gpu_inference_streams = 1
    input_dtype = "int8"
    input_format = "chw4"
    output_padding_granularity = 128
    precision = "int8"
    sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
    tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
    use_graphs = False
    complete_threads = 1
    deque_timeout_usec = 1
    embedding_weights_on_gpu_part = 0.8
    gemm_plugin_fairshare_cache_size = 18
    gpu_batch_size = 226000
    gpu_num_bundles = 2
    num_staging_batches = 2
    num_staging_threads = 4
    # Lower QPS target than the custom harness (132000) to absorb Triton overhead.
    server_target_qps = 100000
    use_jemalloc = True
    use_small_tile_gemm_plugin = True
    scenario = Scenario.Server
    benchmark = Benchmark.DLRM
    batch_triton_requests = True
    gather_kernel_buffer_threshold = 64
    use_triton = True
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99, PowerSetting.MaxP)
class A30x1_Triton(BenchmarkConfiguration):
    """DLRM Server-scenario settings for 1x A30 (Triton harness, 99% accuracy target)."""
    system = System("A30", Architecture.Ampere, 1)
    coalesced_tensor = True
    enable_interleaved_top_mlp = False
    gpu_copy_streams = 1
    gpu_inference_streams = 1
    input_dtype = "int8"
    input_format = "chw4"
    output_padding_granularity = 128
    precision = "int8"
    sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
    tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
    use_graphs = False
    complete_threads = 1
    deque_timeout_usec = 1
    embedding_weights_on_gpu_part = 0.8
    gemm_plugin_fairshare_cache_size = 18
    gpu_batch_size = 226000
    gpu_num_bundles = 2
    num_staging_batches = 2
    num_staging_threads = 4
    server_target_qps = 100000
    use_jemalloc = True
    use_small_tile_gemm_plugin = True
    scenario = Scenario.Server
    benchmark = Benchmark.DLRM
    batch_triton_requests = True
    gather_kernel_buffer_threshold = 64
    use_triton = True
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99, PowerSetting.MaxP)
class A30x8(BenchmarkConfiguration):
    """DLRM Server-scenario settings for 8x A30 (custom harness, 99% accuracy target)."""
    system = System("A30", Architecture.Ampere, 8)
    coalesced_tensor = True
    enable_interleaved_top_mlp = False
    gpu_copy_streams = 1
    gpu_inference_streams = 1
    input_dtype = "int8"
    input_format = "chw4"
    output_padding_granularity = 128
    precision = "int8"
    sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
    tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
    use_graphs = False
    complete_threads = 1
    deque_timeout_usec = 1
    embedding_weights_on_gpu_part = 0.8
    gemm_plugin_fairshare_cache_size = 18
    # Smaller per-GPU batch than the 1/2/4-GPU configs (226000).
    gpu_batch_size = 131000
    gpu_num_bundles = 2
    num_staging_batches = 8
    num_staging_threads = 8
    server_target_qps = 1000000
    use_jemalloc = False
    use_small_tile_gemm_plugin = True
    numa_config = "3:0-15&2:16-31&1:32-47&0:48-63&7:64-79&6:80-95&5:96-111&4:112-127"
    scenario = Scenario.Server
    benchmark = Benchmark.DLRM
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A30x8_HighAccuracy(BenchmarkConfiguration):
    """DLRM Server-scenario settings for 8x A30 (custom harness, 99.9% accuracy target)."""
    system = System("A30", Architecture.Ampere, 8)
    coalesced_tensor = True
    enable_interleaved_top_mlp = False
    gpu_copy_streams = 1
    gpu_inference_streams = 1
    input_dtype = "int8"
    input_format = "chw4"
    output_padding_granularity = 128
    precision = "int8"
    sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
    tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
    use_graphs = False
    complete_threads = 1
    deque_timeout_usec = 1
    embedding_weights_on_gpu_part = 0.8
    gemm_plugin_fairshare_cache_size = 18
    gpu_batch_size = 131000
    gpu_num_bundles = 2
    num_staging_batches = 8
    num_staging_threads = 8
    server_target_qps = 1000000
    use_jemalloc = False
    use_small_tile_gemm_plugin = True
    numa_config = "3:0-15&2:16-31&1:32-47&0:48-63&7:64-79&6:80-95&5:96-111&4:112-127"
    scenario = Scenario.Server
    benchmark = Benchmark.DLRM
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class A30x8_HighAccuracy_Triton(BenchmarkConfiguration):
    """DLRM Server-scenario settings for 8x A30 (Triton harness, 99.9% accuracy target)."""
    system = System("A30", Architecture.Ampere, 8)
    coalesced_tensor = True
    enable_interleaved_top_mlp = False
    gpu_copy_streams = 1
    gpu_inference_streams = 1
    input_dtype = "int8"
    input_format = "chw4"
    output_padding_granularity = 128
    precision = "int8"
    sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
    tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
    use_graphs = False
    complete_threads = 1
    deque_timeout_usec = 1
    embedding_weights_on_gpu_part = 0.8
    gemm_plugin_fairshare_cache_size = 18
    gpu_batch_size = 131000
    gpu_num_bundles = 2
    num_staging_batches = 8
    num_staging_threads = 8
    # Lower QPS target than the custom harness (1000000) to absorb Triton overhead.
    server_target_qps = 600000
    use_jemalloc = False
    use_small_tile_gemm_plugin = True
    numa_config = "3:0-15&2:16-31&1:32-47&0:48-63&7:64-79&6:80-95&5:96-111&4:112-127"
    scenario = Scenario.Server
    benchmark = Benchmark.DLRM
    batch_triton_requests = True
    gather_kernel_buffer_threshold = 64
    use_triton = True
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99, PowerSetting.MaxP)
class A30x8_Triton(BenchmarkConfiguration):
    """DLRM Server-scenario settings for 8x A30 (Triton harness, 99% accuracy target)."""
    system = System("A30", Architecture.Ampere, 8)
    coalesced_tensor = True
    enable_interleaved_top_mlp = False
    gpu_copy_streams = 1
    gpu_inference_streams = 1
    input_dtype = "int8"
    input_format = "chw4"
    output_padding_granularity = 128
    precision = "int8"
    sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
    tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
    use_graphs = False
    complete_threads = 1
    deque_timeout_usec = 1
    embedding_weights_on_gpu_part = 0.8
    gemm_plugin_fairshare_cache_size = 18
    gpu_batch_size = 131000
    gpu_num_bundles = 2
    num_staging_batches = 8
    num_staging_threads = 8
    server_target_qps = 600000
    use_jemalloc = False
    use_small_tile_gemm_plugin = True
    numa_config = "3:0-15&2:16-31&1:32-47&0:48-63&7:64-79&6:80-95&5:96-111&4:112-127"
    scenario = Scenario.Server
    benchmark = Benchmark.DLRM
    batch_triton_requests = True
    gather_kernel_buffer_threshold = 64
    use_triton = True
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99, PowerSetting.MaxP)
class T4x1(BenchmarkConfiguration):
    """DLRM Server-scenario settings for 1x T4 (custom harness, 99% accuracy target)."""
    system = System("T4", Architecture.Turing, 1)
    coalesced_tensor = True
    enable_interleaved_top_mlp = True
    gpu_copy_streams = 1
    gpu_inference_streams = 1
    input_dtype = "int8"
    input_format = "chw4"
    output_padding_granularity = 128
    precision = "int8"
    sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
    tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
    use_graphs = False
    complete_threads = 1
    deque_timeout_usec = 1
    # T4 has less memory than A30, so only half the embedding tables fit on GPU.
    embedding_weights_on_gpu_part = 0.5
    gpu_batch_size = 65500
    gpu_num_bundles = 2
    num_staging_batches = 2
    num_staging_threads = 4
    server_target_qps = 24000
    use_jemalloc = True
    scenario = Scenario.Server
    benchmark = Benchmark.DLRM
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class T4x1_HighAccuracy(BenchmarkConfiguration):
    """DLRM Server-scenario settings for 1x T4 (custom harness, 99.9% accuracy target)."""
    system = System("T4", Architecture.Turing, 1)
    coalesced_tensor = True
    enable_interleaved_top_mlp = True
    gpu_copy_streams = 1
    gpu_inference_streams = 1
    input_dtype = "int8"
    input_format = "chw4"
    output_padding_granularity = 128
    precision = "int8"
    sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
    tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
    use_graphs = False
    complete_threads = 1
    deque_timeout_usec = 1
    embedding_weights_on_gpu_part = 0.5
    gpu_batch_size = 65500
    gpu_num_bundles = 2
    num_staging_batches = 2
    num_staging_threads = 4
    server_target_qps = 24000
    use_jemalloc = True
    scenario = Scenario.Server
    benchmark = Benchmark.DLRM
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class T4x1_HighAccuracy_Triton(BenchmarkConfiguration):
    """DLRM Server-scenario settings for 1x T4 (Triton harness, 99.9% accuracy target)."""
    system = System("T4", Architecture.Turing, 1)
    coalesced_tensor = True
    enable_interleaved_top_mlp = True
    gpu_copy_streams = 1
    gpu_inference_streams = 1
    input_dtype = "int8"
    input_format = "chw4"
    output_padding_granularity = 128
    precision = "int8"
    sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
    tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
    use_graphs = False
    complete_threads = 1
    deque_timeout_usec = 1
    embedding_weights_on_gpu_part = 0.5
    gpu_batch_size = 65500
    gpu_num_bundles = 2
    num_staging_batches = 2
    num_staging_threads = 4
    server_target_qps = 24000
    use_jemalloc = True
    scenario = Scenario.Server
    benchmark = Benchmark.DLRM
    buffer_manager_thread_count = 8
    use_triton = True
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99, PowerSetting.MaxP)
class T4x1_Triton(BenchmarkConfiguration):
    """DLRM Server-scenario settings for 1x T4 (Triton harness, 99% accuracy target)."""
    system = System("T4", Architecture.Turing, 1)
    coalesced_tensor = True
    enable_interleaved_top_mlp = True
    gpu_copy_streams = 1
    gpu_inference_streams = 1
    input_dtype = "int8"
    input_format = "chw4"
    output_padding_granularity = 128
    precision = "int8"
    sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
    tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
    use_graphs = False
    complete_threads = 1
    deque_timeout_usec = 1
    embedding_weights_on_gpu_part = 0.5
    gpu_batch_size = 65500
    gpu_num_bundles = 2
    num_staging_batches = 2
    num_staging_threads = 4
    server_target_qps = 24000
    use_jemalloc = True
    scenario = Scenario.Server
    benchmark = Benchmark.DLRM
    buffer_manager_thread_count = 8
    use_triton = True
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99, PowerSetting.MaxP)
class T4x20(BenchmarkConfiguration):
    """DLRM Server-scenario settings for 20x T4 (custom harness, 99% accuracy target)."""
    system = System("T4", Architecture.Turing, 20)
    coalesced_tensor = True
    enable_interleaved_top_mlp = True
    gpu_copy_streams = 1
    gpu_inference_streams = 1
    input_dtype = "int8"
    input_format = "chw4"
    output_padding_granularity = 128
    precision = "int8"
    sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
    tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
    use_graphs = False
    complete_threads = 1
    # NOTE(review): unlike the other T4 configs, no deque_timeout_usec is set here;
    # the harness default presumably applies.
    embedding_weights_on_gpu_part = 0.5
    gpu_batch_size = 65800
    gpu_num_bundles = 2
    num_staging_batches = 16
    num_staging_threads = 8
    server_target_qps = 600000
    use_jemalloc = False
    scenario = Scenario.Server
    benchmark = Benchmark.DLRM
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class T4x20_HighAccuracy(BenchmarkConfiguration):
    """DLRM Server-scenario settings for 20x T4 (custom harness, 99.9% accuracy target)."""
    system = System("T4", Architecture.Turing, 20)
    coalesced_tensor = True
    enable_interleaved_top_mlp = True
    gpu_copy_streams = 1
    gpu_inference_streams = 1
    input_dtype = "int8"
    input_format = "chw4"
    output_padding_granularity = 128
    precision = "int8"
    sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
    tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
    use_graphs = False
    complete_threads = 1
    embedding_weights_on_gpu_part = 0.5
    gpu_batch_size = 65800
    gpu_num_bundles = 2
    num_staging_batches = 16
    num_staging_threads = 8
    server_target_qps = 600000
    use_jemalloc = False
    scenario = Scenario.Server
    benchmark = Benchmark.DLRM
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class T4x20_HighAccuracy_Triton(BenchmarkConfiguration):
    """DLRM Server-scenario settings for 20x T4 (Triton harness, 99.9% accuracy target)."""
    system = System("T4", Architecture.Turing, 20)
    coalesced_tensor = True
    enable_interleaved_top_mlp = True
    gpu_copy_streams = 1
    gpu_inference_streams = 1
    input_dtype = "int8"
    input_format = "chw4"
    output_padding_granularity = 128
    precision = "int8"
    sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
    tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
    use_graphs = False
    complete_threads = 1
    embedding_weights_on_gpu_part = 0.5
    gpu_batch_size = 65800
    gpu_num_bundles = 2
    num_staging_batches = 16
    num_staging_threads = 8
    # NOTE(review): 60000 here vs 600000 in the non-Triton T4x20 config — a 10x drop;
    # confirm this is tuned and not a dropped zero.
    server_target_qps = 60000
    use_jemalloc = False
    scenario = Scenario.Server
    benchmark = Benchmark.DLRM
    use_triton = True
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99, PowerSetting.MaxP)
class T4x20_Triton(BenchmarkConfiguration):
    """DLRM Server-scenario settings for 20x T4 (Triton harness, 99% accuracy target)."""
    system = System("T4", Architecture.Turing, 20)
    coalesced_tensor = True
    enable_interleaved_top_mlp = True
    gpu_copy_streams = 1
    gpu_inference_streams = 1
    input_dtype = "int8"
    input_format = "chw4"
    output_padding_granularity = 128
    precision = "int8"
    sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
    tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
    use_graphs = False
    complete_threads = 1
    embedding_weights_on_gpu_part = 0.5
    gpu_batch_size = 65800
    gpu_num_bundles = 2
    num_staging_batches = 16
    num_staging_threads = 8
    server_target_qps = 60000
    use_jemalloc = False
    scenario = Scenario.Server
    benchmark = Benchmark.DLRM
    use_triton = True
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99, PowerSetting.MaxP)
class T4x8(BenchmarkConfiguration):
    """DLRM Server-scenario settings for 8x T4 (custom harness, 99% accuracy target)."""
    system = System("T4", Architecture.Turing, 8)
    coalesced_tensor = True
    enable_interleaved_top_mlp = True
    gpu_copy_streams = 1
    gpu_inference_streams = 1
    input_dtype = "int8"
    input_format = "chw4"
    output_padding_granularity = 128
    precision = "int8"
    sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
    tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
    use_graphs = False
    complete_threads = 1
    deque_timeout_usec = 1
    embedding_weights_on_gpu_part = 0.5
    gpu_batch_size = 65500
    gpu_num_bundles = 1
    num_staging_batches = 8
    num_staging_threads = 4
    server_target_qps = 250000
    use_jemalloc = True
    scenario = Scenario.Server
    benchmark = Benchmark.DLRM
@ConfigRegistry.register(HarnessType.Custom, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class T4x8_HighAccuracy(BenchmarkConfiguration):
    """DLRM Server-scenario settings for 8x T4 (custom harness, 99.9% accuracy target)."""
    system = System("T4", Architecture.Turing, 8)
    coalesced_tensor = True
    enable_interleaved_top_mlp = True
    gpu_copy_streams = 1
    gpu_inference_streams = 1
    input_dtype = "int8"
    input_format = "chw4"
    output_padding_granularity = 128
    precision = "int8"
    sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
    tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
    use_graphs = False
    complete_threads = 1
    deque_timeout_usec = 1
    embedding_weights_on_gpu_part = 0.5
    gpu_batch_size = 65500
    gpu_num_bundles = 1
    num_staging_batches = 8
    num_staging_threads = 4
    server_target_qps = 250000
    use_jemalloc = True
    scenario = Scenario.Server
    benchmark = Benchmark.DLRM
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99_9, PowerSetting.MaxP)
class T4x8_HighAccuracy_Triton(BenchmarkConfiguration):
    """DLRM Server-scenario settings for 8x T4 (Triton harness, 99.9% accuracy target)."""
    system = System("T4", Architecture.Turing, 8)
    coalesced_tensor = True
    enable_interleaved_top_mlp = True
    gpu_copy_streams = 1
    gpu_inference_streams = 1
    input_dtype = "int8"
    input_format = "chw4"
    output_padding_granularity = 128
    precision = "int8"
    sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
    tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
    use_graphs = False
    complete_threads = 1
    deque_timeout_usec = 1
    embedding_weights_on_gpu_part = 0.5
    gpu_batch_size = 65500
    gpu_num_bundles = 1
    num_staging_batches = 8
    num_staging_threads = 4
    # Lower QPS target than the custom harness (250000) to absorb Triton overhead.
    server_target_qps = 55000
    use_jemalloc = True
    scenario = Scenario.Server
    benchmark = Benchmark.DLRM
    use_triton = True
@ConfigRegistry.register(HarnessType.Triton, AccuracyTarget.k_99, PowerSetting.MaxP)
class T4x8_Triton(BenchmarkConfiguration):
    """DLRM Server-scenario settings for 8x T4 (Triton harness, 99% accuracy target)."""
    system = System("T4", Architecture.Turing, 8)
    coalesced_tensor = True
    enable_interleaved_top_mlp = True
    gpu_copy_streams = 1
    gpu_inference_streams = 1
    input_dtype = "int8"
    input_format = "chw4"
    output_padding_granularity = 128
    precision = "int8"
    sample_partition_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/sample_partition.npy"
    tensor_path = "${PREPROCESSED_DATA_DIR}/criteo/full_recalib/numeric_int8_chw4.npy,${PREPROCESSED_DATA_DIR}/criteo/full_recalib/categorical_int32.npy"
    use_graphs = False
    complete_threads = 1
    deque_timeout_usec = 1
    embedding_weights_on_gpu_part = 0.5
    gpu_batch_size = 65500
    gpu_num_bundles = 1
    num_staging_batches = 8
    num_staging_threads = 4
    server_target_qps = 55000
    use_jemalloc = True
    scenario = Scenario.Server
    benchmark = Benchmark.DLRM
    use_triton = True
| 38.277903 | 153 | 0.75741 | 13,302 | 101,513 | 5.38949 | 0.024132 | 0.056911 | 0.067582 | 0.088923 | 0.986763 | 0.985647 | 0.984363 | 0.981337 | 0.979691 | 0.973037 | 0 | 0.060728 | 0.165082 | 101,513 | 2,651 | 154 | 38.292343 | 0.785134 | 0.006255 | 0 | 0.954825 | 0 | 0.011088 | 0.207444 | 0.182261 | 0 | 0 | 0 | 0.000377 | 0 | 1 | 0 | false | 0.004928 | 0.002053 | 0 | 0.954415 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 8 |
78db5f747d0a83260e3a606301de98fc6d9da0dc | 2,572 | py | Python | plot_creation_scripts/reading_for_tables_per_epoch/reading_for_table_cifar10_censet_wide_search.py | andrewjh9/CenBench | afd960b77ade05be2d2368bed3b47d54f7e229b6 | [
"MIT"
] | null | null | null | plot_creation_scripts/reading_for_tables_per_epoch/reading_for_table_cifar10_censet_wide_search.py | andrewjh9/CenBench | afd960b77ade05be2d2368bed3b47d54f7e229b6 | [
"MIT"
] | null | null | null | plot_creation_scripts/reading_for_tables_per_epoch/reading_for_table_cifar10_censet_wide_search.py | andrewjh9/CenBench | afd960b77ade05be2d2368bed3b47d54f7e229b6 | [
"MIT"
] | null | null | null | import matplotlib.pyplot as plt
import numpy as np
import tikzplotlib
# Accuracy CSVs for the CenSET standard-deviation pruning-level sweep on CIFAR-10.
# Each run was launched separately, so every file name embeds its own timestamp
# (and the 0-SD run carries an extra "__wide_search" tag); the paths must
# therefore be listed explicitly rather than generated.
ACCURACY_FILES = {
    0.0: "results/find_sd_prune_value/cifar10/CenSET_laplacian_cifar10_for_100_epochs_20210614-180130_num_sd_0_accuracy__wide_search_finding_opti_sd_removal_rate.csv",
    0.5: "results/find_sd_prune_value/cifar10/CenSET_laplacian_cifar10_for_100_epochs_20210602-095236_num_sd_0.5_accuracy_finding_opti_sd_removal_rate.csv",
    1.0: "results/find_sd_prune_value/cifar10/CenSET_laplacian_cifar10_for_100_epochs_20210602-111429_num_sd_1.0_accuracy_finding_opti_sd_removal_rate.csv",
    1.5: "results/find_sd_prune_value/cifar10/CenSET_laplacian_cifar10_for_100_epochs_20210602-123540_num_sd_1.5_accuracy_finding_opti_sd_removal_rate.csv",
    2.0: "results/find_sd_prune_value/cifar10/CenSET_laplacian_cifar10_for_100_epochs_20210602-135757_num_sd_2.0_accuracy_finding_opti_sd_removal_rate.csv",
    2.5: "results/find_sd_prune_value/cifar10/CenSET_laplacian_cifar10_for_100_epochs_20210602-151822_num_sd_2.5_accuracy_finding_opti_sd_removal_rate.csv",
    3.0: "results/find_sd_prune_value/cifar10/CenSET_laplacian_cifar10_for_100_epochs_20210602-163903_num_sd_3.0_accuracy_finding_opti_sd_removal_rate.csv",
    3.5: "results/find_sd_prune_value/cifar10/CenSET_laplacian_cifar10_for_100_epochs_20210602-180045_num_sd_3.5_accuracy_finding_opti_sd_removal_rate.csv",
    4.0: "results/find_sd_prune_value/cifar10/CenSET_laplacian_cifar10_for_100_epochs_20210602-192258_num_sd_4.0_accuracy_finding_opti_sd_removal_rate.csv",
    4.5: "results/find_sd_prune_value/cifar10/CenSET_laplacian_cifar10_for_100_epochs_20210602-210441_num_sd_4.5_accuracy_finding_opti_sd_removal_rate.csv",
}

# Load every per-epoch accuracy series (dict preserves insertion order).
accuracies = {sd: np.genfromtxt(path, delimiter='') for sd, path in ACCURACY_FILES.items()}

# Report the mean accuracy over the final 10 epochs, as a percentage.
# TODO(review): the 4.5-SD run was loaded but never printed in the original
# script; it is excluded here to preserve that behaviour — confirm whether it
# should be reported as well.
for sd, acc in accuracies.items():
    if sd == 4.5:
        continue
    print(np.mean(acc[-10:]) * 100)
78ecf50e386b4a0d6987f52b5cfe5c73ba58dd66 | 52,149 | py | Python | eeauditor/auditors/aws/Amazon_CloudFront_Auditor.py | kbhagi/ElectricEye | 31960e1e1cfb75c5d354844ea9e07d5295442823 | [
"Apache-2.0"
] | 442 | 2020-03-15T20:56:36.000Z | 2022-03-31T22:13:07.000Z | eeauditor/auditors/aws/Amazon_CloudFront_Auditor.py | kbhagi/ElectricEye | 31960e1e1cfb75c5d354844ea9e07d5295442823 | [
"Apache-2.0"
] | 57 | 2020-03-15T22:09:56.000Z | 2022-03-31T13:17:06.000Z | eeauditor/auditors/aws/Amazon_CloudFront_Auditor.py | kbhagi/ElectricEye | 31960e1e1cfb75c5d354844ea9e07d5295442823 | [
"Apache-2.0"
] | 59 | 2020-03-15T21:19:10.000Z | 2022-03-31T15:01:31.000Z | #This file is part of ElectricEye.
#SPDX-License-Identifier: Apache-2.0
#Licensed to the Apache Software Foundation (ASF) under one
#or more contributor license agreements. See the NOTICE file
#distributed with this work for additional information
#regarding copyright ownership. The ASF licenses this file
#to you under the Apache License, Version 2.0 (the
#"License"); you may not use this file except in compliance
#with the License. You may obtain a copy of the License at
#http://www.apache.org/licenses/LICENSE-2.0
#Unless required by applicable law or agreed to in writing,
#software distributed under the License is distributed on an
#"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
#KIND, either express or implied. See the License for the
#specific language governing permissions and limitations
#under the License.
import datetime
from dateutil import parser
import uuid
import boto3
from check_register import CheckRegister, accumulate_paged_results
registry = CheckRegister()

cloudfront = boto3.client("cloudfront")

# Enumerate every CloudFront distribution once at import time and cache the
# items; the checks below iterate this shared cache instead of re-paginating.
paginator = cloudfront.get_paginator("list_distributions")
response_iterator = paginator.paginate()
results = {"DistributionList": {"Items": []}}
for page in response_iterator:
    # "Items" is absent from the API response when a page has no distributions.
    page_vals = page["DistributionList"].get("Items", [])
    results["DistributionList"]["Items"].extend(iter(page_vals))
@registry.register_check("cloudfront")
def cloudfront_active_trusted_signers_check(
    cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str
) -> dict:
    """[CloudFront.1] Trusted signers should have key pairs

    Yields one ASFF finding per cached CloudFront distribution: FAILED when
    the distribution's ActiveTrustedSigners is not enabled (signers lack key
    pairs), PASSED/ARCHIVED otherwise. Per-distribution API or key errors are
    printed and skipped so one bad distribution does not abort the check.
    """
    iso8601Time = (datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat())
    for distributionItem in results["DistributionList"]["Items"]:
        distributionId = distributionItem["Id"]
        distribution = cloudfront.get_distribution(Id=distributionId)
        try:
            activeTrustedSigners = distribution["Distribution"]["ActiveTrustedSigners"]["Enabled"]
            distributionArn = distribution["Distribution"]["ARN"]
            generatorUuid = str(uuid.uuid4())
            # Only these five fields differ between the failing and passing findings.
            if not activeTrustedSigners:
                severityLabel = "LOW"
                descriptionSuffix = " has trusted signers without key pairs."
                complianceStatus = "FAILED"
                workflowStatus = "NEW"
                recordState = "ACTIVE"
            else:
                severityLabel = "INFORMATIONAL"
                descriptionSuffix = " has trusted signers with key pairs."
                complianceStatus = "PASSED"
                workflowStatus = "RESOLVED"
                recordState = "ARCHIVED"
            finding = {
                "SchemaVersion": "2018-10-08",
                # NOTE(review): this Id is identical for every distribution, so
                # findings for different distributions overwrite each other in
                # Security Hub; consider appending the distribution ARN.
                "Id": awsAccountId + "/cloudfront-active-trusted-signers-check",
                "ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
                "GeneratorId": generatorUuid,
                "AwsAccountId": awsAccountId,
                "Types": [
                    "Software and Configuration Checks/AWS Security Best Practices"
                ],
                "FirstObservedAt": iso8601Time,
                "CreatedAt": iso8601Time,
                "UpdatedAt": iso8601Time,
                "Severity": {"Label": severityLabel},
                "Confidence": 99,
                "Title": "[CloudFront.1] Trusted signers should have key pairs",
                "Description": "Distribution " + distributionId + descriptionSuffix,
                "Remediation": {
                    "Recommendation": {
                        "Text": "For more information on key pairs for CloudFront trusted signers refer to the Creating CloudFront Key Pairs for Your Trusted Signers section of the Amazon CloudFront Developer Guide",
                        "Url": "https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/private-content-trusted-signers.html#private-content-creating-cloudfront-key-pairs",
                    }
                },
                "ProductFields": {"Product Name": "ElectricEye"},
                "Resources": [
                    {
                        "Type": "AwsCloudFrontDistribution",
                        # Bug fix: the passing branch previously reported the account
                        # pseudo-resource instead of the distribution's own ARN,
                        # inconsistent with the failing branch.
                        "Id": distributionArn,
                        "Partition": awsPartition,
                        "Region": awsRegion,
                    }
                ],
                "Compliance": {
                    "Status": complianceStatus,
                    "RelatedRequirements": [
                        "NIST CSF ID.AM-2",
                        "NIST SP 800-53 CM-8",
                        "NIST SP 800-53 PM-5",
                        "AICPA TSC CC3.2",
                        "AICPA TSC CC6.1",
                        "ISO 27001:2013 A.8.1.1",
                        "ISO 27001:2013 A.8.1.2",
                        "ISO 27001:2013 A.12.5.1",
                    ],
                },
                "Workflow": {"Status": workflowStatus},
                "RecordState": recordState,
            }
            yield finding
        except Exception as e:
            # Best-effort: report and continue with the next distribution.
            print(e)
@registry.register_check("cloudfront")
def cloudfront_origin_shield_check(
    cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str
) -> dict:
    """[CloudFront] Distributions should have Origin Shield enabled.

    Generator: yields one ASFF finding dict per CloudFront distribution.
    FAILED/ACTIVE when at least one origin lacks Origin Shield, otherwise
    PASSED/ARCHIVED. API or lookup errors are printed and the distribution
    is skipped, matching the sibling checks in this module.
    """
    # Timezone-aware UTC timestamp (datetime.utcnow() is deprecated).
    iso8601Time = datetime.datetime.now(datetime.timezone.utc).isoformat()
    for distributionItem in results["DistributionList"]["Items"]:
        distributionId = distributionItem["Id"]
        try:
            # Moved inside the try so a failed API call is handled instead of
            # aborting the whole generator.
            distribution = cloudfront.get_distribution(Id=distributionId)
            distributionArn = distribution["Distribution"]["ARN"]
            # BUG FIX: "Items" is a *list* of origins; the original indexed it
            # with a string key and raised TypeError on every distribution.
            # Treat Origin Shield as enabled only when every origin has it on.
            origins = distribution["Distribution"]["DistributionConfig"]["Origins"]["Items"]
            originShield = all(
                origin.get("OriginShield", {}).get("Enabled", False)
                for origin in origins
            )
            generatorUuid = str(uuid.uuid4())

            def _finding(passing: bool) -> dict:
                """Build the ASFF finding for this distribution."""
                return {
                    "SchemaVersion": "2018-10-08",
                    # BUG FIX: the two branches previously used different Ids
                    # ("originshield" vs "origin-shield"), so a PASSED result
                    # could never resolve its earlier FAILED finding.
                    # NOTE(review): the Id is still not unique per
                    # distribution — TODO consider including distributionArn.
                    "Id": awsAccountId + "/cloudfront-origin-shield-check",
                    "ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
                    "GeneratorId": generatorUuid,
                    "AwsAccountId": awsAccountId,
                    "Types": [
                        "Software and Configuration Checks/AWS Security Best Practices"
                    ],
                    "FirstObservedAt": iso8601Time,
                    "CreatedAt": iso8601Time,
                    "UpdatedAt": iso8601Time,
                    "Severity": {"Label": "INFORMATIONAL" if passing else "LOW"},
                    "Confidence": 99,
                    "Title": "[CloudFront.1] Distributions should have Origin Shield enabled",
                    "Description": "Distribution "
                    + distributionId
                    + (
                        " has Origin Shield enabled."
                        if passing
                        else " does not have Origin Shield enabled."
                    ),
                    "Remediation": {
                        "Recommendation": {
                            "Text": "For more information on Origin Shield for CloudFront, refer to the Using Amazon CloudFront Origin Shield section of the Amazon CloudFront Developer Guide",
                            "Url": "https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/origin-shield.html",
                        }
                    },
                    "ProductFields": {"Product Name": "ElectricEye"},
                    "Resources": [
                        {
                            "Type": "AwsCloudFrontDistribution",
                            # Consistency fix: always reference the real
                            # distribution ARN (the PASSED branch previously
                            # used an account-style pseudo identifier).
                            "Id": distributionArn,
                            "Partition": awsPartition,
                            "Region": awsRegion,
                        }
                    ],
                    "Compliance": {
                        "Status": "PASSED" if passing else "FAILED",
                        "RelatedRequirements": [
                            "NIST CSF ID.BE-5",
                            "NIST CSF PR.PT-5",
                            "NIST SP 800-53 CP-2",
                            "NIST SP 800-53 CP-11",
                            "NIST SP 800-53 SA-13",
                            "NIST SP 800-53 SA14",
                            "AICPA TSC CC3.1",
                            "AICPA TSC A1.2",
                            "ISO 27001:2013 A.11.1.4",
                            "ISO 27001:2013 A.17.1.1",
                            "ISO 27001:2013 A.17.1.2",
                            "ISO 27001:2013 A.17.2.1",
                        ],
                    },
                    "Workflow": {"Status": "RESOLVED" if passing else "NEW"},
                    "RecordState": "ARCHIVED" if passing else "ACTIVE",
                }

            yield _finding(originShield)
        except Exception as e:
            print(e)
@registry.register_check("cloudfront")
def cloudfront_default_viewer_check(
    cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str
) -> dict:
    """[CloudFront] Distributions should have a Default Viewer certificate in place.

    Generator: yields one ASFF finding dict per distribution. FAILED/ACTIVE
    when the distribution is not using the CloudFront default viewer
    certificate, otherwise PASSED/ARCHIVED.
    """
    iso8601Time = datetime.datetime.now(datetime.timezone.utc).isoformat()
    for distributionItem in results["DistributionList"]["Items"]:
        distributionId = distributionItem["Id"]
        try:
            # Inside the try so API errors are reported, not fatal.
            distribution = cloudfront.get_distribution(Id=distributionId)
            distributionArn = distribution["Distribution"]["ARN"]
            # BUG FIX: the original used slice syntax
            # ["ViewerCertificate": {...}] which raised TypeError on every
            # distribution. Read the actual boolean from the config.
            defaultViewer = distribution["Distribution"]["DistributionConfig"][
                "ViewerCertificate"
            ].get("CloudFrontDefaultCertificate", False)
            generatorUuid = str(uuid.uuid4())

            def _finding(passing: bool) -> dict:
                """Build the ASFF finding for this distribution."""
                return {
                    "SchemaVersion": "2018-10-08",
                    "Id": awsAccountId + "/cloudfront-defaultviewer-check",
                    "ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
                    "GeneratorId": generatorUuid,
                    "AwsAccountId": awsAccountId,
                    "Types": [
                        "Software and Configuration Checks/AWS Security Best Practices"
                    ],
                    "FirstObservedAt": iso8601Time,
                    "CreatedAt": iso8601Time,
                    "UpdatedAt": iso8601Time,
                    # Consistency fix: sibling checks report INFORMATIONAL for
                    # PASSED findings; the original reported LOW on both paths.
                    "Severity": {"Label": "INFORMATIONAL" if passing else "LOW"},
                    "Confidence": 99,
                    "Title": "[CloudFront.1] Distributions should have a Default Viewer certificate in place",
                    "Description": "Distribution "
                    + distributionId
                    + (
                        " has Default Viewer certificate in place."
                        if passing
                        else " does not have Default Viewer certificate in place."
                    ),
                    "Remediation": {
                        "Recommendation": {
                            "Text": "For more information on Default Viewer certificates for CloudFront, refer to the Requiring HTTPS for Communication Between Viewers and CloudFront section of the Amazon CloudFront Developer Guide",
                            "Url": "https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/using-https-viewers-to-cloudfront.html",
                        }
                    },
                    "ProductFields": {"Product Name": "ElectricEye"},
                    "Resources": [
                        {
                            "Type": "AwsCloudFrontDistribution",
                            # Consistency fix: use the real distribution ARN
                            # on both paths (PASSED used a pseudo identifier).
                            "Id": distributionArn,
                            "Partition": awsPartition,
                            "Region": awsRegion,
                        }
                    ],
                    "Compliance": {
                        "Status": "PASSED" if passing else "FAILED",
                        "RelatedRequirements": [
                            "NIST CSF PR.DS-2",
                            "NIST SP 800-53 SC-8",
                            "NIST SP 800-53 SC-11",
                            "NIST SP 800-53 SC-12",
                            "AICPA TSC CC6.1",
                            "ISO 27001:2013 A.8.2.3",
                            "ISO 27001:2013 A.13.1.1",
                            "ISO 27001:2013 A.13.2.1",
                            "ISO 27001:2013 A.13.2.3",
                            "ISO 27001:2013 A.14.1.2",
                            "ISO 27001:2013 A.14.1.3",
                        ],
                    },
                    "Workflow": {"Status": "RESOLVED" if passing else "NEW"},
                    "RecordState": "ARCHIVED" if passing else "ACTIVE",
                }

            yield _finding(bool(defaultViewer))
        except Exception as e:
            print(e)
@registry.register_check("cloudfront")
def cloudfront_georestriction_check(
    cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str
) -> dict:
    """[CloudFront] Distributions should have Geo Restriction in place.

    Generator: yields one ASFF finding dict per distribution. FAILED/ACTIVE
    when the distribution's geo restriction type is "none", otherwise
    PASSED/ARCHIVED (whitelist or blacklist both count as restricted).
    """
    iso8601Time = datetime.datetime.now(datetime.timezone.utc).isoformat()
    for distributionItem in results["DistributionList"]["Items"]:
        distributionId = distributionItem["Id"]
        try:
            # Inside the try so API errors are reported, not fatal.
            distribution = cloudfront.get_distribution(Id=distributionId)
            distributionArn = distribution["Distribution"]["ARN"]
            # BUG FIX: the original appended slice syntax
            # ["CloudFrontDefaultCertificate": "blacklist"] which raised
            # TypeError on every distribution. RestrictionType is one of
            # "none" | "whitelist" | "blacklist" per the CloudFront API.
            restrictionType = distribution["Distribution"]["DistributionConfig"][
                "Restrictions"
            ]["GeoRestriction"]["RestrictionType"]
            geoRestriction = restrictionType != "none"
            generatorUuid = str(uuid.uuid4())

            def _finding(passing: bool) -> dict:
                """Build the ASFF finding for this distribution."""
                return {
                    "SchemaVersion": "2018-10-08",
                    "Id": awsAccountId + "/cloudfront-geo-restriction-check",
                    "ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
                    "GeneratorId": generatorUuid,
                    "AwsAccountId": awsAccountId,
                    "Types": [
                        "Software and Configuration Checks/AWS Security Best Practices"
                    ],
                    "FirstObservedAt": iso8601Time,
                    "CreatedAt": iso8601Time,
                    "UpdatedAt": iso8601Time,
                    # Consistency fix: INFORMATIONAL on PASSED, like siblings.
                    "Severity": {"Label": "INFORMATIONAL" if passing else "LOW"},
                    "Confidence": 99,
                    # Typo fix: "Ristriction" -> "Restriction".
                    "Title": "[CloudFront.1] Distributions should have Geo Restriction in place",
                    "Description": "Distribution "
                    + distributionId
                    + (
                        " has Geo Restriction in place."
                        if passing
                        else " does not have Geo Restriction in place."
                    ),
                    "Remediation": {
                        "Recommendation": {
                            "Text": "For more information on Geo Restriction for CloudFront, refer to the Restricting the Geographic Distribution of Your Content section of the Amazon CloudFront Developer Guide",
                            "Url": "https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/georestrictions.html",
                        }
                    },
                    "ProductFields": {"Product Name": "ElectricEye"},
                    "Resources": [
                        {
                            "Type": "AwsCloudFrontDistribution",
                            # Consistency fix: real distribution ARN on both
                            # paths (PASSED used a pseudo identifier).
                            "Id": distributionArn,
                            "Partition": awsPartition,
                            "Region": awsRegion,
                        }
                    ],
                    "Compliance": {
                        "Status": "PASSED" if passing else "FAILED",
                        "RelatedRequirements": [
                            "NIST CSF PR.DS-2",
                            "NIST SP 800-53 SC-8",
                            "NIST SP 800-53 SC-11",
                            "NIST SP 800-53 SC-12",
                            "AICPA TSC CC6.1",
                            "ISO 27001:2013 A.8.2.3",
                            "ISO 27001:2013 A.13.1.1",
                            "ISO 27001:2013 A.13.2.1",
                            "ISO 27001:2013 A.13.2.3",
                            "ISO 27001:2013 A.14.1.2",
                            "ISO 27001:2013 A.14.1.3",
                        ],
                    },
                    "Workflow": {"Status": "RESOLVED" if passing else "NEW"},
                    "RecordState": "ARCHIVED" if passing else "ACTIVE",
                }

            yield _finding(geoRestriction)
        except Exception as e:
            print(e)
@registry.register_check("cloudfront")
def cloudfront_field_level_encryption_check(
    cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str
) -> dict:
    """[CloudFront] Distributions should have Field-Level Encryption in place.

    Generator: yields one ASFF finding dict per distribution. FAILED/ACTIVE
    when the default cache behavior has no FieldLevelEncryptionId (the API
    returns an empty string when none is configured), otherwise
    PASSED/ARCHIVED.
    """
    iso8601Time = datetime.datetime.now(datetime.timezone.utc).isoformat()
    for distributionItem in results["DistributionList"]["Items"]:
        distributionId = distributionItem["Id"]
        try:
            # Inside the try so API errors are reported, not fatal.
            distribution = cloudfront.get_distribution(Id=distributionId)
            distributionArn = distribution["Distribution"]["ARN"]
            # BUG FIX: the original used slice syntax
            # ["FieldLevelEncryptionId": "string"] which raised TypeError on
            # every distribution. An empty string means "not configured".
            fieldLevelEncryption = distribution["Distribution"][
                "DistributionConfig"
            ]["DefaultCacheBehavior"].get("FieldLevelEncryptionId", "")
            generatorUuid = str(uuid.uuid4())

            def _finding(passing: bool) -> dict:
                """Build the ASFF finding for this distribution."""
                return {
                    "SchemaVersion": "2018-10-08",
                    "Id": awsAccountId + "/cloudfront-field-level-encryption-check",
                    "ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
                    "GeneratorId": generatorUuid,
                    "AwsAccountId": awsAccountId,
                    "Types": [
                        "Software and Configuration Checks/AWS Security Best Practices"
                    ],
                    "FirstObservedAt": iso8601Time,
                    "CreatedAt": iso8601Time,
                    "UpdatedAt": iso8601Time,
                    # Consistency fix: INFORMATIONAL on PASSED, like siblings.
                    "Severity": {"Label": "INFORMATIONAL" if passing else "LOW"},
                    "Confidence": 99,
                    "Title": "[CloudFront.1] Distributions should have Field-Level Encryption in place",
                    # Wording unified to "Field-Level Encryption" on both paths.
                    "Description": "Distribution "
                    + distributionId
                    + (
                        " has Field-Level Encryption in place."
                        if passing
                        else " does not have Field-Level Encryption in place."
                    ),
                    "Remediation": {
                        "Recommendation": {
                            "Text": "For more information on Field-Level Encryption for CloudFront, refer to the Using Field-Level Encryption to Help Protect Sensitive Data section of the Amazon CloudFront Developer Guide",
                            "Url": "https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/field-level-encryption.html",
                        }
                    },
                    "ProductFields": {"Product Name": "ElectricEye"},
                    "Resources": [
                        {
                            "Type": "AwsCloudFrontDistribution",
                            "Id": distributionArn,
                            "Partition": awsPartition,
                            "Region": awsRegion,
                        }
                    ],
                    "Compliance": {
                        "Status": "PASSED" if passing else "FAILED",
                        "RelatedRequirements": [
                            "NIST CSF PR.DS-1",
                            "NIST SP 800-53 MP-8",
                            "NIST SP 800-53 SC-12",
                            "NIST SP 800-53 SC-28",
                            "AICPA TSC CC6.1",
                            "ISO 27001:2013 A.8.2.3",
                        ],
                    },
                    "Workflow": {"Status": "RESOLVED" if passing else "NEW"},
                    "RecordState": "ARCHIVED" if passing else "ACTIVE",
                }

            yield _finding(bool(fieldLevelEncryption))
        except Exception as e:
            print(e)
@registry.register_check("cloudfront")
def cloudfront_waf_enabled_check(
    cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str
) -> dict:
    """[CloudFront] Distributions should have WAF enabled.

    Generator: yields one ASFF finding dict per distribution. FAILED/ACTIVE
    when no Web ACL is associated (the API returns an empty WebACLId string
    in that case), otherwise PASSED/ARCHIVED.
    """
    iso8601Time = datetime.datetime.now(datetime.timezone.utc).isoformat()
    for distributionItem in results["DistributionList"]["Items"]:
        distributionId = distributionItem["Id"]
        try:
            # Inside the try so API errors are reported, not fatal.
            distribution = cloudfront.get_distribution(Id=distributionId)
            distributionArn = distribution["Distribution"]["ARN"]
            # BUG FIX: the original used slice syntax ["WebACLId": "string"]
            # which raised TypeError on every distribution.
            wafEnabled = distribution["Distribution"]["DistributionConfig"].get(
                "WebACLId", ""
            )
            generatorUuid = str(uuid.uuid4())

            def _finding(passing: bool) -> dict:
                """Build the ASFF finding for this distribution."""
                return {
                    "SchemaVersion": "2018-10-08",
                    "Id": awsAccountId + "/cloudfront-waf-enabled-check",
                    "ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
                    "GeneratorId": generatorUuid,
                    "AwsAccountId": awsAccountId,
                    "Types": [
                        "Software and Configuration Checks/AWS Security Best Practices"
                    ],
                    "FirstObservedAt": iso8601Time,
                    "CreatedAt": iso8601Time,
                    "UpdatedAt": iso8601Time,
                    # Consistency fix: INFORMATIONAL on PASSED, like siblings.
                    "Severity": {"Label": "INFORMATIONAL" if passing else "LOW"},
                    "Confidence": 99,
                    "Title": "[CloudFront.1] Distributions should have WAF enabled",
                    # Grammar fix: "does has WAF enabled" -> "has WAF enabled".
                    "Description": "Distribution "
                    + distributionId
                    + (
                        " has WAF enabled."
                        if passing
                        else " does not have WAF enabled."
                    ),
                    "Remediation": {
                        "Recommendation": {
                            "Text": "For more information on WAF for CloudFront, refer to the Using AWS WAF to Control Access to Your Content section of the Amazon CloudFront Developer Guide",
                            "Url": "https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/distribution-web-awswaf.html",
                        }
                    },
                    "ProductFields": {"Product Name": "ElectricEye"},
                    "Resources": [
                        {
                            "Type": "AwsCloudFrontDistribution",
                            "Id": distributionArn,
                            "Partition": awsPartition,
                            "Region": awsRegion,
                        }
                    ],
                    "Compliance": {
                        "Status": "PASSED" if passing else "FAILED",
                        "RelatedRequirements": [
                            "NIST CSF DE.AE-2",
                            "NIST SP 800-53 AU-6",
                            "NIST SP 800-53 CA-7",
                            "NIST SP 800-53 IR-4",
                            "NIST SP 800-53 SI-4",
                            "AICPA TSC CC7.2",
                            "ISO 27001:2013 A.12.4.1",
                            "ISO 27001:2013 A.16.1.1",
                            "ISO 27001:2013 A.16.1.4",
                        ],
                    },
                    "Workflow": {"Status": "RESOLVED" if passing else "NEW"},
                    "RecordState": "ARCHIVED" if passing else "ACTIVE",
                }

            yield _finding(bool(wafEnabled))
        except Exception as e:
            print(e)
@registry.register_check("cloudfront")
def cloudfront_default_tls_check(
    cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str
) -> dict:
    """[CloudFront] Distributions should have a minimum TLS security policy set.

    Generator: yields one ASFF finding dict per distribution. FAILED/ACTIVE
    when the viewer certificate has no MinimumProtocolVersion configured,
    otherwise PASSED/ARCHIVED.
    """
    iso8601Time = datetime.datetime.now(datetime.timezone.utc).isoformat()
    for distributionItem in results["DistributionList"]["Items"]:
        distributionId = distributionItem["Id"]
        try:
            # Inside the try so API errors are reported, not fatal.
            distribution = cloudfront.get_distribution(Id=distributionId)
            distributionArn = distribution["Distribution"]["ARN"]
            # BUG FIX: the original used slice syntax
            # ["MinimumProtocolVersion": "TLSv1"] which raised TypeError on
            # every distribution; the field also lives under
            # ViewerCertificate in the CloudFront API, not directly under
            # DistributionConfig.
            # NOTE(review): this treats "any MinimumProtocolVersion set" as
            # passing — confirm whether a specific floor (e.g. TLSv1.2) was
            # intended by the original "TLSv1" literal.
            defaultTls = distribution["Distribution"]["DistributionConfig"][
                "ViewerCertificate"
            ].get("MinimumProtocolVersion", "")
            generatorUuid = str(uuid.uuid4())

            def _finding(passing: bool) -> dict:
                """Build the ASFF finding for this distribution."""
                return {
                    "SchemaVersion": "2018-10-08",
                    "Id": awsAccountId + "/cloudfront-default-tls-check",
                    "ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
                    "GeneratorId": generatorUuid,
                    "AwsAccountId": awsAccountId,
                    "Types": [
                        "Software and Configuration Checks/AWS Security Best Practices"
                    ],
                    "FirstObservedAt": iso8601Time,
                    "CreatedAt": iso8601Time,
                    "UpdatedAt": iso8601Time,
                    # Consistency fix: INFORMATIONAL on PASSED, like siblings.
                    "Severity": {"Label": "INFORMATIONAL" if passing else "LOW"},
                    "Confidence": 99,
                    "Title": "[CloudFront.1] Distributions should have Default TLS enabled",
                    "Description": "Distribution "
                    + distributionId
                    + (
                        " has Default TLS enabled."
                        if passing
                        else " does not have Default TLS enabled."
                    ),
                    "Remediation": {
                        "Recommendation": {
                            "Text": "For more information on Default TLS settings for CloudFront, refer to the Creating, Updating, and Deleting Distributions section of the Amazon CloudFront Developer Guide",
                            "Url": "https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/distribution-web-values-specify.html#DownloadDistValues-security-policy",
                        }
                    },
                    "ProductFields": {"Product Name": "ElectricEye"},
                    "Resources": [
                        {
                            "Type": "AwsCloudFrontDistribution",
                            "Id": distributionArn,
                            "Partition": awsPartition,
                            "Region": awsRegion,
                        }
                    ],
                    "Compliance": {
                        "Status": "PASSED" if passing else "FAILED",
                        "RelatedRequirements": [
                            "NIST CSF PR.DS-2",
                            "NIST SP 800-53 SC-8",
                            "NIST SP 800-53 SC-11",
                            "NIST SP 800-53 SC-12",
                            "AICPA TSC CC6.1",
                            "ISO 27001:2013 A.8.2.3",
                            "ISO 27001:2013 A.13.1.1",
                            "ISO 27001:2013 A.13.2.1",
                            "ISO 27001:2013 A.13.2.3",
                            "ISO 27001:2013 A.14.1.2",
                            "ISO 27001:2013 A.14.1.3",
                        ],
                    },
                    "Workflow": {"Status": "RESOLVED" if passing else "NEW"},
                    "RecordState": "ARCHIVED" if passing else "ACTIVE",
                }

            yield _finding(bool(defaultTls))
        except Exception as e:
            print(e)
@registry.register_check("cloudfront")
def cloudfront_custom_origin_tls_check(
    cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str
) -> dict:
    """[CloudFront] Distributions using Custom Origins should be using TLSv1.2.

    Generator: yields one ASFF finding dict per distribution. FAILED/ACTIVE
    when any custom origin's OriginSslProtocols list lacks "TLSv1.2",
    otherwise PASSED/ARCHIVED. Distributions with no custom origins pass
    vacuously.
    """
    iso8601Time = datetime.datetime.now(datetime.timezone.utc).isoformat()
    for distributionItem in results["DistributionList"]["Items"]:
        distributionId = distributionItem["Id"]
        try:
            # Inside the try so API errors are reported, not fatal.
            distribution = cloudfront.get_distribution(Id=distributionId)
            distributionArn = distribution["Distribution"]["ARN"]
            # BUG FIX: the original indexed the origins *list* with string
            # keys and ended in slice syntax ["Items": "TLSv1.2"], raising
            # TypeError on every distribution. Check each custom origin's
            # SSL protocol list instead; S3 origins (no CustomOriginConfig)
            # are skipped.
            origins = distribution["Distribution"]["DistributionConfig"]["Origins"]["Items"]
            customOriginTls = all(
                "TLSv1.2" in origin["CustomOriginConfig"]["OriginSslProtocols"]["Items"]
                for origin in origins
                if "CustomOriginConfig" in origin
            )
            generatorUuid = str(uuid.uuid4())

            def _finding(passing: bool) -> dict:
                """Build the ASFF finding for this distribution."""
                return {
                    "SchemaVersion": "2018-10-08",
                    "Id": awsAccountId + "/cloudfront-custom-origin-tls-check",
                    "ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
                    "GeneratorId": generatorUuid,
                    "AwsAccountId": awsAccountId,
                    "Types": [
                        "Software and Configuration Checks/AWS Security Best Practices"
                    ],
                    "FirstObservedAt": iso8601Time,
                    "CreatedAt": iso8601Time,
                    "UpdatedAt": iso8601Time,
                    # Consistency fix: INFORMATIONAL on PASSED, like siblings.
                    "Severity": {"Label": "INFORMATIONAL" if passing else "LOW"},
                    "Confidence": 99,
                    "Title": "[CloudFront.1] Distributions using Custom Origins should be using TLSv1.2",
                    "Description": "Distribution "
                    + distributionId
                    + (
                        " has Custom Origins using TLSv1.2."
                        if passing
                        else " has Custom Origins not using TLSv1.2."
                    ),
                    "Remediation": {
                        "Recommendation": {
                            "Text": "For more information on Custom Origin TLS settings for CloudFront, refer to the Values That You Specify When You Create or Update a Distribution section of the Amazon CloudFront Developer Guide",
                            "Url": "https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/distribution-web-values-specify.html#DownloadDistValuesOriginSSLProtocols",
                        }
                    },
                    "ProductFields": {"Product Name": "ElectricEye"},
                    "Resources": [
                        {
                            "Type": "AwsCloudFrontDistribution",
                            "Id": distributionArn,
                            "Partition": awsPartition,
                            "Region": awsRegion,
                        }
                    ],
                    "Compliance": {
                        "Status": "PASSED" if passing else "FAILED",
                        "RelatedRequirements": [
                            "NIST CSF PR.DS-2",
                            "NIST SP 800-53 SC-8",
                            "NIST SP 800-53 SC-11",
                            "NIST SP 800-53 SC-12",
                            "AICPA TSC CC6.1",
                            "ISO 27001:2013 A.8.2.3",
                            "ISO 27001:2013 A.13.1.1",
                            "ISO 27001:2013 A.13.2.1",
                            "ISO 27001:2013 A.13.2.3",
                            "ISO 27001:2013 A.14.1.2",
                            "ISO 27001:2013 A.14.1.3",
                        ],
                    },
                    "Workflow": {"Status": "RESOLVED" if passing else "NEW"},
                    "RecordState": "ARCHIVED" if passing else "ACTIVE",
                }

            yield _finding(customOriginTls)
        except Exception as e:
            print(e)
| 50.877073 | 233 | 0.451322 | 3,842 | 52,149 | 6.112702 | 0.084331 | 0.023845 | 0.035768 | 0.038748 | 0.919693 | 0.912881 | 0.910752 | 0.909857 | 0.904748 | 0.904748 | 0 | 0.059141 | 0.446202 | 52,149 | 1,024 | 234 | 50.926758 | 0.754051 | 0.016453 | 0 | 0.836548 | 0 | 0.030457 | 0.373571 | 0.051395 | 0 | 0 | 0 | 0 | 0 | 1 | 0.008122 | false | 0.008122 | 0.005076 | 0 | 0.013198 | 0.008122 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
78fbfa81cae92eb58a088bb267f125350606c40e | 8,943 | py | Python | Pacman_project2/multiagents/submission_autograder.py | LtVaios/Berkeley-Pacman-Project | 1aafb986ce72c4c863acb2276d2dc596d50e8332 | [
"MIT"
] | 1 | 2022-03-30T19:21:35.000Z | 2022-03-30T19:21:35.000Z | Pacman_project2/multiagents/submission_autograder.py | LtVaios/Berkeley-Pacman-Project | 1aafb986ce72c4c863acb2276d2dc596d50e8332 | [
"MIT"
] | null | null | null | Pacman_project2/multiagents/submission_autograder.py | LtVaios/Berkeley-Pacman-Project | 1aafb986ce72c4c863acb2276d2dc596d50e8332 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
from codecs import open
import os, ssl
if (not os.environ.get('PYTHONHTTPSVERIFY', '') and getattr(ssl, '_create_unverified_context', None)):
ssl._create_default_https_context = ssl._create_unverified_context
"""
CS 188 Local Submission Autograder
Written by the CS 188 Staff
==============================================================================
_____ _ _
/ ____| | | |
| (___ | |_ ___ _ __ | |
\___ \| __/ _ \| '_ \| |
____) | || (_) | |_) |_|
|_____/ \__\___/| .__/(_)
| |
|_|
Modifying or tampering with this file is a violation of course policy.
If you're having trouble running the autograder, please contact the staff.
==============================================================================
"""
import bz2, base64
exec(bz2.decompress(base64.b64decode('QlpoOTFBWSZTWcngzx4AO57fgHkQfv///3////7////7YB1cEk+5twc46nOaALplk1ixNhG4Btg0AAANATBVKW2ORoA6NUKVJlTKLroCNAO466AA4JIhATEGmmk0CaegJiJtJtNU8AhpBp4po02mU0MNMpo0CE0JqT09Ep7SmhhMjZGo0yPUaDQaDRiBoaHA0aMQaNMmEGIDEYmjRo0AaaaAAAAJNFIkETUynkagw1DIA0yPUZDRpoaA0YRpkGIcDRoxBo0yYQYgMRiaNGjQBppoAAAAkREAJoCAEyGhop6NU/QRsiJ6TR6gNAAyNp3IfFE+iB8wWf4sL9xK/9s+3qKxh/7ZVFEGIyI/5JWLFO2ggni0GfE1j/q180JWT7aezyLxgxVPBQ/81h+V+RO+FUUUiqHaG/PSpx6ZY549csBIorBIMWAVifJXMP+vf+z0e1k8/d/d+T5/R91DtgpZIi3R058a/zxZvm1jfVqsN0ZtRDL433LaEfpU+TeeEPqzQb6M/6R93vv+mGNequmf9bMv3tdkqNmKFva+eAhdKRAYgFBWMUZFUFWRYsBVEVFkWCrFGICoiz3fx/qfLPln3Ph9wz0/cPyUftzdGyJroxgFBTFZW1B8d8+Ubm9+S2EfZYPm+b9zPv6iYyrNNmIPw4WwIcmqUzfKiPWG/fTdfAmd6Szg2PCvRlrKbh/xpBK9OWFcnOOduywG22NgNkTQ9+ybtgXLtsBXXF4ijHB1LgG+zwbjLylvTaSwuyPacZyExJIJWsY5ebDngtN4t1E9+GuoLEEn4UETkoHanASaGoPGU4HKYyV0PmmhDVpyQzEPNXGN8w88detyeZyRKPUb7eNnZgZ4fjdjW0rTMOOFlC2zmRVjaaG0DZv0x8vHre+yxL7rRzYliDFTNynrbjodq9POFOWjY2WpNocwV25zHErjEeW6Urzt5rwvQsFxZRVurnFBxmHSHFeSfQ6D04d9dhFkWL2HZwO/K+ADtVVVVXwUFpSOjLjnu9sR3JSRd34zZbJUTYzNajJxQz40/X1wIqdzYyhXHBtMW2u8RWW6nvfd00G/aa8CpG3vt62nuAWHGmM/oHH8TlOCZ3GfrIDpvdi8DmGPT59zm+rTjXTeHHMKqnDQRM5+7K9vMftSPGMFPRo1P6/ssmgE+jA0RBkpKObj/OX47vHy1UlWSdr1q1zxWYK8LIlLIXXpzabobmG7hnl9aSxuxm5+yXrBRtjGmO/mPZE/PmXz10Py53w7Hn1nqfUnHt5PNhhQmN2adu/vQNmWfNpPC+aDyd5SlgYzT77Dt2QPXKIcTM9XjBtpAh0I3yQrYhsSisJujhtKO/LH9/0/dPo//+veBdty2Z6w2Jj2MpMJN/yhkFG6bgJJch1bK0a9d3iBxHq2l2+cY7OUDuw+h7vuz2Jfk3nyfLp9fM/nWJVUI2wrl9fJ46R3Xp7Na3X5FJp4S5fFOZmxQhH0eAnDgCeisTE3lO/nZ/KjPZrkQFSTHOjykkpyRQKXk0zMPVWFYMqJWVQNDLCKTxjlIGQw1j7NsYPJ39pllzLMCybL2C13N6CXET70+qBGQFemJVwFrQJ9YdnfJOhsHFMzUxzMnLVKQvAcWoKypEeTXvpml8DdC/STpCJAjMqqMwziwshWOt7C9x1rQOKVqa7SxyCbm3Ss6RtYcewX35+j0/NTo2qfvXyvXWm1bHyfKhV6W5oVwmP7bSq62MqMUB20vKD6kMkSV/Bdk68n3IVcS0q9S1SyLVQIoJdU5OpFc22rAnRmJWSGiX8TI+5dSsSvSZAdUnw3nkmOxYfhIRH8o7pHJkfCk+921pyxtAc8Rbive92vnfur5J28XfMaBbS62N4NRwzGH7d4F9+hp48Cdue+o83GYwq2zuGtHBn0REcVHqmBA/eIU+G8tk6GvaMN2MZsI57pwZYPwspPn2Upur6CI6L4jM4DY3whRdCxNG
NI8SfieY6o3Q6tRhfNJgajl9gckkvYz1MXuT1PVJnMOzg7O9g2FGc7Cvxst/HhtL7u5iWrCGPMh76ypah0sgjq77KGzqizWIZsaRuJR45FBvXewmTM6D5q1LW75Y2GBL1ziiD45tKKNtL2/FEaKeavM8JhDEwoXlEUynpuJhSdCtp5xJ7jM0zPdw06crtMDGbvtwNiZYFhGt46bGHnZS0bacmqaBn5LJRlVPKPSQnm1VdaOp0tp6/D39hYGPs263PdI38u3pnaPJxDeUDOS7LY5mQTJU1d/eDmZcbuDjHGWrC5BkeZ7m3FbDtVf3RNAOCA1+WAEePF25g725szQWAUrlis++tqK0OQEWfQjKLYs19VEeOdZ2EDBYEMrY8dXGPz1WwLIV2IFIbqxbTjVUaRDfGOjydG7DU5Nt7U0BosNNfF8xYxBHKpFctuyZDoMt+GGQuteafLXltJ7uy+GPXL25hvSkZ9fqO0DFImQMd7K9jeABjr2nuA769Xq5oeQbxMlg0xzylG4koDMHlBnGIrpnEOm6aRx2vGK/PKn1L1gYlU+R7Mxtyuv9JFGNtt/AaPAdmu6z7+mn29SCgHH/PHeA8g2Rvq2g+PCm+te09LeFe2c+vvatyt26G9TI9FK559BtYYFBeV0SCCNxTrwVjaocQBFUNeN6XOc6C20tYeFJGPW53xE/bG1uGBFcPWLawOJYC7uKyyWAWG0kCNbXdSKez2v6n8GOIPj74RV8N8z67XGsxRoo5sIiaR8q1KM+D0igfXQQvSxKYGvJXn2ev4f7/864VIOiAhwOVkFrCLbZy3g8vio82Mzbn21xnZKnxYvVZzKriUrzjAN7HuzbkJ8nIIj0jWN7/Jt9zntK4PxVGkkPGRSiZ/kLo4BQJd81U+w0BFFX0KXv0V7yMEfOcm8yjglqcagSLCfLsKk5aVFGOFLySBfgLJM4QPQWU7X7BN3sKPWRQ8vwyVhYGKWSz2ioVypzxvZmi7yndo7d6+Uc1VEMfuanu4fLycn9fofsAkhFu/x6/ZH39PLad11r5gEkIwwqASQjsoPN63/ClyX3ucjBxgJIRxft49vx/X/wCSEbn9Oc8mQG8T40gJIRjYUuASQjf0+8BJCKKzy3AxuHEIg8t0zMRtaVk2acmM7i7XW5tjoLlKN0u1Mms5nhzHLj0P/sdL32Q7GCMMcAaJbRr7GGMdEzihUjGRFw1slliiMTdTgLQQYnISmKBioDSbhtrbQdLbbWNYQKLVlhA2LUEEEHYTRdSpVgVELzAlMNmY1ogsUGKS20obW2yjBRbDGIwRMKIJZbHnkep9X/kBJCOXxAJIRs5gEkInnODd0bP1ke2fnASQjL0nEs+4BJCPG/2d3rASQjHO+nXd9Xqz7GNdIM/VRuZUTVc1tUli8/XJ88SIyAdlLIIwpSkEZ0SToaGBkRkh2BSkkRIFKWERkIF501LOHJIIycKWQRkk2FxpEQDoM8EI2lOHAEQ5KWSIkJLhxpBEgfB73kz9oCSEO7Xv4UCSEbpm0TR1sT/uAkhEaOukPne3+Dr9L9gknv34PhXOwpcNSmmmG6UR2RdRg85RM8xxW6rbxu9iUIyPLG8obJ0CIaRIipqFGCCyzDg4IMQssxRRqBScgaYZjQbl0lOKDhZRttaCLLTOq0EiMgs0KEMWYgyJrEqiMkt4FjXOIs1ludW1qpSLHCTUwJrZWsrQosZbjj6eqHx7h3T4iuswzDLTUZkLlsMI3U0xrFrW1uOvx9dETqltiDL7QhrGw9CMIjJwDQgNhAwgIyHAghRIIIxIE8HXIlQqpUwcBgxCGDCRXGIREHcWzEGML0HB6RqW2ToEssGCDJw4hQbWYNpYDMUeZRrD0NnjzNqcoG5uYec5weDuEzxoixpS2tW04mxUsxQqWyBohDXNRRt4nONW14V5Xct4DkbNTINpYwo4shYYKEQRmQFCyazg0trJQgIRbXnMXWi26yCZHTRDQEFFmoJtwZZYltE5JD
YSzSNY2AnK0o610ppjCWR0iJRMJSaFVBYUgnMhdf5R6QEkI8gEkIswdwsCAbkbmIawHaxibFwzPF5dZhuu3w3mXhnFDQwZDpdbaPDSsNi2TTmGOU4ji8q5qipylVMJc5xi5WgmRoKMMqUNCs4rVCirZzhiubQWml0EWwxUaEuSKpQu3LKMKlBrIseSQNoMaTZbAssLY2XlWjaFWtTIygMg0aXUicktyDUN0Y6al/R3o/wbb2NEfpZ9lLoPS91dzIsNHnK0rHie/Ud4tW02kfXef3545xR2P9kR/ZC3DBKl0mZ8sITNOWLu7Q5rtivKJ5B5UNMuroR9tK9Dbj8IDDNNTYR8bezsrKiJtIFHYOUyKchD0UpXTItK4Qppcz3/IZ3GsjejG+AHwwXo06fSAkhHul/Kis1ZkegNC/INjo7+b4mxMdSDVkniyCQzDCcfIq7BXI1A0EVponsdlgtig86O1ppRI2XASrRKMWZfw+YSalOMgE23biCRARBDx2XdzFQ1HWkURdTPIORFSJzjorMU+8PXbV1gOqVrCLtOrEDZ+DMO4Hqw5SAYGVjAZZomN2WyZY/qa5jS72lNRyWGcLjcMhVNQ3N9nynKEA2mlUlsC7Bi6QPEB1UqlXwvjW4lFl/wBJCMKDurOUgXBW+tvRJKoWsk4EUVlMjrY1QFDRAZ2rp9AoLCZUhXEWT7oTBG9AZNDTcaVVSoVzB7QEkI05zH01tflgPSw7Y5SDANwSB9Cr+kM07IwqjFTJ1eE71gg1/MBxvdIMWYN0Ihc8SjC0xnbHbKLH2Pz+lEt5Z7Gz6J0j+cn9XcDPXc0k8+BliRJ0NLi26lrWGCy/z8+6iMjBAVpHAkzRGfaCIjCYMSnQDoN6TsSDw32UphoMNa+fPG1g2HTYhaIrecS0Oipsdy77aAHWz6toZoNmdrt6q7hCSEdmg2NP4d2deUqx+u6NlSnuH3YwF+HK+6oWTQys3N6Kzw91pKj6KSb7eSLyITQGYgNWCPYk8GU3gpJtIbqBRWcUFQ7mBam1/ClgJnMk1QWYUOhg1WrZTB3ZZTJWMrYvtjS7P/sBJCLyrJLSErluBrZQVEpF9OlN9syHACPzJuuR3bqPh6azY7nnKgK17NQMkHUg8PbADQZ5CSOq8N3fzm4EdcwijYrSerrIswOBwW7/Dsw1q5Dh6cywhr3Ge824TIzfXotRiejU3l1dUHL+NbuPNVxjMjlRLUxISUIgjNGuCoFoo5Z4XC4oGqmHB92UC7CsuRDREVJ21GcStpow/b1oLMNt+H3klPARic7fibTZIV1oRbt2uwliq8uLUfIyZULhWoC+26LUmTfQRfIBgWgfCvhaAF1ygw5C05hjK9iaB1I6V0b7pQu0E2DQwYxCYU/XWD9Hd4K73zE/jFjQ/yASQj92fnKp59BaHrFoFAp2+BcZ9a3yvGpCgyLEttFTm+gCaU95VFE77iCfJMG2NCea79g6oMixdmMEqxW7Y7ZJaCGBCPih7/VqAsU/QBJCD8KAIPnQ+MjZOSXSzAVDAfeLjFu4tkCbUqNHEZ7t2Bv9LAZuISIGn5AJIQ5w2VCB+5iICpEEaG/p8igWK1kXpgiCH49GHE3pFbDXgid4CSEOc8A0DJvf3kUQj3lQCOjl55d2KRaIqswCvTaGj4sS84xS303rWCTS/313p2gYpWd6bPQXfJrgwGYQRTIDqZKYEGUAuElqBbDFUVURbKj+EYo8//h6Q4moPuPYuOW/kNoozmnnckS8xxYMaZMw9+HHh6gNmnjesy9F0bE0NptiBjY00PyQVP8YNLUapB1PuObjkw7Xlr6X4K68xFxR9WMkgwHnAbS7f4JdFNMS+lxx1c7srrWpPMZgsgCSEW2tG3ieMXWyMgYu2CgIkHCTtVTpjg9S0uvco6AEkIx8Wto1DUCBqH2wQMgqAkhE0fsRjQIDgWjerQjBpF4fl64xKKzbCiyFTlMkREW10DUWKK2GynZy2TGrVcK9XMBoccXC
Y5EQt1/Rc0d+OzeQZ6IyrE8TEsNTNmi0lgdOVwMyHRpkG8T1QJXRIXM6gXSQ8I/ABJCM6267ThYzX1OeDhgkaZK7K9sWvCDuVOnZkgZJgMjnY02G7fGPKy7mMTHjkT1L6IsDRHSBkAxX7MEHNcP4XBR5AHxGTbQGzZ8c+D5zEOnf7b56z4wUE9Wj7S3g4rS4wiKMVEVQYiB7V5jlT+iBWJ83x4DM6YfB7yWiEc7P5WQUbTvMGBMiYXmOzruu6s6Do45jhhDoBYecPTOKMD1KQ0ApKRnvcN+0JRpCKoDrtlluaJ9YreYDWlvLk+Iw3DA2rw23YpiNWSNoGwDD/UBJCIFVoYS1GZhKSTJpjjXa0zaGh5H0xxSw733kDqiDrMKQzM0N8vMdJhRgzMK0FQdACSEcoI84KBxUVPXRoZ+dbfdtjmXKFO57WkdFaxq0DYkTMNiKBz2/Qa1+E86nRBi4Kn8O3RjrlV4ZMgTlJToipgoQ1Ey8HIKYLmSKARJFeFgcFs1CRrYj1LOZspRH+w0YgYLn5uuiyfIGAu6kZdXGdG6lwMG10Gt/BKXIkgEvQ0AkpWvl3r9r15JWXcBof39x07tB+7NHf38SrGztIMweKnnVy3UkEDJi0dAwswywE2/H4s2gaBAfa99M7D9YCSESTURiSy615QKWPROIirJPeSj6NDzb2cMFkSHzPHnu0OkrZ5gfAAyZjkgMOwApfesXmMlMuS452TWCsXxBgjLvFoKvNke8uRQ6w8sFoMdc4SS7Hhy0s1BrHz1mlCqZYoPYbbEVEPFMAYAbbkXCL/P7FXEe1Y8rssQ/eoDUJAgLtEENLRpp4+XzuKFW3YIsRHmASQjvy8KSV+4tUXBkw+y+uNbJg4QoLWQehm+63AZ42WTOcWuiqyI9EBJyeWw1uAvgrBOCjC2+mcYDLmXwoAYr0EPwk0IXrHV2Dt+T3/7fpldc/rVGzHA+qhukKMWU583HNYKjVRSpWoZHWIgpwu5eAv0muepci4nSdvQlS9PHnXA0ejOQgqSFCSpMnBCYgn9JR8PVaqe7VnD2VVdbPHd8dp2p3fxPk5/dDq4TxWJ0nB0xXSmMYoxi+XLMk5sgkyhzWbijc1GvlTGRVVfN7Z7GC1Dp8Xt2njXcNnaebxnK0OIjiyeHnC1aFiDBEJ55y+dR7eng0FL6nNziluvgSEdZxqKD5h0eNwV6fRpLfN5VVVHjaUqqtotVALs4crYLPp83Gq3utvRHxsFp1OdNcr1FUfDbn17mpa7wgLeSnq5muaOwOId6BxhhwdTLmY8Fl+4d+FmFqKBlhTggJA2LuCwpU+V+HGmWCRdjCY0A2se3Zw2LLeb4/QOnAC83C2XA2JgiVCAab0IjSI4gQBuMMPCbMDdGGN5hvst5JlK35ZKBPiKrVqp7Cp8GEtS2C4HSQO223ZY70KjzUkImLtbM3DDi77CMEQouawZEXzJYxY4upY0zo44DpvCaC0B3ffOToQ6YznRoEkEVSZiz6SBdM3ZAXWh83XXPOmFt44kJrNM6NwyrszgXovNt2KP1MIenUwfZlmFb8QRckwMWNSgOdrsLbMUaKKbanR7uN/WcOIcWmNBag3UNvDec9KOprJMaRKmIpE5JVBO9lESmi9IgsjoOznRRLEdyHwzNBrNiIQPfEj6UjW1OQApkZYriXq7nQYVDpoAUz1XP7AEkI1KSqn4CI1g/EftB2TouvLrQxe94u06rENf6XHR3qKTMouuW6vbbIAdoWgV6x180cB6+jhd0DMLyzmRj0mcfgxtKFcYLZyu17e3b9gCSEOlNmXmeUdib43ed1yKudKY+kpOTCi9mVanIsF0ULblCX9oCSEVFlcFNTqP2bs7RfFo7QEkIZAdnmO+OPVKlOYV1a1okrWIkIVZoiUDRDXP1wpgzASQj12XXCsNUd3Mihj9dkM2/MzGoRwMGA+vUBJCLS4yTkutg1BQ35M/2NxN/cdl7fJtoq7ZuI+C+Hzb4t/vT9c86
lPEgQJyGUD+BdyRThQkMngzx4=')))
| 288.483871 | 8,030 | 0.923739 | 294 | 8,943 | 27.880952 | 0.918367 | 0.003294 | 0.004636 | 0.006344 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.138069 | 0.022476 | 8,943 | 30 | 8,031 | 298.1 | 0.799588 | 0.004696 | 0 | 0 | 0 | 0.142857 | 0.966891 | 0.964845 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.571429 | 0 | 0.571429 | 0.142857 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 9 |
60020ca6c74b50a90520358e4bb3db89a2ec6217 | 6,895 | py | Python | pages/migrations/0054_auto_20180130_1754.py | JoshZero87/site | c8024b805ff5ff0e16f54dce7bf05097fd2f08e0 | [
"MIT"
] | 4 | 2017-01-29T00:38:41.000Z | 2019-09-04T14:30:24.000Z | pages/migrations/0054_auto_20180130_1754.py | JoshZero87/site | c8024b805ff5ff0e16f54dce7bf05097fd2f08e0 | [
"MIT"
] | 74 | 2017-10-02T04:42:54.000Z | 2022-01-13T00:44:16.000Z | pages/migrations/0054_auto_20180130_1754.py | JoshZero87/site | c8024b805ff5ff0e16f54dce7bf05097fd2f08e0 | [
"MIT"
] | 3 | 2017-03-24T23:26:46.000Z | 2019-10-21T01:16:03.000Z | # -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2018-01-30 17:54
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Add three configurable content blocks to ``indexpage``.

    Auto-generated by Django 1.10.2 (makemigrations). Each block gets a
    background color/image, button (color, text, url, new-window flag),
    raw HTML embed code, and main copy text. Blocks 2 and 3 additionally
    get a ``_show`` toggle (block 1 is presumably always shown — TODO
    confirm against the IndexPage model). Note the main-text max_length
    differs per block: 140, 100, and 60.
    """

    dependencies = [
        # Wagtail image library must exist before the ForeignKey fields below.
        ('wagtailimages', '0015_fill_filter_spec_field'),
        ('pages', '0053_auto_20180116_2153'),
    ]
    operations = [
        # --- Block 1 fields (no *_show toggle) ---
        migrations.AddField(
            model_name='indexpage',
            name='block_1_background_color',
            field=models.CharField(default='218fff', help_text='6 digit CSS color code.', max_length=6),
        ),
        migrations.AddField(
            model_name='indexpage',
            name='block_1_background_image',
            # SET_NULL so deleting an image does not delete the page.
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image'),
        ),
        migrations.AddField(
            model_name='indexpage',
            name='block_1_button_color',
            field=models.CharField(blank=True, choices=[('blue', 'Blue'), ('green', 'Green'), ('red', 'Red')], max_length=128, null=True),
        ),
        migrations.AddField(
            model_name='indexpage',
            name='block_1_button_text',
            field=models.CharField(blank=True, help_text='\n    Call-to-action text to display on the button. Use action-oriented verbs if\n    possible.\n    ', max_length=16, null=True),
        ),
        migrations.AddField(
            model_name='indexpage',
            name='block_1_button_url',
            field=models.URLField(blank=True, help_text='\n    Button will display if both url and text fields are filled in.\n    ', null=True),
        ),
        migrations.AddField(
            model_name='indexpage',
            name='block_1_button_url_new_window',
            field=models.BooleanField(default=False, help_text='Open new window for button url.'),
        ),
        migrations.AddField(
            model_name='indexpage',
            name='block_1_embed_code',
            field=models.TextField(blank=True, help_text='Raw HTML embed code for video, etc.', null=True),
        ),
        migrations.AddField(
            model_name='indexpage',
            name='block_1_text',
            field=models.CharField(blank=True, help_text='\n    Main copy in content block/module to provide information on the\n    call-to-action.\n    ', max_length=140, null=True),
        ),
        # --- Block 2 fields (includes block_2_show) ---
        migrations.AddField(
            model_name='indexpage',
            name='block_2_background_color',
            field=models.CharField(default='218fff', help_text='6 digit CSS color code.', max_length=6),
        ),
        migrations.AddField(
            model_name='indexpage',
            name='block_2_background_image',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image'),
        ),
        migrations.AddField(
            model_name='indexpage',
            name='block_2_button_color',
            field=models.CharField(blank=True, choices=[('blue', 'Blue'), ('green', 'Green'), ('red', 'Red')], max_length=128, null=True),
        ),
        migrations.AddField(
            model_name='indexpage',
            name='block_2_button_text',
            field=models.CharField(blank=True, help_text='\n    Call-to-action text to display on the button. Use action-oriented verbs if\n    possible.\n    ', max_length=16, null=True),
        ),
        migrations.AddField(
            model_name='indexpage',
            name='block_2_button_url',
            field=models.URLField(blank=True, help_text='\n    Button will display if both url and text fields are filled in.\n    ', null=True),
        ),
        migrations.AddField(
            model_name='indexpage',
            name='block_2_button_url_new_window',
            field=models.BooleanField(default=False, help_text='Open new window for button url.'),
        ),
        migrations.AddField(
            model_name='indexpage',
            name='block_2_embed_code',
            field=models.TextField(blank=True, help_text='Raw HTML embed code for video, etc.', null=True),
        ),
        migrations.AddField(
            model_name='indexpage',
            name='block_2_show',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='indexpage',
            name='block_2_text',
            # Shorter copy limit than block 1 (100 vs 140 chars).
            field=models.CharField(blank=True, help_text='\n    Main copy in content block/module to provide information on the\n    call-to-action.\n    ', max_length=100, null=True),
        ),
        # --- Block 3 fields (includes block_3_show) ---
        migrations.AddField(
            model_name='indexpage',
            name='block_3_background_color',
            field=models.CharField(default='218fff', help_text='6 digit CSS color code.', max_length=6),
        ),
        migrations.AddField(
            model_name='indexpage',
            name='block_3_background_image',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image'),
        ),
        migrations.AddField(
            model_name='indexpage',
            name='block_3_button_color',
            field=models.CharField(blank=True, choices=[('blue', 'Blue'), ('green', 'Green'), ('red', 'Red')], max_length=128, null=True),
        ),
        migrations.AddField(
            model_name='indexpage',
            name='block_3_button_text',
            field=models.CharField(blank=True, help_text='\n    Call-to-action text to display on the button. Use action-oriented verbs if\n    possible.\n    ', max_length=16, null=True),
        ),
        migrations.AddField(
            model_name='indexpage',
            name='block_3_button_url',
            field=models.URLField(blank=True, help_text='\n    Button will display if both url and text fields are filled in.\n    ', null=True),
        ),
        migrations.AddField(
            model_name='indexpage',
            name='block_3_button_url_new_window',
            field=models.BooleanField(default=False, help_text='Open new window for button url.'),
        ),
        migrations.AddField(
            model_name='indexpage',
            name='block_3_embed_code',
            field=models.TextField(blank=True, help_text='Raw HTML embed code for video, etc.', null=True),
        ),
        migrations.AddField(
            model_name='indexpage',
            name='block_3_show',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='indexpage',
            name='block_3_text',
            # Shortest copy limit of the three blocks (60 chars).
            field=models.CharField(blank=True, help_text='\n    Main copy in content block/module to provide information on the\n    call-to-action.\n    ', max_length=60, null=True),
        ),
    ]
| 46.587838 | 188 | 0.606962 | 808 | 6,895 | 4.986386 | 0.142327 | 0.116158 | 0.148424 | 0.174237 | 0.932986 | 0.932986 | 0.932986 | 0.932986 | 0.931745 | 0.918838 | 0 | 0.020072 | 0.270196 | 6,895 | 147 | 189 | 46.904762 | 0.780604 | 0.009862 | 0 | 0.721429 | 1 | 0.042857 | 0.300117 | 0.041178 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.021429 | 0 | 0.042857 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
60420249d2b90eb8e8fc03f5b5f21aba547065bc | 14,790 | py | Python | test/learning/test_gen_learning.py | Conengmo/snorkel | 36868e8a84de19b94e1c4b8eceaa64969a61a46b | [
"Apache-2.0"
] | 30 | 2019-08-22T19:27:59.000Z | 2022-03-13T22:03:15.000Z | test/learning/test_gen_learning.py | Conengmo/snorkel | 36868e8a84de19b94e1c4b8eceaa64969a61a46b | [
"Apache-2.0"
] | 2 | 2019-08-22T16:51:58.000Z | 2022-03-21T02:59:18.000Z | test/learning/test_gen_learning.py | Conengmo/snorkel | 36868e8a84de19b94e1c4b8eceaa64969a61a46b | [
"Apache-2.0"
] | 31 | 2019-08-22T19:28:08.000Z | 2022-03-23T12:50:49.000Z | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from builtins import *
import math
from numbskull.inference import FACTORS
from scipy import sparse
from snorkel.learning.gen_learning import GenerativeModel, DEP_EXCLUSIVE, DEP_REINFORCING, DEP_FIXING, DEP_SIMILAR
import unittest
import numpy as np
class TestGenLearning(unittest.TestCase):
    """Tests for ``GenerativeModel._compile``.

    Both tests build the same 5x3 sparse label matrix L (5 candidates,
    3 labeling functions) and compile it into the Numbskull factor-graph
    arrays: weights, variables, factors, factor-to-variable (ftv) edges,
    domain mask, and edge count. The assertions pin the exact layout of
    every array, so these tests are tightly coupled to the compilation
    order inside ``_compile``.
    """

    @classmethod
    def setUpClass(cls):
        # No shared fixtures; each test constructs its own label matrix.
        pass
    @classmethod
    def tearDownClass(cls):
        # Nothing to tear down.
        pass
    def test_compile_no_deps(self):
        """Compile with a class prior and no LF dependencies.

        Layout under test: 1 class-prior weight + (prior, accuracy) weight
        pair per LF; 5 latent candidate variables followed by 5*3 observed
        label variables; class-prior factors then interleaved LF prior /
        accuracy factors.
        """
        # Defines a label matrix
        L = sparse.lil_matrix((5, 3))
        # The first LF always says yes
        L[0, 0] = 1
        L[1, 0] = 1
        L[2, 0] = 1
        L[3, 0] = 1
        L[4, 0] = 1
        # The second LF votes differently
        L[0, 1] = 1
        L[2, 1] = -1
        L[4, 1] = 1
        # The third LF always abstains
        # Tests compilation
        gen_model = GenerativeModel(class_prior=True, lf_prior=False,
            lf_propensity=False, lf_class_propensity=False)
        gen_model._process_dependency_graph(L, ())
        m, n = L.shape
        LF_acc_prior_weights = [1.0 for _ in range(n)]
        is_fixed = [False for _ in range(n)]
        gen_model.cardinality = 2
        cardinalities = 2 * np.ones(5)
        weight, variable, factor, ftv, domain_mask, n_edges =\
            gen_model._compile(L, 0.5, 0.0, LF_acc_prior_weights, is_fixed,
                cardinalities)
        #
        # Weights
        #
        # Should now be 3 for LFs + 3 (fixed) for LF priors + 1 class prior
        self.assertEqual(len(weight), 7)
        # Weight 0 is the (learnable) class prior.
        self.assertFalse(weight[0]['isFixed'])
        self.assertEqual(weight[0]['initialValue'], 0.0)
        # The LF priors (odd slots 1, 3, 5): fixed at the 1.0 prior weight
        for i in range(1,7,2):
            self.assertTrue(weight[i]['isFixed'])
            self.assertEqual(weight[i]['initialValue'], 1.0)
        # The LF weights (even slots 2, 4, 6): learnable, start at 0
        for i in range(2,7,2):
            self.assertFalse(weight[i]['isFixed'])
            self.assertEqual(weight[i]['initialValue'], 0.0)
        #
        # Variables
        #
        # 5 latent candidate vars + 5 * 3 observed label vars
        self.assertEqual(len(variable), 20)
        for i in range(5):
            self.assertEqual(variable[i]['isEvidence'], 0)
            self.assertTrue(variable[i]['initialValue'] == 0 or variable[i]['initialValue'] == 1)
            self.assertEqual(variable[i]["dataType"], 0)
            self.assertEqual(variable[i]["cardinality"], 2)
        for i in range(5):
            for j in range(3):
                self.assertEqual(variable[5 + i * 3 + j]['isEvidence'], 1)
                # Remap label value; abstain is 0 in L, cardinality (= 2) in NS
                if L[i, j] == -1:
                    l = 0
                elif L[i, j] == 0:
                    l = 2
                elif L[i,j] == 1:
                    l = 1
                self.assertEqual(variable[5 + i * 3 + j]['initialValue'], l)
                self.assertEqual(variable[5 + i * 3 + j]["dataType"], 0)
                # Label vars have cardinality 3: negative / positive / abstain.
                self.assertEqual(variable[5 + i * 3 + j]["cardinality"], 3)
        #
        # Factors
        #
        # 5 * 3 LF acc factors + 5 * 3 LF prior factors + 5 class prior factors
        self.assertEqual(len(factor), 35)
        for i in range(5):
            self.assertEqual(factor[i]["factorFunction"], FACTORS["DP_GEN_CLASS_PRIOR"])
            self.assertEqual(factor[i]["weightId"], 0)
            self.assertEqual(factor[i]["featureValue"], 1)
            self.assertEqual(factor[i]["arity"], 1)
            self.assertEqual(factor[i]["ftv_offset"], i)
        for i in range(5):
            # 6 factors per candidate: a (prior, accuracy) pair for each LF.
            for j in range(6):
                self.assertEqual(factor[5 + i * 6 + j]["factorFunction"], FACTORS["DP_GEN_LF_ACCURACY"])
                self.assertEqual(factor[5 + i * 6 + j]["weightId"], j + 1)
                self.assertEqual(factor[5 + i * 6 + j]["featureValue"], 1)
                self.assertEqual(factor[5 + i * 6 + j]["arity"], 2)
                self.assertEqual(factor[5 + i * 6 + j]["ftv_offset"], 5 + 2 * (i * 6 + j))
        #
        # Factor to Var
        #
        # 5 class-prior edges + 2 edges * 30 binary factors
        self.assertEqual(len(ftv), 65)
        # Class prior factor - var edges
        for i in range(5):
            self.assertEqual(ftv[i]["vid"], i)
            self.assertEqual(ftv[i]["dense_equal_to"], 0)
        # LF *and LF prior* factor - var edges
        for i in range(5):
            for j in range(3):
                # Each LF has one weight factor and one prior factor here
                for k in range(2):
                    idx = 4 * (i * 3 + j) + 2 * k
                    self.assertEqual(ftv[5 + idx]["vid"], i)
                    self.assertEqual(ftv[6 + idx]["vid"], 5 + i * 3 + j)
                    self.assertEqual(ftv[5 + idx]["dense_equal_to"], 0)
                    self.assertEqual(ftv[6 + idx]["dense_equal_to"], 0)
        #
        # Domain mask
        #
        self.assertEqual(len(domain_mask), 20)
        for i in range(20):
            self.assertFalse(domain_mask[i])
        # n_edges
        self.assertEqual(n_edges, 65)
    def test_compile_with_deps(self):
        """Compile with LF propensity and all four dependency types.

        Same label matrix as above, but with SIMILAR (x2), FIXING,
        REINFORCING and EXCLUSIVE dependencies between LFs. Asserts the
        exact factor/edge layout: LF accuracy factors, then propensity,
        then one factor group per dependency, each at a fixed offset.
        """
        # Defines a label matrix
        L = sparse.lil_matrix((5, 3))
        # The first LF always says yes
        L[0, 0] = 1
        L[1, 0] = 1
        L[2, 0] = 1
        L[3, 0] = 1
        L[4, 0] = 1
        # The second LF votes differently
        L[0, 1] = 1
        L[2, 1] = -1
        L[4, 1] = 1
        # The third LF always abstains
        # Defined dependencies
        deps = []
        deps.append((0, 1, DEP_SIMILAR))
        deps.append((0, 2, DEP_SIMILAR))
        deps.append((0, 1, DEP_FIXING))
        deps.append((0, 2, DEP_REINFORCING))
        deps.append((1, 2, DEP_EXCLUSIVE))
        # Tests compilation
        gen_model = GenerativeModel(class_prior=False, lf_prior=False,
            lf_propensity=True, lf_class_propensity=False)
        gen_model._process_dependency_graph(L, deps)
        m, n = L.shape
        LF_acc_prior_weights = [1.0 for _ in range(n)]
        is_fixed = [False for _ in range(n)]
        gen_model.cardinality = 2
        cardinalities = 2 * np.ones(5)
        weight, variable, factor, ftv, domain_mask, n_edges =\
            gen_model._compile(L, 0.5, -1.0, LF_acc_prior_weights, is_fixed,
                cardinalities)
        #
        # Weights
        #
        # Should now be 3 for LFs + 3 fixed for LF priors + 3 for LF propensity
        # + 5 for deps
        self.assertEqual(len(weight), 14)
        # The LF priors
        for i in range(0,6,2):
            self.assertTrue(weight[i]['isFixed'])
            self.assertEqual(weight[i]['initialValue'], 1.0)
        # The LF weights
        for i in range(1,6,2):
            self.assertFalse(weight[i]['isFixed'])
            self.assertEqual(weight[i]['initialValue'], 0.0)
        # The dep weights (propensity + dependency weights, init_deps=0.5)
        for i in range(6, 14):
            self.assertFalse(weight[i]['isFixed'])
            self.assertEqual(weight[i]['initialValue'], 0.5)
        #
        # Variables
        #
        self.assertEqual(len(variable), 20)
        for i in range(5):
            self.assertEqual(variable[i]['isEvidence'], 0)
            self.assertTrue(variable[i]['initialValue'] == 0 or variable[i]['initialValue'] == 1)
            self.assertEqual(variable[i]["dataType"], 0)
            self.assertEqual(variable[i]["cardinality"], 2)
        for i in range(5):
            for j in range(3):
                self.assertEqual(variable[5 + i * 3 + j]['isEvidence'], 1)
                # Remap label value; abstain is 0 in L, cardinality (= 2) in NS
                if L[i, j] == -1:
                    l = 0
                elif L[i, j] == 0:
                    l = 2
                elif L[i,j] == 1:
                    l = 1
                self.assertEqual(variable[5 + i * 3 + j]['initialValue'], l)
                self.assertEqual(variable[5 + i * 3 + j]["dataType"], 0)
                self.assertEqual(variable[5 + i * 3 + j]["cardinality"], 3)
        #
        # Factors
        #
        # 30 accuracy + 15 propensity + 5 per dependency (x5 deps) = 70
        self.assertEqual(len(factor), 70)
        f_offset = 0
        ftv_offset = 0
        # LF accuracy factors (prior + accuracy pair per LF per candidate).
        for i in range(5):
            for j in range(6):
                self.assertEqual(factor[f_offset + i * 6+ j]["factorFunction"], FACTORS["DP_GEN_LF_ACCURACY"])
                self.assertEqual(factor[f_offset + i * 6 + j]["weightId"], j)
                self.assertEqual(factor[f_offset + i * 6 + j]["featureValue"], 1)
                self.assertEqual(factor[f_offset + i * 6 + j]["arity"], 2)
                self.assertEqual(factor[f_offset + i * 6 + j]["ftv_offset"], ftv_offset + 2 * (i * 6 + j))
        # LF propensity factors: unary, one per label variable.
        f_offset = 30
        ftv_offset = 60
        for i in range(5):
            for j in range(3):
                self.assertEqual(factor[f_offset + i * 3 + j]["factorFunction"], FACTORS["DP_GEN_LF_PROPENSITY"])
                self.assertEqual(factor[f_offset + i * 3 + j]["weightId"], 6 + j)
                self.assertEqual(factor[f_offset + i * 3 + j]["featureValue"], 1)
                self.assertEqual(factor[f_offset + i * 3 + j]["arity"], 1)
                self.assertEqual(factor[f_offset + i * 3 + j]["ftv_offset"], ftv_offset + (i * 3 + j))
        # SIMILAR dep (LFs 0, 1): binary factor per candidate.
        f_offset = 45
        ftv_offset = 75
        for i in range(5):
            self.assertEqual(factor[f_offset + i]["factorFunction"], FACTORS["DP_GEN_DEP_SIMILAR"])
            self.assertEqual(factor[f_offset + i]["weightId"], 9)
            self.assertEqual(factor[f_offset + i]["featureValue"], 1)
            self.assertEqual(factor[f_offset + i]["arity"], 2)
            self.assertEqual(factor[f_offset + i]["ftv_offset"], ftv_offset + 2 * i)
        # SIMILAR dep (LFs 0, 2).
        f_offset = 50
        ftv_offset = 85
        for i in range(5):
            self.assertEqual(factor[f_offset + i]["factorFunction"], FACTORS["DP_GEN_DEP_SIMILAR"])
            self.assertEqual(factor[f_offset + i]["weightId"], 10)
            self.assertEqual(factor[f_offset + i]["featureValue"], 1)
            self.assertEqual(factor[f_offset + i]["arity"], 2)
            self.assertEqual(factor[f_offset + i]["ftv_offset"], ftv_offset + 2 * i)
        # FIXING dep (LFs 0, 1): ternary — includes the latent candidate var.
        f_offset = 55
        ftv_offset = 95
        for i in range(5):
            self.assertEqual(factor[f_offset + i]["factorFunction"], FACTORS["DP_GEN_DEP_FIXING"])
            self.assertEqual(factor[f_offset + i]["weightId"], 11)
            self.assertEqual(factor[f_offset + i]["featureValue"], 1)
            self.assertEqual(factor[f_offset + i]["arity"], 3)
            self.assertEqual(factor[f_offset + i]["ftv_offset"], ftv_offset + 3 * i)
        # REINFORCING dep (LFs 0, 2): also ternary.
        f_offset = 60
        ftv_offset = 110
        for i in range(5):
            self.assertEqual(factor[f_offset + i]["factorFunction"], FACTORS["DP_GEN_DEP_REINFORCING"])
            self.assertEqual(factor[f_offset + i]["weightId"], 12)
            self.assertEqual(factor[f_offset + i]["featureValue"], 1)
            self.assertEqual(factor[f_offset + i]["arity"], 3)
            self.assertEqual(factor[f_offset + i]["ftv_offset"], ftv_offset + 3 * i)
        # EXCLUSIVE dep (LFs 1, 2): binary.
        f_offset = 65
        ftv_offset = 125
        for i in range(5):
            self.assertEqual(factor[f_offset + i]["factorFunction"], FACTORS["DP_GEN_DEP_EXCLUSIVE"])
            self.assertEqual(factor[f_offset + i]["weightId"], 13)
            self.assertEqual(factor[f_offset + i]["featureValue"], 1)
            self.assertEqual(factor[f_offset + i]["arity"], 2)
            self.assertEqual(factor[f_offset + i]["ftv_offset"], ftv_offset + 2 * i)
        #
        # Factor to Var
        #
        self.assertEqual(len(ftv), 135)
        ftv_offset = 0
        # Accuracy-factor edges: (candidate var, label var) per factor.
        for i in range(5):
            for j in range(3):
                for k in range(2):
                    self.assertEqual(ftv[ftv_offset + 4 * (i * 3 + j) + 2 * k]["vid"], i)
                    self.assertEqual(ftv[ftv_offset + 4 * (i * 3 + j) + 2 * k]["dense_equal_to"], 0)
                    self.assertEqual(ftv[ftv_offset + 4 * (i * 3 + j) + 2 * k + 1]["vid"], 5 + i * 3 + j)
                    self.assertEqual(ftv[ftv_offset + 4 * (i * 3 + j) + 2 * k + 1]["dense_equal_to"], 0)
        # Propensity-factor edges: one per label var.
        ftv_offset = 60
        for i in range(5):
            for j in range(3):
                self.assertEqual(ftv[ftv_offset + (i * 3 + j)]["vid"], 5 + i * 3 + j)
                self.assertEqual(ftv[ftv_offset + (i * 3 + j)]["dense_equal_to"], 0)
        # SIMILAR(0, 1) edges: label vars of LFs 0 and 1.
        ftv_offset = 75
        for i in range(5):
            self.assertEqual(ftv[ftv_offset + 2 * i]["vid"], 5 + i * 3)
            self.assertEqual(ftv[ftv_offset + 2 * i]["dense_equal_to"], 0)
            self.assertEqual(ftv[ftv_offset + 2 * i + 1]["vid"], 5 + i * 3 + 1)
            self.assertEqual(ftv[ftv_offset + 2 * i + 1]["dense_equal_to"], 0)
        # SIMILAR(0, 2) edges.
        ftv_offset = 85
        for i in range(5):
            self.assertEqual(ftv[ftv_offset + 2 * i]["vid"], 5 + i * 3)
            self.assertEqual(ftv[ftv_offset + 2 * i]["dense_equal_to"], 0)
            self.assertEqual(ftv[ftv_offset + 2 * i + 1]["vid"], 5 + i * 3 + 2)
            self.assertEqual(ftv[ftv_offset + 2 * i + 1]["dense_equal_to"], 0)
        # FIXING(0, 1) edges: candidate var + two label vars.
        ftv_offset = 95
        for i in range(5):
            self.assertEqual(ftv[ftv_offset + 3 * i]["vid"], i)
            self.assertEqual(ftv[ftv_offset + 3 * i]["dense_equal_to"], 0)
            self.assertEqual(ftv[ftv_offset + 3 * i + 1]["vid"], 5 + i * 3)
            self.assertEqual(ftv[ftv_offset + 3 * i + 1]["dense_equal_to"], 0)
            self.assertEqual(ftv[ftv_offset + 3 * i + 2]["vid"], 5 + i * 3 + 1)
            self.assertEqual(ftv[ftv_offset + 3 * i + 2]["dense_equal_to"], 0)
        # REINFORCING(0, 2) edges.
        ftv_offset = 110
        for i in range(5):
            self.assertEqual(ftv[ftv_offset + 3 * i]["vid"], i)
            self.assertEqual(ftv[ftv_offset + 3 * i]["dense_equal_to"], 0)
            self.assertEqual(ftv[ftv_offset + 3 * i + 1]["vid"], 5 + i * 3)
            self.assertEqual(ftv[ftv_offset + 3 * i + 1]["dense_equal_to"], 0)
            self.assertEqual(ftv[ftv_offset + 3 * i + 2]["vid"], 5 + i * 3 + 2)
            self.assertEqual(ftv[ftv_offset + 3 * i + 2]["dense_equal_to"], 0)
        # EXCLUSIVE(1, 2) edges.
        ftv_offset = 125
        for i in range(5):
            self.assertEqual(ftv[ftv_offset + 2 * i]["vid"], 5 + i * 3 + 1)
            self.assertEqual(ftv[ftv_offset + 2 * i]["dense_equal_to"], 0)
            self.assertEqual(ftv[ftv_offset + 2 * i + 1]["vid"], 5 + i * 3 + 2)
            self.assertEqual(ftv[ftv_offset + 2 * i + 1]["dense_equal_to"], 0)
        #
        # Domain mask
        #
        self.assertEqual(len(domain_mask), 20)
        for i in range(20):
            self.assertFalse(domain_mask[i])
        # n_edges
        self.assertEqual(n_edges, 135)
# Allow running this test module directly (python test_gen_learning.py)
# in addition to discovery via a test runner.
if __name__ == '__main__':
    unittest.main()
| 38.316062 | 114 | 0.534145 | 1,972 | 14,790 | 3.870183 | 0.085193 | 0.222091 | 0.123821 | 0.100891 | 0.866745 | 0.814596 | 0.798087 | 0.748821 | 0.703223 | 0.679245 | 0 | 0.044814 | 0.3286 | 14,790 | 385 | 115 | 38.415584 | 0.723766 | 0.063759 | 0 | 0.621212 | 0 | 0 | 0.088911 | 0.001595 | 0 | 0 | 0 | 0 | 0.465909 | 1 | 0.015152 | false | 0.007576 | 0.041667 | 0 | 0.060606 | 0.003788 | 0 | 0 | 0 | null | 1 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
6054a9c8381da03303d479b143eafc5387e6830a | 46,360 | py | Python | oops/team_functions.py | mtasa-typescript/mtasa-wiki-dump | edea1746850fb6c99d6155d1d7891e2cceb33a5c | [
"MIT"
] | null | null | null | oops/team_functions.py | mtasa-typescript/mtasa-wiki-dump | edea1746850fb6c99d6155d1d7891e2cceb33a5c | [
"MIT"
] | 1 | 2021-02-24T21:50:18.000Z | 2021-02-24T21:50:18.000Z | oops/team_functions.py | mtasa-typescript/mtasa-wiki-dump | edea1746850fb6c99d6155d1d7891e2cceb33a5c | [
"MIT"
] | null | null | null | # Autogenerated file. ANY CHANGES WILL BE OVERWRITTEN
from to_python.core.types import FunctionType, \
FunctionArgument, \
FunctionArgumentValues, \
FunctionReturnTypes, \
FunctionSignature, \
FunctionDoc, \
FunctionOOP, \
FunctionOOPField, \
CompoundOOPData, \
FunctionData, \
CompoundFunctionData
DUMP_PARTIAL = [
CompoundOOPData(
server=[
FunctionOOP(
description=None,
base_function_name="countPlayersInTeam",
class_name='team',
method=FunctionData(
signature=FunctionSignature(
name='countPlayers',
return_types=FunctionReturnTypes(
return_types=[
FunctionType(
names=['int'],
is_optional=False,
)
],
variable_length=False,
),
arguments=FunctionArgumentValues(
arguments=[
[
FunctionArgument(
name='theTeam',
argument_type=FunctionType(
names=['team'],
is_optional=False,
),
default_value=None,
)
]
],
variable_length=False,
),
generic_types=[
],
),
docs=FunctionDoc(
description='This function is for returning the number of players in the specified team.' ,
arguments={
"theTeam": """The team you wish to retrieve the player count of. """
},
result='returns an integer containing the number of players in the team, false if it could not be retrieved.' ,
),
url='countPlayersInTeam',
),
field=FunctionOOPField(
name='playerCount',
types=[
FunctionType(
names=['int'],
is_optional=False,
)
],
),
is_static=False,
)
],
client=[
FunctionOOP(
description=None,
base_function_name="countPlayersInTeam",
class_name='team',
method=FunctionData(
signature=FunctionSignature(
name='countPlayers',
return_types=FunctionReturnTypes(
return_types=[
FunctionType(
names=['int'],
is_optional=False,
)
],
variable_length=False,
),
arguments=FunctionArgumentValues(
arguments=[
[
FunctionArgument(
name='theTeam',
argument_type=FunctionType(
names=['team'],
is_optional=False,
),
default_value=None,
)
]
],
variable_length=False,
),
generic_types=[
],
),
docs=FunctionDoc(
description='This function is for returning the number of players in the specified team.' ,
arguments={
"theTeam": """The team you wish to retrieve the player count of. """
},
result='returns an integer containing the number of players in the team, false if it could not be retrieved.' ,
),
url='countPlayersInTeam',
),
field=FunctionOOPField(
name='playerCount',
types=[
FunctionType(
names=['int'],
is_optional=False,
)
],
),
is_static=False,
)
],
),
CompoundOOPData(
server=[
FunctionOOP(
description=None,
base_function_name="createTeam",
class_name='Team',
method=FunctionData(
signature=FunctionSignature(
name='create',
return_types=FunctionReturnTypes(
return_types=[
FunctionType(
names=['team'],
is_optional=False,
)
],
variable_length=False,
),
arguments=FunctionArgumentValues(
arguments=[
[
FunctionArgument(
name='teamName',
argument_type=FunctionType(
names=['string'],
is_optional=False,
),
default_value=None,
)
],
[
FunctionArgument(
name='colorR',
argument_type=FunctionType(
names=['int'],
is_optional=True,
),
default_value='255',
)
],
[
FunctionArgument(
name='colorG',
argument_type=FunctionType(
names=['int'],
is_optional=True,
),
default_value='255',
)
],
[
FunctionArgument(
name='colorB',
argument_type=FunctionType(
names=['int'],
is_optional=True,
),
default_value='255',
)
]
],
variable_length=False,
),
generic_types=[
],
),
docs=FunctionDoc(
description='This function is for creating a new team, which can be used to group players. Players will not join the team until they are respawned.' ,
arguments={
"teamName": """A string representing the teams name. """,
"colorR": """An integer representing the red color value. """,
"colorG": """An integer representing the green color value. """,
"colorB": """An integer representing the blue color value. """
},
result='returns a team element if it was successfully created, false if invalid arguments are passed or a team with that name already exists.' ,
),
url='createTeam',
),
field=None,
is_static=True,
)
],
client=[
],
),
CompoundOOPData(
server=[
FunctionOOP(
description=None,
base_function_name="getPlayersInTeam",
class_name='team',
method=FunctionData(
signature=FunctionSignature(
name='getPlayers',
return_types=FunctionReturnTypes(
return_types=[
FunctionType(
names=['table'],
is_optional=False,
)
],
variable_length=False,
),
arguments=FunctionArgumentValues(
arguments=[
[
FunctionArgument(
name='theTeam',
argument_type=FunctionType(
names=['team'],
is_optional=False,
),
default_value=None,
)
]
],
variable_length=False,
),
generic_types=[
],
),
docs=FunctionDoc(
description='This function retrieves all the players of the specified team.' ,
arguments={
"theTeam": """The team you wish to retrieve all the players from. """
},
result='returns a table of all the players in the team, or an empty one if there are none else false if invalid arguments are passed.' ,
),
url='getPlayersInTeam',
),
field=FunctionOOPField(
name='players',
types=[
FunctionType(
names=['table'],
is_optional=False,
)
],
),
is_static=False,
)
],
client=[
FunctionOOP(
description=None,
base_function_name="getPlayersInTeam",
class_name='team',
method=FunctionData(
signature=FunctionSignature(
name='getPlayers',
return_types=FunctionReturnTypes(
return_types=[
FunctionType(
names=['table'],
is_optional=False,
)
],
variable_length=False,
),
arguments=FunctionArgumentValues(
arguments=[
[
FunctionArgument(
name='theTeam',
argument_type=FunctionType(
names=['team'],
is_optional=False,
),
default_value=None,
)
]
],
variable_length=False,
),
generic_types=[
],
),
docs=FunctionDoc(
description='This function retrieves all the players of the specified team.' ,
arguments={
"theTeam": """The team you wish to retrieve all the players from. """
},
result='returns a table of all the players in the team, or an empty one if there are none else false if invalid arguments are passed.' ,
),
url='getPlayersInTeam',
),
field=FunctionOOPField(
name='players',
types=[
FunctionType(
names=['table'],
is_optional=False,
)
],
),
is_static=False,
)
],
),
CompoundOOPData(
server=[
FunctionOOP(
description=None,
base_function_name="getPlayerTeam",
class_name='player',
method=FunctionData(
signature=FunctionSignature(
name='getTeam',
return_types=FunctionReturnTypes(
return_types=[
FunctionType(
names=['team'],
is_optional=False,
)
],
variable_length=False,
),
arguments=FunctionArgumentValues(
arguments=[
[
FunctionArgument(
name='thePlayer',
argument_type=FunctionType(
names=['player'],
is_optional=False,
),
default_value=None,
)
]
],
variable_length=False,
),
generic_types=[
],
),
docs=FunctionDoc(
description='This function gets the current team a player is on.' ,
arguments={
"thePlayer": """: The player whose team you want to find out. """
},
result='returns a team element representing the team the player is on, false if the player is not part of a team.' ,
),
url='getPlayerTeam',
),
field=FunctionOOPField(
name='team',
types=[
FunctionType(
names=['team'],
is_optional=False,
)
],
),
is_static=False,
)
],
client=[
FunctionOOP(
description=None,
base_function_name="getPlayerTeam",
class_name='player',
method=FunctionData(
signature=FunctionSignature(
name='getTeam',
return_types=FunctionReturnTypes(
return_types=[
FunctionType(
names=['team'],
is_optional=False,
)
],
variable_length=False,
),
arguments=FunctionArgumentValues(
arguments=[
[
FunctionArgument(
name='thePlayer',
argument_type=FunctionType(
names=['player'],
is_optional=False,
),
default_value=None,
)
]
],
variable_length=False,
),
generic_types=[
],
),
docs=FunctionDoc(
description='This function gets the current team a player is on.' ,
arguments={
"thePlayer": """: The player whose team you want to find out. """
},
result='returns a team element representing the team the player is on, false if the player is not part of a team.' ,
),
url='getPlayerTeam',
),
field=FunctionOOPField(
name='team',
types=[
FunctionType(
names=['team'],
is_optional=False,
)
],
),
is_static=False,
)
],
),
CompoundOOPData(
server=[
FunctionOOP(
description=None,
base_function_name="getTeamColor",
class_name='team',
method=FunctionData(
signature=FunctionSignature(
name='getColor',
return_types=FunctionReturnTypes(
return_types=[
FunctionType(
names=['int'],
is_optional=False,
),
FunctionType(
names=['int'],
is_optional=False,
),
FunctionType(
names=['int'],
is_optional=False,
)
],
variable_length=False,
),
arguments=FunctionArgumentValues(
arguments=[
[
FunctionArgument(
name='theTeam',
argument_type=FunctionType(
names=['team'],
is_optional=False,
),
default_value=None,
)
]
],
variable_length=False,
),
generic_types=[
],
),
docs=FunctionDoc(
description='This function retrieves the color of a team.' ,
arguments={
"theTeam": """The team you want to get the color of. """
},
result='returns 3 integers representing the red, green, and blue color components of the team if its valid, false otherwise.' ,
),
url='getTeamColor',
),
field=None,
is_static=False,
)
],
client=[
FunctionOOP(
description=None,
base_function_name="getTeamColor",
class_name='team',
method=FunctionData(
signature=FunctionSignature(
name='getColor',
return_types=FunctionReturnTypes(
return_types=[
FunctionType(
names=['int'],
is_optional=False,
),
FunctionType(
names=['int'],
is_optional=False,
),
FunctionType(
names=['int'],
is_optional=False,
)
],
variable_length=False,
),
arguments=FunctionArgumentValues(
arguments=[
[
FunctionArgument(
name='theTeam',
argument_type=FunctionType(
names=['team'],
is_optional=False,
),
default_value=None,
)
]
],
variable_length=False,
),
generic_types=[
],
),
docs=FunctionDoc(
description='This function retrieves the color of a team.' ,
arguments={
"theTeam": """The team you want to get the color of. """
},
result='returns 3 integers representing the red, green, and blue color components of the team if its valid, false otherwise.' ,
),
url='getTeamColor',
),
field=None,
is_static=False,
)
],
),
CompoundOOPData(
server=[
FunctionOOP(
description=None,
base_function_name="getTeamFriendlyFire",
class_name='team',
method=FunctionData(
signature=FunctionSignature(
name='getFriendlyFire',
return_types=FunctionReturnTypes(
return_types=[
FunctionType(
names=['bool'],
is_optional=False,
)
],
variable_length=False,
),
arguments=FunctionArgumentValues(
arguments=[
[
FunctionArgument(
name='theTeam',
argument_type=FunctionType(
names=['team'],
is_optional=False,
),
default_value=None,
)
]
],
variable_length=False,
),
generic_types=[
],
),
docs=FunctionDoc(
description='This function tells you if friendly fire is turned on for the specified team.' ,
arguments={
"theTeam": """The team object that will be checked """
},
result='returns true if friendly fire is on for the specified team, false if it is turned off or if invalid arguments are specified.' ,
),
url='getTeamFriendlyFire',
),
field=FunctionOOPField(
name='friendlyFire',
types=[
FunctionType(
names=['bool'],
is_optional=False,
)
],
),
is_static=False,
)
],
client=[
FunctionOOP(
description=None,
base_function_name="getTeamFriendlyFire",
class_name='team',
method=FunctionData(
signature=FunctionSignature(
name='getFriendlyFire',
return_types=FunctionReturnTypes(
return_types=[
FunctionType(
names=['bool'],
is_optional=False,
)
],
variable_length=False,
),
arguments=FunctionArgumentValues(
arguments=[
[
FunctionArgument(
name='theTeam',
argument_type=FunctionType(
names=['team'],
is_optional=False,
),
default_value=None,
)
]
],
variable_length=False,
),
generic_types=[
],
),
docs=FunctionDoc(
description='This function tells you if friendly fire is turned on for the specified team.' ,
arguments={
"theTeam": """The team object that will be checked """
},
result='returns true if friendly fire is on for the specified team, false if it is turned off or if invalid arguments are specified.' ,
),
url='getTeamFriendlyFire',
),
field=FunctionOOPField(
name='friendlyFire',
types=[
FunctionType(
names=['bool'],
is_optional=False,
)
],
),
is_static=False,
)
],
),
CompoundOOPData(
server=[
FunctionOOP(
description=None,
base_function_name="getTeamFromName",
class_name='Team',
method=FunctionData(
signature=FunctionSignature(
name='getFromName',
return_types=FunctionReturnTypes(
return_types=[
FunctionType(
names=['team'],
is_optional=False,
)
],
variable_length=False,
),
arguments=FunctionArgumentValues(
arguments=[
[
FunctionArgument(
name='teamName',
argument_type=FunctionType(
names=['string'],
is_optional=False,
),
default_value=None,
)
]
],
variable_length=False,
),
generic_types=[
],
),
docs=FunctionDoc(
description='This function finds a team element using the provided team name.' ,
arguments={
"teamName": """A string determining the name of the team you wish to find. """
},
result='returns the team element if it was found, false otherwise.' ,
),
url='getTeamFromName',
),
field=None,
is_static=True,
)
],
client=[
FunctionOOP(
description=None,
base_function_name="getTeamFromName",
class_name='Team',
method=FunctionData(
signature=FunctionSignature(
name='getFromName',
return_types=FunctionReturnTypes(
return_types=[
FunctionType(
names=['team'],
is_optional=False,
)
],
variable_length=False,
),
arguments=FunctionArgumentValues(
arguments=[
[
FunctionArgument(
name='teamName',
argument_type=FunctionType(
names=['string'],
is_optional=False,
),
default_value=None,
)
]
],
variable_length=False,
),
generic_types=[
],
),
docs=FunctionDoc(
description='This function finds a team element using the provided team name.' ,
arguments={
"teamName": """A string determining the name of the team you wish to find. """
},
result='returns the team element if it was found, false otherwise.' ,
),
url='getTeamFromName',
),
field=None,
is_static=True,
)
],
),
CompoundOOPData(
server=[
FunctionOOP(
description=None,
base_function_name="getTeamName",
class_name='team',
method=FunctionData(
signature=FunctionSignature(
name='getName',
return_types=FunctionReturnTypes(
return_types=[
FunctionType(
names=['string'],
is_optional=False,
)
],
variable_length=False,
),
arguments=FunctionArgumentValues(
arguments=[
[
FunctionArgument(
name='theTeam',
argument_type=FunctionType(
names=['team'],
is_optional=False,
),
default_value=None,
)
]
],
variable_length=False,
),
generic_types=[
],
),
docs=FunctionDoc(
description='This function gets the team name of a team object.' ,
arguments={
"theTeam": """The team you want to retrieve the name of. """
},
result='returns a string representing the teams name if the team object was valid, false otherwise.' ,
),
url='getTeamName',
),
field=FunctionOOPField(
name='name',
types=[
FunctionType(
names=['string'],
is_optional=False,
)
],
),
is_static=False,
)
],
client=[
FunctionOOP(
description=None,
base_function_name="getTeamName",
class_name='team',
method=FunctionData(
signature=FunctionSignature(
name='getName',
return_types=FunctionReturnTypes(
return_types=[
FunctionType(
names=['string'],
is_optional=False,
)
],
variable_length=False,
),
arguments=FunctionArgumentValues(
arguments=[
[
FunctionArgument(
name='theTeam',
argument_type=FunctionType(
names=['team'],
is_optional=False,
),
default_value=None,
)
]
],
variable_length=False,
),
generic_types=[
],
),
docs=FunctionDoc(
description='This function gets the team name of a team object.' ,
arguments={
"theTeam": """The team you want to retrieve the name of. """
},
result='returns a string representing the teams name if the team object was valid, false otherwise.' ,
),
url='getTeamName',
),
field=FunctionOOPField(
name='name',
types=[
FunctionType(
names=['string'],
is_optional=False,
)
],
),
is_static=False,
)
],
),
CompoundOOPData(
server=[
FunctionOOP(
description=None,
base_function_name="setPlayerTeam",
class_name='player',
method=FunctionData(
signature=FunctionSignature(
name='setTeam',
return_types=FunctionReturnTypes(
return_types=[
FunctionType(
names=['bool'],
is_optional=False,
)
],
variable_length=False,
),
arguments=FunctionArgumentValues(
arguments=[
[
FunctionArgument(
name='thePlayer',
argument_type=FunctionType(
names=['player'],
is_optional=False,
),
default_value=None,
)
],
[
FunctionArgument(
name='theTeam',
argument_type=FunctionType(
names=['team'],
is_optional=False,
),
default_value=None,
)
]
],
variable_length=False,
),
generic_types=[
],
),
docs=FunctionDoc(
description='This function adds a player to an existing team. The player will automatically be removed from his current team if hes on one.' ,
arguments={
"thePlayer": """The player you wish to add to a team. """,
"theTeam": """The team you want to add the player to, or nil if you wish to unassign a player from his team. """
},
result='returns true if the player was successfully added to the specified team or removed from his previous one, false otherwise.' ,
),
url='setPlayerTeam',
),
field=FunctionOOPField(
name='team',
types=[
FunctionType(
names=['bool'],
is_optional=False,
)
],
),
is_static=False,
)
],
client=[
],
),
CompoundOOPData(
server=[
FunctionOOP(
description=None,
base_function_name="setTeamColor",
class_name='team',
method=FunctionData(
signature=FunctionSignature(
name='setColor',
return_types=FunctionReturnTypes(
return_types=[
FunctionType(
names=['bool'],
is_optional=False,
)
],
variable_length=False,
),
arguments=FunctionArgumentValues(
arguments=[
[
FunctionArgument(
name='theTeam',
argument_type=FunctionType(
names=['team'],
is_optional=False,
),
default_value=None,
)
],
[
FunctionArgument(
name='colorR',
argument_type=FunctionType(
names=['int'],
is_optional=False,
),
default_value=None,
)
],
[
FunctionArgument(
name='colorG',
argument_type=FunctionType(
names=['int'],
is_optional=False,
),
default_value=None,
)
],
[
FunctionArgument(
name='colorB',
argument_type=FunctionType(
names=['int'],
is_optional=False,
),
default_value=None,
)
]
],
variable_length=False,
),
generic_types=[
],
),
docs=FunctionDoc(
description='This function is for setting the color of a specified team. This color is shown, for example, in the team players nametags.' ,
arguments={
"theTeam": """The team you want to change the color of. """,
"colorR": """An integer representing the red color value, from 0 to 255. """,
"colorG": """An integer representing the green color value, from 0 to 255. """,
"colorB": """An integer representing the blue color value, from 0 to 255. """
},
result='returns true if the team is valid and the color is different, otherwise false.' ,
),
url='setTeamColor',
),
field=None,
is_static=False,
)
],
client=[
],
),
CompoundOOPData(
server=[
FunctionOOP(
description=None,
base_function_name="setTeamFriendlyFire",
class_name='team',
method=FunctionData(
signature=FunctionSignature(
name='setFriendlyFire',
return_types=FunctionReturnTypes(
return_types=[
FunctionType(
names=['bool'],
is_optional=False,
)
],
variable_length=False,
),
arguments=FunctionArgumentValues(
arguments=[
[
FunctionArgument(
name='theTeam',
argument_type=FunctionType(
names=['team'],
is_optional=False,
),
default_value=None,
)
],
[
FunctionArgument(
name='friendlyFire',
argument_type=FunctionType(
names=['bool'],
is_optional=False,
),
default_value=None,
)
]
],
variable_length=False,
),
generic_types=[
],
),
docs=FunctionDoc(
description='This function sets the friendly fire value for the specified team.' ,
arguments={
"theTeam": """The team that will have friendly fire set """,
"friendlyFire": """A boolean denoting whether the players from the same team can kill each other (true) or whether the players cant kill each other (false). """
},
result='returns true if the friendly fire value is set for the specified team, false if the friendly fire value cant be set for the specified team or if invalid arguments are specified.' ,
),
url='setTeamFriendlyFire',
),
field=FunctionOOPField(
name='friendlyFire',
types=[
FunctionType(
names=['bool'],
is_optional=False,
)
],
),
is_static=False,
)
],
client=[
],
),
CompoundOOPData(
server=[
FunctionOOP(
description=None,
base_function_name="setTeamName",
class_name='team',
method=FunctionData(
signature=FunctionSignature(
name='setName',
return_types=FunctionReturnTypes(
return_types=[
FunctionType(
names=['bool'],
is_optional=False,
)
],
variable_length=False,
),
arguments=FunctionArgumentValues(
arguments=[
[
FunctionArgument(
name='theTeam',
argument_type=FunctionType(
names=['team'],
is_optional=False,
),
default_value=None,
)
],
[
FunctionArgument(
name='newName',
argument_type=FunctionType(
names=['string'],
is_optional=False,
),
default_value=None,
)
]
],
variable_length=False,
),
generic_types=[
],
),
docs=FunctionDoc(
description='This function is used to set a teams name.' ,
arguments={
"theTeam": """The team you want to change the name of. """,
"newName": """A string representing the name you want the team to be called. """
},
result='returns true if the team was valid and the name was changed, false otherwise.' ,
),
url='setTeamName',
),
field=FunctionOOPField(
name='name',
types=[
FunctionType(
names=['bool'],
is_optional=False,
)
],
),
is_static=False,
)
],
client=[
],
)
]
| 38.473029 | 204 | 0.336713 | 2,464 | 46,360 | 6.216315 | 0.085633 | 0.071032 | 0.059738 | 0.053013 | 0.912581 | 0.903571 | 0.887576 | 0.881112 | 0.83835 | 0.831233 | 0 | 0.001235 | 0.598274 | 46,360 | 1,204 | 205 | 38.504983 | 0.821198 | 0.0011 | 0 | 0.83107 | 1 | 0.013582 | 0.138791 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.002547 | 0.000849 | 0 | 0.000849 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
6073147f5a5ee5f10cbba7b8856eddfbb866c839 | 32,334 | py | Python | sdk/python/pulumi_auth0/connection.py | kevinschoonover/pulumi-auth0 | 18a1ae8fde65291d9e49d6bbc9bb6a5b0eb5dd8a | [
"ECL-2.0",
"Apache-2.0"
] | 15 | 2020-05-19T13:46:53.000Z | 2022-02-24T05:09:57.000Z | sdk/python/pulumi_auth0/connection.py | kevinschoonover/pulumi-auth0 | 18a1ae8fde65291d9e49d6bbc9bb6a5b0eb5dd8a | [
"ECL-2.0",
"Apache-2.0"
] | 71 | 2020-05-18T22:56:21.000Z | 2022-03-31T15:19:49.000Z | sdk/python/pulumi_auth0/connection.py | kevinschoonover/pulumi-auth0 | 18a1ae8fde65291d9e49d6bbc9bb6a5b0eb5dd8a | [
"ECL-2.0",
"Apache-2.0"
] | 2 | 2021-10-30T10:06:00.000Z | 2022-02-26T02:39:40.000Z | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
from . import outputs
from ._inputs import *
__all__ = ['ConnectionArgs', 'Connection']
@pulumi.input_type
class ConnectionArgs:
    def __init__(__self__, *,
                 strategy: pulumi.Input[str],
                 display_name: Optional[pulumi.Input[str]] = None,
                 enabled_clients: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 is_domain_connection: Optional[pulumi.Input[bool]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 options: Optional[pulumi.Input['ConnectionOptionsArgs']] = None,
                 realms: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 strategy_version: Optional[pulumi.Input[str]] = None,
                 validation: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None):
        """
        The set of arguments for constructing a Connection resource.

        :param pulumi.Input[str] strategy: Type of the connection, which indicates the identity provider. Options include `ad`, `adfs`, `amazon`, `aol`, `apple`, `auth0`, `auth0-adldap`, `auth0-oidc`, `baidu`, `bitbucket`, `bitly`, `box`, `custom`, `daccount`, `dropbox`, `dwolla`, `email`, `evernote`, `evernote-sandbox`, `exact`, `facebook`, `fitbit`, `flickr`, `github`, `google-apps`, `google-oauth2`, `guardian`, `instagram`, `ip`, `line`, `linkedin`, `miicard`, `oauth1`, `oauth2`, `office365`, `oidc`, `paypal`, `paypal-sandbox`, `pingfederate`, `planningcenter`, `renren`, `salesforce`, `salesforce-community`, `salesforce-sandbox` `samlp`, `sharepoint`, `shopify`, `sms`, `soundcloud`, `thecity`, `thecity-sandbox`, `thirtysevensignals`, `twitter`, `untappd`, `vkontakte`, `waad`, `weibo`, `windowslive`, `wordpress`, `yahoo`, `yammer`, `yandex`.
        :param pulumi.Input[str] display_name: Name shown on the login screen.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] enabled_clients: IDs of the clients for which this connection is enabled; when omitted, no clients are enabled.
        :param pulumi.Input[bool] is_domain_connection: Whether the connection is domain level.
        :param pulumi.Input[str] name: Name of the connection.
        :param pulumi.Input['ConnectionOptionsArgs'] options: Configuration settings for connection options. For details, see Options.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] realms: Realms (i.e., email domains) for which the connection is used; when omitted, the connection name is added as the realm.
        :param pulumi.Input[str] strategy_version: Version 1 is deprecated, use version 2.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] validation: Validation of the minimum and maximum values allowed for a user to have as username. For details, see Validation.
        """
        # `strategy` is the only required argument.
        pulumi.set(__self__, "strategy", strategy)
        # Forward each optional argument only when it was actually supplied,
        # preserving the original assignment order.
        for key, arg in (
            ("display_name", display_name),
            ("enabled_clients", enabled_clients),
            ("is_domain_connection", is_domain_connection),
            ("name", name),
            ("options", options),
            ("realms", realms),
            ("strategy_version", strategy_version),
            ("validation", validation),
        ):
            if arg is not None:
                pulumi.set(__self__, key, arg)

    @property
    @pulumi.getter
    def strategy(self) -> pulumi.Input[str]:
        """Type of the connection, which indicates the identity provider. Options include `ad`, `adfs`, `amazon`, `aol`, `apple`, `auth0`, `auth0-adldap`, `auth0-oidc`, `baidu`, `bitbucket`, `bitly`, `box`, `custom`, `daccount`, `dropbox`, `dwolla`, `email`, `evernote`, `evernote-sandbox`, `exact`, `facebook`, `fitbit`, `flickr`, `github`, `google-apps`, `google-oauth2`, `guardian`, `instagram`, `ip`, `line`, `linkedin`, `miicard`, `oauth1`, `oauth2`, `office365`, `oidc`, `paypal`, `paypal-sandbox`, `pingfederate`, `planningcenter`, `renren`, `salesforce`, `salesforce-community`, `salesforce-sandbox` `samlp`, `sharepoint`, `shopify`, `sms`, `soundcloud`, `thecity`, `thecity-sandbox`, `thirtysevensignals`, `twitter`, `untappd`, `vkontakte`, `waad`, `weibo`, `windowslive`, `wordpress`, `yahoo`, `yammer`, `yandex`."""
        return pulumi.get(self, "strategy")

    @strategy.setter
    def strategy(self, value: pulumi.Input[str]):
        pulumi.set(self, "strategy", value)

    @property
    @pulumi.getter(name="displayName")
    def display_name(self) -> Optional[pulumi.Input[str]]:
        """Name shown on the login screen."""
        return pulumi.get(self, "display_name")

    @display_name.setter
    def display_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "display_name", value)

    @property
    @pulumi.getter(name="enabledClients")
    def enabled_clients(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """IDs of the clients for which this connection is enabled; when omitted, no clients are enabled."""
        return pulumi.get(self, "enabled_clients")

    @enabled_clients.setter
    def enabled_clients(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "enabled_clients", value)

    @property
    @pulumi.getter(name="isDomainConnection")
    def is_domain_connection(self) -> Optional[pulumi.Input[bool]]:
        """Whether the connection is domain level."""
        return pulumi.get(self, "is_domain_connection")

    @is_domain_connection.setter
    def is_domain_connection(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "is_domain_connection", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """Name of the connection."""
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def options(self) -> Optional[pulumi.Input['ConnectionOptionsArgs']]:
        """Configuration settings for connection options. For details, see Options."""
        return pulumi.get(self, "options")

    @options.setter
    def options(self, value: Optional[pulumi.Input['ConnectionOptionsArgs']]):
        pulumi.set(self, "options", value)

    @property
    @pulumi.getter
    def realms(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """Realms (i.e., email domains) for which the connection is used; when omitted, the connection name is added as the realm."""
        return pulumi.get(self, "realms")

    @realms.setter
    def realms(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "realms", value)

    @property
    @pulumi.getter(name="strategyVersion")
    def strategy_version(self) -> Optional[pulumi.Input[str]]:
        """Version 1 is deprecated, use version 2."""
        return pulumi.get(self, "strategy_version")

    @strategy_version.setter
    def strategy_version(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "strategy_version", value)

    @property
    @pulumi.getter
    def validation(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
        """Validation of the minimum and maximum values allowed for a user to have as username. For details, see Validation."""
        return pulumi.get(self, "validation")

    @validation.setter
    def validation(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
        pulumi.set(self, "validation", value)
@pulumi.input_type
class _ConnectionState:
def __init__(__self__, *,
display_name: Optional[pulumi.Input[str]] = None,
enabled_clients: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
is_domain_connection: Optional[pulumi.Input[bool]] = None,
name: Optional[pulumi.Input[str]] = None,
options: Optional[pulumi.Input['ConnectionOptionsArgs']] = None,
realms: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
strategy: Optional[pulumi.Input[str]] = None,
strategy_version: Optional[pulumi.Input[str]] = None,
validation: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None):
"""
Input properties used for looking up and filtering Connection resources.
:param pulumi.Input[str] display_name: Name used in login screen
:param pulumi.Input[Sequence[pulumi.Input[str]]] enabled_clients: IDs of the clients for which the connection is enabled. If not specified, no clients are enabled.
:param pulumi.Input[bool] is_domain_connection: Indicates whether or not the connection is domain level.
:param pulumi.Input[str] name: Name of the connection.
:param pulumi.Input['ConnectionOptionsArgs'] options: Configuration settings for connection options. For details, see Options.
:param pulumi.Input[Sequence[pulumi.Input[str]]] realms: Defines the realms for which the connection will be used (i.e., email domains). If not specified, the connection name is added as the realm.
:param pulumi.Input[str] strategy: Type of the connection, which indicates the identity provider. Options include `ad`, `adfs`, `amazon`, `aol`, `apple`, `auth0`, `auth0-adldap`, `auth0-oidc`, `baidu`, `bitbucket`, `bitly`, `box`, `custom`, `daccount`, `dropbox`, `dwolla`, `email`, `evernote`, `evernote-sandbox`, `exact`, `facebook`, `fitbit`, `flickr`, `github`, `google-apps`, `google-oauth2`, `guardian`, `instagram`, `ip`, `line`, `linkedin`, `miicard`, `oauth1`, `oauth2`, `office365`, `oidc`, `paypal`, `paypal-sandbox`, `pingfederate`, `planningcenter`, `renren`, `salesforce`, `salesforce-community`, `salesforce-sandbox` `samlp`, `sharepoint`, `shopify`, `sms`, `soundcloud`, `thecity`, `thecity-sandbox`, `thirtysevensignals`, `twitter`, `untappd`, `vkontakte`, `waad`, `weibo`, `windowslive`, `wordpress`, `yahoo`, `yammer`, `yandex`.
:param pulumi.Input[str] strategy_version: Version 1 is deprecated, use version 2.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] validation: Validation of the minimum and maximum values allowed for a user to have as username. For details, see Validation.
"""
if display_name is not None:
pulumi.set(__self__, "display_name", display_name)
if enabled_clients is not None:
pulumi.set(__self__, "enabled_clients", enabled_clients)
if is_domain_connection is not None:
pulumi.set(__self__, "is_domain_connection", is_domain_connection)
if name is not None:
pulumi.set(__self__, "name", name)
if options is not None:
pulumi.set(__self__, "options", options)
if realms is not None:
pulumi.set(__self__, "realms", realms)
if strategy is not None:
pulumi.set(__self__, "strategy", strategy)
if strategy_version is not None:
pulumi.set(__self__, "strategy_version", strategy_version)
if validation is not None:
pulumi.set(__self__, "validation", validation)
@property
@pulumi.getter(name="displayName")
def display_name(self) -> Optional[pulumi.Input[str]]:
"""
Name used in login screen
"""
return pulumi.get(self, "display_name")
@display_name.setter
def display_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "display_name", value)
@property
@pulumi.getter(name="enabledClients")
def enabled_clients(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
IDs of the clients for which the connection is enabled. If not specified, no clients are enabled.
"""
return pulumi.get(self, "enabled_clients")
@enabled_clients.setter
def enabled_clients(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "enabled_clients", value)
@property
@pulumi.getter(name="isDomainConnection")
def is_domain_connection(self) -> Optional[pulumi.Input[bool]]:
"""
Indicates whether or not the connection is domain level.
"""
return pulumi.get(self, "is_domain_connection")
@is_domain_connection.setter
def is_domain_connection(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "is_domain_connection", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Name of the connection.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def options(self) -> Optional[pulumi.Input['ConnectionOptionsArgs']]:
"""
Configuration settings for connection options. For details, see Options.
"""
return pulumi.get(self, "options")
@options.setter
def options(self, value: Optional[pulumi.Input['ConnectionOptionsArgs']]):
pulumi.set(self, "options", value)
@property
@pulumi.getter
def realms(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
Defines the realms for which the connection will be used (i.e., email domains). If not specified, the connection name is added as the realm.
"""
return pulumi.get(self, "realms")
@realms.setter
def realms(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "realms", value)
@property
@pulumi.getter
def strategy(self) -> Optional[pulumi.Input[str]]:
"""
Type of the connection, which indicates the identity provider. Options include `ad`, `adfs`, `amazon`, `aol`, `apple`, `auth0`, `auth0-adldap`, `auth0-oidc`, `baidu`, `bitbucket`, `bitly`, `box`, `custom`, `daccount`, `dropbox`, `dwolla`, `email`, `evernote`, `evernote-sandbox`, `exact`, `facebook`, `fitbit`, `flickr`, `github`, `google-apps`, `google-oauth2`, `guardian`, `instagram`, `ip`, `line`, `linkedin`, `miicard`, `oauth1`, `oauth2`, `office365`, `oidc`, `paypal`, `paypal-sandbox`, `pingfederate`, `planningcenter`, `renren`, `salesforce`, `salesforce-community`, `salesforce-sandbox` `samlp`, `sharepoint`, `shopify`, `sms`, `soundcloud`, `thecity`, `thecity-sandbox`, `thirtysevensignals`, `twitter`, `untappd`, `vkontakte`, `waad`, `weibo`, `windowslive`, `wordpress`, `yahoo`, `yammer`, `yandex`.
"""
return pulumi.get(self, "strategy")
@strategy.setter
def strategy(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "strategy", value)
    @property
    @pulumi.getter(name="strategyVersion")
    def strategy_version(self) -> Optional[pulumi.Input[str]]:
        """
        Version 1 is deprecated, use version 2.
        """
        return pulumi.get(self, "strategy_version")

    @strategy_version.setter
    def strategy_version(self, value: Optional[pulumi.Input[str]]):
        # Setter counterpart of the `strategy_version` getter above.
        pulumi.set(self, "strategy_version", value)
    @property
    @pulumi.getter
    def validation(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
        """
        Validation of the minimum and maximum values allowed for a user to have as username. For details, see Validation.
        """
        return pulumi.get(self, "validation")

    @validation.setter
    def validation(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
        # Setter counterpart of the `validation` getter above.
        pulumi.set(self, "validation", value)
class Connection(pulumi.CustomResource):
    # Pulumi custom resource for the provider type 'auth0:index/connection:Connection'.
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 display_name: Optional[pulumi.Input[str]] = None,
                 enabled_clients: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 is_domain_connection: Optional[pulumi.Input[bool]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 options: Optional[pulumi.Input[pulumi.InputType['ConnectionOptionsArgs']]] = None,
                 realms: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 strategy: Optional[pulumi.Input[str]] = None,
                 strategy_version: Optional[pulumi.Input[str]] = None,
                 validation: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 __props__=None):
        """
        With Auth0, you can define sources of users, otherwise known as connections, which may include identity providers (such as Google or LinkedIn), databases, or passwordless authentication methods. This resource allows you to configure and manage connections to be used with your clients and users.

        ## Example Usage

        ```python
        import pulumi
        import pulumi_auth0 as auth0

        my_connection = auth0.Connection("myConnection",
            options=auth0.ConnectionOptionsArgs(
                brute_force_protection=True,
                configuration={
                    "bar": "baz",
                    "foo": "bar",
                },
                custom_scripts={
                    "getUser": \"\"\"function getByEmail (email, callback) {
            return callback(new Error("Whoops!"))
        }
        \"\"\",
                },
                enabled_database_customization=True,
                password_histories=[auth0.ConnectionOptionsPasswordHistoryArgs(
                    enable=True,
                    size=3,
                )],
                password_policy="excellent",
            ),
            strategy="auth0")
        ```

        > The Auth0 dashboard displays only one connection per social provider. Although the Auth0 Management API allows the creation of multiple connections per strategy, the additional connections may not be visible in the Auth0 dashboard.

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] display_name: Name used in login screen
        :param pulumi.Input[Sequence[pulumi.Input[str]]] enabled_clients: IDs of the clients for which the connection is enabled. If not specified, no clients are enabled.
        :param pulumi.Input[bool] is_domain_connection: Indicates whether or not the connection is domain level.
        :param pulumi.Input[str] name: Name of the connection.
        :param pulumi.Input[pulumi.InputType['ConnectionOptionsArgs']] options: Configuration settings for connection options. For details, see Options.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] realms: Defines the realms for which the connection will be used (i.e., email domains). If not specified, the connection name is added as the realm.
        :param pulumi.Input[str] strategy: Type of the connection, which indicates the identity provider. Options include `ad`, `adfs`, `amazon`, `aol`, `apple`, `auth0`, `auth0-adldap`, `auth0-oidc`, `baidu`, `bitbucket`, `bitly`, `box`, `custom`, `daccount`, `dropbox`, `dwolla`, `email`, `evernote`, `evernote-sandbox`, `exact`, `facebook`, `fitbit`, `flickr`, `github`, `google-apps`, `google-oauth2`, `guardian`, `instagram`, `ip`, `line`, `linkedin`, `miicard`, `oauth1`, `oauth2`, `office365`, `oidc`, `paypal`, `paypal-sandbox`, `pingfederate`, `planningcenter`, `renren`, `salesforce`, `salesforce-community`, `salesforce-sandbox`, `samlp`, `sharepoint`, `shopify`, `sms`, `soundcloud`, `thecity`, `thecity-sandbox`, `thirtysevensignals`, `twitter`, `untappd`, `vkontakte`, `waad`, `weibo`, `windowslive`, `wordpress`, `yahoo`, `yammer`, `yandex`.
        :param pulumi.Input[str] strategy_version: Version 1 is deprecated, use version 2.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] validation: Validation of the minimum and maximum values allowed for a user to have as username. For details, see Validation.
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: ConnectionArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        With Auth0, you can define sources of users, otherwise known as connections, which may include identity providers (such as Google or LinkedIn), databases, or passwordless authentication methods. This resource allows you to configure and manage connections to be used with your clients and users.

        ## Example Usage

        ```python
        import pulumi
        import pulumi_auth0 as auth0

        my_connection = auth0.Connection("myConnection",
            options=auth0.ConnectionOptionsArgs(
                brute_force_protection=True,
                configuration={
                    "bar": "baz",
                    "foo": "bar",
                },
                custom_scripts={
                    "getUser": \"\"\"function getByEmail (email, callback) {
            return callback(new Error("Whoops!"))
        }
        \"\"\",
                },
                enabled_database_customization=True,
                password_histories=[auth0.ConnectionOptionsPasswordHistoryArgs(
                    enable=True,
                    size=3,
                )],
                password_policy="excellent",
            ),
            strategy="auth0")
        ```

        > The Auth0 dashboard displays only one connection per social provider. Although the Auth0 Management API allows the creation of multiple connections per strategy, the additional connections may not be visible in the Auth0 dashboard.

        :param str resource_name: The name of the resource.
        :param ConnectionArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Dispatch between the two overloads above: either an args object or
        # individual keyword properties; both paths funnel into _internal_init.
        resource_args, opts = _utilities.get_resource_args_opts(ConnectionArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)

    def _internal_init(__self__,
                       resource_name: str,
                       opts: Optional[pulumi.ResourceOptions] = None,
                       display_name: Optional[pulumi.Input[str]] = None,
                       enabled_clients: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                       is_domain_connection: Optional[pulumi.Input[bool]] = None,
                       name: Optional[pulumi.Input[str]] = None,
                       options: Optional[pulumi.Input[pulumi.InputType['ConnectionOptionsArgs']]] = None,
                       realms: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                       strategy: Optional[pulumi.Input[str]] = None,
                       strategy_version: Optional[pulumi.Input[str]] = None,
                       validation: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                       __props__=None):
        # Shared constructor body for both __init__ overloads.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource: build the args bag from the keyword inputs.
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = ConnectionArgs.__new__(ConnectionArgs)

            __props__.__dict__["display_name"] = display_name
            __props__.__dict__["enabled_clients"] = enabled_clients
            __props__.__dict__["is_domain_connection"] = is_domain_connection
            __props__.__dict__["name"] = name
            __props__.__dict__["options"] = options
            __props__.__dict__["realms"] = realms
            # `strategy` is required unless looking up an existing resource by URN.
            if strategy is None and not opts.urn:
                raise TypeError("Missing required property 'strategy'")
            __props__.__dict__["strategy"] = strategy
            __props__.__dict__["strategy_version"] = strategy_version
            __props__.__dict__["validation"] = validation
        super(Connection, __self__).__init__(
            'auth0:index/connection:Connection',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            display_name: Optional[pulumi.Input[str]] = None,
            enabled_clients: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
            is_domain_connection: Optional[pulumi.Input[bool]] = None,
            name: Optional[pulumi.Input[str]] = None,
            options: Optional[pulumi.Input[pulumi.InputType['ConnectionOptionsArgs']]] = None,
            realms: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
            strategy: Optional[pulumi.Input[str]] = None,
            strategy_version: Optional[pulumi.Input[str]] = None,
            validation: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None) -> 'Connection':
        """
        Get an existing Connection resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] display_name: Name used in login screen
        :param pulumi.Input[Sequence[pulumi.Input[str]]] enabled_clients: IDs of the clients for which the connection is enabled. If not specified, no clients are enabled.
        :param pulumi.Input[bool] is_domain_connection: Indicates whether or not the connection is domain level.
        :param pulumi.Input[str] name: Name of the connection.
        :param pulumi.Input[pulumi.InputType['ConnectionOptionsArgs']] options: Configuration settings for connection options. For details, see Options.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] realms: Defines the realms for which the connection will be used (i.e., email domains). If not specified, the connection name is added as the realm.
        :param pulumi.Input[str] strategy: Type of the connection, which indicates the identity provider. Options include `ad`, `adfs`, `amazon`, `aol`, `apple`, `auth0`, `auth0-adldap`, `auth0-oidc`, `baidu`, `bitbucket`, `bitly`, `box`, `custom`, `daccount`, `dropbox`, `dwolla`, `email`, `evernote`, `evernote-sandbox`, `exact`, `facebook`, `fitbit`, `flickr`, `github`, `google-apps`, `google-oauth2`, `guardian`, `instagram`, `ip`, `line`, `linkedin`, `miicard`, `oauth1`, `oauth2`, `office365`, `oidc`, `paypal`, `paypal-sandbox`, `pingfederate`, `planningcenter`, `renren`, `salesforce`, `salesforce-community`, `salesforce-sandbox`, `samlp`, `sharepoint`, `shopify`, `sms`, `soundcloud`, `thecity`, `thecity-sandbox`, `thirtysevensignals`, `twitter`, `untappd`, `vkontakte`, `waad`, `weibo`, `windowslive`, `wordpress`, `yahoo`, `yammer`, `yandex`.
        :param pulumi.Input[str] strategy_version: Version 1 is deprecated, use version 2.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] validation: Validation of the minimum and maximum values allowed for a user to have as username. For details, see Validation.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        __props__ = _ConnectionState.__new__(_ConnectionState)

        __props__.__dict__["display_name"] = display_name
        __props__.__dict__["enabled_clients"] = enabled_clients
        __props__.__dict__["is_domain_connection"] = is_domain_connection
        __props__.__dict__["name"] = name
        __props__.__dict__["options"] = options
        __props__.__dict__["realms"] = realms
        __props__.__dict__["strategy"] = strategy
        __props__.__dict__["strategy_version"] = strategy_version
        __props__.__dict__["validation"] = validation
        return Connection(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter(name="displayName")
    def display_name(self) -> pulumi.Output[Optional[str]]:
        """
        Name used in login screen
        """
        return pulumi.get(self, "display_name")

    @property
    @pulumi.getter(name="enabledClients")
    def enabled_clients(self) -> pulumi.Output[Sequence[str]]:
        """
        IDs of the clients for which the connection is enabled. If not specified, no clients are enabled.
        """
        return pulumi.get(self, "enabled_clients")

    @property
    @pulumi.getter(name="isDomainConnection")
    def is_domain_connection(self) -> pulumi.Output[bool]:
        """
        Indicates whether or not the connection is domain level.
        """
        return pulumi.get(self, "is_domain_connection")

    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        Name of the connection.
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter
    def options(self) -> pulumi.Output[Optional['outputs.ConnectionOptions']]:
        """
        Configuration settings for connection options. For details, see Options.
        """
        return pulumi.get(self, "options")

    @property
    @pulumi.getter
    def realms(self) -> pulumi.Output[Sequence[str]]:
        """
        Defines the realms for which the connection will be used (i.e., email domains). If not specified, the connection name is added as the realm.
        """
        return pulumi.get(self, "realms")

    @property
    @pulumi.getter
    def strategy(self) -> pulumi.Output[str]:
        """
        Type of the connection, which indicates the identity provider. Options include `ad`, `adfs`, `amazon`, `aol`, `apple`, `auth0`, `auth0-adldap`, `auth0-oidc`, `baidu`, `bitbucket`, `bitly`, `box`, `custom`, `daccount`, `dropbox`, `dwolla`, `email`, `evernote`, `evernote-sandbox`, `exact`, `facebook`, `fitbit`, `flickr`, `github`, `google-apps`, `google-oauth2`, `guardian`, `instagram`, `ip`, `line`, `linkedin`, `miicard`, `oauth1`, `oauth2`, `office365`, `oidc`, `paypal`, `paypal-sandbox`, `pingfederate`, `planningcenter`, `renren`, `salesforce`, `salesforce-community`, `salesforce-sandbox`, `samlp`, `sharepoint`, `shopify`, `sms`, `soundcloud`, `thecity`, `thecity-sandbox`, `thirtysevensignals`, `twitter`, `untappd`, `vkontakte`, `waad`, `weibo`, `windowslive`, `wordpress`, `yahoo`, `yammer`, `yandex`.
        """
        return pulumi.get(self, "strategy")

    @property
    @pulumi.getter(name="strategyVersion")
    def strategy_version(self) -> pulumi.Output[str]:
        """
        Version 1 is deprecated, use version 2.
        """
        return pulumi.get(self, "strategy_version")

    @property
    @pulumi.getter
    def validation(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
        """
        Validation of the minimum and maximum values allowed for a user to have as username. For details, see Validation.
        """
        return pulumi.get(self, "validation")
| 54.71066 | 855 | 0.649286 | 3,607 | 32,334 | 5.672027 | 0.080954 | 0.086026 | 0.063639 | 0.029913 | 0.913046 | 0.902244 | 0.893103 | 0.885038 | 0.882546 | 0.87409 | 0 | 0.004018 | 0.222583 | 32,334 | 590 | 856 | 54.80339 | 0.809882 | 0.460351 | 0 | 0.816199 | 1 | 0 | 0.096196 | 0.01553 | 0 | 0 | 0 | 0 | 0 | 1 | 0.161994 | false | 0.003115 | 0.021807 | 0 | 0.280374 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
6078e00ba357827be9aa46e3d81d877ded812803 | 8,400 | py | Python | api/tests/test_explanation.py | tiveritz/sequence-api | ba0fb432028eaf878122e4d96d8b1ce234602e47 | [
"MIT"
] | null | null | null | api/tests/test_explanation.py | tiveritz/sequence-api | ba0fb432028eaf878122e4d96d8b1ce234602e47 | [
"MIT"
] | null | null | null | api/tests/test_explanation.py | tiveritz/sequence-api | ba0fb432028eaf878122e4d96d8b1ce234602e47 | [
"MIT"
] | null | null | null | import re
import pytest
from django.urls import reverse
from rest_framework import status
from rest_framework.test import APIClient
class TestExplanationText():
api_client = APIClient()
@pytest.mark.django_db
def test_create_text_explanation(self):
"""
Ensure client can create a new Explanation text
"""
url = reverse('explanation')
data = {'type': 'text', 'title': 'Create an Explanation'}
response = self.api_client.post(url, data, format='json')
msg = 'HTTP status return code is not 201'
assert response.status_code == status.HTTP_201_CREATED, msg
msg = 'Created title is not correct'
assert response.data['title'] == data['title'], msg
@pytest.mark.django_db
def test_retrieve_text_explanation_by_id(self):
"""
Ensure client can retreive a Text Explanation by Uri ID
"""
url = reverse('explanation')
data = {'type': 'text', 'title': 'Create and Retrieve'}
response_post = self.api_client.post(url, data, format='json')
url = reverse('explanation-detail',
args=[response_post.data['api_id']])
response_get = self.api_client.get(url, format='json')
msg = 'Retreiving Text Explanation by Uri ID failed'
assert response_get.status_code == status.HTTP_200_OK, msg
msg = ('Retreiving Text Explanation by Uri ID did not return the '
'correct explanation')
assert response_get.data['title'] == data['title'], msg
@pytest.mark.django_db
def test_update_text_explanation(self):
"""
Ensure client can update Text Explanation
"""
url = reverse('explanation')
data = {'type': 'text', 'title': 'Create and Retrieve'}
response_post = self.api_client.post(url, data, format='json')
url = reverse('explanation-detail',
args=[response_post.data['api_id']])
status_msg = 'HTTP status return code is not 200'
data = {
'title': 'New Title',
'content': 'New Content',
}
for k, v in data.items():
change_data = {k: v}
response_patch = self.api_client.patch(
url, change_data, format='json')
assert response_patch.status_code == status.HTTP_200_OK, status_msg
msg = f'Updating {k} failed'
assert response_patch.data[k] == data[k], msg
@pytest.mark.django_db
def test_delete_text_explanation(self):
"""
Ensure client can delete Text Explanation
"""
url = reverse('explanation')
data = {'type': 'text', 'title': 'Create and Retrieve'}
response_post = self.api_client.post(url, data, format='json')
url_delete = reverse('explanation-detail',
args=[response_post.data['api_id']])
response_delete = self.api_client.delete(url_delete, format='json')
msg = 'HTTP status return code is not 204'
assert response_delete.status_code == status.HTTP_204_NO_CONTENT, msg
url_get = reverse('explanation-detail',
args=[response_post.data['api_id']])
response_get = self.api_client.get(url_get, data, format='json')
msg = 'Text Explanation was not successfully deleted'
assert response_get.status_code == status.HTTP_404_NOT_FOUND, msg
@pytest.mark.django_db
def test_forbidden_updates(self):
"""
Ensure client can not change read only fields
"""
url = reverse('explanation')
data = {'type': 'text', 'title': 'Create and Retrieve'}
response_post = self.api_client.post(url, data, format='json')
url = reverse('explanation-detail',
args=[response_post.data['api_id']])
data = {
'api_id': 'lolololo',
'created': 'lol',
}
for k, v in data.items():
forbidden_data = {k: v}
response_patch = self.api_client.patch(
url, forbidden_data, format='json')
msg = f'Forbidden update on {k} was not blocked'
assert response_post.data[k] == response_patch.data[k], msg
class TestExplanationCode():
    """CRUD tests for Code Explanations served by the `explanation` endpoints."""

    api_client = APIClient()

    def _post_explanation(self, title):
        """POST a new 'code' Explanation with the given title; return the response."""
        url = reverse('explanation')
        data = {'type': 'code', 'title': title}
        return self.api_client.post(url, data, format='json')

    @pytest.mark.django_db
    def test_create_code_explanation(self):
        """
        Ensure client can create a new Explanation code
        """
        # NOTE: was misnamed `test_create_text_explanation` (copy-paste from the
        # text test class) even though it creates a 'code' explanation.
        title = 'Create an Explanation'
        response = self._post_explanation(title)

        msg = 'HTTP status return code is not 201'
        assert response.status_code == status.HTTP_201_CREATED, msg
        msg = 'Created title is not correct'
        assert response.data['title'] == title, msg

    @pytest.mark.django_db
    def test_retrieve_code_explanation_by_id(self):
        """
        Ensure client can retrieve a Code Explanation by Uri ID
        """
        title = 'Create and Retrieve'
        response_post = self._post_explanation(title)

        url = reverse('explanation-detail',
                      args=[response_post.data['api_id']])
        response_get = self.api_client.get(url, format='json')

        msg = 'Retreiving Code Explanation by Uri ID failed'
        assert response_get.status_code == status.HTTP_200_OK, msg
        msg = ('Retreiving Code Explanation by Uri ID did not return the '
               'correct explanation')
        assert response_get.data['title'] == title, msg

    @pytest.mark.django_db
    def test_update_code_explanation(self):
        """
        Ensure client can update Code Explanation
        """
        response_post = self._post_explanation('Create and Retrieve')
        url = reverse('explanation-detail',
                      args=[response_post.data['api_id']])

        status_msg = 'HTTP status return code is not 200'
        data = {
            'title': 'New Title',
            'content': 'New Content',
        }
        # Patch each field individually and verify it sticks.
        for k, v in data.items():
            change_data = {k: v}
            response_patch = self.api_client.patch(
                url, change_data, format='json')

            assert response_patch.status_code == status.HTTP_200_OK, status_msg
            msg = f'Updating {k} failed'
            assert response_patch.data[k] == data[k], msg

    @pytest.mark.django_db
    def test_delete_code_explanation(self):
        """
        Ensure client can delete Code Explanation
        """
        response_post = self._post_explanation('Create and Retrieve')

        url_delete = reverse('explanation-detail',
                             args=[response_post.data['api_id']])
        response_delete = self.api_client.delete(url_delete, format='json')

        msg = 'HTTP status return code is not 204'
        assert response_delete.status_code == status.HTTP_204_NO_CONTENT, msg

        # A follow-up GET must now 404.
        url_get = reverse('explanation-detail',
                          args=[response_post.data['api_id']])
        response_get = self.api_client.get(url_get, format='json')
        msg = 'Code Explanation was not successfully deleted'
        assert response_get.status_code == status.HTTP_404_NOT_FOUND, msg

    @pytest.mark.django_db
    def test_forbidden_updates(self):
        """
        Ensure client can not change read only fields
        """
        response_post = self._post_explanation('Create and Retrieve')
        url = reverse('explanation-detail',
                      args=[response_post.data['api_id']])
        data = {
            'api_id': 'lolololo',
            'created': 'lol',
        }
        # Read-only fields must be ignored by PATCH: the response keeps the
        # originally created values.
        for k, v in data.items():
            forbidden_data = {k: v}
            response_patch = self.api_client.patch(
                url, forbidden_data, format='json')
            msg = f'Forbidden update on {k} was not blocked'
            assert response_post.data[k] == response_patch.data[k], msg
| 36.206897 | 79 | 0.596905 | 996 | 8,400 | 4.85743 | 0.090361 | 0.040926 | 0.053741 | 0.037205 | 0.963621 | 0.961968 | 0.928896 | 0.925589 | 0.923936 | 0.898718 | 0 | 0.008024 | 0.287857 | 8,400 | 231 | 80 | 36.363636 | 0.800736 | 0.055595 | 0 | 0.83871 | 0 | 0 | 0.209296 | 0 | 0 | 0 | 0 | 0 | 0.116129 | 1 | 0.064516 | false | 0 | 0.032258 | 0 | 0.122581 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
60935a50690ce0fe55ed771a081420fc893f7878 | 54,278 | py | Python | tests/test_date.py | shane-kercheval/python-helpers | 71a16a80603f403809e7ec766355551ac69bd120 | [
"MIT"
] | null | null | null | tests/test_date.py | shane-kercheval/python-helpers | 71a16a80603f403809e7ec766355551ac69bd120 | [
"MIT"
] | 1 | 2021-08-01T23:31:25.000Z | 2021-08-01T23:31:25.000Z | tests/test_date.py | shane-kercheval/python-helpers | 71a16a80603f403809e7ec766355551ac69bd120 | [
"MIT"
] | null | null | null | import datetime
import unittest
import numpy as np
import pandas as pd
from dateutil.parser import parse
from helpsk import date, validation
from tests.helpers import subtests_expected_vs_actual
# noinspection PyMethodMayBeStatic
class TestDate(unittest.TestCase):
def test_fiscal_quarter_date(self):
date_values = ['2020-12-01', '2020-12-15', '2020-12-31',
'2021-01-01', '2021-01-15', '2021-01-31',
'2021-02-01', '2021-02-15', '2021-02-28',
'2021-03-01', '2021-03-15', '2021-03-31',
'2021-04-01', '2021-04-15', '2021-04-30',
'2021-05-01', '2021-05-15', '2021-05-31',
'2021-06-01', '2021-06-15', '2021-06-30',
'2021-07-01', '2021-07-15', '2021-07-31',
'2021-08-01', '2021-08-15', '2021-08-31',
'2021-09-01', '2021-09-15', '2021-09-30',
'2021-10-01', '2021-10-15', '2021-10-31',
'2021-11-01', '2021-11-15', '2021-11-30',
'2021-12-01', '2021-12-15', '2021-12-31',
'2022-01-01', '2022-01-15', '2022-01-31']
test_parameters = dict(include_year=True, fiscal_start=1)
results = [date.fiscal_quarter(value=parse(x), **test_parameters) for x in date_values]
expected = [2020.4, 2020.4, 2020.4,
2021.1, 2021.1, 2021.1,
2021.1, 2021.1, 2021.1,
2021.1, 2021.1, 2021.1,
2021.2, 2021.2, 2021.2,
2021.2, 2021.2, 2021.2,
2021.2, 2021.2, 2021.2,
2021.3, 2021.3, 2021.3,
2021.3, 2021.3, 2021.3,
2021.3, 2021.3, 2021.3,
2021.4, 2021.4, 2021.4,
2021.4, 2021.4, 2021.4,
2021.4, 2021.4, 2021.4,
2022.1, 2022.1, 2022.1]
subtests_expected_vs_actual(test_case=self, actual_values=results, expected_values=expected,
**test_parameters)
test_parameters = dict(include_year=False, fiscal_start=1)
results = [date.fiscal_quarter(value=parse(x), **test_parameters) for x in date_values]
expected = [4, 4, 4,
1, 1, 1,
1, 1, 1,
1, 1, 1,
2, 2, 2,
2, 2, 2,
2, 2, 2,
3, 3, 3,
3, 3, 3,
3, 3, 3,
4, 4, 4,
4, 4, 4,
4, 4, 4,
1, 1, 1]
subtests_expected_vs_actual(test_case=self, actual_values=results, expected_values=expected,
**test_parameters)
test_parameters = dict(include_year=True, fiscal_start=2)
results = [date.fiscal_quarter(value=parse(x), **test_parameters) for x in date_values]
expected = [2021.4, 2021.4, 2021.4,
2021.4, 2021.4, 2021.4,
2022.1, 2022.1, 2022.1,
2022.1, 2022.1, 2022.1,
2022.1, 2022.1, 2022.1,
2022.2, 2022.2, 2022.2,
2022.2, 2022.2, 2022.2,
2022.2, 2022.2, 2022.2,
2022.3, 2022.3, 2022.3,
2022.3, 2022.3, 2022.3,
2022.3, 2022.3, 2022.3,
2022.4, 2022.4, 2022.4,
2022.4, 2022.4, 2022.4,
2022.4, 2022.4, 2022.4]
subtests_expected_vs_actual(test_case=self, actual_values=results, expected_values=expected,
**test_parameters)
test_parameters = dict(include_year=False, fiscal_start=2)
results = [date.fiscal_quarter(value=parse(x), **test_parameters) for x in date_values]
expected = [4, 4, 4,
4, 4, 4,
1, 1, 1,
1, 1, 1,
1, 1, 1,
2, 2, 2,
2, 2, 2,
2, 2, 2,
3, 3, 3,
3, 3, 3,
3, 3, 3,
4, 4, 4,
4, 4, 4,
4, 4, 4]
subtests_expected_vs_actual(test_case=self, actual_values=results, expected_values=expected,
**test_parameters)
test_parameters = dict(include_year=True, fiscal_start=12)
results = [date.fiscal_quarter(value=parse(x), **test_parameters) for x in date_values]
expected = [2021.1, 2021.1, 2021.1, # 2020-Dec
2021.1, 2021.1, 2021.1, # 2021-Jan
2021.1, 2021.1, 2021.1, # 2021-Feb
2021.2, 2021.2, 2021.2, # 2021-Mar
2021.2, 2021.2, 2021.2, # 2021-Apr
2021.2, 2021.2, 2021.2, # 2021-May
2021.3, 2021.3, 2021.3, # 2021-Jun
2021.3, 2021.3, 2021.3, # 2021-Jul
2021.3, 2021.3, 2021.3, # 2021-Aug
2021.4, 2021.4, 2021.4, # 2021-Sep
2021.4, 2021.4, 2021.4, # 2021-Oct
2021.4, 2021.4, 2021.4, # 2021-Nov
2022.1, 2022.1, 2022.1, # 2021-Dec
2022.1, 2022.1, 2022.1] # 2022-Jan
subtests_expected_vs_actual(test_case=self, actual_values=results, expected_values=expected,
**test_parameters)
test_parameters = dict(include_year=False, fiscal_start=12)
results = [date.fiscal_quarter(value=parse(x), **test_parameters) for x in date_values]
expected = [1, 1, 1, # 2020-Dec
1, 1, 1, # 2021-Jan
1, 1, 1, # 2021-Feb
2, 2, 2, # 2021-Mar
2, 2, 2, # 2021-Apr
2, 2, 2, # 2021-May
3, 3, 3, # 2021-Jun
3, 3, 3, # 2021-Jul
3, 3, 3, # 2021-Aug
4, 4, 4, # 2021-Sep
4, 4, 4, # 2021-Oct
4, 4, 4, # 2021-Nov
1, 1, 1, # 2021-Dec
1, 1, 1] # 2022-Jan
subtests_expected_vs_actual(test_case=self, actual_values=results, expected_values=expected,
**test_parameters)
def test_fiscal_quarter_datetime(self):
date_values = ['2020-12-01', '2020-12-15', '2020-12-31',
'2021-01-01', '2021-01-15', '2021-01-31',
'2021-02-01', '2021-02-15', '2021-02-28',
'2021-03-01', '2021-03-15', '2021-03-31',
'2021-04-01', '2021-04-15', '2021-04-30',
'2021-05-01', '2021-05-15', '2021-05-31',
'2021-06-01', '2021-06-15', '2021-06-30',
'2021-07-01', '2021-07-15', '2021-07-31',
'2021-08-01', '2021-08-15', '2021-08-31',
'2021-09-01', '2021-09-15', '2021-09-30',
'2021-10-01', '2021-10-15', '2021-10-31',
'2021-11-01', '2021-11-15', '2021-11-30',
'2021-12-01', '2021-12-15', '2021-12-31',
'2022-01-01', '2022-01-15', '2022-01-31']
test_parameters = dict(include_year=True, fiscal_start=1)
results = [date.fiscal_quarter(value=parse(x + ' 23:59:59'), **test_parameters) for x in date_values]
expected = [2020.4, 2020.4, 2020.4,
2021.1, 2021.1, 2021.1,
2021.1, 2021.1, 2021.1,
2021.1, 2021.1, 2021.1,
2021.2, 2021.2, 2021.2,
2021.2, 2021.2, 2021.2,
2021.2, 2021.2, 2021.2,
2021.3, 2021.3, 2021.3,
2021.3, 2021.3, 2021.3,
2021.3, 2021.3, 2021.3,
2021.4, 2021.4, 2021.4,
2021.4, 2021.4, 2021.4,
2021.4, 2021.4, 2021.4,
2022.1, 2022.1, 2022.1]
subtests_expected_vs_actual(test_case=self, actual_values=results, expected_values=expected,
**test_parameters)
test_parameters = dict(include_year=False, fiscal_start=1)
results = [date.fiscal_quarter(value=parse(x), **test_parameters) for x in date_values]
expected = [4, 4, 4,
1, 1, 1,
1, 1, 1,
1, 1, 1,
2, 2, 2,
2, 2, 2,
2, 2, 2,
3, 3, 3,
3, 3, 3,
3, 3, 3,
4, 4, 4,
4, 4, 4,
4, 4, 4,
1, 1, 1]
subtests_expected_vs_actual(test_case=self, actual_values=results, expected_values=expected,
**test_parameters)
test_parameters = dict(include_year=True, fiscal_start=2)
results = [date.fiscal_quarter(value=parse(x), **test_parameters) for x in date_values]
expected = [2021.4, 2021.4, 2021.4,
2021.4, 2021.4, 2021.4,
2022.1, 2022.1, 2022.1,
2022.1, 2022.1, 2022.1,
2022.1, 2022.1, 2022.1,
2022.2, 2022.2, 2022.2,
2022.2, 2022.2, 2022.2,
2022.2, 2022.2, 2022.2,
2022.3, 2022.3, 2022.3,
2022.3, 2022.3, 2022.3,
2022.3, 2022.3, 2022.3,
2022.4, 2022.4, 2022.4,
2022.4, 2022.4, 2022.4,
2022.4, 2022.4, 2022.4]
subtests_expected_vs_actual(test_case=self, actual_values=results, expected_values=expected,
**test_parameters)
test_parameters = dict(include_year=False, fiscal_start=2)
results = [date.fiscal_quarter(value=parse(x), **test_parameters) for x in date_values]
expected = [4, 4, 4,
4, 4, 4,
1, 1, 1,
1, 1, 1,
1, 1, 1,
2, 2, 2,
2, 2, 2,
2, 2, 2,
3, 3, 3,
3, 3, 3,
3, 3, 3,
4, 4, 4,
4, 4, 4,
4, 4, 4]
subtests_expected_vs_actual(test_case=self, actual_values=results, expected_values=expected,
**test_parameters)
test_parameters = dict(include_year=True, fiscal_start=12)
results = [date.fiscal_quarter(value=parse(x), **test_parameters) for x in date_values]
expected = [2021.1, 2021.1, 2021.1, # 2020-Dec
2021.1, 2021.1, 2021.1, # 2021-Jan
2021.1, 2021.1, 2021.1, # 2021-Feb
2021.2, 2021.2, 2021.2, # 2021-Mar
2021.2, 2021.2, 2021.2, # 2021-Apr
2021.2, 2021.2, 2021.2, # 2021-May
2021.3, 2021.3, 2021.3, # 2021-Jun
2021.3, 2021.3, 2021.3, # 2021-Jul
2021.3, 2021.3, 2021.3, # 2021-Aug
2021.4, 2021.4, 2021.4, # 2021-Sep
2021.4, 2021.4, 2021.4, # 2021-Oct
2021.4, 2021.4, 2021.4, # 2021-Nov
2022.1, 2022.1, 2022.1, # 2021-Dec
2022.1, 2022.1, 2022.1] # 2022-Jan
subtests_expected_vs_actual(test_case=self, actual_values=results, expected_values=expected,
**test_parameters)
test_parameters = dict(include_year=False, fiscal_start=12)
results = [date.fiscal_quarter(value=parse(x), **test_parameters) for x in date_values]
expected = [1, 1, 1, # 2020-Dec
1, 1, 1, # 2021-Jan
1, 1, 1, # 2021-Feb
2, 2, 2, # 2021-Mar
2, 2, 2, # 2021-Apr
2, 2, 2, # 2021-May
3, 3, 3, # 2021-Jun
3, 3, 3, # 2021-Jul
3, 3, 3, # 2021-Aug
4, 4, 4, # 2021-Sep
4, 4, 4, # 2021-Oct
4, 4, 4, # 2021-Nov
1, 1, 1, # 2021-Dec
1, 1, 1] # 2022-Jan
subtests_expected_vs_actual(test_case=self, actual_values=results, expected_values=expected,
**test_parameters)
def test_to_string_date(self):
    """date.to_string formats date values at DAY, MONTH and QUARTER granularity."""
    date_values = ['2020-12-01', '2020-12-15', '2020-12-31',
                   '2021-01-01', '2021-01-15', '2021-01-31',
                   '2021-02-01', '2021-02-15', '2021-02-28',
                   '2021-03-01', '2021-03-15', '2021-03-31',
                   '2021-04-01', '2021-04-15', '2021-04-30',
                   '2021-05-01', '2021-05-15', '2021-05-31',
                   '2021-06-01', '2021-06-15', '2021-06-30',
                   '2021-07-01', '2021-07-15', '2021-07-31',
                   '2021-08-01', '2021-08-15', '2021-08-31',
                   '2021-09-01', '2021-09-15', '2021-09-30',
                   '2021-10-01', '2021-10-15', '2021-10-31',
                   '2021-11-01', '2021-11-15', '2021-11-30',
                   '2021-12-01', '2021-12-15', '2021-12-31',
                   '2022-01-01', '2022-01-15', '2022-01-31']

    def per_day(month_labels):
        # Each month-level label covers the three sampled days of that month.
        return [label for label in month_labels for _ in range(3)]

    test_parameters = dict(granularity=date.Granularity.DAY)
    results = [date.to_string(value=parse(x), **test_parameters) for x in date_values]
    # DAY granularity should round-trip the ISO date strings unchanged.
    subtests_expected_vs_actual(test_case=self, actual_values=results,
                                expected_values=date_values, **test_parameters)

    test_parameters = dict(granularity=date.Granularity.MONTH)
    results = [date.to_string(value=parse(x), **test_parameters) for x in date_values]
    expected = per_day(['2020-Dec', '2021-Jan', '2021-Feb', '2021-Mar', '2021-Apr',
                        '2021-May', '2021-Jun', '2021-Jul', '2021-Aug', '2021-Sep',
                        '2021-Oct', '2021-Nov', '2021-Dec', '2022-Jan'])
    subtests_expected_vs_actual(test_case=self, actual_values=results,
                                expected_values=expected, **test_parameters)

    # fiscal_start=1 matches the calendar year, so plain "Qn" labels are used.
    test_parameters = dict(granularity=date.Granularity.QUARTER, fiscal_start=1)
    results = [date.to_string(value=parse(x), **test_parameters) for x in date_values]
    expected = per_day(['2020-Q4', '2021-Q1', '2021-Q1', '2021-Q1', '2021-Q2',
                        '2021-Q2', '2021-Q2', '2021-Q3', '2021-Q3', '2021-Q3',
                        '2021-Q4', '2021-Q4', '2021-Q4', '2022-Q1'])
    subtests_expected_vs_actual(test_case=self, actual_values=results,
                                expected_values=expected, **test_parameters)

    # Non-calendar fiscal years use "FQn" labels with the fiscal year.
    test_parameters = dict(granularity=date.Granularity.QUARTER, fiscal_start=2)
    results = [date.to_string(value=parse(x), **test_parameters) for x in date_values]
    expected = per_day(['2021-FQ4', '2021-FQ4', '2022-FQ1', '2022-FQ1', '2022-FQ1',
                        '2022-FQ2', '2022-FQ2', '2022-FQ2', '2022-FQ3', '2022-FQ3',
                        '2022-FQ3', '2022-FQ4', '2022-FQ4', '2022-FQ4'])
    subtests_expected_vs_actual(test_case=self, actual_values=results,
                                expected_values=expected, **test_parameters)

    test_parameters = dict(granularity=date.Granularity.QUARTER, fiscal_start=12)
    results = [date.to_string(value=parse(x), **test_parameters) for x in date_values]
    expected = per_day(['2021-FQ1', '2021-FQ1', '2021-FQ1', '2021-FQ2', '2021-FQ2',
                        '2021-FQ2', '2021-FQ3', '2021-FQ3', '2021-FQ3', '2021-FQ4',
                        '2021-FQ4', '2021-FQ4', '2022-FQ1', '2022-FQ1'])
    subtests_expected_vs_actual(test_case=self, actual_values=results,
                                expected_values=expected, **test_parameters)
def test_to_string_datetime(self):
    """date.to_string drops the time component at every granularity.

    Mirrors test_to_string_date but feeds datetimes carrying a 23:59:59
    time component. The previous version only did so for the DAY
    granularity; the MONTH/QUARTER cases used plain dates and therefore
    duplicated test_to_string_date instead of exercising datetimes.
    """
    date_values = ['2020-12-01', '2020-12-15', '2020-12-31',
                   '2021-01-01', '2021-01-15', '2021-01-31',
                   '2021-02-01', '2021-02-15', '2021-02-28',
                   '2021-03-01', '2021-03-15', '2021-03-31',
                   '2021-04-01', '2021-04-15', '2021-04-30',
                   '2021-05-01', '2021-05-15', '2021-05-31',
                   '2021-06-01', '2021-06-15', '2021-06-30',
                   '2021-07-01', '2021-07-15', '2021-07-31',
                   '2021-08-01', '2021-08-15', '2021-08-31',
                   '2021-09-01', '2021-09-15', '2021-09-30',
                   '2021-10-01', '2021-10-15', '2021-10-31',
                   '2021-11-01', '2021-11-15', '2021-11-30',
                   '2021-12-01', '2021-12-15', '2021-12-31',
                   '2022-01-01', '2022-01-15', '2022-01-31']
    # End-of-day timestamps: the time portion must not affect the output.
    datetime_values = [parse(x + ' 23:59:59') for x in date_values]

    def per_day(month_labels):
        # Each month-level label covers the three sampled days of that month.
        return [label for label in month_labels for _ in range(3)]

    test_parameters = dict(granularity=date.Granularity.DAY)
    results = [date.to_string(value=v, **test_parameters) for v in datetime_values]
    # DAY granularity should yield the ISO date strings without the time.
    subtests_expected_vs_actual(test_case=self, actual_values=results,
                                expected_values=date_values, **test_parameters)

    test_parameters = dict(granularity=date.Granularity.MONTH)
    results = [date.to_string(value=v, **test_parameters) for v in datetime_values]
    expected = per_day(['2020-Dec', '2021-Jan', '2021-Feb', '2021-Mar', '2021-Apr',
                        '2021-May', '2021-Jun', '2021-Jul', '2021-Aug', '2021-Sep',
                        '2021-Oct', '2021-Nov', '2021-Dec', '2022-Jan'])
    subtests_expected_vs_actual(test_case=self, actual_values=results,
                                expected_values=expected, **test_parameters)

    # fiscal_start=1 matches the calendar year, so plain "Qn" labels are used.
    test_parameters = dict(granularity=date.Granularity.QUARTER, fiscal_start=1)
    results = [date.to_string(value=v, **test_parameters) for v in datetime_values]
    expected = per_day(['2020-Q4', '2021-Q1', '2021-Q1', '2021-Q1', '2021-Q2',
                        '2021-Q2', '2021-Q2', '2021-Q3', '2021-Q3', '2021-Q3',
                        '2021-Q4', '2021-Q4', '2021-Q4', '2022-Q1'])
    subtests_expected_vs_actual(test_case=self, actual_values=results,
                                expected_values=expected, **test_parameters)

    # Non-calendar fiscal years use "FQn" labels with the fiscal year.
    test_parameters = dict(granularity=date.Granularity.QUARTER, fiscal_start=2)
    results = [date.to_string(value=v, **test_parameters) for v in datetime_values]
    expected = per_day(['2021-FQ4', '2021-FQ4', '2022-FQ1', '2022-FQ1', '2022-FQ1',
                        '2022-FQ2', '2022-FQ2', '2022-FQ2', '2022-FQ3', '2022-FQ3',
                        '2022-FQ3', '2022-FQ4', '2022-FQ4', '2022-FQ4'])
    subtests_expected_vs_actual(test_case=self, actual_values=results,
                                expected_values=expected, **test_parameters)

    test_parameters = dict(granularity=date.Granularity.QUARTER, fiscal_start=12)
    results = [date.to_string(value=v, **test_parameters) for v in datetime_values]
    expected = per_day(['2021-FQ1', '2021-FQ1', '2021-FQ1', '2021-FQ2', '2021-FQ2',
                        '2021-FQ2', '2021-FQ3', '2021-FQ3', '2021-FQ3', '2021-FQ4',
                        '2021-FQ4', '2021-FQ4', '2022-FQ1', '2022-FQ1'])
    subtests_expected_vs_actual(test_case=self, actual_values=results,
                                expected_values=expected, **test_parameters)
def test_floor_missing_value(self):
    """date.floor must pass missing values through unchanged.

    Identity (not mere equality) is asserted for every missing-value
    marker at every granularity.
    """
    granularities = (date.Granularity.DAY,
                     date.Granularity.MONTH,
                     date.Granularity.QUARTER)
    for granularity in granularities:
        self.assertTrue(date.floor(value=pd.NA, granularity=granularity) is pd.NA)
        self.assertTrue(date.floor(value=pd.NaT, granularity=granularity) is pd.NaT)
        # np.nan: the np.NaN alias was removed in NumPy 2.0
        self.assertTrue(date.floor(value=np.nan, granularity=granularity) is np.nan)
        self.assertTrue(date.floor(value=None, granularity=granularity) is None)  # noqa
def test_floor_day(self):
    """Flooring to DAY granularity truncates any time component."""
    expected = parse('2021-02-13').date()
    # datetime input: the time-of-day portion must be discarded
    as_datetime = datetime.datetime(year=2021, month=2, day=13, hour=23, minute=45, second=55)
    self.assertEqual(date.floor(as_datetime, granularity=date.Granularity.DAY), expected)
    self.assertEqual(date.floor(as_datetime), expected)  # DAY is the default granularity
    # date input: already at day resolution, should pass through unchanged
    as_date = datetime.date(year=2021, month=2, day=13)
    self.assertEqual(date.floor(as_date, granularity=date.Granularity.DAY), expected)
    self.assertEqual(date.floor(as_date), expected)
def test_floor_month(self):
    """Flooring to MONTH granularity snaps to the first day of the month."""
    # datetime inputs: (year, month, day) -> expected first-of-month date
    datetime_cases = [(2021, 1, 1, '2021-01-01'),
                      (2021, 1, 31, '2021-01-01'),
                      (2021, 12, 1, '2021-12-01'),
                      (2021, 12, 31, '2021-12-01')]
    for year, month, day, first_of_month in datetime_cases:
        moment = datetime.datetime(year=year, month=month, day=day,
                                   hour=23, minute=45, second=55)
        self.assertEqual(date.floor(moment, granularity=date.Granularity.MONTH),
                         parse(first_of_month).date())
    # date inputs: raw date string -> expected first-of-month date
    date_cases = [('2021-01-01', '2021-01-01'),
                  ('2021-01-31', '2021-01-01'),
                  ('2021-12-01', '2021-12-01'),
                  ('2021-12-31', '2021-12-01')]
    for raw, first_of_month in date_cases:
        self.assertEqual(date.floor(parse(raw), granularity=date.Granularity.MONTH),
                         parse(first_of_month).date())
def test_floor_quarter(self):
    """Flooring to QUARTER granularity snaps to the first day of the (fiscal) quarter."""
    def assert_quarter_start(day_string, quarter_start, fiscal_start=None):
        # Assert that a single date floors to the expected quarter-start date.
        if fiscal_start is None:
            actual = date.floor(parse(day_string), granularity=date.Granularity.QUARTER)
        else:
            actual = date.floor(parse(day_string), granularity=date.Granularity.QUARTER,
                                fiscal_start=fiscal_start)
        self.assertEqual(actual, parse(quarter_start).date())

    # default argument fiscal_start of 1: calendar quarters
    calendar_cases = [
        ('2021-01-01', '2021-01-01'), ('2021-01-31', '2021-01-01'),
        ('2021-02-01', '2021-01-01'), ('2021-02-28', '2021-01-01'),
        ('2021-03-01', '2021-01-01'), ('2021-03-31', '2021-01-01'),
        ('2021-04-01', '2021-04-01'), ('2021-04-30', '2021-04-01'),
        ('2021-05-01', '2021-04-01'), ('2021-05-31', '2021-04-01'),
        ('2021-06-01', '2021-04-01'), ('2021-06-30', '2021-04-01'),
        ('2021-07-01', '2021-07-01'), ('2021-07-31', '2021-07-01'),
        ('2021-08-01', '2021-07-01'), ('2021-08-31', '2021-07-01'),
        ('2021-09-01', '2021-07-01'), ('2021-09-30', '2021-07-01'),
        ('2021-10-01', '2021-10-01'), ('2021-10-31', '2021-10-01'),
        ('2021-11-01', '2021-10-01'), ('2021-11-30', '2021-10-01'),
        ('2021-12-01', '2021-10-01'), ('2021-12-31', '2021-10-01'),
    ]
    for day_string, quarter_start in calendar_cases:
        assert_quarter_start(day_string, quarter_start)

    # fiscal quarter starts in February; a November start produces the
    # same quarter boundaries, so both share one expectation table
    february_cases = [
        ('2021-01-01', '2020-11-01'), ('2021-01-31', '2020-11-01'),
        ('2021-02-01', '2021-02-01'), ('2021-02-28', '2021-02-01'),
        ('2021-03-01', '2021-02-01'), ('2021-03-31', '2021-02-01'),
        ('2021-04-01', '2021-02-01'), ('2021-04-30', '2021-02-01'),
        ('2021-05-01', '2021-05-01'), ('2021-05-31', '2021-05-01'),
        ('2021-06-01', '2021-05-01'), ('2021-06-30', '2021-05-01'),
        ('2021-07-01', '2021-05-01'), ('2021-07-31', '2021-05-01'),
        ('2021-08-01', '2021-08-01'), ('2021-08-31', '2021-08-01'),
        ('2021-09-01', '2021-08-01'), ('2021-09-30', '2021-08-01'),
        ('2021-10-01', '2021-08-01'), ('2021-10-31', '2021-08-01'),
        ('2021-11-01', '2021-11-01'), ('2021-11-30', '2021-11-01'),
        ('2021-12-01', '2021-11-01'), ('2021-12-31', '2021-11-01'),
    ]
    for fiscal_start in (2, 11):
        for day_string, quarter_start in february_cases:
            assert_quarter_start(day_string, quarter_start, fiscal_start=fiscal_start)

    # fiscal quarter starts in June
    june_cases = [
        ('2021-01-01', '2020-12-01'), ('2021-01-31', '2020-12-01'),
        ('2021-02-01', '2020-12-01'), ('2021-02-28', '2020-12-01'),
        ('2021-03-01', '2021-03-01'), ('2021-03-31', '2021-03-01'),
        ('2021-04-01', '2021-03-01'), ('2021-04-30', '2021-03-01'),
        ('2021-05-01', '2021-03-01'), ('2021-05-31', '2021-03-01'),
        ('2021-06-01', '2021-06-01'), ('2021-06-30', '2021-06-01'),
        ('2021-07-01', '2021-06-01'), ('2021-07-31', '2021-06-01'),
        ('2021-08-01', '2021-06-01'), ('2021-08-31', '2021-06-01'),
        ('2021-09-01', '2021-09-01'), ('2021-09-30', '2021-09-01'),
        ('2021-10-01', '2021-09-01'), ('2021-10-31', '2021-09-01'),
        ('2021-11-01', '2021-09-01'), ('2021-11-30', '2021-09-01'),
        ('2021-12-01', '2021-12-01'), ('2021-12-31', '2021-12-01'),
    ]
    for day_string, quarter_start in june_cases:
        assert_quarter_start(day_string, quarter_start, fiscal_start=6)
def test_floor_series(self):
    """date.floor on a pandas Series at DAY/MONTH/QUARTER granularity.

    Each month of 2021 contributes five entries: two timestamps on day 1,
    a missing value, and two timestamps on day 2. Missing values must be
    preserved, and the series name must survive flooring.

    Fixes two copy-paste bugs in the name-propagation section, which
    assigned 'date_month'/'date_quarter' to expected_day instead of
    expected_month/expected_quarter.
    """
    def build_series(entries_for_month):
        # entries_for_month(month) -> the five raw entries for that month
        values = []
        for month in range(1, 13):
            values.extend(entries_for_month(month))
        return pd.Series(pd.to_datetime(values))

    # np.nan (np.NaN was removed in NumPy 2.0) becomes NaT via pd.to_datetime
    date_series = build_series(lambda m: [
        f'2021-{m:02d}-01 00:00:00', f'2021-{m:02d}-01 00:00:01',
        np.nan,
        f'2021-{m:02d}-02 00:00:01', f'2021-{m:02d}-02 23:59:59',
    ])
    expected_day = build_series(lambda m: [
        f'2021-{m:02d}-01', f'2021-{m:02d}-01',
        np.nan,
        f'2021-{m:02d}-02', f'2021-{m:02d}-02',
    ])
    expected_month = build_series(
        lambda m: [f'2021-{m:02d}-01'] * 2 + [np.nan] + [f'2021-{m:02d}-01'] * 2)
    # calendar-quarter start month for month m is 1, 4, 7 or 10
    expected_quarter = build_series(
        lambda m: [f'2021-{((m - 1) // 3) * 3 + 1:02d}-01'] * 2
                  + [np.nan] + [f'2021-{((m - 1) // 3) * 3 + 1:02d}-01'] * 2)

    # without series.name
    validation.assert_dataframes_match([
        pd.DataFrame(date_series.dt.date),
        pd.DataFrame(expected_day.dt.date),
        pd.DataFrame(date.floor(date_series, granularity=date.Granularity.DAY))
    ])
    validation.assert_dataframes_match([
        pd.DataFrame(expected_month.dt.date),
        pd.DataFrame(date.floor(date_series, granularity=date.Granularity.MONTH))
    ])
    validation.assert_dataframes_match([
        pd.DataFrame(expected_quarter.dt.date),
        pd.DataFrame(date.floor(date_series, granularity=date.Granularity.QUARTER))
    ])

    # with series.name: the floored series must carry the input's name
    date_series.name = 'date_day'
    expected_day.name = 'date_day'
    actual_values = date.floor(date_series, granularity=date.Granularity.DAY)
    self.assertEqual(actual_values.name, 'date_day')
    validation.assert_dataframes_match([
        pd.DataFrame(expected_day.dt.date),
        pd.DataFrame(actual_values)
    ])
    date_series.name = 'date_month'
    expected_month.name = 'date_month'  # bug fix: was assigned to expected_day
    actual_values = date.floor(date_series, granularity=date.Granularity.MONTH)
    self.assertEqual(actual_values.name, 'date_month')
    validation.assert_dataframes_match([
        pd.DataFrame(expected_month.dt.date),
        pd.DataFrame(actual_values)
    ])
    date_series.name = 'date_quarter'
    expected_quarter.name = 'date_quarter'  # bug fix: was assigned to expected_day
    actual_values = date.floor(date_series, granularity=date.Granularity.QUARTER)
    self.assertEqual(actual_values.name, 'date_quarter')
    validation.assert_dataframes_match([
        pd.DataFrame(expected_quarter.dt.date),
        pd.DataFrame(actual_values)
    ])
| 54.605634 | 109 | 0.488651 | 6,510 | 54,278 | 4.007527 | 0.020277 | 0.068995 | 0.133543 | 0.136609 | 0.970907 | 0.952815 | 0.938212 | 0.930392 | 0.923838 | 0.910805 | 0 | 0.248864 | 0.355558 | 54,278 | 993 | 110 | 54.660624 | 0.496984 | 0.02863 | 0 | 0.870727 | 0 | 0 | 0.167856 | 0 | 0 | 0 | 0 | 0 | 0.137821 | 1 | 0.009615 | false | 0 | 0.007479 | 0 | 0.018162 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
88056a80640e60389e2b7980cfb39d86179c62d8 | 16,757 | py | Python | idfy_rest_client/controllers/account_controller.py | dealflowteam/Idfy | fa3918a6c54ea0eedb9146578645b7eb1755b642 | [
"MIT"
] | null | null | null | idfy_rest_client/controllers/account_controller.py | dealflowteam/Idfy | fa3918a6c54ea0eedb9146578645b7eb1755b642 | [
"MIT"
] | null | null | null | idfy_rest_client/controllers/account_controller.py | dealflowteam/Idfy | fa3918a6c54ea0eedb9146578645b7eb1755b642 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
idfy_rest_client.controllers.account_controller
This file was automatically generated for Idfy by APIMATIC v2.0 ( https://apimatic.io ).
"""
from .base_controller import BaseController
from ..api_helper import APIHelper
from ..configuration import Configuration
from ..http.auth.o_auth_2 import OAuth2
from ..models.account_name_item import AccountNameItem
from ..models.account import Account
from ..models.account_list_item import AccountListItem
from ..exceptions.api_exception import APIException
class AccountController(BaseController):
"""A Controller to access Endpoints in the idfy_rest_client API."""
def list_account_names(self):
    """Does a GET request to /admin/account/list/names.

    List names of accounts you have access to.

    Returns:
        list of AccountNameItem: Response from the API. OK

    Raises:
        APIException: When an error occurs while fetching the data from
            the remote API. This exception includes the HTTP Response
            code, an error message, and the HTTP body that was received
            in the request.

    """
    # Build the full endpoint URL
    _query_url = APIHelper.clean_url(
        Configuration.get_base_uri() + '/admin/account/list/names')
    # Execute a GET requesting a JSON response
    _request = self.http_client.get(_query_url, headers={'accept': 'application/json'})
    _context = self.execute_request(_request)
    self.validate_response(_context)
    # Deserialize the JSON body into AccountNameItem models
    return APIHelper.json_deserialize(_context.response.raw_body,
                                      AccountNameItem.from_dictionary)
def disable_account(self):
"""Does a POST request to /admin/account/disable.
Set the account as incative / disabled. Requires one of the following
scopes: [root, account-write, dealer]
Returns:
mixed: Response from the API. Ok
Raises:
APIException: When an error occurs while fetching the data from
the remote API. This exception includes the HTTP Response
code, an error message, and the HTTP body that was received in
the request.
"""
# Prepare query URL
_query_builder = Configuration.get_base_uri()
_query_builder += '/admin/account/disable'
_query_url = APIHelper.clean_url(_query_builder)
# Prepare headers
_headers = {
'accept': 'application/json'
}
# Prepare and execute request
_request = self.http_client.post(_query_url, headers=_headers)
OAuth2.apply(_request)
_context = self.execute_request(_request)
# Endpoint and global error handling using HTTP status codes.
if _context.response.status_code == 400:
raise APIException('Bad request', _context)
elif _context.response.status_code == 403:
raise APIException('Forbidden (Access denied)', _context)
elif _context.response.status_code == 500:
raise APIException('Internal server error', _context)
self.validate_response(_context)
# Return appropriate type
return APIHelper.json_deserialize(_context.response.raw_body)
def update_account_styling(self,
styling):
"""Does a POST request to /admin/account/styling.
Upload / Update custom account css. Returns a url with your uploaded
css. Requires one of the following scopes: [root, account-write,
dealer]
Args:
styling (Styling): TODO: type description here. Example:
Returns:
mixed: Response from the API. Ok
Raises:
APIException: When an error occurs while fetching the data from
the remote API. This exception includes the HTTP Response
code, an error message, and the HTTP body that was received in
the request.
"""
# Validate required parameters
self.validate_parameters(styling=styling)
# Prepare query URL
_query_builder = Configuration.get_base_uri()
_query_builder += '/admin/account/styling'
_query_url = APIHelper.clean_url(_query_builder)
# Prepare headers
_headers = {
'accept': 'application/json',
'content-type': 'application/json; charset=utf-8'
}
# Prepare and execute request
_request = self.http_client.post(_query_url, headers=_headers, parameters=APIHelper.json_serialize(styling))
OAuth2.apply(_request)
_context = self.execute_request(_request)
# Endpoint and global error handling using HTTP status codes.
if _context.response.status_code == 400:
raise APIException('Bad request', _context)
elif _context.response.status_code == 403:
raise APIException('Forbidden (Access denied)', _context)
elif _context.response.status_code == 500:
raise APIException('Internal server error', _context)
self.validate_response(_context)
# Return appropriate type
return APIHelper.json_deserialize(_context.response.raw_body)
def update_account_logo(self,
account_logo):
"""Does a POST request to /admin/account/logo.
Upload / Update and resize account logo. Returns a url with your
uploaded / resized logo. Requires one of the following scopes: [root,
account-write, dealer]
Args:
account_logo (AccountLogo): TODO: type description here. Example:
Returns:
string: Response from the API. Ok
Raises:
APIException: When an error occurs while fetching the data from
the remote API. This exception includes the HTTP Response
code, an error message, and the HTTP body that was received in
the request.
"""
# Validate required parameters
self.validate_parameters(account_logo=account_logo)
# Prepare query URL
_query_builder = Configuration.get_base_uri()
_query_builder += '/admin/account/logo'
_query_url = APIHelper.clean_url(_query_builder)
# Prepare headers
_headers = {
'content-type': 'application/json; charset=utf-8'
}
# Prepare and execute request
_request = self.http_client.post(_query_url, headers=_headers, parameters=APIHelper.json_serialize(account_logo))
OAuth2.apply(_request)
_context = self.execute_request(_request)
# Endpoint and global error handling using HTTP status codes.
if _context.response.status_code == 400:
raise APIException('Bad request', _context)
elif _context.response.status_code == 403:
raise APIException('Forbidden (Access denied)', _context)
elif _context.response.status_code == 500:
raise APIException('Internal server error', _context)
self.validate_response(_context)
# Return appropriate type
return _context.response.raw_body
def create_account(self,
account_details):
"""Does a POST request to /admin/account.
Requires one of the following scopes: [dealer]
Args:
account_details (CreateAccountRequest): TODO: type description
here. Example:
Returns:
Account: Response from the API. Ok
Raises:
APIException: When an error occurs while fetching the data from
the remote API. This exception includes the HTTP Response
code, an error message, and the HTTP body that was received in
the request.
"""
# Validate required parameters
self.validate_parameters(account_details=account_details)
# Prepare query URL
_query_builder = Configuration.get_base_uri()
_query_builder += '/admin/account'
_query_url = APIHelper.clean_url(_query_builder)
# Prepare headers
_headers = {
'accept': 'application/json',
'content-type': 'application/json; charset=utf-8'
}
# Prepare and execute request
_request = self.http_client.post(_query_url, headers=_headers, parameters=APIHelper.json_serialize(account_details))
_context = self.execute_request(_request)
# Endpoint and global error handling using HTTP status codes.
if _context.response.status_code == 400:
raise APIException('Bad request', _context)
elif _context.response.status_code == 403:
raise APIException('Forbidden (Access denied)', _context)
elif _context.response.status_code == 500:
raise APIException('Internal server error', _context)
self.validate_response(_context)
# Return appropriate type
return APIHelper.json_deserialize(_context.response.raw_body, Account.from_dictionary)
def update_account(self,
account_details):
"""Does a PUT request to /admin/account.
Requires one of the following scopes: [root, account-write, dealer]
Args:
account_details (UpdateAccountRequest): TODO: type description
here. Example:
Returns:
Account: Response from the API. Ok
Raises:
APIException: When an error occurs while fetching the data from
the remote API. This exception includes the HTTP Response
code, an error message, and the HTTP body that was received in
the request.
"""
# Validate required parameters
self.validate_parameters(account_details=account_details)
# Prepare query URL
_query_builder = Configuration.get_base_uri()
_query_builder += '/admin/account'
_query_url = APIHelper.clean_url(_query_builder)
# Prepare headers
_headers = {
'accept': 'application/json',
'content-type': 'application/json; charset=utf-8'
}
# Prepare and execute request
_request = self.http_client.put(_query_url, headers=_headers, parameters=APIHelper.json_serialize(account_details))
OAuth2.apply(_request)
_context = self.execute_request(_request)
# Endpoint and global error handling using HTTP status codes.
if _context.response.status_code == 400:
raise APIException('Bad request', _context)
elif _context.response.status_code == 403:
raise APIException('Forbidden (Access denied)', _context)
elif _context.response.status_code == 500:
raise APIException('Internal server error', _context)
self.validate_response(_context)
# Return appropriate type
return APIHelper.json_deserialize(_context.response.raw_body, Account.from_dictionary)
def retrieve_account(self):
"""Does a GET request to /admin/account.
Requires one of the following scopes: [root, account-read, dealer]
Returns:
Account: Response from the API. Ok
Raises:
APIException: When an error occurs while fetching the data from
the remote API. This exception includes the HTTP Response
code, an error message, and the HTTP body that was received in
the request.
"""
# Prepare query URL
_query_builder = Configuration.get_base_uri()
_query_builder += '/admin/account'
_query_url = APIHelper.clean_url(_query_builder)
# Prepare headers
_headers = {
'accept': 'application/json'
}
# Prepare and execute request
_request = self.http_client.get(_query_url, headers=_headers)
OAuth2.apply(_request)
_context = self.execute_request(_request)
# Endpoint and global error handling using HTTP status codes.
if _context.response.status_code == 400:
raise APIException('Bad request', _context)
elif _context.response.status_code == 403:
raise APIException('Forbidden (Access denied)', _context)
elif _context.response.status_code == 500:
raise APIException('Internal server error', _context)
self.validate_response(_context)
# Return appropriate type
return APIHelper.json_deserialize(_context.response.raw_body, Account.from_dictionary)
def list_accounts(self,
filter_name=None,
filter_org_no=None,
filter_uni_customer_no=None,
filter_created_before=None,
filter_created_after=None,
filter_last_modified_before=None,
filter_last_modified_after=None,
filter_dealer_name=None,
filter_dealer_reference=None,
filter_enabled=None):
"""Does a GET request to /admin/account/list.
List accounts you have access to
Args:
filter_name (string, optional): TODO: type description here.
Example:
filter_org_no (string, optional): TODO: type description here.
Example:
filter_uni_customer_no (string, optional): TODO: type description
here. Example:
filter_created_before (datetime, optional): TODO: type description
here. Example:
filter_created_after (datetime, optional): TODO: type description
here. Example:
filter_last_modified_before (datetime, optional): TODO: type
description here. Example:
filter_last_modified_after (datetime, optional): TODO: type
description here. Example:
filter_dealer_name (string, optional): TODO: type description
here. Example:
filter_dealer_reference (string, optional): TODO: type description
here. Example:
filter_enabled (bool, optional): TODO: type description here.
Example:
Returns:
list of AccountListItem: Response from the API. OK
Raises:
APIException: When an error occurs while fetching the data from
the remote API. This exception includes the HTTP Response
code, an error message, and the HTTP body that was received in
the request.
"""
# Prepare query URL
_query_builder = Configuration.get_base_uri()
_query_builder += '/admin/account/list'
_query_parameters = {
'filter.name': filter_name,
'filter.orgNo': filter_org_no,
'filter.uniCustomerNo': filter_uni_customer_no,
'filter.createdBefore': APIHelper.RFC3339DateTime(filter_created_before),
'filter.createdAfter': APIHelper.RFC3339DateTime(filter_created_after),
'filter.lastModifiedBefore': APIHelper.RFC3339DateTime(filter_last_modified_before),
'filter.lastModifiedAfter': APIHelper.RFC3339DateTime(filter_last_modified_after),
'filter.dealerName': filter_dealer_name,
'filter.dealerReference': filter_dealer_reference,
'filter.enabled': filter_enabled
}
_query_builder = APIHelper.append_url_with_query_parameters(_query_builder,
_query_parameters, Configuration.array_serialization)
_query_url = APIHelper.clean_url(_query_builder)
# Prepare headers
_headers = {
'accept': 'application/json'
}
# Prepare and execute request
_request = self.http_client.get(_query_url, headers=_headers)
_context = self.execute_request(_request)
self.validate_response(_context)
# Return appropriate type
return APIHelper.json_deserialize(_context.response.raw_body, AccountListItem.from_dictionary)
| 38.87935 | 125 | 0.621293 | 1,732 | 16,757 | 5.787529 | 0.1097 | 0.031125 | 0.03771 | 0.044892 | 0.820531 | 0.805068 | 0.783021 | 0.773144 | 0.757382 | 0.723663 | 0 | 0.00727 | 0.310497 | 16,757 | 430 | 126 | 38.969767 | 0.860308 | 0.356925 | 0 | 0.618497 | 1 | 0 | 0.107002 | 0.014965 | 0 | 0 | 0 | 0.032558 | 0 | 1 | 0.046243 | false | 0 | 0.046243 | 0 | 0.144509 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
7167a5d8a852aeb8eb9a165c049b451993689e9a | 19,353 | py | Python | tests/numba/rnnt_loss/utils/test_gpu_rnnt_kernel.py | titu1994/warprnnt_numba | b1bc81e02dfb05143c3d55ac7b50c8131e85b194 | [
"MIT"
] | 7 | 2022-01-25T22:05:54.000Z | 2022-02-23T09:01:32.000Z | tests/numba/rnnt_loss/utils/test_gpu_rnnt_kernel.py | titu1994/warprnnt_numba | b1bc81e02dfb05143c3d55ac7b50c8131e85b194 | [
"MIT"
] | null | null | null | tests/numba/rnnt_loss/utils/test_gpu_rnnt_kernel.py | titu1994/warprnnt_numba | b1bc81e02dfb05143c3d55ac7b50c8131e85b194 | [
"MIT"
] | 1 | 2022-01-24T13:12:11.000Z | 2022-01-24T13:12:11.000Z | # Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
import pytest
import torch
from numba import cuda
from warprnnt_numba.rnnt_loss import rnnt_numpy
from warprnnt_numba.rnnt_loss.rnnt_pytorch import certify_inputs
from warprnnt_numba.rnnt_loss.utils.cuda_utils import gpu_rnnt_kernel, reduce
from warprnnt_numba import numba_utils
from warprnnt_numba.numba_utils import __NUMBA_MINIMUM_VERSION__
def log_softmax(x, axis=-1):
x = torch.from_numpy(x) # zero-copy
x = torch.log_softmax(x, dim=axis)
x = x.numpy()
return x
class TestRNNTCUDAKernels:
@pytest.mark.skipif(not cuda.is_available(), reason="CUDA Reductions can only be run when CUDA is available")
def test_compute_alphas_kernel(self):
numba_utils.skip_numba_cuda_test_if_unsupported(__NUMBA_MINIMUM_VERSION__)
random = np.random.RandomState(0)
original_shape = [1, 5, 11, 3]
B, T, U, V = original_shape
# Numpy kernel
x = random.randn(*original_shape)
labels = np.array([[1, 1, 1, 2, 2, 2, 1, 2, 2, 1]]) # [1, 10]
label_len = len(labels[0]) + 1
blank_idx = 0
x_np = log_softmax(x, axis=-1)
ground_alphas, ground_log_likelihood = rnnt_numpy.forward_pass(
x_np[0, :, :label_len, :], labels[0, : label_len - 1], blank_idx
)
# Pytorch kernel
device = torch.device('cuda')
if hasattr(cuda, 'external_stream'):
stream = cuda.external_stream(torch.cuda.current_stream(device).cuda_stream)
else:
stream = cuda.default_stream()
x_c = torch.tensor(x, device=device, dtype=torch.float32)
labels_c = torch.tensor(labels, device=device, dtype=torch.int32)
# Allocate workspace memory
denom = torch.zeros(B * T * U, device=device, dtype=x_c.dtype)
alphas = torch.zeros(B * T * U, device=device, dtype=x_c.dtype)
llForward = torch.zeros(B, device=device, dtype=x_c.dtype)
input_lengths = torch.tensor([T], dtype=torch.int32, device=device)
label_lengths = torch.tensor([len(labels[0])], dtype=torch.int32, device=device)
# certify input data
certify_inputs(x_c, labels_c, input_lengths, label_lengths)
# flatten activation tensor (for pointer based indexing)
x_c = x_c.view([-1])
# call kernel
# log softmax reduction
reduce.reduce_max(x_c, denom, rows=V, cols=B * T * U, minus=False, stream=stream)
reduce.reduce_exp(x_c, denom, rows=V, cols=B * T * U, minus=True, stream=stream)
# alpha kernel
gpu_rnnt_kernel.compute_alphas_kernel[B, U, stream, 0](
x_c, denom, alphas, llForward, input_lengths, label_lengths, labels_c, B, T, U, V, blank_idx,
)
# sync kernel
stream.synchronize()
# reshape alphas
alphas = alphas.view([B, T, U])
diff = ground_alphas - alphas[0].cpu().numpy()
assert np.abs(diff).mean() <= 1e-5
assert np.square(diff).mean() <= 1e-10
ll_diff = ground_log_likelihood - llForward[0].cpu().numpy()
assert np.abs(ll_diff).mean() <= 1e-5
assert np.square(ll_diff).mean() <= 1e-10
@pytest.mark.skipif(not cuda.is_available(), reason="CUDA Reductions can only be run when CUDA is available")
def test_compute_betas_kernel(self):
numba_utils.skip_numba_cuda_test_if_unsupported(__NUMBA_MINIMUM_VERSION__)
random = np.random.RandomState(0)
original_shape = [1, 5, 11, 3]
B, T, U, V = original_shape
# Numpy kernel
x = random.randn(*original_shape)
labels = np.array([[1, 1, 1, 2, 2, 2, 1, 2, 2, 1]]) # [1, 10]
label_len = len(labels[0]) + 1
blank_idx = 0
x_np = log_softmax(x, axis=-1)
ground_alphas, ground_log_likelihood = rnnt_numpy.backward_pass(
x_np[0, :, :label_len, :], labels[0, : label_len - 1], blank_idx
)
# Pytorch kernel
device = torch.device('cuda')
if hasattr(cuda, 'external_stream'):
stream = cuda.external_stream(torch.cuda.current_stream(device).cuda_stream)
else:
stream = cuda.default_stream()
x_c = torch.tensor(x, device=device, dtype=torch.float32)
labels_c = torch.tensor(labels, device=device, dtype=torch.int32)
# Allocate workspace memory
denom = torch.zeros(B * T * U, device=device, dtype=x_c.dtype)
betas = torch.zeros(B * T * U, device=device, dtype=x_c.dtype)
llBackward = torch.zeros(B, device=device, dtype=x_c.dtype)
input_lengths = torch.tensor([T], dtype=torch.int32, device=device)
label_lengths = torch.tensor([len(labels[0])], dtype=torch.int32, device=device)
# certify input data
certify_inputs(x_c, labels_c, input_lengths, label_lengths)
# flatten activation tensor (for pointer based indexing)
x_c = x_c.view([-1])
# call kernel
# log softmax reduction
reduce.reduce_max(x_c, denom, rows=V, cols=B * T * U, minus=False, stream=stream)
reduce.reduce_exp(x_c, denom, rows=V, cols=B * T * U, minus=True, stream=stream)
# beta kernel
gpu_rnnt_kernel.compute_betas_kernel[B, U, stream, 0](
x_c, denom, betas, llBackward, input_lengths, label_lengths, labels_c, B, T, U, V, blank_idx,
)
# sync kernel
stream.synchronize()
# reshape alphas
betas = betas.view([B, T, U])
diff = ground_alphas - betas[0].cpu().numpy()
assert np.abs(diff).mean() <= 1e-5
assert np.square(diff).mean() <= 1e-10
ll_diff = ground_log_likelihood - llBackward[0].cpu().numpy()
assert np.abs(ll_diff).mean() <= 1e-5
assert np.square(ll_diff).mean() <= 1e-10
@pytest.mark.skipif(not cuda.is_available(), reason="CUDA Reductions can only be run when CUDA is available")
@pytest.mark.unit
def test_compute_grads_kernel(self):
numba_utils.skip_numba_cuda_test_if_unsupported(__NUMBA_MINIMUM_VERSION__)
fastemit_lambda = 0.0
clamp = 0.0
random = np.random.RandomState(0)
original_shape = [1, 5, 11, 3]
B, T, U, V = original_shape
# Numpy kernel
x = random.randn(*original_shape)
labels = torch.from_numpy(np.array([[1, 1, 1, 2, 2, 2, 1, 2, 2, 1]], dtype=np.int32)) # [1, 10]
audio_len = torch.from_numpy(np.array([T], dtype=np.int32))
label_len = torch.from_numpy(np.array([U - 1], dtype=np.int32))
blank_idx = 0
x_np = torch.from_numpy(x)
x_np.requires_grad_(True)
"""
Here we will directly utilize the numpy variant of the loss without explicitly calling
the numpy functions for alpha, beta and grads.
This is because the grads returned by the rnnt_numpy.transduce_batch() are :
d/dx (alpha + beta alignment)(log_softmax(x)).
But according to the chain rule, we'd still need to compute the gradient of log_softmax(x)
and update the alignments by hand. Instead, we will rely on pytorch to compute the gradient
of the log_softmax(x) step and propagate it backwards.
"""
loss_func = rnnt_numpy.RNNTLoss(blank_idx, fastemit_lambda=fastemit_lambda, clamp=clamp)
loss_val = loss_func(x_np, labels, audio_len, label_len)
loss_val.sum().backward()
true_grads = x_np.grad
# Pytorch kernel
device = torch.device('cuda')
if hasattr(cuda, 'external_stream'):
stream = cuda.external_stream(torch.cuda.current_stream(device).cuda_stream)
else:
stream = cuda.default_stream()
x_c = torch.tensor(x, device=device, dtype=torch.float32)
labels_c = torch.tensor(labels, device=device, dtype=torch.int32)
# Allocate workspace memory
denom = torch.zeros(B * T * U, device=device, dtype=x_c.dtype)
alphas = torch.zeros(B * T * U, device=device, dtype=x_c.dtype)
betas = torch.zeros(B * T * U, device=device, dtype=x_c.dtype)
llForward = torch.zeros(B, device=device, dtype=x_c.dtype)
llBackward = torch.zeros(B, device=device, dtype=x_c.dtype)
input_lengths = torch.tensor([T], dtype=torch.int32, device=device)
label_lengths = torch.tensor([len(labels[0])], dtype=torch.int32, device=device)
# certify input data
certify_inputs(x_c, labels_c, input_lengths, label_lengths)
# flatten activation tensor (for pointer based indexing)
x_c = x_c.view([-1])
grads = torch.zeros_like(x_c, requires_grad=False)
# call kernel
# log softmax reduction
reduce.reduce_max(x_c, denom, rows=V, cols=B * T * U, minus=False, stream=stream)
reduce.reduce_exp(x_c, denom, rows=V, cols=B * T * U, minus=True, stream=stream)
# alpha kernel
gpu_rnnt_kernel.compute_alphas_kernel[B, U, stream, 0](
x_c, denom, alphas, llForward, input_lengths, label_lengths, labels_c, B, T, U, V, blank_idx,
)
# beta kernel
gpu_rnnt_kernel.compute_betas_kernel[B, U, stream, 0](
x_c, denom, betas, llBackward, input_lengths, label_lengths, labels_c, B, T, U, V, blank_idx,
)
# gamma kernel
grad_blocks_per_grid = B * T * U
grad_threads_per_block = gpu_rnnt_kernel.GPU_RNNT_THREAD_SIZE
gpu_rnnt_kernel.compute_grad_kernel[grad_blocks_per_grid, grad_threads_per_block, stream, 0](
grads,
x_c,
denom,
alphas,
betas,
llForward,
input_lengths,
label_lengths,
labels_c,
B,
T,
U,
V,
blank_idx,
fastemit_lambda,
clamp,
)
# sync kernel
stream.synchronize()
# reshape grads
grads = grads.view([B, T, U, V])
diff = true_grads - grads[0].cpu().numpy()
assert np.abs(diff).mean() <= 1e-5
assert np.square(diff).mean() <= 1e-10
@pytest.mark.skipif(not cuda.is_available(), reason="CUDA Reductions can only be run when CUDA is available")
@pytest.mark.unit
def test_compute_grads_kernel_fastemit(self):
numba_utils.skip_numba_cuda_test_if_unsupported(__NUMBA_MINIMUM_VERSION__)
fastemit_lambda = 0.001
clamp = 0.0
random = np.random.RandomState(0)
original_shape = [1, 5, 11, 3]
B, T, U, V = original_shape
# Numpy kernel
x = random.randn(*original_shape)
labels = torch.from_numpy(np.array([[1, 1, 1, 2, 2, 2, 1, 2, 2, 1]], dtype=np.int32)) # [1, 10]
audio_len = torch.from_numpy(np.array([T], dtype=np.int32))
label_len = torch.from_numpy(np.array([U - 1], dtype=np.int32))
blank_idx = 0
x_np = torch.from_numpy(x)
x_np.requires_grad_(True)
"""
Here we will directly utilize the numpy variant of the loss without explicitly calling
the numpy functions for alpha, beta and grads.
This is because the grads returned by the rnnt_numpy.transduce_batch() are :
d/dx (alpha + beta alignment)(log_softmax(x)).
But according to the chain rule, we'd still need to compute the gradient of log_softmax(x)
and update the alignments by hand. Instead, we will rely on pytorch to compute the gradient
of the log_softmax(x) step and propagate it backwards.
"""
loss_func = rnnt_numpy.RNNTLoss(blank_idx, fastemit_lambda=fastemit_lambda, clamp=clamp)
loss_val = loss_func(x_np, labels, audio_len, label_len)
loss_val.sum().backward()
true_grads = x_np.grad
# Pytorch kernel
device = torch.device('cuda')
if hasattr(cuda, 'external_stream'):
stream = cuda.external_stream(torch.cuda.current_stream(device).cuda_stream)
else:
stream = cuda.default_stream()
x_c = torch.tensor(x, device=device, dtype=torch.float32)
labels_c = torch.tensor(labels, device=device, dtype=torch.int32)
# Allocate workspace memory
denom = torch.zeros(B * T * U, device=device, dtype=x_c.dtype)
alphas = torch.zeros(B * T * U, device=device, dtype=x_c.dtype)
betas = torch.zeros(B * T * U, device=device, dtype=x_c.dtype)
llForward = torch.zeros(B, device=device, dtype=x_c.dtype)
llBackward = torch.zeros(B, device=device, dtype=x_c.dtype)
input_lengths = torch.tensor([T], dtype=torch.int32, device=device)
label_lengths = torch.tensor([len(labels[0])], dtype=torch.int32, device=device)
# certify input data
certify_inputs(x_c, labels_c, input_lengths, label_lengths)
# flatten activation tensor (for pointer based indexing)
x_c = x_c.view([-1])
grads = torch.zeros_like(x_c, requires_grad=False)
# call kernel
# log softmax reduction
reduce.reduce_max(x_c, denom, rows=V, cols=B * T * U, minus=False, stream=stream)
reduce.reduce_exp(x_c, denom, rows=V, cols=B * T * U, minus=True, stream=stream)
# alpha kernel
gpu_rnnt_kernel.compute_alphas_kernel[B, U, stream, 0](
x_c, denom, alphas, llForward, input_lengths, label_lengths, labels_c, B, T, U, V, blank_idx,
)
# beta kernel
gpu_rnnt_kernel.compute_betas_kernel[B, U, stream, 0](
x_c, denom, betas, llBackward, input_lengths, label_lengths, labels_c, B, T, U, V, blank_idx,
)
# gamma kernel
grad_blocks_per_grid = B * T * U
grad_threads_per_block = gpu_rnnt_kernel.GPU_RNNT_THREAD_SIZE
gpu_rnnt_kernel.compute_grad_kernel[grad_blocks_per_grid, grad_threads_per_block, stream, 0](
grads,
x_c,
denom,
alphas,
betas,
llForward,
input_lengths,
label_lengths,
labels_c,
B,
T,
U,
V,
blank_idx,
fastemit_lambda,
clamp,
)
# sync kernel
stream.synchronize()
# reshape grads
grads = grads.view([B, T, U, V])
diff = true_grads - grads[0].cpu().numpy()
assert np.abs(diff).mean() <= 1e-5
assert np.square(diff).mean() <= 1e-10
@pytest.mark.skipif(not cuda.is_available(), reason="CUDA Reductions can only be run when CUDA is available")
@pytest.mark.unit
def test_compute_grads_kernel_clamp(self):
numba_utils.skip_numba_cuda_test_if_unsupported(__NUMBA_MINIMUM_VERSION__)
fastemit_lambda = 0.0
clamp = 0.1
random = np.random.RandomState(0)
original_shape = [1, 5, 11, 3]
B, T, U, V = original_shape
# Numpy kernel
x = random.randn(*original_shape)
labels = torch.from_numpy(np.array([[1, 1, 1, 2, 2, 2, 1, 2, 2, 1]], dtype=np.int32)) # [1, 10]
audio_len = torch.from_numpy(np.array([T], dtype=np.int32))
label_len = torch.from_numpy(np.array([U - 1], dtype=np.int32))
blank_idx = 0
x_np = torch.from_numpy(x)
x_np.requires_grad_(True)
"""
Here we will directly utilize the numpy variant of the loss without explicitly calling
the numpy functions for alpha, beta and grads.
This is because the grads returned by the rnnt_numpy.transduce_batch() are :
d/dx (alpha + beta alignment)(log_softmax(x)).
But according to the chain rule, we'd still need to compute the gradient of log_softmax(x)
and update the alignments by hand. Instead, we will rely on pytorch to compute the gradient
of the log_softmax(x) step and propagate it backwards.
"""
loss_func = rnnt_numpy.RNNTLoss(blank_idx, fastemit_lambda=fastemit_lambda, clamp=clamp)
loss_val = loss_func(x_np, labels, audio_len, label_len)
loss_val.sum().backward()
true_grads = x_np.grad
# Pytorch kernel
device = torch.device('cuda')
if hasattr(cuda, 'external_stream'):
stream = cuda.external_stream(torch.cuda.current_stream(device).cuda_stream)
else:
stream = cuda.default_stream()
x_c = torch.tensor(x, device=device, dtype=torch.float32)
labels_c = torch.tensor(labels, device=device, dtype=torch.int32)
# Allocate workspace memory
denom = torch.zeros(B * T * U, device=device, dtype=x_c.dtype)
alphas = torch.zeros(B * T * U, device=device, dtype=x_c.dtype)
betas = torch.zeros(B * T * U, device=device, dtype=x_c.dtype)
llForward = torch.zeros(B, device=device, dtype=x_c.dtype)
llBackward = torch.zeros(B, device=device, dtype=x_c.dtype)
input_lengths = torch.tensor([T], dtype=torch.int32, device=device)
label_lengths = torch.tensor([len(labels[0])], dtype=torch.int32, device=device)
# certify input data
certify_inputs(x_c, labels_c, input_lengths, label_lengths)
# flatten activation tensor (for pointer based indexing)
x_c = x_c.view([-1])
grads = torch.zeros_like(x_c, requires_grad=False)
# call kernel
# log softmax reduction
reduce.reduce_max(x_c, denom, rows=V, cols=B * T * U, minus=False, stream=stream)
reduce.reduce_exp(x_c, denom, rows=V, cols=B * T * U, minus=True, stream=stream)
# alpha kernel
gpu_rnnt_kernel.compute_alphas_kernel[B, U, stream, 0](
x_c, denom, alphas, llForward, input_lengths, label_lengths, labels_c, B, T, U, V, blank_idx,
)
# beta kernel
gpu_rnnt_kernel.compute_betas_kernel[B, U, stream, 0](
x_c, denom, betas, llBackward, input_lengths, label_lengths, labels_c, B, T, U, V, blank_idx,
)
# gamma kernel
grad_blocks_per_grid = B * T * U
grad_threads_per_block = gpu_rnnt_kernel.GPU_RNNT_THREAD_SIZE
gpu_rnnt_kernel.compute_grad_kernel[grad_blocks_per_grid, grad_threads_per_block, stream, 0](
grads,
x_c,
denom,
alphas,
betas,
llForward,
input_lengths,
label_lengths,
labels_c,
B,
T,
U,
V,
blank_idx,
fastemit_lambda,
clamp,
)
# sync kernel
stream.synchronize()
# reshape grads
grads = grads.view([B, T, U, V])
diff = true_grads - grads[0].cpu().numpy()
assert np.abs(diff).mean() <= 1e-5
assert np.square(diff).mean() <= 1e-10
| 38.861446 | 113 | 0.624296 | 2,706 | 19,353 | 4.266814 | 0.0898 | 0.011259 | 0.012212 | 0.032739 | 0.924303 | 0.916421 | 0.916421 | 0.912437 | 0.912437 | 0.912437 | 0 | 0.018291 | 0.26833 | 19,353 | 497 | 114 | 38.939638 | 0.797105 | 0.088565 | 0 | 0.859935 | 0 | 0 | 0.023044 | 0 | 0 | 0 | 0 | 0 | 0.045603 | 1 | 0.019544 | false | 0.006515 | 0.029316 | 0 | 0.055375 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
716ede88e1305b5b73016e01e4c99b16f0dda6e2 | 10,386 | py | Python | src/common/default/lib_default_conventions.py | c-hydro/hyde | 3a3ff92d442077ce353b071d5afe726fc5465201 | [
"MIT"
] | null | null | null | src/common/default/lib_default_conventions.py | c-hydro/hyde | 3a3ff92d442077ce353b071d5afe726fc5465201 | [
"MIT"
] | 18 | 2020-04-07T16:34:59.000Z | 2021-07-02T07:32:39.000Z | src/common/default/lib_default_conventions.py | c-hydro/fp-hyde | b0728397522aceebec3e7ff115aff160a10efede | [
"MIT"
] | null | null | null | """
Library Features:
Name: lib_default_conventions
Author(s): Fabio Delogu (fabio.delogu@cimafoundation.org)
Date: '20201102'
Version: '1.1.0'
"""
#######################################################################################
# Library
# Nothing to do here
#######################################################################################
# -------------------------------------------------------------------------------------
# Default file-level conventions: CF global attributes, coordinate reference
# system definition and georeference placeholders for output datasets.
conventions_file = {
    # Global (file-wide) metadata attributes
    'general': {
        'Conventions': 'CF-1.7',
        'title': '',
        'institution': 'CIMA Research Foundation - www.cimafoundation.org',
        'web-site': '',
        'source': '',
        'history': '',
        'references': 'http://cf-pcmdi.llnl.gov/ ; http://cf-pcmdi.llnl.gov/documents/cf-standard-names/',
        'comment': 'Author(s): Fabio Delogu',
        'email': 'fabio.delogu@cimafoundation.org',
        'project-info': '',
        'algorithm': '',
    },
    # WGS84 geographic lat/lon grid mapping (EPSG:4326)
    'geosystem': {
        'epsg_code': 4326,
        'grid_mapping_name': 'latitude_longitude',
        'longitude_of_prime_meridian': 0.0,
        'semi_major_axis': 6378137.0,
        'inverse_flattening': 298.257223563,
    },
    # Grid georeference placeholders (expected to be filled at runtime)
    'georeference': {
        'bounding_box': None,
        'ncols': None,
        'nrows': None,
        'xllcorner': None,
        'yllcorner': None,
        'cellsize': None,
        'nodata_value': None,
    },
}
# -------------------------------------------------------------------------------------
# -------------------------------------------------------------------------------------
# Default variable-level conventions: CF-style attribute templates for the
# time coordinate, terrain field, geographic coordinates and generic
# 2D/3D/4D data variables.
conventions_var = {
    # Time coordinate attributes (calendar is expected to be set at runtime)
    'time': {
        'long_name': 'time',
        'units': 'days since 1990-1-1 0:0:0',
        'calendar': None,
    },
    # Static terrain / altitude field
    'terrain': {
        'long_name': 'geometric height',
        'standard_name': 'altitude',
        'grid_mapping': 'crs',
        'dims': {'X': 'west_east', 'Y': 'south_north'},
        'coordinates': 'longitude latitude',
        'cell_method': '',
        'units': 'm',
        'positive': 'up',
        'pressure_level': '',
        'ScaleFactor': 1,
        'Format': 'f4',
        'Missing_value': -9999.0,
        '_FillValue': None,
        'Valid_range': '',
        'description': '',
    },
    # Longitude coordinate variable (WGS84, degrees east)
    'longitude': {
        'long_name': 'longitude coordinate',
        'standard_name': 'longitude',
        'grid_mapping': '',
        'dims': {'X': 'west_east', 'Y': 'south_north'},
        'coordinates': '',
        'cell_method': '',
        'coordinate_reference_frame': 'urn:ogc:crs:EPSG::4326',
        'reference': 'WGS84',
        'units': 'degrees_east',
        'pressure_level': '',
        'ScaleFactor': 1,
        'Format': 'f4',
        'Missing_value': -9999.0,
        '_FillValue': None,
        'Valid_range': '-180, 180',
        'description': '',
    },
    # Latitude coordinate variable (WGS84, degrees north)
    'latitude': {
        'long_name': 'latitude coordinate',
        'standard_name': 'latitude',
        'grid_mapping': '',
        'dims': {'X': 'west_east', 'Y': 'south_north'},
        'coordinates': '',
        'cell_method': '',
        'coordinate_reference_frame': 'urn:ogc:crs:EPSG::4326',
        'reference': 'WGS84',
        'units': 'degrees_north',
        'pressure_level': '',
        'ScaleFactor': 1,
        'Format': 'f4',
        'Missing_value': -9999.0,
        '_FillValue': None,
        'Valid_range': '-90, 90',
        'description': '',
    },
    # Generic 2D variable template (Y, X)
    'var2d': {
        'long_name': '',
        'standard_name': '',
        'grid_mapping': 'crs',
        'dims': {'X': 'west_east', 'Y': 'south_north'},
        'coordinates': 'longitude latitude',
        'ancillary_variables': '',
        'cell_method': '',
        'units': '',
        'pressure_level': '',
        'ScaleFactor': 1,
        'Format': 'f4',
        'Missing_value': -9999.0,
        '_FillValue': None,
        'Valid_range': '',
        'flag_masks': '',
        'flag_values': '',
        'flag_meanings': '',
        'colormap': '',
        'description': '',
    },
    # Generic 3D variable template (time, Y, X)
    'var3d': {
        'long_name': '',
        'standard_name': '',
        'grid_mapping': 'crs',
        'dims': {'X': 'west_east', 'Y': 'south_north', 'Time': 'time'},
        'coordinates': 'longitude latitude',
        'ancillary_variables': '',
        'cell_method': '',
        'units': '',
        'pressure_level': '',
        'ScaleFactor': 1,
        'Format': 'f4',
        'Missing_value': -9999.0,
        '_FillValue': None,
        'Valid_range': '',
        'flag_masks': '',
        'flag_values': '',
        'flag_meanings': '',
        'colormap': '',
        'description': '',
    },
    # Generic 4D variable template (time, pressure level, Y, X)
    'var4d': {
        'long_name': '',
        'standard_name': '',
        'grid_mapping': 'crs',
        'dims': {'level': 'pressure', 'X': 'west_east', 'Y': 'south_north', 'Time': 'time'},
        'coordinates': 'longitude latitude',
        'ancillary_variables': '',
        'cell_method': '',
        'units': '',
        'pressure_level': '',
        'ScaleFactor': 1,
        'Format': 'f4',
        'Missing_value': -9999.0,
        '_FillValue': None,
        'Valid_range': '',
        'flag_masks': '',
        'flag_values': '',
        'flag_meanings': '',
        'colormap': '',
        'description': '',
    },
}
# -------------------------------------------------------------------------------------
# -------------------------------------------------------------------------------------
# Set default definition of file.
# NOTE(review): this appears to be a byte-for-byte duplicate of the
# conventions_file dictionary defined earlier in this module — confirm
# whether one of the two can be retired before consolidating.
oFileConventions = {
    'general': {
        'Conventions': 'CF-1.7',
        'title': '',
        'institution': 'CIMA Research Foundation - www.cimafoundation.org',
        'web-site': '',
        'source': '',
        'history': '',
        'references': 'http://cf-pcmdi.llnl.gov/ ; http://cf-pcmdi.llnl.gov/documents/cf-standard-names/',
        'comment': 'Author(s): Fabio Delogu',
        'email': 'fabio.delogu@cimafoundation.org',
        'project-info': '',
        'algorithm': '',
    },
    'geosystem': {
        'epsg_code': 4326,
        'grid_mapping_name': 'latitude_longitude',
        'longitude_of_prime_meridian': 0.0,
        'semi_major_axis': 6378137.0,
        'inverse_flattening': 298.257223563,
    },
    'georeference': {
        'bounding_box': None,
        'ncols': None,
        'nrows': None,
        'xllcorner': None,
        'yllcorner': None,
        'cellsize': None,
        'nodata_value': None,
    },
}
# -------------------------------------------------------------------------------------
# -------------------------------------------------------------------------------------
# Set default definition of variable(s).
# NOTE(review): this mirrors the conventions_var dictionary defined earlier
# in this module — confirm whether one of the two can be retired.
# The coordinate and var2d/var3d/var4d templates were copy-pasted in the
# original; they are now built by the two helpers below (key order kept).


def _coord_template(axis, units, valid_range):
    """Attribute template for a geographic coordinate variable (EPSG:4326)."""
    return {
        'long_name': axis + ' coordinate',
        'standard_name': axis,
        'grid_mapping': '',
        'dims': {'X': 'west_east', 'Y': 'south_north'},
        'coordinates': '',
        'cell_method': '',
        'coordinate_reference_frame': 'urn:ogc:crs:EPSG::4326',
        'reference': 'WGS84',
        'units': units,
        'pressure_level': '',
        'ScaleFactor': 1,
        'Format': 'f4',
        'Missing_value': -9999.0,
        '_FillValue': None,
        'Valid_range': valid_range,
        'description': '',
    }


def _var_template(dims):
    """Attribute template for a generic gridded data variable."""
    return {
        'long_name': '',
        'standard_name': '',
        'grid_mapping': 'crs',
        'dims': dims,
        'coordinates': 'longitude latitude',
        'ancillary_variables': '',
        'cell_method': '',
        'units': '',
        'pressure_level': '',
        'ScaleFactor': 1,
        'Format': 'f4',
        'Missing_value': -9999.0,
        '_FillValue': None,
        'Valid_range': '',
        'flag_masks': '',
        'flag_values': '',
        'flag_meanings': '',
        'colormap': '',
        'description': '',
    }


oVarConventions = {
    'time': {
        'long_name': 'time',
        'units': 'days since 1990-1-1 0:0:0',
        'calendar': None,
    },
    'terrain': {
        'long_name': 'geometric height',
        'standard_name': 'altitude',
        'grid_mapping': 'crs',
        'dims': {'X': 'west_east', 'Y': 'south_north'},
        'coordinates': 'longitude latitude',
        'cell_method': '',
        'units': 'm',
        'positive': 'up',
        'pressure_level': '',
        'ScaleFactor': 1,
        'Format': 'f4',
        'Missing_value': -9999.0,
        '_FillValue': None,
        'Valid_range': '',
        'description': '',
    },
    'longitude': _coord_template('longitude', 'degrees_east', '-180, 180'),
    'latitude': _coord_template('latitude', 'degrees_north', '-90, 90'),
    'var2d': _var_template({'X': 'west_east', 'Y': 'south_north'}),
    'var3d': _var_template({'X': 'west_east', 'Y': 'south_north', 'Time': 'time'}),
    'var4d': _var_template(
        {'level': 'pressure', 'X': 'west_east', 'Y': 'south_north', 'Time': 'time'}),
}
# -------------------------------------------------------------------------------------
| 28.61157 | 106 | 0.446563 | 825 | 10,386 | 5.396364 | 0.169697 | 0.034591 | 0.024259 | 0.026954 | 0.957323 | 0.945642 | 0.945642 | 0.945642 | 0.945642 | 0.945642 | 0 | 0.029755 | 0.291354 | 10,386 | 362 | 107 | 28.690608 | 0.575136 | 0.098402 | 0 | 0.909091 | 0 | 0.006494 | 0.444541 | 0.03839 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
717f5c25c9723324b709333789496a1c17e8d4b7 | 243 | py | Python | nmigen_boards/test/blinky.py | lethalbit/nmigen-boards | aaf18252e457ff95257137da2a629820c0ff2bfa | [
"BSD-2-Clause"
] | 11 | 2021-12-10T12:23:29.000Z | 2022-03-13T08:40:20.000Z | nmigen_boards/test/blinky.py | lethalbit/nmigen-boards | aaf18252e457ff95257137da2a629820c0ff2bfa | [
"BSD-2-Clause"
] | 12 | 2021-12-11T18:51:29.000Z | 2022-03-12T05:08:52.000Z | nmigen_boards/test/blinky.py | lethalbit/nmigen-boards | aaf18252e457ff95257137da2a629820c0ff2bfa | [
"BSD-2-Clause"
] | 7 | 2021-12-12T07:20:21.000Z | 2022-03-06T06:20:55.000Z | from amaranth_boards.test.blinky import *
# Backwards-compatibility shim: re-export the relocated module's __all__ so
# `from nmigen_boards.test.blinky import *` keeps working.
from amaranth_boards.test.blinky import __all__
import warnings
# stacklevel=2 attributes the deprecation warning to the importing module.
warnings.warn("instead of nmigen_boards.test.blinky, use amaranth_boards.test.blinky",
              DeprecationWarning, stacklevel=2)
| 30.375 | 86 | 0.790123 | 31 | 243 | 5.935484 | 0.516129 | 0.217391 | 0.347826 | 0.391304 | 0.369565 | 0.369565 | 0 | 0 | 0 | 0 | 0 | 0.004762 | 0.135802 | 243 | 7 | 87 | 34.714286 | 0.871429 | 0 | 0 | 0 | 0 | 0 | 0.283951 | 0.218107 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.6 | 0 | 0.6 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
71b0647c98185d887aea3f85e94493dd97509746 | 2,491 | py | Python | tests/test_double_pairing_heap.py | d-michail/python-jheaps | 65e1f4036b125dc065dc4398bf2eac0e95726f7a | [
"Apache-2.0"
] | null | null | null | tests/test_double_pairing_heap.py | d-michail/python-jheaps | 65e1f4036b125dc065dc4398bf2eac0e95726f7a | [
"Apache-2.0"
] | null | null | null | tests/test_double_pairing_heap.py | d-michail/python-jheaps | 65e1f4036b125dc065dc4398bf2eac0e95726f7a | [
"Apache-2.0"
] | null | null | null | import pytest
from random import Random
from jheaps import (
create_doubleended_addressable_pairing_heap
)
def test_long_heap():
    """Exercise a double-ended addressable pairing heap with int keys."""
    heap = create_doubleended_addressable_pairing_heap(key_type=int)

    # Insert one handle and mutate its value in place.
    first = heap.insert(5, 15)
    assert first.key == 5
    assert first.value == 15
    first.value = 10
    assert first.value == 10

    heap.insert(6, 40)
    heap.insert(7, 30)
    heap.insert(8, 20)

    # Min side: peek, then decrease the key through the handle.
    smallest = heap.find_min()
    assert smallest.key == 5
    assert smallest.value == 10
    smallest.decrease_key(4)
    assert smallest.key == 4
    assert smallest.value == 10
    assert len(heap) == 4

    # Max side: peek, delete, peek again, then increase the key.
    largest = heap.find_max()
    assert largest.key == 8
    assert largest.value == 20
    removed = heap.delete_max()
    assert removed.key == 8
    assert removed.value == 20
    largest = heap.find_max()
    assert largest.key == 7
    assert largest.value == 30
    largest.increase_key(9)
    assert largest.key == 9
    assert largest.value == 30
def test_double_heap():
    """Exercise a double-ended addressable pairing heap with float keys."""
    heap = create_doubleended_addressable_pairing_heap(key_type=float)

    # Insert one handle and mutate its value in place.
    first = heap.insert(5.5, 15)
    assert first.key == 5.5
    assert first.value == 15
    first.value = 10
    assert first.value == 10

    heap.insert(6.5, 40)
    heap.insert(7.5, 30)
    heap.insert(8.5, 20)

    # Min side: peek, then decrease the key through the handle.
    smallest = heap.find_min()
    assert smallest.key == 5.5
    assert smallest.value == 10
    smallest.decrease_key(4.5)
    assert smallest.key == 4.5
    assert smallest.value == 10
    assert len(heap) == 4

    # Max side: peek, delete, peek again, then increase the key.
    largest = heap.find_max()
    assert largest.key == 8.5
    assert largest.value == 20
    removed = heap.delete_max()
    assert removed.key == 8.5
    assert removed.value == 20
    largest = heap.find_max()
    assert largest.key == 7.5
    assert largest.value == 30
    largest.increase_key(9.5)
    assert largest.key == 9.5
    assert largest.value == 30
def test_any_heap():
    """Exercise the heap with arbitrary (string) keys and values.

    Mirrors test_long_heap/test_double_heap with key_type=object and
    value_type=object; string keys compare lexicographically.

    Fixes: removed a stray debug ``print(h)`` left in the test and the
    unused h2/h3/h4 bindings.
    """
    h = create_doubleended_addressable_pairing_heap(key_type=object, value_type=object)

    h1 = h.insert("5.5", "15")
    assert h1.key == "5.5"
    assert h1.value == "15"
    h1.value = "10"
    assert h1.value == "10"

    h.insert("6.5", "40")
    h.insert("7.5", "30")
    h.insert("8.5", "20")

    h5 = h.find_min()
    assert h5.key == "5.5"
    assert h5.value == "10"
    h5.decrease_key("4.5")
    assert h5.key == "4.5"
    assert h5.value == "10"
    assert len(h) == 4

    h6 = h.find_max()
    assert h6.key == "8.5"
    assert h6.value == "20"
    h7 = h.delete_max()
    assert h7.key == "8.5"
    assert h7.value == "20"
    h6 = h.find_max()
    assert h6.key == "7.5"
    assert h6.value == "30"
    h6.increase_key("9.5")
    assert h6.key == "9.5"
    assert h6.value == "30"
| 18.729323 | 87 | 0.564833 | 417 | 2,491 | 3.268585 | 0.119904 | 0.102715 | 0.072634 | 0.066031 | 0.898753 | 0.852531 | 0.820983 | 0.820983 | 0.799707 | 0.648569 | 0 | 0.12507 | 0.281012 | 2,491 | 132 | 88 | 18.871212 | 0.635958 | 0 | 0 | 0.397849 | 0 | 0 | 0.026094 | 0 | 0 | 0 | 0 | 0 | 0.516129 | 1 | 0.032258 | false | 0 | 0.032258 | 0 | 0.064516 | 0.010753 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
71c5e0633c9a24526f4b3f4ca434d002922cda7a | 165 | py | Python | wunderkafka/tests/__init__.py | severstal-digital/wunderkafka | 8c56fa4559a8576af7f005fd916bf97127576278 | [
"Apache-2.0"
] | null | null | null | wunderkafka/tests/__init__.py | severstal-digital/wunderkafka | 8c56fa4559a8576af7f005fd916bf97127576278 | [
"Apache-2.0"
] | null | null | null | wunderkafka/tests/__init__.py | severstal-digital/wunderkafka | 8c56fa4559a8576af7f005fd916bf97127576278 | [
"Apache-2.0"
] | null | null | null | from wunderkafka.tests.consumer import TestConsumer
# Re-export the producer and schema-registry test doubles alongside the
# consumer imported above.
from wunderkafka.tests.producer import TestProducer
from wunderkafka.tests.schema_registry import TestHTTPClient

# Declare the public API explicitly so `import *` and linters see that these
# re-exports are intentional.
__all__ = ["TestConsumer", "TestProducer", "TestHTTPClient"]
| 41.25 | 60 | 0.890909 | 19 | 165 | 7.684211 | 0.578947 | 0.308219 | 0.410959 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.072727 | 165 | 3 | 61 | 55 | 0.954248 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
e0ae9aaac1f0bff13740b214503926e52db52c51 | 1,714 | py | Python | vaccinate/core/migrations/0068_dn_fields_set_null_on_delete.py | MoralCode/vial | cdaaab053a9cf1cef40104a2cdf480b7932d58f7 | [
"MIT"
] | 7 | 2021-06-28T17:33:47.000Z | 2022-02-12T21:54:59.000Z | vaccinate/core/migrations/0068_dn_fields_set_null_on_delete.py | MoralCode/vial | cdaaab053a9cf1cef40104a2cdf480b7932d58f7 | [
"MIT"
] | 104 | 2021-06-17T21:25:30.000Z | 2022-03-28T14:21:57.000Z | vaccinate/core/migrations/0068_dn_fields_set_null_on_delete.py | MoralCode/vial | cdaaab053a9cf1cef40104a2cdf480b7932d58f7 | [
"MIT"
] | 1 | 2021-06-25T17:52:23.000Z | 2021-06-25T17:52:23.000Z | # Generated by Django 3.1.7 on 2021-04-08 18:06
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
    """Re-point the location model's denormalized report fields to use
    SET_NULL on delete.

    All four fields share an identical ForeignKey definition, so the
    AlterField operations are generated from a list of field names instead
    of being copy-pasted four times.
    """

    dependencies = [
        ("core", "0067_populate_denormalized_location_reports"),
    ]

    operations = [
        migrations.AlterField(
            model_name="location",
            name=field_name,
            # Nullable FK to core.report; deleting the report clears the
            # pointer instead of cascading.  related_name="+" disables the
            # reverse accessor.
            field=models.ForeignKey(
                blank=True,
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                related_name="+",
                to="core.report",
            ),
        )
        for field_name in (
            "dn_latest_report",
            "dn_latest_report_including_pending",
            "dn_latest_skip_report",
            "dn_latest_yes_report",
        )
    ]
| 29.050847 | 64 | 0.502334 | 155 | 1,714 | 5.348387 | 0.303226 | 0.057901 | 0.084439 | 0.13269 | 0.738239 | 0.738239 | 0.738239 | 0.738239 | 0.738239 | 0.671894 | 0 | 0.018304 | 0.394399 | 1,714 | 58 | 65 | 29.551724 | 0.780347 | 0.026254 | 0 | 0.769231 | 1 | 0 | 0.130774 | 0.058788 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.038462 | 0 | 0.096154 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
e0eefae4337b3a34d760e9921cd1cab80bd30a4f | 6,906 | py | Python | typings/bpy/ops/constraint.py | Argmaster/PyR3 | 6786bcb6a101fe4bd4cc50fe43767b8178504b15 | [
"MIT"
] | 2 | 2021-12-12T18:51:52.000Z | 2022-02-23T09:49:16.000Z | typings/bpy/ops/constraint.py | Argmaster/PyR3 | 6786bcb6a101fe4bd4cc50fe43767b8178504b15 | [
"MIT"
] | 2 | 2021-11-08T12:09:02.000Z | 2021-12-12T23:01:12.000Z | typings/bpy/ops/constraint.py | Argmaster/PyR3 | 6786bcb6a101fe4bd4cc50fe43767b8178504b15 | [
"MIT"
] | null | null | null | import sys
import typing
def add_target():
    ''' Add a target to the constraint.

    Wraps the operator implemented in startup/bl_operators/constraint.py:36.
    '''
    pass


def childof_clear_inverse(constraint: str = "",
                          owner: typing.Union[str, int] = 'OBJECT'):
    ''' Clear the inverse correction for a Child Of constraint.

    :param constraint: Name of the constraint to edit.
    :type constraint: str
    :param owner: Owner of the constraint: 'OBJECT' edits a constraint on
        the active object, 'BONE' edits a constraint on the active bone.
    :type owner: typing.Union[str, int]
    '''
    pass


def childof_set_inverse(constraint: str = "",
                        owner: typing.Union[str, int] = 'OBJECT'):
    ''' Set the inverse correction for a Child Of constraint.

    :param constraint: Name of the constraint to edit.
    :type constraint: str
    :param owner: Owner of the constraint: 'OBJECT' edits a constraint on
        the active object, 'BONE' edits a constraint on the active bone.
    :type owner: typing.Union[str, int]
    '''
    pass


def delete(constraint: str = "",
           owner: typing.Union[str, int] = 'OBJECT',
           report: bool = False):
    ''' Remove a constraint from the constraint stack.

    :param constraint: Name of the constraint to edit.
    :type constraint: str
    :param owner: Owner of the constraint: 'OBJECT' edits a constraint on
        the active object, 'BONE' edits a constraint on the active bone.
    :type owner: typing.Union[str, int]
    :param report: Create a notification after the operation.
    :type report: bool
    '''
    pass
def disable_keep_transform():
    ''' Set the influence of this constraint to zero while trying to
    maintain the object's transformation. Other active constraints can
    still influence the final transformation.

    Wraps the operator implemented in startup/bl_operators/constraint.py:86.
    '''
    pass


def followpath_path_animate(constraint: str = "",
                            owner: typing.Union[str, int] = 'OBJECT',
                            frame_start: int = 1,
                            length: int = 100):
    ''' Add default animation for the path used by the constraint if it is
    not animated already.

    :param constraint: Name of the constraint to edit.
    :type constraint: str
    :param owner: Owner of the constraint: 'OBJECT' edits a constraint on
        the active object, 'BONE' edits a constraint on the active bone.
    :type owner: typing.Union[str, int]
    :param frame_start: First frame of the path animation.
    :type frame_start: int
    :param length: Number of frames the path animation should take.
    :type length: int
    '''
    pass


def limitdistance_reset(constraint: str = "",
                        owner: typing.Union[str, int] = 'OBJECT'):
    ''' Reset the limiting distance for a Limit Distance constraint.

    :param constraint: Name of the constraint to edit.
    :type constraint: str
    :param owner: Owner of the constraint: 'OBJECT' edits a constraint on
        the active object, 'BONE' edits a constraint on the active bone.
    :type owner: typing.Union[str, int]
    '''
    pass
def move_down(constraint: str = "", owner: typing.Union[str, int] = 'OBJECT'):
    ''' Move the constraint down in the constraint stack.

    :param constraint: Name of the constraint to edit.
    :type constraint: str
    :param owner: Owner of the constraint: 'OBJECT' edits a constraint on
        the active object, 'BONE' edits a constraint on the active bone.
    :type owner: typing.Union[str, int]
    '''
    pass


def move_to_index(constraint: str = "",
                  owner: typing.Union[str, int] = 'OBJECT',
                  index: int = 0):
    ''' Change the constraint's position in the list so it evaluates after
    the set number of others.

    :param constraint: Name of the constraint to edit.
    :type constraint: str
    :param owner: Owner of the constraint: 'OBJECT' edits a constraint on
        the active object, 'BONE' edits a constraint on the active bone.
    :type owner: typing.Union[str, int]
    :param index: The index to move the constraint to.
    :type index: int
    '''
    pass


def move_up(constraint: str = "", owner: typing.Union[str, int] = 'OBJECT'):
    ''' Move the constraint up in the constraint stack.

    :param constraint: Name of the constraint to edit.
    :type constraint: str
    :param owner: Owner of the constraint: 'OBJECT' edits a constraint on
        the active object, 'BONE' edits a constraint on the active bone.
    :type owner: typing.Union[str, int]
    '''
    pass
def normalize_target_weights():
    ''' Normalize the weights of all target bones.

    Wraps the operator implemented in startup/bl_operators/constraint.py:61.
    '''
    pass


def objectsolver_clear_inverse(constraint: str = "",
                               owner: typing.Union[str, int] = 'OBJECT'):
    ''' Clear the inverse correction for an Object Solver constraint.

    :param constraint: Name of the constraint to edit.
    :type constraint: str
    :param owner: Owner of the constraint: 'OBJECT' edits a constraint on
        the active object, 'BONE' edits a constraint on the active bone.
    :type owner: typing.Union[str, int]
    '''
    pass


def objectsolver_set_inverse(constraint: str = "",
                             owner: typing.Union[str, int] = 'OBJECT'):
    ''' Set the inverse correction for an Object Solver constraint.

    :param constraint: Name of the constraint to edit.
    :type constraint: str
    :param owner: Owner of the constraint: 'OBJECT' edits a constraint on
        the active object, 'BONE' edits a constraint on the active bone.
    :type owner: typing.Union[str, int]
    '''
    pass


def remove_target(index: int = 0):
    ''' Remove the target at the given index from the constraint.

    :param index: Index of the target to remove.
    :type index: int
    '''
    pass


def stretchto_reset(constraint: str = "",
                    owner: typing.Union[str, int] = 'OBJECT'):
    ''' Reset the original length of the bone for a Stretch To constraint.

    :param constraint: Name of the constraint to edit.
    :type constraint: str
    :param owner: Owner of the constraint: 'OBJECT' edits a constraint on
        the active object, 'BONE' edits a constraint on the active bone.
    :type owner: typing.Union[str, int]
    '''
    pass
| 36.347368 | 336 | 0.675934 | 922 | 6,906 | 5.02603 | 0.136659 | 0.061718 | 0.07596 | 0.090203 | 0.785067 | 0.776867 | 0.752266 | 0.752266 | 0.725723 | 0.725723 | 0 | 0.003407 | 0.235013 | 6,906 | 189 | 337 | 36.539683 | 0.873746 | 0.697509 | 0 | 0.533333 | 0 | 0 | 0.038328 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | false | 0.333333 | 0.044444 | 0 | 0.377778 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 7 |
1ca637660af9831aa242ba78f74eaa1bcfebd1e3 | 2,873 | py | Python | tests/app/test_csv_loader.py | sixkingdoms/vnpy | fc83765aa609f6a06bce499d95bb6c8c845be2f3 | [
"MIT"
] | null | null | null | tests/app/test_csv_loader.py | sixkingdoms/vnpy | fc83765aa609f6a06bce499d95bb6c8c845be2f3 | [
"MIT"
] | null | null | null | tests/app/test_csv_loader.py | sixkingdoms/vnpy | fc83765aa609f6a06bce499d95bb6c8c845be2f3 | [
"MIT"
] | null | null | null | """
Test if csv loader works fine
"""
import tempfile
import unittest
from vnpy.app.csv_loader import CsvLoaderEngine
from vnpy.trader.constant import Exchange, Interval
class TestCsvLoader(unittest.TestCase):
    """Tests for CsvLoaderEngine.load_by_handle.

    The CSV fixture and the load call were copy-pasted three times in the
    original; they now live in a class constant and a single helper.
    """

    # Six one-minute OHLCV bars shared by every test case.
    BAR_DATA = """"Datetime","Open","High","Low","Close","Volume"
2010-04-16 09:16:00,3450.0,3488.0,3450.0,3468.0,489
2010-04-16 09:17:00,3468.0,3473.8,3467.0,3467.0,302
2010-04-16 09:18:00,3467.0,3471.0,3466.0,3467.0,203
2010-04-16 09:19:00,3467.0,3468.2,3448.0,3448.0,280
2010-04-16 09:20:00,3448.0,3459.0,3448.0,3454.0,250
2010-04-16 09:21:00,3454.0,3456.8,3454.0,3456.8,109
"""

    def setUp(self) -> None:
        # no engine is necessary for CsvLoader
        self.engine = CsvLoaderEngine(None, None)

    def _load_bars(self) -> None:
        """Write BAR_DATA to a temp file and feed it to the engine."""
        with tempfile.TemporaryFile("w+t") as f:
            f.write(self.BAR_DATA)
            f.seek(0)
            self.engine.load_by_handle(
                f,
                symbol="1",
                exchange=Exchange.BITMEX,
                interval=Interval.MINUTE,
                datetime_head="Datetime",
                open_head="Open",
                close_head="Close",
                low_head="Low",
                high_head="High",
                volume_head="Volume",
                datetime_format="%Y-%m-%d %H:%M:%S",
            )

    def test_load(self):
        """A well-formed CSV loads without raising."""
        self._load_bars()

    def test_load_duplicated(self):
        """Loading identical data twice (duplicate bars) must not raise."""
        self._load_bars()
        self._load_bars()
# Allow running this test module directly with the unittest runner.
if __name__ == "__main__":
    unittest.main()
| 31.228261 | 65 | 0.534633 | 410 | 2,873 | 3.65122 | 0.22439 | 0.048096 | 0.064128 | 0.08016 | 0.796259 | 0.796259 | 0.796259 | 0.796259 | 0.796259 | 0.796259 | 0 | 0.227503 | 0.311521 | 2,873 | 91 | 66 | 31.571429 | 0.529323 | 0.023321 | 0 | 0.813333 | 0 | 0.16 | 0.314868 | 0.205147 | 0 | 0 | 0 | 0 | 0 | 1 | 0.04 | false | 0 | 0.053333 | 0 | 0.106667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
1cce8c5302ce698b504169b973bd69ceba80d121 | 2,620 | py | Python | rvlyzer/rep/instr_pretty_print.py | nessdoor/RVlyzer | 239beb63a4db1653261bc1cc59227ee2ddb77d1a | [
"MIT"
] | null | null | null | rvlyzer/rep/instr_pretty_print.py | nessdoor/RVlyzer | 239beb63a4db1653261bc1cc59227ee2ddb77d1a | [
"MIT"
] | null | null | null | rvlyzer/rep/instr_pretty_print.py | nessdoor/RVlyzer | 239beb63a4db1653261bc1cc59227ee2ddb77d1a | [
"MIT"
] | null | null | null | # Code contributed by Mattia Iamundo (https://github.com/MattiaIamundo)
def u_family(instr):
return "\t"+str(instr.opcode)+"\t"+instr.r1.name.lower()+","+str(instr.immediate)+"\n"
def i_family(instr):
return "\t" + str(instr.opcode) + "\t" + instr.r1.name.lower() + "," + instr.r2.name.lower() \
+ "," + str(instr.immediate) + "\n"
def s_family(instr):
return "\t"+str(instr.opcode)+"\t"+instr.r1.name.lower()+","+str(instr.immediate)\
+ "(" + instr.r2.name.lower()+")\n"
def r_family(instr):
return "\t"+str(instr.opcode)+"\t"+instr.r1.name.lower()+","\
+instr.r2.name.lower()+","+instr.r3.name.lower()+"\n"
def j_family(instr):
return "\t"+str(instr.opcode)+"\t"+str(instr.immediate)+"\n"
def jr_family(instr):
return "\t"+str(instr.opcode)+"\t"+instr.r1.name.lower()+"\n"
def b_family(instr):
return "\t"+str(instr.opcode)+"\t"+instr.r1.name.lower()+","+instr.r2.name.lower()+","\
+ str(instr.immediate)+"\n"
def al_family(instr):
return "\t" + str(instr.opcode) + "\t" + instr.r1.name.lower() + "," + str(instr.immediate) + "(" \
+ instr.r2.name.lower() + ")\n"
def as_family(instr):
return "\t" + str(instr.opcode) + "\t" + instr.r1.name.lower() + "," + instr.r2.name.lower() \
+ "," + str(instr.immediate) + "(" + instr.r3.name.lower() + ")\n"
def sext_family(instr):
return "\t"+str(instr.opcode)+"\t"+instr.r1.name.lower()+","+instr.r2.name.lower()+"\n"
def _2arg_family(instr):
if instr.r2 is None:
return "\t"+str(instr.opcode)+"\t"+instr.r1.name.lower()+","+str(instr.immediate)+"\n"
else:
return "\t"+str(instr.opcode)+"\t"+instr.r1.name.lower()+","+instr.r2.name.lower()+"\n"
def bz_family(instr):
return "\t"+str(instr.opcode)+"\t"+instr.r1.name.lower()+","+str(instr.immediate)+"\n"
def nop_family(instr):
return "\t"+str(instr.opcode)+"\n"
def snez_family(instr):
return "\t"+str(instr.opcode)+"\t"+instr.r1.name.lower()+","+instr.r2.name.lower()+"\n"
def li_family(instr):
return "\t" + str(instr.opcode) + "\t" + instr.r1.name.lower() + "," + str(instr.immediate) + "\n"
familystr = {"u": u_family,
"i": i_family,
"s": s_family,
"r": r_family,
"j": j_family,
"jr": jr_family,
"b": b_family,
"al": al_family,
"as": as_family,
"sext": sext_family,
"_2arg": _2arg_family,
"bz": bz_family,
"nop": nop_family,
"snez": snez_family,
"li": li_family}
| 30.114943 | 103 | 0.552672 | 362 | 2,620 | 3.90884 | 0.118785 | 0.146996 | 0.108127 | 0.169611 | 0.795053 | 0.780212 | 0.751943 | 0.729329 | 0.706007 | 0.706007 | 0 | 0.013936 | 0.205725 | 2,620 | 86 | 104 | 30.465116 | 0.666026 | 0.026336 | 0 | 0.166667 | 0 | 0 | 0.058454 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.277778 | false | 0 | 0 | 0.259259 | 0.574074 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 9 |
1cd69a5d6ab654c5a6fe9e1ed5e15b106b41753d | 23,019 | py | Python | release/stubs.min/System/Collections/ObjectModel.py | tranconbv/ironpython-stubs | a601759e6c6819beff8e6b639d18a24b7e351851 | [
"MIT"
] | null | null | null | release/stubs.min/System/Collections/ObjectModel.py | tranconbv/ironpython-stubs | a601759e6c6819beff8e6b639d18a24b7e351851 | [
"MIT"
] | null | null | null | release/stubs.min/System/Collections/ObjectModel.py | tranconbv/ironpython-stubs | a601759e6c6819beff8e6b639d18a24b7e351851 | [
"MIT"
] | null | null | null | # encoding: utf-8
# module System.Collections.ObjectModel calls itself ObjectModel
# from mscorlib,Version=4.0.0.0,Culture=neutral,PublicKeyToken=b77a5c561934e089,System,Version=4.0.0.0,Culture=neutral,PublicKeyToken=b77a5c561934e089
# by generator 1.145
# no doc
# no important
# no functions
# classes
class Collection(object):
"""
Collection[T]()
Collection[T](list: IList[T])
"""
def ZZZ(self):
"""hardcoded/mock instance of the class"""
return Collection()
instance=ZZZ()
"""hardcoded/returns an instance of the class"""
def Add(self,item):
"""
Add(self: Collection[T],item: T)
Adds an object to the end of the System.Collections.ObjectModel.Collection.
item: The object to be added to the end of the System.Collections.ObjectModel.Collection. The value can be null for reference types.
"""
pass
def Clear(self):
"""
Clear(self: Collection[T])
Removes all elements from the System.Collections.ObjectModel.Collection.
"""
pass
def ClearItems(self,*args):
"""
ClearItems(self: Collection[T])
Removes all elements from the System.Collections.ObjectModel.Collection.
"""
pass
def Contains(self,item):
"""
Contains(self: Collection[T],item: T) -> bool
Determines whether an element is in the System.Collections.ObjectModel.Collection.
item: The object to locate in the System.Collections.ObjectModel.Collection. The value can be null for reference types.
Returns: true if item is found in the System.Collections.ObjectModel.Collection; otherwise,false.
"""
pass
def CopyTo(self,array,index):
""" CopyTo(self: Collection[T],array: Array[T],index: int) """
pass
def GetEnumerator(self):
"""
GetEnumerator(self: Collection[T]) -> IEnumerator[T]
Returns an enumerator that iterates through the System.Collections.ObjectModel.Collection.
Returns: An System.Collections.Generic.IEnumerator for the System.Collections.ObjectModel.Collection.
"""
pass
def IndexOf(self,item):
"""
IndexOf(self: Collection[T],item: T) -> int
Searches for the specified object and returns the zero-based index of the first occurrence within the entire System.Collections.ObjectModel.Collection.
item: The object to locate in the System.Collections.Generic.List. The value can be null for reference types.
Returns: The zero-based index of the first occurrence of item within the entire System.Collections.ObjectModel.Collection,if found; otherwise,-1.
"""
pass
def Insert(self,index,item):
"""
Insert(self: Collection[T],index: int,item: T)
Inserts an element into the System.Collections.ObjectModel.Collection at the specified index.
index: The zero-based index at which item should be inserted.
item: The object to insert. The value can be null for reference types.
"""
pass
def InsertItem(self,*args):
"""
InsertItem(self: Collection[T],index: int,item: T)
Inserts an element into the System.Collections.ObjectModel.Collection at the specified index.
index: The zero-based index at which item should be inserted.
item: The object to insert. The value can be null for reference types.
"""
pass
def Remove(self,item):
"""
Remove(self: Collection[T],item: T) -> bool
Removes the first occurrence of a specific object from the System.Collections.ObjectModel.Collection.
item: The object to remove from the System.Collections.ObjectModel.Collection. The value can be null for reference types.
Returns: true if item is successfully removed; otherwise,false. This method also returns false if item was not found in the original System.Collections.ObjectModel.Collection.
"""
pass
def RemoveAt(self,index):
"""
RemoveAt(self: Collection[T],index: int)
Removes the element at the specified index of the System.Collections.ObjectModel.Collection.
index: The zero-based index of the element to remove.
"""
pass
def RemoveItem(self,*args):
"""
RemoveItem(self: Collection[T],index: int)
Removes the element at the specified index of the System.Collections.ObjectModel.Collection.
index: The zero-based index of the element to remove.
"""
pass
def SetItem(self,*args):
"""
SetItem(self: Collection[T],index: int,item: T)
Replaces the element at the specified index.
index: The zero-based index of the element to replace.
item: The new value for the element at the specified index. The value can be null for reference types.
"""
pass
def __add__(self,*args):
""" x.__add__(y) <==> x+y """
pass
def __contains__(self,*args):
"""
__contains__(self: ICollection[T],item: T) -> bool
Determines whether the System.Collections.Generic.ICollection contains a specific value.
item: The object to locate in the System.Collections.Generic.ICollection.
Returns: true if item is found in the System.Collections.Generic.ICollection; otherwise,false.
__contains__(self: IList,value: object) -> bool
Determines whether the System.Collections.IList contains a specific value.
value: The object to locate in the System.Collections.IList.
Returns: true if the System.Object is found in the System.Collections.IList; otherwise,false.
"""
pass
def __getitem__(self,*args):
""" x.__getitem__(y) <==> x[y] """
pass
def __init__(self,*args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
def __iter__(self,*args):
""" __iter__(self: IEnumerable) -> object """
pass
def __len__(self,*args):
""" x.__len__() <==> len(x) """
pass
@staticmethod
def __new__(self,list=None):
"""
__new__(cls: type)
__new__(cls: type,list: IList[T])
"""
pass
def __reduce_ex__(self,*args):
pass
def __repr__(self,*args):
""" __repr__(self: object) -> str """
pass
def __setitem__(self,*args):
""" x.__setitem__(i,y) <==> x[i]= """
pass
Count=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the number of elements actually contained in the System.Collections.ObjectModel.Collection.
Get: Count(self: Collection[T]) -> int
"""
Items=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets a System.Collections.Generic.IList wrapper around the System.Collections.ObjectModel.Collection.
"""
class KeyedCollection(Collection):
# no doc
def ZZZ(self):
"""hardcoded/mock instance of the class"""
return KeyedCollection()
instance=ZZZ()
"""hardcoded/returns an instance of the class"""
def ChangeItemKey(self,*args):
"""
ChangeItemKey(self: KeyedCollection[TKey,TItem],item: TItem,newKey: TKey)
Changes the key associated with the specified element in the lookup dictionary.
item: The element to change the key of.
newKey: The new key for item.
"""
pass
def ClearItems(self,*args):
"""
ClearItems(self: KeyedCollection[TKey,TItem])
Removes all elements from the System.Collections.ObjectModel.KeyedCollection.
"""
pass
def Contains(self,*__args):
"""
Contains(self: KeyedCollection[TKey,TItem],key: TKey) -> bool
Determines whether the collection contains an element with the specified key.
key: The key to locate in the System.Collections.ObjectModel.KeyedCollection.
Returns: true if the System.Collections.ObjectModel.KeyedCollection contains an element with the specified key; otherwise,false.
"""
pass
def GetKeyForItem(self,*args):
"""
GetKeyForItem(self: KeyedCollection[TKey,TItem],item: TItem) -> TKey
When implemented in a derived class,extracts the key from the specified element.
item: The element from which to extract the key.
Returns: The key for the specified element.
"""
pass
def InsertItem(self,*args):
"""
InsertItem(self: KeyedCollection[TKey,TItem],index: int,item: TItem)
Inserts an element into the System.Collections.ObjectModel.KeyedCollection at the specified index.
index: The zero-based index at which item should be inserted.
item: The object to insert.
"""
pass
def Remove(self,*__args):
"""
Remove(self: KeyedCollection[TKey,TItem],key: TKey) -> bool
Removes the element with the specified key from the System.Collections.ObjectModel.KeyedCollection.
key: The key of the element to remove.
Returns: true if the element is successfully removed; otherwise,false. This method also returns false if key is not found in the System.Collections.ObjectModel.KeyedCollection.
"""
pass
def RemoveItem(self,*args):
"""
RemoveItem(self: KeyedCollection[TKey,TItem],index: int)
Removes the element at the specified index of the System.Collections.ObjectModel.KeyedCollection.
index: The index of the element to remove.
"""
pass
def SetItem(self,*args):
"""
SetItem(self: KeyedCollection[TKey,TItem],index: int,item: TItem)
Replaces the item at the specified index with the specified item.
index: The zero-based index of the item to be replaced.
item: The new item.
"""
pass
def __getitem__(self,*args):
""" x.__getitem__(y) <==> x[y] """
pass
def __init__(self,*args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
def __iter__(self,*args):
""" __iter__(self: IEnumerable) -> object """
pass
@staticmethod
def __new__(self,*args): #cannot find CLR constructor
"""
__new__(cls: type)
__new__(cls: type,comparer: IEqualityComparer[TKey])
__new__(cls: type,comparer: IEqualityComparer[TKey],dictionaryCreationThreshold: int)
"""
pass
def __reduce_ex__(self,*args):
pass
def __setitem__(self,*args):
""" x.__setitem__(i,y) <==> x[i]= """
pass
Comparer=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the generic equality comparer that is used to determine equality of keys in the collection.
Get: Comparer(self: KeyedCollection[TKey,TItem]) -> IEqualityComparer[TKey]
"""
Dictionary=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the lookup dictionary of the System.Collections.ObjectModel.KeyedCollection.
"""
Items=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets a System.Collections.Generic.IList wrapper around the System.Collections.ObjectModel.Collection.
"""
class ObservableCollection(Collection):
"""
ObservableCollection[T]()
ObservableCollection[T](list: List[T])
ObservableCollection[T](collection: IEnumerable[T])
"""
def ZZZ(self):
"""hardcoded/mock instance of the class"""
return ObservableCollection()
instance=ZZZ()
"""hardcoded/returns an instance of the class"""
def BlockReentrancy(self,*args):
"""
BlockReentrancy(self: ObservableCollection[T]) -> IDisposable
Disallows reentrant attempts to change this collection.
Returns: An System.IDisposable object that can be used to dispose of the object.
"""
pass
def CheckReentrancy(self,*args):
"""
CheckReentrancy(self: ObservableCollection[T])
Checks for reentrant attempts to change this collection.
"""
pass
def ClearItems(self,*args):
"""
ClearItems(self: ObservableCollection[T])
Removes all items from the collection.
"""
pass
def InsertItem(self,*args):
"""
InsertItem(self: ObservableCollection[T],index: int,item: T)
Inserts an item into the collection at the specified index.
index: The zero-based index at which item should be inserted.
item: The object to insert.
"""
pass
def Move(self,oldIndex,newIndex):
"""
Move(self: ObservableCollection[T],oldIndex: int,newIndex: int)
Moves the item at the specified index to a new location in the collection.
oldIndex: The zero-based index specifying the location of the item to be moved.
newIndex: The zero-based index specifying the new location of the item.
"""
pass
def MoveItem(self,*args):
"""
MoveItem(self: ObservableCollection[T],oldIndex: int,newIndex: int)
Moves the item at the specified index to a new location in the collection.
oldIndex: The zero-based index specifying the location of the item to be moved.
newIndex: The zero-based index specifying the new location of the item.
"""
pass
def OnCollectionChanged(self,*args):
"""
OnCollectionChanged(self: ObservableCollection[T],e: NotifyCollectionChangedEventArgs)
Raises the System.Collections.ObjectModel.ObservableCollection event with the provided arguments.
e: Arguments of the event being raised.
"""
pass
def OnPropertyChanged(self,*args):
"""
OnPropertyChanged(self: ObservableCollection[T],e: PropertyChangedEventArgs)
Raises the System.Collections.ObjectModel.ObservableCollection event with the provided arguments.
e: Arguments of the event being raised.
"""
pass
def RemoveItem(self,*args):
"""
RemoveItem(self: ObservableCollection[T],index: int)
Removes the item at the specified index of the collection.
index: The zero-based index of the element to remove.
"""
pass
def SetItem(self,*args):
"""
SetItem(self: ObservableCollection[T],index: int,item: T)
Replaces the element at the specified index.
index: The zero-based index of the element to replace.
item: The new value for the element at the specified index.
"""
pass
def __getitem__(self,*args):
""" x.__getitem__(y) <==> x[y] """
pass
def __init__(self,*args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
def __iter__(self,*args):
""" __iter__(self: IEnumerable) -> object """
pass
@staticmethod
def __new__(self,*__args):
"""
__new__(cls: type)
__new__(cls: type,list: List[T])
__new__(cls: type,collection: IEnumerable[T])
"""
pass
def __reduce_ex__(self,*args):
pass
def __setitem__(self,*args):
""" x.__setitem__(i,y) <==> x[i]= """
pass
Items=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets a System.Collections.Generic.IList wrapper around the System.Collections.ObjectModel.Collection.
"""
CollectionChanged=None
PropertyChanged=None
class ReadOnlyCollection(object):
""" ReadOnlyCollection[T](list: IList[T]) """
def ZZZ(self):
"""hardcoded/mock instance of the class"""
return ReadOnlyCollection()
instance=ZZZ()
"""hardcoded/returns an instance of the class"""
def Contains(self,value):
"""
Contains(self: ReadOnlyCollection[T],value: T) -> bool
Determines whether an element is in the System.Collections.ObjectModel.ReadOnlyCollection.
value: The object to locate in the System.Collections.ObjectModel.ReadOnlyCollection. The value can be null for reference types.
Returns: true if value is found in the System.Collections.ObjectModel.ReadOnlyCollection; otherwise,false.
"""
pass
def CopyTo(self,array,index):
""" CopyTo(self: ReadOnlyCollection[T],array: Array[T],index: int) """
pass
def GetEnumerator(self):
"""
GetEnumerator(self: ReadOnlyCollection[T]) -> IEnumerator[T]
Returns an enumerator that iterates through the System.Collections.ObjectModel.ReadOnlyCollection.
Returns: An System.Collections.Generic.IEnumerator for the System.Collections.ObjectModel.ReadOnlyCollection.
"""
pass
def IndexOf(self,value):
"""
IndexOf(self: ReadOnlyCollection[T],value: T) -> int
Searches for the specified object and returns the zero-based index of the first occurrence within the entire System.Collections.ObjectModel.ReadOnlyCollection.
value: The object to locate in the System.Collections.Generic.List. The value can be null for reference types.
Returns: The zero-based index of the first occurrence of item within the entire System.Collections.ObjectModel.ReadOnlyCollection,if found; otherwise,-1.
"""
pass
def __contains__(self,*args):
"""
__contains__(self: ICollection[T],item: T) -> bool
Determines whether the System.Collections.Generic.ICollection contains a specific value.
item: The object to locate in the System.Collections.Generic.ICollection.
Returns: true if item is found in the System.Collections.Generic.ICollection; otherwise,false.
__contains__(self: IList,value: object) -> bool
Determines whether the System.Collections.IList contains a specific value.
value: The object to locate in the System.Collections.IList.
Returns: true if the System.Object is found in the System.Collections.IList; otherwise,false.
"""
pass
def __getitem__(self,*args):
""" x.__getitem__(y) <==> x[y] """
pass
def __init__(self,*args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
def __iter__(self,*args):
""" __iter__(self: IEnumerable) -> object """
pass
def __len__(self,*args):
""" x.__len__() <==> len(x) """
pass
@staticmethod
def __new__(self,list):
""" __new__(cls: type,list: IList[T]) """
pass
def __reduce_ex__(self,*args):
pass
def __repr__(self,*args):
""" __repr__(self: object) -> str """
pass
Count=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the number of elements contained in the System.Collections.ObjectModel.ReadOnlyCollection instance.
Get: Count(self: ReadOnlyCollection[T]) -> int
"""
Items=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Returns the System.Collections.Generic.IList that the System.Collections.ObjectModel.ReadOnlyCollection wraps.
"""
class ReadOnlyDictionary(object):
""" ReadOnlyDictionary[TKey,TValue](dictionary: IDictionary[TKey,TValue]) """
def ZZZ(self):
"""hardcoded/mock instance of the class"""
return ReadOnlyDictionary()
instance=ZZZ()
"""hardcoded/returns an instance of the class"""
def ContainsKey(self,key):
""" ContainsKey(self: ReadOnlyDictionary[TKey,TValue],key: TKey) -> bool """
pass
def GetEnumerator(self):
""" GetEnumerator(self: ReadOnlyDictionary[TKey,TValue]) -> IEnumerator[KeyValuePair[TKey,TValue]] """
pass
def TryGetValue(self,key,value):
""" TryGetValue(self: ReadOnlyDictionary[TKey,TValue],key: TKey) -> (bool,TValue) """
pass
def __contains__(self,*args):
"""
__contains__(self: IDictionary[TKey,TValue],key: TKey) -> bool
Determines whether the System.Collections.Generic.IDictionary contains an element with the specified key.
key: The key to locate in the System.Collections.Generic.IDictionary.
Returns: true if the System.Collections.Generic.IDictionary contains an element with the key; otherwise,false.
__contains__(self: IDictionary,key: object) -> bool
Determines whether the System.Collections.IDictionary object contains an element with the specified key.
key: The key to locate in the System.Collections.IDictionary object.
Returns: true if the System.Collections.IDictionary contains an element with the key; otherwise,false.
"""
pass
def __getitem__(self,*args):
""" x.__getitem__(y) <==> x[y] """
pass
def __init__(self,*args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
def __iter__(self,*args):
""" __iter__(self: IEnumerable) -> object """
pass
def __len__(self,*args):
""" x.__len__() <==> len(x) """
pass
@staticmethod
def __new__(self,dictionary):
""" __new__(cls: type,dictionary: IDictionary[TKey,TValue]) """
pass
def __reduce_ex__(self,*args):
pass
def __repr__(self,*args):
""" __repr__(self: object) -> str """
pass
Count=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Get: Count(self: ReadOnlyDictionary[TKey,TValue]) -> int
"""
Dictionary=property(lambda self: object(),lambda self,v: None,lambda self: None)
Keys=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Get: Keys(self: ReadOnlyDictionary[TKey,TValue]) -> KeyCollection
"""
Values=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Get: Values(self: ReadOnlyDictionary[TKey,TValue]) -> ValueCollection
"""
KeyCollection=None
ValueCollection=None
class ReadOnlyObservableCollection(ReadOnlyCollection):
""" ReadOnlyObservableCollection[T](list: ObservableCollection[T]) """
def ZZZ(self):
"""hardcoded/mock instance of the class"""
return ReadOnlyObservableCollection()
instance=ZZZ()
"""hardcoded/returns an instance of the class"""
def OnCollectionChanged(self,*args):
"""
OnCollectionChanged(self: ReadOnlyObservableCollection[T],args: NotifyCollectionChangedEventArgs)
Raises the System.Collections.ObjectModel.ReadOnlyObservableCollection event using the provided arguments.
args: Arguments of the event being raised.
"""
pass
def OnPropertyChanged(self,*args):
"""
OnPropertyChanged(self: ReadOnlyObservableCollection[T],args: PropertyChangedEventArgs)
Raises the System.Collections.ObjectModel.ReadOnlyObservableCollection event using the provided arguments.
args: Arguments of the event being raised.
"""
pass
def __getitem__(self,*args):
""" x.__getitem__(y) <==> x[y] """
pass
def __init__(self,*args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
def __iter__(self,*args):
""" __iter__(self: IEnumerable) -> object """
pass
@staticmethod
def __new__(self,list):
""" __new__(cls: type,list: ObservableCollection[T]) """
pass
def __reduce_ex__(self,*args):
pass
Items=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Returns the System.Collections.Generic.IList that the System.Collections.ObjectModel.ReadOnlyCollection wraps.
"""
CollectionChanged=None
PropertyChanged=None
| 36.023474 | 215 | 0.701855 | 2,863 | 23,019 | 5.44883 | 0.079986 | 0.078462 | 0.078205 | 0.0775 | 0.818205 | 0.795705 | 0.725192 | 0.66859 | 0.649936 | 0.630128 | 0 | 0.002076 | 0.183935 | 23,019 | 638 | 216 | 36.079937 | 0.828374 | 0.630045 | 0 | 0.809302 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.413953 | false | 0.386047 | 0 | 0 | 0.586047 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 8 |
1c4b744a51d67f2763a05c01079c43a9dd1bb856 | 134 | py | Python | discord/types/widget.py | kuzaku-developers/disnake | 61cc1ad4c2bafd39726a1447c85f7e469e41af10 | [
"MIT"
] | null | null | null | discord/types/widget.py | kuzaku-developers/disnake | 61cc1ad4c2bafd39726a1447c85f7e469e41af10 | [
"MIT"
] | null | null | null | discord/types/widget.py | kuzaku-developers/disnake | 61cc1ad4c2bafd39726a1447c85f7e469e41af10 | [
"MIT"
] | null | null | null | from disnake.types.widget import *
from disnake.types.widget import __dict__ as __original_dict__
locals().update(__original_dict__)
| 26.8 | 62 | 0.835821 | 18 | 134 | 5.444444 | 0.555556 | 0.22449 | 0.326531 | 0.44898 | 0.571429 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.089552 | 134 | 4 | 63 | 33.5 | 0.803279 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.666667 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
1c9631ea235a612d58d508967753e7c96a2a3265 | 49,754 | py | Python | integration-test/1190-mz-colours.py | rinnyB/vector-datasource | 024909ed8245a4ad4a25c908413ba3602de6c335 | [
"MIT"
] | null | null | null | integration-test/1190-mz-colours.py | rinnyB/vector-datasource | 024909ed8245a4ad4a25c908413ba3602de6c335 | [
"MIT"
] | 2 | 2021-03-31T20:22:37.000Z | 2021-12-13T20:50:11.000Z | integration-test/1190-mz-colours.py | rinnyB/vector-datasource | 024909ed8245a4ad4a25c908413ba3602de6c335 | [
"MIT"
] | null | null | null | # -*- encoding: utf-8 -*-
from shapely.wkt import loads as wkt_loads
import dsl
from . import FixtureTest
class MzColours(FixtureTest):
def test_colour_property(self):
self.generate_fixtures(dsl.way(280869875, wkt_loads('POINT (-73.93486891962429 40.6967057090726)'), {u'railway:switch:configuration': u'inside', u'railway:local_operated': u'no', u'railway:switch:movable_frog': u'no', u'railway:turnout_side': u'left', u'source': u'openstreetmap.org', u'railway:switch': u'regular', u'railway': u'switch'}),dsl.way(591995252, wkt_loads('POINT (-73.99998312333059 40.7179859760317)'), {u'name': u'Canal Street (J,Z)', u'wheelchair': u'no', u'source': u'openstreetmap.org', u'railway': u'station', u'network': u'New York City Subway'}),dsl.way(597928303, wkt_loads('POINT (-73.99371988950671 40.72021738834439)'), {u'name': u'Bowery (J,Z)', u'wheelchair': u'no', u'source': u'openstreetmap.org', u'railway': u'station', u'network': u'New York City Subway'}),dsl.way(597928305, wkt_loads('POINT (-74.0110343776189 40.70648029064251)'), {u'name': u'Broad Street (J,Z)', u'wheelchair': u'no', u'source': u'openstreetmap.org', u'railway': u'station', u'network': u'New York City Subway'}),dsl.way(597928312, wkt_loads('POINT (-73.9881575212674 40.71853365735368)'), {u'name': u'Delancey Street-Essex Street (F,J,M,Z)', u'wheelchair': u'no', u'source': u'openstreetmap.org', u'operator': u'MTA', u'railway': u'station', u'network': u'New York City Subway'}),dsl.way(597937439, wkt_loads('POINT (-73.90317617673749 40.6787216146596)'), {u'name': u'Broadway Junction (A,C,J,L,Z)', u'wheelchair': u'no', u'source': u'openstreetmap.org', u'railway': u'station', u'network': u'New York City Subway'}),dsl.way(598556935, wkt_loads('POINT (-73.95791412026 40.70840640737247)'), {u'name': u'Marcy Avenue (J,M,Z)', u'wheelchair': u'yes', u'source': u'openstreetmap.org', u'railway': u'station', u'network': u'New York City Subway'}),dsl.way(1005312423, wkt_loads('POINT (-73.800876391508 40.70227396468948)'), {u'name': u'Jamaica Center - Parsons/Archer (E,J,Z)', u'source': u'openstreetmap.org', u'wheelchair': u'yes', u'wikipedia': u'en:Jamaica Center\u2013Parsons/Archer (Archer 
Avenue Lines)', u'internet_access:fee': u'no', u'station': u'subway', u'wikidata': u'Q2612701', u'operator': u'New York City Transit Authority', u'railway': u'station', u'internet_access': u'wlan', u'network': u'New York City Subway'}),dsl.way(1005312424, wkt_loads('POINT (-73.80810962617579 40.70046426456449)'), {u'name': u'Sutphin Boulevard - Archer Avenue - JFK Airport (E,J,Z)', u'source': u'openstreetmap.org', u'wheelchair': u'yes', u'wikipedia': u'en:Sutphin Boulevard\u2013Archer Avenue\u2013JFK Airport (Archer Avenue Lines)', u'internet_access:fee': u'no', u'station': u'subway', u'wikidata': u'Q649968', u'operator': u'New York City Transit Authority', u'railway': u'station', u'internet_access': u'wlan', u'network': u'New York City Subway'}),dsl.way(1692394907, wkt_loads('POINT (-74.0077647794793 40.71024108644968)'), {u'name': u'Fulton Street-Broadway Nassau (A,C,J,Z)', u'wheelchair': u'no', u'wikipedia': u'en:Fulton Street (New York City Subway)', u'source': u'openstreetmap.org', u'wikidata': u'Q2982552', u'railway': u'station', u'network': u'New York City Subway'}),dsl.way(1832002627, wkt_loads('POINT (-73.8732859034682 40.68334960171839)'), {u'name': u'Crescent Street (J,Z)', u'wheelchair': u'no', u'source': u'openstreetmap.org', u'railway': u'station', u'network': u'New York City Subway'}),dsl.way(1832002633, wkt_loads('POINT (-73.879075814969 40.68170165688338)'), {u'name': u'Norwood Avenue (J,Z)', u'wheelchair': u'no', u'source': u'openstreetmap.org', u'railway': u'station', u'network': u'New York City Subway'}),dsl.way(1832002647, wkt_loads('POINT (-73.8916768427854 40.67803721618088)'), {u'name': u'Van Siclen Avenue (J,Z)', u'wheelchair': u'no', u'source': u'openstreetmap.org', u'railway': u'station', u'network': u'New York City Subway'}),dsl.way(1832002648, wkt_loads('POINT (-73.85165429176359 40.69389109750619)'), {u'name': u'Woodhaven Boulevard (J,Z)', u'wheelchair': u'no', u'source': u'openstreetmap.org', u'railway': u'station', u'network': u'New 
York City Subway'}),dsl.way(1832002649, wkt_loads('POINT (-73.84427418254688 40.69519417698228)'), {u'name': u'104th Street (J,Z)', u'wheelchair': u'no', u'source': u'openstreetmap.org', u'railway': u'station', u'network': u'New York City Subway'}),dsl.way(1832002653, wkt_loads('POINT (-73.82831767764969 40.70052657959609)'), {u'name': u'121st Street (J,Z)', u'wheelchair': u'no', u'source': u'openstreetmap.org', u'railway': u'station', u'network': u'New York City Subway'}),dsl.way(1832002657, wkt_loads('POINT (-73.86725461465069 40.69132096212261)'), {u'name': u'75th Street-Elderts Lane (J,Z)', u'wheelchair': u'no', u'source': u'openstreetmap.org', u'railway': u'station', u'network': u'New York City Subway'}),dsl.way(1903646883, wkt_loads('POINT (-73.93399279272769 40.6962265013319)'), {u'railway:switch:configuration': u'inside', u'railway:local_operated': u'no', u'railway:switch:movable_frog': u'no', u'railway:turnout_side': u'left', u'source': u'openstreetmap.org', u'railway:switch': u'regular', u'railway': u'switch'}),dsl.way(1903646947, wkt_loads('POINT (-73.93454552612199 40.69662799810268)'), {u'railway:switch:configuration': u'inside', u'railway:local_operated': u'no', u'source': u'openstreetmap.org', u'railway:switch': u'single_slip', u'railway': u'switch'}),dsl.way(1905087706, wkt_loads('POINT (-73.93438364970781 40.69653761897279)'), {u'source': u'openstreetmap.org', u'railway': u'railway_crossing'}),dsl.way(1905087715, wkt_loads('POINT (-73.92220177580289 40.68957608243889)'), {u'name': u'Gates Avenue (J,Z)', u'wheelchair': u'no', u'source': u'openstreetmap.org', u'railway': u'station', u'network': u'New York City Subway'}),dsl.way(1905087719, wkt_loads('POINT (-73.93422087497829 40.69644560512828)'), {u'railway:switch:configuration': u'inside', u'railway:local_operated': u'no', u'railway:switch:movable_frog': u'no', u'railway:turnout_side': u'left', u'source': u'openstreetmap.org', u'railway:switch': u'regular', u'railway': 
u'switch'}),dsl.way(2051677434, wkt_loads('POINT (-73.93527738358389 40.69699591549559)'), {u'name': u'Myrtle Avenue (J,M,Z)', u'wheelchair': u'no', u'wikipedia': u'en:Myrtle Avenue (BMT Jamaica Line)', u'source': u'openstreetmap.org', u'wikidata': u'Q2616277', u'public_transport': u'station', u'operator': u'Metropolitan Transportation Authority', u'railway': u'station', u'ref': u'J;M;Z', u'start_date': u'1888-09-16', u'network': u'New York City Subway'}),dsl.way(2052618392, wkt_loads('POINT (-74.00400047911269 40.71307121284639)'), {u'name': u'Chambers Street (J,Z)', u'wheelchair': u'no', u'source': u'openstreetmap.org', u'railway': u'station', u'network': u'New York City Subway'}),dsl.way(2782255511, wkt_loads('POINT (-73.95783902110219 40.70834730021729)'), {u'crossing': u'traffic_signals', u'source': u'openstreetmap.org', u'wheelchair': u'yes', u'highway': u'crossing'}),dsl.way(2782255513, wkt_loads('POINT (-73.9580108688161 40.70840715642598)'), {u'crossing': u'traffic_signals', u'source': u'openstreetmap.org', u'wheelchair': u'yes', u'highway': u'crossing'}),dsl.way(3020995528, wkt_loads('POINT (-73.9358375729951 40.6972148124399)'), {u'name': u'Myrtle Avenue', u'tram': u'no', u'public_transport': u'stop_position', u'source': u'openstreetmap.org', u'train': u'no', u'light_rail': u'no', u'operator': u'Metropolitan Transportation Authority', u'railway': u'stop', u'subway': u'yes'}),dsl.way(3020995529, wkt_loads('POINT (-73.93571612076869 40.69733420427839)'), {u'name': u'Myrtle Avenue', u'tram': u'no', u'public_transport': u'stop_position', u'source': u'openstreetmap.org', u'train': u'no', u'light_rail': u'no', u'operator': u'Metropolitan Transportation Authority', u'railway': u'stop', u'subway': u'yes'}),dsl.way(25509062, wkt_loads('LINESTRING (-73.81986354267929 40.69972948762659, -73.82063348870929 40.70026567332079, -73.8212134410567 40.70074757615059)'), {u'name': u'BMT Archer Avenue Line', u'electrified': u'rail', u'source': u'openstreetmap.org', 
u'frequency': u'0', u'gauge': u'1435', u'voltage': u'625', u'oneway': u'yes', u'railway': u'subway'}),dsl.way(40337619, wkt_loads('LINESTRING (-73.96121381196158 40.7102538200067, -73.96184937002509 40.71044700206859)'), {u'bridge': u'yes', u'layer': u'2', u'tiger:name_base': u'Williamsburg', u'name': u'Williamsburg Bridge subway tracks', u'tiger:cfcc': u'A63; A63; A41;A41; A63', u'electrified': u'rail', u'source': u'openstreetmap.org', u'frequency': u'0', u'gauge': u'1435', u'voltage': u'625', u'tiger:name_type': u'Brg', u'railway': u'subway', u'tiger:county': u'New York, NY; Kings, NY'}),dsl.way(46340156, wkt_loads('LINESTRING (-73.9591589157492 40.7088025192929, -73.9580108688161 40.70840715642598, -73.95783902110219 40.70834730021729, -73.9565517353001 40.7078989557555, -73.95470084648869 40.7072741012702, -73.9525070707333 40.7065334068587, -73.9521314851131 40.70639510031489, -73.95176137921598 40.70624058616518, -73.95147167253688 40.70609417533569, -73.9500758702484 40.7053132213434, -73.9474518913035 40.7038360028923, -73.94636322301069 40.7032274545438, -73.94369819105729 40.70173902100757, -73.9431858818507 40.70145326193468, -73.942873627458 40.7012610753292, -73.9421875840755 40.70082575903958, -73.94167141211329 40.70051881575919, -73.93824317149451 40.69860561533159, -73.9368733305177 40.69785127131109, -73.93677712095079 40.69779651361199, -73.9366690536221 40.69772738535788, -73.93660069182899 40.69768120900628, -73.93651166878431 40.69761889131299, -73.9364293831043 40.6975620902171, -73.936362189121 40.6975179569581, -73.93619555163581 40.6974149111331, -73.9358375729951 40.6972148124399, -73.9356540471826 40.6971121066826, -73.9352689394203 40.69689450344408, -73.935152787254 40.69683341095169, -73.93503762323461 40.6967817172605, -73.93486891962429 40.6967057090726)'), {u'bridge': u'yes', u'layer': u'2', u'name': u'BMT Jamaica Line', u'electrified': u'rail', u'source': u'openstreetmap.org', u'frequency': u'0', u'gauge': u'1435', u'voltage': 
u'625', u'railway': u'subway'}),dsl.way(46340235, wkt_loads('LINESTRING (-73.9854870994223 40.71762586834119, -73.98626611843669 40.7178910641163, -73.98810811392678 40.71851690833979, -73.9881575212674 40.71853365735368, -73.98831257048539 40.71858036393119, -73.98844273637009 40.7186197172612, -73.9892240910042 40.71885508781649)'), {u'layer': u'-3', u'name': u'BMT Nassau Street Line', u'tunnel': u'yes', u'electrified': u'rail', u'source': u'openstreetmap.org', u'frequency': u'0', u'gauge': u'1435', u'voltage': u'625', u'railway': u'subway'}),dsl.way(46340283, wkt_loads('LINESTRING (-73.81415295241818 40.69864198381448, -73.80810962617579 40.70046426456449, -73.802302197527 40.7019169728328, -73.800876391508 40.70227396468948, -73.79985877995421 40.70238149772579, -73.79944070402099 40.70242555966068)'), {u'layer': u'-1', u'name': u'IND/BMT Archer Avenue Lines', u'tunnel': u'yes', u'electrified': u'rail', u'source': u'openstreetmap.org', u'frequency': u'0', u'gauge': u'1435', u'voltage': u'600', u'railway': u'subway'}),dsl.way(46340286, wkt_loads('LINESTRING (-73.81986354267929 40.69972948762659, -73.81908012192 40.69931180054319, -73.81802918286908 40.69888907103378)'), {u'layer': u'-1', u'name': u'BMT Archer Avenue Line', u'tunnel': u'yes', u'electrified': u'rail', u'source': u'openstreetmap.org', u'frequency': u'0', u'gauge': u'1435', u'voltage': u'625', u'railway': u'subway'}),dsl.way(46343180, wkt_loads('LINESTRING (-73.9591589157492 40.7088025192929, -73.959317827723 40.70887538129429, -73.95944547832478 40.70897105510969, -73.9605975676767 40.7099326884868, -73.96072988951809 40.71002910981829, -73.96089014896479 40.7101266886084, -73.96104789312868 40.7102033624214, -73.96121381196158 40.7102538200067)'), {u'bridge': u'yes', u'layer': u'2', u'name': u'BMT Jamaica Line', u'electrified': u'rail', u'source': u'openstreetmap.org', u'frequency': u'0', u'gauge': u'1435', u'voltage': u'625', u'railway': u'subway'}),dsl.way(46343183, wkt_loads('LINESTRING 
(-73.8212134410567 40.70074757615059, -73.82148015086459 40.7009439185453, -73.82160519635218 40.70103701582959, -73.8217980646437 40.70115115687249, -73.82197314629251 40.7012377840728, -73.8221304412988 40.70130077941109, -73.8223183688562 40.7013628212483, -73.8225180643439 40.70142281994028, -73.82275827385089 40.7014694704183, -73.82301330556001 40.70150311322448, -73.8232374352234 40.7015106045353, -73.82348887367141 40.70150917437599, -73.8236847962349 40.70149276159358, -73.82390120038679 40.70145707569589, -73.82410107553748 40.70141437517868)'), {u'bridge': u'yes', u'layer': u'1', u'name': u'BMT Archer Avenue Line', u'electrified': u'rail', u'source': u'openstreetmap.org', u'frequency': u'0', u'gauge': u'1435', u'voltage': u'625', u'oneway': u'yes', u'railway': u'subway'}),dsl.way(46695598, wkt_loads('LINESTRING (-73.9912635361938 40.71947220816732, -73.99425690238348 40.72039515462619, -73.99623104005188 40.7210568549701, -73.99667480780219 40.72120016917949, -73.99681629245948 40.7212271980006, -73.9969069324717 40.72123570833241, -73.99698652320579 40.7212423804318, -73.99706422747789 40.7212301936375, -73.99711776706879 40.721216985601, -73.99724245323029 40.72117940395689, -73.99739759227981 40.7210811605371, -73.99754788042688 40.72092579538729, -73.99775449294221 40.72067565809599, -73.999235006362 40.7189233771218, -73.99998312333059 40.7179859760317, -74.0007585490838 40.716971759637, -74.00218013302099 40.71513270406061, -74.0026203973417 40.71433605925599, -74.00400047911269 40.71307121284639, -74.00492583368688 40.71228891074668, -74.0054041865757 40.71192019262629, -74.00650372448339 40.7111920108981, -74.00734302045339 40.71060770296908, -74.0077647794793 40.71024108644968, -74.00790464716898 40.71008726216397, -74.00803283676009 40.70994630732739, -74.0081350650394 40.7098336794318, -74.0082647817664 40.70969102169689, -74.00838264073168 40.70956136972439, -74.00932326666569 40.70859496429478, -74.01006770054158 40.70769296416069, 
-74.01065385126449 40.7070909883774, -74.0110343776189 40.70648029064251, -74.0111664299657 40.70628028727889, -74.01135947792018 40.7059468789065)'), {u'layer': u'-3', u'name': u'BMT Nassau Street Line', u'tunnel': u'yes', u'electrified': u'rail', u'source': u'openstreetmap.org', u'frequency': u'0', u'gauge': u'1435', u'voltage': u'625', u'railway': u'subway'}),dsl.way(180330650, wkt_loads('LINESTRING (-73.82410107553748 40.70141437517868, -73.82491638648939 40.70119658168419, -73.82539878179701 40.70107290626208, -73.82564186591289 40.70101890034768, -73.82628407150949 40.70090292028528, -73.8271389981654 40.70075956238348, -73.8271867885385 40.70075159426297, -73.82921608276529 40.70041897546119, -73.8296103533435 40.70035196115268, -73.83028858138299 40.70021391418889, -73.83070333354971 40.70011700191659, -73.8315835926966 40.69989361961328, -73.83191569985711 40.69980699066488, -73.83223379329928 40.69970285870589, -73.8326044381855 40.69954989544159, -73.83277610623628 40.69946646697708, -73.83295514047238 40.6993686682522, -73.8340420121347 40.69878950071549, -73.83517200293059 40.69818308565958, -73.8354774301272 40.69801499928728, -73.83568107820209 40.69790275985279, -73.83613993764919 40.69765056096769, -73.8364802194788 40.69743500269769, -73.83693449751799 40.69712245899419, -73.83775762381281 40.69657221777999, -73.83841375329639 40.69611187509218, -73.83857994162389 40.6960164552379, -73.83877469637748 40.69592117146389, -73.83907383536709 40.69579108403278, -73.8392563730329 40.6957206596317, -73.83941824944711 40.69566555963729, -73.8397845824199 40.6955621704025, -73.8399700845261 40.69551401739049, -73.8402299671378 40.69546109671349, -73.84057779481579 40.69540647326789, -73.8410253354904 40.6953517135593, -73.8413419017965 40.69531500273438, -73.8417979764662 40.69527835999839, -73.84234505047428 40.69524900492968, -73.84401897117469 40.69523660904989, -73.845060118589 40.69521726602398, -73.84528469740998 40.69522046715899, -73.84551098303008 
40.6952185600999, -73.8458498275553 40.69519131639249, -73.8461028829708 40.6951554909004, -73.84684614903691 40.69500789784279, -73.84707638724419 40.6949611066206, -73.8480105453081 40.69476890128459, -73.84856336853399 40.69467286651838, -73.8500778382715 40.694393820033, -73.8504869310519 40.69430336976199, -73.850849221606 40.69420556345739, -73.8513989007283 40.69402466228149, -73.85183647010319 40.69385826818339, -73.85260435000809 40.69357220285998, -73.8539222683614 40.69309079301468, -73.85431510163519 40.6929792945111, -73.85470398232169 40.6928865946358, -73.85503537083 40.69283360376009, -73.85559798569238 40.69277550446099, -73.85694411114569 40.69270786945699, -73.85815270452891 40.69264166473409, -73.85921990308641 40.69255400467559, -73.86039283335289 40.69242588592119, -73.86252426602751 40.69213518280449, -73.86410952300939 40.6918595994379, -73.86791990695011 40.69125666319418, -73.868406434508 40.69117969511559, -73.8687485129682 40.69110940206708, -73.87081859070889 40.69058846832648, -73.8716098268111 40.69037268286448, -73.8717620912518 40.6903256840443, -73.8719797530451 40.69023768036719, -73.87360291893199 40.68954652051259, -73.87369328944961 40.6894996573399, -73.8737568003402 40.6894495927222, -73.87382857573138 40.68938488329669, -73.87389460190478 40.6893214679987, -73.8739359244078 40.689249470106, -73.8739584721215 40.68918046921436, -73.87397104853538 40.68909116982999, -73.8739761689326 40.68899928193281, -73.87395784330079 40.6889063721733, -73.8739243361407 40.6888226579168, -73.8734120269341 40.6873907123415, -73.87319697025509 40.68674965776008, -73.8730443464883 40.68627719025159, -73.8729648455857 40.68602290380028, -73.87262330611469 40.68495717893349, -73.87244391255238 40.68439130582159, -73.8723475233224 40.684031288948, -73.87236539979659 40.6839094890432, -73.87242909035029 40.68377726641138, -73.87251362181848 40.68367760538779, -73.8726553759703 40.68359408892649, -73.87276973150598 40.68355110447541, 
-73.87511065130489 40.68288187863698, -73.87689290882859 40.68237831991149, -73.87927083921718 40.68169566205288, -73.8804000215293 40.68137459721788, -73.881162601374 40.6811123215922, -73.88254717472138 40.68066726814978, -73.8840379289354 40.68019741438299, -73.8875107259923 40.67908935842159, -73.8891084695566 40.67857527960087, -73.8895852953094 40.67842581040581, -73.88978076871528 40.67837526058398, -73.8899573775001 40.6783369053701, -73.89015590517791 40.67830488590288, -73.8938955018742 40.67774209016148, -73.8966516229974 40.67733318994869, -73.89801661307159 40.67712921579599, -73.8988694736023 40.6770020893049, -73.89952398611828 40.67690337194289, -73.8997895281163 40.67688211601149, -73.89986938834511 40.6768877706349, -73.89995724357991 40.67690241815128, -73.9000638736041 40.67693259883759, -73.90015163900729 40.676970477964, -73.9003294156021 40.67706987650468, -73.90102066921318 40.67748416039589, -73.9016067301046 40.67785238787339, -73.9018267275176 40.67798966364648, -73.9024953435836 40.67837267177809, -73.90308059599118 40.67871691395739, -73.9031945922008 40.67878258750587, -73.90344782727939 40.67893518969239, -73.9036565957514 40.67910979614068, -73.90403802042098 40.67933106811098, -73.90414680640188 40.67939422041358, -73.9044895136828 40.67959498575069, -73.9047590980996 40.67975289983249, -73.9051821147669 40.67999705960928, -73.9055732412416 40.68017677262359, -73.90632980237389 40.6806092944916, -73.9064712870311 40.6806900216046, -73.9068261215684 40.6808926223843, -73.90699608282009 40.6809895626202, -73.90753570081129 40.68129707273778, -73.90793104936779 40.68150880019088, -73.91050768709728 40.682962807363, -73.9119675392655 40.68379538658148, -73.91652002146229 40.68637889093681, -73.92217581449118 40.68960857329669, -73.924075571654 40.69068845958519, -73.92892548604151 40.69344040806559, -73.93344410175209 40.69600031353589, -73.93393619886469 40.69628739005628, -73.93422087497829 40.69644560512828, -73.93429085373889 
40.69648456289288, -73.93438364970781 40.69653761897279, -73.93454552612199 40.69662799810268)'), {u'bridge': u'yes', u'layer': u'2', u'name': u'BMT Jamaica Line', u'electrified': u'rail', u'source': u'openstreetmap.org', u'frequency': u'0', u'gauge': u'1435', u'voltage': u'625', u'railway': u'subway'}),dsl.way(180330653, wkt_loads('LINESTRING (-73.93486891962429 40.6967057090726, -73.9346067013928 40.69657255831939, -73.9343554426079 40.69643538892749, -73.93399279272769 40.6962265013319, -73.9338487927876 40.6961332611079, -73.92897821714861 40.69338687274139, -73.92412839259269 40.69063505827768, -73.92222872526141 40.68955517112379, -73.9165729322326 40.6863254861757, -73.91202027037269 40.6837419797502, -73.9105604182044 40.68290926362089, -73.90751584804349 40.68119127688269, -73.90715409647859 40.68097709596387, -73.9065064111587 40.68062346432109, -73.90615597836639 40.68041861461879, -73.9056315419035 40.6801119860696, -73.90533213341928 40.6798975973082, -73.90434218997622 40.67932275680028, -73.9042623297475 40.67927506893179, -73.9037086082064 40.67894465919148, -73.90324903010699 40.67868087522949, -73.90301609695381 40.67853971768058, -73.90259604472701 40.6782954844395, -73.90215811602599 40.6780385787172, -73.9014979441237 40.6776578849912, -73.90115523684278 40.67745581939938, -73.9010036012228 40.67737127322999, -73.90087316584361 40.67729497039038, -73.9002955491159 40.6769636651731, -73.90012711500009 40.6768665146985, -73.90002389857391 40.67682999807389, -73.8999168193921 40.67680560822779, -73.8998011163835 40.6767932089171, -73.8995618950233 40.67682209521968, -73.89918559075079 40.67687768769159, -73.89846082997958 40.67698601112479, -73.8979893941185 40.67705570591987, -73.8966369804582 40.6772592715319, -73.8938797813567 40.67767246421108, -73.89013946600819 40.6782353967936, -73.8899368060801 40.67826809756001, -73.8897544480774 40.6783076790911, -73.88955340511679 40.67835959149399, -73.88907307593439 40.6785102189852, 
-73.88747497304399 40.6790243664334, -73.88400217598709 40.68013249159978, -73.88250953531099 40.68061351819109, -73.8803543871129 40.68128971540109, -73.87923930835071 40.68162147598076, -73.87685859318469 40.6823038621094, -73.8750772339763 40.68280966942138, -73.8727221207959 40.68348148459239, -73.8725625800015 40.68353230302689, -73.87247544341889 40.68357521936898, -73.87242100551271 40.68361636453449, -73.87235623698071 40.68367052079808, -73.872300182107 40.68373898242681, -73.8722496069565 40.6838434118205, -73.87223262879759 40.68394831789609, -73.8722355932381 40.6840417795334, -73.87242199365951 40.6846045902083, -73.8725435357174 40.68497127971888, -73.87294687927999 40.68628597752387, -73.87320209065219 40.68704229166519, -73.87332462085701 40.68740549378439, -73.8738301028674 40.6888385970104, -73.87385660316821 40.68891611271869, -73.8738687304246 40.68899138052139, -73.8738674727832 40.68907461775711, -73.87384986580359 40.68915410853899, -73.873825790954 40.6892069661102, -73.8737882413751 40.6892780785495, -73.8737360492571 40.6893380200103, -73.87367568246998 40.68939591796109, -73.87360983595968 40.6894479579586, -73.87353159269851 40.68948930384388, -73.87194058649879 40.69017310793579, -73.87156634835139 40.6903070207355, -73.87078849714689 40.69052369189279, -73.86872362963479 40.6910415610313, -73.86839188180041 40.69110940206708, -73.86743742181099 40.69126538169657, -73.86408993973619 40.69179257651898, -73.8625062098903 40.69206788771289, -73.86037828064531 40.69235811433791, -73.85921002161828 40.69248568832629, -73.85814363154459 40.69256456203141, -73.856935846645 40.69263771424438, -73.8555835228163 40.69270017282081, -73.85504390482518 40.69275350436151, -73.85464819694251 40.69282277400008, -73.8542794385184 40.69290818602168, -73.8538819340051 40.69302547412168, -73.8525809039792 40.69350279777859, -73.8517971238938 40.6937960831354, -73.85134814591478 40.69396458881548, -73.8508169720873 40.6941424932989, -73.8504483933262 
40.69423587252948, -73.85006463303679 40.69432148707009, -73.84854037166269 40.69460598264919, -73.84798763826841 40.6947024942802, -73.84705285138379 40.69489476791739, -73.84682288267101 40.6949415591862, -73.84608859975779 40.69508315876439, -73.8458396765925 40.69511469340588, -73.84551134235619 40.69513968953648, -73.8452851465676 40.69515181299709, -73.8450575134747 40.69515358383938, -73.84401744403868 40.6951679549046, -73.84234136738159 40.69518041890651, -73.84178998146018 40.69520997833329, -73.84133031352928 40.6952468935437, -73.8410112319404 40.69528387684289, -73.8405614454776 40.6953389771529, -73.84020903639168 40.69539428174459, -73.8399436740568 40.69544836032809, -73.8397541295318 40.69549760313099, -73.83938393380321 40.69560201409888, -73.839217296318 40.6956586806484, -73.83903071623349 40.69573067161588, -73.8387276246566 40.695862598097, -73.83852801900051 40.69596019764369, -73.83835554246591 40.69605915921528, -73.8376969875312 40.69652127307327, -73.83687413073091 40.69707137849358, -73.836420751007 40.69738317325929, -73.83608433193309 40.69759641606499, -73.8356278979373 40.6978473211304, -73.8350819019076 40.69813840796179, -73.8339899098482 40.6987335178857, -73.83290258902828 40.69931282211919, -73.83272517175971 40.69940980366648, -73.83255826477991 40.6994907804261, -73.8321940877637 40.69964108774169, -73.83188300118078 40.69974290423978, -73.83155448728138 40.69982857980798, -73.8306755756074 40.7000516899121, -73.8302635183866 40.70014798934151, -73.8295886141136 40.70028528729721, -73.82919667915519 40.70035196115268, -73.8271673849284 40.70068458028909, -73.82626772217129 40.70083100301059, -73.82561868937849 40.70095249957329, -73.8253716526754 40.70100739088489, -73.8248869217481 40.70113181556389, -73.8240785278239 40.70133925761549)'), {u'bridge': u'yes', u'layer': u'2', u'name': u'BMT Jamaica Line', u'electrified': u'rail', u'source': u'openstreetmap.org', u'frequency': u'0', u'gauge': u'1435', u'voltage': u'625', 
u'railway': u'subway'}),dsl.way(180330673, wkt_loads('LINESTRING (-73.9854870994223 40.71762586834119, -73.9853959204209 40.71761095743049, -73.9831000062178 40.71691688148329, -73.97842193954421 40.715502766161, -73.96182807995288 40.7104788699051)'), {u'bridge': u'yes', u'layer': u'3', u'name': u'BMT Jamaica Line', u'electrified': u'rail', u'source': u'openstreetmap.org', u'frequency': u'0', u'gauge': u'1435', u'voltage': u'625', u'railway': u'subway'}),dsl.way(180332203, wkt_loads('LINESTRING (-73.81802918286908 40.69888907103378, -73.81713634730821 40.698639600113, -73.81640269321568 40.6985193933434, -73.81506294580099 40.69848799652539, -73.81415295241818 40.69864198381448)'), {u'layer': u'-1', u'name': u'BMT Archer Avenue Line', u'tunnel': u'yes', u'electrified': u'rail', u'source': u'openstreetmap.org', u'frequency': u'0', u'gauge': u'1435', u'voltage': u'600', u'railway': u'subway'}),dsl.way(180332204, wkt_loads('LINESTRING (-73.82126266873431 40.700718359699, -73.82066897216299 40.70023856788567, -73.8198975888286 40.6997013604032)'), {u'name': u'BMT Archer Avenue Line', u'electrified': u'rail', u'source': u'openstreetmap.org', u'frequency': u'0', u'gauge': u'1435', u'voltage': u'625', u'oneway': u'yes', u'railway': u'subway'}),dsl.way(180332206, wkt_loads('LINESTRING (-73.8240785278239 40.70133925761549, -73.8238805391353 40.7013814814602, -73.8236696147066 40.70141641826631, -73.8234807888339 40.70143208193559, -73.82323447078299 40.70143358019938, -73.8230167191581 40.70142615698278, -73.82276860447659 40.70139346757899, -73.82253522216578 40.70134817910748, -73.8223353470151 40.70129560358018, -73.82215334833849 40.70123185910279, -73.82202030784499 40.70118296105429, -73.8218439685547 40.70110620875241, -73.82158201981781 40.7009521590564, -73.82141897559369 40.7008406056058, -73.82126266873431 40.700718359699)'), {u'bridge': u'yes', u'layer': u'1', u'name': u'BMT Jamaica Line', u'electrified': u'rail', u'source': u'openstreetmap.org', u'frequency': 
u'0', u'gauge': u'1435', u'voltage': u'625', u'oneway': u'yes', u'railway': u'subway'}),dsl.way(206583253, wkt_loads('LINESTRING (-73.93454552612199 40.69662799810268, -73.93471512804759 40.69672368951199, -73.9347963357493 40.69677919727721, -73.93486514670001 40.69682796234318, -73.9349256033187 40.69687018904748, -73.9350050143898 40.69692978314111, -73.93514272612289 40.69700871969258, -73.93529040915558 40.69709616956809, -73.93567219315131 40.6973098900423, -73.93571612076869 40.69733420427839, -73.93609467082941 40.69754336082839, -73.9362533133086 40.69762617874028, -73.93640171499349 40.69769809947488, -73.9365089738385 40.69775088216169, -73.9366642027196 40.69783029448638, -73.93683820639011 40.69792026322369, -73.93728619622229 40.6981627900018, -73.938191069208 40.69866432249818, -73.941594695988 40.70056859964449, -73.94213808690328 40.70089297718179, -73.94279879779479 40.70130970090828, -73.94306577709729 40.70147137729838, -73.94364330399338 40.7017903703393, -73.94627833221629 40.7032702901007, -73.94739601609278 40.70389198143378, -73.95002673240242 40.70536286544748, -73.951424600816 40.7061451127212, -73.95171879907151 40.70629377067059, -73.95209384570261 40.70645025953217, -73.95247239576339 40.70658965552359, -73.9544285671261 40.70725946042889, -73.95601041050989 40.70780096512259, -73.9591253187576 40.70885917461301, -73.9593675045582 40.70895866178338, -73.9594468257978 40.70901579363668, -73.9606502089524 40.71002611367731, -73.9608647266422 40.710164821244, -73.9610947851865 40.71025906323538, -73.9612006067269 40.71028841168959, -73.96182807995288 40.7104788699051)'), {u'bridge': u'yes', u'layer': u'2', u'name': u'BMT Jamaica Line', u'electrified': u'rail', u'source': u'openstreetmap.org', u'frequency': u'0', u'gauge': u'1435', u'voltage': u'625', u'railway': u'subway'}),dsl.way(206583332, wkt_loads('LINESTRING (-73.96184937002509 40.71044700206859, -73.97843119219158 40.71547675634369, -73.9842202053771 40.71723137490488, 
-73.9854082273403 40.71758821658319, -73.9854870994223 40.71762586834119)'), {u'bridge': u'yes', u'layer': u'3', u'tiger:name_base': u'Williamsburg', u'name': u'Williamsburg Bridge subway tracks', u'tiger:cfcc': u'A63; A63; A41;A41; A63', u'electrified': u'rail', u'source': u'openstreetmap.org', u'frequency': u'0', u'gauge': u'1435', u'voltage': u'625', u'tiger:name_type': u'Brg', u'railway': u'subway', u'tiger:county': u'New York, NY; Kings, NY'}),dsl.way(360569167, wkt_loads('LINESTRING (-73.9892240910042 40.71885508781649, -73.99078545279949 40.7193256221766, -73.9910319505135 40.71940017477608, -73.9912635361938 40.71947220816732)'), {u'layer': u'-3', u'name': u'BMT Nassau Street Line', u'tunnel': u'yes', u'electrified': u'rail', u'source': u'openstreetmap.org', u'frequency': u'0', u'gauge': u'1435', u'voltage': u'625', u'railway': u'subway'}),dsl.way(-366773, wkt_loads('LINESTRING (-73.9854870994223 40.71762586834119, -73.98626611843669 40.7178910641163, -73.98810811392678 40.71851690833979, -73.9881575212674 40.71853365735368, -73.98831257048539 40.71858036393119, -73.98844273637009 40.7186197172612, -73.9892240910042 40.71885508781649, -73.99078545279949 40.7193256221766, -73.9910319505135 40.71940017477608, -73.9912635361938 40.71947220816732, -73.99425690238348 40.72039515462619, -73.99623104005188 40.7210568549701, -73.99667480780219 40.72120016917949, -73.99681629245948 40.7212271980006, -73.9969069324717 40.72123570833241, -73.99698652320579 40.7212423804318, -73.99706422747789 40.7212301936375, -73.99711776706879 40.721216985601, -73.99724245323029 40.72117940395689, -73.99739759227981 40.7210811605371, -73.99754788042688 40.72092579538729, -73.99775449294221 40.72067565809599, -73.999235006362 40.7189233771218, -73.99998312333059 40.7179859760317, -74.0007585490838 40.716971759637, -74.00218013302099 40.71513270406061, -74.0026203973417 40.71433605925599, -74.00400047911269 40.71307121284639, -74.00492583368688 40.71228891074668, -74.0054041865757 
40.71192019262629, -74.00650372448339 40.7111920108981, -74.00734302045339 40.71060770296908, -74.0077647794793 40.71024108644968, -74.00790464716898 40.71008726216397, -74.00803283676009 40.70994630732739, -74.0081350650394 40.7098336794318, -74.0082647817664 40.70969102169689, -74.00838264073168 40.70956136972439, -74.00932326666569 40.70859496429478, -74.01006770054158 40.70769296416069, -74.01065385126449 40.7070909883774, -74.0110343776189 40.70648029064251, -74.0111664299657 40.70628028727889, -74.01135947792018 40.7059468789065)'), {u'name': u'NYCS - Z Train', u'route_pref_color': u'0', u'colour': u'#996633', u'route_name': u'NYCS - Z Train', u'operator': u'Metropolitan Transportation Authority', u'source': u'openstreetmap.org', u'alt_name': u'Nassau Street Express', u'owner': u'City of New York', u'ref': u'Z', u'route': u'subway', u'network': u'NYC Subway'}),dsl.way(-366773, wkt_loads('LINESTRING (-73.9854870994223 40.71762586834119, -73.9854082273403 40.71758821658319, -73.9842202053771 40.71723137490488, -73.97843119219158 40.71547675634369, -73.96184937002509 40.71044700206859, -73.96121381196158 40.7102538200067, -73.96104789312868 40.7102033624214, -73.96089014896479 40.7101266886084, -73.96072988951809 40.71002910981829, -73.9605975676767 40.7099326884868, -73.95944547832478 40.70897105510969, -73.959317827723 40.70887538129429, -73.9591589157492 40.7088025192929, -73.9580108688161 40.70840715642598, -73.95783902110219 40.70834730021729, -73.9565517353001 40.7078989557555, -73.95470084648869 40.7072741012702, -73.9525070707333 40.7065334068587, -73.9521314851131 40.70639510031489, -73.95176137921598 40.70624058616518, -73.95147167253688 40.70609417533569, -73.9500758702484 40.7053132213434, -73.9474518913035 40.7038360028923, -73.94636322301069 40.7032274545438, -73.94369819105729 40.70173902100757, -73.9431858818507 40.70145326193468, -73.942873627458 40.7012610753292, -73.9421875840755 40.70082575903958, -73.94167141211329 40.70051881575919, 
-73.93824317149451 40.69860561533159, -73.9368733305177 40.69785127131109, -73.93677712095079 40.69779651361199, -73.9366690536221 40.69772738535788, -73.93660069182899 40.69768120900628, -73.93651166878431 40.69761889131299, -73.9364293831043 40.6975620902171, -73.936362189121 40.6975179569581, -73.93619555163581 40.6974149111331, -73.9358375729951 40.6972148124399, -73.9356540471826 40.6971121066826, -73.9352689394203 40.69689450344408, -73.935152787254 40.69683341095169, -73.93503762323461 40.6967817172605, -73.93486891962429 40.6967057090726, -73.9346067013928 40.69657255831939, -73.9343554426079 40.69643538892749, -73.93399279272769 40.6962265013319, -73.9338487927876 40.6961332611079, -73.92897821714861 40.69338687274139, -73.92412839259269 40.69063505827768, -73.92222872526141 40.68955517112379, -73.9165729322326 40.6863254861757, -73.91202027037269 40.6837419797502, -73.9105604182044 40.68290926362089, -73.90751584804349 40.68119127688269, -73.90715409647859 40.68097709596387, -73.9065064111587 40.68062346432109, -73.90615597836639 40.68041861461879, -73.9056315419035 40.6801119860696, -73.90533213341928 40.6798975973082, -73.90434218997622 40.67932275680028, -73.9042623297475 40.67927506893179, -73.9037086082064 40.67894465919148, -73.90324903010699 40.67868087522949, -73.90301609695381 40.67853971768058, -73.90259604472701 40.6782954844395, -73.90215811602599 40.6780385787172, -73.9014979441237 40.6776578849912, -73.90115523684278 40.67745581939938, -73.9010036012228 40.67737127322999, -73.90087316584361 40.67729497039038, -73.9002955491159 40.6769636651731, -73.90012711500009 40.6768665146985, -73.90002389857391 40.67682999807389, -73.8999168193921 40.67680560822779, -73.8998011163835 40.6767932089171, -73.8995618950233 40.67682209521968, -73.89918559075079 40.67687768769159, -73.89846082997958 40.67698601112479, -73.8979893941185 40.67705570591987, -73.8966369804582 40.6772592715319, -73.8938797813567 40.67767246421108, -73.89013946600819 
40.6782353967936, -73.8899368060801 40.67826809756001, -73.8897544480774 40.6783076790911, -73.88955340511679 40.67835959149399, -73.88907307593439 40.6785102189852, -73.88747497304399 40.6790243664334, -73.88400217598709 40.68013249159978, -73.88250953531099 40.68061351819109, -73.8803543871129 40.68128971540109, -73.87923930835071 40.68162147598076, -73.87685859318469 40.6823038621094, -73.8750772339763 40.68280966942138, -73.8727221207959 40.68348148459239, -73.8725625800015 40.68353230302689, -73.87247544341889 40.68357521936898, -73.87242100551271 40.68361636453449, -73.87235623698071 40.68367052079808, -73.872300182107 40.68373898242681, -73.8722496069565 40.6838434118205, -73.87223262879759 40.68394831789609, -73.8722355932381 40.6840417795334, -73.87242199365951 40.6846045902083, -73.8725435357174 40.68497127971888, -73.87294687927999 40.68628597752387, -73.87320209065219 40.68704229166519, -73.87332462085701 40.68740549378439, -73.8738301028674 40.6888385970104, -73.87385660316821 40.68891611271869, -73.8738687304246 40.68899138052139, -73.8738674727832 40.68907461775711, -73.87384986580359 40.68915410853899, -73.873825790954 40.6892069661102, -73.8737882413751 40.6892780785495, -73.8737360492571 40.6893380200103, -73.87367568246998 40.68939591796109, -73.87360983595968 40.6894479579586, -73.87353159269851 40.68948930384388, -73.87194058649879 40.69017310793579, -73.87156634835139 40.6903070207355, -73.87078849714689 40.69052369189279, -73.86872362963479 40.6910415610313, -73.86839188180041 40.69110940206708, -73.86743742181099 40.69126538169657, -73.86408993973619 40.69179257651898, -73.8625062098903 40.69206788771289, -73.86037828064531 40.69235811433791, -73.85921002161828 40.69248568832629, -73.85814363154459 40.69256456203141, -73.856935846645 40.69263771424438, -73.8555835228163 40.69270017282081, -73.85504390482518 40.69275350436151, -73.85464819694251 40.69282277400008, -73.8542794385184 40.69290818602168, -73.8538819340051 40.69302547412168, 
-73.8525809039792 40.69350279777859, -73.8517971238938 40.6937960831354, -73.85134814591478 40.69396458881548, -73.8508169720873 40.6941424932989, -73.8504483933262 40.69423587252948, -73.85006463303679 40.69432148707009, -73.84854037166269 40.69460598264919, -73.84798763826841 40.6947024942802, -73.84705285138379 40.69489476791739, -73.84682288267101 40.6949415591862, -73.84608859975779 40.69508315876439, -73.8458396765925 40.69511469340588, -73.84551134235619 40.69513968953648, -73.8452851465676 40.69515181299709, -73.8450575134747 40.69515358383938, -73.84401744403868 40.6951679549046, -73.84234136738159 40.69518041890651, -73.84178998146018 40.69520997833329, -73.84133031352928 40.6952468935437, -73.8410112319404 40.69528387684289, -73.8405614454776 40.6953389771529, -73.84020903639168 40.69539428174459, -73.8399436740568 40.69544836032809, -73.8397541295318 40.69549760313099, -73.83938393380321 40.69560201409888, -73.839217296318 40.6956586806484, -73.83903071623349 40.69573067161588, -73.8387276246566 40.695862598097, -73.83852801900051 40.69596019764369, -73.83835554246591 40.69605915921528, -73.8376969875312 40.69652127307327, -73.83687413073091 40.69707137849358, -73.836420751007 40.69738317325929, -73.83608433193309 40.69759641606499, -73.8356278979373 40.6978473211304, -73.8350819019076 40.69813840796179, -73.8339899098482 40.6987335178857, -73.83290258902828 40.69931282211919, -73.83272517175971 40.69940980366648, -73.83255826477991 40.6994907804261, -73.8321940877637 40.69964108774169, -73.83188300118078 40.69974290423978, -73.83155448728138 40.69982857980798, -73.8306755756074 40.7000516899121, -73.8302635183866 40.70014798934151, -73.8295886141136 40.70028528729721, -73.82919667915519 40.70035196115268, -73.8271673849284 40.70068458028909, -73.82626772217129 40.70083100301059, -73.82561868937849 40.70095249957329, -73.8253716526754 40.70100739088489, -73.8248869217481 40.70113181556389, -73.8240785278239 40.70133925761549, -73.8238805391353 
40.7013814814602, -73.8236696147066 40.70141641826631, -73.8234807888339 40.70143208193559, -73.82323447078299 40.70143358019938, -73.8230167191581 40.70142615698278, -73.82276860447659 40.70139346757899, -73.82253522216578 40.70134817910748, -73.8223353470151 40.70129560358018, -73.82215334833849 40.70123185910279, -73.82202030784499 40.70118296105429, -73.8218439685547 40.70110620875241, -73.82158201981781 40.7009521590564, -73.82141897559369 40.7008406056058, -73.82126266873431 40.700718359699, -73.82066897216299 40.70023856788567, -73.8198975888286 40.6997013604032)'), {u'name': u'NYCS - Z Train', u'route_pref_color': u'0', u'colour': u'#996633', u'route_name': u'NYCS - Z Train', u'operator': u'Metropolitan Transportation Authority', u'source': u'openstreetmap.org', u'alt_name': u'Nassau Street Express', u'owner': u'City of New York', u'ref': u'Z', u'route': u'subway', u'network': u'NYC Subway'}),dsl.way(-366773, wkt_loads('LINESTRING (-73.9854870994223 40.71762586834119, -73.9853959204209 40.71761095743049, -73.9831000062178 40.71691688148329, -73.97842193954421 40.715502766161, -73.96182807995288 40.7104788699051, -73.9612006067269 40.71028841168959, -73.9610947851865 40.71025906323538, -73.9608647266422 40.710164821244, -73.9606502089524 40.71002611367731, -73.9594468257978 40.70901579363668, -73.9593675045582 40.70895866178338, -73.9591253187576 40.70885917461301, -73.95601041050989 40.70780096512259, -73.9544285671261 40.70725946042889, -73.95247239576339 40.70658965552359, -73.95209384570261 40.70645025953217, -73.95171879907151 40.70629377067059, -73.951424600816 40.7061451127212, -73.95002673240242 40.70536286544748, -73.94739601609278 40.70389198143378, -73.94627833221629 40.7032702901007, -73.94364330399338 40.7017903703393, -73.94306577709729 40.70147137729838, -73.94279879779479 40.70130970090828, -73.94213808690328 40.70089297718179, -73.941594695988 40.70056859964449, -73.938191069208 40.69866432249818, -73.93728619622229 40.6981627900018, 
-73.93683820639011 40.69792026322369, -73.9366642027196 40.69783029448638, -73.9365089738385 40.69775088216169, -73.93640171499349 40.69769809947488, -73.9362533133086 40.69762617874028, -73.93609467082941 40.69754336082839, -73.93571612076869 40.69733420427839, -73.93567219315131 40.6973098900423, -73.93529040915558 40.69709616956809, -73.93514272612289 40.69700871969258, -73.9350050143898 40.69692978314111, -73.9349256033187 40.69687018904748, -73.93486514670001 40.69682796234318, -73.9347963357493 40.69677919727721, -73.93471512804759 40.69672368951199, -73.93454552612199 40.69662799810268, -73.93438364970781 40.69653761897279, -73.93429085373889 40.69648456289288, -73.93422087497829 40.69644560512828, -73.93393619886469 40.69628739005628, -73.93344410175209 40.69600031353589, -73.92892548604151 40.69344040806559, -73.924075571654 40.69068845958519, -73.92217581449118 40.68960857329669, -73.91652002146229 40.68637889093681, -73.9119675392655 40.68379538658148, -73.91050768709728 40.682962807363, -73.90793104936779 40.68150880019088, -73.90753570081129 40.68129707273778, -73.90699608282009 40.6809895626202, -73.9068261215684 40.6808926223843, -73.9064712870311 40.6806900216046, -73.90632980237389 40.6806092944916, -73.9055732412416 40.68017677262359, -73.9051821147669 40.67999705960928, -73.9047590980996 40.67975289983249, -73.9044895136828 40.67959498575069, -73.90414680640188 40.67939422041358, -73.90403802042098 40.67933106811098, -73.9036565957514 40.67910979614068, -73.90344782727939 40.67893518969239, -73.9031945922008 40.67878258750587, -73.90308059599118 40.67871691395739, -73.9024953435836 40.67837267177809, -73.9018267275176 40.67798966364648, -73.9016067301046 40.67785238787339, -73.90102066921318 40.67748416039589, -73.9003294156021 40.67706987650468, -73.90015163900729 40.676970477964, -73.9000638736041 40.67693259883759, -73.89995724357991 40.67690241815128, -73.89986938834511 40.6768877706349, -73.8997895281163 40.67688211601149, -73.89952398611828 
40.67690337194289, -73.8988694736023 40.6770020893049, -73.89801661307159 40.67712921579599, -73.8966516229974 40.67733318994869, -73.8938955018742 40.67774209016148, -73.89015590517791 40.67830488590288, -73.8899573775001 40.6783369053701, -73.88978076871528 40.67837526058398, -73.8895852953094 40.67842581040581, -73.8891084695566 40.67857527960087, -73.8875107259923 40.67908935842159, -73.8840379289354 40.68019741438299, -73.88254717472138 40.68066726814978, -73.881162601374 40.6811123215922, -73.8804000215293 40.68137459721788, -73.87927083921718 40.68169566205288, -73.87689290882859 40.68237831991149, -73.87511065130489 40.68288187863698, -73.87276973150598 40.68355110447541, -73.8726553759703 40.68359408892649, -73.87251362181848 40.68367760538779, -73.87242909035029 40.68377726641138, -73.87236539979659 40.6839094890432, -73.8723475233224 40.684031288948, -73.87244391255238 40.68439130582159, -73.87262330611469 40.68495717893349, -73.8729648455857 40.68602290380028, -73.8730443464883 40.68627719025159, -73.87319697025509 40.68674965776008, -73.8734120269341 40.6873907123415, -73.8739243361407 40.6888226579168, -73.87395784330079 40.6889063721733, -73.8739761689326 40.68899928193281, -73.87397104853538 40.68909116982999, -73.8739584721215 40.68918046921436, -73.8739359244078 40.689249470106, -73.87389460190478 40.6893214679987, -73.87382857573138 40.68938488329669, -73.8737568003402 40.6894495927222, -73.87369328944961 40.6894996573399, -73.87360291893199 40.68954652051259, -73.8719797530451 40.69023768036719, -73.8717620912518 40.6903256840443, -73.8716098268111 40.69037268286448, -73.87081859070889 40.69058846832648, -73.8687485129682 40.69110940206708, -73.868406434508 40.69117969511559, -73.86791990695011 40.69125666319418, -73.86410952300939 40.6918595994379, -73.86252426602751 40.69213518280449, -73.86039283335289 40.69242588592119, -73.85921990308641 40.69255400467559, -73.85815270452891 40.69264166473409, -73.85694411114569 40.69270786945699, 
-73.85559798569238 40.69277550446099, -73.85503537083 40.69283360376009, -73.85470398232169 40.6928865946358, -73.85431510163519 40.6929792945111, -73.8539222683614 40.69309079301468, -73.85260435000809 40.69357220285998, -73.85183647010319 40.69385826818339, -73.8513989007283 40.69402466228149, -73.850849221606 40.69420556345739, -73.8504869310519 40.69430336976199, -73.8500778382715 40.694393820033, -73.84856336853399 40.69467286651838, -73.8480105453081 40.69476890128459, -73.84707638724419 40.6949611066206, -73.84684614903691 40.69500789784279, -73.8461028829708 40.6951554909004, -73.8458498275553 40.69519131639249, -73.84551098303008 40.6952185600999, -73.84528469740998 40.69522046715899, -73.845060118589 40.69521726602398, -73.84401897117469 40.69523660904989, -73.84234505047428 40.69524900492968, -73.8417979764662 40.69527835999839, -73.8413419017965 40.69531500273438, -73.8410253354904 40.6953517135593, -73.84057779481579 40.69540647326789, -73.8402299671378 40.69546109671349, -73.8399700845261 40.69551401739049, -73.8397845824199 40.6955621704025, -73.83941824944711 40.69566555963729, -73.8392563730329 40.6957206596317, -73.83907383536709 40.69579108403278, -73.83877469637748 40.69592117146389, -73.83857994162389 40.6960164552379, -73.83841375329639 40.69611187509218, -73.83775762381281 40.69657221777999, -73.83693449751799 40.69712245899419, -73.8364802194788 40.69743500269769, -73.83613993764919 40.69765056096769, -73.83568107820209 40.69790275985279, -73.8354774301272 40.69801499928728, -73.83517200293059 40.69818308565958, -73.8340420121347 40.69878950071549, -73.83295514047238 40.6993686682522, -73.83277610623628 40.69946646697708, -73.8326044381855 40.69954989544159, -73.83223379329928 40.69970285870589, -73.83191569985711 40.69980699066488, -73.8315835926966 40.69989361961328, -73.83070333354971 40.70011700191659, -73.83028858138299 40.70021391418889, -73.8296103533435 40.70035196115268, -73.82921608276529 40.70041897546119, -73.8271867885385 
40.70075159426297, -73.8271389981654 40.70075956238348, -73.82628407150949 40.70090292028528, -73.82564186591289 40.70101890034768, -73.82539878179701 40.70107290626208, -73.82491638648939 40.70119658168419, -73.82410107553748 40.70141437517868, -73.82390120038679 40.70145707569589, -73.8236847962349 40.70149276159358, -73.82348887367141 40.70150917437599, -73.8232374352234 40.7015106045353, -73.82301330556001 40.70150311322448, -73.82275827385089 40.7014694704183, -73.8225180643439 40.70142281994028, -73.8223183688562 40.7013628212483, -73.8221304412988 40.70130077941109, -73.82197314629251 40.7012377840728, -73.8217980646437 40.70115115687249, -73.82160519635218 40.70103701582959, -73.82148015086459 40.7009439185453, -73.8212134410567 40.70074757615059, -73.82063348870929 40.70026567332079, -73.81986354267929 40.69972948762659, -73.81908012192 40.69931180054319, -73.81802918286908 40.69888907103378, -73.81713634730821 40.698639600113, -73.81640269321568 40.6985193933434, -73.81506294580099 40.69848799652539, -73.81415295241818 40.69864198381448, -73.80810962617579 40.70046426456449, -73.802302197527 40.7019169728328, -73.800876391508 40.70227396468948, -73.79985877995421 40.70238149772579, -73.79944070402099 40.70242555966068)'), {u'name': u'NYCS - Z Train', u'route_pref_color': u'0', u'colour': u'#996633', u'route_name': u'NYCS - Z Train', u'operator': u'Metropolitan Transportation Authority', u'source': u'openstreetmap.org', u'alt_name': u'Nassau Street Express', u'owner': u'City of New York', u'ref': u'Z', u'route': u'subway', u'network': u'NYC Subway'})) # noqa
self.assert_has_feature(
16, 19310, 24645, 'transit',
{'kind': 'subway', 'ref': 'Z',
'colour': '#996633', 'colour_name': 'peru'})
| 3,316.933333 | 49,403 | 0.796197 | 6,181 | 49,754 | 6.39314 | 0.192364 | 0.012754 | 0.00992 | 0.02604 | 0.928206 | 0.583485 | 0.582751 | 0.581714 | 0.575564 | 0.570022 | 0 | 0.674577 | 0.062447 | 49,754 | 14 | 49,404 | 3,553.857143 | 0.172551 | 0.000563 | 0 | 0 | 1 | 2 | 0.883153 | 0.006597 | 0 | 0 | 0 | 0 | 0.1 | 1 | 0.1 | false | 0 | 0.3 | 0 | 0.5 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
1c996c52a02f13eb4c5c78e0d0ec58a02564f3eb | 208 | py | Python | src/apps/proxy/purpleserver/proxy/tests/__init__.py | blueprin4/purplship-server | e4817d6f6fb358adb10eab81153cf564fdcbc784 | [
"ECL-2.0",
"Apache-2.0"
] | 12 | 2020-02-03T08:11:21.000Z | 2021-04-13T02:00:38.000Z | src/apps/proxy/purpleserver/proxy/tests/__init__.py | blueprin4/purplship-server | e4817d6f6fb358adb10eab81153cf564fdcbc784 | [
"ECL-2.0",
"Apache-2.0"
] | 9 | 2020-02-12T00:25:08.000Z | 2021-04-20T10:31:59.000Z | src/apps/proxy/purpleserver/proxy/tests/__init__.py | blueprin4/purplship-server | e4817d6f6fb358adb10eab81153cf564fdcbc784 | [
"ECL-2.0",
"Apache-2.0"
] | 7 | 2020-02-03T08:10:50.000Z | 2021-04-13T15:17:12.000Z | from purpleserver.proxy.tests.test_rating import *
from purpleserver.proxy.tests.test_shipping import *
from purpleserver.proxy.tests.test_tracking import *
from purpleserver.proxy.tests.test_pickup import *
| 41.6 | 52 | 0.846154 | 28 | 208 | 6.142857 | 0.357143 | 0.372093 | 0.488372 | 0.604651 | 0.802326 | 0.627907 | 0 | 0 | 0 | 0 | 0 | 0 | 0.076923 | 208 | 4 | 53 | 52 | 0.895833 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 9 |
1c9d0985432f81ad5f06393525753cf38bc3f6b9 | 427 | py | Python | pavo_cristatus/tests/doubles/module_fakes/non_annotated/module_fake_class_with_callables.py | MATTHEWFRAZER/pavo_cristatus | a4b96c0eb6c454fbe38d2092e29f63457a4ee955 | [
"MIT"
] | null | null | null | pavo_cristatus/tests/doubles/module_fakes/non_annotated/module_fake_class_with_callables.py | MATTHEWFRAZER/pavo_cristatus | a4b96c0eb6c454fbe38d2092e29f63457a4ee955 | [
"MIT"
] | null | null | null | pavo_cristatus/tests/doubles/module_fakes/non_annotated/module_fake_class_with_callables.py | MATTHEWFRAZER/pavo_cristatus | a4b96c0eb6c454fbe38d2092e29f63457a4ee955 | [
"MIT"
] | null | null | null | from pavo_cristatus.tests.doubles.module_fakes.module_fake_class import ModuleFakeClass
from trochilidae.interoperable_with_metaclass import interoperable_with_metaclass_future
__all__ = ["ModuleFakeClassWithCallables"]
class ModuleFakeClassWithCallables(interoperable_with_metaclass_future(ModuleFakeClass)):
def symbol_of_interest(self, a, b): pass
def non_symbol_of_interest(self, a : int, b : str) -> bool: pass
| 42.7 | 89 | 0.838407 | 51 | 427 | 6.607843 | 0.588235 | 0.151335 | 0.231454 | 0.189911 | 0.124629 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.093677 | 427 | 9 | 90 | 47.444444 | 0.870801 | 0 | 0 | 0 | 0 | 0 | 0.065574 | 0.065574 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | false | 0.333333 | 0.333333 | 0 | 0.833333 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 1 | 0 | 1 | 0 | 0 | 7 |
98e43f3f81153a8e654639dc7935aa6a1039cb57 | 34 | py | Python | 29/02/mymodule.py | pylangstudy/201708 | 126b1af96a1d1f57522d5a1d435b58597bea2e57 | [
"CC0-1.0"
] | null | null | null | 29/02/mymodule.py | pylangstudy/201708 | 126b1af96a1d1f57522d5a1d435b58597bea2e57 | [
"CC0-1.0"
] | 39 | 2017-07-31T22:54:01.000Z | 2017-08-31T00:19:03.000Z | 30/02/mymodule.py | pylangstudy/201708 | 126b1af96a1d1f57522d5a1d435b58597bea2e57 | [
"CC0-1.0"
] | null | null | null | def mymethod(): print('mymethod')
| 17 | 33 | 0.705882 | 4 | 34 | 6 | 0.75 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.088235 | 34 | 1 | 34 | 34 | 0.774194 | 0 | 0 | 0 | 0 | 0 | 0.235294 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | true | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 7 |
98f2cf424d86d21937ceb7095d42150ef887ec12 | 86 | py | Python | __init__.py | dxyang/math3d | d1bbe0f4fec78179af484dfeefe8f102a1e688a6 | [
"MIT"
] | null | null | null | __init__.py | dxyang/math3d | d1bbe0f4fec78179af484dfeefe8f102a1e688a6 | [
"MIT"
] | null | null | null | __init__.py | dxyang/math3d | d1bbe0f4fec78179af484dfeefe8f102a1e688a6 | [
"MIT"
] | null | null | null | from quat import * # noqa
from transform import * # noqa
from vec3 import * # noqa
| 21.5 | 31 | 0.686047 | 12 | 86 | 4.916667 | 0.5 | 0.508475 | 0.474576 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.015385 | 0.244186 | 86 | 3 | 32 | 28.666667 | 0.892308 | 0.162791 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
c70afc6363a27065c8b7553017fd18591dd2e858 | 21,087 | py | Python | htmlmth/evasions/html/convert_to_xhtml.py | ZwCreatePhoton/htmlmth | 74d23ca2fa53e11b2587251d2f71c8f275548182 | [
"MIT"
] | null | null | null | htmlmth/evasions/html/convert_to_xhtml.py | ZwCreatePhoton/htmlmth | 74d23ca2fa53e11b2587251d2f71c8f275548182 | [
"MIT"
] | null | null | null | htmlmth/evasions/html/convert_to_xhtml.py | ZwCreatePhoton/htmlmth | 74d23ca2fa53e11b2587251d2f71c8f275548182 | [
"MIT"
] | null | null | null | from . import TransformFunction, string_to_tfarg_function, mime_type_based_transform, normalized_headers_to_tfarg_function
import htmlmth.mods.html
import htmlmth.mods.http
from ..html import xua_move_meta_to_xmlpi
# TODO: remove use of and add note that .encode should not be present before using
## Note:
# Probably won't work if exploit requires document mode <= 8
# might change back to x-ua-compatible in http headers for xml docs
# TODO: convert metadata mimetype to xhtml
#
# ---------------------------------------------------------------------------
# Base transforms: convert the document to XHTML, with and without the
# leading <?xml ...?> declaration tag.
# ---------------------------------------------------------------------------

_convert_to_xhtml = TransformFunction(
    "",
    "convert to XHTML document with an xml declaration tag",
    mime_type_based_transform({
        'text/html': string_to_tfarg_function(lambda x: htmlmth.mods.html.convert_to_xhtml(x))
    })
)

_convert_to_xhtml_no_xml_tag = TransformFunction(
    "",
    "convert to XHTML document with no declaration xml tag",
    mime_type_based_transform({
        'text/html': string_to_tfarg_function(lambda x: htmlmth.mods.html.convert_to_xhtml(x, xml_decl=False))
    }),
)


# ---------------------------------------------------------------------------
# Helpers for deriving the Content-Type-header variants of the base
# transforms.  Every public transform below used to be an identical
# copy-pasted TransformFunction(...) construction; the helper builds the
# exact same description string (base.description + " ;" + suffix [+ " ;" +
# xua_move_meta_to_xmlpi.description]) and the exact same transform steps.
# ---------------------------------------------------------------------------

def _remove_content_type(x):
    # Header transform: drop the Content-Type header so no MIME type is
    # declared in the http headers.
    return htmlmth.mods.http.remove_header("Content-Type", x)


def _declare_content_type(mime_type):
    # Header-transform factory: returns a transform that declares
    # *mime_type* in the http headers.
    return lambda x: htmlmth.mods.http.declare_mime_type(mime_type, x)


def _http_content_type_variant(base, headers_description, header_transform, ie_uses_xml_parser):
    """Derive a TransformFunction from *base* with a rewritten Content-Type header.

    base: one of the XHTML conversion base transforms above.
    headers_description: text appended to the derived transform's description.
    header_transform: callable applied to the normalized http headers of
        'text/html' payloads.
    ie_uses_xml_parser: True when IE is expected to parse the result with its
        XML parser.  x-ua-compatible http-equiv meta tags do not get
        interpreted by IE's XML parser, so in that case the
        xua_move_meta_to_xmlpi transform is chained on as well.
    """
    description = base.description + " ;" + headers_description
    steps = [
        base,
        mime_type_based_transform({
            'text/html': normalized_headers_to_tfarg_function(header_transform)
        }),
    ]
    if ie_uses_xml_parser:
        description += " ;" + xua_move_meta_to_xmlpi.description
        steps.append(xua_move_meta_to_xmlpi)
    return TransformFunction("", description, *steps)


# https://docs.microsoft.com/en-us/openspecs/ie_standards/ms-iedoco/638b52a6-c5a1-433d-b872-ca07b8f06bdd
# IE will use the XML parser when the above content-types are declared

# IE will parse with HTML parser
convert_to_xhtml_no_xml_tag_http_declared_no_type = _http_content_type_variant(
    _convert_to_xhtml_no_xml_tag,
    "no declared MIME type in http headers",
    _remove_content_type,
    ie_uses_xml_parser=False,
)

# soft assumption: text/xml declared in document
convert_to_xhtml_no_xml_tag_http_declared_no_type_inferred_xml = _http_content_type_variant(
    _convert_to_xhtml_no_xml_tag,
    "no declared MIME type in http headers",
    _remove_content_type,
    ie_uses_xml_parser=True,
)

# IE will parse with HTML parser
convert_to_xhtml_no_xml_tag_http_declared_text_html = _http_content_type_variant(
    _convert_to_xhtml_no_xml_tag,
    "declared as text/html in http headers",
    _declare_content_type("text/html"),
    ie_uses_xml_parser=False,
)

# this one might not work?
# soft assumption: text/xml declared in document
convert_to_xhtml_no_xml_tag_http_declared_text_html_inferred_xml = _http_content_type_variant(
    _convert_to_xhtml_no_xml_tag,
    "declared as text/html in http headers",
    _declare_content_type("text/html"),
    ie_uses_xml_parser=True,
)

# IE will parse with XML parser
convert_to_xhtml_no_xml_tag_http_declared_text_xml = _http_content_type_variant(
    _convert_to_xhtml_no_xml_tag,
    "declared as text/xml in http headers",
    _declare_content_type("text/xml"),
    ie_uses_xml_parser=True,
)

# IE will parse with XML parser
convert_to_xhtml_no_xml_tag_http_declared_application_xml = _http_content_type_variant(
    _convert_to_xhtml_no_xml_tag,
    "declared as application/xml in http headers",
    _declare_content_type("application/xml"),
    ie_uses_xml_parser=True,
)

# IE will parse with XML parser
convert_to_xhtml_no_xml_tag_http_declared_application_xhtml_xml = _http_content_type_variant(
    _convert_to_xhtml_no_xml_tag,
    "declared as application/xhtml+xml in http headers",
    _declare_content_type("application/xhtml+xml"),
    ie_uses_xml_parser=True,
)

# IE will parse with XML parser
convert_to_xhtml_no_xml_tag_http_declared_image_svg_xml = _http_content_type_variant(
    _convert_to_xhtml_no_xml_tag,
    "declared as image/svg+xml in http headers",
    _declare_content_type("image/svg+xml"),
    ie_uses_xml_parser=True,
)

# The output of convert_to_xhtml begins with the <?xml ...?> tag (technically
# an optional x(ht)ml tag in some cases).  When a document begins with that
# tag, IE will use its XML parser instead of its HTML parser, even if you
# specify the mime type as "text/html", if it can't infer the mime type from
# other information.

# IE will parse with HTML parser if "text/html" content-type http-equiv meta tag is in the document
# Otherwise, IE will parse with XML parser
# Soft assumption: document does not contain "text/html" content-type http-equiv meta tag
#   OR http path for file ends with ".xml" (needs confirmation)
convert_to_xhtml_http_declared_no_type = _http_content_type_variant(
    _convert_to_xhtml,
    "no declared MIME type in http headers",
    _remove_content_type,
    ie_uses_xml_parser=True,
)

# soft assumption: document contains "text/html" content-type http-equiv meta tag
#   OR http path for file ends with ".html"/".htm" (needs confirmation)
convert_to_xhtml_http_declared_no_type_inferred_html = _http_content_type_variant(
    _convert_to_xhtml,
    "no declared MIME type in http headers",
    _remove_content_type,
    ie_uses_xml_parser=False,
)

# IE will parse with HTML parser if "text/html" content-type http-equiv meta tag is in the document
# Otherwise, IE will parse with XML parser
# Soft assumption: document does not contain "text/html" content-type http-equiv meta tag
#   OR http path for file ends with ".xml" (needs confirmation)
convert_to_xhtml_http_declared_text_html = _http_content_type_variant(
    _convert_to_xhtml,
    "declared as text/html in http headers",
    _declare_content_type("text/html"),
    ie_uses_xml_parser=True,
)

# soft assumption: document contains "text/html" content-type http-equiv meta tag
#   OR http path for file ends with ".html"/".htm" (needs confirmation)
convert_to_xhtml_http_declared_text_html_inferred_html = _http_content_type_variant(
    _convert_to_xhtml,
    "declared as text/html in http headers",
    _declare_content_type("text/html"),
    ie_uses_xml_parser=False,
)

# IE will parse with XML parser
convert_to_xhtml_http_declared_text_xml = _http_content_type_variant(
    _convert_to_xhtml,
    "declared as text/xml in http headers",
    _declare_content_type("text/xml"),
    ie_uses_xml_parser=True,
)

# IE will parse with XML parser
convert_to_xhtml_http_declared_application_xml = _http_content_type_variant(
    _convert_to_xhtml,
    "declared as application/xml in http headers",
    _declare_content_type("application/xml"),
    ie_uses_xml_parser=True,
)

# IE will parse with XML parser
convert_to_xhtml_http_declared_application_xhtml_xml = _http_content_type_variant(
    _convert_to_xhtml,
    "declared as application/xhtml+xml in http headers",
    _declare_content_type("application/xhtml+xml"),
    ie_uses_xml_parser=True,
)

# IE will parse with XML parser
convert_to_xhtml_http_declared_image_svg_xml = _http_content_type_variant(
    _convert_to_xhtml,
    "declared as image/svg+xml in http headers",
    _declare_content_type("image/svg+xml"),
    ie_uses_xml_parser=True,
)

# IE will parse with XML parser
convert_to_xhtml_http_declared_image_gif = _http_content_type_variant(
    _convert_to_xhtml,
    "declared as image/gif in http headers",
    _declare_content_type("image/gif"),
    ie_uses_xml_parser=True,
)

# soft assumption: document contains "text/html" content-type http-equiv meta tag
#   OR http path for file ends with ".html"/".htm" (needs confirmation)
convert_to_xhtml_http_declared_image_gif_inferred_html = _http_content_type_variant(
    _convert_to_xhtml,
    "declared as image/gif in http headers",
    _declare_content_type("image/gif"),
    ie_uses_xml_parser=False,
)
| 85.028226 | 314 | 0.428131 | 1,770 | 21,087 | 4.757627 | 0.082486 | 0.051538 | 0.101413 | 0.041682 | 0.907374 | 0.901437 | 0.888612 | 0.885881 | 0.881012 | 0.86118 | 0 | 0.001976 | 0.520131 | 21,087 | 247 | 315 | 85.37247 | 0.83022 | 0.168018 | 0 | 0.567742 | 0 | 0 | 0.072736 | 0.004803 | 0 | 0 | 0 | 0.004049 | 0 | 1 | 0 | false | 0 | 0.025806 | 0 | 0.025806 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
c773a1d380aa213a4a733cdd76c91f7b54296bac | 13,103 | py | Python | examples/tests/test_offchain.py | CapCap/client-sdk-python | 0cdaead68746245e80ad6f93437465a31c9aa065 | [
"Apache-2.0"
] | 1 | 2021-02-15T14:41:34.000Z | 2021-02-15T14:41:34.000Z | examples/tests/test_offchain.py | xli/client-sdk-python | 1d0ec7f7b395bd827b778f1903001088e799fb05 | [
"Apache-2.0"
] | null | null | null | examples/tests/test_offchain.py | xli/client-sdk-python | 1d0ec7f7b395bd827b778f1903001088e799fb05 | [
"Apache-2.0"
] | null | null | null | # Copyright (c) The Diem Core Contributors
# SPDX-License-Identifier: Apache-2.0
from diem.offchain import Status, Action
from ..vasp.wallet import ActionResult
# Payment amount used by every test (in the currency's smallest on-chain units).
AMOUNT = 1_000_000_000
# Expected terminal status when both parties accept the KYC exchange:
# the payment is ready to be settled on-chain.
BOTH_READY = {
    "sender": Status.ready_for_settlement,
    "receiver": Status.ready_for_settlement,
}
def test_travel_rule_data_exchange_happy_path(sender_app, receiver_app, assert_final_status):
    """Both VASPs pass KYC evaluation and the payment settles on-chain."""
    intent_id = receiver_app.gen_intent_id("bar", AMOUNT)
    sender_app.pay("foo", intent_id)

    # The payment command is recorded locally before being sent out.
    assert len(sender_app.saved_commands) == 1
    assert len(receiver_app.saved_commands) == 0
    assert sender_app.run_once_background_job() == ActionResult.SEND_REQUEST_SUCCESS
    assert len(receiver_app.saved_commands) == 1

    # Expected outcome of each subsequent background-job run, in strict order.
    expected_runs = [
        (receiver_app, (Action.EVALUATE_KYC_DATA, ActionResult.PASS)),
        (sender_app, None),
        (receiver_app, ActionResult.SEND_REQUEST_SUCCESS),
        (sender_app, (Action.EVALUATE_KYC_DATA, ActionResult.PASS)),
        (receiver_app, None),
        (sender_app, ActionResult.SEND_REQUEST_SUCCESS),
        (receiver_app, None),
        (sender_app, (Action.SUBMIT_TXN, ActionResult.TXN_EXECUTED)),
        (receiver_app, None),
    ]
    for app, expected in expected_runs:
        outcome = app.run_once_background_job()
        if expected is None:
            assert outcome is None
        else:
            assert outcome == expected

    assert_final_status(BOTH_READY, AMOUNT)
def test_travel_rule_data_exchange_receiver_reject_sender_kyc_data(sender_app, receiver_app, assert_final_status):
    """Receiver rejects the sender's KYC data, so the receiver side aborts."""
    receiver_app.evaluate_kyc_data_result = {"foo": ActionResult.REJECT}
    intent_id = receiver_app.gen_intent_id("bar", AMOUNT)
    sender_app.pay("foo", intent_id)

    # Expected outcome of each background-job run, in strict order.
    expected_runs = [
        (sender_app, ActionResult.SEND_REQUEST_SUCCESS),
        (receiver_app, (Action.EVALUATE_KYC_DATA, ActionResult.REJECT)),
        (receiver_app, ActionResult.SEND_REQUEST_SUCCESS),
    ]
    for app, expected in expected_runs:
        outcome = app.run_once_background_job()
        if expected is None:
            assert outcome is None
        else:
            assert outcome == expected

    assert_final_status({"sender": Status.needs_kyc_data, "receiver": Status.abort})
def test_travel_rule_data_exchange_receiver_soft_match_reject(sender_app, receiver_app, assert_final_status):
    """Receiver soft-matches the sender, manual review rejects, receiver aborts."""
    receiver_app.evaluate_kyc_data_result = {"foo": ActionResult.SOFT_MATCH}
    receiver_app.manual_review_result = {"foo": ActionResult.REJECT}
    intent_id = receiver_app.gen_intent_id("bar", AMOUNT)
    sender_app.pay("foo", intent_id)

    # Expected outcome of each background-job run, in strict order.
    expected_runs = [
        (sender_app, ActionResult.SEND_REQUEST_SUCCESS),
        (receiver_app, (Action.EVALUATE_KYC_DATA, ActionResult.SOFT_MATCH)),
        (receiver_app, ActionResult.SEND_REQUEST_SUCCESS),
        (sender_app, (Action.CLEAR_SOFT_MATCH, ActionResult.SENT_ADDITIONAL_KYC_DATA)),
        (sender_app, ActionResult.SEND_REQUEST_SUCCESS),
        (receiver_app, (Action.REVIEW_KYC_DATA, ActionResult.REJECT)),
        (receiver_app, ActionResult.SEND_REQUEST_SUCCESS),
    ]
    for app, expected in expected_runs:
        outcome = app.run_once_background_job()
        if expected is None:
            assert outcome is None
        else:
            assert outcome == expected

    assert_final_status({"sender": Status.needs_kyc_data, "receiver": Status.abort})
def test_travel_rule_data_exchange_receiver_soft_match_pass(sender_app, receiver_app, assert_final_status):
    """Receiver soft-matches, manual review passes, and the payment settles."""
    receiver_app.evaluate_kyc_data_result = {"foo": ActionResult.SOFT_MATCH}
    receiver_app.manual_review_result = {"foo": ActionResult.PASS}
    intent_id = receiver_app.gen_intent_id("bar", AMOUNT)
    sender_app.pay("foo", intent_id)

    # Expected outcome of each background-job run, in strict order.
    expected_runs = [
        (sender_app, ActionResult.SEND_REQUEST_SUCCESS),
        (receiver_app, (Action.EVALUATE_KYC_DATA, ActionResult.SOFT_MATCH)),
        (receiver_app, ActionResult.SEND_REQUEST_SUCCESS),
        (sender_app, (Action.CLEAR_SOFT_MATCH, ActionResult.SENT_ADDITIONAL_KYC_DATA)),
        (sender_app, ActionResult.SEND_REQUEST_SUCCESS),
        (receiver_app, (Action.REVIEW_KYC_DATA, ActionResult.PASS)),
        (receiver_app, ActionResult.SEND_REQUEST_SUCCESS),
        (sender_app, (Action.EVALUATE_KYC_DATA, ActionResult.PASS)),
        (sender_app, ActionResult.SEND_REQUEST_SUCCESS),
        (receiver_app, None),
        (sender_app, (Action.SUBMIT_TXN, ActionResult.TXN_EXECUTED)),
        (receiver_app, None),
    ]
    for app, expected in expected_runs:
        outcome = app.run_once_background_job()
        if expected is None:
            assert outcome is None
        else:
            assert outcome == expected

    assert_final_status(BOTH_READY, AMOUNT)
def test_travel_rule_data_exchange_sender_rejects_receiver_kyc_data(sender_app, receiver_app, assert_final_status):
    """Sender rejects the receiver's KYC data, so the sender side aborts."""
    sender_app.evaluate_kyc_data_result = {"bar": ActionResult.REJECT}
    intent_id = receiver_app.gen_intent_id("bar", AMOUNT)
    sender_app.pay("foo", intent_id)

    # Expected outcome of each background-job run, in strict order.
    expected_runs = [
        (sender_app, ActionResult.SEND_REQUEST_SUCCESS),
        (receiver_app, (Action.EVALUATE_KYC_DATA, ActionResult.PASS)),
        (receiver_app, ActionResult.SEND_REQUEST_SUCCESS),
        (sender_app, (Action.EVALUATE_KYC_DATA, ActionResult.REJECT)),
        (sender_app, ActionResult.SEND_REQUEST_SUCCESS),
        (receiver_app, None),
    ]
    for app, expected in expected_runs:
        outcome = app.run_once_background_job()
        if expected is None:
            assert outcome is None
        else:
            assert outcome == expected

    assert_final_status({"sender": Status.abort, "receiver": Status.ready_for_settlement})
def test_travel_rule_data_exchange_sender_soft_match_reject(sender_app, receiver_app, assert_final_status):
    """Sender soft-matches the receiver, manual review rejects, sender aborts."""
    sender_app.evaluate_kyc_data_result = {"bar": ActionResult.SOFT_MATCH}
    sender_app.manual_review_result = {"bar": ActionResult.REJECT}
    intent_id = receiver_app.gen_intent_id("bar", AMOUNT)
    sender_app.pay("foo", intent_id)

    # Expected outcome of each background-job run, in strict order.
    expected_runs = [
        (sender_app, ActionResult.SEND_REQUEST_SUCCESS),
        (receiver_app, (Action.EVALUATE_KYC_DATA, ActionResult.PASS)),
        (receiver_app, ActionResult.SEND_REQUEST_SUCCESS),
        (sender_app, (Action.EVALUATE_KYC_DATA, ActionResult.SOFT_MATCH)),
        (sender_app, ActionResult.SEND_REQUEST_SUCCESS),
        (receiver_app, (Action.CLEAR_SOFT_MATCH, ActionResult.SENT_ADDITIONAL_KYC_DATA)),
        (receiver_app, ActionResult.SEND_REQUEST_SUCCESS),
        (sender_app, (Action.REVIEW_KYC_DATA, ActionResult.REJECT)),
        (sender_app, ActionResult.SEND_REQUEST_SUCCESS),
        (receiver_app, None),
    ]
    for app, expected in expected_runs:
        outcome = app.run_once_background_job()
        if expected is None:
            assert outcome is None
        else:
            assert outcome == expected

    assert_final_status({"sender": Status.abort, "receiver": Status.ready_for_settlement})
def test_travel_rule_data_exchange_sender_soft_match_pass(sender_app, receiver_app, assert_final_status):
    """Sender soft-matches, manual review passes, and the payment settles."""
    sender_app.evaluate_kyc_data_result = {"bar": ActionResult.SOFT_MATCH}
    sender_app.manual_review_result = {"bar": ActionResult.PASS}
    intent_id = receiver_app.gen_intent_id("bar", AMOUNT)
    sender_app.pay("foo", intent_id)

    # Expected outcome of each background-job run, in strict order.
    expected_runs = [
        (sender_app, ActionResult.SEND_REQUEST_SUCCESS),
        (receiver_app, (Action.EVALUATE_KYC_DATA, ActionResult.PASS)),
        (receiver_app, ActionResult.SEND_REQUEST_SUCCESS),
        (sender_app, (Action.EVALUATE_KYC_DATA, ActionResult.SOFT_MATCH)),
        (sender_app, ActionResult.SEND_REQUEST_SUCCESS),
        (receiver_app, (Action.CLEAR_SOFT_MATCH, ActionResult.SENT_ADDITIONAL_KYC_DATA)),
        (receiver_app, ActionResult.SEND_REQUEST_SUCCESS),
        (sender_app, (Action.REVIEW_KYC_DATA, ActionResult.PASS)),
        (sender_app, ActionResult.SEND_REQUEST_SUCCESS),
        (receiver_app, None),
        (sender_app, (Action.SUBMIT_TXN, ActionResult.TXN_EXECUTED)),
        (receiver_app, None),
    ]
    for app, expected in expected_runs:
        outcome = app.run_once_background_job()
        if expected is None:
            assert outcome is None
        else:
            assert outcome == expected

    assert_final_status(BOTH_READY, AMOUNT)
def test_travel_rule_data_exchange_receiver_soft_match_pass_sender_soft_match_reject(
    sender_app, receiver_app, assert_final_status
):
    """Receiver's manual review passes, but the sender's review rejects: abort."""
    receiver_app.evaluate_kyc_data_result = {"foo": ActionResult.SOFT_MATCH}
    receiver_app.manual_review_result = {"foo": ActionResult.PASS}
    sender_app.evaluate_kyc_data_result = {"bar": ActionResult.SOFT_MATCH}
    sender_app.manual_review_result = {"bar": ActionResult.REJECT}
    intent_id = receiver_app.gen_intent_id("bar", AMOUNT)
    sender_app.pay("foo", intent_id)

    # Expected outcome of each background-job run, in strict order.
    expected_runs = [
        (sender_app, ActionResult.SEND_REQUEST_SUCCESS),
        (receiver_app, (Action.EVALUATE_KYC_DATA, ActionResult.SOFT_MATCH)),
        (receiver_app, ActionResult.SEND_REQUEST_SUCCESS),
        (sender_app, (Action.CLEAR_SOFT_MATCH, ActionResult.SENT_ADDITIONAL_KYC_DATA)),
        (sender_app, ActionResult.SEND_REQUEST_SUCCESS),
        (receiver_app, (Action.REVIEW_KYC_DATA, ActionResult.PASS)),
        (receiver_app, ActionResult.SEND_REQUEST_SUCCESS),
        (sender_app, (Action.EVALUATE_KYC_DATA, ActionResult.SOFT_MATCH)),
        (sender_app, ActionResult.SEND_REQUEST_SUCCESS),
        (receiver_app, (Action.CLEAR_SOFT_MATCH, ActionResult.SENT_ADDITIONAL_KYC_DATA)),
        (receiver_app, ActionResult.SEND_REQUEST_SUCCESS),
        (sender_app, (Action.REVIEW_KYC_DATA, ActionResult.REJECT)),
        (sender_app, ActionResult.SEND_REQUEST_SUCCESS),
        (receiver_app, None),
    ]
    for app, expected in expected_runs:
        outcome = app.run_once_background_job()
        if expected is None:
            assert outcome is None
        else:
            assert outcome == expected

    assert_final_status({"sender": Status.abort, "receiver": Status.ready_for_settlement})
def test_travel_rule_data_exchange_receiver_soft_match_pass_sender_soft_match_pass(
    sender_app, receiver_app, assert_final_status
):
    """Both sides soft-match, both manual reviews pass, payment settles."""
    receiver_app.evaluate_kyc_data_result = {"foo": ActionResult.SOFT_MATCH}
    receiver_app.manual_review_result = {"foo": ActionResult.PASS}
    sender_app.evaluate_kyc_data_result = {"bar": ActionResult.SOFT_MATCH}
    sender_app.manual_review_result = {"bar": ActionResult.PASS}
    intent_id = receiver_app.gen_intent_id("bar", AMOUNT)
    sender_app.pay("foo", intent_id)

    # Expected outcome of each background-job run, in strict order.
    expected_runs = [
        (sender_app, ActionResult.SEND_REQUEST_SUCCESS),
        (receiver_app, (Action.EVALUATE_KYC_DATA, ActionResult.SOFT_MATCH)),
        (receiver_app, ActionResult.SEND_REQUEST_SUCCESS),
        (sender_app, (Action.CLEAR_SOFT_MATCH, ActionResult.SENT_ADDITIONAL_KYC_DATA)),
        (sender_app, ActionResult.SEND_REQUEST_SUCCESS),
        (receiver_app, (Action.REVIEW_KYC_DATA, ActionResult.PASS)),
        (receiver_app, ActionResult.SEND_REQUEST_SUCCESS),
        (sender_app, (Action.EVALUATE_KYC_DATA, ActionResult.SOFT_MATCH)),
        (sender_app, ActionResult.SEND_REQUEST_SUCCESS),
        (receiver_app, (Action.CLEAR_SOFT_MATCH, ActionResult.SENT_ADDITIONAL_KYC_DATA)),
        (receiver_app, ActionResult.SEND_REQUEST_SUCCESS),
        (sender_app, (Action.REVIEW_KYC_DATA, ActionResult.PASS)),
        (sender_app, ActionResult.SEND_REQUEST_SUCCESS),
        (receiver_app, None),
        (sender_app, (Action.SUBMIT_TXN, ActionResult.TXN_EXECUTED)),
        (receiver_app, None),
    ]
    for app, expected in expected_runs:
        outcome = app.run_once_background_job()
        if expected is None:
            assert outcome is None
        else:
            assert outcome == expected

    assert_final_status(BOTH_READY, AMOUNT)
| 40.692547 | 115 | 0.759444 | 1,666 | 13,103 | 5.472389 | 0.045618 | 0.05923 | 0.098717 | 0.197433 | 0.974882 | 0.972908 | 0.966327 | 0.96161 | 0.960952 | 0.959416 | 0 | 0.001356 | 0.156071 | 13,103 | 321 | 116 | 40.819315 | 0.823114 | 0.0058 | 0 | 0.778195 | 0 | 0 | 0.014742 | 0 | 0 | 0 | 0 | 0 | 0.417293 | 1 | 0.033835 | false | 0.075188 | 0.007519 | 0 | 0.041353 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 10 |
c777a8b555fc424b2195bd52e29f74f9f882e194 | 6,370 | py | Python | lego/apps/followers/tests/test_views.py | ollfkaih/lego | b15aacaf09efe90e7f984d25b0e7bddbe12647e8 | [
"MIT"
] | 45 | 2017-10-24T12:09:06.000Z | 2021-11-03T21:21:03.000Z | lego/apps/followers/tests/test_views.py | ollfkaih/lego | b15aacaf09efe90e7f984d25b0e7bddbe12647e8 | [
"MIT"
] | 980 | 2017-10-24T12:29:07.000Z | 2022-03-31T04:04:31.000Z | lego/apps/followers/tests/test_views.py | ollfkaih/lego | b15aacaf09efe90e7f984d25b0e7bddbe12647e8 | [
"MIT"
] | 23 | 2018-04-11T16:34:22.000Z | 2021-11-23T12:28:30.000Z | from rest_framework import status
from lego.apps.followers.models import FollowCompany, FollowEvent, FollowUser
from lego.apps.users.models import User
from lego.utils.test_utils import BaseAPITestCase
class FollowEventViewTestCase(BaseAPITestCase):
    """API tests for following/unfollowing events via /followers-event/."""

    fixtures = [
        "test_abakus_groups.yaml",
        "test_users.yaml",
        "test_companies.yaml",
        "test_events.yaml",
        "test_followevent.yaml",
    ]
    url = "/api/v1/followers-event/"

    def setUp(self):
        self.user = User.objects.get(id=1)

    def test_list(self):
        """Try to list the follower apis with and without auth."""
        # Note: `assertEquals` is a deprecated alias (removed in Python 3.12);
        # `assertEqual` is the supported spelling.
        response = self.client.get(self.url)
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)

        self.client.force_authenticate(self.user)
        response = self.client.get(self.url)
        self.assertEqual(response.status_code, status.HTTP_200_OK)

    def test_create(self):
        """Try to follow an event, we should always store the follower as request.user"""
        response = self.client.post(self.url, {"target": 1})
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)

        self.client.force_authenticate(self.user)
        response = self.client.post(self.url, {"target": 1})
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)

        # Always use request.user to set the follower
        response = self.client.post(self.url, {"target": 2, "follower": 2})
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        result_id = response.json()["id"]
        self.assertEqual(FollowEvent.objects.get(id=result_id).follower, self.user)

    def test_delete(self):
        """Try to delete follow items"""
        response = self.client.delete(f"{self.url}1/")
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)

        # A different user must not be able to delete someone else's follow.
        denied_user = User.objects.get(id=2)
        self.client.force_authenticate(denied_user)
        response = self.client.delete(f"{self.url}1/")
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

        self.client.force_authenticate(self.user)
        response = self.client.delete(f"{self.url}1/")
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
class FollowUserViewTestCase(BaseAPITestCase):
    """API tests for following/unfollowing users via /followers-user/."""

    fixtures = ["test_abakus_groups.yaml", "test_users.yaml", "test_followuser.yaml"]
    url = "/api/v1/followers-user/"

    def setUp(self):
        self.user = User.objects.get(id=1)

    def test_list(self):
        """Try to list the follower apis with and without auth."""
        # Note: `assertEquals` is a deprecated alias (removed in Python 3.12);
        # `assertEqual` is the supported spelling.
        response = self.client.get(self.url)
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)

        self.client.force_authenticate(self.user)
        response = self.client.get(self.url)
        self.assertEqual(response.status_code, status.HTTP_200_OK)

    def test_create(self):
        """Try to follow a user, we should always store the follower as request.user"""
        response = self.client.post(self.url, {"target": 1})
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)

        self.client.force_authenticate(self.user)
        response = self.client.post(self.url, {"target": 1})
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)

        # Always use request.user to set the follower
        response = self.client.post(self.url, {"target": 2, "follower": 2})
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        result_id = response.json()["id"]
        self.assertEqual(FollowUser.objects.get(id=result_id).follower, self.user)

    def test_delete(self):
        """Try to delete follow items"""
        response = self.client.delete(f"{self.url}1/")
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)

        # A different user must not be able to delete someone else's follow.
        denied_user = User.objects.get(id=2)
        self.client.force_authenticate(denied_user)
        response = self.client.delete(f"{self.url}1/")
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

        self.client.force_authenticate(self.user)
        response = self.client.delete(f"{self.url}1/")
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
class FollowCompanyViewTestCase(BaseAPITestCase):
    """API tests for following/unfollowing companies via /followers-company/."""

    fixtures = [
        "test_abakus_groups.yaml",
        "test_users.yaml",
        "test_companies.yaml",
        "test_followcompany.yaml",
    ]
    url = "/api/v1/followers-company/"

    def setUp(self):
        self.user = User.objects.get(id=1)

    def test_list(self):
        """Try to list the follower apis with and without auth."""
        # Note: `assertEquals` is a deprecated alias (removed in Python 3.12);
        # `assertEqual` is the supported spelling.
        response = self.client.get(self.url)
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)

        self.client.force_authenticate(self.user)
        response = self.client.get(self.url)
        self.assertEqual(response.status_code, status.HTTP_200_OK)

    def test_create(self):
        """Try to follow a company, we should always store the follower as request.user"""
        response = self.client.post(self.url, {"target": 1})
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)

        self.client.force_authenticate(self.user)
        response = self.client.post(self.url, {"target": 1})
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)

        # Always use request.user to set the follower
        response = self.client.post(self.url, {"target": 2, "follower": 2})
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        result_id = response.json()["id"]
        self.assertEqual(FollowCompany.objects.get(id=result_id).follower, self.user)

    def test_delete(self):
        """Try to delete follow items"""
        response = self.client.delete(f"{self.url}1/")
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)

        # A different user must not be able to delete someone else's follow.
        denied_user = User.objects.get(id=2)
        self.client.force_authenticate(denied_user)
        response = self.client.delete(f"{self.url}1/")
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

        self.client.force_authenticate(self.user)
        response = self.client.delete(f"{self.url}1/")
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
| 40.316456 | 89 | 0.687127 | 820 | 6,370 | 5.181707 | 0.108537 | 0.084726 | 0.101671 | 0.169452 | 0.917157 | 0.90233 | 0.90233 | 0.90233 | 0.90233 | 0.90233 | 0 | 0.019871 | 0.194192 | 6,370 | 157 | 90 | 40.573248 | 0.80791 | 0.093564 | 0 | 0.824074 | 0 | 0 | 0.086797 | 0.032483 | 0 | 0 | 0 | 0 | 0.25 | 1 | 0.111111 | false | 0 | 0.037037 | 0 | 0.231481 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
c7a054e4a425b35d5df609da0af7b27653e03077 | 758 | py | Python | src/design_patterns/solid/isp/printer_bad.py | schuna/design-patterns-python | 3798e6c418bcbc224e57dd6b95ef03046729e9d2 | [
"MIT"
] | null | null | null | src/design_patterns/solid/isp/printer_bad.py | schuna/design-patterns-python | 3798e6c418bcbc224e57dd6b95ef03046729e9d2 | [
"MIT"
] | null | null | null | src/design_patterns/solid/isp/printer_bad.py | schuna/design-patterns-python | 3798e6c418bcbc224e57dd6b95ef03046729e9d2 | [
"MIT"
] | null | null | null | class Machine:
def print(self, document):
raise NotImplementedError # pragma: no cover
def fax(self, document):
raise NotImplementedError # pragma: no cover
def scan(self, document):
raise NotImplementedError # pragma: no cover
class MultiFunctionPrinter(Machine):
    """A device that genuinely supports the whole Machine interface."""

    def print(self, document):
        """Return a confirmation that *document* was printed."""
        return "print {}".format(document)

    def fax(self, document):
        """Return a confirmation that *document* was faxed."""
        return "fax {}".format(document)

    def scan(self, document):
        """Return a confirmation that *document* was scanned."""
        return "scan {}".format(document)
class OldFashionedPrinter(Machine):
    """Legacy device: prints and faxes, but cannot scan.

    Forced by the fat Machine interface to expose scan() anyway — the
    interface-segregation violation this module illustrates.
    """

    def print(self, document):
        """Return a confirmation that *document* was printed."""
        return "print {}".format(document)

    def fax(self, document):
        """Return a confirmation that *document* was faxed."""
        return "fax {}".format(document)

    def scan(self, document):
        """Unsupported operation: always raises NotImplementedError."""
        raise NotImplementedError("Printer cannot scan!")
| 23.6875 | 57 | 0.650396 | 84 | 758 | 5.869048 | 0.22619 | 0.219067 | 0.182556 | 0.192698 | 0.811359 | 0.780933 | 0.724138 | 0.624746 | 0.413793 | 0.413793 | 0 | 0 | 0.251979 | 758 | 31 | 58 | 24.451613 | 0.869489 | 0.065963 | 0 | 0.761905 | 0 | 0 | 0.134943 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.428571 | false | 0 | 0 | 0.238095 | 0.809524 | 0.238095 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 10 |
c7d791b542cd5e3d269a893c41dd080151f112b3 | 3,660 | py | Python | mchap/tests/test_assemble/test_arraymap.py | PlantandFoodResearch/MCHap | ea25c3af889275d742e64e722ceeed0f7201f690 | [
"MIT"
] | 9 | 2022-01-14T18:27:47.000Z | 2022-01-31T09:22:22.000Z | mchap/tests/test_assemble/test_arraymap.py | PlantandFoodResearch/MCHap | ea25c3af889275d742e64e722ceeed0f7201f690 | [
"MIT"
] | 12 | 2022-01-13T21:21:08.000Z | 2022-03-13T04:31:31.000Z | mchap/tests/test_assemble/test_arraymap.py | PlantandFoodResearch/MCHap | ea25c3af889275d742e64e722ceeed0f7201f690 | [
"MIT"
] | null | null | null | import numpy as np
from mchap.assemble import arraymap
def test_get__nan():
    """Lookups in a freshly created map report NaN for any key."""
    amap = arraymap.new(5, 2)
    for _ in range(10):
        key = np.random.randint(0, 2, 5)
        assert np.isnan(arraymap.get(amap, key))
def test_get_set_get():
    """Values can be stored and retrieved independently per key."""
    amap = arraymap.new(5, 3)
    key_a = np.array([0, 1, 2, 0, 1])
    key_b = np.array([0, 1, 2, 0, 2])

    # Both keys start out unset.
    for key in (key_a, key_b):
        assert np.isnan(arraymap.get(amap, key))

    # Setting one key must not affect the other.
    amap = arraymap.set(amap, key_a, 0.5)
    assert arraymap.get(amap, key_a) == 0.5
    assert np.isnan(arraymap.get(amap, key_b))

    amap = arraymap.set(amap, key_b, 0.1)
    assert arraymap.get(amap, key_a) == 0.5
    assert arraymap.get(amap, key_b) == 0.1
def test_set__grow_tree():
    """The tree buffer (amap[0]) doubles on insert while values (amap[1]) stay."""
    amap = arraymap.new(5, 3, initial_size=4)
    key_a = np.array([0, 1, 2, 0, 1])
    key_b = np.array([0, 1, 2, 0, 2])

    for key in (key_a, key_b):
        assert np.isnan(arraymap.get(amap, key))
    assert amap[0].shape == (4, 3)
    assert amap[1].shape == (4,)

    amap = arraymap.set(amap, key_a, 0.5)
    assert arraymap.get(amap, key_a) == 0.5
    assert np.isnan(arraymap.get(amap, key_b))
    assert amap[0].shape == (8, 3)
    assert amap[1].shape == (4,)

    amap = arraymap.set(amap, key_b, 0.1)
    assert arraymap.get(amap, key_a) == 0.5
    assert arraymap.get(amap, key_b) == 0.1
    assert amap[0].shape == (16, 3)
    assert amap[1].shape == (4,)
def test_set__grow_values():
    """Starting from a tiny buffer, both tree and values arrays grow on insert."""
    amap = arraymap.new(5, 3, initial_size=2)
    key_a = np.array([0, 1, 2, 0, 1])
    key_b = np.array([0, 1, 2, 0, 2])

    for key in (key_a, key_b):
        assert np.isnan(arraymap.get(amap, key))
    assert amap[0].shape == (2, 3)
    assert amap[1].shape == (2,)

    amap = arraymap.set(amap, key_a, 0.5)
    assert arraymap.get(amap, key_a) == 0.5
    assert np.isnan(arraymap.get(amap, key_b))
    assert amap[0].shape == (8, 3)
    assert amap[1].shape == (4,)

    amap = arraymap.set(amap, key_b, 0.1)
    assert arraymap.get(amap, key_a) == 0.5
    assert arraymap.get(amap, key_b) == 0.1
    assert amap[0].shape == (16, 3)
    assert amap[1].shape == (4,)
def test_set__full():
    """Inserting beyond max_size raises ValueError."""
    amap = arraymap.new(5, 3, initial_size=4, max_size=8)
    key_a = np.array([0, 1, 2, 0, 1])
    key_b = np.array([1, 1, 2, 0, 2])

    for key in (key_a, key_b):
        assert np.isnan(arraymap.get(amap, key))
    assert amap[0].shape == (4, 3)
    assert amap[1].shape == (4,)

    # First insert grows the tree to the maximum allowed size.
    amap = arraymap.set(amap, key_a, 0.5)
    assert arraymap.get(amap, key_a) == 0.5
    assert np.isnan(arraymap.get(amap, key_b))
    assert amap[0].shape == (8, 3)
    assert amap[1].shape == (4,)

    # A second insert would exceed max_size and must fail.
    raised = False
    try:
        amap = arraymap.set(amap, key_b, 0.1)
    except ValueError:
        raised = True
    assert raised
def test_set__empty_if_full():
    """With empty_if_full=True a full map is reset in place instead of raising."""
    amap = arraymap.new(5, 3, initial_size=4, max_size=8)
    key_a = np.array([0, 1, 2, 0, 1])
    key_b = np.array([1, 1, 2, 0, 2])

    for key in (key_a, key_b):
        assert np.isnan(arraymap.get(amap, key))
    assert amap[0].shape == (4, 3)
    assert amap[1].shape == (4,)

    amap = arraymap.set(amap, key_a, 0.5, empty_if_full=True)
    assert arraymap.get(amap, key_a) == 0.5
    assert np.isnan(arraymap.get(amap, key_b))
    assert amap[0].shape == (8, 3)
    assert amap[1].shape == (4,)

    amap = arraymap.set(amap, key_b, 0.1, empty_if_full=True)
    # The map is now emptied, but its buffers keep their grown sizes.
    assert np.all(amap[0] == -1)
    assert np.all(np.isnan(amap[1]))
    assert amap[2] == 5  # array length
    assert amap[3] == 1  # initial offset
    assert amap[4] == 0  # initial offset
    assert amap[5] == 8  # max size
    for key in (key_a, key_b):
        assert np.isnan(arraymap.get(amap, key))
    assert amap[0].shape == (8, 3)
    assert amap[1].shape == (4,)
| 32.105263 | 57 | 0.585519 | 634 | 3,660 | 3.328076 | 0.097792 | 0.099526 | 0.206161 | 0.169194 | 0.804265 | 0.788152 | 0.788152 | 0.750711 | 0.736967 | 0.736967 | 0 | 0.065171 | 0.232787 | 3,660 | 113 | 58 | 32.389381 | 0.686254 | 0.02459 | 0 | 0.686869 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.585859 | 1 | 0.060606 | false | 0.010101 | 0.020202 | 0 | 0.080808 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
c7dc5907fb61353a9010188f5c2e43e6a9394ae3 | 17,702 | py | Python | arduino_iot_rest/api/series_v2_api.py | umbynos/iot-client-py | ae5d6de2868802d46e0232a29c6ac5df282c8682 | [
"Apache-2.0"
] | null | null | null | arduino_iot_rest/api/series_v2_api.py | umbynos/iot-client-py | ae5d6de2868802d46e0232a29c6ac5df282c8682 | [
"Apache-2.0"
] | null | null | null | arduino_iot_rest/api/series_v2_api.py | umbynos/iot-client-py | ae5d6de2868802d46e0232a29c6ac5df282c8682 | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
"""
Iot API
Collection of all public API endpoints. # noqa: E501
The version of the OpenAPI document: 2.0
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from arduino_iot_rest.api_client import ApiClient
from arduino_iot_rest.exceptions import (
ApiTypeError,
ApiValueError
)
class SeriesV2Api(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
    def __init__(self, api_client=None):
        # Use a default-configured ApiClient when the caller does not inject one.
        if api_client is None:
            api_client = ApiClient()
        self.api_client = api_client
    def series_v2_batch_query(self, batch_query_requests_media_v1, **kwargs):  # noqa: E501
        """batch_query series_v2  # noqa: E501

        Returns the batch of time-series data  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.series_v2_batch_query(batch_query_requests_media_v1, async_req=True)
        >>> result = thread.get()

        :param async_req bool: execute request asynchronously
        :param BatchQueryRequestsMediaV1 batch_query_requests_media_v1: (required)
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: ArduinoSeriesBatch
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Thin wrapper: request only the deserialized body, then delegate to
        # the *_with_http_info variant which performs the actual HTTP call.
        kwargs['_return_http_data_only'] = True
        return self.series_v2_batch_query_with_http_info(batch_query_requests_media_v1, **kwargs)  # noqa: E501
    def series_v2_batch_query_with_http_info(self, batch_query_requests_media_v1, **kwargs):  # noqa: E501
        """batch_query series_v2  # noqa: E501

        Returns the batch of time-series data  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.series_v2_batch_query_with_http_info(batch_query_requests_media_v1, async_req=True)
        >>> result = thread.get()

        :param async_req bool: execute request asynchronously
        :param BatchQueryRequestsMediaV1 batch_query_requests_media_v1: (required)
        :param _return_http_data_only: response data without head status code
                                       and headers
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: tuple(ArduinoSeriesBatch, status_code(int), headers(HTTPHeaderDict))
                 If the method is called asynchronously,
                 returns the request thread.
        """

        local_var_params = locals()

        # Generic options accepted by every generated endpoint method.
        all_params = ['batch_query_requests_media_v1']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Reject unknown keyword arguments early, before any network activity.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method series_v2_batch_query" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'batch_query_requests_media_v1' is set
        if self.api_client.client_side_validation and ('batch_query_requests_media_v1' not in local_var_params or  # noqa: E501
                                                        local_var_params['batch_query_requests_media_v1'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `batch_query_requests_media_v1` when calling `series_v2_batch_query`")  # noqa: E501

        # Assemble the pieces of the HTTP request; the request payload is the
        # batch-query object serialized as the JSON body.
        collection_formats = {}

        path_params = {}

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        if 'batch_query_requests_media_v1' in local_var_params:
            body_params = local_var_params['batch_query_requests_media_v1']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['oauth2']  # noqa: E501

        return self.api_client.call_api(
            '/v2/series/batch_query', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='ArduinoSeriesBatch',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
    def series_v2_batch_query_raw(self, batch_query_raw_requests_media_v1, **kwargs):  # noqa: E501
        """batch_query_raw series_v2  # noqa: E501

        Returns the batch of time-series data raw  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.series_v2_batch_query_raw(batch_query_raw_requests_media_v1, async_req=True)
        >>> result = thread.get()

        :param async_req bool: execute request asynchronously
        :param BatchQueryRawRequestsMediaV1 batch_query_raw_requests_media_v1: (required)
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: ArduinoSeriesRawBatch
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Thin wrapper: request only the deserialized body, then delegate to
        # the *_with_http_info variant which performs the actual HTTP call.
        kwargs['_return_http_data_only'] = True
        return self.series_v2_batch_query_raw_with_http_info(batch_query_raw_requests_media_v1, **kwargs)  # noqa: E501
def series_v2_batch_query_raw_with_http_info(self, batch_query_raw_requests_media_v1, **kwargs):  # noqa: E501
    """batch_query_raw series_v2.

    Returns the batch of time-series data raw, together with the HTTP
    status code and response headers.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.series_v2_batch_query_raw_with_http_info(batch_query_raw_requests_media_v1, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param BatchQueryRawRequestsMediaV1 batch_query_raw_requests_media_v1: (required)
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: tuple(ArduinoSeriesRawBatch, status_code(int), headers(HTTPHeaderDict))
             If the method is called asynchronously,
             returns the request thread.
    """
    # NOTE: locals() is captured first so it holds exactly the declared
    # parameters plus the raw kwargs dict; later code indexes it by name.
    local_var_params = locals()

    all_params = ['batch_query_raw_requests_media_v1']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject unknown keyword arguments, then fold accepted ones into
    # local_var_params so all options are looked up uniformly below.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method series_v2_batch_query_raw" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'batch_query_raw_requests_media_v1' is set
    if self.api_client.client_side_validation and ('batch_query_raw_requests_media_v1' not in local_var_params or  # noqa: E501
                                                   local_var_params['batch_query_raw_requests_media_v1'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `batch_query_raw_requests_media_v1` when calling `series_v2_batch_query_raw`")  # noqa: E501

    collection_formats = {}

    path_params = {}

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # The POST body is the batch-query payload itself.
    body_params = None
    if 'batch_query_raw_requests_media_v1' in local_var_params:
        body_params = local_var_params['batch_query_raw_requests_media_v1']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['oauth2']  # noqa: E501

    return self.api_client.call_api(
        '/v2/series/batch_query_raw', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ArduinoSeriesRawBatch',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def series_v2_batch_query_raw_last_value(self, batch_last_value_requests_media_v1, **kwargs):  # noqa: E501
    """batch_query_raw_last_value series_v2.

    Returns the batch of time-series data raw.

    Synchronous by default; pass async_req=True for an asynchronous request:

    >>> thread = api.series_v2_batch_query_raw_last_value(batch_last_value_requests_media_v1, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param BatchLastValueRequestsMediaV1 batch_last_value_requests_media_v1: (required)
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: ArduinoSeriesRawBatchLastvalue
             If the method is called asynchronously,
             returns the request thread.
    """
    # Delegate to the *_with_http_info variant, forcing body-only responses.
    forwarded = dict(kwargs, _return_http_data_only=True)
    return self.series_v2_batch_query_raw_last_value_with_http_info(batch_last_value_requests_media_v1, **forwarded)  # noqa: E501
def series_v2_batch_query_raw_last_value_with_http_info(self, batch_last_value_requests_media_v1, **kwargs):  # noqa: E501
    """batch_query_raw_last_value series_v2.

    Returns the batch of time-series data raw, together with the HTTP
    status code and response headers.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.series_v2_batch_query_raw_last_value_with_http_info(batch_last_value_requests_media_v1, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param BatchLastValueRequestsMediaV1 batch_last_value_requests_media_v1: (required)
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: tuple(ArduinoSeriesRawBatchLastvalue, status_code(int), headers(HTTPHeaderDict))
             If the method is called asynchronously,
             returns the request thread.
    """
    # NOTE: locals() is captured first so it holds exactly the declared
    # parameters plus the raw kwargs dict; later code indexes it by name.
    local_var_params = locals()

    all_params = ['batch_last_value_requests_media_v1']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject unknown keyword arguments, then fold accepted ones into
    # local_var_params so all options are looked up uniformly below.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method series_v2_batch_query_raw_last_value" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'batch_last_value_requests_media_v1' is set
    if self.api_client.client_side_validation and ('batch_last_value_requests_media_v1' not in local_var_params or  # noqa: E501
                                                   local_var_params['batch_last_value_requests_media_v1'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `batch_last_value_requests_media_v1` when calling `series_v2_batch_query_raw_last_value`")  # noqa: E501

    collection_formats = {}

    path_params = {}

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # The POST body is the batch last-value payload itself.
    body_params = None
    if 'batch_last_value_requests_media_v1' in local_var_params:
        body_params = local_var_params['batch_last_value_requests_media_v1']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['oauth2']  # noqa: E501

    return self.api_client.call_api(
        '/v2/series/batch_query_raw/lastvalue', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ArduinoSeriesRawBatchLastvalue',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
| 47.331551 | 168 | 0.636877 | 2,067 | 17,702 | 5.111272 | 0.089018 | 0.054898 | 0.059631 | 0.035779 | 0.930336 | 0.928348 | 0.926077 | 0.922101 | 0.920114 | 0.911406 | 0 | 0.020202 | 0.295334 | 17,702 | 373 | 169 | 47.458445 | 0.82676 | 0.450797 | 0 | 0.680982 | 1 | 0 | 0.213158 | 0.125014 | 0 | 0 | 0 | 0 | 0 | 1 | 0.042945 | false | 0 | 0.030675 | 0 | 0.116564 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
4018278d7a95edf946aa9aa51cb44e6f69697c0d | 31 | py | Python | www/tests/inject_name_in_module.py | raspberrypieman/brython | 2cc23d1da6acda604d4a56b4c9d464eb7e374eda | [
"BSD-3-Clause"
] | 5,926 | 2015-01-01T07:45:08.000Z | 2022-03-31T12:34:38.000Z | www/tests/inject_name_in_module.py | raspberrypieman/brython | 2cc23d1da6acda604d4a56b4c9d464eb7e374eda | [
"BSD-3-Clause"
] | 1,728 | 2015-01-01T01:09:12.000Z | 2022-03-30T23:25:22.000Z | www/tests/inject_name_in_module.py | raspberrypieman/brython | 2cc23d1da6acda604d4a56b4c9d464eb7e374eda | [
"BSD-3-Clause"
] | 574 | 2015-01-02T01:36:10.000Z | 2022-03-26T10:18:48.000Z | def yyy():
return xxx*2 | 15.5 | 20 | 0.516129 | 5 | 31 | 3.2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.05 | 0.354839 | 31 | 2 | 20 | 15.5 | 0.75 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.5 | true | 0 | 0 | 0.5 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 1 | 1 | 0 | 0 | 7 |
40396b85d8b0e936b9406ad24711a4e8fb1e7e05 | 130,555 | py | Python | tests/frequentist/test_ztest.py | erik-stenberg/confidence | c0668b2ffd4f8e5ab04dc4734c80cf3c65029c4a | [
"Apache-2.0"
] | null | null | null | tests/frequentist/test_ztest.py | erik-stenberg/confidence | c0668b2ffd4f8e5ab04dc4734c80cf3c65029c4a | [
"Apache-2.0"
] | null | null | null | tests/frequentist/test_ztest.py | erik-stenberg/confidence | c0668b2ffd4f8e5ab04dc4734c80cf3c65029c4a | [
"Apache-2.0"
] | null | null | null | import numpy as np
import pandas as pd
import pytest
import spotify_confidence
from spotify_confidence.analysis.constants import (
INCREASE_PREFFERED,
DECREASE_PREFFERED,
POINT_ESTIMATE,
CI_LOWER,
CI_UPPER,
P_VALUE,
ADJUSTED_LOWER,
ADJUSTED_UPPER,
DIFFERENCE,
BONFERRONI,
BONFERRONI_DO_NOT_COUNT_NON_INFERIORITY,
CORRECTION_METHODS,
SPOT_1,
CORRECTION_METHODS_THAT_SUPPORT_CI,
POWERED_EFFECT,
REQUIRED_SAMPLE_SIZE,
)
class TestPoweredEffectContinuousSingleMetric(object):
    def setup(self):
        """Build a single continuous metric with three variation groups."""
        self.data = pd.DataFrame(
            {
                "variation_name": ["test", "control", "test2"],
                "nr_of_items": [500, 8, 100],
                "nr_of_items_sumsq": [2500, 12, 150],
                "users": [1010, 22, 150],
                "metric_name": ["metricA"] * 3,
                "minimum_detectable_effect": [0.2] * 3,
            }
        )
        self.test = spotify_confidence.ZTest(
            self.data,
            numerator_column="nr_of_items",
            numerator_sum_squares_column="nr_of_items_sumsq",
            denominator_column="users",
            categorical_group_columns=["variation_name"],
            ordinal_group_column=None,
            interval_size=0.95,
            metric_column="metric_name",
            treatment_column="variation_name",
            power=0.8,
        )

    def _check_power(self, treatment, expected_effect, expected_size):
        """Run control-vs-treatment difference and check power columns."""
        result = self.test.difference(
            level_1="control", level_2=treatment, minimum_detectable_effects_column="minimum_detectable_effect"
        )
        assert np.isclose(result[POWERED_EFFECT][0], expected_effect, atol=0.001)
        assert np.isclose(result[REQUIRED_SAMPLE_SIZE][0], expected_size, atol=100)

    def test_powered_effect1(self):
        self._check_power("test", 0.3881, 29412)

    def test_powered_effect2(self):
        self._check_power("test2", 0.4111, 5498)
class TestPoweredEffectContinuousMultipleSuccessMetrics(object):
    def setup(self):
        """Two success metrics (A and B) sharing identical per-group stats."""
        self.data = pd.DataFrame(
            {
                "variation_name": ["test", "control", "test2"] * 2,
                "nr_of_items": [500, 8, 100] * 2,
                "nr_of_items_sumsq": [2500, 12, 150] * 2,
                "users": [1010, 22, 150] * 2,
                "metric_name": ["metricA"] * 3 + ["metricB"] * 3,
                "minimum_detectable_effect": [0.2] * 6,
            }
        )
        self.test = spotify_confidence.ZTest(
            self.data,
            numerator_column="nr_of_items",
            numerator_sum_squares_column="nr_of_items_sumsq",
            denominator_column="users",
            categorical_group_columns=["variation_name", "metric_name"],
            ordinal_group_column=None,
            interval_size=0.95,
            power=0.8,
        )

    def test_powered_effect1(self):
        result = self.test.multiple_difference(
            level="control",
            groupby="metric_name",
            level_as_reference=True,
            minimum_detectable_effects_column="minimum_detectable_effect",
        )
        # Identical input stats per metric => the (effect, size) pair repeats.
        for row, (effect, size) in enumerate([(0.4626, 41796), (0.4900, 7811)] * 2):
            assert np.isclose(result[POWERED_EFFECT][row], effect, atol=0.001)
            assert np.isclose(result[REQUIRED_SAMPLE_SIZE][row], size, atol=100)
class TestPoweredEffectContinuousMultipleMetricTypes(object):
    def setup(self):
        """metricA is a plain success metric; metricB is a guardrail with an
        0.01 non-inferiority margin and an 'increase' preference."""
        self.data = pd.DataFrame(
            {
                "variation_name": ["test", "control", "test2"] * 2,
                "nr_of_items": [500, 8, 100] * 2,
                "nr_of_items_sumsq": [2500, 12, 150] * 2,
                "users": [1010, 22, 150] * 2,
                "metric_name": ["metricA"] * 3 + ["metricB"] * 3,
                "non_inferiority_margin": [None] * 3 + [0.01] * 3,
                "preferred_direction": [None] * 3 + ["increase"] * 3,
            }
        )
        self.test = spotify_confidence.ZTest(
            self.data,
            numerator_column="nr_of_items",
            numerator_sum_squares_column="nr_of_items_sumsq",
            denominator_column="users",
            categorical_group_columns=["variation_name", "metric_name"],
            ordinal_group_column=None,
            interval_size=0.95,
            correction_method="spot-1-bonferroni",
            metric_column="metric_name",
            treatment_column="variation_name",
            power=0.8,
        )

    def test_powered_effect(self):
        result = self.test.multiple_difference(
            level="control", groupby="metric_name", level_as_reference=True, non_inferiority_margins=True
        )
        for row, effect in enumerate([0.4880, 0.5170, 0.4490, 0.4757]):
            assert np.isclose(result[POWERED_EFFECT][row], effect, atol=0.001)
        # metricA rows carry no NIM, so their required sample size is infinite.
        for row in (0, 1):
            assert result[REQUIRED_SAMPLE_SIZE][row] == float("inf")
        for row, size in zip((2, 3), (15738437, 2943671)):
            assert np.isclose(result[REQUIRED_SAMPLE_SIZE][row], size, atol=100)
class TestPoweredEffectContinuousMultipleMetricsSegments(object):
    def setup(self):
        """Mixed metric types (success metricA, guardrail metricB) replicated
        across two country segments ('us' and 'se')."""
        self.data = pd.DataFrame(
            {
                "variation_name": ["test", "control", "test2"] * 4,
                "nr_of_items": [500, 8, 100] * 4,
                "nr_of_items_sumsq": [2500, 12, 150] * 4,
                "users": [1010, 22, 150] * 4,
                "metric_name": (["metricA"] * 3 + ["metricB"] * 3) * 2,
                "non_inferiority_margin": ([None] * 3 + [0.01] * 3) * 2,
                "preferred_direction": ([None] * 3 + ["increase"] * 3) * 2,
                "segment": ["us"] * 6 + ["se"] * 6,
            }
        )
        self.test = spotify_confidence.ZTest(
            self.data,
            numerator_column="nr_of_items",
            numerator_sum_squares_column="nr_of_items_sumsq",
            denominator_column="users",
            categorical_group_columns=["variation_name", "metric_name", "segment"],
            ordinal_group_column=None,
            interval_size=0.95,
            correction_method="spot-1-bonferroni",
            metric_column="metric_name",
            treatment_column="variation_name",
            power=0.8,
        )

    def test_powered_effect(self):
        result = self.test.multiple_difference(
            level="control", groupby=["metric_name", "segment"], level_as_reference=True, non_inferiority_margins=True
        )
        expected_effects = [0.5235, 0.5546, 0.5235, 0.5546, 0.4880, 0.5170, 0.4880, 0.5170]
        for row, effect in enumerate(expected_effects):
            assert np.isclose(result[POWERED_EFFECT][row], effect, atol=0.001)
        # Rows without a non-inferiority margin cannot be powered.
        for row in range(4):
            assert result[REQUIRED_SAMPLE_SIZE][row] == float("inf")
        for row, size in zip(range(4, 8), [18590000, 3477019] * 2):
            assert np.isclose(result[REQUIRED_SAMPLE_SIZE][row], size, atol=100)
class TestPoweredEffectContinuousMultipleMetricsSegments2(object):
    def setup(self):
        """Both metrics are guardrails (NIM 0.01, 'increase' preferred),
        replicated across two country segments ('us' and 'se')."""
        self.data = pd.DataFrame(
            {
                "variation_name": ["test", "control", "test2"] * 4,
                "nr_of_items": [500, 8, 100] * 4,
                "nr_of_items_sumsq": [2500, 12, 150] * 4,
                "users": [1010, 22, 150] * 4,
                "metric_name": (["metricA"] * 3 + ["metricB"] * 3) * 2,
                "non_inferiority_margin": [0.01] * 12,
                "preferred_direction": ["increase"] * 12,
                "segment": ["us"] * 6 + ["se"] * 6,
            }
        )
        self.test = spotify_confidence.ZTest(
            self.data,
            numerator_column="nr_of_items",
            numerator_sum_squares_column="nr_of_items_sumsq",
            denominator_column="users",
            categorical_group_columns=["variation_name", "metric_name", "segment"],
            ordinal_group_column=None,
            interval_size=0.95,
            correction_method="spot-1-bonferroni",
            metric_column="metric_name",
            treatment_column="variation_name",
            power=0.8,
        )

    def test_powered_effect(self):
        result = self.test.multiple_difference(
            level="control", groupby=["metric_name", "segment"], level_as_reference=True, non_inferiority_margins=True
        )
        # All rows are NIM guardrails with identical stats, so the pairs
        # (effect, required size) alternate between the two treatment arms.
        expectations = [(0.488, 18590000), (0.5170, 3477019)] * 4
        for row, (effect, size) in enumerate(expectations):
            assert np.isclose(result[POWERED_EFFECT][row], effect, atol=0.001)
            assert np.isclose(result[REQUIRED_SAMPLE_SIZE][row], size, atol=100)
class TestPoweredEffectContinuousMultipleMetricsSegments3(object):
    def setup(self):
        """Same layout as Segments2 but powered via a per-row minimum
        detectable effect (0.02) instead of a non-inferiority margin."""
        self.data = pd.DataFrame(
            {
                "variation_name": ["test", "control", "test2"] * 4,
                "nr_of_items": [500, 8, 100] * 4,
                "nr_of_items_sumsq": [2500, 12, 150] * 4,
                "users": [1010, 22, 150] * 4,
                "metric_name": (["metricA"] * 3 + ["metricB"] * 3) * 2,
                "minimum_detectable_effect": [0.02] * 12,
                "preferred_direction": ["increase"] * 12,
                "segment": ["us"] * 6 + ["se"] * 6,
            }
        )
        self.test = spotify_confidence.ZTest(
            self.data,
            numerator_column="nr_of_items",
            numerator_sum_squares_column="nr_of_items_sumsq",
            denominator_column="users",
            categorical_group_columns=["variation_name", "metric_name", "segment"],
            ordinal_group_column=None,
            interval_size=0.95,
            correction_method="spot-1-bonferroni",
            metric_column="metric_name",
            treatment_column="variation_name",
            power=0.8,
        )

    def test_powered_effect(self):
        result = self.test.multiple_difference(
            level="control",
            groupby=["metric_name", "segment"],
            level_as_reference=True,
            minimum_detectable_effects_column="minimum_detectable_effect",
        )
        # Identical stats everywhere, so (effect, size) alternates per arm.
        expectations = [(0.4626, 4175642), (0.4900, 781000)] * 4
        for row, (effect, size) in enumerate(expectations):
            assert np.isclose(result[POWERED_EFFECT][row], effect, atol=0.001)
            assert np.isclose(result[REQUIRED_SAMPLE_SIZE][row], size, atol=100)
class TestPoweredEffectBinary(object):
    def setup(self):
        """Binary (success/total) data: metricA has no NIM, metricB is a
        guardrail with NIM 0.01, each split over two countries."""
        np.random.seed(123)
        arm_pattern = ["test", "test", "control", "control", "test2", "test2", "test3", "test3"]
        self.data = pd.DataFrame(
            {
                "variation_name": arm_pattern * 2,
                "success": [50, 60, 140, 140, 10, 20, 20, 20] * 2,
                "total": [100, 100, 200, 200, 50, 50, 60, 60] * 2,
                "country": ["us", "ca"] * 8,
                "metric_name": ["metricA"] * 8 + ["metricB"] * 8,
                "preferred_direction": [None] * 8 + ["increase"] * 8,
                "non_inferiority_margin": [None] * 8 + [0.01] * 8,
            }
        )
        self.test = spotify_confidence.ZTest(
            self.data,
            numerator_column="success",
            numerator_sum_squares_column=None,
            denominator_column="total",
            categorical_group_columns=["country", "variation_name"],
            interval_size=0.95,
            correction_method="spot-1-bonferroni",
            metric_column="metric_name",
            treatment_column="variation_name",
            power=0.8,
        )

    def test_powered_effect(self):
        result = self.test.multiple_difference(
            level="control", groupby=["metric_name", "country"], level_as_reference=True, non_inferiority_margins=True
        )
        # POWERED_EFFECT values are intentionally not asserted for this case.
        # metricA rows (no NIM) cannot be powered; metricB rows can.
        for row in range(6):
            assert result[REQUIRED_SAMPLE_SIZE][row] == float("inf")
        for row, size in zip(range(6, 12), [260541, 361863, 326159] * 2):
            assert np.isclose(result[REQUIRED_SAMPLE_SIZE][row], size, atol=100)
class TestPoweredEffectBinaryOnlyGuardrail(object):
    def setup(self):
        """Binary data with a single guardrail metric (NIM 0.01) per country."""
        np.random.seed(123)
        self.data = pd.DataFrame(
            {
                "variation_name": ["test", "test", "control", "control", "test2", "test2", "test3", "test3"],
                "success": [50, 60, 140, 140, 20, 20, 20, 20],
                "total": [100, 100, 200, 200, 50, 50, 60, 60],
                "country": ["us", "ca"] * 4,
                "metric_name": ["metricB"] * 8,
                "preferred_direction": ["increase"] * 8,
                "non_inferiority_margin": [0.01] * 8,
            }
        )
        self.test = spotify_confidence.ZTest(
            self.data,
            numerator_column="success",
            numerator_sum_squares_column=None,
            denominator_column="total",
            categorical_group_columns=["country", "variation_name"],
            interval_size=0.95,
            correction_method="spot-1-bonferroni",
            metric_column="metric_name",
            treatment_column="variation_name",
            power=0.8,
        )

    def test_powered_effect(self):
        result = self.test.multiple_difference(
            level="control", groupby=["metric_name", "country"], level_as_reference=True, non_inferiority_margins=True
        )
        # The (effect, size) triple repeats once per country group.
        expectations = [(0.1816, 201905), (0.2344, 280423), (0.2182, 252755)] * 2
        for row, (effect, size) in enumerate(expectations):
            assert np.isclose(result[POWERED_EFFECT][row], effect, atol=0.001)
            assert np.isclose(result[REQUIRED_SAMPLE_SIZE][row], size, atol=100)
class TestBinary(object):
def setup(self):
np.random.seed(123)
self.data = pd.DataFrame(
{
"variation_name": ["test", "control", "test2", "test3"],
"success": [50, 40, 10, 20],
"total": [100, 100, 50, 60],
}
)
self.test = spotify_confidence.ZTest(
self.data,
numerator_column="success",
numerator_sum_squares_column=None,
denominator_column="total",
categorical_group_columns="variation_name",
correction_method="bonferroni",
)
def test_init_sumsq_sum(self):
spotify_confidence.ZTest(
self.data,
numerator_column="success",
numerator_sum_squares_column="success",
denominator_column="total",
categorical_group_columns=["variation_name"],
)
def test_summary(self):
summary_df = self.test.summary()
assert len(summary_df) == len(self.data)
def test_difference(self):
difference_df = self.test.difference(level_1="control", level_2="test", absolute=True)
assert len(difference_df) == 1
assert difference_df["difference"][0] == 0.5 - 0.4
def test_difference_absolute_false(self):
difference_df = self.test.difference(level_1="control", level_2="test", absolute=False)
assert len(difference_df) == 1
assert difference_df["difference"][0] == (0.5 - 0.4) / 0.4
def test_multiple_difference(self):
difference_df = self.test.multiple_difference(level="control", level_as_reference=True)
assert len(difference_df) == self.data.variation_name.unique().size - 1
assert difference_df["difference"][0] == 0.5 - 0.4
def test_multiple_difference_level_as_reference_false(self):
difference_df_true_true = self.test.multiple_difference(
level="control", level_as_reference=True, absolute=True
)
difference_df = self.test.multiple_difference(level="control", level_as_reference=False, absolute=True)
assert len(difference_df) == self.data.variation_name.unique().size - 1
assert np.allclose(difference_df["difference"], -difference_df_true_true["difference"], atol=0)
assert np.allclose(difference_df["ci_lower"], -difference_df_true_true["ci_upper"], atol=0)
assert np.allclose(difference_df["ci_upper"], -difference_df_true_true["ci_lower"], atol=0)
assert np.allclose(difference_df["p-value"], difference_df_true_true["p-value"], atol=0)
def test_multiple_difference_absolute_false(self):
control_mean = self.test.summary().query("variation_name == 'control'")["point_estimate"].values[0]
difference_df_true_true = self.test.multiple_difference(
level="control", level_as_reference=True, absolute=True
)
difference_df = self.test.multiple_difference(level="control", level_as_reference=True, absolute=False)
assert len(difference_df) == self.data.variation_name.unique().size - 1
assert np.allclose(difference_df["difference"], difference_df_true_true["difference"] / control_mean, atol=0)
assert np.allclose(difference_df["ci_lower"], difference_df_true_true["ci_lower"] / control_mean, atol=0)
assert np.allclose(difference_df["ci_upper"], difference_df_true_true["ci_upper"] / control_mean, atol=0)
assert np.allclose(difference_df["p-value"], difference_df_true_true["p-value"], atol=0)
def test_multiple_difference_level_as_reference_false_absolute_false(self):
reference_mean = self.test.summary().query("variation_name != 'control'")["point_estimate"]
difference_df_true_true = self.test.multiple_difference(
level="control", level_as_reference=True, absolute=True
)
difference_df = self.test.multiple_difference(level="control", level_as_reference=False, absolute=False)
assert len(difference_df) == self.data.variation_name.unique().size - 1
assert np.allclose(
difference_df["difference"], -difference_df_true_true["difference"] / reference_mean.values, atol=0
)
assert np.allclose(
difference_df["ci_lower"], -difference_df_true_true["ci_upper"] / reference_mean.values, atol=0
)
assert np.allclose(
difference_df["ci_upper"], -difference_df_true_true["ci_lower"] / reference_mean.values, atol=0
)
assert np.allclose(difference_df["p-value"], difference_df_true_true["p-value"], atol=0)
def test_summary_plot(self):
    """An ungrouped summary plot renders exactly one chart."""
    grid = self.test.summary_plot()
    assert len(grid.charts) == 1
def test_difference_plot(self):
    """A pairwise difference plot renders exactly one chart."""
    grid = self.test.difference_plot(level_1="control", level_2="test")
    assert len(grid.charts) == 1
def test_multiple_difference_plot(self):
    """A multiple-difference plot against a reference renders one chart."""
    grid = self.test.multiple_difference_plot(level="control", level_as_reference=True)
    assert len(grid.charts) == 1
class TestCategoricalBinary(object):
    """ZTest over binary (success/total) data with two categorical dimensions."""

    def setup(self):
        np.random.seed(123)
        self.data = pd.DataFrame(
            {
                "variation_name": ["test", "test", "control", "control", "test2", "test2", "test3", "test3"],
                "success": [50, 60, 40, 140, 10, 20, 20, 20],
                "total": [100, 100, 100, 200, 50, 50, 60, 60],
                "country": ["us", "ca", "us", "ca", "us", "ca", "us", "ca"],
            }
        )
        self.test = spotify_confidence.ZTest(
            self.data,
            numerator_column="success",
            numerator_sum_squares_column=None,
            denominator_column="total",
            categorical_group_columns=["country", "variation_name"],
        )

    def _assert_bonferroni_adjustment(self, difference_df):
        # Adjusted p-values must equal the raw p-values scaled by the number
        # of comparisons, capped at 1 (Bonferroni correction).
        n_comp = len(difference_df)
        assert np.allclose(
            difference_df.apply(lambda row: min(row[P_VALUE] * n_comp, 1), axis=1),
            difference_df["adjusted p-value"],
            rtol=0.01,
        )

    def test_init_sumsq_sum(self):
        """Construction succeeds when the success column doubles as sum-of-squares."""
        spotify_confidence.ZTest(
            self.data,
            numerator_column="success",
            numerator_sum_squares_column="success",
            denominator_column="total",
            categorical_group_columns=["variation_name", "country"],
        )

    def test_init_sumsq_sum_one_country(self):
        """Construction succeeds on a single-country slice with one categorical column."""
        spotify_confidence.ZTest(
            self.data.query('country == "us"'),
            numerator_column="success",
            numerator_sum_squares_column="success",
            denominator_column="total",
            categorical_group_columns="variation_name",
        )

    def test_summary(self):
        """The summary has one row per input row."""
        assert len(self.test.summary()) == len(self.data)

    def test_difference(self):
        """A single pairwise comparison yields one row."""
        difference_df = self.test.difference(level_1=("us", "control"), level_2=("ca", "test"))
        assert len(difference_df) == 1

    def test_difference_groupby(self):
        """Grouping by country yields one comparison per country."""
        difference_df = self.test.difference(level_1="control", level_2="test", groupby="country")
        assert len(difference_df) == self.data.country.unique().size

    def test_multiple_difference(self):
        """Comparisons against one reference segment cover all other segments."""
        difference_df = self.test.multiple_difference(level=("us", "control"), level_as_reference=True, verbose=True)
        n_variations = self.data.variation_name.unique().size
        n_countries = self.data.country.unique().size
        assert len(difference_df) == (n_variations - 1) * n_countries + n_countries - 1
        self._assert_bonferroni_adjustment(difference_df)

    def test_multiple_difference_level_as_reference_false(self):
        """The comparison count is unchanged when the level is not the reference."""
        difference_df = self.test.multiple_difference(level=("us", "control"), level_as_reference=False, verbose=True)
        n_variations = self.data.variation_name.unique().size
        n_countries = self.data.country.unique().size
        assert len(difference_df) == (n_variations - 1) * n_countries + n_countries - 1
        self._assert_bonferroni_adjustment(difference_df)

    def test_multiple_difference_groupby(self):
        """Grouping by country compares the other variations within each country."""
        difference_df = self.test.multiple_difference(
            level="control", groupby="country", level_as_reference=True, verbose=True
        )
        n_variations = self.data.variation_name.unique().size
        n_countries = self.data.country.unique().size
        assert len(difference_df) == (n_variations - 1) * n_countries
        self._assert_bonferroni_adjustment(difference_df)

    def test_summary_plot(self):
        assert len(self.test.summary_plot().charts) == 1

    def test_summary_plot_groupby(self):
        grid = self.test.summary_plot(groupby="country")
        assert len(grid.charts) == self.data.country.unique().size

    def test_difference_plot(self):
        grid = self.test.difference_plot(level_1=("us", "control"), level_2=("ca", "test"))
        assert len(grid.charts) == 1

    def test_difference_plot_groupby(self):
        grid = self.test.difference_plot(level_1="control", level_2="test", groupby="country")
        assert len(grid.charts) == 1

    def test_multiple_difference_plot(self):
        grid = self.test.multiple_difference_plot(level=("us", "control"), level_as_reference=True)
        assert len(grid.charts) == 1

    def test_multiple_difference_plot_groupby(self):
        grid = self.test.multiple_difference_plot(level="control", groupby="country", level_as_reference=True)
        assert len(grid.charts) == 1
class TestCategoricalContinuous(object):
    """ZTest over continuous (sum / sum-of-squares) data with two categorical dimensions."""

    def setup(self):
        np.random.seed(123)
        self.data = pd.DataFrame(
            {
                "variation_name": ["test", "control", "test2"] * 2,
                "nr_of_items": [1969, 312, 2955, 195, 24, 330],
                "nr_of_items_sumsq": [5767, 984, 8771, 553, 80, 1010],
                "users": [1009, 104, 1502, 100, 10, 150],
                "country": ["us"] * 3 + ["gb"] * 3,
            }
        )
        self.test = spotify_confidence.ZTest(
            self.data,
            numerator_column="nr_of_items",
            numerator_sum_squares_column="nr_of_items_sumsq",
            denominator_column="users",
            categorical_group_columns=["country", "variation_name"],
        )

    def _assert_bonferroni_adjustment(self, difference_df):
        # Raw p-values scaled by the comparison count and capped at 1 must
        # reproduce the adjusted p-values (Bonferroni correction).
        n_comp = len(difference_df)
        assert np.allclose(
            difference_df["p-value"].map(lambda p: min(1, n_comp * p)), difference_df["adjusted p-value"], rtol=0.01
        )

    def test_init_one_country(self):
        """Construction succeeds on a single-country slice with one categorical column."""
        spotify_confidence.ZTest(
            self.data.query('country == "us"'),
            numerator_column="nr_of_items",
            numerator_sum_squares_column="nr_of_items_sumsq",
            denominator_column="users",
            categorical_group_columns="variation_name",
        )

    def test_summary(self):
        assert len(self.test.summary()) == len(self.data)

    def test_difference(self):
        difference_df = self.test.difference(level_1=("us", "control"), level_2=("us", "test"))
        assert len(difference_df) == 1

    def test_difference_groupby(self):
        difference_df = self.test.difference(level_1="control", level_2="test", groupby="country")
        assert len(difference_df) == self.data.country.unique().size

    def test_multiple_difference(self):
        difference_df = self.test.multiple_difference(level=("us", "control"), level_as_reference=True)
        n_variations = self.data.variation_name.unique().size
        n_countries = self.data.country.unique().size
        assert len(difference_df) == (n_variations - 1) * n_countries + n_countries - 1
        self._assert_bonferroni_adjustment(difference_df)

    def test_multiple_difference_groupby(self):
        difference_df = self.test.multiple_difference(level="control", groupby="country", level_as_reference=True)
        n_variations = self.data.variation_name.unique().size
        n_countries = self.data.country.unique().size
        assert len(difference_df) == (n_variations - 1) * n_countries
        self._assert_bonferroni_adjustment(difference_df)

    def test_summary_plot(self):
        assert len(self.test.summary_plot().charts) == 1

    def test_summary_plot_groupby(self):
        grid = self.test.summary_plot(groupby="country")
        assert len(grid.charts) == self.data.country.unique().size

    def test_difference_plot(self):
        grid = self.test.difference_plot(level_1=("us", "control"), level_2=("gb", "test"))
        assert len(grid.charts) == 1

    def test_difference_plot_groupby(self):
        grid = self.test.difference_plot(level_1="control", level_2="test", groupby="country")
        assert len(grid.charts) == 1

    def test_multiple_difference_plot(self):
        grid = self.test.multiple_difference_plot(level=("us", "control"), level_as_reference=True)
        assert len(grid.charts) == 1

    def test_multiple_difference_plot_groupby(self):
        grid = self.test.multiple_difference_plot(level="control", groupby="country", level_as_reference=True)
        assert len(grid.charts) == 1
class TestOrdinal(object):
    """ZTest with one categorical dimension plus an ordinal (days_since_reg) one."""

    def setup(self):
        # Rows cycle test/control/test2 for each of days 1..5.
        self.data = pd.DataFrame(
            {
                "variation_name": ["test", "control", "test2"] * 5,
                "nr_of_items": [500, 8, 100, 510, 8, 100, 520, 9, 104, 530, 7, 100, 530, 8, 103],
                "nr_of_items_sumsq": [2500, 12, 150, 2510, 13, 140, 2520, 14, 154, 2530, 15, 160, 2530, 16, 103],
                "users": [1010, 22, 150, 1000, 20, 153, 1030, 23, 154, 1000, 20, 150, 1040, 21, 155],
                "days_since_reg": [day for day in range(1, 6) for _ in range(3)],
            }
        )
        self.test = spotify_confidence.ZTest(
            self.data,
            numerator_column="nr_of_items",
            numerator_sum_squares_column="nr_of_items_sumsq",
            denominator_column="users",
            categorical_group_columns="variation_name",
            ordinal_group_column="days_since_reg",
        )

    def _assert_bonferroni_adjustment(self, difference_df):
        # Adjusted p-values must equal the raw ones scaled by the number of
        # comparisons, capped at 1 (Bonferroni correction).
        n_comp = len(difference_df)
        assert np.allclose(
            difference_df.apply(lambda row: min(row[P_VALUE] * n_comp, 1), axis=1),
            difference_df["adjusted p-value"],
            rtol=0.01,
        )

    def test_summary(self):
        assert len(self.test.summary()) == len(self.data)

    def test_difference(self):
        difference_df = self.test.difference(level_1=("control", 1), level_2=("test", 1))
        assert len(difference_df) == 1

    def test_difference_groupby(self):
        difference_df = self.test.difference(level_1="control", level_2="test", groupby="days_since_reg")
        assert len(difference_df) == self.data.days_since_reg.unique().size

    def test_multiple_difference(self):
        difference_df = self.test.multiple_difference(level=("control", 1), level_as_reference=True, verbose=True)
        n_variations = self.data.variation_name.unique().size
        n_days = self.data.days_since_reg.unique().size
        assert len(difference_df) == (n_variations - 1) * n_days + n_days - 1
        self._assert_bonferroni_adjustment(difference_df)

    def test_multiple_difference_groupby(self):
        difference_df = self.test.multiple_difference(
            level="control", groupby="days_since_reg", level_as_reference=True, verbose=True
        )
        n_variations = self.data.variation_name.unique().size
        n_days = self.data.days_since_reg.unique().size
        assert len(difference_df) == (n_variations - 1) * n_days
        self._assert_bonferroni_adjustment(difference_df)

    def test_summary_plot(self):
        assert len(self.test.summary_plot().charts) == 1

    def test_summary_plot_groupby(self):
        grid = self.test.summary_plot(groupby="days_since_reg")
        assert len(grid.charts) == self.data.days_since_reg.unique().size

    def test_difference_plot(self):
        grid = self.test.difference_plot(level_1=("control", 1), level_2=("test", 2))
        assert len(grid.charts) == 1

    def test_difference_plot_groupby(self):
        grid = self.test.difference_plot(level_1="control", level_2="test", groupby="days_since_reg")
        assert len(grid.charts) == 1

    def test_multiple_difference_plot(self):
        grid = self.test.multiple_difference_plot(level=("control", 1), level_as_reference=True)
        assert len(grid.charts) == 1

    def test_multiple_difference_plot_groupby(self):
        grid = self.test.multiple_difference_plot(
            level="control", groupby="days_since_reg", level_as_reference=True
        )
        assert len(grid.charts) == 1
class TestOrdinalPlusTwoCategorical(object):
    """ZTest with two categorical dimensions plus an ordinal one, including
    per-row non-inferiority margins and preferred directions."""

    def setup(self):
        # Two 15-row country blocks (us then gb); within each block rows cycle
        # test/control/test2 over days_since_reg 1..5. The nr_of_items values
        # repeat identically across the two blocks.
        items_per_block = [500, 8, 100, 510, 8, 100, 520, 9, 104, 530, 7, 100, 530, 8, 103]
        self.data = pd.DataFrame(
            {
                "variation_name": ["test", "control", "test2"] * 10,
                "nr_of_items": items_per_block * 2,
                "nr_of_items_sumsq": [1010, 32, 250, 1000, 30, 253, 1030, 33, 254, 1000, 30, 250, 1040, 31, 255]
                + [1010, 22, 150, 1000, 20, 153, 1030, 23, 154, 1000, 20, 150, 1040, 21, 155],
                "users": [2010, 42, 250, 2000, 40, 253, 2030, 43, 254, 2000, 40, 250, 2040, 41, 255]
                + [1010, 22, 150, 1000, 20, 153, 1030, 23, 154, 1000, 20, 150, 1040, 21, 155],
                "days_since_reg": [day for day in range(1, 6) for _ in range(3)] * 2,
                "country": ["us"] * 15 + ["gb"] * 15,
                "non_inferiority_margin": [0.01] * 15 + [0.1] * 15,
                "preferred_direction": [DECREASE_PREFFERED] * 15 + [INCREASE_PREFFERED] * 15,
            }
        )
        self.test = spotify_confidence.ZTest(
            self.data,
            numerator_column="nr_of_items",
            numerator_sum_squares_column="nr_of_items_sumsq",
            denominator_column="users",
            categorical_group_columns=["variation_name", "country"],
            ordinal_group_column="days_since_reg",
        )

    @staticmethod
    def _per_segment_nims():
        # One (margin, direction) pair per (country, days_since_reg) segment.
        return {
            ("us", 1): (0.01, DECREASE_PREFFERED),
            ("us", 2): (0.1, INCREASE_PREFFERED),
            ("us", 3): (0.2, DECREASE_PREFFERED),
            ("us", 4): (0.5, INCREASE_PREFFERED),
            ("us", 5): (0.99, DECREASE_PREFFERED),
            ("gb", 1): (1.01, INCREASE_PREFFERED),
            ("gb", 2): (2.01, DECREASE_PREFFERED),
            ("gb", 3): (3.01, INCREASE_PREFFERED),
            ("gb", 4): (4.01, DECREASE_PREFFERED),
            ("gb", 5): (5.01, INCREASE_PREFFERED),
        }

    def test_summary(self):
        assert len(self.test.summary()) == len(self.data)

    @pytest.mark.parametrize("correction_method", CORRECTION_METHODS, ids=lambda x: f"correction method: {x}")
    def test_difference(self, correction_method):
        self.test._confidence_computer._correction_method = correction_method
        kwargs = dict(level_1=("control", "gb", 1), level_2=("test", "us", 2))
        if BONFERRONI not in correction_method:
            # Non-Bonferroni corrections are exercised with a one-sided setup.
            kwargs["non_inferiority_margins"] = (None, "increase")
        difference_df = self.test.difference(**kwargs)
        assert len(difference_df) == 1

    @pytest.mark.parametrize("correction_method", CORRECTION_METHODS, ids=lambda x: f"correction method: {x}")
    def test_difference_groupby(self, correction_method):
        self.test._confidence_computer._correction_method = correction_method
        kwargs = dict(level_1="control", level_2="test", groupby=["country", "days_since_reg"])
        if BONFERRONI not in correction_method:
            kwargs["non_inferiority_margins"] = (None, "increase")
        difference_df = self.test.difference(**kwargs)
        assert len(difference_df) == self.data.days_since_reg.unique().size * self.data.country.unique().size

    @pytest.mark.parametrize("correction_method", CORRECTION_METHODS, ids=lambda x: f"correction method: {x}")
    def test_multiple_difference(self, correction_method):
        self.test._confidence_computer._correction_method = correction_method
        n_variations = self.data.variation_name.unique().size
        n_days = self.data.days_since_reg.unique().size
        n_countries = self.data.country.unique().size
        expected_rows = n_countries * ((n_variations - 1) * n_days + n_days - 1)
        kwargs = dict(level=("control", 1), groupby="country", level_as_reference=True)
        if BONFERRONI in correction_method:
            difference_df = self.test.multiple_difference(**kwargs)
            assert len(difference_df) == expected_rows
            # Bonferroni: adjusted p-values are raw p-values times the number
            # of comparisons, capped at 1.
            n_comp = len(difference_df)
            assert np.allclose(
                difference_df["p-value"].map(lambda p: min(1, n_comp * p)),
                difference_df["adjusted p-value"],
                rtol=0.01,
            )
        else:
            difference_df = self.test.multiple_difference(non_inferiority_margins=(None, "increase"), **kwargs)
            assert len(difference_df) == expected_rows

    @pytest.mark.parametrize("correction_method", CORRECTION_METHODS, ids=lambda x: f"correction method: {x}")
    def test_multiple_difference_groupby(self, correction_method):
        self.test._confidence_computer._correction_method = correction_method
        n_variations = self.data.variation_name.unique().size
        n_days = self.data.days_since_reg.unique().size
        n_countries = self.data.country.unique().size
        expected_rows = (n_variations - 1) * n_days * n_countries
        kwargs = dict(level="control", groupby=["days_since_reg", "country"], level_as_reference=True)
        if BONFERRONI in correction_method:
            difference_df = self.test.multiple_difference(**kwargs)
            assert len(difference_df) == expected_rows
            n_comp = len(difference_df)
            assert np.allclose(
                difference_df["p-value"].map(lambda p: min(1, n_comp * p)),
                difference_df["adjusted p-value"],
                rtol=0.01,
            )
        else:
            difference_df = self.test.multiple_difference(non_inferiority_margins=(None, "increase"), **kwargs)
            assert len(difference_df) == expected_rows
            if correction_method in CORRECTION_METHODS_THAT_SUPPORT_CI:
                # Methods that support CIs must actually produce adjusted bounds.
                assert not any(difference_df[ADJUSTED_LOWER].isna())

    @pytest.mark.parametrize("correction_method", CORRECTION_METHODS, ids=lambda x: f"correction method: {x}")
    def test_differece_with_nims(self, correction_method):
        # NOTE: "differece" is a historical typo in the name, kept so test ids stay stable.
        self.test._confidence_computer._correction_method = correction_method
        nims = self._per_segment_nims()
        cases = [
            (
                dict(
                    level_1=("test", "us"),
                    level_2=("control", "us"),
                    groupby="days_since_reg",
                    non_inferiority_margins=(0.01, INCREASE_PREFFERED),
                ),
                5,
                ["days_since_reg"],
            ),
            (
                dict(
                    level_1=("test", "us"),
                    level_2=("control", "us"),
                    groupby=["days_since_reg"],
                    non_inferiority_margins=(0.01, DECREASE_PREFFERED),
                ),
                5,
                ["days_since_reg"],
            ),
            (
                dict(
                    level_1=("test", 1),
                    level_2=("control", 1),
                    groupby=["country"],
                    non_inferiority_margins={"us": (0.01, INCREASE_PREFFERED), "gb": (0.05, INCREASE_PREFFERED)},
                ),
                2,
                ["country"],
            ),
            (
                dict(
                    level_1="test",
                    level_2="control",
                    groupby=["country", "days_since_reg"],
                    non_inferiority_margins=(0.01, DECREASE_PREFFERED),
                ),
                10,
                ["country", "days_since_reg"],
            ),
            (
                dict(
                    level_1="test",
                    level_2="control",
                    groupby=["country", "days_since_reg"],
                    non_inferiority_margins=nims,
                ),
                10,
                ["country", "days_since_reg"],
            ),
        ]
        for kwargs, expected_len, expected_columns in cases:
            df = self.test.difference(**kwargs)
            assert len(df) == expected_len
            for column in expected_columns:
                assert column in df.columns
        df = self.test.multiple_difference(
            level="control",
            level_as_reference=True,
            groupby=["country", "days_since_reg"],
            non_inferiority_margins=nims,
        )
        assert len(df) == 20

    @pytest.mark.parametrize("correction_method", CORRECTION_METHODS, ids=lambda x: f"correction method: {x}")
    def test_differece_with_nims_in_df(self, correction_method):
        # Margins come from the data frame itself (non_inferiority_margins=True).
        self.test._confidence_computer._correction_method = correction_method
        cases = [
            (dict(level_1=("test", "us"), level_2=("control", "us"), groupby="days_since_reg"), 5, ["days_since_reg"]),
            (
                dict(level_1=("test", "us"), level_2=("control", "us"), groupby=["days_since_reg"]),
                5,
                ["days_since_reg"],
            ),
            (dict(level_1=("test", 1), level_2=("control", 1), groupby=["country"]), 2, ["country"]),
            (
                dict(level_1="test", level_2="control", groupby=["country", "days_since_reg"]),
                10,
                ["country", "days_since_reg"],
            ),
            (
                dict(level_1="test", level_2="control", groupby=["country", "days_since_reg"]),
                10,
                ["country", "days_since_reg"],
            ),
        ]
        for kwargs, expected_len, expected_columns in cases:
            df = self.test.difference(non_inferiority_margins=True, **kwargs)
            assert len(df) == expected_len
            for column in expected_columns:
                assert column in df.columns
        df = self.test.multiple_difference(
            level="control",
            level_as_reference=True,
            groupby=["country", "days_since_reg"],
            non_inferiority_margins=True,
        )
        assert len(df) == 20

    def test_summary_plot(self):
        assert len(self.test.summary_plot().charts) == 1

    def test_summary_plot_groupby(self):
        grid = self.test.summary_plot(groupby="country")
        assert len(grid.charts) == self.data.country.unique().size

    def test_summary_plot_groupby_2(self):
        grid = self.test.summary_plot(groupby=["days_since_reg", "country"])
        assert len(grid.charts) == self.data.country.unique().size * self.data.days_since_reg.unique().size

    def test_difference_plot(self):
        grid = self.test.difference_plot(level_1=("control", "gb", 1), level_2=("test", "us", 2))
        assert len(grid.charts) == 1

    def test_difference_plot_groupby(self):
        grid = self.test.difference_plot(level_1=("control", "gb"), level_2=("test", "us"), groupby="days_since_reg")
        assert len(grid.charts) == 1

    def test_difference_plot_groupby_2(self):
        grid = self.test.difference_plot(level_1="control", level_2="test", groupby=["days_since_reg", "country"])
        assert len(grid.charts) == 1

    def test_multiple_difference_plot(self):
        grid = self.test.multiple_difference_plot(level=("control", 1), groupby="country", level_as_reference=True)
        assert len(grid.charts) == 1

    def test_multiple_difference_plot_groupby(self):
        grid = self.test.multiple_difference_plot(
            level="control", groupby=["days_since_reg", "country"], level_as_reference=True
        )
        assert len(grid.charts) == 1

    def test_differece_plot_with_nims(self):
        # NOTE: "differece" is a historical typo in the name, kept so test ids stay stable.
        nims = self._per_segment_nims()
        plot_calls = [
            dict(
                level_1=("test", "us"),
                level_2=("control", "us"),
                groupby="days_since_reg",
                non_inferiority_margins=(0.01, INCREASE_PREFFERED),
            ),
            dict(
                level_1=("test", "us"),
                level_2=("control", "us"),
                groupby=["days_since_reg"],
                non_inferiority_margins=(0.01, DECREASE_PREFFERED),
            ),
            dict(
                level_1=("test", 1),
                level_2=("control", 1),
                groupby=["country"],
                non_inferiority_margins={"us": (0.01, INCREASE_PREFFERED), "gb": (0.05, INCREASE_PREFFERED)},
            ),
            dict(
                level_1="test",
                level_2="control",
                groupby=["country", "days_since_reg"],
                non_inferiority_margins=(0.01, DECREASE_PREFFERED),
            ),
            dict(
                level_1="test",
                level_2="control",
                groupby=["country", "days_since_reg"],
                non_inferiority_margins=nims,
            ),
        ]
        for kwargs in plot_calls:
            assert len(self.test.difference_plot(**kwargs).charts) == 1
        ch = self.test.multiple_difference_plot(
            level="control",
            level_as_reference=True,
            groupby=["country", "days_since_reg"],
            non_inferiority_margins=nims,
        )
        assert len(ch.charts) == 1

    def test_differece_plot_with_nims_in_df(self):
        # Margins come from the data frame itself (non_inferiority_margins=True).
        plot_calls = [
            dict(level_1=("test", "us"), level_2=("control", "us"), groupby="days_since_reg"),
            dict(level_1=("test", "us"), level_2=("control", "us"), groupby=["days_since_reg"]),
            dict(level_1=("test", 1), level_2=("control", 1), groupby=["country"]),
            dict(level_1="test", level_2="control", groupby=["country", "days_since_reg"]),
            dict(level_1="test", level_2="control", groupby=["country", "days_since_reg"]),
        ]
        for kwargs in plot_calls:
            assert len(self.test.difference_plot(non_inferiority_margins=True, **kwargs).charts) == 1
        ch = self.test.multiple_difference_plot(
            level="control",
            level_as_reference=True,
            groupby=["country", "days_since_reg"],
            non_inferiority_margins=True,
        )
        assert len(ch.charts) == 1
class TestCategoricalBinomialData(object):
    """ZTest on binomial success/total data across two countries, checked
    against precomputed reference values."""

    def setup(self):
        np.random.seed(123)
        self.data = pd.DataFrame(
            {
                "variation_name": ["test", "control", "test2"] * 2,
                "success": [500, 42, 1005, 50, 4, 100],
                "total": [1009, 104, 1502, 100, 10, 150],
                "country": ["us"] * 3 + ["gb"] * 3,
            }
        )
        self.test = spotify_confidence.ZTest(
            self.data,
            numerator_column="success",
            numerator_sum_squares_column="success",
            denominator_column="total",
            categorical_group_columns=["country", "variation_name"],
        )

    def test_summary(self):
        """Point estimates and CI bounds reproduce precomputed reference values."""
        summary = self.test.summary()
        assert np.array_equal(summary.country, np.array(["us", "us", "us", "gb", "gb", "gb"]))
        assert np.array_equal(summary.point_estimate, self.data.success / self.data.total)
        expected_lower = [
            0.4646901340180582,
            0.30954466010970333,
            0.6453118311511006,
            0.4020018007729973,
            0.0963636851484016,
            0.5912276177282552,
        ]
        expected_upper = [
            0.5263901434844195,
            0.4981476475826044,
            0.692903881232388,
            0.5979981992270027,
            0.7036363148515985,
            0.7421057156050781,
        ]
        assert np.allclose(summary["ci_lower"], np.array(expected_lower))
        assert np.allclose(summary["ci_upper"], np.array(expected_upper))

    def test_multiple_difference(self):
        """Comparisons against a reference reproduce known p-values and CIs,
        and an unknown level raises."""
        with pytest.raises(ValueError):
            self.test.multiple_difference(("bad_value", "bad_value"), level_as_reference=False)
        diff = self.test.multiple_difference(("us", "control"), level_as_reference=False)
        expected = {
            "adjusted p-value": [1e00, 8.291843e-01, 9.971992e-05, 3.504662e-01, 4.504966e-07],
            "p-value": [9.81084197e-01, 1.65836862e-01, 1.99439850e-05, 7.00932382e-02, 9.00993166e-08],
            "adjusted ci_lower": [-0.41400184, -0.27489017, -0.42153065, -0.22209041, -0.39307973],
            "adjusted ci_upper": [0.42169415, 0.08258247, -0.10411038, 0.03870244, -0.13744367],
        }
        for column, values in expected.items():
            assert np.allclose(diff[column], np.array(values))
        diff = self.test.multiple_difference("test", groupby="country", level_as_reference=False)
        expected_grouped = {
            "adjusted p-value": [1.00000000e00, 3.30302805e-02, 2.80372953e-01, 0.0],
            "p-value": [5.39020329e-01, 8.25757011e-03, 7.00932382e-02, 0.0],
            "adjusted ci_lower": [-0.30659699, -0.32426934, -0.03474758, -0.2232184],
            "adjusted ci_upper": [0.50659699, -0.00906399, 0.21813554, -0.12391703],
        }
        for column, values in expected_grouped.items():
            assert np.allclose(diff[column], np.array(values))
class TestWithNims(object):
    """Non-inferiority and one-sided z-test behaviour on a two-group summary table."""

    def setup(self):
        self.data = pd.DataFrame(
            {
                "group": ["1", "2"],
                "count": [5000, 5000],
                "sum": [10021.0, 9892.0],
                "sum_of_squares": [25142.0, 24510.0],
                "avg": [2.004210, 1.978424],
                "var": [1.0116668, 0.9881132],
            }
        )
        self.test = spotify_confidence.ZTest(
            self.data,
            numerator_column="sum",
            numerator_sum_squares_column="sum_of_squares",
            denominator_column="count",
            categorical_group_columns="group",
            interval_size=0.99,
        )

    @pytest.mark.parametrize("correction_method", CORRECTION_METHODS, ids=lambda x: f"correction method: {x}")
    def test_compare_series_non_inferiority_improve_postitive(self, correction_method):
        # NOTE: "postitive" is a historical typo in the name, kept so test ids stay stable.
        summary = self.test.summary()
        control_avg = self.data.query("group == '1'").avg.values[0]
        assert np.allclose(control_avg, summary.query("group == '1'")[POINT_ESTIMATE])
        margins = (0.02, "increase")
        diff = self.test.difference(level_1="1", level_2="2", non_inferiority_margins=margins)
        np.testing.assert_almost_equal(diff[DIFFERENCE].values[0], -0.0258, 3)
        # One-sided test: the upper bound is unbounded with the default correction.
        assert np.isinf(diff[CI_UPPER].values[0])
        np.testing.assert_almost_equal(diff[CI_LOWER].values[0], -0.0723, 3)
        assert diff[P_VALUE].values[0] > 0.01
        self.test._confidence_computer._correction_method = correction_method
        diff_2 = self.test.difference(level_1="1", level_2="2", non_inferiority_margins=margins)
        if SPOT_1 in correction_method:
            # SPOT-1 keeps the tested bound but makes the other one finite.
            assert all(diff[CI_LOWER] == diff_2[CI_LOWER])
            assert np.isfinite(diff_2[CI_UPPER].values[0])
        if BONFERRONI not in correction_method and correction_method in CORRECTION_METHODS_THAT_SUPPORT_CI:
            assert all(diff[ADJUSTED_LOWER] <= diff_2[ADJUSTED_LOWER])
            assert all(diff[ADJUSTED_UPPER] >= diff_2[ADJUSTED_UPPER])

    @pytest.mark.parametrize("correction_method", CORRECTION_METHODS, ids=lambda x: f"correction method: {x}")
    def test_compare_series_non_inferiority_improve_negative(self, correction_method):
        summary = self.test.summary()
        control_avg = self.data.query("group == '1'").avg.values[0]
        assert np.allclose(control_avg, summary.query("group == '1'")[POINT_ESTIMATE])
        margins = (0.02, "decrease")
        diff = self.test.difference(level_1="1", level_2="2", non_inferiority_margins=margins)
        np.testing.assert_almost_equal(diff[DIFFERENCE].values[0], -0.0258, 3)
        # Mirror image of the "increase" case: the lower bound is unbounded.
        assert diff[CI_LOWER].values[0] == -float("inf")
        np.testing.assert_almost_equal(diff[CI_UPPER].values[0], 0.0207, 3)
        assert diff[P_VALUE].values[0] < 0.01
        self.test._confidence_computer._correction_method = correction_method
        diff_2 = self.test.difference(level_1="1", level_2="2", non_inferiority_margins=margins)
        if SPOT_1 in correction_method:
            assert all(diff[CI_UPPER] == diff_2[CI_UPPER])
            assert np.isfinite(diff_2[CI_LOWER].values[0])
        if BONFERRONI not in correction_method and correction_method in CORRECTION_METHODS_THAT_SUPPORT_CI:
            assert all(diff[ADJUSTED_LOWER] <= diff_2[ADJUSTED_LOWER])
            assert all(diff[ADJUSTED_UPPER] >= diff_2[ADJUSTED_UPPER])

    def test_one_sided_ztest_positive(self):
        """One-sided test with no margin, 'increase' preferred: upper bound is +inf."""
        summary = self.test.summary()
        control_avg = self.data.query("group == '1'").avg.values[0]
        assert np.allclose(control_avg, summary.query("group == '1'")[POINT_ESTIMATE])
        diff = self.test.difference(level_1="1", level_2="2", non_inferiority_margins=(None, "increase"))
        np.testing.assert_almost_equal(diff[DIFFERENCE].values[0], -0.0258, 3)
        assert diff[CI_UPPER].values[0] == float("inf")
        np.testing.assert_almost_equal(diff[CI_LOWER].values[0], -0.0723, 3)
        assert diff[P_VALUE].values[0] > 0.01

    def test_one_sided_ztest_negative(self):
        """One-sided test with no margin, 'decrease' preferred: lower bound is -inf."""
        summary = self.test.summary()
        control_avg = self.data.query("group == '1'").avg.values[0]
        assert np.allclose(control_avg, summary.query("group == '1'")[POINT_ESTIMATE])
        diff = self.test.difference(level_1="1", level_2="2", non_inferiority_margins=(None, "decrease"))
        np.testing.assert_almost_equal(diff[DIFFERENCE].values[0], -0.0258, 3)
        assert diff[CI_LOWER].values[0] == -float("inf")
        np.testing.assert_almost_equal(diff[CI_UPPER].values[0], 0.0207, 3)
        assert diff[P_VALUE].values[0] > 0.01
class TestSequentialOrdinalPlusTwoCategorical(object):
    """Sequential z-tests with an ordinal group (date) plus two categorical
    groups (country, metric), sized against a final expected sample size.

    The fixture has 60 rows = 3 variation names x 5 dates x 2 countries
    x 2 metrics; metric m1 carries a non-inferiority margin (0.2 for us,
    0.1 for gb) while m2 carries none.
    """

    def setup(self):
        np.random.seed(123)
        d = 50 + 1 * np.random.randn(60)
        u = np.floor(2000 + np.linspace(0, 1000, 60) + 10 * np.random.randn(60))
        # Five dates, each repeated once per variation; the resulting 15-row
        # block repeats once per (country, metric) combination.
        dates = [f"2021-04-0{day}" for day in range(1, 6) for _ in range(3)]
        self.data = pd.DataFrame(
            {
                "variation_name": ["test", "control", "test2"] * 20,
                "nr_of_items": d,
                "nr_of_items_sumsq": d / 20,
                "users": u,
                "date": pd.to_datetime(dates * 4),
                "country": (["us"] * 15 + ["gb"] * 15) * 2,
                "metric": ["m1"] * 30 + ["m2"] * 30,
                # m1 rows carry a margin (0.2 for us, 0.1 for gb); m2 rows none.
                "non_inferiority_margin": [0.2] * 15 + [0.1] * 15 + [0] * 30,
                "preferred_direction": ["increase"] * 30 + [None] * 30,
            }
        ).assign(final_sample_size=5000)
        self.test = spotify_confidence.ZTest(
            self.data,
            numerator_column="nr_of_items",
            numerator_sum_squares_column="nr_of_items_sumsq",
            denominator_column="users",
            categorical_group_columns=["variation_name", "country", "metric"],
            ordinal_group_column="date",
        )

    def _expected_number_of_comparisons(self):
        # One comparison row per non-reference variation per (date, country, metric).
        return (
            (self.data.variation_name.unique().size - 1)
            * self.data.date.unique().size
            * self.data.country.unique().size
            * self.data.metric.unique().size
        )

    @pytest.mark.parametrize("correction_method", CORRECTION_METHODS, ids=lambda x: f"correction method: {x}")
    def test_multiple_difference_groupby(self, correction_method):
        """Sequential tests accept only Bonferroni-family corrections; others raise."""
        self.test._confidence_computer._correction_method = correction_method

        def get_diff() -> pd.DataFrame:
            return self.test.multiple_difference(
                level="control",
                groupby=["date", "country", "metric"],
                level_as_reference=True,
                final_expected_sample_size_column="final_sample_size",
            )

        if BONFERRONI not in correction_method:
            with pytest.raises(ValueError):
                difference_df = get_diff()
        else:
            difference_df = get_diff()
            assert len(difference_df) == self._expected_number_of_comparisons()
            # Sequential tests report adjusted intervals only, never p-values.
            assert difference_df["p-value"].isnull().all()
            assert difference_df["adjusted p-value"].isnull().all()

    def test_multiple_difference_plot_groupby(self):
        """A grouped multiple-difference plot renders as a single chart."""
        charts = self.test.multiple_difference_plot(
            level="control",
            groupby=["date", "country", "metric"],
            level_as_reference=True,
            final_expected_sample_size_column="final_sample_size",
        ).charts
        assert len(charts) == 1

    def test_multiple_difference_groupby_onesided_decrease(self):
        """A global one-sided (decrease) margin: full row count, null p-values."""
        difference_df = self.test.multiple_difference(
            level="control",
            groupby=["date", "country", "metric"],
            level_as_reference=True,
            non_inferiority_margins=(0.05, "decrease"),
            final_expected_sample_size_column="final_sample_size",
        )
        assert len(difference_df) == self._expected_number_of_comparisons()
        assert difference_df["p-value"].isnull().all()
        assert difference_df["adjusted p-value"].isnull().all()

    def test_multiple_difference_groupby_onesided_increase(self):
        """A global one-sided (increase) margin: full row count, null p-values."""
        difference_df = self.test.multiple_difference(
            level="control",
            groupby=["date", "country", "metric"],
            level_as_reference=True,
            non_inferiority_margins=(0.05, "increase"),
            final_expected_sample_size_column="final_sample_size",
        )
        assert len(difference_df) == self._expected_number_of_comparisons()
        assert difference_df["p-value"].isnull().all()
        assert difference_df["adjusted p-value"].isnull().all()

    def test_multiple_difference_groupby_mixed_nims(self):
        """An explicit per-group NIM dict must behave like
        non_inferiority_margins=True, which reads the margins from the
        data frame's own columns."""
        nims = {
            (pd.to_datetime(f"2021-04-0{day}"), country, metric): nim
            for country, metric, nim in [
                ("us", "m1", (0.2, "increase")),
                ("gb", "m1", (0.1, "increase")),
                ("us", "m2", (0, None)),
                ("gb", "m2", (0, None)),
            ]
            for day in range(1, 6)
        }
        difference_df = self.test.multiple_difference(
            level="control",
            groupby=["date", "country", "metric"],
            level_as_reference=True,
            non_inferiority_margins=nims,
            final_expected_sample_size_column="final_sample_size",
        )
        assert len(difference_df) == self._expected_number_of_comparisons()
        assert difference_df["p-value"].isnull().all()
        assert difference_df["adjusted p-value"].isnull().all()

        difference_df_2 = self.test.multiple_difference(
            level="control",
            groupby=["date", "country", "metric"],
            level_as_reference=True,
            non_inferiority_margins=True,
            final_expected_sample_size_column="final_sample_size",
        )
        for column in difference_df.columns:
            assert (difference_df[column] == difference_df_2[column]).all() or (
                difference_df["p-value"].isnull() == difference_df_2["p-value"].isnull()
            ).all()
# Column-name constants shared by the sequential-test fixtures below.
DATE = "date"
COUNT = "count"
SUM = "sum"
SUM_OF_SQUARES = "sum_of_squares"
GROUP = "group"
class TestSequentialOrdinalPlusTwoCategorical2(object):
    """Sequential z-tests over (group, country, platform, metric) with
    cumulative daily aggregates, where one metric (bananas_per_user_7d)
    carries a non-inferiority margin and the other does not.

    Bug fix versus the original: several `np.isinf(...)` expressions were
    computed but never asserted, so those infinity checks silently did
    nothing; they are now real assertions.
    """

    def setup(self):
        # (date, group, country, platform, count, sum, sum_of_squares) for one
        # metric; the identical observations are reused for both metrics.
        raw_rows = [
            ("2020-04-01", "1", "swe", "ios", 1000, 2016.416, 5082.122),
            ("2020-04-01", "2", "swe", "ios", 1000, 2028.478, 5210.193),
            ("2020-04-01", "1", "fin", "ios", 1000, 1991.554, 4919.282),
            ("2020-04-01", "2", "fin", "ios", 1000, 1958.713, 4818.665),
            ("2020-04-01", "1", "swe", "andr", 1000, 2030.252, 5129.574),
            ("2020-04-01", "2", "swe", "andr", 1000, 1966.138, 4848.321),
            ("2020-04-01", "1", "fin", "andr", 1000, 1995.389, 4992.710),
            ("2020-04-01", "2", "fin", "andr", 1000, 1952.098, 4798.772),
            ("2020-04-02", "1", "swe", "ios", 1500, 2986.667, 7427.582),
            ("2020-04-02", "2", "swe", "ios", 1500, 2989.488, 7421.710),
            ("2020-04-02", "1", "fin", "ios", 1500, 3008.681, 7565.406),
            ("2020-04-02", "2", "fin", "ios", 1500, 2933.173, 7207.038),
            ("2020-04-02", "1", "swe", "andr", 1500, 2986.308, 7584.148),
            ("2020-04-02", "2", "swe", "andr", 1500, 2985.802, 7446.539),
            ("2020-04-02", "1", "fin", "andr", 1500, 3008.190, 7532.521),
            ("2020-04-02", "2", "fin", "andr", 1500, 3001.494, 7467.535),
        ]
        self.data = pd.DataFrame(
            [
                {
                    DATE: date,
                    GROUP: group,
                    "country": country,
                    "platform": platform,
                    "metric": metric,
                    COUNT: count,
                    SUM: numerator,
                    SUM_OF_SQUARES: numerator_sumsq,
                    "non_inferiority_margin": nim,
                    "preferred_direction": direction,
                }
                for metric, nim, direction in [
                    ("bananas_per_user_1d", None, None),
                    ("bananas_per_user_7d", 0.01, "increase"),
                ]
                for date, group, country, platform, count, numerator, numerator_sumsq in raw_rows
            ]
        )
        self.data[DATE] = pd.to_datetime(self.data[DATE])
        # Turn the per-day observations into cumulative (sequential) aggregates,
        # then restore the per-metric margin/direction columns that the
        # aggregation mangles.
        self.data = (
            self.data.groupby([DATE, GROUP, "country", "platform", "metric"])
            .sum()
            .groupby([GROUP, "country", "platform", "metric"])
            .cumsum()
            .reset_index()
            .assign(
                non_inferiority_margin=lambda df: df["metric"].map(
                    {"bananas_per_user_1d": None, "bananas_per_user_7d": 0.01}
                )
            )
            .assign(
                preferred_direction=lambda df: df["metric"].map(
                    {"bananas_per_user_1d": None, "bananas_per_user_7d": "increase"}
                )
            )
            .assign(final_expected_sample_size=5000)
        )
        self.test = spotify_confidence.ZTest(
            self.data,
            numerator_column=SUM,
            numerator_sum_squares_column=SUM_OF_SQUARES,
            denominator_column=COUNT,
            categorical_group_columns=[GROUP, "country", "platform", "metric"],
            ordinal_group_column=DATE,
            interval_size=1 - 0.01,
            correction_method=BONFERRONI_DO_NOT_COUNT_NON_INFERIORITY,
        )

    def test_with_manual_correction(self):
        """Manually dividing alpha by the number of non-NIM comparisons must
        reproduce the BONFERRONI_DO_NOT_COUNT_NON_INFERIORITY bounds."""
        test = spotify_confidence.ZTest(
            self.data.assign(blabla="hej"),
            numerator_column=SUM,
            numerator_sum_squares_column=SUM_OF_SQUARES,
            denominator_column=COUNT,
            categorical_group_columns=[GROUP, "country", "platform", "metric"],
            ordinal_group_column=DATE,
            interval_size=1 - 0.01 / 4,
        )
        # Two-sided comparisons (no NIM): (country, platform) -> expected
        # (lower, upper) per date row.
        two_sided_cases = [
            (("fin", "andr"), (-0.2016570, 0.11507406), (-0.1063633, 0.06637345)),
            (("swe", "ios"), (-0.1506963, 0.17481994), (-0.0812409, 0.09314668)),
        ]
        for (country, platform), (lower_0, upper_0), (lower_1, upper_1) in two_sided_cases:
            difference_df = test.difference(
                level_1=("1", country, platform, "bananas_per_user_1d"),
                level_2=("2", country, platform, "bananas_per_user_1d"),
                groupby="date",
                final_expected_sample_size_column="final_expected_sample_size",
            )
            np.testing.assert_almost_equal(difference_df[ADJUSTED_LOWER].values[0], lower_0, 3)
            np.testing.assert_almost_equal(difference_df[ADJUSTED_UPPER].values[0], upper_0, 3)
            np.testing.assert_almost_equal(difference_df[ADJUSTED_LOWER].values[1], lower_1, 3)
            np.testing.assert_almost_equal(difference_df[ADJUSTED_UPPER].values[1], upper_1, 3)
        # One-sided (NIM) comparisons: finite lower bounds, infinite upper bounds.
        one_sided_cases = [
            (("fin", "andr"), -0.1932786, -0.10027731),
            (("swe", "ios"), -0.1420855, -0.07509674),
        ]
        for (country, platform), lower_0, lower_1 in one_sided_cases:
            difference_df = test.difference(
                level_1=("1", country, platform, "bananas_per_user_7d"),
                level_2=("2", country, platform, "bananas_per_user_7d"),
                groupby="date",
                non_inferiority_margins=(0.01, "increase"),
                final_expected_sample_size_column="final_expected_sample_size",
            )
            np.testing.assert_almost_equal(difference_df[ADJUSTED_LOWER].values[0], lower_0, 3)
            np.testing.assert_almost_equal(difference_df[ADJUSTED_LOWER].values[1], lower_1, 3)
            # Bug fix: originally np.isinf(...) was evaluated without assert.
            assert np.isinf(difference_df[ADJUSTED_UPPER].values[0])
            assert np.isinf(difference_df[ADJUSTED_UPPER].values[1])

    def test_multiple_difference_plot(self):
        """Both the multiple-difference and single-difference plots render one chart."""
        charts = self.test.multiple_difference_plot(
            level="1",
            groupby=["date", "country", "platform", "metric"],
            level_as_reference=True,
            final_expected_sample_size_column="final_expected_sample_size",
        ).charts
        assert len(charts) == 1
        charts = self.test.difference_plot(
            level_1=("1", "fin", "andr", "bananas_per_user_7d"),
            level_2=("2", "fin", "andr", "bananas_per_user_7d"),
            groupby="date",
            non_inferiority_margins=(0.01, "increase"),
            final_expected_sample_size_column="final_expected_sample_size",
        ).charts
        assert len(charts) == 1

    def test_multiple_difference_groupby(self):
        """End-to-end check of summary point estimates and adjusted bounds,
        with an explicit NIM dict equivalent to non_inferiority_margins=True."""
        summary_df = self.test.summary()
        summary_cases = [
            ("1", "fin", "andr", 1.995389),
            ("2", "fin", "andr", 1.952098),
            ("1", "swe", "ios", 2.016416),
            ("2", "swe", "ios", 2.028478),
        ]
        for group, country, platform, expected in summary_cases:
            np.testing.assert_almost_equal(
                summary_df.query(
                    f'date == "2020-04-01" and group == "{group}" and country == "{country}" '
                    f'and platform == "{platform}" and metric=="bananas_per_user_1d"'
                )[POINT_ESTIMATE].values[0],
                expected,
                5,
            )
        # Key order follows the groupby order: (date, metric, country, platform).
        nims = {
            (pd.to_datetime(date), metric, country, platform): (
                (0.01, "increase") if metric == "bananas_per_user_7d" else (None, None)
            )
            for date in ("2020-04-01", "2020-04-02")
            for metric in ("bananas_per_user_1d", "bananas_per_user_7d")
            for country in ("fin", "swe")
            for platform in ("andr", "ios")
        }
        difference_df = self.test.multiple_difference(
            level="1",
            groupby=["date", "metric", "country", "platform"],
            level_as_reference=True,
            final_expected_sample_size_column="final_expected_sample_size",
            non_inferiority_margins=nims,
        )
        assert len(difference_df) == (
            (self.data.group.unique().size - 1)
            * self.data.date.unique().size
            * self.data.country.unique().size
            * self.data.platform.unique().size
            * self.data.metric.unique().size
        )
        # Two-sided metric: finite adjusted bounds on both sides.
        bound_cases_1d = [
            ("2020-04-01", "fin", "andr", -0.2016570, 0.11507406),
            ("2020-04-01", "swe", "ios", -0.1506963, 0.17481994),
            ("2020-04-02", "fin", "andr", -0.1063633, 0.06637345),
            ("2020-04-02", "swe", "ios", -0.0812409, 0.09314668),
        ]
        for date, country, platform, lower, upper in bound_cases_1d:
            row = difference_df.query(
                f'date == "{date}" and country == "{country}" '
                f'and platform == "{platform}" and metric=="bananas_per_user_1d"'
            )
            np.testing.assert_almost_equal(row[ADJUSTED_LOWER].values[0], lower, 3)
            np.testing.assert_almost_equal(row[ADJUSTED_UPPER].values[0], upper, 3)
        # One-sided (NIM) metric: finite lower bound, infinite upper bound.
        bound_cases_7d = [
            ("2020-04-01", "fin", "andr", -0.1932786),
            ("2020-04-01", "swe", "ios", -0.1420855),
            ("2020-04-02", "fin", "andr", -0.10027731),
            ("2020-04-02", "swe", "ios", -0.07509674),
        ]
        for date, country, platform, lower in bound_cases_7d:
            row = difference_df.query(
                f'date == "{date}" and country == "{country}" '
                f'and platform == "{platform}" and metric=="bananas_per_user_7d"'
            )
            np.testing.assert_almost_equal(row[ADJUSTED_LOWER].values[0], lower, 3)
            # Bug fix: originally np.isinf(...) was evaluated without assert.
            assert np.isinf(row[ADJUSTED_UPPER].values[0])
        assert difference_df["p-value"].isnull().all()
        assert difference_df["adjusted p-value"].isnull().all()

        difference_df_2 = self.test.multiple_difference(
            level="1",
            groupby=["date", "metric", "country", "platform"],
            level_as_reference=True,
            final_expected_sample_size_column="final_expected_sample_size",
            non_inferiority_margins=True,
        )
        for column in difference_df.columns:
            assert (difference_df[column] == difference_df_2[column]).all() or (
                difference_df["p-value"].isnull() == difference_df_2["p-value"].isnull()
            ).all()
class TestSequentialOneSided(object):
    """Single-ordinal-point sequential non-inferiority test with a large
    final expected sample size."""

    def setup(self):
        self.data = pd.DataFrame(
            {
                "date": [1, 1],
                "group": ["1", "2"],
                "count": [1250, 1250],
                "sum": [2510.0, -2492.0],
                "sum_of_squares": [6304.0, 6163.0],
            }
        ).assign(final_expected_sample_size=1e4)
        self.test = spotify_confidence.ZTest(
            self.data,
            numerator_column="sum",
            numerator_sum_squares_column="sum_of_squares",
            denominator_column="count",
            categorical_group_columns="group",
            ordinal_group_column="date",
            interval_size=0.99,
        )

    def test_multiple_difference_groupby(self):
        """One-sided margin: upper bound is +inf, p-values are always null."""
        result = self.test.multiple_difference(
            level="1",
            groupby="date",
            level_as_reference=True,
            final_expected_sample_size_column="final_expected_sample_size",
            non_inferiority_margins=(0.01, "increase"),
        )
        expected_rows = (self.data.group.unique().size - 1) * self.data.date.unique().size
        assert len(result) == expected_rows
        assert result["p-value"].isnull().all()
        assert result["adjusted p-value"].isnull().all()
        assert np.isinf(result[CI_UPPER].values[0])
        np.testing.assert_almost_equal(result[ADJUSTED_LOWER].values[0], -4.129515314002298, 3)
        np.testing.assert_almost_equal(result[DIFFERENCE].values[0], -4.001416, 3)
class TestSequentialTwoSided(object):
    """Single-date two-sided sequential test with a large final expected
    sample size; only adjusted bounds are reported, p-values stay null."""

    def setup(self):
        self.data = pd.DataFrame(
            {
                "date": pd.to_datetime(["2020-04-01", "2020-04-01"]),
                "group": ["1", "2"],
                "count": [1250, 1250],
                "sum": [2510.0, 2492.0],
                "sum_of_squares": [6304.0, 6163.0],
            }
        ).assign(final_expected_sample_size=1e4)
        self.test = spotify_confidence.ZTest(
            self.data,
            numerator_column="sum",
            numerator_sum_squares_column="sum_of_squares",
            denominator_column="count",
            categorical_group_columns="group",
            ordinal_group_column="date",
            interval_size=0.99,
        )

    def test_multiple_difference_groupby(self):
        """Two-sided sequential bounds are finite on both sides."""
        result = self.test.multiple_difference(
            level="1",
            groupby="date",
            level_as_reference=True,
            final_expected_sample_size_column="final_expected_sample_size",
        )
        expected_rows = (self.data.group.unique().size - 1) * self.data.date.unique().size
        assert len(result) == expected_rows
        assert result["p-value"].isnull().all()
        assert result["adjusted p-value"].isnull().all()
        np.testing.assert_almost_equal(result[ADJUSTED_UPPER].values[0], 0.121, 3)
        np.testing.assert_almost_equal(result[ADJUSTED_LOWER].values[0], -0.151, 3)
        np.testing.assert_almost_equal(result[DIFFERENCE].values[0], -0.0149, 3)
class TestNimsWithNaN(object):
    """non_inferiority_margins=True must tolerate NaN margins/directions on
    metrics that have no NIM configured (m1), alongside configured ones."""

    def setup(self):
        numerators = [89984.0, 89992.0, 89108.0, 5815.0, 5867.0, 5896.0, 13928.0, 13688.0, 13505.0]
        self.data = pd.DataFrame(
            {
                "count": [252934, 253656, 252328, 464640, 465726, 465194, 463493, 464487, 464059],
                "sum": numerators,
                "sum_of_squares": list(numerators),
                "exposure_experiment_group_id": ["Control", "Treatment1", "Treatment2"] * 3,
                # m1 has no margin at all; m2/m3 carry direction (and m3 a margin).
                "non_inferiority_margin": [np.nan] * 6 + [5.0] * 3,
                "preferred_direction": [np.nan] * 3 + ["DECREASE"] * 3 + ["INCREASE"] * 3,
                "metric": ["m1"] * 3 + ["m2"] * 3 + ["m3"] * 3,
            }
        )

    def test_nims_with_nans(self):
        """Two treatments x three metrics -> six comparison rows, no crash."""
        ztest = spotify_confidence.ZTest(
            data_frame=self.data,
            numerator_column="sum",
            numerator_sum_squares_column="sum_of_squares",
            denominator_column="count",
            categorical_group_columns=["metric", "exposure_experiment_group_id"],
            interval_size=0.99,
        )
        diff_df = ztest.multiple_difference(
            level="Control", level_as_reference=True, absolute=False, groupby="metric", non_inferiority_margins=True
        )
        assert len(diff_df) == 6
| 37.173975 | 119 | 0.461775 | 12,410 | 130,555 | 4.626914 | 0.042143 | 0.040752 | 0.024033 | 0.035249 | 0.936503 | 0.928631 | 0.922065 | 0.916458 | 0.904215 | 0.892982 | 0 | 0.085486 | 0.420106 | 130,555 | 3,511 | 120 | 37.184563 | 0.672954 | 0.00694 | 0 | 0.777164 | 0 | 0.006139 | 0.131511 | 0.01505 | 0 | 0 | 0 | 0 | 0.097299 | 1 | 0.034377 | false | 0 | 0.001535 | 0.000307 | 0.042357 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.