hexsha
string
size
int64
ext
string
lang
string
max_stars_repo_path
string
max_stars_repo_name
string
max_stars_repo_head_hexsha
string
max_stars_repo_licenses
list
max_stars_count
int64
max_stars_repo_stars_event_min_datetime
string
max_stars_repo_stars_event_max_datetime
string
max_issues_repo_path
string
max_issues_repo_name
string
max_issues_repo_head_hexsha
string
max_issues_repo_licenses
list
max_issues_count
int64
max_issues_repo_issues_event_min_datetime
string
max_issues_repo_issues_event_max_datetime
string
max_forks_repo_path
string
max_forks_repo_name
string
max_forks_repo_head_hexsha
string
max_forks_repo_licenses
list
max_forks_count
int64
max_forks_repo_forks_event_min_datetime
string
max_forks_repo_forks_event_max_datetime
string
content
string
avg_line_length
float64
max_line_length
int64
alphanum_fraction
float64
qsc_code_num_words_quality_signal
int64
qsc_code_num_chars_quality_signal
float64
qsc_code_mean_word_length_quality_signal
float64
qsc_code_frac_words_unique_quality_signal
float64
qsc_code_frac_chars_top_2grams_quality_signal
float64
qsc_code_frac_chars_top_3grams_quality_signal
float64
qsc_code_frac_chars_top_4grams_quality_signal
float64
qsc_code_frac_chars_dupe_5grams_quality_signal
float64
qsc_code_frac_chars_dupe_6grams_quality_signal
float64
qsc_code_frac_chars_dupe_7grams_quality_signal
float64
qsc_code_frac_chars_dupe_8grams_quality_signal
float64
qsc_code_frac_chars_dupe_9grams_quality_signal
float64
qsc_code_frac_chars_dupe_10grams_quality_signal
float64
qsc_code_frac_chars_replacement_symbols_quality_signal
float64
qsc_code_frac_chars_digital_quality_signal
float64
qsc_code_frac_chars_whitespace_quality_signal
float64
qsc_code_size_file_byte_quality_signal
float64
qsc_code_num_lines_quality_signal
float64
qsc_code_num_chars_line_max_quality_signal
float64
qsc_code_num_chars_line_mean_quality_signal
float64
qsc_code_frac_chars_alphabet_quality_signal
float64
qsc_code_frac_chars_comments_quality_signal
float64
qsc_code_cate_xml_start_quality_signal
float64
qsc_code_frac_lines_dupe_lines_quality_signal
float64
qsc_code_cate_autogen_quality_signal
float64
qsc_code_frac_lines_long_string_quality_signal
float64
qsc_code_frac_chars_string_length_quality_signal
float64
qsc_code_frac_chars_long_word_length_quality_signal
float64
qsc_code_frac_lines_string_concat_quality_signal
float64
qsc_code_cate_encoded_data_quality_signal
float64
qsc_code_frac_chars_hex_words_quality_signal
float64
qsc_code_frac_lines_prompt_comments_quality_signal
float64
qsc_code_frac_lines_assert_quality_signal
float64
qsc_codepython_cate_ast_quality_signal
float64
qsc_codepython_frac_lines_func_ratio_quality_signal
float64
qsc_codepython_cate_var_zero_quality_signal
bool
qsc_codepython_frac_lines_pass_quality_signal
float64
qsc_codepython_frac_lines_import_quality_signal
float64
qsc_codepython_frac_lines_simplefunc_quality_signal
float64
qsc_codepython_score_lines_no_logic_quality_signal
float64
qsc_codepython_frac_lines_print_quality_signal
float64
qsc_code_num_words
int64
qsc_code_num_chars
int64
qsc_code_mean_word_length
int64
qsc_code_frac_words_unique
null
qsc_code_frac_chars_top_2grams
int64
qsc_code_frac_chars_top_3grams
int64
qsc_code_frac_chars_top_4grams
int64
qsc_code_frac_chars_dupe_5grams
int64
qsc_code_frac_chars_dupe_6grams
int64
qsc_code_frac_chars_dupe_7grams
int64
qsc_code_frac_chars_dupe_8grams
int64
qsc_code_frac_chars_dupe_9grams
int64
qsc_code_frac_chars_dupe_10grams
int64
qsc_code_frac_chars_replacement_symbols
int64
qsc_code_frac_chars_digital
int64
qsc_code_frac_chars_whitespace
int64
qsc_code_size_file_byte
int64
qsc_code_num_lines
int64
qsc_code_num_chars_line_max
int64
qsc_code_num_chars_line_mean
int64
qsc_code_frac_chars_alphabet
int64
qsc_code_frac_chars_comments
int64
qsc_code_cate_xml_start
int64
qsc_code_frac_lines_dupe_lines
int64
qsc_code_cate_autogen
int64
qsc_code_frac_lines_long_string
int64
qsc_code_frac_chars_string_length
int64
qsc_code_frac_chars_long_word_length
int64
qsc_code_frac_lines_string_concat
null
qsc_code_cate_encoded_data
int64
qsc_code_frac_chars_hex_words
int64
qsc_code_frac_lines_prompt_comments
int64
qsc_code_frac_lines_assert
int64
qsc_codepython_cate_ast
int64
qsc_codepython_frac_lines_func_ratio
int64
qsc_codepython_cate_var_zero
int64
qsc_codepython_frac_lines_pass
int64
qsc_codepython_frac_lines_import
int64
qsc_codepython_frac_lines_simplefunc
int64
qsc_codepython_score_lines_no_logic
int64
qsc_codepython_frac_lines_print
int64
effective
string
hits
int64
fc5a81b39a8968c2fd162ad91a24828d757523bd
18,567
py
Python
pycatia/part_interfaces/trim.py
evereux/catia_python
08948585899b12587b0415ce3c9191a408b34897
[ "MIT" ]
90
2019-02-21T10:05:28.000Z
2022-03-19T01:53:41.000Z
pycatia/part_interfaces/trim.py
Luanee/pycatia
ea5eef8178f73de12404561c00baf7a7ca30da59
[ "MIT" ]
99
2019-05-21T08:29:12.000Z
2022-03-25T09:55:15.000Z
pycatia/part_interfaces/trim.py
Luanee/pycatia
ea5eef8178f73de12404561c00baf7a7ca30da59
[ "MIT" ]
26
2019-04-04T06:31:36.000Z
2022-03-30T07:24:47.000Z
#! usr/bin/python3.6 """ Module initially auto generated using V5Automation files from CATIA V5 R28 on 2020-06-11 12:40:47.360445 .. warning:: The notes denoted "CAA V5 Visual Basic Help" are to be used as reference only. They are there as a guide as to how the visual basic / catscript functions work and thus help debugging in pycatia. """ from pycatia.in_interfaces.reference import Reference from pycatia.part_interfaces.boolean_shape import BooleanShape class Trim(BooleanShape): """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-06-11 12:40:47.360445) | System.IUnknown | System.IDispatch | System.CATBaseUnknown | System.CATBaseDispatch | System.AnyObject | MecModInterfaces.Shape | PartInterfaces.BooleanShape | Trim | | Represents the Trim, or union trim boolean operation. | It is performed between a body and the current shape. """ def __init__(self, com_object): super().__init__(com_object) self.trim = com_object def add_face_to_keep(self, i_face_to_keep: Reference) -> None: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Sub AddFaceToKeep(Reference iFaceToKeep) | | Adds a new face to be kept (if face is not divided by | operation). | | Parameters: | | iFaceToKeep | The new face to process | The following | | Boundary object is supported: Face. | | Example: | The following example adds the new face face to Keep for the Trim | firstTrim: | | call firstTrim.AddFaceToKeep(face) :param Reference i_face_to_keep: :return: None :rtype: None """ return self.trim.AddFaceToKeep(i_face_to_keep.com_object) # # # # Autogenerated comment: # # some methods require a system service call as the methods expects a vb array object # # passed to it and there is no way to do this directly with python. In those cases the following code # # should be uncommented and edited accordingly. Otherwise completely remove all this. 
# # vba_function_name = 'add_face_to_keep' # # vba_code = """ # # Public Function add_face_to_keep(trim) # # Dim iFaceToKeep (2) # # trim.AddFaceToKeep iFaceToKeep # # add_face_to_keep = iFaceToKeep # # End Function # # """ # # system_service = self.application.system_service # # return system_service.evaluate(vba_code, 0, vba_function_name, [self.com_object]) def add_face_to_keep2(self, i_face_to_keep: Reference, i_face_adjacent_for_keep: Reference) -> None: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Sub AddFaceToKeep2(Reference iFaceToKeep, | Reference iFaceAdjacentForKeep) | | Adds a new face to be kept (if face is divided by | operation). | | Parameters: | | iFaceToKeep | The new face to process | The following | | Boundary object is supported: Face. | iFaceAdjacentForKeep | An adjacent face of iFaceToKeep belonging to the other | operand | The following Boundary object is supported: Face. | | Example: | The following example adds the new face face to Keep for the Trim | firstTrim: | | call firstTrim.AddFaceToKeep(face) :param Reference i_face_to_keep: :param Reference i_face_adjacent_for_keep: :return: None :rtype: None """ return self.trim.AddFaceToKeep2(i_face_to_keep.com_object, i_face_adjacent_for_keep.com_object) # # # # Autogenerated comment: # # some methods require a system service call as the methods expects a vb array object # # passed to it and there is no way to do this directly with python. In those cases the following code # # should be uncommented and edited accordingly. Otherwise completely remove all this. 
# # vba_function_name = 'add_face_to_keep2' # # vba_code = """ # # Public Function add_face_to_keep2(trim) # # Dim iFaceToKeep (2) # # trim.AddFaceToKeep2 iFaceToKeep # # add_face_to_keep2 = iFaceToKeep # # End Function # # """ # # system_service = self.application.system_service # # return system_service.evaluate(vba_code, 0, vba_function_name, [self.com_object]) def add_face_to_remove(self, i_face_to_remove: Reference) -> None: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Sub AddFaceToRemove(Reference iFaceToRemove) | | Adds a new face to be Removed (if face not divided by | operation). | | Parameters: | | iFaceToRemove | The new face to process | The following | | Boundary object is supported: Face. | | Example: | The following example adds the new face face to Remove for the Trim | firstTrim: | | call firstTrim.AddFaceToRemove(face) :param Reference i_face_to_remove: :return: None :rtype: None """ return self.trim.AddFaceToRemove(i_face_to_remove.com_object) # # # # Autogenerated comment: # # some methods require a system service call as the methods expects a vb array object # # passed to it and there is no way to do this directly with python. In those cases the following code # # should be uncommented and edited accordingly. Otherwise completely remove all this. # # vba_function_name = 'add_face_to_remove' # # vba_code = """ # # Public Function add_face_to_remove(trim) # # Dim iFaceToRemove (2) # # trim.AddFaceToRemove iFaceToRemove # # add_face_to_remove = iFaceToRemove # # End Function # # """ # # system_service = self.application.system_service # # return system_service.evaluate(vba_code, 0, vba_function_name, [self.com_object]) def add_face_to_remove2(self, i_face_to_remove: Reference, i_face_adjacent_for_remove: Reference) -> None: """ .. 
note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Sub AddFaceToRemove2(Reference iFaceToRemove, | Reference iFaceAdjacentForRemove) | | Adds a new face to be Removed (if face is divided by | operation). | | Parameters: | | iFaceToRemove | The new face to process | The following | | Boundary object is supported: Face. | iFaceAdjacentForRemove | An adjacent face of iFaceToRemove belonging to the other | operand | The following Boundary object is supported: Face. | | Example: | The following example adds the new face face to Remove for the Trim | firstTrim: | | call firstTrim.AddFaceToRemove(face) :param Reference i_face_to_remove: :param Reference i_face_adjacent_for_remove: :return: None :rtype: None """ return self.trim.AddFaceToRemove2(i_face_to_remove.com_object, i_face_adjacent_for_remove.com_object) # # # # Autogenerated comment: # # some methods require a system service call as the methods expects a vb array object # # passed to it and there is no way to do this directly with python. In those cases the following code # # should be uncommented and edited accordingly. Otherwise completely remove all this. # # vba_function_name = 'add_face_to_remove2' # # vba_code = """ # # Public Function add_face_to_remove2(trim) # # Dim iFaceToRemove (2) # # trim.AddFaceToRemove2 iFaceToRemove # # add_face_to_remove2 = iFaceToRemove # # End Function # # """ # # system_service = self.application.system_service # # return system_service.evaluate(vba_code, 0, vba_function_name, [self.com_object]) def withdraw_face_to_keep(self, i_face_to_withdraw: Reference) -> None: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Sub WithdrawFaceToKeep(Reference iFaceToWithdraw) | | Withdraws an existing Kept face (if face is not divided by operation) | . | | Parameters: | | iFaceToWithdraw | The face to withdraw | The following | | Boundary object is supported: Face. 
| | Example: | The following example withdraws the existing face Kept face from the Trim | firstTrim: | | call firstTrim.WithdrawFaceToKeep(face) :param Reference i_face_to_withdraw: :return: None :rtype: None """ return self.trim.WithdrawFaceToKeep(i_face_to_withdraw.com_object) # # # # Autogenerated comment: # # some methods require a system service call as the methods expects a vb array object # # passed to it and there is no way to do this directly with python. In those cases the following code # # should be uncommented and edited accordingly. Otherwise completely remove all this. # # vba_function_name = 'withdraw_face_to_keep' # # vba_code = """ # # Public Function withdraw_face_to_keep(trim) # # Dim iFaceToWithdraw (2) # # trim.WithdrawFaceToKeep iFaceToWithdraw # # withdraw_face_to_keep = iFaceToWithdraw # # End Function # # """ # # system_service = self.application.system_service # # return system_service.evaluate(vba_code, 0, vba_function_name, [self.com_object]) def withdraw_face_to_keep2(self, i_face_to_withdraw: Reference, i_face_adjacent_for_keep: Reference) -> None: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Sub WithdrawFaceToKeep2(Reference iFaceToWithdraw, | Reference iFaceAdjacentForKeep) | | Withdraws an existing Kept face (if face is divided by | operation). | | Parameters: | | iFaceToWithdraw | The face to withdraw | The following | | Boundary object is supported: Face. | iFaceAdjacentForKeep | An adjacent face of iFaceToKeep belonging to the other | operand | The following Boundary object is supported: Face. 
| | Example: | The following example withdraws the existing face Kept face from the Trim | firstTrim: | | call firstTrim.WithdrawFaceToKeep(face) :param Reference i_face_to_withdraw: :param Reference i_face_adjacent_for_keep: :return: None :rtype: None """ return self.trim.WithdrawFaceToKeep2(i_face_to_withdraw.com_object, i_face_adjacent_for_keep.com_object) # # # # Autogenerated comment: # # some methods require a system service call as the methods expects a vb array object # # passed to it and there is no way to do this directly with python. In those cases the following code # # should be uncommented and edited accordingly. Otherwise completely remove all this. # # vba_function_name = 'withdraw_face_to_keep2' # # vba_code = """ # # Public Function withdraw_face_to_keep2(trim) # # Dim iFaceToWithdraw (2) # # trim.WithdrawFaceToKeep2 iFaceToWithdraw # # withdraw_face_to_keep2 = iFaceToWithdraw # # End Function # # """ # # system_service = self.application.system_service # # return system_service.evaluate(vba_code, 0, vba_function_name, [self.com_object]) def withdraw_face_to_remove(self, i_face_to_withdraw: Reference) -> None: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Sub WithdrawFaceToRemove(Reference iFaceToWithdraw) | | Withdraws an existing Removed face (if face not divided by | operation). | | Parameters: | | iFaceToWithdraw | The face to withdraw | The following | | Boundary object is supported: Face. | | Example: | The following example withdraws the existing face Removed face from the | Trim firstTrim: | | call firstTrim.WithdrawFaceToRemove(face) :param Reference i_face_to_withdraw: :return: None :rtype: None """ return self.trim.WithdrawFaceToRemove(i_face_to_withdraw.com_object) # # # # Autogenerated comment: # # some methods require a system service call as the methods expects a vb array object # # passed to it and there is no way to do this directly with python. 
In those cases the following code # # should be uncommented and edited accordingly. Otherwise completely remove all this. # # vba_function_name = 'withdraw_face_to_remove' # # vba_code = """ # # Public Function withdraw_face_to_remove(trim) # # Dim iFaceToWithdraw (2) # # trim.WithdrawFaceToRemove iFaceToWithdraw # # withdraw_face_to_remove = iFaceToWithdraw # # End Function # # """ # # system_service = self.application.system_service # # return system_service.evaluate(vba_code, 0, vba_function_name, [self.com_object]) def withdraw_face_to_remove2(self, i_face_to_withdraw: Reference, i_face_adjacent_for_remove: Reference) -> None: """ .. note:: :class: toggle CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)) | o Sub WithdrawFaceToRemove2(Reference iFaceToWithdraw, | Reference iFaceAdjacentForRemove) | | Withdraws an existing Removed face (if face is divided by | operation). | | Parameters: | | iFaceToWithdraw | The face to withdraw | The following | | Boundary object is supported: Face. | iFaceAdjacentForRemove | An adjacent face of iFaceToRemove belonging to the other | operand | The following Boundary object is supported: Face. | | Example: | The following example withdraws the existing face Removed face from the | Trim firstTrim: | | call firstTrim.WithdrawFaceToRemove(face) :param Reference i_face_to_withdraw: :param Reference i_face_adjacent_for_remove: :return: None :rtype: None """ return self.trim.WithdrawFaceToRemove2(i_face_to_withdraw.com_object, i_face_adjacent_for_remove.com_object) # # # # Autogenerated comment: # # some methods require a system service call as the methods expects a vb array object # # passed to it and there is no way to do this directly with python. In those cases the following code # # should be uncommented and edited accordingly. Otherwise completely remove all this. 
# # vba_function_name = 'withdraw_face_to_remove2' # # vba_code = """ # # Public Function withdraw_face_to_remove2(trim) # # Dim iFaceToWithdraw (2) # # trim.WithdrawFaceToRemove2 iFaceToWithdraw # # withdraw_face_to_remove2 = iFaceToWithdraw # # End Function # # """ # # system_service = self.application.system_service # # return system_service.evaluate(vba_code, 0, vba_function_name, [self.com_object]) def __repr__(self): return f'Trim(name="{self.name}")'
42.781106
117
0.541714
1,846
18,567
5.271939
0.09805
0.04439
0.017263
0.032059
0.84104
0.80857
0.802096
0.750617
0.737567
0.7276
0
0.023095
0.393655
18,567
433
118
42.879908
0.841357
0.723111
0
0
1
0
0.008296
0.008296
0
0
0
0
0
1
0.416667
false
0
0.083333
0.041667
0.916667
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
9
fc87ad6a4d6c5dcc630b832e19be47f8cca25431
182,429
py
Python
anaconda_project/test/test_project_ops.py
vertingo/Anaconda_Videos_Tutos
f30f2a0549a7b81c17f4d5d249edc59eb3c05458
[ "BSD-3-Clause" ]
null
null
null
anaconda_project/test/test_project_ops.py
vertingo/Anaconda_Videos_Tutos
f30f2a0549a7b81c17f4d5d249edc59eb3c05458
[ "BSD-3-Clause" ]
null
null
null
anaconda_project/test/test_project_ops.py
vertingo/Anaconda_Videos_Tutos
f30f2a0549a7b81c17f4d5d249edc59eb3c05458
[ "BSD-3-Clause" ]
null
null
null
# -*- coding: utf-8 -*- # ----------------------------------------------------------------------------- # Copyright (c) 2016, Anaconda, Inc. All rights reserved. # # Licensed under the terms of the BSD 3-Clause License. # The full license is in the file LICENSE.txt, distributed with this software. # ----------------------------------------------------------------------------- from __future__ import absolute_import, print_function import codecs import os from tornado import gen import platform import pytest import tarfile import zipfile from anaconda_project import project_ops from anaconda_project.conda_manager import (CondaManager, CondaEnvironmentDeviations, CondaLockSet, CondaManagerError, push_conda_manager_class, pop_conda_manager_class) from anaconda_project.project import Project import anaconda_project.prepare as prepare from anaconda_project.internal.test.tmpfile_utils import (with_directory_contents, with_temporary_script_commandline, with_directory_contents_completing_project_file, complete_project_file_content) from anaconda_project.local_state_file import LocalStateFile from anaconda_project.project_file import DEFAULT_PROJECT_FILENAME, ProjectFile from anaconda_project.project_lock_file import DEFAULT_PROJECT_LOCK_FILENAME from anaconda_project.test.project_utils import project_no_dedicated_env from anaconda_project.internal.test.fake_frontend import FakeFrontend from anaconda_project.internal.test.test_conda_api import monkeypatch_conda_not_to_use_links from anaconda_project.test.fake_server import fake_server import anaconda_project.internal.keyring as keyring import anaconda_project.internal.conda_api as conda_api import anaconda_project.internal.plugins as plugins_api def test_create(monkeypatch): def check_create(dirname): subdir = os.path.join(dirname, 'foo') # dir doesn't exist project = project_ops.create(subdir, make_directory=False) assert [("Project directory '%s' does not exist." 
% subdir)] == project.problems # failing to create the dir def mock_failed_makedirs(path): raise IOError("nope") monkeypatch.setattr('os.makedirs', mock_failed_makedirs) project = project_ops.create(subdir, make_directory=True) assert [("Project directory '%s' does not exist." % subdir)] == project.problems monkeypatch.undo() # failing to create the .projectignore, but still create dir and anaconda-project.yml from codecs import open as real_open def mock_codecs_open(*args, **kwargs): if args[0].endswith(".projectignore") and args[1] == 'w': raise IOError("nope") else: return real_open(*args, **kwargs) monkeypatch.setattr('codecs.open', mock_codecs_open) project = project_ops.create(subdir, make_directory=True) monkeypatch.undo() assert [] == project.problems assert os.path.isfile(os.path.join(subdir, DEFAULT_PROJECT_FILENAME)) assert not os.path.isfile(os.path.join(subdir, ".projectignore")) # add .projectignore if we create again and it isn't there project = project_ops.create(subdir, make_directory=True) assert [] == project.problems assert os.path.isfile(os.path.join(subdir, DEFAULT_PROJECT_FILENAME)) assert os.path.isfile(os.path.join(subdir, ".projectignore")) assert sorted(list(project.env_specs.keys())) == sorted(['default']) spec = project.env_specs['default'] assert spec.conda_packages == ('anaconda', ) assert spec.pip_packages == () assert spec.channels == () with_directory_contents(dict(), check_create) def test_create_with_properties(): def check_create(dirname): project = project_ops.create(dirname, make_directory=False, name='hello', icon='something.png', description="Hello World") assert [] == project.problems assert os.path.isfile(os.path.join(dirname, DEFAULT_PROJECT_FILENAME)) assert project.name == 'hello' assert project.icon == os.path.join(dirname, 'something.png') assert project.description == "Hello World" with_directory_contents({'something.png': 'not a real png'}, check_create) def test_create_imports_environment_yml(): def 
check_create(dirname): project = project_ops.create(dirname, make_directory=False, name='hello', icon='something.png', description="Hello World") assert [] == project.problems assert os.path.isfile(os.path.join(dirname, DEFAULT_PROJECT_FILENAME)) assert sorted(list(project.env_specs.keys())) == sorted(['stuff']) spec = project.env_specs['stuff'] assert spec.conda_packages == ('a', 'b') assert spec.pip_packages == ('foo', ) assert spec.channels == ('bar', ) with_directory_contents( {'something.png': 'not a real png', "environment.yml": """ name: stuff dependencies: - a - b - pip: - foo channels: - bar """}, check_create) def test_create_imports_environment_yml_when_project_yml_exists_and_fix_problems(): def check_create(dirname): project = project_ops.create(dirname, make_directory=False, name='hello', icon='something.png', description="Hello World", fix_problems=True) assert [] == project.problems assert os.path.isfile(os.path.join(dirname, DEFAULT_PROJECT_FILENAME)) assert sorted(list(project.env_specs.keys())) == sorted(['stuff']) spec = project.env_specs['stuff'] assert spec.conda_packages == ('a', 'b') assert spec.pip_packages == ('foo', ) assert spec.channels == ('bar', ) with_directory_contents( {'something.png': 'not a real png', "anaconda-project.yml": """ name: foo platforms: [linux-32,linux-64,osx-64,win-32,win-64] """, "environment.yml": """ name: stuff dependencies: - a - b - pip: - foo channels: - bar """}, check_create) def test_create_no_import_environment_yml_when_not_fix_problems(): def check_create(dirname): project = project_ops.create(dirname, make_directory=False, name='hello', icon='something.png', description="Hello World", fix_problems=False) assert ["Environment spec 'stuff' from environment.yml is not in anaconda-project.yml."] == project.problems with_directory_contents( {'something.png': 'not a real png', "anaconda-project.yml": """ name: foo platforms: [linux-32,linux-64,osx-64,win-32,win-64] """, "environment.yml": """ name: stuff 
dependencies: - a - b - pip: - foo channels: - bar """}, check_create) def test_create_with_invalid_environment_yml(): def check_create(dirname): project = project_ops.create(dirname, make_directory=False) project_filename = os.path.join(dirname, DEFAULT_PROJECT_FILENAME) assert ["%s: invalid package specification: b $ 1.0" % DEFAULT_PROJECT_FILENAME] == project.problems # we should NOT create the anaconda-project.yml if it would be broken assert not os.path.isfile(project_filename) with_directory_contents( {'something.png': 'not a real png', "environment.yml": """ name: stuff dependencies: - b $ 1.0 """}, check_create) def test_create_imports_notebook(): def check_create(dirname): project = project_ops.create(dirname, make_directory=False, name='hello', description="Hello World") assert [] == project.problems assert [] == project.suggestions assert os.path.isfile(os.path.join(dirname, DEFAULT_PROJECT_FILENAME)) assert sorted(list(project.env_specs.keys())) == sorted(['default']) spec = project.env_specs['default'] # we default to anaconda in the env assert spec.conda_packages == ('anaconda', ) assert spec.channels == () assert ['foo.ipynb'] == list(project.commands.keys()) with_directory_contents({'foo.ipynb': '{}'}, check_create) def test_set_properties(): def check(dirname): project = project_ops.create(dirname, make_directory=False) assert [] == project.problems assert os.path.isfile(os.path.join(dirname, DEFAULT_PROJECT_FILENAME)) assert project.name == os.path.basename(dirname) assert project.icon is None result = project_ops.set_properties(project, name='hello', icon='something.png', description="HELLOOOO") assert result assert project.name == 'hello' assert project.icon == os.path.join(dirname, 'something.png') assert project.description == "HELLOOOO" # set to Unicode result = project_ops.set_properties(project, name=u'hello', icon=u'something.png', description=u'HELLOOOO') assert result assert project.name == u'hello' assert project.icon == 
os.path.join(dirname, 'something.png') assert project.description == u"HELLOOOO" with_directory_contents({'something.png': 'not a real png'}, check) def test_set_properties_with_project_file_problems(): def check(dirname): project = Project(dirname) status = project_ops.set_properties(project, name='foo') assert not status assert ["%s: variables section contains wrong value type 42, should be dict or list of requirements" % project.project_file.basename] == status.errors with_directory_contents_completing_project_file({DEFAULT_PROJECT_FILENAME: "variables:\n 42"}, check) def test_set_invalid_name(): def check(dirname): project = project_ops.create(dirname, make_directory=False) assert [] == project.problems assert os.path.isfile(os.path.join(dirname, DEFAULT_PROJECT_FILENAME)) assert project.name == os.path.basename(dirname) assert project.icon is None result = project_ops.set_properties(project, name=' ') print(repr(result)) assert not result assert 'Failed to set project properties.' == result.status_description assert ["%s: name: field is an empty or all-whitespace string." % (DEFAULT_PROJECT_FILENAME)] == result.errors assert [] == project.problems assert project.name == os.path.basename(dirname) assert project.icon is None with_directory_contents(dict(), check) def test_set_invalid_icon(): def check(dirname): project = project_ops.create(dirname, make_directory=False) assert [] == project.problems assert os.path.isfile(os.path.join(dirname, DEFAULT_PROJECT_FILENAME)) assert project.name == os.path.basename(dirname) assert project.icon is None result = project_ops.set_properties(project, icon='foobar') assert not result assert 'Failed to set project properties.' == result.status_description assert ["Icon file %s does not exist." 
% os.path.join(dirname, 'foobar')] == result.errors assert [] == project.problems assert project.name == os.path.basename(dirname) assert project.icon is None with_directory_contents(dict(), check) def test_add_variables(): def check_add_var(dirname): project = project_no_dedicated_env(dirname) status = project_ops.add_variables(project, None, ['foo', 'baz'], dict(foo='bar')) assert status req = project.find_requirements(project.default_env_spec_name, env_var='foo')[0] assert req.options['default'] == 'bar' req = project.find_requirements(project.default_env_spec_name, env_var='baz')[0] assert req.options.get('default') is None with_directory_contents_completing_project_file({DEFAULT_PROJECT_FILENAME: ""}, check_add_var) def test_add_variables_to_env_spec(): def check_add_var(dirname): project = project_no_dedicated_env(dirname) status = project_ops.add_variables(project, 'myspec', ['foo', 'baz'], dict(foo='bar')) assert status req = project.find_requirements('myspec', env_var='foo')[0] assert req.options['default'] == 'bar' assert [] == project.find_requirements('default', env_var='foo') assert [] == project.find_requirements('default', env_var='baz') with_directory_contents_completing_project_file( {DEFAULT_PROJECT_FILENAME: """ env_specs: default: packages: [python] channels: [] myspec: packages: [python] channels: [] """}, check_add_var) def test_add_variables_bad_env_spec(): def check_add_var(dirname): project = project_no_dedicated_env(dirname) status = project_ops.add_variables(project, 'nope', ['foo', 'baz'], dict(foo='bar')) assert "Environment spec nope doesn't exist." 
# NOTE(review): this chunk of the file had its newlines and indentation
# stripped (whitespace-mangled paste).  The code below is a best-effort
# reformatting of the visible tokens into valid Python.  Indentation inside
# YAML/JSON string literals was also collapsed by the mangling and has been
# reconstructed using conventional two-space nesting -- confirm against the
# file's history before relying on exact byte content of those literals.
#
# Truncated tail of the preceding test (its `def` is outside this chunk), kept
# here as a comment rather than guessed at:
#     assert <expected message> == status.status_description
#     assert not status
#     with_directory_contents_completing_project_file(
#         {DEFAULT_PROJECT_FILENAME: ""}, check_add_var)


def test_add_variables_existing_download():
    # Adding variables must not clobber an existing `downloads:` section.
    def check_set_var(dirname):
        project = project_no_dedicated_env(dirname)
        project_ops.add_variables(project, None, ['foo', 'baz'])
        re_loaded = ProjectFile.load_for_directory(project.directory_path)
        assert dict(foo=None, baz=None, preset=None) == re_loaded.get_value(['variables'])
        assert re_loaded.get_value(['downloads', 'datafile']) == 'http://localhost:8000/data.tgz'
        local_state = LocalStateFile.load_for_directory(dirname)
        assert local_state.get_value(['variables', 'foo']) is None
        assert local_state.get_value(['variables', 'baz']) is None
        assert local_state.get_value(['variables', 'datafile']) is None

    with_directory_contents_completing_project_file(
        {DEFAULT_PROJECT_FILENAME: ('variables:\n'
                                    '  preset: null\n'
                                    'downloads:\n'
                                    '  datafile: http://localhost:8000/data.tgz')}, check_set_var)


def test_add_variables_existing_options():
    # Supplied defaults update existing entries; entries not mentioned keep
    # their current value ('blah' stays 'unchanged', 'woot' stays 'world').
    def check_set_var(dirname):
        project = project_no_dedicated_env(dirname)
        status = project_ops.add_variables(project, None, ['foo', 'baz', 'blah', 'woot', 'woot2'],
                                           dict(foo='bar', baz='qux', woot2='updated'))
        assert status
        re_loaded = ProjectFile.load_for_directory(project.directory_path)
        foo = re_loaded.get_value(['variables', 'foo'])
        assert isinstance(foo, dict)
        assert 'something' in foo
        assert foo['something'] == 42
        baz = re_loaded.get_value(['variables', 'baz'])
        assert isinstance(baz, dict)
        assert 'default' in baz
        assert baz['default'] == 'qux'
        blah = re_loaded.get_value(['variables', 'blah'])
        assert isinstance(blah, dict)
        assert 'default' in blah
        assert blah['default'] == 'unchanged'
        woot = re_loaded.get_value(['variables', 'woot'])
        assert woot == 'world'
        woot2 = re_loaded.get_value(['variables', 'woot2'])
        assert woot2 == 'updated'

    with_directory_contents_completing_project_file(
        {DEFAULT_PROJECT_FILENAME: ('variables:\n'
                                    '  foo: { something: 42 }\n'
                                    '  baz: { default: "hello" }\n'
                                    '  blah: { default: "unchanged" }\n'
                                    '  woot: "world"\n'
                                    '  woot2: "changed"\n'
                                    'downloads:\n'
                                    '  datafile: http://localhost:8000/data.tgz')}, check_set_var)


def test_remove_variables():
    # Removing all variables leaves an empty `variables:` dict, and also
    # clears any local-state values for them.
    def check_remove_var(dirname):
        project = project_no_dedicated_env(dirname)
        project_ops.remove_variables(project, None, ['foo', 'bar'])
        re_loaded = project.project_file.load_for_directory(project.directory_path)
        assert dict() == re_loaded.get_value(['variables'])
        assert re_loaded.get_value(['variables', 'foo']) is None
        assert re_loaded.get_value(['variables', 'bar']) is None
        local_state = LocalStateFile.load_for_directory(dirname)
        assert local_state.get_value(['variables', 'foo']) is None
        assert local_state.get_value(['variables', 'bar']) is None

    with_directory_contents_completing_project_file(
        {DEFAULT_PROJECT_FILENAME: ('variables:\n'
                                    '  foo: baz\n  bar: qux')}, check_remove_var)


def test_remove_variables_with_env_spec():
    # Variables scoped under a named env spec are removed from that spec only.
    def check_remove_var(dirname):
        project = project_no_dedicated_env(dirname)
        pf = project.project_file
        assert pf.get_value(['env_specs', 'myspec', 'variables']) == dict(foo='baz', bar='qux')
        assert pf.get_value(['env_specs', 'myspec', 'variables', 'foo']) is not None
        assert pf.get_value(['env_specs', 'myspec', 'variables', 'bar']) is not None
        project_ops.remove_variables(project, 'myspec', ['foo', 'bar'])
        re_loaded = project.project_file.load_for_directory(project.directory_path)
        assert re_loaded.get_value(['env_specs', 'myspec', 'variables']) == {}
        assert re_loaded.get_value(['env_specs', 'myspec', 'variables', 'foo']) is None
        assert re_loaded.get_value(['env_specs', 'myspec', 'variables', 'bar']) is None

    with_directory_contents_completing_project_file(
        {DEFAULT_PROJECT_FILENAME: """
env_specs:
  default:
    packages: [python]
    channels: []
  myspec:
    packages: [python]
    channels: []
    variables:
      foo: baz
      bar: qux
"""}, check_remove_var)


def test_set_variables():
    # add_variables with defaults does not write local state; set_variables does.
    def check_set_var(dirname):
        project = project_no_dedicated_env(dirname)
        status = project_ops.add_variables(project, None, ['foo', 'baz'], dict(foo='no', baz='nope'))
        assert status
        local_state = LocalStateFile.load_for_directory(dirname)
        assert local_state.get_value(['variables', 'foo']) is None
        assert local_state.get_value(['variables', 'baz']) is None
        status = project_ops.set_variables(project, None, [('foo', 'bar'), ('baz', 'qux')])
        assert status
        local_state = LocalStateFile.load_for_directory(dirname)
        assert local_state.get_value(['variables', 'foo']) == 'bar'
        assert local_state.get_value(['variables', 'baz']) == 'qux'

    with_directory_contents_completing_project_file(
        {DEFAULT_PROJECT_FILENAME: ('variables:\n'
                                    '  preset: null')}, check_set_var)


def test_set_variables_nonexistent():
    # set_variables refuses to set variables not declared in the project file.
    def check_set_var(dirname):
        project = project_no_dedicated_env(dirname)
        status = project_ops.set_variables(project, None, [('foo', 'bar'), ('baz', 'qux')])
        assert not status
        assert status.status_description == "Could not set variables."
        assert status.errors == ["Variable foo does not exist in the project.",
                                 "Variable baz does not exist in the project."]

    with_directory_contents_completing_project_file({DEFAULT_PROJECT_FILENAME: ''}, check_set_var)


def test_set_variables_cannot_create_environment(monkeypatch):
    # If the conda env can't be created, set_variables surfaces the conda error.
    def mock_create(prefix, pkgs, channels, stdout_callback, stderr_callback):
        from anaconda_project.internal import conda_api
        raise conda_api.CondaError("error_from_conda_create")

    monkeypatch.setattr('anaconda_project.internal.conda_api.create', mock_create)

    def check_set_var(dirname):
        project = Project(dirname)
        status = project_ops.set_variables(project, None, [('foo', 'bar'), ('baz', 'qux')])
        assert not status
        expected_env_path = os.path.join(dirname, 'envs', 'default')
        assert status.status_description == ("'%s' doesn't look like it contains a Conda environment yet." %
                                             expected_env_path)
        assert status.errors == ["Failed to create environment at %s: error_from_conda_create" % expected_env_path]

    with_directory_contents_completing_project_file({DEFAULT_PROJECT_FILENAME: ''}, check_set_var)


def test_unset_variables():
    # unset_variables removes previously-set values from local state.
    def check_unset_var(dirname):
        project = project_no_dedicated_env(dirname)
        status = project_ops.add_variables(project, None, ['foo', 'baz'])
        assert status
        status = project_ops.set_variables(project, None, [('foo', 'no'), ('baz', 'nope')])
        assert status
        local_state = LocalStateFile.load_for_directory(dirname)
        assert local_state.get_value(['variables', 'foo']) == 'no'
        assert local_state.get_value(['variables', 'baz']) == 'nope'
        status = project_ops.unset_variables(project, None, ['foo', 'baz'])
        assert status
        local_state = LocalStateFile.load_for_directory(dirname)
        assert local_state.get_value(['variables', 'foo']) is None
        assert local_state.get_value(['variables', 'baz']) is None

    with_directory_contents_completing_project_file(
        {DEFAULT_PROJECT_FILENAME: ('variables:\n'
                                    '  preset: null')}, check_unset_var)


def test_set_and_unset_variables_encrypted():
    # *_PASSWORD / *_SECRET variables go to the keyring, never to local state.
    keyring.reset_keyring_module()

    def check_set_var(dirname):
        project = project_no_dedicated_env(dirname)
        status = project_ops.add_variables(project, None, ['foo_PASSWORD', 'baz_SECRET'],
                                           dict(foo_PASSWORD='no', baz_SECRET='nope'))
        assert status
        local_state = LocalStateFile.load_for_directory(dirname)
        assert local_state.get_value(['variables', 'foo_PASSWORD']) is None
        assert local_state.get_value(['variables', 'baz_SECRET']) is None
        assert set(keyring.fallback_data().values()) == set()
        status = project_ops.set_variables(project, None, [('foo_PASSWORD', 'bar'), ('baz_SECRET', 'qux')])
        assert status
        local_state = LocalStateFile.load_for_directory(dirname)
        # the encrypted variables are NOT in local state
        assert local_state.get_value(['variables', 'foo_PASSWORD']) is None
        assert local_state.get_value(['variables', 'baz_SECRET']) is None
        assert set(keyring.fallback_data().values()) == set(['bar', 'qux'])
        status = project_ops.unset_variables(project, None, ['foo_PASSWORD', 'baz_SECRET'])
        assert status
        assert set(keyring.fallback_data().values()) == set()

    try:
        keyring.enable_fallback_keyring()
        with_directory_contents_completing_project_file(
            {DEFAULT_PROJECT_FILENAME: ('variables:\n'
                                        '  preset: null')}, check_set_var)
    finally:
        keyring.disable_fallback_keyring()


def test_set_and_unset_variables_some_encrypted():
    # Mixed case: encrypted names go to the keyring, plain 'woo' to local state.
    keyring.reset_keyring_module()

    def check_set_var(dirname):
        project = project_no_dedicated_env(dirname)
        status = project_ops.add_variables(project, None, ['foo_PASSWORD', 'baz_SECRET', 'woo'],
                                           dict(foo_PASSWORD='no', baz_SECRET='nope', woo='something'))
        assert status
        local_state = LocalStateFile.load_for_directory(dirname)
        assert local_state.get_value(['variables', 'foo_PASSWORD']) is None
        assert local_state.get_value(['variables', 'baz_SECRET']) is None
        assert local_state.get_value(['variables', 'woo']) is None
        assert set(keyring.fallback_data().values()) == set()
        status = project_ops.set_variables(project, None,
                                           [('foo_PASSWORD', 'bar'), ('baz_SECRET', 'qux'), ('woo', 'w00t')])
        assert status
        local_state = LocalStateFile.load_for_directory(dirname)
        # the encrypted variables are NOT in local state
        assert local_state.get_value(['variables', 'foo_PASSWORD']) is None
        assert local_state.get_value(['variables', 'baz_SECRET']) is None
        assert local_state.get_value(['variables', 'woo']) == 'w00t'
        assert set(keyring.fallback_data().values()) == set(['bar', 'qux'])
        status = project_ops.unset_variables(project, None, ['foo_PASSWORD', 'baz_SECRET', 'woo'])
        assert status
        local_state = LocalStateFile.load_for_directory(dirname)
        assert set(keyring.fallback_data().values()) == set()
        assert local_state.get_value(['variables', 'woo']) is None

    try:
        keyring.enable_fallback_keyring()
        with_directory_contents_completing_project_file(
            {DEFAULT_PROJECT_FILENAME: ('variables:\n'
                                        '  preset: null')}, check_set_var)
    finally:
        keyring.disable_fallback_keyring()


def _test_add_command_line(command_type):
    # Shared driver: add_command with the given shell command type replaces the
    # existing command line of the same type.
    def check_add_command(dirname):
        project = project_no_dedicated_env(dirname)
        result = project_ops.add_command(project, 'default', command_type, 'echo "test"')
        assert result
        re_loaded = project.project_file.load_for_directory(project.directory_path)
        command = re_loaded.get_value(['commands', 'default'])
        assert command[command_type] == 'echo "test"'

    with_directory_contents_completing_project_file(
        {DEFAULT_PROJECT_FILENAME: ('commands:\n'
                                    '  default:\n'
                                    '    %s: echo "pass"\n') % command_type}, check_add_command)


def test_add_command_shell():
    _test_add_command_line("unix")


def test_add_command_windows():
    _test_add_command_line("windows")


def _test_add_command_windows_to_shell(command_type):
    # Adding a 'windows' command alongside an existing 'unix' one keeps both.
    def check_add_command(dirname):
        project = project_no_dedicated_env(dirname)
        result = project_ops.add_command(project, 'default', 'windows', 'echo "test"')
        assert result
        re_loaded = ProjectFile.load_for_directory(project.directory_path)
        command = re_loaded.get_value(['commands', 'default'])
        assert command['windows'] == 'echo "test"'
        assert command['unix'] == 'echo "pass"'

    # NOTE(review): `% command_type` here formats a string containing no `%s`
    # placeholder; with a str argument that raises TypeError ("not all
    # arguments converted") if this helper is ever called.  Preserved as-is
    # since no caller is visible in this chunk -- confirm and either add the
    # placeholder or drop the `%`.
    with_directory_contents_completing_project_file(
        {DEFAULT_PROJECT_FILENAME: ('commands:\n'
                                    '  default:\n'
                                    '    unix: echo "pass"\n') % command_type}, check_add_command)


def test_add_command_bokeh():
    # A new bokeh_app command is created with the default env spec attached.
    def check_add_command(dirname):
        project = project_no_dedicated_env(dirname)
        result = project_ops.add_command(project, 'bokeh_test', 'bokeh_app', 'file.py')
        assert result
        re_loaded = ProjectFile.load_for_directory(project.directory_path)
        command = re_loaded.get_value(['commands', 'bokeh_test'])
        assert len(command.keys()) == 2
        assert command['bokeh_app'] == 'file.py'
        assert command['env_spec'] == 'default'

    with_directory_contents_completing_project_file({DEFAULT_PROJECT_FILENAME: ""}, check_add_command)


def test_add_command_bokeh_overwrites():
    # Re-adding an existing bokeh_app command overwrites its app file.
    def check_add_command(dirname):
        project = project_no_dedicated_env(dirname)
        result = project_ops.add_command(project, 'bokeh_test', 'bokeh_app', 'file.py')
        assert result
        re_loaded = ProjectFile.load_for_directory(project.directory_path)
        command = re_loaded.get_value(['commands', 'bokeh_test'])
        assert len(command.keys()) == 2
        assert command['bokeh_app'] == 'file.py'
        assert command['env_spec'] == 'default'

    with_directory_contents_completing_project_file(
        {DEFAULT_PROJECT_FILENAME: ('commands:\n'
                                    '  bokeh_test:\n'
                                    '    bokeh_app: replaced.py\n'
                                    'packages:\n'
                                    '  - bokeh\n')}, check_add_command)


def test_add_command_sets_env_spec():
    # An explicit env_spec_name is written into the command.
    def check_add_command(dirname):
        project = project_no_dedicated_env(dirname)
        result = project_ops.add_command(project, 'bokeh_test', 'bokeh_app', 'file.py', env_spec_name='foo')
        assert result
        re_loaded = ProjectFile.load_for_directory(project.directory_path)
        command = re_loaded.get_value(['commands', 'bokeh_test'])
        assert command['bokeh_app'] == 'file.py'
        assert command['env_spec'] == 'foo'

    with_directory_contents_completing_project_file(
        {DEFAULT_PROJECT_FILENAME: ('env_specs:\n'
                                    '  foo: { "packages" : ["bokeh"] }\n'
                                    'commands:\n'
                                    '  bokeh_test:\n'
                                    '    bokeh_app: replaced.py\n')}, check_add_command)


def test_add_command_leaves_env_spec():
    # env_spec_name=None leaves the command's existing env_spec untouched.
    def check_add_command(dirname):
        project = project_no_dedicated_env(dirname)
        result = project_ops.add_command(project, 'bokeh_test', 'bokeh_app', 'file.py', env_spec_name=None)
        assert result
        re_loaded = ProjectFile.load_for_directory(project.directory_path)
        command = re_loaded.get_value(['commands', 'bokeh_test'])
        assert command['bokeh_app'] == 'file.py'
        assert command['env_spec'] == 'foo'

    with_directory_contents_completing_project_file(
        {DEFAULT_PROJECT_FILENAME: ('env_specs:\n'
                                    '  foo: { "packages" : ["bokeh"] }\n'
                                    'commands:\n'
                                    '  bokeh_test:\n'
                                    '    env_spec: "foo"\n'
                                    '    bokeh_app: replaced.py\n')}, check_add_command)


def test_add_command_generates_env_spec_suggestion():
    # Pointing a bokeh command at an env spec lacking bokeh produces a project
    # suggestion, which fix_problems_and_suggestions resolves by adding bokeh.
    def check_add_command(dirname):
        project = project_no_dedicated_env(dirname)
        assert project.problems == []
        assert project.suggestions == []
        # the 'bar' env spec does not have bokeh in it
        assert len(project.env_specs['bar'].conda_package_names_set) == 0
        # We are changing the env spec from 'foo' to 'bar'
        result = project_ops.add_command(project, 'bokeh_test', 'bokeh_app', 'file.py', env_spec_name='bar')
        if not result:
            assert result.errors == []  # prints the errors
        assert result
        re_loaded = ProjectFile.load_for_directory(project.directory_path)
        command = re_loaded.get_value(['commands', 'bokeh_test'])
        assert command['bokeh_app'] == 'file.py'
        assert command['env_spec'] == 'bar'
        assert re_loaded.get_value(['env_specs', 'bar', 'packages']) is None
        assert project.problems == []
        assert project.suggestions == [('%s: Command ' % project.project_file.basename) +
                                       'bokeh_test uses env spec bar which does not have the packages: bokeh']
        project.fix_problems_and_suggestions()
        project.project_file.save()
        re_loaded = ProjectFile.load_for_directory(project.directory_path)
        assert re_loaded.get_value(['env_specs', 'bar', 'packages']) == ['bokeh']

    with_directory_contents_completing_project_file(
        {DEFAULT_PROJECT_FILENAME: ('env_specs:\n'
                                    '  foo: { "packages" : ["bokeh"] }\n'
                                    '  bar: {}\n'
                                    'commands:\n'
                                    '  bokeh_test:\n'
                                    '    env_spec: "foo"\n'
                                    '    bokeh_app: replaced.py\n')}, check_add_command)


def test_add_command_leaves_supports_http_options():
    # supports_http_options=None preserves an existing explicit false value.
    def check_add_command(dirname):
        project = project_no_dedicated_env(dirname)
        result = project_ops.add_command(project,
                                         'bokeh_test',
                                         'bokeh_app',
                                         'file.py',
                                         env_spec_name=None,
                                         supports_http_options=None)
        assert result
        re_loaded = ProjectFile.load_for_directory(project.directory_path)
        command = re_loaded.get_value(['commands', 'bokeh_test'])
        assert command['bokeh_app'] == 'file.py'
        assert command['env_spec'] == 'foo'
        assert command['supports_http_options'] is False

    with_directory_contents_completing_project_file(
        {DEFAULT_PROJECT_FILENAME: ('env_specs:\n'
                                    '  foo: { "packages" : ["bokeh"] }\n'
                                    'commands:\n'
                                    '  bokeh_test:\n'
                                    '    supports_http_options: false\n'
                                    '    bokeh_app: replaced.py\n')}, check_add_command)


def test_add_command_leaves_supports_http_options_unset():
    # supports_http_options=None does not invent the key when it was absent.
    def check_add_command(dirname):
        project = project_no_dedicated_env(dirname)
        result = project_ops.add_command(project,
                                         'bokeh_test',
                                         'bokeh_app',
                                         'file.py',
                                         env_spec_name=None,
                                         supports_http_options=None)
        assert result
        re_loaded = ProjectFile.load_for_directory(project.directory_path)
        command = re_loaded.get_value(['commands', 'bokeh_test'])
        assert command['bokeh_app'] == 'file.py'
        assert command['env_spec'] == 'foo'
        assert 'supports_http_options' not in command

    with_directory_contents_completing_project_file(
        {DEFAULT_PROJECT_FILENAME: ('env_specs:\n'
                                    '  foo: { "packages" : ["bokeh"] }\n'
                                    'commands:\n'
                                    '  bokeh_test:\n'
                                    '    bokeh_app: replaced.py\n')}, check_add_command)


def test_add_command_modifies_supports_http_options():
    # supports_http_options=True overrides an existing explicit false.
    def check_add_command(dirname):
        project = project_no_dedicated_env(dirname)
        result = project_ops.add_command(project,
                                         'bokeh_test',
                                         'bokeh_app',
                                         'file.py',
                                         env_spec_name=None,
                                         supports_http_options=True)
        assert result
        re_loaded = ProjectFile.load_for_directory(project.directory_path)
        command = re_loaded.get_value(['commands', 'bokeh_test'])
        assert command['bokeh_app'] == 'file.py'
        assert command['env_spec'] == 'foo'
        assert command['supports_http_options'] is True

    with_directory_contents_completing_project_file(
        {DEFAULT_PROJECT_FILENAME: ('env_specs:\n'
                                    '  foo: { "packages" : ["bokeh"] }\n'
                                    'commands:\n'
                                    '  bokeh_test:\n'
                                    '    supports_http_options: false\n'
                                    '    bokeh_app: replaced.py\n')}, check_add_command)


def test_add_command_notebook():
    # A notebook containing an @fusion.register cell is flagged as registering
    # a fusion function.
    def check_add_command(dirname):
        project = project_no_dedicated_env(dirname)
        result = project_ops.add_command(project, 'notebook_test', 'notebook', 'foo.ipynb')
        assert [] == result.errors
        assert result
        re_loaded = ProjectFile.load_for_directory(project.directory_path)
        command = re_loaded.get_value(['commands', 'notebook_test'])
        assert len(command.keys()) == 3
        assert command['notebook'] == 'foo.ipynb'
        assert command['env_spec'] == 'default'
        assert command['registers_fusion_function'] is True

    with_directory_contents_completing_project_file(
        {DEFAULT_PROJECT_FILENAME: "",
         'foo.ipynb': """
{
  "cells" : [
    {
      "source" : [
        "@fusion.register\\n",
        "def foo():\\n",
        "    pass\\n"
      ]
    }
  ]
}
"""}, check_add_command)


def test_add_command_broken_notebook():
    # Unparseable notebook JSON makes add_command fail with a parse error.
    def check_add_command(dirname):
        project = project_no_dedicated_env(dirname)
        result = project_ops.add_command(project, 'default', 'notebook', 'foo.ipynb')
        assert len(result.errors) > 0
        assert not result
        assert 'Failed to read or parse' in result.errors[0]
        assert result.status_description == 'Unable to add the command.'

    with_directory_contents_completing_project_file({"foo.ipynb": "not valid json"}, check_add_command)


def test_add_command_invalid_type():
    # Unknown command types are rejected with ValueError.
    def check_add_command(dirname):
        project = project_no_dedicated_env(dirname)
        with pytest.raises(ValueError) as excinfo:
            project_ops.add_command(project, 'default', 'foo', 'echo "test"')
        assert 'Invalid command type foo' in str(excinfo.value)

    with_directory_contents_completing_project_file(dict(), check_add_command)


def test_add_command_conflicting_type():
    # bokeh_app can't be added to a command that already has a 'unix' line;
    # the project file must be left unmodified.
    def check_add_command(dirname):
        project = project_no_dedicated_env(dirname)
        result = project_ops.add_command(project, 'default', 'bokeh_app', 'myapp.py')
        assert [("%s: command 'default' has multiple commands in it, 'bokeh_app' can't go with 'unix'" %
                 project.project_file.basename)] == result.errors
        re_loaded = ProjectFile.load_for_directory(project.directory_path)
        command = re_loaded.get_value(['commands', 'default'])
        assert command['unix'] == 'echo "pass"'
        assert 'bokeh_app' not in command

    with_directory_contents_completing_project_file(
        {DEFAULT_PROJECT_FILENAME: ('commands:\n'
                                    '  default:\n'
                                    '    unix: echo "pass"\n')}, check_add_command)


def test_update_command_with_project_file_problems():
    # A broken project file makes update_command fail with the file's problems.
    def check(dirname):
        project = Project(dirname)
        status = project_ops.update_command(project, 'foo', 'unix', 'echo hello')
        assert not status
        assert ["%s: variables section contains wrong value type 42, should be dict or list of requirements" %
                project.project_file.basename] == status.errors

    with_directory_contents_completing_project_file({DEFAULT_PROJECT_FILENAME: "variables:\n  42"}, check)


def test_update_command_invalid_type():
    # Unknown command types are rejected with ValueError, same as add_command.
    def check(dirname):
        project = project_no_dedicated_env(dirname)
        with pytest.raises(ValueError) as excinfo:
            project_ops.update_command(project, 'default', 'foo', 'echo "test"')
        assert 'Invalid command type foo' in str(excinfo.value)

    with_directory_contents_completing_project_file(dict(), check)


def test_update_command_rename():
    # Renaming a command preserves surrounding YAML comments.
    file_content = complete_project_file_content('commands:\n  # this is a comment\n' +
                                                 '  foo:\n    # another comment\n    unix: echo "pass"\n')

    def check(dirname):
        project = project_no_dedicated_env(dirname)
        status = project_ops.update_command(project, 'foo', new_name='bar')
        print(status.status_description)
        print(status.errors)
        assert status
        project.project_file.load()
        with open(os.path.join(dirname, DEFAULT_PROJECT_FILENAME)) as proj_file:
            contents = proj_file.read()
        assert file_content.replace('foo:', 'bar:') == contents
        assert '# this is a comment' in contents
        assert '# another comment' in contents
        assert project.commands['bar']
        assert 'foo' not in project.commands

    with_directory_contents_completing_project_file({DEFAULT_PROJECT_FILENAME: file_content}, check)


def test_update_command_no_command():
    # Updating the type without supplying the command line is a ValueError.
    def check(dirname):
        project = project_no_dedicated_env(dirname)
        with pytest.raises(ValueError) as excinfo:
            project_ops.update_command(project, 'default', 'bokeh_app')
        assert 'must also specify the command' in str(excinfo.value)

    with_directory_contents_completing_project_file(
        {DEFAULT_PROJECT_FILENAME: ('commands:\n'
                                    '  default:\n'
                                    '    unix: echo "pass"\n')}, check)


def test_update_command_does_not_exist():
    def check(dirname):
        project = project_no_dedicated_env(dirname)
        assert [] == project.problems
        result = project_ops.update_command(project, 'myapp', 'bokeh_app', 'myapp.py')
        assert not result
        # NOTE(review): the chunk is truncated here; the original continued
        # with an assertion on result.errors beginning:
        #     assert ["No command 'myapp' ...
found."] == result.errors with_directory_contents_completing_project_file( {DEFAULT_PROJECT_FILENAME: ('commands:\n' ' default:\n' ' unix: echo "pass"\n')}, check) def test_update_command_conflicting_type(): def check(dirname): project = project_no_dedicated_env(dirname) assert [] == project.problems assert 'default' in project.commands command = project.commands['default'] assert command.bokeh_app is None assert command.unix_shell_commandline == 'echo "pass"' result = project_ops.update_command(project, 'default', 'bokeh_app', 'myapp.py') assert result assert 'default' in project.commands command = project.commands['default'] assert command.bokeh_app == 'myapp.py' re_loaded = ProjectFile.load_for_directory(project.directory_path) command = re_loaded.get_value(['commands', 'default']) assert command['bokeh_app'] == 'myapp.py' with_directory_contents_completing_project_file( {DEFAULT_PROJECT_FILENAME: ('commands:\n' ' default:\n' ' unix: echo "pass"\n')}, check) def test_update_command_same_type(): def check(dirname): project = project_no_dedicated_env(dirname) assert [] == project.problems assert 'default' in project.commands command = project.commands['default'] assert command.unix_shell_commandline == 'echo "pass"' result = project_ops.update_command(project, 'default', 'unix', 'echo "blah"') assert result assert 'default' in project.commands command = project.commands['default'] assert command.unix_shell_commandline == 'echo "blah"' re_loaded = ProjectFile.load_for_directory(project.directory_path) command = re_loaded.get_value(['commands', 'default']) assert command['unix'] == 'echo "blah"' with_directory_contents_completing_project_file( {DEFAULT_PROJECT_FILENAME: ('commands:\n' ' default:\n' ' unix: echo "pass"\n')}, check) def test_update_command_add_windows_alongside_shell(): def check(dirname): project = project_no_dedicated_env(dirname) assert [] == project.problems assert 'default' in project.commands command = project.commands['default'] assert 
command.unix_shell_commandline == 'echo "pass"' result = project_ops.update_command(project, 'default', 'windows', 'echo "blah"') assert result assert 'default' in project.commands command = project.commands['default'] assert command.unix_shell_commandline == 'echo "pass"' assert command.windows_cmd_commandline == 'echo "blah"' with_directory_contents_completing_project_file( {DEFAULT_PROJECT_FILENAME: ('commands:\n' ' default:\n' ' unix: echo "pass"\n')}, check) def test_update_command_add_shell_alongside_windows(): def check(dirname): project = project_no_dedicated_env(dirname) assert [] == project.problems assert 'default' in project.commands command = project.commands['default'] assert command.windows_cmd_commandline == 'echo "blah"' result = project_ops.update_command(project, 'default', 'unix', 'echo "pass"') assert result assert 'default' in project.commands command = project.commands['default'] assert command.unix_shell_commandline == 'echo "pass"' assert command.windows_cmd_commandline == 'echo "blah"' with_directory_contents_completing_project_file( {DEFAULT_PROJECT_FILENAME: ('commands:\n' ' default:\n' ' windows: echo "blah"\n')}, check) def test_update_command_empty_update(): def check(dirname): project = project_no_dedicated_env(dirname) assert [] == project.problems assert 'default' in project.commands command = project.commands['default'] assert command.unix_shell_commandline == 'echo "pass"' result = project_ops.update_command(project, 'default') assert result assert 'default' in project.commands command = project.commands['default'] assert command.unix_shell_commandline == 'echo "pass"' with_directory_contents_completing_project_file( {DEFAULT_PROJECT_FILENAME: ('commands:\n' ' default:\n' ' unix: echo "pass"\n')}, check) def test_update_command_to_non_string_value(): def check(dirname): project = project_no_dedicated_env(dirname) assert [] == project.problems assert 'default' in project.commands command = project.commands['default'] assert 
# --- download test helpers -------------------------------------------------
# These monkeypatch FileDownloader.run so no real network I/O happens.


# simulate a successful download: write 'data' to the target file, HTTP 200
def _monkeypatch_download_file(monkeypatch, dirname, filename='MYDATA', checksum=None):
    @gen.coroutine
    def mock_downloader_run(self, loop):
        class Res:
            pass

        res = Res()
        res.code = 200
        with open(os.path.join(dirname, filename), 'w') as out:
            out.write('data')
        if checksum:
            # make the downloader report the hash the test expects
            self._hash = checksum
        raise gen.Return(res)

    monkeypatch.setattr("anaconda_project.internal.http_client.FileDownloader.run", mock_downloader_run)


# simulate a download that gets an HTTP 404 response
def _monkeypatch_download_file_fails(monkeypatch, dirname):
    @gen.coroutine
    def mock_downloader_run(self, loop):
        class Res:
            pass

        res = Res()
        res.code = 404
        raise gen.Return(res)

    monkeypatch.setattr("anaconda_project.internal.http_client.FileDownloader.run", mock_downloader_run)


# simulate a download that never gets an HTTP response at all
def _monkeypatch_download_file_fails_to_get_http_response(monkeypatch, dirname):
    @gen.coroutine
    def mock_downloader_run(self, loop):
        self._errors.append("Nope nope nope")
        raise gen.Return(None)

    monkeypatch.setattr("anaconda_project.internal.http_client.FileDownloader.run", mock_downloader_run)


# happy path: add_download fetches the file and records the url in the project file
def test_add_download(monkeypatch):
    def check(dirname):
        _monkeypatch_download_file(monkeypatch, dirname)

        project = project_no_dedicated_env(dirname)
        status = project_ops.add_download(project, None, 'MYDATA', 'http://localhost:123456')

        assert os.path.isfile(os.path.join(dirname, "MYDATA"))
        assert status
        assert [] == status.errors

        # be sure download was added to the file and saved
        project2 = project_no_dedicated_env(dirname)
        assert {"url": 'http://localhost:123456'} == project2.project_file.get_value(['downloads', 'MYDATA'])

    with_directory_contents_completing_project_file(dict(), check)


# a download can be scoped to a single env spec rather than the whole project
def test_add_download_to_env_spec(monkeypatch):
    def check(dirname):
        _monkeypatch_download_file(monkeypatch, dirname)

        project = project_no_dedicated_env(dirname)
        status = project_ops.add_download(project, 'myspec', 'MYDATA', 'http://localhost:123456')

        assert os.path.isfile(os.path.join(dirname, "MYDATA"))
        assert status
        assert [] == status.errors

        # be sure download was added to the file and saved
        project2 = project_no_dedicated_env(dirname)
        assert {"url": 'http://localhost:123456'
                } == project2.project_file.get_value(['env_specs', 'myspec', 'downloads', 'MYDATA'])

    with_directory_contents_completing_project_file(
        {
            DEFAULT_PROJECT_FILENAME: """
env_specs:
  default:
    packages: [python]
    channels: []
  myspec:
    packages: [python]
    channels: []
"""
        }, check)


# an explicit filename is saved alongside the url in the requirement
def test_add_download_with_filename(monkeypatch):
    def check(dirname):
        FILENAME = 'TEST_FILENAME'
        _monkeypatch_download_file(monkeypatch, dirname, FILENAME)

        project = project_no_dedicated_env(dirname)
        status = project_ops.add_download(project, None, 'MYDATA', 'http://localhost:123456', FILENAME)

        assert os.path.isfile(os.path.join(dirname, FILENAME))
        assert status
        assert [] == status.errors

        # be sure download was added to the file and saved
        project2 = project_no_dedicated_env(dirname)
        requirement = project2.project_file.get_value(['downloads', 'MYDATA'])
        assert requirement['url'] == 'http://localhost:123456'
        assert requirement['filename'] == FILENAME

    with_directory_contents_completing_project_file(dict(), check)


# a hash algorithm/value pair is recorded under the algorithm's key (here md5)
def test_add_download_with_checksum(monkeypatch):
    def check(dirname):
        FILENAME = 'MYDATA'
        _monkeypatch_download_file(monkeypatch, dirname, checksum='DIGEST')

        project = project_no_dedicated_env(dirname)
        status = project_ops.add_download(project,
                                          None,
                                          'MYDATA',
                                          'http://localhost:123456',
                                          hash_algorithm='md5',
                                          hash_value='DIGEST')
        assert os.path.isfile(os.path.join(dirname, FILENAME))
        assert status
        assert [] == status.errors

        # be sure download was added to the file and saved
        project2 = project_no_dedicated_env(dirname)
        requirement = project2.project_file.get_value(['downloads', 'MYDATA'])
        assert requirement['url'] == 'http://localhost:123456'
        assert requirement['md5'] == 'DIGEST'

    with_directory_contents_completing_project_file(dict(), check)


# re-adding an existing download updates the url but keeps the filename attribute
def test_add_download_which_already_exists(monkeypatch):
    def check(dirname):
        _monkeypatch_download_file(monkeypatch, dirname, filename='foobar')

        project = project_no_dedicated_env(dirname)
        assert [] == project.problems

        assert dict(url='http://localhost:56789',
                    filename='foobar') == dict(project.project_file.get_value(['downloads', 'MYDATA']))

        status = project_ops.add_download(project, None, 'MYDATA', 'http://localhost:123456')

        assert os.path.isfile(os.path.join(dirname, "foobar"))
        assert status
        assert [] == status.errors

        # be sure download was added to the file and saved, and
        # the filename attribute was kept
        project2 = project_no_dedicated_env(dirname)
        assert dict(url='http://localhost:123456',
                    filename='foobar') == dict(project2.project_file.get_value(['downloads', 'MYDATA']))

    with_directory_contents_completing_project_file(
        {DEFAULT_PROJECT_FILENAME: 'downloads:\n    MYDATA: { url: "http://localhost:56789", filename: foobar }'},
        check)


# re-adding with a new explicit filename replaces the old filename attribute
def test_add_download_which_already_exists_with_fname(monkeypatch):
    def check(dirname):
        _monkeypatch_download_file(monkeypatch, dirname, filename='bazqux')

        project = project_no_dedicated_env(dirname)
        assert [] == project.problems

        assert dict(url='http://localhost:56789',
                    filename='foobar') == dict(project.project_file.get_value(['downloads', 'MYDATA']))

        status = project_ops.add_download(project, None, 'MYDATA', 'http://localhost:123456', filename="bazqux")

        assert os.path.isfile(os.path.join(dirname, "bazqux"))
        assert status
        assert [] == status.errors

        # be sure download was added to the file and saved, and
        # the filename attribute was kept
        project2 = project_no_dedicated_env(dirname)
        assert dict(url='http://localhost:123456',
                    filename='bazqux') == dict(project2.project_file.get_value(['downloads', 'MYDATA']))

    with_directory_contents_completing_project_file(
        {DEFAULT_PROJECT_FILENAME: 'downloads:\n    MYDATA: { url: "http://localhost:56789", filename: foobar }'},
        check)


# a 404 during download must leave the project file untouched
def test_add_download_fails(monkeypatch):
    def check(dirname):
        _monkeypatch_download_file_fails(monkeypatch, dirname)

        project = project_no_dedicated_env(dirname)
        status = project_ops.add_download(project, None, 'MYDATA', 'http://localhost:123456')

        assert not os.path.isfile(os.path.join(dirname, "MYDATA"))
        assert not status
        assert ['Error downloading http://localhost:123456: response code 404'] == status.errors

        # be sure download was NOT added to the file
        project2 = project_no_dedicated_env(dirname)
        assert project2.project_file.get_value(['downloads', 'MYDATA']) is None
        # should have been dropped from the original project object also
        assert project.project_file.get_value(['downloads', 'MYDATA']) is None

    with_directory_contents_completing_project_file(dict(), check)


# no HTTP response at all: downloader errors are surfaced and nothing is saved
def test_add_download_fails_to_get_http_response(monkeypatch):
    def check(dirname):
        _monkeypatch_download_file_fails_to_get_http_response(monkeypatch, dirname)

        project = project_no_dedicated_env(dirname)
        status = project_ops.add_download(project, None, 'MYDATA', 'http://localhost:123456')

        assert not os.path.isfile(os.path.join(dirname, "MYDATA"))
        assert not status
        assert ['Nope nope nope'] == status.errors

        # be sure download was NOT added to the file
        project2 = project_no_dedicated_env(dirname)
        assert project2.project_file.get_value(['downloads', 'MYDATA']) is None
        # should have been dropped from the original project object also
        assert project.project_file.get_value(['downloads', 'MYDATA']) is None

    with_directory_contents_completing_project_file(dict(), check)


# project-file problems block add_download before any network activity
def test_add_download_with_project_file_problems():
    def check(dirname):
        project = project_no_dedicated_env(dirname)
        status = project_ops.add_download(project, None, 'MYDATA', 'http://localhost:123456')

        assert not os.path.isfile(os.path.join(dirname, "MYDATA"))
        assert not status
        assert ["%s: variables section contains wrong value type 42, should be dict or list of requirements" %
                project.project_file.basename] == status.errors

        # be sure download was NOT added to the file
        project2 = project_no_dedicated_env(dirname)
        assert project2.project_file.get_value(['downloads', 'MYDATA']) is None
        # should have been dropped from the original project object also
        assert project.project_file.get_value(['downloads', 'MYDATA']) is None

    with_directory_contents_completing_project_file({DEFAULT_PROJECT_FILENAME: "variables:\n  42"}, check)


# removing a download deletes the config entry (no prepare_result needed)
def test_remove_download(monkeypatch):
    def check(dirname):
        project = project_no_dedicated_env(dirname)
        status = project_ops.remove_download(project, None, 'MYDATA', prepare_result=None)
        assert status
        assert [] == status.errors

        # be sure it was removed
        project2 = project_no_dedicated_env(dirname)
        assert project2.project_file.get_value(['downloads', 'MYDATA']) is None
        assert not os.path.isfile(os.path.join(dirname, "MYDATA"))

    with_directory_contents_completing_project_file(
        {DEFAULT_PROJECT_FILENAME: """
downloads:
  MYDATA: "http://localhost:123456"
"""}, check)


# with a prepare_result, remove_download also deletes the downloaded file
def test_remove_download_with_prepare(monkeypatch):
    def check(dirname):
        _monkeypatch_download_file(monkeypatch, dirname)

        project = project_no_dedicated_env(dirname)
        result = prepare.prepare_without_interaction(project)
        assert result
        assert os.path.isfile(os.path.join(dirname, "MYDATA"))

        status = project_ops.remove_download(project, None, 'MYDATA', prepare_result=result)
        assert status
        assert [] == status.errors
        assert not os.path.isfile(os.path.join(dirname, "MYDATA"))

        # be sure download was removed
        project2 = project_no_dedicated_env(dirname)
        assert project2.project_file.get_value(['downloads', 'MYDATA']) is None

    with_directory_contents_completing_project_file(
        {DEFAULT_PROJECT_FILENAME: """
downloads:
  MYDATA: "http://localhost:123456"
"""}, check)


# removal works for a download scoped under a specific env spec
def test_remove_download_with_env_spec(monkeypatch):
    def check(dirname):
        config_path = ['env_specs', 'myspec', 'downloads', 'MYDATA']
        project = project_no_dedicated_env(dirname)
        assert "http://localhost:123456" == project.project_file.get_value(config_path)
        status = project_ops.remove_download(project, 'myspec', 'MYDATA', prepare_result=None)
        assert status
        assert [] == status.errors

        # be sure it was removed
        project2 = project_no_dedicated_env(dirname)
        assert project2.project_file.get_value(config_path) is None

    with_directory_contents_completing_project_file(
        {DEFAULT_PROJECT_FILENAME: """
env_specs:
  default:
    packages: [python]
    channels: []
  myspec:
    packages: [python]
    channels: []
    downloads:
      MYDATA: "http://localhost:123456"
"""}, check)


# the other add_env_spec tests use a mock CondaManager, but we want to have
# one test that does the real thing to be sure it works.
@pytest.mark.slow
def test_add_env_spec_with_real_conda_manager(monkeypatch):
    monkeypatch_conda_not_to_use_links(monkeypatch)

    def check(dirname):
        project = Project(dirname)
        status = project_ops.add_env_spec(project, name='foo', packages=['numpy'], channels=[])
        if not status:
            print(status.status_description)
            print(repr(status.errors))
        assert status
        assert 'foo' in project.env_specs
        env = project.env_specs['foo']
        assert env.lock_set.enabled
        assert os.path.isfile(os.path.join(dirname, DEFAULT_PROJECT_LOCK_FILENAME))

        # be sure it was really done
        project2 = Project(dirname)
        env_commented_map = project2.project_file.get_value(['env_specs', 'foo'])
        assert dict(packages=['numpy'], channels=[]) == dict(env_commented_map)
        assert os.path.isdir(os.path.join(dirname, 'envs', 'foo', 'conda-meta'))

    with_directory_contents_completing_project_file({DEFAULT_PROJECT_LOCK_FILENAME: "locking_enabled: true\n"}, check)


# install a stub CondaManager whose behavior is controlled by the arguments;
# pair every call with _pop_conda_test (see _with_conda_test below)
def _push_conda_test(fix_works, missing_packages, wrong_version_packages, remove_error, resolve_dependencies,
                     resolve_dependencies_error):
    class TestCondaManager(CondaManager):
        def __init__(self, frontend):
            self.fix_works = fix_works
            self.fixed = False
            self.deviations = CondaEnvironmentDeviations(summary="test deviation",
                                                         missing_packages=missing_packages,
                                                         wrong_version_packages=wrong_version_packages,
                                                         missing_pip_packages=(),
                                                         wrong_version_pip_packages=())

        def resolve_dependencies(self, package_specs, channels, platforms):
            if resolve_dependencies_error is not None:
                raise CondaManagerError(resolve_dependencies_error)
            else:
                return CondaLockSet(resolve_dependencies, platforms=platforms)

        def find_environment_deviations(self, prefix, spec):
            if self.fixed:
                return CondaEnvironmentDeviations(summary="fixed",
                                                  missing_packages=(),
                                                  wrong_version_packages=(),
                                                  missing_pip_packages=(),
                                                  wrong_version_pip_packages=())
            else:
                return self.deviations

        def fix_environment_deviations(self, prefix, spec, deviations=None, create=True):
            if self.fix_works:
                self.fixed = True

        def remove_packages(self, prefix, packages):
            if remove_error is not None:
                raise CondaManagerError(remove_error)

    push_conda_manager_class(TestCondaManager)
# undo _push_conda_test's stub CondaManager installation
def _pop_conda_test():
    pop_conda_manager_class()


# run f() with the stub CondaManager installed; always pops the stub afterward
def _with_conda_test(f,
                     fix_works=True,
                     missing_packages=(),
                     wrong_version_packages=(),
                     remove_error=None,
                     resolve_dependencies=None,
                     resolve_dependencies_error=None):
    try:
        if resolve_dependencies is None:
            resolve_dependencies = {'all': []}
        _push_conda_test(fix_works, missing_packages, wrong_version_packages, remove_error, resolve_dependencies,
                         resolve_dependencies_error)
        f()
    finally:
        _pop_conda_test()


# add_env_spec writes the spec to the project file and a lock entry to the lock file
def test_add_env_spec():
    def check(dirname):
        def attempt():
            project = Project(dirname)
            status = project_ops.add_env_spec(project, name='foo', packages=[], channels=[])
            assert status
            # with "None" for the args
            status = project_ops.add_env_spec(project, name='bar', packages=None, channels=None)
            assert status

        _with_conda_test(attempt)

        # be sure we really made the config changes
        project2 = Project(dirname)
        assert dict(packages=[], channels=[]) == dict(project2.project_file.get_value(['env_specs', 'foo']))
        assert dict(packages=[], channels=[]) == dict(project2.project_file.get_value(['env_specs', 'bar']))

        assert dict(
            locked=True,
            env_spec_hash='a30f02c961ef4f3fe07ceb09e0906394c3885a79',
            packages=dict(all=[]),
            platforms=['linux-64', 'osx-64', 'win-64']) == dict(project2.lock_file.get_value(['env_specs', 'foo']))
        assert dict(
            locked=True,
            env_spec_hash='a30f02c961ef4f3fe07ceb09e0906394c3885a79',
            packages=dict(all=[]),
            platforms=['linux-64', 'osx-64', 'win-64']) == dict(project2.lock_file.get_value(['env_specs', 'bar']))

    with_directory_contents_completing_project_file({DEFAULT_PROJECT_LOCK_FILENAME: "locking_enabled: true\n"}, check)


# with no global platforms list, the new spec gets its own explicit platforms
def test_add_env_spec_no_global_platforms():
    def check(dirname):
        def attempt():
            project = Project(dirname)

            # move the global platforms list onto the default env spec
            platforms = project.project_file.get_value(['platforms'])
            project.project_file.unset_value(['platforms'])
            project.project_file.set_value(['env_specs', 'default', 'platforms'], platforms)
            project.save()
            assert project.project_file.get_value(['platforms']) is None
            assert len(project.env_specs['default'].platforms) > 0

            status = project_ops.add_env_spec(project, name='foo', packages=[], channels=[])
            assert status
            assert ('linux-64', 'osx-64', 'win-64') == project.env_specs['foo'].platforms

        _with_conda_test(attempt)

        # be sure we really made the config changes
        project2 = Project(dirname)
        assert dict(
            packages=[], channels=[],
            platforms=['linux-64', 'osx-64', 'win-64']) == dict(project2.project_file.get_value(['env_specs', 'foo']))

        assert dict(
            locked=True,
            env_spec_hash='a30f02c961ef4f3fe07ceb09e0906394c3885a79',
            packages=dict(all=[]),
            platforms=['linux-64', 'osx-64', 'win-64']) == dict(project2.lock_file.get_value(['env_specs', 'foo']))

    with_directory_contents_completing_project_file({DEFAULT_PROJECT_LOCK_FILENAME: "locking_enabled: true\n"}, check)


# packages and channels passed to add_env_spec are written out verbatim
def test_add_env_spec_with_packages_and_channels():
    def check(dirname):
        def attempt():
            project = Project(dirname)
            status = project_ops.add_env_spec(project,
                                              name='foo',
                                              packages=['a', 'b', 'c'],
                                              channels=['c1', 'c2', 'c3'])
            assert status

        _with_conda_test(attempt)

        # be sure download was added to the file and saved
        project2 = Project(dirname)
        assert dict(packages=['a', 'b', 'c'],
                    channels=['c1', 'c2', 'c3']) == dict(project2.project_file.get_value(['env_specs', 'foo']))

        env_spec = project2.env_specs['foo']
        assert env_spec.name == 'foo'
        assert env_spec.lock_set.enabled
        assert env_spec.lock_set.equivalent_to(CondaLockSet({'all': []}, platforms=['linux-64', 'osx-64', 'win-64']))

    with_directory_contents_completing_project_file({DEFAULT_PROJECT_LOCK_FILENAME: "locking_enabled: true\n"}, check)


# adding to an existing spec merges new packages/channels with the old lists
def test_add_env_spec_extending_existing_lists():
    def check(dirname):
        def attempt():
            project = Project(dirname)
            status = project_ops.add_env_spec(project,
                                              name='foo',
                                              packages=['a', 'b', 'c'],
                                              channels=['c1', 'c2', 'c3'])
            assert status

        _with_conda_test(attempt)

        # be sure download was added to the file and saved
        project2 = Project(dirname)
        assert dict(packages=['b', 'a', 'c'],
                    channels=['c3', 'c1', 'c2']) == dict(project2.project_file.get_value(['env_specs', 'foo']))

    with_directory_contents_completing_project_file(
        {DEFAULT_PROJECT_FILENAME: """
env_specs:
  foo:
    packages: [ 'b' ]
    channels: [ 'c3']
"""}, check)


# a versioned spec (b=2.0) replaces the older pin (b=1.0) for the same package
def test_add_env_spec_extending_existing_lists_with_versions():
    def check(dirname):
        def attempt():
            project = Project(dirname)
            status = project_ops.add_env_spec(project,
                                              name='foo',
                                              packages=['a', 'b=2.0', 'c'],
                                              channels=['c1', 'c2', 'c3'])
            assert status

        _with_conda_test(attempt)

        # be sure download was added to the file and saved
        project2 = Project(dirname)
        assert dict(packages=['b=2.0', 'a', 'c'],
                    channels=['c3', 'c1', 'c2']) == dict(project2.project_file.get_value(['env_specs', 'foo']))

    with_directory_contents_completing_project_file(
        {DEFAULT_PROJECT_FILENAME: """
env_specs:
  foo:
    packages: [ 'b=1.0' ]
    channels: [ 'c3']
"""}, check)


# a dependency-resolution failure aborts add_env_spec with no config changes
def test_add_env_spec_cannot_resolve_deps():
    def check(dirname):
        def attempt():
            project = Project(dirname, frontend=FakeFrontend())
            status = project_ops.add_env_spec(project, name='foo', packages=[], channels=[])
            assert status.status_description == "Error resolving dependencies for foo: NOPE."
            assert status.errors == []
            assert project.frontend.logs == []
            assert not status

        _with_conda_test(attempt, resolve_dependencies_error="NOPE")

        # be sure we didn't make the config changes
        project2 = Project(dirname)
        assert project2.project_file.get_value(['env_specs', 'foo']) is None
        assert project2.lock_file.get_value(['env_specs', 'foo']) is None

    with_directory_contents_completing_project_file({DEFAULT_PROJECT_LOCK_FILENAME: "locking_enabled: true\n"}, check)
# removing an env spec deletes both its project-file and lock-file entries
def test_remove_env_spec():
    def check(dirname):
        def attempt():
            project = Project(dirname)
            assert project.lock_file.get_value(['env_specs', 'hello'], None) is not None
            assert 'hello' in project.env_specs
            env = project.env_specs['hello']
            assert env.lock_set.enabled
            assert env.lock_set.package_specs_for_current_platform == ('a=1.0=1', )
            status = project_ops.remove_env_spec(project, name='hello')
            assert [] == status.errors
            assert status.status_description == "Nothing to clean up for environment 'hello'."
            assert status
            assert 'hello' not in project.env_specs

        _with_conda_test(attempt)

        # we should have cleaned up the lock file too
        project2 = Project(dirname)
        assert project2.lock_file.get_value(['env_specs', 'hello'], None) is None

    with_directory_contents_completing_project_file(
        {DEFAULT_PROJECT_FILENAME: """
name: foo
env_specs:
  hello:
    packages:
     - a
  another:
    packages:
     - b
""",
         DEFAULT_PROJECT_LOCK_FILENAME: """
locking_enabled: true
env_specs:
  hello:
    platforms: [linux-32,linux-64,osx-64,win-32,win-64]
    packages:
      all:
      - a=1.0=1
"""}, check)


# the last remaining env spec cannot be removed
def test_remove_only_env_spec():
    def check(dirname):
        def attempt():
            project = Project(dirname)
            assert 'hello' in project.env_specs
            status = project_ops.remove_env_spec(project, name='hello')
            assert [] == status.errors
            assert status.status_description == ("At least one environment spec is required; " +
                                                 "'hello' is the only one left.")
            assert not status
            assert 'hello' in project.env_specs

        _with_conda_test(attempt)

        # we should have cleaned up the lock file too
        project2 = Project(dirname)
        assert project2.lock_file.get_value(['env_specs', 'hello'], None) is None

    with_directory_contents_completing_project_file(
        {DEFAULT_PROJECT_FILENAME: """
name: foo
env_specs:
  hello:
    packages:
     - a
"""}, check)


# removal is refused when a command still references the env spec
def test_remove_env_spec_causes_problem():
    def check(dirname):
        def attempt():
            project = Project(dirname)
            assert project.lock_file.get_value(['env_specs', 'hello'], None) is not None
            assert 'hello' in project.env_specs
            env = project.env_specs['hello']
            assert env.lock_set.enabled
            assert env.lock_set.package_specs_for_current_platform == ('a=1.0=1', )
            status = project_ops.remove_env_spec(project, name='hello')
            assert [("anaconda-project.yml: env_spec 'hello' for command 'default'" +
                     " does not appear in the env_specs section")] == status.errors
            assert status.status_description == "Unable to load the project."
            assert not status
            assert 'hello' in project.env_specs

        _with_conda_test(attempt)

        # we should not have made changes
        project2 = Project(dirname)
        assert project2.lock_file.get_value(['env_specs', 'hello'], None) is not None
        assert project2.project_file.get_value(['env_specs', 'hello'], None) is not None

    with_directory_contents_completing_project_file(
        {DEFAULT_PROJECT_FILENAME: """
name: foo
commands:
  default:
    unix: echo hi
    env_spec: hello
env_specs:
  hello:
    packages:
     - a
  another:
    packages:
     - b
""",
         DEFAULT_PROJECT_LOCK_FILENAME: """
locking_enabled: true
env_specs:
  hello:
    platforms: [linux-32,linux-64,osx-64,win-32,win-64]
    packages:
      all:
      - a=1.0=1
"""}, check)


# env_spec_name=None targets the global packages/channels sections
def test_add_packages_to_all_environments():
    def check(dirname):
        def attempt():
            project = Project(dirname)
            status = project_ops.add_packages(project,
                                              env_spec_name=None,
                                              packages=['foo', 'bar'],
                                              channels=['hello', 'world'])
            assert status
            assert [] == status.errors

        _with_conda_test(attempt)

        # be sure we really made the config changes
        project2 = Project(dirname)
        assert [dict(pip=[]), 'foo', 'bar'] == list(project2.project_file.get_value('packages'))
        assert ['hello', 'world'] == list(project2.project_file.get_value('channels'))

        for env_spec in project2.env_specs.values():
            assert env_spec.lock_set.enabled
            assert env_spec.lock_set.equivalent_to(
                CondaLockSet({'all': []}, platforms=['linux-64', 'osx-64', 'win-64']))

    with_directory_contents_completing_project_file(
        {DEFAULT_PROJECT_FILENAME: """
packages:
  - pip: []  # be sure we don't break with this in the list
""",
         DEFAULT_PROJECT_LOCK_FILENAME: "locking_enabled: true\n"}, check)


# a resolution failure aborts add_packages with no config changes
def test_add_packages_cannot_resolve_deps():
    def check(dirname):
        def attempt():
            project = Project(dirname, frontend=FakeFrontend())
            status = project_ops.add_packages(project,
                                              env_spec_name=None,
                                              packages=['foo', 'bar'],
                                              channels=['hello', 'world'])
            assert status.status_description == "Error resolving dependencies for default: NOPE."
            assert status.errors == []
            assert project.frontend.logs == []
            assert not status

        _with_conda_test(attempt, resolve_dependencies_error="NOPE")

        # be sure we didn't make the config changes
        project2 = Project(dirname)
        assert project2.project_file.get_value('packages', None) is None
        assert project2.project_file.get_value('channels', None) is None

        for env_spec in project2.env_specs.values():
            assert env_spec.lock_set.enabled
            assert env_spec.lock_set.platforms == ()

    with_directory_contents_completing_project_file({DEFAULT_PROJECT_LOCK_FILENAME: "locking_enabled: true\n"}, check)


# naming a nonexistent env spec fails (with no error detail in .errors)
def test_add_packages_nonexistent_environment():
    def check(dirname):
        def attempt():
            project = Project(dirname)
            status = project_ops.add_packages(project,
                                              env_spec_name="not_an_env",
                                              packages=['foo', 'bar'],
                                              channels=['hello', 'world'])
            assert not status
            assert [] == status.errors

        _with_conda_test(attempt)

    with_directory_contents_completing_project_file(dict(), check)


# a malformed package spec ('=') is rejected with a clear error
def test_add_packages_invalid_spec():
    def check(dirname):
        def attempt():
            project = Project(dirname)
            status = project_ops.add_packages(project, env_spec_name=None, packages=['='], channels=[])
            assert not status
            assert 'Could not add packages.' == status.status_description
            assert ['Bad package specifications: =.'] == status.errors

        _with_conda_test(attempt)

    with_directory_contents_completing_project_file(dict(), check)
# removing globally strips the packages from the global list and every env spec;
# a remove_error from conda is tolerated because the envs dir forces a real removal attempt
def test_remove_packages_from_all_environments():
    def check(dirname):
        def attempt():
            os.makedirs(os.path.join(dirname, 'envs', 'hello'))  # forces us to really run remove_packages
            project = Project(dirname)
            for env_spec in project.env_specs.values():
                assert env_spec.lock_set.enabled
                assert env_spec.lock_set.platforms == ()
            assert ['foo', 'bar', 'baz'] == list(project.project_file.get_value('packages'))
            assert ['foo', 'woot'] == list(project.project_file.get_value(['env_specs', 'hello', 'packages'], []))
            status = project_ops.remove_packages(project, env_spec_name=None, packages=['foo', 'bar'])
            assert [] == status.errors
            assert status

        _with_conda_test(attempt, remove_error="Removal fail")

        # be sure we really made the config changes
        project2 = Project(dirname)
        assert ['baz'] == list(project2.project_file.get_value('packages'))
        assert ['woot'] == list(project2.project_file.get_value(['env_specs', 'hello', 'packages']))

        for env_spec in project2.env_specs.values():
            assert env_spec.lock_set.enabled
            assert env_spec.lock_set.equivalent_to(
                CondaLockSet({'all': []}, platforms=['linux-64', 'osx-64', 'win-64']))

    with_directory_contents_completing_project_file(
        {DEFAULT_PROJECT_FILENAME: """
packages:
  - foo
  - bar
  - baz
env_specs:
  hello:
    packages:
     - foo
     - woot
  hello2:
    packages:
     - foo
     - bar
     - pip: []  # make sure we don't choke on non-string items in list
""",
         DEFAULT_PROJECT_LOCK_FILENAME: "locking_enabled: true\n"}, check)


# removing from one env spec updates only that spec; global comments survive
def test_remove_packages_from_one_environment():
    def check(dirname):
        def attempt():
            project = Project(dirname)
            for env_spec in project.env_specs.values():
                assert env_spec.lock_set.enabled
                assert env_spec.lock_set.platforms == ()
            assert ['qbert', 'foo', 'bar'] == list(project.project_file.get_value('packages'))
            assert ['foo'] == list(project.project_file.get_value(['env_specs', 'hello', 'packages'], []))
            status = project_ops.remove_packages(project, env_spec_name='hello', packages=['foo', 'bar'])
            assert status
            assert [] == status.errors

        _with_conda_test(attempt)

        # be sure we really made the config changes
        project2 = Project(dirname)
        # note that hello will still inherit the deps from the global packages,
        # and that's fine
        assert ['qbert'] == list(project2.project_file.get_value('packages'))
        assert [] == list(project2.project_file.get_value(['env_specs', 'hello', 'packages'], []))

        # be sure we didn't delete comments from global packages section
        content = codecs.open(project2.project_file.filename, 'r', 'utf-8').read()
        assert '# this is a pre comment' in content
        assert '# this is a post comment' in content

        for env_spec in project2.env_specs.values():
            if env_spec.name == 'hello':
                assert env_spec.lock_set.enabled
                assert env_spec.lock_set.equivalent_to(
                    CondaLockSet({'all': []}, platforms=['linux-64', 'osx-64', 'win-64']))
            else:
                assert env_spec.lock_set.enabled

    with_directory_contents_completing_project_file(
        {DEFAULT_PROJECT_FILENAME: """
packages:
  # this is a pre comment
  - qbert
  # this is a post comment
  - foo
  - bar
env_specs:
  hello:
    packages:
     - foo
""",
         DEFAULT_PROJECT_LOCK_FILENAME: "locking_enabled: true\n"}, check)


# sibling env specs and their comments are untouched by a scoped removal
def test_remove_packages_from_one_environment_leaving_others_unaffected():
    def check(dirname):
        def attempt():
            project = Project(dirname)
            assert ['qbert', 'foo', 'bar'] == list(project.project_file.get_value('packages'))
            assert ['foo'] == list(project.project_file.get_value(['env_specs', 'hello', 'packages'], []))
            status = project_ops.remove_packages(project, env_spec_name='hello', packages=['foo', 'bar'])
            assert status
            assert [] == status.errors

        _with_conda_test(attempt)

        # be sure we really made the config changes
        project2 = Project(dirname)
        assert ['qbert'] == list(project2.project_file.get_value('packages'))
        assert [] == list(project2.project_file.get_value(['env_specs', 'hello', 'packages'], []))
        assert set(['baz', 'foo', 'bar']) == set(project2.project_file.get_value(
            ['env_specs', 'another', 'packages'], []))
        assert project2.env_specs['another'].conda_package_names_set == set(['qbert', 'foo', 'bar', 'baz'])
        assert project2.env_specs['hello'].conda_package_names_set == set(['qbert'])

        # be sure we didn't delete comments from the env
        content = codecs.open(project2.project_file.filename, 'r', 'utf-8').read()
        assert '# this is a pre comment' in content
        assert '# this is a post comment' in content

    with_directory_contents_completing_project_file(
        {DEFAULT_PROJECT_FILENAME: """
packages:
  - qbert
  - foo
  - bar
env_specs:
  hello:
    packages:
     - foo
  another:
    packages:
     # this is a pre comment
     - baz
     # this is a post comment
"""}, check)


# a resolution failure during remove_packages surfaces the resolver error
# (this test continues past the end of this chunk of the file)
def test_remove_packages_cannot_resolve_deps():
    def check(dirname):
        def attempt():
            os.makedirs(os.path.join(dirname, 'envs', 'hello'))  # forces us to really run remove_packages
            project = Project(dirname, frontend=FakeFrontend())
            for env_spec in project.env_specs.values():
                assert env_spec.lock_set.enabled
                assert env_spec.lock_set.platforms == ()
            assert ['foo', 'bar', 'baz'] == list(project.project_file.get_value('packages'))
            assert ['foo', 'woot'] == list(project.project_file.get_value(['env_specs', 'hello', 'packages'], []))
            status = project_ops.remove_packages(project, env_spec_name=None, packages=['foo', 'bar'])
            assert status.status_description == "Error resolving dependencies for hello: NOPE."
assert status.errors == [] assert project.frontend.logs == [] assert not status _with_conda_test(attempt, resolve_dependencies_error="NOPE") # be sure we didn't make the config changes project2 = Project(dirname) assert ['foo', 'bar', 'baz'] == list(project2.project_file.get_value('packages')) assert ['foo', 'woot'] == list(project2.project_file.get_value(['env_specs', 'hello', 'packages'])) for env_spec in project2.env_specs.values(): assert env_spec.lock_set.enabled assert env_spec.lock_set.platforms == () with_directory_contents_completing_project_file( {DEFAULT_PROJECT_FILENAME: """ packages: - foo - bar - baz env_specs: hello: packages: - foo - woot """, DEFAULT_PROJECT_LOCK_FILENAME: "locking_enabled: true\n"}, check) def test_remove_packages_from_nonexistent_environment(): def check(dirname): def attempt(): project = Project(dirname) assert ['foo', 'bar'] == list(project.project_file.get_value('packages')) status = project_ops.remove_packages(project, env_spec_name='not_an_environment', packages=['foo', 'bar']) assert not status assert [] == status.errors assert "Environment spec not_an_environment doesn't exist." 
== status.status_description _with_conda_test(attempt) # be sure we didn't make the config changes project2 = Project(dirname) assert ['foo', 'bar'] == list(project2.project_file.get_value('packages')) with_directory_contents_completing_project_file( {DEFAULT_PROJECT_FILENAME: """ packages: - foo - bar """}, check) def test_remove_packages_with_project_file_problems(): def check(dirname): project = Project(dirname) status = project_ops.remove_packages(project, env_spec_name=None, packages=['foo']) assert not status assert ["%s: variables section contains wrong value type 42, should be dict or list of requirements" % project.project_file.basename] == status.errors with_directory_contents_completing_project_file({DEFAULT_PROJECT_FILENAME: "variables:\n 42"}, check) def test_add_platforms_to_all_environments(): def check(dirname): def attempt(): project = Project(dirname) status = project_ops.add_platforms(project, env_spec_name=None, platforms=['linux-64', 'win-64']) assert status assert [] == status.errors _with_conda_test(attempt) # be sure we really made the config changes project2 = Project(dirname) assert ['osx-32', 'linux-64', 'win-64'] == list(project2.project_file.get_value('platforms')) for env_spec in project2.env_specs.values(): assert env_spec.lock_set.enabled assert env_spec.lock_set.equivalent_to( CondaLockSet({'all': []}, platforms=['linux-64', 'osx-32', 'win-64'])) with_directory_contents_completing_project_file( {DEFAULT_PROJECT_FILENAME: """ platforms: [osx-32] """, DEFAULT_PROJECT_LOCK_FILENAME: "locking_enabled: true\n"}, check) def test_add_platforms_already_exists(): def check(dirname): def attempt(): project = Project(dirname) status = project_ops.add_platforms(project, env_spec_name=None, platforms=['osx-32']) assert status assert [] == status.errors _with_conda_test(attempt) project2 = Project(dirname) assert ['osx-32', 'win-64'] == list(project2.project_file.get_value('platforms')) with_directory_contents_completing_project_file( 
{DEFAULT_PROJECT_FILENAME: """ platforms: [osx-32, win-64] """, DEFAULT_PROJECT_LOCK_FILENAME: "locking_enabled: true\n"}, check) def test_add_platforms_cannot_resolve_deps(): def check(dirname): def attempt(): project = Project(dirname, frontend=FakeFrontend()) assert project.project_file.get_value('platforms', None) == ['linux-64', 'osx-64', 'win-64'] status = project_ops.add_platforms(project, env_spec_name=None, platforms=['osx-32', 'win-32']) assert status.status_description == "Error resolving dependencies for default: NOPE." assert status.errors == [] assert project.frontend.logs == [] assert not status _with_conda_test(attempt, resolve_dependencies_error="NOPE") # be sure we didn't make the config changes project2 = Project(dirname) assert project2.project_file.get_value('platforms', None) == ['linux-64', 'osx-64', 'win-64'] for env_spec in project2.env_specs.values(): assert env_spec.lock_set.enabled assert env_spec.lock_set.platforms == () with_directory_contents_completing_project_file({DEFAULT_PROJECT_LOCK_FILENAME: "locking_enabled: true\n"}, check) def test_add_platforms_nonexistent_environment(): def check(dirname): def attempt(): project = Project(dirname) status = project_ops.add_platforms(project, env_spec_name="not_an_env", platforms=['foo', 'bar']) assert not status assert [] == status.errors _with_conda_test(attempt) with_directory_contents_completing_project_file(dict(), check) def test_add_platforms_invalid_platform(): def check(dirname): def attempt(): project = Project(dirname) status = project_ops.add_platforms(project, env_spec_name=None, platforms=['invalid_platform']) assert not status assert 'Unable to load the project.' 
== status.status_description assert ["anaconda-project.yml: Platform name 'invalid_platform' is invalid (valid " "examples: linux-64, osx-64, win-64)"] == status.errors _with_conda_test(attempt) with_directory_contents_completing_project_file(dict(), check) def test_remove_platforms_from_all_environments(): def check(dirname): def attempt(): project = Project(dirname) for env_spec in project.env_specs.values(): assert env_spec.lock_set.enabled assert env_spec.lock_set.platforms == () assert ['linux-64', 'osx-32'] == list(project.project_file.get_value('platforms')) status = project_ops.remove_platforms(project, env_spec_name=None, platforms=['linux-64']) assert [] == status.errors assert status _with_conda_test(attempt) # be sure we really made the config changes project2 = Project(dirname) assert ['osx-32'] == list(project2.project_file.get_value('platforms')) with_directory_contents_completing_project_file( {DEFAULT_PROJECT_FILENAME: """ platforms: - linux-64 - osx-32 env_specs: hello: {} """, DEFAULT_PROJECT_LOCK_FILENAME: "locking_enabled: true\n"}, check) def test_remove_platforms_from_one_environment(): def check(dirname): def attempt(): project = Project(dirname) for env_spec in project.env_specs.values(): assert env_spec.lock_set.enabled assert env_spec.lock_set.platforms == () assert ['linux-64', 'osx-32'] == list(project.project_file.get_value('platforms')) assert ['linux-32', 'osx-32'] == list(project.project_file.get_value( ['env_specs', 'hello', 'platforms'], [])) status = project_ops.remove_platforms(project, env_spec_name='hello', platforms=['osx-32']) assert status assert [] == status.errors _with_conda_test(attempt) # be sure we really made the config changes project2 = Project(dirname) # remove_platforms is too simple to take this osx-32 out, but really it should, # similar to how remove_packages does it. 
assert ['linux-64', 'osx-32'] == list(project2.project_file.get_value('platforms')) # note that hello will still inherit the deps from the global platforms, # and that's fine assert ['linux-32'] == list(project2.project_file.get_value(['env_specs', 'hello', 'platforms'], [])) # be sure we didn't delete comments from global platforms section content = codecs.open(project2.project_file.filename, 'r', 'utf-8').read() assert '# this is a pre comment' in content assert '# this is a post comment' in content with_directory_contents_completing_project_file( {DEFAULT_PROJECT_FILENAME: """ platforms: # this is a pre comment - linux-64 # this is a post comment - osx-32 env_specs: hello: platforms: - linux-32 - osx-32 """, DEFAULT_PROJECT_LOCK_FILENAME: "locking_enabled: true\n"}, check) def test_remove_platforms_cannot_resolve_deps(): def check(dirname): def attempt(): project = Project(dirname, frontend=FakeFrontend()) for env_spec in project.env_specs.values(): assert env_spec.lock_set.enabled assert env_spec.lock_set.platforms == () assert ['linux-64', 'osx-32'] == list(project.project_file.get_value('platforms')) assert ['linux-32', 'osx-32'] == list(project.project_file.get_value( ['env_specs', 'hello', 'platforms'], [])) status = project_ops.remove_platforms(project, env_spec_name='hello', platforms=['linux-32']) assert status.errors == [] assert project.frontend.logs == [] assert status.status_description == "Error resolving dependencies for hello: NOPE." 
assert not status _with_conda_test(attempt, resolve_dependencies_error="NOPE") # be sure we didn't make the config changes project2 = Project(dirname) assert ['linux-64', 'osx-32'] == list(project2.project_file.get_value('platforms')) assert ['linux-32', 'osx-32'] == list(project2.project_file.get_value(['env_specs', 'hello', 'platforms'], [])) with_directory_contents_completing_project_file( {DEFAULT_PROJECT_FILENAME: """ platforms: - linux-64 - osx-32 env_specs: hello: platforms: - linux-32 - osx-32 """, DEFAULT_PROJECT_LOCK_FILENAME: "locking_enabled: true\n"}, check) def test_remove_platforms_from_nonexistent_environment(): def check(dirname): def attempt(): project = Project(dirname) assert ['linux-64'] == list(project.project_file.get_value('platforms')) status = project_ops.remove_platforms(project, env_spec_name='not_an_environment', platforms=['linux-64']) assert not status assert [] == status.errors assert "Environment spec not_an_environment doesn't exist." == status.status_description _with_conda_test(attempt) # be sure we didn't make the config changes project2 = Project(dirname) assert ['linux-64'] == list(project2.project_file.get_value('platforms')) with_directory_contents_completing_project_file({DEFAULT_PROJECT_FILENAME: """ platforms: - linux-64 """}, check) def test_remove_platforms_with_project_file_problems(): def check(dirname): project = Project(dirname) status = project_ops.remove_platforms(project, env_spec_name=None, platforms=['foo']) assert not status assert ["%s: variables section contains wrong value type 42, should be dict or list of requirements" % project.project_file.basename] == status.errors with_directory_contents_completing_project_file({DEFAULT_PROJECT_FILENAME: "variables:\n 42"}, check) def test_lock_nonexistent_environment(): def check(dirname): def attempt(): project = project_no_dedicated_env(dirname) status = project_ops.lock(project, env_spec_name="not_an_env") assert not status assert [] == status.errors assert 
"Environment spec not_an_env doesn't exist." == status.status_description _with_conda_test(attempt) with_directory_contents_completing_project_file(dict(), check) def test_unlock_nonexistent_environment(): def check(dirname): def attempt(): project = project_no_dedicated_env(dirname) status = project_ops.unlock(project, env_spec_name="not_an_env") assert not status assert [] == status.errors assert "Environment spec not_an_env doesn't exist." == status.status_description _with_conda_test(attempt) with_directory_contents_completing_project_file(dict(), check) def test_lock_broken_project(): def check(dirname): def attempt(): project = project_no_dedicated_env(dirname) status = project_ops.lock(project, env_spec_name=None) assert not status assert len(status.errors) > 0 _with_conda_test(attempt) with_directory_contents({DEFAULT_PROJECT_FILENAME: ""}, check) def test_unlock_broken_project(): def check(dirname): def attempt(): project = project_no_dedicated_env(dirname) status = project_ops.unlock(project, env_spec_name=None) assert not status assert len(status.errors) > 0 _with_conda_test(attempt) with_directory_contents({DEFAULT_PROJECT_FILENAME: ""}, check) def test_lock_and_update_and_unlock_all_envs(): def check(dirname): resolve_results = {'all': ['a=1.0=1']} def attempt(): filename = os.path.join(dirname, DEFAULT_PROJECT_LOCK_FILENAME) assert not os.path.isfile(filename) project = Project(dirname, frontend=FakeFrontend()) assert project.env_specs['foo'].platforms == () assert project.env_specs['bar'].platforms == () # Lock status = project_ops.lock(project, env_spec_name=None) assert [] == status.errors assert status # yapf: disable assert [ 'Set project platforms list to linux-64, osx-64, win-64', 'Updating locked dependencies for env spec bar...', 'Changes to locked dependencies for bar:', ' platforms:', '+ linux-64', '+ osx-64', '+ win-64', ' packages:', '+ all:', '+ a=1.0=1', 'Added locked dependencies for env spec bar to anaconda-project-lock.yml.', 
'Updating locked dependencies for env spec foo...', 'Changes to locked dependencies for foo:', ' platforms:', '+ linux-64', '+ osx-64', '+ win-64', ' packages:', '+ all:', '+ a=1.0=1', 'Added locked dependencies for env spec foo to anaconda-project-lock.yml.' ] == project.frontend.logs # yapf: enable assert os.path.isfile(filename) assert ('a=1.0=1', ) == project.env_specs['foo'].lock_set.package_specs_for_current_platform assert ('a=1.0=1', ) == project.env_specs['bar'].lock_set.package_specs_for_current_platform assert ('a=1.0=1', ) == project.env_specs['foo'].conda_packages_for_create # 'b' gets dropped here since it wasn't in the lock set assert ('a=1.0=1', ) == project.env_specs['bar'].conda_packages_for_create assert project.env_specs['foo'].platforms == conda_api.default_platforms assert project.env_specs['bar'].platforms == conda_api.default_platforms # we should have set the global platforms, not in each env spec assert conda_api.default_platforms == project.project_file.get_value('platforms') assert project.project_file.get_value(['env_specs', 'foo', 'platforms'], None) is None assert project.project_file.get_value(['env_specs', 'bar', 'platforms'], None) is None # Lock again (idempotent) project.frontend.reset() status = project_ops.lock(project, env_spec_name=None) assert [] == status.errors assert status # Update (does nothing in this case) project.frontend.reset() status = project_ops.update(project, env_spec_name=None) assert [] == status.errors assert status assert ["Updating locked dependencies for env spec bar...", "Locked dependencies for env spec bar are already up to date.", "Updating locked dependencies for env spec foo...", "Locked dependencies for env spec foo are already up to date."] == project.frontend.logs assert status.status_description == "Update complete." 
# Update (does something after tweaking resolve results) project.frontend.reset() resolve_results['all'] = ['a=2.0=0'] status = project_ops.update(project, env_spec_name=None) assert [] == status.errors assert status assert status.status_description == "Update complete." assert ('a=2.0=0', ) == project.env_specs['foo'].conda_packages_for_create assert '- a=1.0=1' in project.frontend.logs assert '+ a=2.0=0' in project.frontend.logs # Unlock project.frontend.reset() status = project_ops.unlock(project, env_spec_name=None) assert [] == status.errors assert status assert 'Dependency locking is now disabled.' == status.status_description assert project.env_specs['foo'].lock_set.disabled assert project.env_specs['bar'].lock_set.disabled assert ('a', ) == project.env_specs['foo'].conda_packages_for_create assert ('b', ) == project.env_specs['bar'].conda_packages_for_create _with_conda_test(attempt, resolve_dependencies=resolve_results) with_directory_contents( {DEFAULT_PROJECT_FILENAME: """ name: locktest env_specs: foo: packages: - a bar: packages: - b """}, check) def test_lock_and_unlock_single_env(): def check(dirname): def attempt(): filename = os.path.join(dirname, DEFAULT_PROJECT_LOCK_FILENAME) assert not os.path.isfile(filename) project = Project(dirname, frontend=FakeFrontend()) assert project.env_specs['foo'].platforms == () assert project.env_specs['bar'].platforms == ('osx-64', ) # Lock status = project_ops.lock(project, env_spec_name='foo') assert [] == status.errors assert status # yapf: disable assert ['Set platforms for foo to linux-64, osx-64, win-64', 'Updating locked dependencies for env spec foo...', 'Changes to locked dependencies for foo:', ' platforms:', '+ linux-64', '+ osx-64', '+ win-64', ' packages:', '+ all:', '+ a=1.0=1', 'Added locked dependencies for env spec foo to anaconda-project-lock.yml.'] == project.frontend.logs # yapf: enable assert 'Project dependencies are locked.' 
== status.status_description assert os.path.isfile(filename) foo_lock_set = project.env_specs['foo'].lock_set assert ('a=1.0=1', ) == foo_lock_set.package_specs_for_current_platform assert foo_lock_set.env_spec_hash == 'b7f3266407fe0056da25fc23764bb7643c3560be' assert project.env_specs['bar'].lock_set.disabled assert ('a=1.0=1', ) == project.env_specs['foo'].conda_packages_for_create assert ('b', ) == project.env_specs['bar'].conda_packages_for_create assert project.env_specs['foo'].platforms == conda_api.default_platforms assert project.env_specs['bar'].platforms == ('osx-64', ) # we should NOT have set the global platforms assert project.project_file.get_value('platforms', None) is None assert conda_api.default_platforms == project.project_file.get_value( ['env_specs', 'foo', 'platforms'], None) assert ['osx-64', ] == project.project_file.get_value(['env_specs', 'bar', 'platforms'], None) # Locking a second time is a no-op project.frontend.reset() status = project_ops.lock(project, env_spec_name='foo') assert [] == status.errors assert status assert ['Env spec foo is already locked.'] == project.frontend.logs assert 'Project dependencies are locked.' == status.status_description # Update (does nothing in this case) project.frontend.reset() status = project_ops.update(project, env_spec_name='foo') assert [] == status.errors assert status assert ["Updating locked dependencies for env spec foo...", "Locked dependencies for env spec foo are already up to date."] == project.frontend.logs assert 'Update complete.' 
== status.status_description # Now add a package (should change the hash) project.frontend.reset() status = project_ops.add_packages(project, 'foo', packages='q', channels=[]) assert [] == status.errors assert status assert [] == project.frontend.logs assert status.status_description.startswith("Using Conda environment") foo_lock_set = project.env_specs['foo'].lock_set assert ('a=1.0=1', ) == foo_lock_set.package_specs_for_current_platform assert foo_lock_set.env_spec_hash == 'fb71df6e984eb3330f442f1e9a7726aaa698ca59' # Now unlock project.frontend.reset() status = project_ops.unlock(project, env_spec_name='foo') assert [] == status.errors assert status assert [] == project.frontend.logs assert 'Dependency locking is now disabled for env spec foo.' == status.status_description assert project.env_specs['foo'].lock_set.disabled assert project.env_specs['bar'].lock_set.disabled assert ('a', 'q') == project.env_specs['foo'].conda_packages_for_create assert ('b', ) == project.env_specs['bar'].conda_packages_for_create _with_conda_test(attempt, resolve_dependencies={'all': ['a=1.0=1']}) with_directory_contents( {DEFAULT_PROJECT_FILENAME: """ name: locktest env_specs: foo: packages: - a bar: platforms: [osx-64] packages: - b """}, check) def test_locking_with_missing_lock_set_does_an_update(): def check(dirname): def attempt(): filename = os.path.join(dirname, DEFAULT_PROJECT_LOCK_FILENAME) assert os.path.isfile(filename) project = Project(dirname, frontend=FakeFrontend()) assert project.env_specs['foo'].platforms == ('linux-64', 'osx-64', 'win-64') # lock set should be enabled yet missing and empty assert project.env_specs['foo'].lock_set.enabled assert project.env_specs['foo'].lock_set.missing # Lock status = project_ops.lock(project, env_spec_name='foo') assert [] == status.errors assert status # yapf: disable assert ['Updating locked dependencies for env spec foo...', 'Changes to locked dependencies for foo:', ' platforms:', '+ linux-64', '+ osx-64', '+ win-64', ' 
packages:', '+ all:', '+ a=1.0=1', 'Added locked dependencies for env spec foo to anaconda-project-lock.yml.'] == project.frontend.logs # yapf: enable assert 'Project dependencies are locked.' == status.status_description assert os.path.isfile(filename) assert project.lock_file.get_value(['env_specs', 'foo']) is not None foo_lock_set = project.env_specs['foo'].lock_set assert ('a=1.0=1', ) == foo_lock_set.package_specs_for_current_platform assert foo_lock_set.env_spec_hash == 'b7f3266407fe0056da25fc23764bb7643c3560be' assert project.env_specs['foo'].lock_set.enabled assert not project.env_specs['foo'].lock_set.missing _with_conda_test(attempt, resolve_dependencies={'all': ['a=1.0=1']}) with_directory_contents( {DEFAULT_PROJECT_FILENAME: """ name: locktest platforms: [linux-64,osx-64,win-64] env_specs: foo: packages: - a """, DEFAULT_PROJECT_LOCK_FILENAME: """ locking_enabled: true # No lock set in here! """}, check) def test_update_changes_only_the_hash(): def check(dirname): def attempt(): project = Project(dirname, frontend=FakeFrontend()) foo_lock_set = project.env_specs['foo'].lock_set assert ('a=1.0=1', ) == foo_lock_set.package_specs_for_current_platform assert foo_lock_set.env_spec_hash == 'old' assert ('a=1.0=1', ) == project.env_specs['foo'].conda_packages_for_create # Update status = project_ops.update(project, env_spec_name='foo') assert [] == status.errors assert status assert ['Updating locked dependencies for env spec foo...', 'Updated hash for env spec foo to 9990ec43408f9593030a3a136c916022189f04b3 in ' 'anaconda-project-lock.yml.'] == project.frontend.logs assert 'Update complete.' 
== status.status_description foo_lock_set = project.env_specs['foo'].lock_set assert ('a=1.0=1', ) == foo_lock_set.package_specs_for_current_platform assert foo_lock_set.env_spec_hash == '9990ec43408f9593030a3a136c916022189f04b3' _with_conda_test(attempt, resolve_dependencies={'all': ['a=1.0=1']}) with_directory_contents( {DEFAULT_PROJECT_FILENAME: """ name: locktest platforms: [linux-32,linux-64,osx-64,win-32,win-64] env_specs: foo: packages: - a """, DEFAULT_PROJECT_LOCK_FILENAME: """ locking_enabled: true env_specs: foo: platforms: [linux-32,linux-64,osx-64,win-32,win-64] env_spec_hash: old packages: all: ['a=1.0=1'] """}, check) def test_lock_conda_error(): def check(dirname): def attempt(): filename = os.path.join(dirname, DEFAULT_PROJECT_LOCK_FILENAME) assert not os.path.isfile(filename) project = Project(dirname, frontend=FakeFrontend()) status = project_ops.lock(project, env_spec_name=None) assert [] == status.errors assert not status assert "test deviation" == status.status_description assert not os.path.isfile(filename) _with_conda_test(attempt, missing_packages=('a', 'b'), resolve_dependencies={'all': ['a=1.0=1']}, fix_works=False) with_directory_contents( {DEFAULT_PROJECT_FILENAME: """ name: locktest platforms: [linux-32,linux-64,osx-64,win-32,win-64] env_specs: foo: packages: - a bar: packages: - b """}, check) def test_lock_resolve_dependencies_error(monkeypatch): def check(dirname): def attempt(): filename = os.path.join(dirname, DEFAULT_PROJECT_LOCK_FILENAME) assert not os.path.isfile(filename) project = Project(dirname, frontend=FakeFrontend()) status = project_ops.lock(project, env_spec_name=None) assert [] == status.errors assert not status assert 'Nope on resolve' in status.status_description assert not os.path.isfile(filename) _with_conda_test(attempt, missing_packages=('a', 'b'), resolve_dependencies_error="Nope on resolve") with_directory_contents( {DEFAULT_PROJECT_FILENAME: """ name: locktest platforms: 
[linux-32,linux-64,osx-64,win-32,win-64] env_specs: foo: packages: - a bar: packages: - b """}, check) def test_unlock_conda_error(): def check(dirname): def attempt(): filename = os.path.join(dirname, DEFAULT_PROJECT_LOCK_FILENAME) assert os.path.isfile(filename) project = Project(dirname, frontend=FakeFrontend()) assert project.env_specs['foo'].lock_set.enabled assert project.env_specs['bar'].lock_set.enabled status = project_ops.unlock(project, env_spec_name=None) assert [] == status.errors assert not status assert "test deviation" == status.status_description assert os.path.isfile(filename) assert project.env_specs['foo'].lock_set.enabled assert project.env_specs['bar'].lock_set.enabled _with_conda_test(attempt, missing_packages=('a', 'b'), resolve_dependencies={'all': ['a=1.0=1']}, fix_works=False) with_directory_contents( {DEFAULT_PROJECT_FILENAME: """ name: locktest platforms: [linux-32,linux-64,osx-64,win-32,win-64] env_specs: foo: packages: - a bar: packages: - b """, DEFAULT_PROJECT_LOCK_FILENAME: """ locking_enabled: true env_specs: foo: locked: true platforms: [linux-32,linux-64,osx-64,win-32,win-64] packages: all: - c bar: locked: true platforms: [linux-32,linux-64,osx-64,win-32,win-64] packages: all: - d """}, check) def test_update_unlocked_envs(): def check(dirname): resolve_results = {'all': ['a=1.0=1']} def attempt(): filename = os.path.join(dirname, DEFAULT_PROJECT_LOCK_FILENAME) assert not os.path.isfile(filename) project = Project(dirname, frontend=FakeFrontend()) # all lock sets disabled for env in project.env_specs.values(): assert env.lock_set.disabled # Update (should install packages but not make a lock file) status = project_ops.update(project, env_spec_name=None) assert [] == status.errors assert status assert status.status_description == "Update complete." 
assert project.frontend.logs == [ 'Updating locked dependencies for env spec bar...', 'Updated installed dependencies for bar.', 'Updating locked dependencies for env spec foo...', 'Updated installed dependencies for foo.' ] # no project lock file created assert not os.path.isfile(filename) # all lock sets still disabled for env in project.env_specs.values(): assert env.lock_set.disabled _with_conda_test(attempt, resolve_dependencies=resolve_results) with_directory_contents( {DEFAULT_PROJECT_FILENAME: """ name: locktest platforms: [linux-32,linux-64,osx-64,win-32,win-64] env_specs: foo: packages: - a bar: packages: - b """}, check) def test_update_empty_lock_sets(): def check(dirname): resolve_results = {'all': ['a=1.0=1']} def attempt(): project = Project(dirname, frontend=FakeFrontend()) # all lock sets enabled but empty for env in project.env_specs.values(): assert env.lock_set.enabled assert env.lock_set.platforms == () assert not env.lock_set.supports_current_platform # Update status = project_ops.update(project, env_spec_name=None) assert [] == status.errors assert status assert status.status_description == "Update complete." # yapf: disable assert project.frontend.logs == [ 'Updating locked dependencies for env spec bar...', 'Changes to locked dependencies for bar:', ' platforms:', '+ linux-64', '+ osx-64', '+ win-64', ' packages:', '+ all:', '+ a=1.0=1', 'Updated locked dependencies for env spec bar in anaconda-project-lock.yml.', 'Updating locked dependencies for env spec foo...', 'Changes to locked dependencies for foo:', ' platforms:', '+ linux-64', '+ osx-64', '+ win-64', ' packages:', '+ all:', '+ a=1.0=1', 'Updated locked dependencies for env spec foo in anaconda-project-lock.yml.' 
] # yapf: enable for env in project.env_specs.values(): assert env.lock_set.enabled assert env.lock_set.supports_current_platform assert env.lock_set.platforms == conda_api.default_platforms assert env.lock_set.package_specs_for_current_platform == ('a=1.0=1', ) _with_conda_test(attempt, resolve_dependencies=resolve_results) with_directory_contents( {DEFAULT_PROJECT_FILENAME: """ name: locktest platforms: [linux-64,osx-64,win-64] env_specs: foo: packages: - a bar: packages: - b """, DEFAULT_PROJECT_LOCK_FILENAME: "locking_enabled: true\n"}, check) def test_export_env_spec(): def check(dirname): project = project_no_dedicated_env(dirname) exported = os.path.join(dirname, "exported.yml") status = project_ops.export_env_spec(project, name='default', filename=exported) assert status assert status.status_description == ('Exported environment spec default to %s.' % exported) with_directory_contents_completing_project_file( { "anaconda-project.yml": """ env_specs: default: packages: - blah channels: - boo """ }, check) def test_export_nonexistent_env_spec(): def check(dirname): project = project_no_dedicated_env(dirname) exported = os.path.join(dirname, "exported.yml") status = project_ops.export_env_spec(project, name='bar', filename=exported) assert not status assert not os.path.exists(exported) assert status.status_description == "Environment spec bar doesn't exist." 
with_directory_contents_completing_project_file( { "anaconda-project.yml": """ env_specs: default: packages: - blah channels: - boo """ }, check) def test_export_env_spec_io_error(monkeypatch): def check(dirname): project = project_no_dedicated_env(dirname) exported = os.path.join(dirname, "exported.yml") def mock_atomic_replace(*args, **kwargs): raise IOError("NOOO") monkeypatch.setattr('anaconda_project.yaml_file._atomic_replace', mock_atomic_replace) status = project_ops.export_env_spec(project, name='default', filename=exported) assert not status assert not os.path.exists(exported) assert status.status_description == ("Failed to save %s: NOOO." % exported) with_directory_contents_completing_project_file( { "anaconda-project.yml": """ env_specs: default: packages: - blah channels: - boo """ }, check) def test_export_env_spec_broken_project(monkeypatch): def check(dirname): project = project_no_dedicated_env(dirname) status = project_ops.export_env_spec(project, name='default', filename='foo') assert not status assert status.status_description == 'Unable to load the project.' 
    with_directory_contents({DEFAULT_PROJECT_FILENAME: """
name: broken
"""}, check)


def _monkeypatch_can_connect_to_socket_on_standard_redis_port(monkeypatch):
    # Pretend something is listening on the default redis port (6379) so the
    # redis service requirement can be satisfied without a real server.
    # Returns the list that records each connection attempt's arguments.
    from anaconda_project.requirements_registry.network_util import can_connect_to_socket as real_can_connect_to_socket
    can_connect_args_list = []

    def mock_can_connect_to_socket(host, port, timeout_seconds=0.5):
        can_connect_args = dict()
        can_connect_args['host'] = host
        can_connect_args['port'] = port
        can_connect_args['timeout_seconds'] = timeout_seconds
        can_connect_args_list.append(can_connect_args)
        if port == 6379:
            return True
        else:
            # any other port falls through to the real network check
            return real_can_connect_to_socket(host, port, timeout_seconds)

    monkeypatch.setattr("anaconda_project.requirements_registry.network_util.can_connect_to_socket",
                        mock_can_connect_to_socket)

    return can_connect_args_list


def test_add_service(monkeypatch):
    # Adding a redis service should record it in the project file.
    def check(dirname):
        _monkeypatch_can_connect_to_socket_on_standard_redis_port(monkeypatch)
        project = project_no_dedicated_env(dirname)
        status = project_ops.add_service(project, None, service_type='redis')

        assert status
        assert isinstance(project.frontend.logs, list)
        assert [] == status.errors

        # be sure service was added to the file and saved
        project2 = project_no_dedicated_env(dirname)
        assert 'redis' == project2.project_file.get_value(['services', 'REDIS_URL'])

    with_directory_contents_completing_project_file(dict(), check)


def test_add_service_with_env_spec(monkeypatch):
    # Passing an env spec name scopes the service to that env spec's section.
    def check(dirname):
        _monkeypatch_can_connect_to_socket_on_standard_redis_port(monkeypatch)
        project = project_no_dedicated_env(dirname)
        status = project_ops.add_service(project, 'myspec', service_type='redis')

        assert status
        assert isinstance(project.frontend.logs, list)
        assert [] == status.errors

        # be sure service was added to the file and saved
        project2 = project_no_dedicated_env(dirname)
        assert 'redis' == project2.project_file.get_value(['env_specs', 'myspec', 'services', 'REDIS_URL'])

    with_directory_contents_completing_project_file(
        {DEFAULT_PROJECT_FILENAME: """
env_specs:
  default:
    packages: [python]
    channels: []
  myspec:
    packages: [python]
    channels: []
"""}, check)


def test_add_service_nondefault_variable_name(monkeypatch):
    # variable_name overrides the default REDIS_URL variable.
    def check(dirname):
        _monkeypatch_can_connect_to_socket_on_standard_redis_port(monkeypatch)
        project = project_no_dedicated_env(dirname)
        status = project_ops.add_service(project, None, service_type='redis', variable_name='MY_SPECIAL_REDIS')

        assert status
        assert isinstance(project.frontend.logs, list)
        assert [] == status.errors

        # be sure service was added to the file and saved
        project2 = project_no_dedicated_env(dirname)
        assert 'redis' == project2.project_file.get_value(['services', 'MY_SPECIAL_REDIS'])

    with_directory_contents_completing_project_file(dict(), check)


def test_add_service_with_project_file_problems():
    # A malformed project file should block the add and leave the file unchanged.
    def check(dirname):
        project = Project(dirname, frontend=FakeFrontend())
        status = project_ops.add_service(project, None, service_type='redis')

        assert not status
        assert ["%s: variables section contains wrong value type 42, should be dict or list of requirements" %
                project.project_file.basename] == status.errors

        # be sure service was NOT added to the file
        project2 = Project(dirname, frontend=FakeFrontend())
        assert project2.project_file.get_value(['services', 'REDIS_URL']) is None

        # should have been dropped from the original project object also
        assert project.project_file.get_value(['services', 'REDIS_URL']) is None

    with_directory_contents_completing_project_file({DEFAULT_PROJECT_FILENAME: "variables:\n 42"}, check)


def test_add_service_already_exists(monkeypatch):
    # Re-adding a service that's already configured is a no-op success.
    def check(dirname):
        _monkeypatch_can_connect_to_socket_on_standard_redis_port(monkeypatch)
        project = project_no_dedicated_env(dirname)
        status = project_ops.add_service(project, None, service_type='redis')

        assert status
        assert isinstance(project.frontend.logs, list)
        assert [] == status.errors

        # be sure service was added to the file and saved
        project2 = Project(dirname, frontend=FakeFrontend())
        assert 'redis' == project2.project_file.get_value(['services', 'REDIS_URL'])

    with_directory_contents_completing_project_file(
        {DEFAULT_PROJECT_FILENAME: """
services:
  REDIS_URL: redis
"""}, check)


def test_add_service_already_exists_with_different_type(monkeypatch):
    # The existing REDIS_URL service has an unknown type, so the add fails.
    def check(dirname):
        _monkeypatch_can_connect_to_socket_on_standard_redis_port(monkeypatch)
        project = Project(dirname, frontend=FakeFrontend())
        status = project_ops.add_service(project, None, service_type='redis')
        assert not status
        # Once we have >1 known service types, we should change this test
        # to use the one other than redis and then this error will change.
        assert ["Service REDIS_URL has an unknown type 'foo'."] == status.errors

    with_directory_contents_completing_project_file(
        {DEFAULT_PROJECT_FILENAME: """
services:
  REDIS_URL: foo
"""}, check)


def test_add_service_already_exists_as_non_service(monkeypatch):
    # REDIS_URL already used as a plain variable conflicts with the service.
    def check(dirname):
        _monkeypatch_can_connect_to_socket_on_standard_redis_port(monkeypatch)
        project = Project(dirname, frontend=FakeFrontend())
        status = project_ops.add_service(project, None, service_type='redis')
        assert not status
        assert ['Variable REDIS_URL is already in use.'] == status.errors

    with_directory_contents_completing_project_file(
        {DEFAULT_PROJECT_FILENAME: """
variables:
  REDIS_URL: something
"""}, check)


def test_add_service_bad_service_type(monkeypatch):
    # Unknown service types are rejected with a list of known types.
    def check(dirname):
        _monkeypatch_can_connect_to_socket_on_standard_redis_port(monkeypatch)
        project = Project(dirname, frontend=FakeFrontend())
        status = project_ops.add_service(project, None, service_type='not_a_service')
        assert not status
        assert ["Unknown service type 'not_a_service', we know about: redis"] == status.errors

    with_directory_contents_completing_project_file(dict(), check)


def test_remove_service(monkeypatch):
    # Removing a configured service deletes it from the project file.
    def check(dirname):
        project = project_no_dedicated_env(dirname)
        status = project_ops.remove_service(project, None, variable_name='redis')
        assert status
        assert [] == status.errors

        project2 = project_no_dedicated_env(dirname)
        assert project2.project_file.get_value(['services', 'REDIS_URL']) is None

    with_directory_contents_completing_project_file(
        {DEFAULT_PROJECT_FILENAME: """
services:
  REDIS_URL: redis
"""}, check)


def test_remove_service_with_prepare(monkeypatch):
    # Removing a service after prepare() should also clean up the prepared state.
    def check(dirname):
        _monkeypatch_can_connect_to_socket_on_standard_redis_port(monkeypatch)
        project = project_no_dedicated_env(dirname)
        result = prepare.prepare_without_interaction(project)
        assert result
        status = project_ops.remove_service(project, None, variable_name='redis', prepare_result=result)
        assert status
        assert [] == status.errors

        project2 = project_no_dedicated_env(dirname)
        assert project2.project_file.get_value(['services', 'REDIS_URL']) is None

    with_directory_contents_completing_project_file(
        {DEFAULT_PROJECT_FILENAME: """
services:
  REDIS_URL: redis
"""}, check)


def test_remove_service_with_env_spec(monkeypatch):
    # A service scoped to an env spec is removed from that env spec's section.
    def check(dirname):
        config_path = ['env_specs', 'myspec', 'services', 'REDIS_URL']
        project = project_no_dedicated_env(dirname)
        assert project.project_file.get_value(config_path) == 'redis'
        status = project_ops.remove_service(project, 'myspec', variable_name='redis')
        assert status
        assert [] == status.errors

        project2 = project_no_dedicated_env(dirname)
        assert project2.project_file.get_value(config_path) is None

    with_directory_contents_completing_project_file(
        {DEFAULT_PROJECT_FILENAME: """
env_specs:
  default:
    packages: [python]
    channels: []
  myspec:
    packages: [python]
    channels: []
    services:
      REDIS_URL: redis
"""}, check)


def check_cleaned(dirname, envs_dirname="envs"):
    # Shared body for the clean() tests: prepare two env specs, create fake
    # service debris, then verify project_ops.clean() removes everything.
    project = Project(dirname, frontend=FakeFrontend())

    result = prepare.prepare_without_interaction(project, env_spec_name='foo')
    assert result
    envs_dir = os.path.join(dirname, envs_dirname)
    assert os.path.isdir(os.path.join(envs_dir, "foo"))

    # prepare again with 'bar' this time
    result = prepare.prepare_without_interaction(project, env_spec_name='bar')
    assert result
    bar_dir = os.path.join(dirname, envs_dirname, "bar")
    assert os.path.isdir(bar_dir)
    # we don't really have a service in the test project file because
    # redis-server doesn't work on Windows and it's good to run this
    # test on Windows. So create some fake junk in services dir.
    services_dir = os.path.join(dirname, "services")
    os.makedirs(os.path.join(services_dir, "leftover-debris"))

    status = project_ops.clean(project, result)
    assert status
    assert status.status_description == "Cleaned."
    assert project.frontend.logs == [("Deleted environment files in %s." % bar_dir), ("Removing %s." % services_dir),
                                     ("Removing %s." % envs_dir)]
    assert status.errors == []

    assert not os.path.isdir(os.path.join(dirname, envs_dirname))
    assert not os.path.isdir(os.path.join(dirname, "services"))


def test_clean(monkeypatch):
    # Happy-path clean with the default "envs" directory.
    def mock_create(prefix, pkgs, channels, stdout_callback, stderr_callback):
        os.makedirs(os.path.join(prefix, "conda-meta"))

    monkeypatch.setattr('anaconda_project.internal.conda_api.create', mock_create)

    with_directory_contents_completing_project_file(
        {DEFAULT_PROJECT_FILENAME: """
env_specs:
  foo: {}
  bar: {}
"""}, check_cleaned)


def test_clean_from_environ(monkeypatch):
    # Clean should honor ANACONDA_PROJECT_ENVS_PATH for the envs location.
    def mock_create(prefix, pkgs, channels, stdout_callback, stderr_callback):
        os.makedirs(os.path.join(prefix, "conda-meta"))

    monkeypatch.setattr('anaconda_project.internal.conda_api.create', mock_create)

    def check(dirname):
        os.environ['ANACONDA_PROJECT_ENVS_PATH'] = os.path.join(dirname, "some_random_path")
        res = check_cleaned(dirname, "some_random_path")
        # don't leak the env var into other tests
        os.environ.pop('ANACONDA_PROJECT_ENVS_PATH')
        return res

    with_directory_contents_completing_project_file(
        {DEFAULT_PROJECT_FILENAME: """
env_specs:
  foo: {}
  bar: {}
"""}, check)


def test_clean_failed_delete(monkeypatch):
    # When rmtree fails, clean() reports errors and leaves directories in place.
    def mock_create(prefix, pkgs, channels, stdout_callback, stderr_callback):
        os.makedirs(os.path.join(prefix, "conda-meta"))

    monkeypatch.setattr('anaconda_project.internal.conda_api.create', mock_create)

    def check(dirname):
        project = Project(dirname, frontend=FakeFrontend())

        result = prepare.prepare_without_interaction(project, env_spec_name='foo')
        assert result
        envs_dir = os.path.join(dirname, "envs")
        assert os.path.isdir(os.path.join(envs_dir, "foo"))

        # prepare again with 'bar' this time
        project.frontend.reset()
        result = prepare.prepare_without_interaction(project, env_spec_name='bar')
        assert result
        bar_dir = os.path.join(dirname, "envs", "bar")
        assert os.path.isdir(bar_dir)

        # we don't really have a service in the test project file because
        # redis-server doesn't work on Windows and it's good to run this
        # test on Windows. So create some fake junk in services dir.
        services_dir = os.path.join(dirname, "services")
        os.makedirs(os.path.join(services_dir, "leftover-debris"))

        def mock_rmtree(path, onerror=None):
            raise IOError("No rmtree here")

        monkeypatch.setattr('shutil.rmtree', mock_rmtree)

        project.frontend.reset()
        status = project_ops.clean(project, result)
        assert not status
        assert status.status_description == "Failed to clean everything up."
        assert project.frontend.logs == [("Removing %s." % services_dir), ("Removing %s." % envs_dir)]
        assert status.errors == [("Failed to remove environment files in %s: No rmtree here." % bar_dir),
                                 ("Error removing %s: No rmtree here." % services_dir),
                                 ("Error removing %s: No rmtree here." % envs_dir)]

        assert os.path.isdir(os.path.join(dirname, "envs"))
        assert os.path.isdir(os.path.join(dirname, "services"))

        # so with_directory_contents_completing_project_file can remove our tmp dir
        monkeypatch.undo()

    with_directory_contents_completing_project_file(
        {DEFAULT_PROJECT_FILENAME: """
env_specs:
  foo: {}
  bar: {}
"""}, check)


def test_clean_environ_failed_delete(monkeypatch):
    # Same as test_clean_failed_delete but with ANACONDA_PROJECT_ENVS_PATH set.
    def mock_create(prefix, pkgs, channels, stdout_callback, stderr_callback):
        os.makedirs(os.path.join(prefix, "conda-meta"))

    monkeypatch.setattr('anaconda_project.internal.conda_api.create', mock_create)

    def check(dirname):
        envs_dir = os.environ['ANACONDA_PROJECT_ENVS_PATH'] = os.path.join(dirname, "some_random_failed_path")
        project = Project(dirname, frontend=FakeFrontend())

        result = prepare.prepare_without_interaction(project, env_spec_name='foo')
        assert result
        assert os.path.isdir(os.path.join(envs_dir, "foo"))

        # prepare again with 'bar' this time
        project.frontend.reset()
        result = prepare.prepare_without_interaction(project, env_spec_name='bar')
        assert result
        bar_dir = os.path.join(envs_dir, "bar")
        assert os.path.isdir(bar_dir)

        # we don't really have a service in the test project file because
        # redis-server doesn't work on Windows and it's good to run this
        # test on Windows. So create some fake junk in services dir.
        services_dir = os.path.join(dirname, "services")
        os.makedirs(os.path.join(services_dir, "leftover-debris"))

        def mock_rmtree(path, onerror=None):
            raise IOError("No rmtree here")

        monkeypatch.setattr('shutil.rmtree', mock_rmtree)

        project.frontend.reset()
        status = project_ops.clean(project, result)
        assert not status
        assert status.status_description == "Failed to clean everything up."
        assert project.frontend.logs == [("Removing %s." % services_dir), ("Removing %s." % envs_dir)]
        assert status.errors == [("Failed to remove environment files in %s: No rmtree here." % bar_dir),
                                 ("Error removing %s: No rmtree here." % services_dir),
                                 ("Error removing %s: No rmtree here." % envs_dir)]

        assert os.path.isdir(os.path.join(envs_dir))
        assert os.path.isdir(os.path.join(dirname, "services"))

        # so with_directory_contents_completing_project_file can remove our tmp dir
        monkeypatch.undo()

        # clean environ
        os.environ.pop('ANACONDA_PROJECT_ENVS_PATH')

    with_directory_contents_completing_project_file(
        {DEFAULT_PROJECT_FILENAME: """
env_specs:
  foo: {}
  bar: {}
"""}, check)


def _strip_prefixes(names):
    # archive entries are all under "archivedproj/"; drop that prefix
    return list([name[len("archivedproj/"):] for name in names])


def _assert_zip_contains(zip_path, filenames):
    # compare the archive's member list (order-insensitive) to filenames
    with zipfile.ZipFile(zip_path, mode='r') as zf:
        assert sorted(_strip_prefixes(zf.namelist())) == sorted(filenames)


def _assert_tar_contains(tar_path, filenames):
    # compare the tarball's member list (order-insensitive) to filenames
    with tarfile.open(tar_path, mode='r') as tf:
        assert sorted(_strip_prefixes(tf.getnames())) == sorted(filenames)


def _relative_to(root, path):
    # strip root + separator from path; path must live under root
    prefix = root + os.sep
    assert path.startswith(prefix)
    return path[len(prefix):]


def _recursive_list(dir_path):
    # yield every file, plus empty directories, relative to dir_path
    for root, directories, filenames in os.walk(dir_path):
        for dir in directories:
            if not os.listdir(os.path.join(root, dir)):
                yield _relative_to(dir_path, os.path.join(root, dir))
        for filename in filenames:
            yield _relative_to(dir_path, os.path.join(root, filename))


def _assert_dir_contains(dir_path, filenames):
    # normalize Windows separators before comparing
    assert sorted([filename.replace("\\", "/") for filename in _recursive_list(dir_path)]) == sorted(filenames)


def test_archive_zip():
    # Archive to zip, excluding envs/ and services/; overwrite must also work.
    def archivetest(archive_dest_dir):
        archivefile = os.path.join(archive_dest_dir, "foo.zip")

        def check(dirname):
            # be sure we ignore these
            os.makedirs(os.path.join(dirname, "services"))
            os.makedirs(os.path.join(dirname, "envs"))
            project = project_no_dedicated_env(dirname)
            status = project_ops.archive(project, archivefile)
            assert status
            assert os.path.exists(archivefile)
            _assert_zip_contains(archivefile, ['a/b/c/d.py', 'a/b/c/e.py', 'emptydir/', 'foo.py',
                                               'anaconda-project.yml', 'anaconda-project-local.yml'])

            # overwriting should work
            status = project_ops.archive(project, archivefile)
            assert status
            assert os.path.exists(archivefile)
            _assert_zip_contains(archivefile, ['a/b/c/d.py', 'a/b/c/e.py', 'emptydir/', 'foo.py',
                                               'anaconda-project.yml', 'anaconda-project-local.yml'])

        with_directory_contents_completing_project_file(
            {DEFAULT_PROJECT_FILENAME: """
name: archivedproj
services:
  REDIS_URL: redis
""",
             "foo.py": "print('hello')\n",
             "emptydir": None,
             "a/b/c/d.py": "",
             "a/b/c/e.py": ""}, check)

    with_directory_contents_completing_project_file(dict(), archivetest)


def test_archive_unlocked_warning():
    # Archiving with some env specs unlocked should log a warning naming them.
    def archivetest(archive_dest_dir):
        archivefile = os.path.join(archive_dest_dir, "foo.zip")

        def check(dirname):
            project = project_no_dedicated_env(dirname)
            assert [] == project.problems
            assert project.env_specs['foo'].lock_set.enabled
            assert project.env_specs['bar'].lock_set.disabled
            status = project_ops.archive(project, archivefile)
            assert status
            assert os.path.exists(archivefile)

            # yapf: disable
            assert [
                ' added ' + os.path.join("archivedproj", "anaconda-project-local.yml"),
                ' added ' + os.path.join("archivedproj", "anaconda-project-lock.yml"),
                ' added ' + os.path.join("archivedproj", "anaconda-project.yml"),
                ' added ' + os.path.join("archivedproj", "foo.py"),
                'Warning: env specs are not locked, which means they may not work '
                'consistently for others or when deployed.',
                " Consider using the 'anaconda-project lock' command to lock the project.",
                ' Unlocked env specs are: bar'
            ] == project.frontend.logs
            # yapf: enable

        with_directory_contents_completing_project_file(
            {DEFAULT_PROJECT_FILENAME: """
name: archivedproj
env_specs:
  foo:
    packages: []
  bar:
    packages: []
""",
             DEFAULT_PROJECT_LOCK_FILENAME: """
locking_enabled: false
env_specs:
  foo:
    locked: true
    platforms: [linux-32,linux-64,osx-64,win-32,win-64]
    packages:
      all: []
""",
             "foo.py": "print('hello')\n"}, check)

    with_directory_contents_completing_project_file(dict(), archivetest)


def test_archive_tar():
    # Same as test_archive_zip but for an uncompressed tarball.
    def archivetest(archive_dest_dir):
        archivefile = os.path.join(archive_dest_dir, "foo.tar")

        def check(dirname):
            # be sure we ignore these
            os.makedirs(os.path.join(dirname, "services"))
            os.makedirs(os.path.join(dirname, "envs"))
            project = project_no_dedicated_env(dirname)
            status = project_ops.archive(project, archivefile)
            assert status
            assert os.path.exists(archivefile)
            _assert_tar_contains(archivefile, ['a/b/c/d.py', 'a/b/c/e.py', 'emptydir', 'foo.py',
                                               'anaconda-project.yml', 'anaconda-project-local.yml'])

            # overwriting should work
            status = project_ops.archive(project, archivefile)
            assert status
            assert os.path.exists(archivefile)
            _assert_tar_contains(archivefile, ['a/b/c/d.py', 'a/b/c/e.py', 'emptydir', 'foo.py',
                                               'anaconda-project.yml', 'anaconda-project-local.yml'])

        with_directory_contents_completing_project_file(
            {DEFAULT_PROJECT_FILENAME: """
name: archivedproj
services:
  REDIS_URL: redis
""",
             "foo.py": "print('hello')\n",
             "emptydir": None,
             "a/b/c/d.py": "",
             "a/b/c/e.py": ""}, check)

    with_directory_contents_completing_project_file(dict(), archivetest)


def test_archive_tar_gz():
    # Same as test_archive_zip but for a gzip-compressed tarball.
    def archivetest(archive_dest_dir):
        archivefile = os.path.join(archive_dest_dir, "foo.tar.gz")

        def check(dirname):
            # be sure we ignore these
            os.makedirs(os.path.join(dirname, "services"))
            os.makedirs(os.path.join(dirname, "envs"))
            project = project_no_dedicated_env(dirname)
            status = project_ops.archive(project, archivefile)
            assert status
            assert os.path.exists(archivefile)
            _assert_tar_contains(archivefile, ['a/b/c/d.py', 'a/b/c/e.py', 'emptydir', 'foo.py',
                                               'anaconda-project.yml', 'anaconda-project-local.yml'])

            # overwriting should work
            status = project_ops.archive(project, archivefile)
            assert status
            assert os.path.exists(archivefile)
            _assert_tar_contains(archivefile, ['a/b/c/d.py', 'a/b/c/e.py', 'emptydir', 'foo.py',
                                               'anaconda-project.yml', 'anaconda-project-local.yml'])

        with_directory_contents_completing_project_file(
            {DEFAULT_PROJECT_FILENAME: """
name: archivedproj
services:
  REDIS_URL: redis
""",
             "foo.py": "print('hello')\n",
             "emptydir": None,
             "a/b/c/d.py": "",
             "a/b/c/e.py": ""}, check)
    with_directory_contents_completing_project_file(dict(), archivetest)


def test_archive_tar_bz2():
    # Same as test_archive_zip but for a bzip2-compressed tarball.
    def archivetest(archive_dest_dir):
        archivefile = os.path.join(archive_dest_dir, "foo.tar.bz2")

        def check(dirname):
            # be sure we ignore these
            os.makedirs(os.path.join(dirname, "services"))
            os.makedirs(os.path.join(dirname, "envs"))
            project = project_no_dedicated_env(dirname)
            status = project_ops.archive(project, archivefile)
            assert status
            assert os.path.exists(archivefile)
            _assert_tar_contains(archivefile, ['a/b/c/d.py', 'a/b/c/e.py', 'emptydir', 'foo.py',
                                               'anaconda-project.yml', 'anaconda-project-local.yml'])

            # overwriting should work
            status = project_ops.archive(project, archivefile)
            assert status
            assert os.path.exists(archivefile)
            _assert_tar_contains(archivefile, ['a/b/c/d.py', 'a/b/c/e.py', 'emptydir', 'foo.py',
                                               'anaconda-project.yml', 'anaconda-project-local.yml'])

        with_directory_contents_completing_project_file(
            {DEFAULT_PROJECT_FILENAME: """
name: archivedproj
services:
  REDIS_URL: redis
""",
             "foo.py": "print('hello')\n",
             "emptydir": None,
             "a/b/c/d.py": "",
             "a/b/c/e.py": ""}, check)

    with_directory_contents_completing_project_file(dict(), archivetest)


def test_archive_cannot_write_destination_path(monkeypatch):
    # Archive creation should fail cleanly if the destination can't be written.
    def archivetest(archive_dest_dir):
        archivefile = os.path.join(archive_dest_dir, "foo.zip")

        def mock_ZipFile(*args, **kwargs):
            raise IOError("NOPE")

        monkeypatch.setattr('zipfile.ZipFile', mock_ZipFile)

        # need to mock plugins since entry_points uses zipfile.ZipFile that
        # we are mocking for this test
        monkeypatch.setattr(plugins_api, 'get_plugins', lambda x='fake': {})

        def check(dirname):
            # be sure we ignore this
            os.makedirs(os.path.join(dirname, "envs"))
            project = project_no_dedicated_env(dirname)
            status = project_ops.archive(project, archivefile)
            assert not status
            assert status.status_description == ('Failed to write project archive %s.' % archivefile)
            assert ['NOPE'] == status.errors

        with_directory_contents_completing_project_file(
            {DEFAULT_PROJECT_FILENAME: """
name: archivedproj
""",
             "foo.py": "print('hello')\n"}, check)

    with_directory_contents_completing_project_file(dict(), archivetest)


def _add_empty_git(contents):
    # Augment a with_directory_contents dict so the directory looks like a
    # freshly "git init"ed repository.
    contents.update({
        # I'm not sure these are all really needed for git to
        # recognize the directory as a git repo, but this is what
        # "git init" creates.
        '.git/branches': None,
        '.git/hooks': None,
        '.git/info': None,
        '.git/objects/info': None,
        '.git/objects/pack': None,
        '.git/refs/heads': None,
        '.git/refs/tags': None,
        '.git/config': """
[core]
 repositoryformatversion = 0
 filemode = true
 bare = false
 logallrefupdates = true
""",
        '.git/description': "TestingGitRepository\n",
        '.git/HEAD': 'ref: refs/heads/master\n'
    })
    return contents


def test_archive_zip_with_gitignore():
    # Files matched by .gitignore should be excluded from the archive.
    def archivetest(archive_dest_dir):
        archivefile = os.path.join(archive_dest_dir, "foo.zip")

        def check(dirname):
            # be sure we ignore this
            os.makedirs(os.path.join(dirname, "envs"))
            project = project_no_dedicated_env(dirname)
            status = project_ops.archive(project, archivefile)
            assert status
            assert os.path.exists(archivefile)
            _assert_zip_contains(archivefile, ['foo.py', '.gitignore', 'anaconda-project.yml',
                                               'anaconda-project-local.yml'])

        with_directory_contents_completing_project_file(
            _add_empty_git({DEFAULT_PROJECT_FILENAME: """
name: archivedproj
""",
                            "foo.py": "print('hello')\n",
                            '.gitignore': "/ignored.py\n/subdir\n/subwithslash/\n",
                            'ignored.py': 'print("ignore me!")',
                            'subdir/foo.py': 'foo',
                            'subdir/subsub/bar.py': 'bar',
                            'subwithslash/something.py': 'something'}), check)

    with_directory_contents_completing_project_file(dict(), archivetest)


def test_archive_zip_with_failing_git_command(monkeypatch):
    # 'git ls-files' exiting nonzero should abort the archive with an error.
    def archivetest(archive_dest_dir):
        archivefile = os.path.join(archive_dest_dir, "foo.zip")

        def check(dirname):
            # be sure we ignore this
            os.makedirs(os.path.join(dirname, "envs"))
            project = project_no_dedicated_env(dirname)
from subprocess import check_output as real_check_output def mock_check_output(args, cwd): def run(commandline): return real_check_output(commandline) return with_temporary_script_commandline("import sys\nsys.exit(1)\n", run) monkeypatch.setattr('subprocess.check_output', mock_check_output) status = project_ops.archive(project, archivefile) assert not status assert not os.path.exists(archivefile) # before the "." is the command output, but "false" has no output. assert status.errors == ["'git ls-files' failed to list ignored files: ."] with_directory_contents_completing_project_file( _add_empty_git({DEFAULT_PROJECT_FILENAME: """ """, "foo.py": "print('hello')\n"}), check) with_directory_contents_completing_project_file(dict(), archivetest) def test_archive_zip_with_exception_executing_git_command(monkeypatch): def archivetest(archive_dest_dir): archivefile = os.path.join(archive_dest_dir, "foo.zip") def check(dirname): # be sure we ignore these os.makedirs(os.path.join(dirname, "services")) os.makedirs(os.path.join(dirname, "envs")) project = project_no_dedicated_env(dirname) from subprocess import check_output as real_check_output def mock_check_output(args, cwd): return real_check_output(args=['this-is-not-a-real-command'], cwd=cwd) monkeypatch.setattr('subprocess.check_output', mock_check_output) status = project_ops.archive(project, archivefile) assert not status assert not os.path.exists(archivefile) assert len(status.errors) == 1 # full error message is platform-dependent assert status.errors[0].startswith("Failed to run 'git ls-files'") with_directory_contents_completing_project_file( _add_empty_git({DEFAULT_PROJECT_FILENAME: """ """, "foo.py": "print('hello')\n"}), check) with_directory_contents_completing_project_file(dict(), archivetest) def test_archive_zip_with_inability_to_walk_directory(monkeypatch): def archivetest(archive_dest_dir): archivefile = os.path.join(archive_dest_dir, "foo.zip") def check(dirname): # be sure we ignore these 
os.makedirs(os.path.join(dirname, "services")) os.makedirs(os.path.join(dirname, "envs")) project = project_no_dedicated_env(dirname) assert project.problems == [] def mock_os_walk(dirname): raise OSError("NOPE") monkeypatch.setattr('os.walk', mock_os_walk) status = project_ops.archive(project, archivefile) assert not status assert not os.path.exists(archivefile) assert status.status_description == "Failed to list files in the project." assert len(status.errors) > 0 assert status.errors[0].startswith("Could not list files in") with_directory_contents_completing_project_file( _add_empty_git({DEFAULT_PROJECT_FILENAME: """ name: archivedproj """, "foo.py": "print('hello')\n"}), check) with_directory_contents_completing_project_file(dict(), archivetest) def test_archive_zip_with_unreadable_projectignore(monkeypatch): def archivetest(archive_dest_dir): archivefile = os.path.join(archive_dest_dir, "foo.zip") def check(dirname): # be sure we ignore these os.makedirs(os.path.join(dirname, "services")) os.makedirs(os.path.join(dirname, "envs")) project = project_no_dedicated_env(dirname) ignorefile = os.path.join(dirname, ".projectignore") with codecs.open(ignorefile, 'w', 'utf-8') as f: f.write("\n") from codecs import open as real_open def mock_codecs_open(*args, **kwargs): if args[0].endswith(".projectignore"): raise IOError("NOPE") else: return real_open(*args, **kwargs) monkeypatch.setattr('codecs.open', mock_codecs_open) status = project_ops.archive(project, archivefile) assert not status assert not os.path.exists(archivefile) assert ["Failed to read %s: NOPE" % ignorefile] == status.errors with_directory_contents_completing_project_file( _add_empty_git({DEFAULT_PROJECT_FILENAME: """ name: archivedproj """, "foo.py": "print('hello')\n"}), check) with_directory_contents_completing_project_file(dict(), archivetest) def test_archive_with_bogus_filename(monkeypatch): def archivetest(archive_dest_dir): archivefile = os.path.join(archive_dest_dir, "foo.bar") def 
check(dirname): project = project_no_dedicated_env(dirname) status = project_ops.archive(project, archivefile) assert not status assert not os.path.exists(archivefile) assert status.status_description == "Project archive filename must be a .zip, .tar.gz, or .tar.bz2." assert status.errors == ["Unsupported archive filename %s." % archivefile] with_directory_contents_completing_project_file( _add_empty_git({DEFAULT_PROJECT_FILENAME: """ name: archivedproj """, "foo.py": "print('hello')\n"}), check) with_directory_contents_completing_project_file(dict(), archivetest) def test_archive_with_no_project_file(monkeypatch): def archivetest(archive_dest_dir): archivefile = os.path.join(archive_dest_dir, "foo.zip") def check(dirname): project = project_no_dedicated_env(dirname) assert not os.path.exists(project.project_file.filename) status = project_ops.archive(project, archivefile) assert not status assert not os.path.exists(archivefile) assert status.status_description == "Can't create an archive." assert status.errors == ["%s does not exist." % DEFAULT_PROJECT_FILENAME] with_directory_contents(dict(), check) with_directory_contents(dict(), archivetest) def test_archive_with_unsaved_project(monkeypatch): def archivetest(archive_dest_dir): archivefile = os.path.join(archive_dest_dir, "foo.zip") def check(dirname): project = project_no_dedicated_env(dirname) assert os.path.exists(project.project_file.filename) project.project_file.set_value(['name'], "hello") status = project_ops.archive(project, archivefile) assert not status assert not os.path.exists(archivefile) assert status.status_description == "Can't create an archive." assert status.errors == ["%s has been modified but not saved." 
% DEFAULT_PROJECT_FILENAME] with_directory_contents_completing_project_file( {DEFAULT_PROJECT_FILENAME: """ env_specs: default: packages: [] """}, check) with_directory_contents(dict(), archivetest) def test_archive_zip_with_downloaded_file(): def archivetest(archive_dest_dir): archivefile = os.path.join(archive_dest_dir, "foo.zip") def check(dirname): project = project_no_dedicated_env(dirname) status = project_ops.archive(project, archivefile) assert status assert os.path.exists(archivefile) _assert_zip_contains(archivefile, ['foo.py', 'anaconda-project.yml', 'anaconda-project-local.yml']) with_directory_contents_completing_project_file( _add_empty_git({DEFAULT_PROJECT_FILENAME: """ name: archivedproj downloads: MYDOWNLOAD: "http://example.com/downloaded.py" """, "foo.py": "print('hello')\n", 'downloaded.py': 'print("ignore me!")', 'downloaded.py.part': ''}), check) with_directory_contents_completing_project_file(dict(), archivetest) def test_archive_zip_overwrites_but_does_not_include_the_dest_zip(): def check(dirname): project = project_no_dedicated_env(dirname) archivefile = os.path.join(dirname, "foo.zip") assert os.path.isfile(archivefile) status = project_ops.archive(project, archivefile) assert status assert os.path.exists(archivefile) _assert_zip_contains(archivefile, ['foo.py', 'anaconda-project.yml', 'anaconda-project-local.yml']) # re-archive to the same file status = project_ops.archive(project, archivefile) assert status assert os.path.exists(archivefile) _assert_zip_contains(archivefile, ['foo.py', 'anaconda-project.yml', 'anaconda-project-local.yml']) with_directory_contents_completing_project_file( _add_empty_git({DEFAULT_PROJECT_FILENAME: """ name: archivedproj """, "foo.py": "print('hello')\n", 'foo.zip': ""}), check) def test_archive_zip_with_projectignore(): def archivetest(archive_dest_dir): archivefile = os.path.join(archive_dest_dir, "foo.zip") def check(dirname): # be sure we ignore this os.makedirs(os.path.join(dirname, "envs")) project = 
project_ops.create(dirname) assert [] == project.problems status = project_ops.archive(project, archivefile) assert status assert os.path.exists(archivefile) _assert_zip_contains(archivefile, ['foo.py', 'anaconda-project.yml', '.projectignore', 'bar/']) with_directory_contents_completing_project_file( {DEFAULT_PROJECT_FILENAME: """ name: archivedproj """, "foo.py": "print('hello')\n", "foo.pyc": "", ".ipynb_checkpoints": "", "bar/blah.pyc": ""}, check) with_directory_contents_completing_project_file(dict(), archivetest) _CONTENTS_DIR = 1 _CONTENTS_FILE = 2 _CONTENTS_SYMLINK = 3 def _make_zip(archive_dest_dir, contents): archivefile = os.path.join(archive_dest_dir, "foo.zip") with zipfile.ZipFile(archivefile, 'w') as zf: for (key, what) in contents.items(): if what is _CONTENTS_DIR: # create a directory if not key.endswith(os.sep): key = key + os.sep zf.writestr(key, "") elif what is _CONTENTS_FILE: zf.writestr(key, "hello") else: raise AssertionError("can't put this in a zip") return archivefile def _make_tar(archive_dest_dir, contents, compression=None): mode = 'w' extension = '.tar' if compression == 'gz': mode = mode + ':gz' extension = extension + '.gz' elif compression == 'bz2': mode = mode + ':bz2' extension = extension + '.bz2' # the tarfile API only lets us put in files, so we need # files to put in a_directory = os.path.join(archive_dest_dir, "a_directory") os.mkdir(a_directory) a_file = os.path.join(archive_dest_dir, "a_file") with open(a_file, 'w') as f: f.write("hello") a_symlink = os.path.join(archive_dest_dir, "a_link") if _CONTENTS_SYMLINK in contents.values(): os.symlink("/somewhere", a_symlink) archivefile = os.path.join(archive_dest_dir, "foo" + extension) with tarfile.open(archivefile, mode) as tf: for (key, what) in contents.items(): t = tarfile.TarInfo(key) if what is _CONTENTS_DIR: t.type = tarfile.DIRTYPE elif what is _CONTENTS_FILE: pass elif what is _CONTENTS_SYMLINK: t.type = tarfile.SYMTYPE tf.addfile(t) os.remove(a_file) 
os.rmdir(a_directory) if os.path.exists(a_symlink): os.remove(a_symlink) return archivefile def _test_unarchive_tar(compression): def archivetest(archive_dest_dir): archivefile = _make_tar(archive_dest_dir, {'a/a.txt': _CONTENTS_FILE, 'a/q/b.txt': _CONTENTS_FILE, 'a/c': _CONTENTS_DIR, 'a': _CONTENTS_DIR}, compression=compression) # with tarfile.open(archivefile, 'r') as tf: # tf.list() if compression is not None: assert archivefile.endswith(compression) def check(dirname): unpacked = os.path.join(dirname, "foo") status = project_ops.unarchive(archivefile, unpacked) assert status.errors == [] assert status assert os.path.isdir(unpacked) _assert_dir_contains(unpacked, ['a.txt', 'c', 'q/b.txt']) with_directory_contents(dict(), check) with_directory_contents(dict(), archivetest) def test_unarchive_tar(): _test_unarchive_tar(compression=None) def test_unarchive_tar_gz(): _test_unarchive_tar(compression='gz') def test_unarchive_tar_bz2(): _test_unarchive_tar(compression='bz2') def test_unarchive_zip(): def archivetest(archive_dest_dir): archivefile = _make_zip(archive_dest_dir, {'a/a.txt': _CONTENTS_FILE, 'a/q/b.txt': _CONTENTS_FILE, 'a/c': _CONTENTS_DIR, 'a': _CONTENTS_DIR}) # with zipfile.ZipFile(archivefile, 'r') as zf: # print(repr(zf.namelist())) def check(dirname): unpacked = os.path.join(dirname, "foo") status = project_ops.unarchive(archivefile, unpacked) assert status.errors == [] assert status assert os.path.isdir(unpacked) _assert_dir_contains(unpacked, ['a.txt', 'c', 'q/b.txt']) assert status.project_dir == unpacked with_directory_contents(dict(), check) with_directory_contents(dict(), archivetest) def test_unarchive_zip_to_current_directory(): def archivetest(archive_dest_dir): archivefile = _make_zip(archive_dest_dir, {'a/a.txt': _CONTENTS_FILE, 'a/q/b.txt': _CONTENTS_FILE, 'a/c': _CONTENTS_DIR, 'a': _CONTENTS_DIR}) # with zipfile.ZipFile(archivefile, 'r') as zf: # print(repr(zf.namelist())) def check(dirname): old = os.getcwd() try: os.chdir(dirname) 
status = project_ops.unarchive(archivefile, project_dir=None) finally: os.chdir(old) unpacked = os.path.join(dirname, "a") assert status.errors == [] assert status assert os.path.isdir(unpacked) _assert_dir_contains(unpacked, ['a.txt', 'c', 'q/b.txt']) assert status.project_dir == unpacked with_directory_contents(dict(), check) with_directory_contents(dict(), archivetest) def test_unarchive_zip_to_parent_dir_with_auto_project_dir(): def archivetest(archive_dest_dir): archivefile = _make_zip(archive_dest_dir, {'a/a.txt': _CONTENTS_FILE, 'a/q/b.txt': _CONTENTS_FILE, 'a/c': _CONTENTS_DIR}) # with zipfile.ZipFile(archivefile, 'r') as zf: # print(repr(zf.namelist())) def check(dirname): unpacked = os.path.join(dirname, "a") status = project_ops.unarchive(archivefile, project_dir=None, parent_dir=dirname) assert status.errors == [] assert status assert os.path.isdir(unpacked) _assert_dir_contains(unpacked, ['a.txt', 'c', 'q/b.txt']) assert status.project_dir == unpacked with_directory_contents(dict(), check) with_directory_contents(dict(), archivetest) def test_unarchive_tar_to_parent_dir_with_auto_project_dir(): def archivetest(archive_dest_dir): archivefile = _make_tar(archive_dest_dir, {'a/a.txt': _CONTENTS_FILE, 'a/q/b.txt': _CONTENTS_FILE, 'a/c': _CONTENTS_DIR}) def check(dirname): unpacked = os.path.join(dirname, "a") status = project_ops.unarchive(archivefile, project_dir=None, parent_dir=dirname) assert status.errors == [] assert status assert os.path.isdir(unpacked) _assert_dir_contains(unpacked, ['a.txt', 'c', 'q/b.txt']) assert status.project_dir == unpacked with_directory_contents(dict(), check) with_directory_contents(dict(), archivetest) def test_unarchive_error_on_dest_dir_exists(): def archivetest(archive_dest_dir): archivefile = _make_tar(archive_dest_dir, {'a/a.txt': _CONTENTS_FILE}) def check(dirname): unpacked = os.path.join(dirname, "foo") os.mkdir(unpacked) status = project_ops.unarchive(archivefile, unpacked) message = "Directory '%s' already 
exists." % unpacked assert status.errors == [message] assert not status with_directory_contents(dict(), check) with_directory_contents(dict(), archivetest) def test_unarchive_ignore_rmtree_fail_when_unzipping(monkeypatch): def archivetest(archive_dest_dir): archivefile = _make_zip(archive_dest_dir, {'a/a.txt': _CONTENTS_FILE}) def check(dirname): unpacked = os.path.join(dirname, "foo") def mock_rmtree(*args, **kwargs): raise IOError("FAILURE") monkeypatch.setattr('shutil.rmtree', mock_rmtree) status = project_ops.unarchive(archivefile, unpacked) monkeypatch.undo() assert status assert os.path.isdir(unpacked) assert os.path.isfile(os.path.join(unpacked, "a.txt")) with_directory_contents(dict(), check) with_directory_contents(dict(), archivetest) def test_unarchive_error_on_bad_extension(): def archivetest(archive_dest_dir): archivefile = os.path.join(archive_dest_dir, "foo.bar") with open(archivefile, 'w') as f: f.write("hello") def check(dirname): unpacked = os.path.join(dirname, "foo") status = project_ops.unarchive(archivefile, unpacked) message = "Unsupported archive filename %s, must be a .zip, .tar.gz, or .tar.bz2" % archivefile assert status.errors == [message] assert not status assert not os.path.isdir(unpacked) with_directory_contents(dict(), check) with_directory_contents(dict(), archivetest) def test_unarchive_error_on_corrupt_zip(): def archivetest(archive_dest_dir): archivefile = os.path.join(archive_dest_dir, "foo.zip") with open(archivefile, 'w') as f: f.write("hello") def check(dirname): unpacked = os.path.join(dirname, "foo") status = project_ops.unarchive(archivefile, unpacked) message = "File is not a zip file" assert status.errors == [message] assert not status assert not os.path.isdir(unpacked) with_directory_contents(dict(), check) with_directory_contents(dict(), archivetest) def test_unarchive_error_on_corrupt_tar(): def archivetest(archive_dest_dir): archivefile = os.path.join(archive_dest_dir, "foo.tar") with open(archivefile, 'w') as f: 
f.write("hello") def check(dirname): unpacked = os.path.join(dirname, "foo") status = project_ops.unarchive(archivefile, unpacked) message = "file could not be opened successfully" assert status.errors == [message] assert not status assert not os.path.isdir(unpacked) with_directory_contents(dict(), check) with_directory_contents(dict(), archivetest) def test_unarchive_error_on_nonexistent_tar(): def archivetest(archive_dest_dir): archivefile = os.path.join(archive_dest_dir, "foo.tar") def check(dirname): unpacked = os.path.join(dirname, "foo") status = project_ops.unarchive(archivefile, unpacked) # the exact message here varies by OS so not checking assert len(status.errors) == 1 assert not status assert not os.path.isdir(unpacked) with_directory_contents(dict(), check) with_directory_contents(dict(), archivetest) def test_unarchive_error_on_nonexistent_zip(): def archivetest(archive_dest_dir): archivefile = os.path.join(archive_dest_dir, "foo.zip") def check(dirname): unpacked = os.path.join(dirname, "foo") status = project_ops.unarchive(archivefile, unpacked) # the exact message here varies by OS so not checking assert len(status.errors) == 1 assert not status assert not os.path.isdir(unpacked) with_directory_contents(dict(), check) with_directory_contents(dict(), archivetest) def test_unarchive_tar_ignores_symlink(): if platform.system() == 'Windows': print("Can't test tars with symlinks on Windows because there's no way to create one") return def archivetest(archive_dest_dir): archivefile = _make_tar(archive_dest_dir, {'a/a.txt': _CONTENTS_FILE, 'a/q/b.txt': _CONTENTS_FILE, 'a/c': _CONTENTS_DIR, 'a/link': _CONTENTS_SYMLINK}) with tarfile.open(archivefile, 'r') as tf: member = tf.getmember('a/link') assert member is not None assert member.issym() def check(dirname): unpacked = os.path.join(dirname, "foo") status = project_ops.unarchive(archivefile, unpacked) assert status.errors == [] assert status assert os.path.isdir(unpacked) _assert_dir_contains(unpacked, 
['a.txt', 'c', 'q/b.txt']) with_directory_contents(dict(), check) with_directory_contents(dict(), archivetest) def test_unarchive_tar_error_on_relative_path(): def archivetest(archive_dest_dir): archivefile = _make_tar(archive_dest_dir, {'a/../a.txt': _CONTENTS_FILE}) def check(dirname): unpacked = os.path.join(dirname, "foo") status = project_ops.unarchive(archivefile, unpacked) assert not os.path.exists(unpacked) message = "Archive entry 'a/../a.txt' would end up at '%s' which is outside '%s'." % (os.path.join( dirname, "a.txt"), os.path.join(unpacked)) assert status.errors == [message] assert not status with_directory_contents(dict(), check) with_directory_contents(dict(), archivetest) def test_unarchive_tar_error_on_root_relative_path(): def archivetest(archive_dest_dir): archivefile = _make_tar(archive_dest_dir, {'../a.txt': _CONTENTS_FILE}) def check(dirname): # root relative path fails when project_dir=None status = project_ops.unarchive(archivefile, project_dir=None, parent_dir=dirname) message = "Archive contains relative path '../a.txt' which is not allowed." assert status.errors == [message] assert not status # and also when it is specified unpacked = os.path.join(dirname, "foo") status = project_ops.unarchive(archivefile, project_dir=unpacked) message = "Archive contains relative path '../a.txt' which is not allowed." assert status.errors == [message] assert not status assert not os.path.isdir(unpacked) with_directory_contents(dict(), check) with_directory_contents(dict(), archivetest) def test_unarchive_zip_error_on_relative_path(): def archivetest(archive_dest_dir): archivefile = _make_zip(archive_dest_dir, {'a/../a.txt': _CONTENTS_FILE}) def check(dirname): unpacked = os.path.join(dirname, "foo") status = project_ops.unarchive(archivefile, unpacked) message = "Archive entry 'a/../a.txt' would end up at '%s' which is outside '%s'." 
% (os.path.join( dirname, "a.txt"), os.path.join(unpacked)) assert status.errors == [message] assert not status assert not os.path.exists(unpacked) with_directory_contents(dict(), check) with_directory_contents(dict(), archivetest) def test_unarchive_zip_error_on_root_relative_path(): def archivetest(archive_dest_dir): archivefile = _make_zip(archive_dest_dir, {'../a.txt': _CONTENTS_FILE}) def check(dirname): # root relative path fails when project_dir=None status = project_ops.unarchive(archivefile, project_dir=None, parent_dir=dirname) message = "Archive contains relative path '../a.txt' which is not allowed." assert status.errors == [message] assert not status # and also when it is specified unpacked = os.path.join(dirname, "foo") status = project_ops.unarchive(archivefile, project_dir=unpacked) message = "Archive contains relative path '../a.txt' which is not allowed." assert status.errors == [message] assert not status assert not os.path.isdir(unpacked) with_directory_contents(dict(), check) with_directory_contents(dict(), archivetest) def test_unarchive_tar_error_on_no_directory(): def archivetest(archive_dest_dir): archivefile = _make_tar(archive_dest_dir, {'a.txt': _CONTENTS_FILE}) def check(dirname): unpacked = os.path.join(dirname, "foo") status = project_ops.unarchive(archivefile, unpacked) assert not os.path.exists(unpacked) message = "Archive does not contain a project directory or is empty." assert status.errors == [message] assert not status with_directory_contents(dict(), check) with_directory_contents(dict(), archivetest) def test_unarchive_tar_error_on_only_directory(): def archivetest(archive_dest_dir): archivefile = _make_tar(archive_dest_dir, {'a': _CONTENTS_DIR}) def check(dirname): unpacked = os.path.join(dirname, "foo") status = project_ops.unarchive(archivefile, unpacked) assert not os.path.exists(unpacked) message = "Archive does not contain a project directory or is empty." 
assert status.errors == [message] assert not status with_directory_contents(dict(), check) with_directory_contents(dict(), archivetest) def test_unarchive_tar_error_on_multiple_directories(): def archivetest(archive_dest_dir): archivefile = _make_tar(archive_dest_dir, {'a/b.txt': _CONTENTS_FILE, 'c/d.txt': _CONTENTS_FILE}) def check(dirname): unpacked = os.path.join(dirname, "foo") status = project_ops.unarchive(archivefile, unpacked) assert not os.path.exists(unpacked) message = "A valid project archive contains only one project directory " + \ "with all files inside that directory. 'c/d.txt' is outside 'a'." assert status.errors == [message] assert not status with_directory_contents(dict(), check) with_directory_contents(dict(), archivetest) def test_unarchive_tar_error_on_empty(): def archivetest(archive_dest_dir): archivefile = _make_tar(archive_dest_dir, {}) def check(dirname): unpacked = os.path.join(dirname, "foo") status = project_ops.unarchive(archivefile, unpacked) assert not os.path.exists(unpacked) message = "A valid project archive must contain at least one file." assert status.errors == [message] assert not status with_directory_contents(dict(), check) with_directory_contents(dict(), archivetest) def test_unarchive_zip_error_on_empty(): def archivetest(archive_dest_dir): archivefile = _make_zip(archive_dest_dir, {}) def check(dirname): unpacked = os.path.join(dirname, "foo") status = project_ops.unarchive(archivefile, unpacked) assert not os.path.exists(unpacked) message = "A valid project archive must contain at least one file." 
assert status.errors == [message] assert not status with_directory_contents(dict(), check) with_directory_contents(dict(), archivetest) def test_unarchive_abs_project_dir_with_parent_dir(): with pytest.raises(ValueError) as excinfo: project_ops.unarchive("foo.tar.gz", "/absolute", "/bar") assert "If supplying parent_dir to unarchive, project_dir must be relative or None" == str(excinfo.value) def test_unarchive_tar_error_on_writing_removes_dir(monkeypatch): def archivetest(archive_dest_dir): archivefile = _make_tar(archive_dest_dir, {'a/b.txt': _CONTENTS_FILE, 'a/c.txt': _CONTENTS_FILE}) def check(dirname): unpacked = os.path.join(dirname, "foo") # this test is trying to prove that we clean up the dest # directory if we get IO errors partway through creating # it. state = dict(count=0) def mock_copyfileobj(*args, **kwargs): # assert that 'unpacked' exists at some point assert os.path.exists(unpacked) state['count'] = state['count'] + 1 if state['count'] == 2: raise IOError("Not copying second file") monkeypatch.setattr('tarfile.copyfileobj', mock_copyfileobj) status = project_ops.unarchive(archivefile, unpacked) assert state['count'] == 2 assert not os.path.exists(unpacked) message = "Not copying second file" assert status.errors == [message] assert not status with_directory_contents(dict(), check) with_directory_contents(dict(), archivetest) def test_unarchive_tar_error_on_writing_then_error_removing_dir(monkeypatch): def archivetest(archive_dest_dir): archivefile = _make_tar(archive_dest_dir, {'a/b.txt': _CONTENTS_FILE, 'a/c.txt': _CONTENTS_FILE}) def check(dirname): unpacked = os.path.join(dirname, "foo") state = dict(count=0, rmtree_count=0) def mock_copyfileobj(*args, **kwargs): # assert that 'unpacked' exists at some point assert os.path.exists(unpacked) state['count'] = state['count'] + 1 if state['count'] == 2: raise IOError("Not copying second file") monkeypatch.setattr('tarfile.copyfileobj', mock_copyfileobj) # this test is trying to prove that we ignore 
an exception # from rmtree when cleaning up "unpacked" def mock_rmtree(path): assert os.path.exists(unpacked) state['rmtree_count'] = state['rmtree_count'] + 1 raise IOError("rmtree failed") monkeypatch.setattr('shutil.rmtree', mock_rmtree) status = project_ops.unarchive(archivefile, unpacked) monkeypatch.undo() assert state['count'] == 2 assert state['rmtree_count'] == 1 assert os.path.exists(unpacked) # since the rmtree failed message = "Not copying second file" assert status.errors == [message] assert not status with_directory_contents(dict(), check) with_directory_contents(dict(), archivetest) def test_upload(monkeypatch): def check(dirname): with fake_server(monkeypatch, expected_basename='foo.tar.bz2'): project = project_no_dedicated_env(dirname) assert [] == project.problems status = project_ops.upload(project, site='unit_test') assert status assert status.url == 'http://example.com/whatevs' with_directory_contents_completing_project_file( {DEFAULT_PROJECT_FILENAME: "name: foo\n", "foo.py": "print('hello')\n"}, check) def test_upload_with_project_file_problems(): def check(dirname): project = Project(dirname, frontend=FakeFrontend()) status = project_ops.upload(project) assert not status assert ["%s: variables section contains wrong value type 42, should be dict or list of requirements" % project.project_file.basename] == status.errors with_directory_contents_completing_project_file({DEFAULT_PROJECT_FILENAME: "variables:\n 42"}, check) def test_upload_cannot_walk_directory(monkeypatch): def check(dirname): project = project_no_dedicated_env(dirname) assert [] == project.problems def mock_os_walk(dirname): raise OSError("NOPE") monkeypatch.setattr('os.walk', mock_os_walk) status = project_ops.upload(project, site='unit_test') assert not status assert status.errors[0].startswith("Could not list files in") with_directory_contents_completing_project_file( {DEFAULT_PROJECT_FILENAME: "name: foo\n", "foo.py": "print('hello')\n"}, check)
37.793453
120
0.621688
20,651
182,429
5.237809
0.035204
0.0147
0.040965
0.039264
0.869857
0.846994
0.824963
0.804282
0.782926
0.763678
0
0.008461
0.270467
182,429
4,826
121
37.801285
0.80428
0.038393
0
0.744904
0
0.005096
0.16265
0.014243
0
0
0
0
0.260759
1
0.131937
false
0.011042
0.009626
0.000849
0.147508
0.007361
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
fc881222f41e2ceab04aafa2b6908b338b652e73
96
py
Python
flask_perms/__init__.py
jrice128/permission_manager
02d415a5f6bc0b4fb2cf40fa6431c813fabe38ba
[ "MIT" ]
null
null
null
flask_perms/__init__.py
jrice128/permission_manager
02d415a5f6bc0b4fb2cf40fa6431c813fabe38ba
[ "MIT" ]
null
null
null
flask_perms/__init__.py
jrice128/permission_manager
02d415a5f6bc0b4fb2cf40fa6431c813fabe38ba
[ "MIT" ]
1
2020-04-28T09:18:14.000Z
2020-04-28T09:18:14.000Z
from flask_perms.flask_perms import Flask_Perms from flask_perms.flask_perms import perm_manager
48
48
0.90625
16
96
5.0625
0.375
0.617284
0.345679
0.469136
0.740741
0.740741
0
0
0
0
0
0
0.072917
96
2
48
48
0.910112
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
1
1
0
1
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
8
5d87c02290645e3f5a9c1e2f75cb94f560afb95b
8,696
py
Python
GUI/printer/Pillow-2.7.0/Tests/test_imagemath.py
y-gupta/rfid-auth-system
44f3de884d05e1906757b97f0a1a140469a3290f
[ "Apache-2.0" ]
5
2015-01-21T14:13:34.000Z
2016-05-14T06:53:38.000Z
GUI/printer/Pillow-2.7.0/Tests/test_imagemath.py
1upon0/rfid-auth-system
44f3de884d05e1906757b97f0a1a140469a3290f
[ "Apache-2.0" ]
null
null
null
GUI/printer/Pillow-2.7.0/Tests/test_imagemath.py
1upon0/rfid-auth-system
44f3de884d05e1906757b97f0a1a140469a3290f
[ "Apache-2.0" ]
3
2015-02-01T17:10:39.000Z
2019-12-05T05:21:42.000Z
from helper import unittest, PillowTestCase from PIL import Image from PIL import ImageMath def pixel(im): if hasattr(im, "im"): return "%s %r" % (im.mode, im.getpixel((0, 0))) else: if isinstance(im, type(0)): return int(im) # hack to deal with booleans print(im) A = Image.new("L", (1, 1), 1) B = Image.new("L", (1, 1), 2) Z = Image.new("L", (1, 1), 0) # Z for zero F = Image.new("F", (1, 1), 3) I = Image.new("I", (1, 1), 4) A2 = A.resize((2, 2)) B2 = B.resize((2, 2)) images = {"A": A, "B": B, "F": F, "I": I} class TestImageMath(PillowTestCase): def test_sanity(self): self.assertEqual(ImageMath.eval("1"), 1) self.assertEqual(ImageMath.eval("1+A", A=2), 3) self.assertEqual(pixel(ImageMath.eval("A+B", A=A, B=B)), "I 3") self.assertEqual(pixel(ImageMath.eval("A+B", images)), "I 3") self.assertEqual(pixel(ImageMath.eval("float(A)+B", images)), "F 3.0") self.assertEqual(pixel( ImageMath.eval("int(float(A)+B)", images)), "I 3") def test_ops(self): self.assertEqual(pixel(ImageMath.eval("-A", images)), "I -1") self.assertEqual(pixel(ImageMath.eval("+B", images)), "L 2") self.assertEqual(pixel(ImageMath.eval("A+B", images)), "I 3") self.assertEqual(pixel(ImageMath.eval("A-B", images)), "I -1") self.assertEqual(pixel(ImageMath.eval("A*B", images)), "I 2") self.assertEqual(pixel(ImageMath.eval("A/B", images)), "I 0") self.assertEqual(pixel(ImageMath.eval("B**2", images)), "I 4") self.assertEqual(pixel( ImageMath.eval("B**33", images)), "I 2147483647") self.assertEqual(pixel(ImageMath.eval("float(A)+B", images)), "F 3.0") self.assertEqual(pixel(ImageMath.eval("float(A)-B", images)), "F -1.0") self.assertEqual(pixel(ImageMath.eval("float(A)*B", images)), "F 2.0") self.assertEqual(pixel(ImageMath.eval("float(A)/B", images)), "F 0.5") self.assertEqual(pixel(ImageMath.eval("float(B)**2", images)), "F 4.0") self.assertEqual(pixel( ImageMath.eval("float(B)**33", images)), "F 8589934592.0") def test_logical(self): self.assertEqual(pixel(ImageMath.eval("not A", images)), 0) 
self.assertEqual(pixel(ImageMath.eval("A and B", images)), "L 2") self.assertEqual(pixel(ImageMath.eval("A or B", images)), "L 1") def test_convert(self): self.assertEqual(pixel( ImageMath.eval("convert(A+B, 'L')", images)), "L 3") self.assertEqual(pixel( ImageMath.eval("convert(A+B, '1')", images)), "1 0") self.assertEqual(pixel( ImageMath.eval("convert(A+B, 'RGB')", images)), "RGB (3, 3, 3)") def test_compare(self): self.assertEqual(pixel(ImageMath.eval("min(A, B)", images)), "I 1") self.assertEqual(pixel(ImageMath.eval("max(A, B)", images)), "I 2") self.assertEqual(pixel(ImageMath.eval("A == 1", images)), "I 1") self.assertEqual(pixel(ImageMath.eval("A == 2", images)), "I 0") def test_one_image_larger(self): self.assertEqual(pixel(ImageMath.eval("A+B", A=A2, B=B)), "I 3") self.assertEqual(pixel(ImageMath.eval("A+B", A=A, B=B2)), "I 3") def test_abs(self): self.assertEqual(pixel(ImageMath.eval("abs(A)", A=A)), "I 1") self.assertEqual(pixel(ImageMath.eval("abs(B)", B=B)), "I 2") def test_binary_mod(self): self.assertEqual(pixel(ImageMath.eval("A%A", A=A)), "I 0") self.assertEqual(pixel(ImageMath.eval("B%B", B=B)), "I 0") self.assertEqual(pixel(ImageMath.eval("A%B", A=A, B=B)), "I 1") self.assertEqual(pixel(ImageMath.eval("B%A", A=A, B=B)), "I 0") self.assertEqual(pixel(ImageMath.eval("Z%A", A=A, Z=Z)), "I 0") self.assertEqual(pixel(ImageMath.eval("Z%B", B=B, Z=Z)), "I 0") def test_bitwise_invert(self): self.assertEqual(pixel(ImageMath.eval("~Z", Z=Z)), "I -1") self.assertEqual(pixel(ImageMath.eval("~A", A=A)), "I -2") self.assertEqual(pixel(ImageMath.eval("~B", B=B)), "I -3") def test_bitwise_and(self): self.assertEqual(pixel(ImageMath.eval("Z&Z", A=A, Z=Z)), "I 0") self.assertEqual(pixel(ImageMath.eval("Z&A", A=A, Z=Z)), "I 0") self.assertEqual(pixel(ImageMath.eval("A&Z", A=A, Z=Z)), "I 0") self.assertEqual(pixel(ImageMath.eval("A&A", A=A, Z=Z)), "I 1") def test_bitwise_or(self): self.assertEqual(pixel(ImageMath.eval("Z|Z", A=A, Z=Z)), "I 0") 
self.assertEqual(pixel(ImageMath.eval("Z|A", A=A, Z=Z)), "I 1") self.assertEqual(pixel(ImageMath.eval("A|Z", A=A, Z=Z)), "I 1") self.assertEqual(pixel(ImageMath.eval("A|A", A=A, Z=Z)), "I 1") def test_bitwise_xor(self): self.assertEqual(pixel(ImageMath.eval("Z^Z", A=A, Z=Z)), "I 0") self.assertEqual(pixel(ImageMath.eval("Z^A", A=A, Z=Z)), "I 1") self.assertEqual(pixel(ImageMath.eval("A^Z", A=A, Z=Z)), "I 1") self.assertEqual(pixel(ImageMath.eval("A^A", A=A, Z=Z)), "I 0") def test_bitwise_leftshift(self): self.assertEqual(pixel(ImageMath.eval("Z<<0", Z=Z)), "I 0") self.assertEqual(pixel(ImageMath.eval("Z<<1", Z=Z)), "I 0") self.assertEqual(pixel(ImageMath.eval("A<<0", A=A)), "I 1") self.assertEqual(pixel(ImageMath.eval("A<<1", A=A)), "I 2") def test_bitwise_rightshift(self): self.assertEqual(pixel(ImageMath.eval("Z>>0", Z=Z)), "I 0") self.assertEqual(pixel(ImageMath.eval("Z>>1", Z=Z)), "I 0") self.assertEqual(pixel(ImageMath.eval("A>>0", A=A)), "I 1") self.assertEqual(pixel(ImageMath.eval("A>>1", A=A)), "I 0") def test_logical_eq(self): self.assertEqual(pixel(ImageMath.eval("A==A", A=A)), "I 1") self.assertEqual(pixel(ImageMath.eval("B==B", B=B)), "I 1") self.assertEqual(pixel(ImageMath.eval("A==B", A=A, B=B)), "I 0") self.assertEqual(pixel(ImageMath.eval("B==A", A=A, B=B)), "I 0") def test_logical_ne(self): self.assertEqual(pixel(ImageMath.eval("A!=A", A=A)), "I 0") self.assertEqual(pixel(ImageMath.eval("B!=B", B=B)), "I 0") self.assertEqual(pixel(ImageMath.eval("A!=B", A=A, B=B)), "I 1") self.assertEqual(pixel(ImageMath.eval("B!=A", A=A, B=B)), "I 1") def test_logical_lt(self): self.assertEqual(pixel(ImageMath.eval("A<A", A=A)), "I 0") self.assertEqual(pixel(ImageMath.eval("B<B", B=B)), "I 0") self.assertEqual(pixel(ImageMath.eval("A<B", A=A, B=B)), "I 1") self.assertEqual(pixel(ImageMath.eval("B<A", A=A, B=B)), "I 0") def test_logical_le(self): self.assertEqual(pixel(ImageMath.eval("A<=A", A=A)), "I 1") self.assertEqual(pixel(ImageMath.eval("B<=B", B=B)), "I 1") 
self.assertEqual(pixel(ImageMath.eval("A<=B", A=A, B=B)), "I 1") self.assertEqual(pixel(ImageMath.eval("B<=A", A=A, B=B)), "I 0") def test_logical_gt(self): self.assertEqual(pixel(ImageMath.eval("A>A", A=A)), "I 0") self.assertEqual(pixel(ImageMath.eval("B>B", B=B)), "I 0") self.assertEqual(pixel(ImageMath.eval("A>B", A=A, B=B)), "I 0") self.assertEqual(pixel(ImageMath.eval("B>A", A=A, B=B)), "I 1") def test_logical_ge(self): self.assertEqual(pixel(ImageMath.eval("A>=A", A=A)), "I 1") self.assertEqual(pixel(ImageMath.eval("B>=B", B=B)), "I 1") self.assertEqual(pixel(ImageMath.eval("A>=B", A=A, B=B)), "I 0") self.assertEqual(pixel(ImageMath.eval("B>=A", A=A, B=B)), "I 1") def test_logical_equal(self): self.assertEqual(pixel(ImageMath.eval("equal(A, A)", A=A)), "I 1") self.assertEqual(pixel(ImageMath.eval("equal(B, B)", B=B)), "I 1") self.assertEqual(pixel(ImageMath.eval("equal(Z, Z)", Z=Z)), "I 1") self.assertEqual(pixel(ImageMath.eval("equal(A, B)", A=A, B=B)), "I 0") self.assertEqual(pixel(ImageMath.eval("equal(B, A)", A=A, B=B)), "I 0") self.assertEqual(pixel(ImageMath.eval("equal(A, Z)", A=A, Z=Z)), "I 0") def test_logical_not_equal(self): self.assertEqual(pixel(ImageMath.eval("notequal(A, A)", A=A)), "I 0") self.assertEqual(pixel(ImageMath.eval("notequal(B, B)", B=B)), "I 0") self.assertEqual(pixel(ImageMath.eval("notequal(Z, Z)", Z=Z)), "I 0") self.assertEqual( pixel(ImageMath.eval("notequal(A, B)", A=A, B=B)), "I 1") self.assertEqual( pixel(ImageMath.eval("notequal(B, A)", A=A, B=B)), "I 1") self.assertEqual( pixel(ImageMath.eval("notequal(A, Z)", A=A, Z=Z)), "I 1") if __name__ == '__main__': unittest.main() # End of file
46.010582
79
0.584522
1,382
8,696
3.643271
0.065847
0.294935
0.385303
0.558689
0.865144
0.840119
0.809136
0.752731
0.676663
0.660973
0
0.024676
0.193767
8,696
188
80
46.255319
0.693482
0.005635
0
0.086093
0
0
0.107833
0
0
0
0
0
0.655629
1
0.152318
false
0
0.019868
0
0.192053
0.006623
0
0
0
null
1
1
1
1
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
9
5dae4200946c8cfaac41da0b0e395d21e35ef9b2
3,052
py
Python
Zhangjiashan_eemd/projects/esvr_one_step.py
zjy8006/MonthlyRunoffForecastByAutoReg
661fcb5dcdfbbb2ec6861e1668a035b50e69f7c2
[ "MIT" ]
2
2020-05-18T06:45:04.000Z
2021-05-18T06:38:23.000Z
Zhangjiashan_eemd/projects/esvr_one_step.py
zjy8006/MonthlyRunoffForecastByAutoReg
661fcb5dcdfbbb2ec6861e1668a035b50e69f7c2
[ "MIT" ]
null
null
null
Zhangjiashan_eemd/projects/esvr_one_step.py
zjy8006/MonthlyRunoffForecastByAutoReg
661fcb5dcdfbbb2ec6861e1668a035b50e69f7c2
[ "MIT" ]
1
2020-01-17T02:56:18.000Z
2020-01-17T02:56:18.000Z
import sys import matplotlib.pyplot as plt import os root_path = os.path.dirname(os.path.abspath('__file__')) sys.path.append(root_path) from tools.models import one_step_esvr, one_step_esvr_multi_seed from Zhangjiashan_eemd.projects.variables import variables if __name__ == '__main__': # one_step_esvr_multi_seed( # root_path=root_path, # station='Zhangjiashan', # decomposer='eemd', # predict_pattern='one_step_1_ahead_forecast_pacf_train_val',# hindcast or forecast or hindcast_with_pca_mle or forecast_with_pca_mle # n_calls=100, # ) # one_step_esvr_multi_seed( # root_path=root_path, # station='Zhangjiashan', # decomposer='eemd', # predict_pattern='one_step_1_ahead_forecast_pacf_traindev_test',# hindcast or forecast or hindcast_with_pca_mle or forecast_with_pca_mle # n_calls=100, # ) # one_step_esvr_multi_seed( # root_path=root_path, # station='Zhangjiashan', # decomposer='eemd', # predict_pattern='one_step_1_ahead_forecast_pacf_traindev_append',# hindcast or forecast or hindcast_with_pca_mle or forecast_with_pca_mle # n_calls=100, # ) # for leading_time in [1,3,5,7,9]: # one_step_esvr_multi_seed( # root_path=root_path, # station='Zhangjiashan', # decomposer='eemd', # predict_pattern='one_step_'+str(leading_time)+'_ahead_forecast_pacf',# hindcast or forecast or hindcast_with_pca_mle or forecast_with_pca_mle # n_calls=100, # ) for leading_time in [1,3,5,7,9]: one_step_esvr_multi_seed( root_path=root_path, station='Zhangjiashan', decomposer='eemd', predict_pattern='one_step_'+str(leading_time)+'_ahead_forecast_pcc_local',# hindcast or forecast or hindcast_with_pca_mle or forecast_with_pca_mle n_calls=100, ) # one_step_esvr_multi_seed( # root_path=root_path, # station='Zhangjiashan', # decomposer='eemd', # predict_pattern='one_step_1_ahead_forecast_pacf_pca22',#+str(i),# hindcast or forecast or hindcast_with_pca_mle or forecast_with_pca_mle # n_calls=100, # ) # one_step_esvr_multi_seed( # root_path=root_path, # station='Zhangjiashan', # decomposer='eemd', # 
predict_pattern='one_step_1_ahead_forecast_pacf_pcamle',#+str(i),# hindcast or forecast or hindcast_with_pca_mle or forecast_with_pca_mle # n_calls=100, # ) # num_in_one = sum(variables['lags_dict'].values()) # for n_components in range(num_in_one-16,num_in_one+1): # one_step_esvr_multi_seed( # root_path=root_path, # station='Zhangjiashan', # decomposer='eemd', # predict_pattern='one_step_1_ahead_forecast_pacf_pca'+str(n_components),# hindcast or forecast or hindcast_with_pca_mle or forecast_with_pca_mle # n_calls=100, # )
42.388889
158
0.66481
401
3,052
4.571072
0.174564
0.07856
0.087289
0.07856
0.820513
0.809602
0.809602
0.809602
0.809602
0.809602
0
0.019455
0.242136
3,052
71
159
42.985915
0.773022
0.702818
0
0
0
0
0.078199
0.029621
0
0
0
0
0
1
0
false
0
0.3125
0
0.3125
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
7
5de539ebf39393ee4bf3baad8cd824066cd67aef
17,262
py
Python
python/paddle/fluid/tests/unittests/rnn/test_rnn_nets_static.py
zmxdream/Paddle
04f042a5d507ad98f7f2cfc3cbc44b06d7a7f45c
[ "Apache-2.0" ]
17,085
2016-11-18T06:40:52.000Z
2022-03-31T22:52:32.000Z
python/paddle/fluid/tests/unittests/rnn/test_rnn_nets_static.py
zmxdream/Paddle
04f042a5d507ad98f7f2cfc3cbc44b06d7a7f45c
[ "Apache-2.0" ]
29,769
2016-11-18T06:35:22.000Z
2022-03-31T16:46:15.000Z
python/paddle/fluid/tests/unittests/rnn/test_rnn_nets_static.py
zmxdream/Paddle
04f042a5d507ad98f7f2cfc3cbc44b06d7a7f45c
[ "Apache-2.0" ]
4,641
2016-11-18T07:43:33.000Z
2022-03-31T15:15:02.000Z
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import paddle paddle.set_default_dtype("float64") from paddle.fluid.layers import sequence_mask paddle.enable_static() import numpy as np import unittest from convert import convert_params_for_net_static from rnn_numpy import SimpleRNN, LSTM, GRU bidirectional_list = ["bidirectional", "bidirect"] class TestSimpleRNN(unittest.TestCase): def __init__(self, time_major=True, direction="forward", place="cpu"): super(TestSimpleRNN, self).__init__("runTest") self.time_major = time_major self.direction = direction self.num_directions = 2 if direction in bidirectional_list else 1 self.place = place def setUp(self): # Since `set_device` is global, set `set_device` in `setUp` rather than # `__init__` to avoid using an error device set by another test case. 
place = paddle.set_device(self.place) rnn1 = SimpleRNN( 16, 32, 2, time_major=self.time_major, direction=self.direction) mp = paddle.static.Program() sp = paddle.static.Program() with paddle.fluid.unique_name.guard(): with paddle.static.program_guard(mp, sp): rnn2 = paddle.nn.SimpleRNN( 16, 32, 2, time_major=self.time_major, direction=self.direction) exe = paddle.static.Executor(place) scope = paddle.fluid.Scope() with paddle.static.scope_guard(scope): exe.run(sp) convert_params_for_net_static(rnn1, rnn2, place) self.mp = mp self.sp = sp self.rnn1 = rnn1 self.rnn2 = rnn2 self.place = place self.executor = exe self.scope = scope def test_with_initial_state(self): mp = self.mp.clone().clone() sp = self.sp rnn1 = self.rnn1 rnn2 = self.rnn2 exe = self.executor scope = self.scope x = np.random.randn(12, 4, 16) if not self.time_major: x = np.transpose(x, [1, 0, 2]) prev_h = np.random.randn(2 * self.num_directions, 4, 32) y1, h1 = rnn1(x, prev_h) with paddle.fluid.unique_name.guard(): with paddle.static.program_guard(mp, sp): x_data = paddle.fluid.data( "input", [-1, -1, 16], dtype=paddle.framework.get_default_dtype()) init_h = paddle.fluid.data( "init_h", [2 * self.num_directions, -1, 32], dtype=paddle.framework.get_default_dtype()) y, h = rnn2(x_data, init_h) feed_dict = {x_data.name: x, init_h.name: prev_h} with paddle.static.scope_guard(scope): y2, h2 = exe.run(mp, feed=feed_dict, fetch_list=[y, h]) np.testing.assert_allclose(y1, y2, atol=1e-8, rtol=1e-5) np.testing.assert_allclose(h1, h2, atol=1e-8, rtol=1e-5) def test_with_zero_state(self): mp = self.mp.clone() sp = self.sp rnn1 = self.rnn1 rnn2 = self.rnn2 exe = self.executor scope = self.scope x = np.random.randn(12, 4, 16) if not self.time_major: x = np.transpose(x, [1, 0, 2]) y1, h1 = rnn1(x) with paddle.fluid.unique_name.guard(): with paddle.static.program_guard(mp, sp): x_data = paddle.fluid.data( "input", [-1, -1, 16], dtype=paddle.framework.get_default_dtype()) y, h = rnn2(x_data) feed_dict = {x_data.name: 
x} with paddle.static.scope_guard(scope): y2, h2 = exe.run(mp, feed=feed_dict, fetch_list=[y, h]) np.testing.assert_allclose(y1, y2, atol=1e-8, rtol=1e-5) np.testing.assert_allclose(h1, h2, atol=1e-8, rtol=1e-5) def test_with_input_lengths(self): mp = self.mp.clone() sp = self.sp rnn1 = self.rnn1 rnn2 = self.rnn2 exe = self.executor scope = self.scope x = np.random.randn(12, 4, 16) if not self.time_major: x = np.transpose(x, [1, 0, 2]) sequence_length = np.array([12, 10, 9, 8], dtype=np.int64) y1, h1 = rnn1(x, sequence_length=sequence_length) with paddle.fluid.unique_name.guard(): with paddle.static.program_guard(mp, sp): x_data = paddle.fluid.data( "input", [-1, -1, 16], dtype=paddle.framework.get_default_dtype()) seq_len = paddle.fluid.data("seq_len", [-1], dtype="int64") mask = sequence_mask(seq_len, dtype=paddle.get_default_dtype()) if self.time_major: mask = paddle.transpose(mask, [1, 0]) y, h = rnn2(x_data, sequence_length=seq_len) mask = paddle.unsqueeze(mask, -1) y = paddle.multiply(y, mask) feed_dict = {x_data.name: x, seq_len.name: sequence_length} with paddle.static.scope_guard(scope): y2, h2 = exe.run(mp, feed=feed_dict, fetch_list=[y, h]) np.testing.assert_allclose(y1, y2, atol=1e-8, rtol=1e-5) np.testing.assert_allclose(h1, h2, atol=1e-8, rtol=1e-5) def runTest(self): self.test_with_initial_state() self.test_with_zero_state() self.test_with_input_lengths() class TestGRU(unittest.TestCase): def __init__(self, time_major=True, direction="forward", place="cpu"): super(TestGRU, self).__init__("runTest") self.time_major = time_major self.direction = direction self.num_directions = 2 if direction in bidirectional_list else 1 self.place = place def setUp(self): # Since `set_device` is global, set `set_device` in `setUp` rather than # `__init__` to avoid using an error device set by another test case. 
place = paddle.set_device(self.place) rnn1 = GRU(16, 32, 2, time_major=self.time_major, direction=self.direction) mp = paddle.static.Program() sp = paddle.static.Program() with paddle.fluid.unique_name.guard(): with paddle.static.program_guard(mp, sp): rnn2 = paddle.nn.GRU(16, 32, 2, time_major=self.time_major, direction=self.direction) exe = paddle.static.Executor(place) scope = paddle.fluid.Scope() with paddle.static.scope_guard(scope): exe.run(sp) convert_params_for_net_static(rnn1, rnn2, place) self.mp = mp self.sp = sp self.rnn1 = rnn1 self.rnn2 = rnn2 self.place = place self.executor = exe self.scope = scope def test_with_initial_state(self): mp = self.mp.clone() sp = self.sp rnn1 = self.rnn1 rnn2 = self.rnn2 exe = self.executor scope = self.scope x = np.random.randn(12, 4, 16) if not self.time_major: x = np.transpose(x, [1, 0, 2]) prev_h = np.random.randn(2 * self.num_directions, 4, 32) y1, h1 = rnn1(x, prev_h) with paddle.fluid.unique_name.guard(): with paddle.static.program_guard(mp, sp): x_data = paddle.fluid.data( "input", [-1, -1, 16], dtype=paddle.framework.get_default_dtype()) init_h = paddle.fluid.data( "init_h", [2 * self.num_directions, -1, 32], dtype=paddle.framework.get_default_dtype()) y, h = rnn2(x_data, init_h) feed_dict = {x_data.name: x, init_h.name: prev_h} with paddle.static.scope_guard(scope): y2, h2 = exe.run(mp, feed=feed_dict, fetch_list=[y, h]) np.testing.assert_allclose(y1, y2, atol=1e-8, rtol=1e-5) np.testing.assert_allclose(h1, h2, atol=1e-8, rtol=1e-5) def test_with_zero_state(self): mp = self.mp.clone() sp = self.sp rnn1 = self.rnn1 rnn2 = self.rnn2 exe = self.executor scope = self.scope x = np.random.randn(12, 4, 16) if not self.time_major: x = np.transpose(x, [1, 0, 2]) y1, h1 = rnn1(x) with paddle.fluid.unique_name.guard(): with paddle.static.program_guard(mp, sp): x_data = paddle.fluid.data( "input", [-1, -1, 16], dtype=paddle.framework.get_default_dtype()) y, h = rnn2(x_data) feed_dict = {x_data.name: x} with 
paddle.static.scope_guard(scope): y2, h2 = exe.run(mp, feed=feed_dict, fetch_list=[y, h]) np.testing.assert_allclose(y1, y2, atol=1e-8, rtol=1e-5) np.testing.assert_allclose(h1, h2, atol=1e-8, rtol=1e-5) def test_with_input_lengths(self): mp = self.mp.clone() sp = self.sp rnn1 = self.rnn1 rnn2 = self.rnn2 exe = self.executor scope = self.scope x = np.random.randn(12, 4, 16) if not self.time_major: x = np.transpose(x, [1, 0, 2]) sequence_length = np.array([12, 10, 9, 8], dtype=np.int64) y1, h1 = rnn1(x, sequence_length=sequence_length) with paddle.fluid.unique_name.guard(): with paddle.static.program_guard(mp, sp): x_data = paddle.fluid.data( "input", [-1, -1, 16], dtype=paddle.framework.get_default_dtype()) seq_len = paddle.fluid.data("seq_len", [-1], dtype="int64") mask = sequence_mask(seq_len, dtype=paddle.get_default_dtype()) if self.time_major: mask = paddle.transpose(mask, [1, 0]) y, h = rnn2(x_data, sequence_length=seq_len) mask = paddle.unsqueeze(mask, -1) y = paddle.multiply(y, mask) feed_dict = {x_data.name: x, seq_len.name: sequence_length} with paddle.static.scope_guard(scope): y2, h2 = exe.run(mp, feed=feed_dict, fetch_list=[y, h]) np.testing.assert_allclose(y1, y2, atol=1e-8, rtol=1e-5) np.testing.assert_allclose(h1, h2, atol=1e-8, rtol=1e-5) def runTest(self): self.test_with_initial_state() self.test_with_zero_state() class TestLSTM(unittest.TestCase): def __init__(self, time_major=True, direction="forward", place="cpu"): super(TestLSTM, self).__init__("runTest") self.time_major = time_major self.direction = direction self.num_directions = 2 if direction in bidirectional_list else 1 self.place = place def setUp(self): # Since `set_device` is global, set `set_device` in `setUp` rather than # `__init__` to avoid using an error device set by another test case. 
place = paddle.set_device(self.place) rnn1 = LSTM( 16, 32, 2, time_major=self.time_major, direction=self.direction) mp = paddle.static.Program() sp = paddle.static.Program() with paddle.fluid.unique_name.guard(): with paddle.static.program_guard(mp, sp): rnn2 = paddle.nn.LSTM( 16, 32, 2, time_major=self.time_major, direction=self.direction) exe = paddle.static.Executor(place) scope = paddle.fluid.Scope() with paddle.static.scope_guard(scope): exe.run(sp) convert_params_for_net_static(rnn1, rnn2, place) self.mp = mp self.sp = sp self.rnn1 = rnn1 self.rnn2 = rnn2 self.place = place self.executor = exe self.scope = scope def test_with_initial_state(self): mp = self.mp.clone() sp = self.sp rnn1 = self.rnn1 rnn2 = self.rnn2 exe = self.executor scope = self.scope x = np.random.randn(12, 4, 16) if not self.time_major: x = np.transpose(x, [1, 0, 2]) prev_h = np.random.randn(2 * self.num_directions, 4, 32) prev_c = np.random.randn(2 * self.num_directions, 4, 32) y1, (h1, c1) = rnn1(x, (prev_h, prev_c)) with paddle.fluid.unique_name.guard(): with paddle.static.program_guard(mp, sp): x_data = paddle.fluid.data( "input", [-1, -1, 16], dtype=paddle.framework.get_default_dtype()) init_h = paddle.fluid.data( "init_h", [2 * self.num_directions, -1, 32], dtype=paddle.framework.get_default_dtype()) init_c = paddle.fluid.data( "init_c", [2 * self.num_directions, -1, 32], dtype=paddle.framework.get_default_dtype()) y, (h, c) = rnn2(x_data, (init_h, init_c)) feed_dict = {x_data.name: x, init_h.name: prev_h, init_c.name: prev_c} with paddle.static.scope_guard(scope): y2, h2, c2 = exe.run(mp, feed=feed_dict, fetch_list=[y, h, c]) np.testing.assert_allclose(y1, y2, atol=1e-8, rtol=1e-5) np.testing.assert_allclose(h1, h2, atol=1e-8, rtol=1e-5) np.testing.assert_allclose(c1, c2, atol=1e-8, rtol=1e-5) def test_with_zero_state(self): mp = self.mp.clone() sp = self.sp rnn1 = self.rnn1 rnn2 = self.rnn2 exe = self.executor scope = self.scope x = np.random.randn(12, 4, 16) if not self.time_major: 
x = np.transpose(x, [1, 0, 2]) y1, (h1, c1) = rnn1(x) with paddle.fluid.unique_name.guard(): with paddle.static.program_guard(mp, sp): x_data = paddle.fluid.data( "input", [-1, -1, 16], dtype=paddle.framework.get_default_dtype()) y, (h, c) = rnn2(x_data) feed_dict = {x_data.name: x} with paddle.static.scope_guard(scope): y2, h2, c2 = exe.run(mp, feed=feed_dict, fetch_list=[y, h, c]) np.testing.assert_allclose(y1, y2, atol=1e-8, rtol=1e-5) np.testing.assert_allclose(h1, h2, atol=1e-8, rtol=1e-5) np.testing.assert_allclose(c1, c2, atol=1e-8, rtol=1e-5) def test_with_input_lengths(self): mp = self.mp.clone() sp = self.sp rnn1 = self.rnn1 rnn2 = self.rnn2 exe = self.executor scope = self.scope x = np.random.randn(12, 4, 16) if not self.time_major: x = np.transpose(x, [1, 0, 2]) sequence_length = np.array([12, 10, 9, 8], dtype=np.int64) y1, (h1, c1) = rnn1(x, sequence_length=sequence_length) with paddle.fluid.unique_name.guard(): with paddle.static.program_guard(mp, sp): x_data = paddle.fluid.data( "input", [-1, -1, 16], dtype=paddle.framework.get_default_dtype()) seq_len = paddle.fluid.data("seq_len", [-1], dtype="int64") mask = sequence_mask(seq_len, dtype=paddle.get_default_dtype()) if self.time_major: mask = paddle.transpose(mask, [1, 0]) y, (h, c) = rnn2(x_data, sequence_length=seq_len) mask = paddle.unsqueeze(mask, -1) y = paddle.multiply(y, mask) feed_dict = {x_data.name: x, seq_len.name: sequence_length} with paddle.static.scope_guard(scope): y2, h2, c2 = exe.run(mp, feed=feed_dict, fetch_list=[y, h, c]) np.testing.assert_allclose(y1, y2, atol=1e-8, rtol=1e-5) np.testing.assert_allclose(h1, h2, atol=1e-8, rtol=1e-5) np.testing.assert_allclose(c1, c2, atol=1e-8, rtol=1e-5) def runTest(self): self.test_with_initial_state() self.test_with_zero_state() self.test_with_input_lengths() def load_tests(loader, tests, pattern): suite = unittest.TestSuite() devices = ["cpu", "gpu"] if paddle.fluid.is_compiled_with_cuda() \ else ["cpu"] for direction in ["forward", 
"bidirectional", "bidirect"]: for time_major in [True, False]: for device in devices: for test_class in [TestSimpleRNN, TestLSTM, TestGRU]: suite.addTest(test_class(time_major, direction, device)) return suite if __name__ == "__main__": unittest.main()
35.665289
79
0.565635
2,312
17,262
4.056661
0.086505
0.038384
0.033266
0.051498
0.881331
0.876959
0.876853
0.876426
0.876426
0.876426
0
0.040176
0.316533
17,262
483
80
35.73913
0.754789
0.057757
0
0.881402
0
0
0.014097
0
0
0
0
0
0.056604
1
0.051213
false
0
0.016173
0
0.078167
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
f8da0cf6fb0f466b4432f8e0102c5597411e346f
8,760
py
Python
etl/parsers/etw/Microsoft_Windows_Kernel_Network.py
IMULMUL/etl-parser
76b7c046866ce0469cd129ee3f7bb3799b34e271
[ "Apache-2.0" ]
104
2020-03-04T14:31:31.000Z
2022-03-28T02:59:36.000Z
etl/parsers/etw/Microsoft_Windows_Kernel_Network.py
IMULMUL/etl-parser
76b7c046866ce0469cd129ee3f7bb3799b34e271
[ "Apache-2.0" ]
7
2020-04-20T09:18:39.000Z
2022-03-19T17:06:19.000Z
etl/parsers/etw/Microsoft_Windows_Kernel_Network.py
IMULMUL/etl-parser
76b7c046866ce0469cd129ee3f7bb3799b34e271
[ "Apache-2.0" ]
16
2020-03-05T18:55:59.000Z
2022-03-01T10:19:28.000Z
# -*- coding: utf-8 -*- """ Microsoft-Windows-Kernel-Network GUID : 7dd42a49-5329-4832-8dfd-43d979153a88 """ from construct import Int8sl, Int8ul, Int16ul, Int16sl, Int32sl, Int32ul, Int64sl, Int64ul, Bytes, Double, Float32l, Struct from etl.utils import WString, CString, SystemTime, Guid from etl.dtyp import Sid from etl.parsers.etw.core import Etw, declare, guid @declare(guid=guid("7dd42a49-5329-4832-8dfd-43d979153a88"), event_id=10, version=0) class Microsoft_Windows_Kernel_Network_10_0(Etw): pattern = Struct( "PID" / Int32ul, "size" / Int32ul, "daddr" / Int32ul, "saddr" / Int32ul, "dport" / Int16ul, "sport" / Int16ul, "startime" / Int32ul, "endtime" / Int32ul, "seqnum" / Int32ul, "connid" / Int32ul ) @declare(guid=guid("7dd42a49-5329-4832-8dfd-43d979153a88"), event_id=11, version=0) class Microsoft_Windows_Kernel_Network_11_0(Etw): pattern = Struct( "PID" / Int32ul, "size" / Int32ul, "daddr" / Int32ul, "saddr" / Int32ul, "dport" / Int16ul, "sport" / Int16ul, "seqnum" / Int32ul, "connid" / Int32ul ) @declare(guid=guid("7dd42a49-5329-4832-8dfd-43d979153a88"), event_id=12, version=0) class Microsoft_Windows_Kernel_Network_12_0(Etw): pattern = Struct( "PID" / Int32ul, "size" / Int32ul, "daddr" / Int32ul, "saddr" / Int32ul, "dport" / Int16ul, "sport" / Int16ul, "mss" / Int16ul, "sackopt" / Int16ul, "tsopt" / Int16ul, "wsopt" / Int16ul, "rcvwin" / Int32ul, "rcvwinscale" / Int16ul, "sndwinscale" / Int16ul, "seqnum" / Int32ul, "connid" / Int32ul ) @declare(guid=guid("7dd42a49-5329-4832-8dfd-43d979153a88"), event_id=13, version=0) class Microsoft_Windows_Kernel_Network_13_0(Etw): pattern = Struct( "PID" / Int32ul, "size" / Int32ul, "daddr" / Int32ul, "saddr" / Int32ul, "dport" / Int16ul, "sport" / Int16ul, "seqnum" / Int32ul, "connid" / Int32ul ) @declare(guid=guid("7dd42a49-5329-4832-8dfd-43d979153a88"), event_id=14, version=0) class Microsoft_Windows_Kernel_Network_14_0(Etw): pattern = Struct( "PID" / Int32ul, "size" / Int32ul, "daddr" / Int32ul, "saddr" / Int32ul, 
"dport" / Int16ul, "sport" / Int16ul, "seqnum" / Int32ul, "connid" / Int32ul ) @declare(guid=guid("7dd42a49-5329-4832-8dfd-43d979153a88"), event_id=15, version=0) class Microsoft_Windows_Kernel_Network_15_0(Etw): pattern = Struct( "PID" / Int32ul, "size" / Int32ul, "daddr" / Int32ul, "saddr" / Int32ul, "dport" / Int16ul, "sport" / Int16ul, "mss" / Int16ul, "sackopt" / Int16ul, "tsopt" / Int16ul, "wsopt" / Int16ul, "rcvwin" / Int32ul, "rcvwinscale" / Int16ul, "sndwinscale" / Int16ul, "seqnum" / Int32ul, "connid" / Int32ul ) @declare(guid=guid("7dd42a49-5329-4832-8dfd-43d979153a88"), event_id=16, version=0) class Microsoft_Windows_Kernel_Network_16_0(Etw): pattern = Struct( "PID" / Int32ul, "size" / Int32ul, "daddr" / Int32ul, "saddr" / Int32ul, "dport" / Int16ul, "sport" / Int16ul, "seqnum" / Int32ul, "connid" / Int32ul ) @declare(guid=guid("7dd42a49-5329-4832-8dfd-43d979153a88"), event_id=17, version=0) class Microsoft_Windows_Kernel_Network_17_0(Etw): pattern = Struct( "Proto" / Int16ul, "FailureCode" / Int16ul ) @declare(guid=guid("7dd42a49-5329-4832-8dfd-43d979153a88"), event_id=18, version=0) class Microsoft_Windows_Kernel_Network_18_0(Etw): pattern = Struct( "PID" / Int32ul, "size" / Int32ul, "daddr" / Int32ul, "saddr" / Int32ul, "dport" / Int16ul, "sport" / Int16ul, "seqnum" / Int32ul, "connid" / Int32ul ) @declare(guid=guid("7dd42a49-5329-4832-8dfd-43d979153a88"), event_id=26, version=0) class Microsoft_Windows_Kernel_Network_26_0(Etw): pattern = Struct( "PID" / Int32ul, "size" / Int32ul, "dport" / Int16ul, "sport" / Int16ul, "startime" / Int32ul, "endtime" / Int32ul, "seqnum" / Int32ul, "connid" / Int32ul ) @declare(guid=guid("7dd42a49-5329-4832-8dfd-43d979153a88"), event_id=27, version=0) class Microsoft_Windows_Kernel_Network_27_0(Etw): pattern = Struct( "PID" / Int32ul, "size" / Int32ul, "dport" / Int16ul, "sport" / Int16ul, "seqnum" / Int32ul, "connid" / Int32ul ) @declare(guid=guid("7dd42a49-5329-4832-8dfd-43d979153a88"), event_id=28, version=0) class 
Microsoft_Windows_Kernel_Network_28_0(Etw): pattern = Struct( "PID" / Int32ul, "size" / Int32ul, "dport" / Int16ul, "sport" / Int16ul, "mss" / Int16ul, "sackopt" / Int16ul, "tsopt" / Int16ul, "wsopt" / Int16ul, "rcvwin" / Int32ul, "rcvwinscale" / Int16ul, "sndwinscale" / Int16ul, "seqnum" / Int32ul, "connid" / Int32ul ) @declare(guid=guid("7dd42a49-5329-4832-8dfd-43d979153a88"), event_id=29, version=0) class Microsoft_Windows_Kernel_Network_29_0(Etw): pattern = Struct( "PID" / Int32ul, "size" / Int32ul, "dport" / Int16ul, "sport" / Int16ul, "seqnum" / Int32ul, "connid" / Int32ul ) @declare(guid=guid("7dd42a49-5329-4832-8dfd-43d979153a88"), event_id=30, version=0) class Microsoft_Windows_Kernel_Network_30_0(Etw): pattern = Struct( "PID" / Int32ul, "size" / Int32ul, "dport" / Int16ul, "sport" / Int16ul, "seqnum" / Int32ul, "connid" / Int32ul ) @declare(guid=guid("7dd42a49-5329-4832-8dfd-43d979153a88"), event_id=31, version=0) class Microsoft_Windows_Kernel_Network_31_0(Etw): pattern = Struct( "PID" / Int32ul, "size" / Int32ul, "dport" / Int16ul, "sport" / Int16ul, "mss" / Int16ul, "sackopt" / Int16ul, "tsopt" / Int16ul, "wsopt" / Int16ul, "rcvwin" / Int32ul, "rcvwinscale" / Int16ul, "sndwinscale" / Int16ul, "seqnum" / Int32ul, "connid" / Int32ul ) @declare(guid=guid("7dd42a49-5329-4832-8dfd-43d979153a88"), event_id=32, version=0) class Microsoft_Windows_Kernel_Network_32_0(Etw): pattern = Struct( "PID" / Int32ul, "size" / Int32ul, "dport" / Int16ul, "sport" / Int16ul, "seqnum" / Int32ul, "connid" / Int32ul ) @declare(guid=guid("7dd42a49-5329-4832-8dfd-43d979153a88"), event_id=34, version=0) class Microsoft_Windows_Kernel_Network_34_0(Etw): pattern = Struct( "PID" / Int32ul, "size" / Int32ul, "dport" / Int16ul, "sport" / Int16ul, "seqnum" / Int32ul, "connid" / Int32ul ) @declare(guid=guid("7dd42a49-5329-4832-8dfd-43d979153a88"), event_id=42, version=0) class Microsoft_Windows_Kernel_Network_42_0(Etw): pattern = Struct( "PID" / Int32ul, "size" / Int32ul, "daddr" / 
Int32ul, "saddr" / Int32ul, "dport" / Int16ul, "sport" / Int16ul, "seqnum" / Int32ul, "connid" / Int32ul ) @declare(guid=guid("7dd42a49-5329-4832-8dfd-43d979153a88"), event_id=43, version=0) class Microsoft_Windows_Kernel_Network_43_0(Etw): pattern = Struct( "PID" / Int32ul, "size" / Int32ul, "daddr" / Int32ul, "saddr" / Int32ul, "dport" / Int16ul, "sport" / Int16ul, "seqnum" / Int32ul, "connid" / Int32ul ) @declare(guid=guid("7dd42a49-5329-4832-8dfd-43d979153a88"), event_id=49, version=0) class Microsoft_Windows_Kernel_Network_49_0(Etw): pattern = Struct( "Proto" / Int16ul, "FailureCode" / Int16ul ) @declare(guid=guid("7dd42a49-5329-4832-8dfd-43d979153a88"), event_id=58, version=0) class Microsoft_Windows_Kernel_Network_58_0(Etw): pattern = Struct( "PID" / Int32ul, "size" / Int32ul, "dport" / Int16ul, "sport" / Int16ul, "seqnum" / Int32ul, "connid" / Int32ul ) @declare(guid=guid("7dd42a49-5329-4832-8dfd-43d979153a88"), event_id=59, version=0) class Microsoft_Windows_Kernel_Network_59_0(Etw): pattern = Struct( "PID" / Int32ul, "size" / Int32ul, "dport" / Int16ul, "sport" / Int16ul, "seqnum" / Int32ul, "connid" / Int32ul )
27.460815
123
0.588128
908
8,760
5.528634
0.094714
0.073307
0.100797
0.132869
0.933865
0.933865
0.926693
0.742629
0.742629
0.742629
0
0.165514
0.273744
8,760
318
124
27.54717
0.623546
0.011301
0
0.738806
0
0
0.199491
0.09154
0
0
0
0
0
1
0
false
0
0.014925
0
0.179104
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
f8db93ac372020837533a75894709c246936f4eb
27,038
py
Python
sdk/python/pulumi_gcp/kms/registry.py
sisisin/pulumi-gcp
af6681d70ea457843409110c1324817fe55f68ad
[ "ECL-2.0", "Apache-2.0" ]
121
2018-06-18T19:16:42.000Z
2022-03-31T06:06:48.000Z
sdk/python/pulumi_gcp/kms/registry.py
sisisin/pulumi-gcp
af6681d70ea457843409110c1324817fe55f68ad
[ "ECL-2.0", "Apache-2.0" ]
492
2018-06-22T19:41:03.000Z
2022-03-31T15:33:53.000Z
sdk/python/pulumi_gcp/kms/registry.py
sisisin/pulumi-gcp
af6681d70ea457843409110c1324817fe55f68ad
[ "ECL-2.0", "Apache-2.0" ]
43
2018-06-19T01:43:13.000Z
2022-03-23T22:43:37.000Z
# coding=utf-8 # *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. *** # *** Do not edit by hand unless you're certain you know what you are doing! *** import warnings import pulumi import pulumi.runtime from typing import Any, Mapping, Optional, Sequence, Union, overload from .. import _utilities from . import outputs from ._inputs import * __all__ = ['RegistryArgs', 'Registry'] @pulumi.input_type class RegistryArgs: def __init__(__self__, *, credentials: Optional[pulumi.Input[Sequence[pulumi.Input['RegistryCredentialArgs']]]] = None, event_notification_configs: Optional[pulumi.Input[Sequence[pulumi.Input['RegistryEventNotificationConfigItemArgs']]]] = None, http_config: Optional[pulumi.Input[Mapping[str, Any]]] = None, log_level: Optional[pulumi.Input[str]] = None, mqtt_config: Optional[pulumi.Input[Mapping[str, Any]]] = None, name: Optional[pulumi.Input[str]] = None, project: Optional[pulumi.Input[str]] = None, region: Optional[pulumi.Input[str]] = None, state_notification_config: Optional[pulumi.Input[Mapping[str, Any]]] = None): """ The set of arguments for constructing a Registry resource. :param pulumi.Input[Sequence[pulumi.Input['RegistryCredentialArgs']]] credentials: List of public key certificates to authenticate devices. :param pulumi.Input[Sequence[pulumi.Input['RegistryEventNotificationConfigItemArgs']]] event_notification_configs: List of configurations for event notifications, such as PubSub topics to publish device events to. :param pulumi.Input[Mapping[str, Any]] http_config: Activate or deactivate HTTP. :param pulumi.Input[str] log_level: The default logging verbosity for activity from devices in this registry. Specifies which events should be written to logs. For example, if the LogLevel is ERROR, only events that terminate in errors will be logged. LogLevel is inclusive; enabling INFO logging will also enable ERROR logging. 
Default value: "NONE" Possible values: ["NONE", "ERROR", "INFO", "DEBUG"] :param pulumi.Input[Mapping[str, Any]] mqtt_config: Activate or deactivate MQTT. :param pulumi.Input[str] name: A unique name for the resource, required by device registry. :param pulumi.Input[str] region: The region in which the created registry should reside. If it is not provided, the provider region is used. :param pulumi.Input[Mapping[str, Any]] state_notification_config: A PubSub topic to publish device state updates. """ if credentials is not None: pulumi.set(__self__, "credentials", credentials) if event_notification_configs is not None: pulumi.set(__self__, "event_notification_configs", event_notification_configs) if http_config is not None: pulumi.set(__self__, "http_config", http_config) if log_level is not None: pulumi.set(__self__, "log_level", log_level) if mqtt_config is not None: pulumi.set(__self__, "mqtt_config", mqtt_config) if name is not None: pulumi.set(__self__, "name", name) if project is not None: pulumi.set(__self__, "project", project) if region is not None: pulumi.set(__self__, "region", region) if state_notification_config is not None: pulumi.set(__self__, "state_notification_config", state_notification_config) @property @pulumi.getter def credentials(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['RegistryCredentialArgs']]]]: """ List of public key certificates to authenticate devices. """ return pulumi.get(self, "credentials") @credentials.setter def credentials(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['RegistryCredentialArgs']]]]): pulumi.set(self, "credentials", value) @property @pulumi.getter(name="eventNotificationConfigs") def event_notification_configs(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['RegistryEventNotificationConfigItemArgs']]]]: """ List of configurations for event notifications, such as PubSub topics to publish device events to. 
""" return pulumi.get(self, "event_notification_configs") @event_notification_configs.setter def event_notification_configs(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['RegistryEventNotificationConfigItemArgs']]]]): pulumi.set(self, "event_notification_configs", value) @property @pulumi.getter(name="httpConfig") def http_config(self) -> Optional[pulumi.Input[Mapping[str, Any]]]: """ Activate or deactivate HTTP. """ return pulumi.get(self, "http_config") @http_config.setter def http_config(self, value: Optional[pulumi.Input[Mapping[str, Any]]]): pulumi.set(self, "http_config", value) @property @pulumi.getter(name="logLevel") def log_level(self) -> Optional[pulumi.Input[str]]: """ The default logging verbosity for activity from devices in this registry. Specifies which events should be written to logs. For example, if the LogLevel is ERROR, only events that terminate in errors will be logged. LogLevel is inclusive; enabling INFO logging will also enable ERROR logging. Default value: "NONE" Possible values: ["NONE", "ERROR", "INFO", "DEBUG"] """ return pulumi.get(self, "log_level") @log_level.setter def log_level(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "log_level", value) @property @pulumi.getter(name="mqttConfig") def mqtt_config(self) -> Optional[pulumi.Input[Mapping[str, Any]]]: """ Activate or deactivate MQTT. """ return pulumi.get(self, "mqtt_config") @mqtt_config.setter def mqtt_config(self, value: Optional[pulumi.Input[Mapping[str, Any]]]): pulumi.set(self, "mqtt_config", value) @property @pulumi.getter def name(self) -> Optional[pulumi.Input[str]]: """ A unique name for the resource, required by device registry. 
""" return pulumi.get(self, "name") @name.setter def name(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "name", value) @property @pulumi.getter def project(self) -> Optional[pulumi.Input[str]]: return pulumi.get(self, "project") @project.setter def project(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "project", value) @property @pulumi.getter def region(self) -> Optional[pulumi.Input[str]]: """ The region in which the created registry should reside. If it is not provided, the provider region is used. """ return pulumi.get(self, "region") @region.setter def region(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "region", value) @property @pulumi.getter(name="stateNotificationConfig") def state_notification_config(self) -> Optional[pulumi.Input[Mapping[str, Any]]]: """ A PubSub topic to publish device state updates. """ return pulumi.get(self, "state_notification_config") @state_notification_config.setter def state_notification_config(self, value: Optional[pulumi.Input[Mapping[str, Any]]]): pulumi.set(self, "state_notification_config", value) @pulumi.input_type class _RegistryState: def __init__(__self__, *, credentials: Optional[pulumi.Input[Sequence[pulumi.Input['RegistryCredentialArgs']]]] = None, event_notification_configs: Optional[pulumi.Input[Sequence[pulumi.Input['RegistryEventNotificationConfigItemArgs']]]] = None, http_config: Optional[pulumi.Input[Mapping[str, Any]]] = None, log_level: Optional[pulumi.Input[str]] = None, mqtt_config: Optional[pulumi.Input[Mapping[str, Any]]] = None, name: Optional[pulumi.Input[str]] = None, project: Optional[pulumi.Input[str]] = None, region: Optional[pulumi.Input[str]] = None, state_notification_config: Optional[pulumi.Input[Mapping[str, Any]]] = None): """ Input properties used for looking up and filtering Registry resources. :param pulumi.Input[Sequence[pulumi.Input['RegistryCredentialArgs']]] credentials: List of public key certificates to authenticate devices. 
:param pulumi.Input[Sequence[pulumi.Input['RegistryEventNotificationConfigItemArgs']]] event_notification_configs: List of configurations for event notifications, such as PubSub topics to publish device events to. :param pulumi.Input[Mapping[str, Any]] http_config: Activate or deactivate HTTP. :param pulumi.Input[str] log_level: The default logging verbosity for activity from devices in this registry. Specifies which events should be written to logs. For example, if the LogLevel is ERROR, only events that terminate in errors will be logged. LogLevel is inclusive; enabling INFO logging will also enable ERROR logging. Default value: "NONE" Possible values: ["NONE", "ERROR", "INFO", "DEBUG"] :param pulumi.Input[Mapping[str, Any]] mqtt_config: Activate or deactivate MQTT. :param pulumi.Input[str] name: A unique name for the resource, required by device registry. :param pulumi.Input[str] region: The region in which the created registry should reside. If it is not provided, the provider region is used. :param pulumi.Input[Mapping[str, Any]] state_notification_config: A PubSub topic to publish device state updates. 
""" if credentials is not None: pulumi.set(__self__, "credentials", credentials) if event_notification_configs is not None: pulumi.set(__self__, "event_notification_configs", event_notification_configs) if http_config is not None: pulumi.set(__self__, "http_config", http_config) if log_level is not None: pulumi.set(__self__, "log_level", log_level) if mqtt_config is not None: pulumi.set(__self__, "mqtt_config", mqtt_config) if name is not None: pulumi.set(__self__, "name", name) if project is not None: pulumi.set(__self__, "project", project) if region is not None: pulumi.set(__self__, "region", region) if state_notification_config is not None: pulumi.set(__self__, "state_notification_config", state_notification_config) @property @pulumi.getter def credentials(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['RegistryCredentialArgs']]]]: """ List of public key certificates to authenticate devices. """ return pulumi.get(self, "credentials") @credentials.setter def credentials(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['RegistryCredentialArgs']]]]): pulumi.set(self, "credentials", value) @property @pulumi.getter(name="eventNotificationConfigs") def event_notification_configs(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['RegistryEventNotificationConfigItemArgs']]]]: """ List of configurations for event notifications, such as PubSub topics to publish device events to. """ return pulumi.get(self, "event_notification_configs") @event_notification_configs.setter def event_notification_configs(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['RegistryEventNotificationConfigItemArgs']]]]): pulumi.set(self, "event_notification_configs", value) @property @pulumi.getter(name="httpConfig") def http_config(self) -> Optional[pulumi.Input[Mapping[str, Any]]]: """ Activate or deactivate HTTP. 
""" return pulumi.get(self, "http_config") @http_config.setter def http_config(self, value: Optional[pulumi.Input[Mapping[str, Any]]]): pulumi.set(self, "http_config", value) @property @pulumi.getter(name="logLevel") def log_level(self) -> Optional[pulumi.Input[str]]: """ The default logging verbosity for activity from devices in this registry. Specifies which events should be written to logs. For example, if the LogLevel is ERROR, only events that terminate in errors will be logged. LogLevel is inclusive; enabling INFO logging will also enable ERROR logging. Default value: "NONE" Possible values: ["NONE", "ERROR", "INFO", "DEBUG"] """ return pulumi.get(self, "log_level") @log_level.setter def log_level(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "log_level", value) @property @pulumi.getter(name="mqttConfig") def mqtt_config(self) -> Optional[pulumi.Input[Mapping[str, Any]]]: """ Activate or deactivate MQTT. """ return pulumi.get(self, "mqtt_config") @mqtt_config.setter def mqtt_config(self, value: Optional[pulumi.Input[Mapping[str, Any]]]): pulumi.set(self, "mqtt_config", value) @property @pulumi.getter def name(self) -> Optional[pulumi.Input[str]]: """ A unique name for the resource, required by device registry. """ return pulumi.get(self, "name") @name.setter def name(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "name", value) @property @pulumi.getter def project(self) -> Optional[pulumi.Input[str]]: return pulumi.get(self, "project") @project.setter def project(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "project", value) @property @pulumi.getter def region(self) -> Optional[pulumi.Input[str]]: """ The region in which the created registry should reside. If it is not provided, the provider region is used. 
""" return pulumi.get(self, "region") @region.setter def region(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "region", value) @property @pulumi.getter(name="stateNotificationConfig") def state_notification_config(self) -> Optional[pulumi.Input[Mapping[str, Any]]]: """ A PubSub topic to publish device state updates. """ return pulumi.get(self, "state_notification_config") @state_notification_config.setter def state_notification_config(self, value: Optional[pulumi.Input[Mapping[str, Any]]]): pulumi.set(self, "state_notification_config", value) warnings.warn("""gcp.kms.Registry has been deprecated in favor of gcp.iot.Registry""", DeprecationWarning) class Registry(pulumi.CustomResource): warnings.warn("""gcp.kms.Registry has been deprecated in favor of gcp.iot.Registry""", DeprecationWarning) @overload def __init__(__self__, resource_name: str, opts: Optional[pulumi.ResourceOptions] = None, credentials: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['RegistryCredentialArgs']]]]] = None, event_notification_configs: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['RegistryEventNotificationConfigItemArgs']]]]] = None, http_config: Optional[pulumi.Input[Mapping[str, Any]]] = None, log_level: Optional[pulumi.Input[str]] = None, mqtt_config: Optional[pulumi.Input[Mapping[str, Any]]] = None, name: Optional[pulumi.Input[str]] = None, project: Optional[pulumi.Input[str]] = None, region: Optional[pulumi.Input[str]] = None, state_notification_config: Optional[pulumi.Input[Mapping[str, Any]]] = None, __props__=None): """ Create a Registry resource with the given unique name, props, and options. :param str resource_name: The name of the resource. :param pulumi.ResourceOptions opts: Options for the resource. :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['RegistryCredentialArgs']]]] credentials: List of public key certificates to authenticate devices. 
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['RegistryEventNotificationConfigItemArgs']]]] event_notification_configs: List of configurations for event notifications, such as PubSub topics to publish device events to. :param pulumi.Input[Mapping[str, Any]] http_config: Activate or deactivate HTTP. :param pulumi.Input[str] log_level: The default logging verbosity for activity from devices in this registry. Specifies which events should be written to logs. For example, if the LogLevel is ERROR, only events that terminate in errors will be logged. LogLevel is inclusive; enabling INFO logging will also enable ERROR logging. Default value: "NONE" Possible values: ["NONE", "ERROR", "INFO", "DEBUG"] :param pulumi.Input[Mapping[str, Any]] mqtt_config: Activate or deactivate MQTT. :param pulumi.Input[str] name: A unique name for the resource, required by device registry. :param pulumi.Input[str] region: The region in which the created registry should reside. If it is not provided, the provider region is used. :param pulumi.Input[Mapping[str, Any]] state_notification_config: A PubSub topic to publish device state updates. """ ... @overload def __init__(__self__, resource_name: str, args: Optional[RegistryArgs] = None, opts: Optional[pulumi.ResourceOptions] = None): """ Create a Registry resource with the given unique name, props, and options. :param str resource_name: The name of the resource. :param RegistryArgs args: The arguments to use to populate this resource's properties. :param pulumi.ResourceOptions opts: Options for the resource. """ ... 
def __init__(__self__, resource_name: str, *args, **kwargs): resource_args, opts = _utilities.get_resource_args_opts(RegistryArgs, pulumi.ResourceOptions, *args, **kwargs) if resource_args is not None: __self__._internal_init(resource_name, opts, **resource_args.__dict__) else: __self__._internal_init(resource_name, *args, **kwargs) def _internal_init(__self__, resource_name: str, opts: Optional[pulumi.ResourceOptions] = None, credentials: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['RegistryCredentialArgs']]]]] = None, event_notification_configs: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['RegistryEventNotificationConfigItemArgs']]]]] = None, http_config: Optional[pulumi.Input[Mapping[str, Any]]] = None, log_level: Optional[pulumi.Input[str]] = None, mqtt_config: Optional[pulumi.Input[Mapping[str, Any]]] = None, name: Optional[pulumi.Input[str]] = None, project: Optional[pulumi.Input[str]] = None, region: Optional[pulumi.Input[str]] = None, state_notification_config: Optional[pulumi.Input[Mapping[str, Any]]] = None, __props__=None): pulumi.log.warn("""Registry is deprecated: gcp.kms.Registry has been deprecated in favor of gcp.iot.Registry""") if opts is None: opts = pulumi.ResourceOptions() if not isinstance(opts, pulumi.ResourceOptions): raise TypeError('Expected resource options to be a ResourceOptions instance') if opts.version is None: opts.version = _utilities.get_version() if opts.id is None: if __props__ is not None: raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource') __props__ = RegistryArgs.__new__(RegistryArgs) __props__.__dict__["credentials"] = credentials __props__.__dict__["event_notification_configs"] = event_notification_configs __props__.__dict__["http_config"] = http_config __props__.__dict__["log_level"] = log_level __props__.__dict__["mqtt_config"] = mqtt_config __props__.__dict__["name"] = name __props__.__dict__["project"] = project 
__props__.__dict__["region"] = region __props__.__dict__["state_notification_config"] = state_notification_config super(Registry, __self__).__init__( 'gcp:kms/registry:Registry', resource_name, __props__, opts) @staticmethod def get(resource_name: str, id: pulumi.Input[str], opts: Optional[pulumi.ResourceOptions] = None, credentials: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['RegistryCredentialArgs']]]]] = None, event_notification_configs: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['RegistryEventNotificationConfigItemArgs']]]]] = None, http_config: Optional[pulumi.Input[Mapping[str, Any]]] = None, log_level: Optional[pulumi.Input[str]] = None, mqtt_config: Optional[pulumi.Input[Mapping[str, Any]]] = None, name: Optional[pulumi.Input[str]] = None, project: Optional[pulumi.Input[str]] = None, region: Optional[pulumi.Input[str]] = None, state_notification_config: Optional[pulumi.Input[Mapping[str, Any]]] = None) -> 'Registry': """ Get an existing Registry resource's state with the given name, id, and optional extra properties used to qualify the lookup. :param str resource_name: The unique name of the resulting resource. :param pulumi.Input[str] id: The unique provider ID of the resource to lookup. :param pulumi.ResourceOptions opts: Options for the resource. :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['RegistryCredentialArgs']]]] credentials: List of public key certificates to authenticate devices. :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['RegistryEventNotificationConfigItemArgs']]]] event_notification_configs: List of configurations for event notifications, such as PubSub topics to publish device events to. :param pulumi.Input[Mapping[str, Any]] http_config: Activate or deactivate HTTP. :param pulumi.Input[str] log_level: The default logging verbosity for activity from devices in this registry. Specifies which events should be written to logs. 
For example, if the LogLevel is ERROR, only events that terminate in errors will be logged. LogLevel is inclusive; enabling INFO logging will also enable ERROR logging. Default value: "NONE" Possible values: ["NONE", "ERROR", "INFO", "DEBUG"] :param pulumi.Input[Mapping[str, Any]] mqtt_config: Activate or deactivate MQTT. :param pulumi.Input[str] name: A unique name for the resource, required by device registry. :param pulumi.Input[str] region: The region in which the created registry should reside. If it is not provided, the provider region is used. :param pulumi.Input[Mapping[str, Any]] state_notification_config: A PubSub topic to publish device state updates. """ opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id)) __props__ = _RegistryState.__new__(_RegistryState) __props__.__dict__["credentials"] = credentials __props__.__dict__["event_notification_configs"] = event_notification_configs __props__.__dict__["http_config"] = http_config __props__.__dict__["log_level"] = log_level __props__.__dict__["mqtt_config"] = mqtt_config __props__.__dict__["name"] = name __props__.__dict__["project"] = project __props__.__dict__["region"] = region __props__.__dict__["state_notification_config"] = state_notification_config return Registry(resource_name, opts=opts, __props__=__props__) @property @pulumi.getter def credentials(self) -> pulumi.Output[Optional[Sequence['outputs.RegistryCredential']]]: """ List of public key certificates to authenticate devices. """ return pulumi.get(self, "credentials") @property @pulumi.getter(name="eventNotificationConfigs") def event_notification_configs(self) -> pulumi.Output[Sequence['outputs.RegistryEventNotificationConfigItem']]: """ List of configurations for event notifications, such as PubSub topics to publish device events to. 
""" return pulumi.get(self, "event_notification_configs") @property @pulumi.getter(name="httpConfig") def http_config(self) -> pulumi.Output[Mapping[str, Any]]: """ Activate or deactivate HTTP. """ return pulumi.get(self, "http_config") @property @pulumi.getter(name="logLevel") def log_level(self) -> pulumi.Output[Optional[str]]: """ The default logging verbosity for activity from devices in this registry. Specifies which events should be written to logs. For example, if the LogLevel is ERROR, only events that terminate in errors will be logged. LogLevel is inclusive; enabling INFO logging will also enable ERROR logging. Default value: "NONE" Possible values: ["NONE", "ERROR", "INFO", "DEBUG"] """ return pulumi.get(self, "log_level") @property @pulumi.getter(name="mqttConfig") def mqtt_config(self) -> pulumi.Output[Mapping[str, Any]]: """ Activate or deactivate MQTT. """ return pulumi.get(self, "mqtt_config") @property @pulumi.getter def name(self) -> pulumi.Output[str]: """ A unique name for the resource, required by device registry. """ return pulumi.get(self, "name") @property @pulumi.getter def project(self) -> pulumi.Output[str]: return pulumi.get(self, "project") @property @pulumi.getter def region(self) -> pulumi.Output[str]: """ The region in which the created registry should reside. If it is not provided, the provider region is used. """ return pulumi.get(self, "region") @property @pulumi.getter(name="stateNotificationConfig") def state_notification_config(self) -> pulumi.Output[Optional[Mapping[str, Any]]]: """ A PubSub topic to publish device state updates. """ return pulumi.get(self, "state_notification_config")
50.256506
239
0.670908
3,119
27,038
5.63033
0.064444
0.089573
0.087637
0.046637
0.899208
0.892831
0.883435
0.878481
0.875577
0.868231
0
0.000048
0.2262
27,038
537
240
50.350093
0.839308
0.32262
0
0.839009
1
0
0.13191
0.072487
0
0
0
0
0
1
0.160991
false
0.003096
0.021672
0.009288
0.278638
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
8
5d391d7d91f7baf4982e710eb48f76b7da31c38d
4,802
py
Python
lib/Model.py
bcrafton/ssdfa
661f9059184fde6ba7ad1ca710c5b5a1954c5ea6
[ "MIT" ]
10
2019-01-23T17:07:59.000Z
2021-07-13T10:18:10.000Z
semantic-segmentation/lib/Model.py
bcrafton/icsrl-deep-learning
e3616982d1dda5f978d61d6591c91cb0da76ab02
[ "MIT" ]
1
2019-07-30T00:55:58.000Z
2019-09-17T13:31:48.000Z
semantic-segmentation/lib/Model.py
bcrafton/icsrl-deep-learning
e3616982d1dda5f978d61d6591c91cb0da76ab02
[ "MIT" ]
1
2019-07-28T17:28:30.000Z
2019-07-28T17:28:30.000Z
import tensorflow as tf
import numpy as np
np.set_printoptions(threshold=1000)

class Model:
    """A sequential stack of layers trained with BP, DFA, or LEL feedback.

    Each layer object is expected to provide (as demonstrated by the calls
    below):
      forward(x)     -> dict with keys 'aout' (activation) and 'cache',
      num_params()   -> int,
      get_weights()  -> iterable of (name, value) pairs,
      and bp / dfa / lel for the three backward-pass variants, each
      returning (dout_dict, grads_and_vars).
    """

    def __init__(self, layers : tuple):
        self.num_layers = len(layers)
        self.layers = layers

    def num_params(self):
        """Total trainable-parameter count across all layers."""
        return sum(l.num_params() for l in self.layers)

    def get_weights(self):
        """Merge every layer's (name, value) pairs into one dict."""
        weights = {}
        for l in self.layers:
            for key, value in l.get_weights():
                weights[key] = value
        return weights

    def _forward_all(self, X):
        """Run the forward pass; return the list of per-layer output dicts."""
        A = [None] * self.num_layers
        for ii in range(self.num_layers):
            inp = X if ii == 0 else A[ii - 1]['aout']
            A[ii] = self.layers[ii].forward(inp)
        return A

    def predict(self, X):
        """Forward X through every layer and return the final activation."""
        A = self._forward_all(X)
        return A[self.num_layers - 1]['aout']

    ####################################################################

    def _error_signal(self, A, Y):
        """Batch-size-normalized (softmax(logits) - Y) output error."""
        logits = A[self.num_layers - 1]['aout']
        E = tf.nn.softmax(logits) - Y
        N = tf.cast(tf.shape(logits)[0], dtype=tf.float32)
        return E / N

    def _backward(self, X, Y, step):
        """Shared backward driver for gvs / dfa_gvs / lel_gvs.

        `step(layer, AI, AO, E, DO, cache)` performs one layer's backward
        computation and returns (dout_dict, grads_and_vars).  AI is the
        layer's forward input (X for the first layer — the original code
        erroneously fed A[-1]['aout'], i.e. the layer's own output, when
        the network had exactly one layer), E is the output error, and DO
        is the feedback from the layer above (E itself at the top layer).
        """
        A = self._forward_all(X)
        E = self._error_signal(A, Y)
        D = [None] * self.num_layers
        grads_and_vars = []
        for ii in range(self.num_layers - 1, -1, -1):
            AI = X if ii == 0 else A[ii - 1]['aout']
            DO = E if ii == self.num_layers - 1 else D[ii + 1]['dout']
            D[ii], gvs = step(self.layers[ii], AI, A[ii]['aout'], E, DO,
                              A[ii]['cache'])
            grads_and_vars.extend(gvs)
        return grads_and_vars

    def gvs(self, X, Y):
        """Gradients and variables via standard backprop (layer.bp)."""
        return self._backward(
            X, Y,
            lambda l, AI, AO, E, DO, cache: l.bp(AI, AO, DO, cache))

    def dfa_gvs(self, X, Y):
        """Gradients and variables via direct feedback alignment (layer.dfa)."""
        return self._backward(
            X, Y,
            lambda l, AI, AO, E, DO, cache: l.dfa(AI, AO, E, DO, cache))

    def lel_gvs(self, X, Y):
        """Gradients and variables via local error learning (layer.lel)."""
        return self._backward(
            X, Y,
            lambda l, AI, AO, E, DO, cache: l.lel(AI, AO, E, DO, Y, cache))

    ####################################################################
32.013333
105
0.420242
652
4,802
2.986196
0.09816
0.05547
0.180277
0.093477
0.843349
0.829995
0.820236
0.820236
0.820236
0.818182
0
0.019218
0.39317
4,802
149
106
32.228188
0.648936
0
0
0.712963
0
0
0.037085
0
0
0
0
0
0
1
0.064815
false
0
0.018519
0
0.148148
0.009259
0
0
0
null
0
1
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
8
5d48d7505370a663f5097d7969f8ad12b5150ad5
152
py
Python
examples/software_defined_assets/software_defined_assets_tests/test_spark_weather_job.py
makotonium/dagster
f5d56514b7e7c5bca28ea14060316d242f51b71b
[ "Apache-2.0" ]
4,606
2018-06-21T17:45:20.000Z
2022-03-31T23:39:42.000Z
examples/software_defined_assets/software_defined_assets_tests/test_spark_weather_job.py
makotonium/dagster
f5d56514b7e7c5bca28ea14060316d242f51b71b
[ "Apache-2.0" ]
6,221
2018-06-12T04:36:01.000Z
2022-03-31T21:43:05.000Z
examples/software_defined_assets/software_defined_assets_tests/test_spark_weather_job.py
makotonium/dagster
f5d56514b7e7c5bca28ea14060316d242f51b71b
[ "Apache-2.0" ]
619
2018-08-22T22:43:09.000Z
2022-03-31T22:48:06.000Z
from software_defined_assets.spark_weather_job import weather_job


def test_airport_weather_job():
    """Smoke test: executing the weather job in-process should succeed."""
    result = weather_job.execute_in_process()
    assert result.success
25.333333
65
0.855263
22
152
5.409091
0.727273
0.336134
0
0
0
0
0
0
0
0
0
0
0.092105
152
5
66
30.4
0.862319
0
0
0
0
0
0
0
0
0
0
0
0.333333
1
0.333333
true
0
0.333333
0
0.666667
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
1
0
1
0
0
7
5373c1304cae10875a5a9f6505575d6e1fc4d283
22
py
Python
Unit1/Part1/7.py
Devansu-Yadav/BasicsOfPython
503ffb2f89da815d95e3eabf5b14de544729e705
[ "MIT" ]
1
2020-01-16T18:31:29.000Z
2020-01-16T18:31:29.000Z
Unit1/Part1/7.py
golashboy/BasicsOfPython
b4a8db65ff9b513f836cf68e8dd4bcaebe5072bb
[ "MIT" ]
null
null
null
Unit1/Part1/7.py
golashboy/BasicsOfPython
b4a8db65ff9b513f836cf68e8dd4bcaebe5072bb
[ "MIT" ]
null
null
null
# Bind both values with a single tuple-unpacking assignment, then display them.
a, b = 100, 200
print(a, b)
11
11
0.681818
7
22
2.142857
0.714286
0.266667
0
0
0
0
0
0
0
0
0
0.285714
0.045455
22
2
12
11
0.428571
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0
0
0
0.5
1
1
0
null
1
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
8
53bc6eec4922c6b267f60a3d886e8c2e179faef5
43,902
py
Python
code/deep-high-resolution-net.pytorch/lib/models/pose_resnext.py
SomaKishimoto/AwA-Pose
b9877d538af25d07a6e2f9fa0b5aa2fdf544db99
[ "MIT" ]
22
2019-08-12T09:35:48.000Z
2022-02-15T15:09:56.000Z
code/deep-high-resolution-net.pytorch/lib/models/pose_resnext.py
SomaKishimoto/AwA-Pose
b9877d538af25d07a6e2f9fa0b5aa2fdf544db99
[ "MIT" ]
10
2019-09-13T09:57:33.000Z
2021-07-19T12:15:59.000Z
code/deep-high-resolution-net.pytorch/lib/models/pose_resnext.py
SomaKishimoto/AwA-Pose
b9877d538af25d07a6e2f9fa0b5aa2fdf544db99
[ "MIT" ]
2
2019-09-29T12:18:28.000Z
2021-07-26T10:34:05.000Z
import torch import torch.nn as nn import os import logging # import torch.legacy.nn as lnn from functools import reduce from torch.autograd import Variable BN_MOMENTUM = 0.1 logger = logging.getLogger(__name__) class LambdaBase(nn.Sequential): def __init__(self, fn, *args): super(LambdaBase, self).__init__(*args) self.lambda_func = fn def forward_prepare(self, input): output = [] for module in self._modules.values(): output.append(module(input)) return output if output else input class Lambda(LambdaBase): def forward(self, input): return self.lambda_func(self.forward_prepare(input)) class LambdaMap(LambdaBase): def forward(self, input): return list(map(self.lambda_func, self.forward_prepare(input))) class LambdaReduce(LambdaBase): def forward(self, input): return reduce(self.lambda_func, self.forward_prepare(input)) class PoseResneXt(nn.Module): def __init__(self): self.inplanes = 2048 self.deconv_with_bias = False super(PoseResneXt, self).__init__() self.resnext_101_64x4d = nn.Sequential( # Sequential, nn.Conv2d(3, 64, (7, 7), (2, 2), (3, 3), 1, 1, bias=False), nn.BatchNorm2d(64), nn.ReLU(), nn.MaxPool2d((3, 3), (2, 2), (1, 1)), nn.Sequential( # Sequential, nn.Sequential( # Sequential, LambdaMap(lambda x: x, # ConcatTable, nn.Sequential( # Sequential, nn.Sequential( # Sequential, nn.Conv2d(64, 256, (1, 1), (1, 1), (0, 0), 1, 1, bias=False), nn.BatchNorm2d(256), nn.ReLU(), nn.Conv2d(256, 256, (3, 3), (1, 1), (1, 1), 1, 64, bias=False), nn.BatchNorm2d(256), nn.ReLU(), ), nn.Conv2d(256, 256, (1, 1), (1, 1), (0, 0), 1, 1, bias=False), nn.BatchNorm2d(256), ), nn.Sequential( # Sequential, nn.Conv2d(64, 256, (1, 1), (1, 1), (0, 0), 1, 1, bias=False), nn.BatchNorm2d(256), ), ), LambdaReduce(lambda x, y: x + y), # CAddTable, nn.ReLU(), ), nn.Sequential( # Sequential, LambdaMap(lambda x: x, # ConcatTable, nn.Sequential( # Sequential, nn.Sequential( # Sequential, nn.Conv2d(256, 256, (1, 1), (1, 1), (0, 0), 1, 1, bias=False), nn.BatchNorm2d(256), nn.ReLU(), nn.Conv2d(256, 
256, (3, 3), (1, 1), (1, 1), 1, 64, bias=False), nn.BatchNorm2d(256), nn.ReLU(), ), nn.Conv2d(256, 256, (1, 1), (1, 1), (0, 0), 1, 1, bias=False), nn.BatchNorm2d(256), ), Lambda(lambda x: x), # Identity, ), LambdaReduce(lambda x, y: x + y), # CAddTable, nn.ReLU(), ), nn.Sequential( # Sequential, LambdaMap(lambda x: x, # ConcatTable, nn.Sequential( # Sequential, nn.Sequential( # Sequential, nn.Conv2d(256, 256, (1, 1), (1, 1), (0, 0), 1, 1, bias=False), nn.BatchNorm2d(256), nn.ReLU(), nn.Conv2d(256, 256, (3, 3), (1, 1), (1, 1), 1, 64, bias=False), nn.BatchNorm2d(256), nn.ReLU(), ), nn.Conv2d(256, 256, (1, 1), (1, 1), (0, 0), 1, 1, bias=False), nn.BatchNorm2d(256), ), Lambda(lambda x: x), # Identity, ), LambdaReduce(lambda x, y: x + y), # CAddTable, nn.ReLU(), ), ), nn.Sequential( # Sequential, nn.Sequential( # Sequential, LambdaMap(lambda x: x, # ConcatTable, nn.Sequential( # Sequential, nn.Sequential( # Sequential, nn.Conv2d(256, 512, (1, 1), (1, 1), (0, 0), 1, 1, bias=False), nn.BatchNorm2d(512), nn.ReLU(), nn.Conv2d(512, 512, (3, 3), (2, 2), (1, 1), 1, 64, bias=False), nn.BatchNorm2d(512), nn.ReLU(), ), nn.Conv2d(512, 512, (1, 1), (1, 1), (0, 0), 1, 1, bias=False), nn.BatchNorm2d(512), ), nn.Sequential( # Sequential, nn.Conv2d(256, 512, (1, 1), (2, 2), (0, 0), 1, 1, bias=False), nn.BatchNorm2d(512), ), ), LambdaReduce(lambda x, y: x + y), # CAddTable, nn.ReLU(), ), nn.Sequential( # Sequential, LambdaMap(lambda x: x, # ConcatTable, nn.Sequential( # Sequential, nn.Sequential( # Sequential, nn.Conv2d(512, 512, (1, 1), (1, 1), (0, 0), 1, 1, bias=False), nn.BatchNorm2d(512), nn.ReLU(), nn.Conv2d(512, 512, (3, 3), (1, 1), (1, 1), 1, 64, bias=False), nn.BatchNorm2d(512), nn.ReLU(), ), nn.Conv2d(512, 512, (1, 1), (1, 1), (0, 0), 1, 1, bias=False), nn.BatchNorm2d(512), ), Lambda(lambda x: x), # Identity, ), LambdaReduce(lambda x, y: x + y), # CAddTable, nn.ReLU(), ), nn.Sequential( # Sequential, LambdaMap(lambda x: x, # ConcatTable, nn.Sequential( # Sequential, 
nn.Sequential( # Sequential, nn.Conv2d(512, 512, (1, 1), (1, 1), (0, 0), 1, 1, bias=False), nn.BatchNorm2d(512), nn.ReLU(), nn.Conv2d(512, 512, (3, 3), (1, 1), (1, 1), 1, 64, bias=False), nn.BatchNorm2d(512), nn.ReLU(), ), nn.Conv2d(512, 512, (1, 1), (1, 1), (0, 0), 1, 1, bias=False), nn.BatchNorm2d(512), ), Lambda(lambda x: x), # Identity, ), LambdaReduce(lambda x, y: x + y), # CAddTable, nn.ReLU(), ), nn.Sequential( # Sequential, LambdaMap(lambda x: x, # ConcatTable, nn.Sequential( # Sequential, nn.Sequential( # Sequential, nn.Conv2d(512, 512, (1, 1), (1, 1), (0, 0), 1, 1, bias=False), nn.BatchNorm2d(512), nn.ReLU(), nn.Conv2d(512, 512, (3, 3), (1, 1), (1, 1), 1, 64, bias=False), nn.BatchNorm2d(512), nn.ReLU(), ), nn.Conv2d(512, 512, (1, 1), (1, 1), (0, 0), 1, 1, bias=False), nn.BatchNorm2d(512), ), Lambda(lambda x: x), # Identity, ), LambdaReduce(lambda x, y: x + y), # CAddTable, nn.ReLU(), ), ), nn.Sequential( # Sequential, nn.Sequential( # Sequential, LambdaMap(lambda x: x, # ConcatTable, nn.Sequential( # Sequential, nn.Sequential( # Sequential, nn.Conv2d(512, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias=False), nn.BatchNorm2d(1024), nn.ReLU(), nn.Conv2d(1024, 1024, (3, 3), (2, 2), (1, 1), 1, 64, bias=False), nn.BatchNorm2d(1024), nn.ReLU(), ), nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias=False), nn.BatchNorm2d(1024), ), nn.Sequential( # Sequential, nn.Conv2d(512, 1024, (1, 1), (2, 2), (0, 0), 1, 1, bias=False), nn.BatchNorm2d(1024), ), ), LambdaReduce(lambda x, y: x + y), # CAddTable, nn.ReLU(), ), nn.Sequential( # Sequential, LambdaMap(lambda x: x, # ConcatTable, nn.Sequential( # Sequential, nn.Sequential( # Sequential, nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias=False), nn.BatchNorm2d(1024), nn.ReLU(), nn.Conv2d(1024, 1024, (3, 3), (1, 1), (1, 1), 1, 64, bias=False), nn.BatchNorm2d(1024), nn.ReLU(), ), nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias=False), nn.BatchNorm2d(1024), ), Lambda(lambda x: x), # Identity, ), 
LambdaReduce(lambda x, y: x + y), # CAddTable, nn.ReLU(), ), nn.Sequential( # Sequential, LambdaMap(lambda x: x, # ConcatTable, nn.Sequential( # Sequential, nn.Sequential( # Sequential, nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias=False), nn.BatchNorm2d(1024), nn.ReLU(), nn.Conv2d(1024, 1024, (3, 3), (1, 1), (1, 1), 1, 64, bias=False), nn.BatchNorm2d(1024), nn.ReLU(), ), nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias=False), nn.BatchNorm2d(1024), ), Lambda(lambda x: x), # Identity, ), LambdaReduce(lambda x, y: x + y), # CAddTable, nn.ReLU(), ), nn.Sequential( # Sequential, LambdaMap(lambda x: x, # ConcatTable, nn.Sequential( # Sequential, nn.Sequential( # Sequential, nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias=False), nn.BatchNorm2d(1024), nn.ReLU(), nn.Conv2d(1024, 1024, (3, 3), (1, 1), (1, 1), 1, 64, bias=False), nn.BatchNorm2d(1024), nn.ReLU(), ), nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias=False), nn.BatchNorm2d(1024), ), Lambda(lambda x: x), # Identity, ), LambdaReduce(lambda x, y: x + y), # CAddTable, nn.ReLU(), ), nn.Sequential( # Sequential, LambdaMap(lambda x: x, # ConcatTable, nn.Sequential( # Sequential, nn.Sequential( # Sequential, nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias=False), nn.BatchNorm2d(1024), nn.ReLU(), nn.Conv2d(1024, 1024, (3, 3), (1, 1), (1, 1), 1, 64, bias=False), nn.BatchNorm2d(1024), nn.ReLU(), ), nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias=False), nn.BatchNorm2d(1024), ), Lambda(lambda x: x), # Identity, ), LambdaReduce(lambda x, y: x + y), # CAddTable, nn.ReLU(), ), nn.Sequential( # Sequential, LambdaMap(lambda x: x, # ConcatTable, nn.Sequential( # Sequential, nn.Sequential( # Sequential, nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias=False), nn.BatchNorm2d(1024), nn.ReLU(), nn.Conv2d(1024, 1024, (3, 3), (1, 1), (1, 1), 1, 64, bias=False), nn.BatchNorm2d(1024), nn.ReLU(), ), nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias=False), 
nn.BatchNorm2d(1024), ), Lambda(lambda x: x), # Identity, ), LambdaReduce(lambda x, y: x + y), # CAddTable, nn.ReLU(), ), nn.Sequential( # Sequential, LambdaMap(lambda x: x, # ConcatTable, nn.Sequential( # Sequential, nn.Sequential( # Sequential, nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias=False), nn.BatchNorm2d(1024), nn.ReLU(), nn.Conv2d(1024, 1024, (3, 3), (1, 1), (1, 1), 1, 64, bias=False), nn.BatchNorm2d(1024), nn.ReLU(), ), nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias=False), nn.BatchNorm2d(1024), ), Lambda(lambda x: x), # Identity, ), LambdaReduce(lambda x, y: x + y), # CAddTable, nn.ReLU(), ), nn.Sequential( # Sequential, LambdaMap(lambda x: x, # ConcatTable, nn.Sequential( # Sequential, nn.Sequential( # Sequential, nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias=False), nn.BatchNorm2d(1024), nn.ReLU(), nn.Conv2d(1024, 1024, (3, 3), (1, 1), (1, 1), 1, 64, bias=False), nn.BatchNorm2d(1024), nn.ReLU(), ), nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias=False), nn.BatchNorm2d(1024), ), Lambda(lambda x: x), # Identity, ), LambdaReduce(lambda x, y: x + y), # CAddTable, nn.ReLU(), ), nn.Sequential( # Sequential, LambdaMap(lambda x: x, # ConcatTable, nn.Sequential( # Sequential, nn.Sequential( # Sequential, nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias=False), nn.BatchNorm2d(1024), nn.ReLU(), nn.Conv2d(1024, 1024, (3, 3), (1, 1), (1, 1), 1, 64, bias=False), nn.BatchNorm2d(1024), nn.ReLU(), ), nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias=False), nn.BatchNorm2d(1024), ), Lambda(lambda x: x), # Identity, ), LambdaReduce(lambda x, y: x + y), # CAddTable, nn.ReLU(), ), nn.Sequential( # Sequential, LambdaMap(lambda x: x, # ConcatTable, nn.Sequential( # Sequential, nn.Sequential( # Sequential, nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias=False), nn.BatchNorm2d(1024), nn.ReLU(), nn.Conv2d(1024, 1024, (3, 3), (1, 1), (1, 1), 1, 64, bias=False), nn.BatchNorm2d(1024), nn.ReLU(), ), nn.Conv2d(1024, 
1024, (1, 1), (1, 1), (0, 0), 1, 1, bias=False), nn.BatchNorm2d(1024), ), Lambda(lambda x: x), # Identity, ), LambdaReduce(lambda x, y: x + y), # CAddTable, nn.ReLU(), ), nn.Sequential( # Sequential, LambdaMap(lambda x: x, # ConcatTable, nn.Sequential( # Sequential, nn.Sequential( # Sequential, nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias=False), nn.BatchNorm2d(1024), nn.ReLU(), nn.Conv2d(1024, 1024, (3, 3), (1, 1), (1, 1), 1, 64, bias=False), nn.BatchNorm2d(1024), nn.ReLU(), ), nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias=False), nn.BatchNorm2d(1024), ), Lambda(lambda x: x), # Identity, ), LambdaReduce(lambda x, y: x + y), # CAddTable, nn.ReLU(), ), nn.Sequential( # Sequential, LambdaMap(lambda x: x, # ConcatTable, nn.Sequential( # Sequential, nn.Sequential( # Sequential, nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias=False), nn.BatchNorm2d(1024), nn.ReLU(), nn.Conv2d(1024, 1024, (3, 3), (1, 1), (1, 1), 1, 64, bias=False), nn.BatchNorm2d(1024), nn.ReLU(), ), nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias=False), nn.BatchNorm2d(1024), ), Lambda(lambda x: x), # Identity, ), LambdaReduce(lambda x, y: x + y), # CAddTable, nn.ReLU(), ), nn.Sequential( # Sequential, LambdaMap(lambda x: x, # ConcatTable, nn.Sequential( # Sequential, nn.Sequential( # Sequential, nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias=False), nn.BatchNorm2d(1024), nn.ReLU(), nn.Conv2d(1024, 1024, (3, 3), (1, 1), (1, 1), 1, 64, bias=False), nn.BatchNorm2d(1024), nn.ReLU(), ), nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias=False), nn.BatchNorm2d(1024), ), Lambda(lambda x: x), # Identity, ), LambdaReduce(lambda x, y: x + y), # CAddTable, nn.ReLU(), ), nn.Sequential( # Sequential, LambdaMap(lambda x: x, # ConcatTable, nn.Sequential( # Sequential, nn.Sequential( # Sequential, nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias=False), nn.BatchNorm2d(1024), nn.ReLU(), nn.Conv2d(1024, 1024, (3, 3), (1, 1), (1, 1), 1, 64, bias=False), 
nn.BatchNorm2d(1024), nn.ReLU(), ), nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias=False), nn.BatchNorm2d(1024), ), Lambda(lambda x: x), # Identity, ), LambdaReduce(lambda x, y: x + y), # CAddTable, nn.ReLU(), ), nn.Sequential( # Sequential, LambdaMap(lambda x: x, # ConcatTable, nn.Sequential( # Sequential, nn.Sequential( # Sequential, nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias=False), nn.BatchNorm2d(1024), nn.ReLU(), nn.Conv2d(1024, 1024, (3, 3), (1, 1), (1, 1), 1, 64, bias=False), nn.BatchNorm2d(1024), nn.ReLU(), ), nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias=False), nn.BatchNorm2d(1024), ), Lambda(lambda x: x), # Identity, ), LambdaReduce(lambda x, y: x + y), # CAddTable, nn.ReLU(), ), nn.Sequential( # Sequential, LambdaMap(lambda x: x, # ConcatTable, nn.Sequential( # Sequential, nn.Sequential( # Sequential, nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias=False), nn.BatchNorm2d(1024), nn.ReLU(), nn.Conv2d(1024, 1024, (3, 3), (1, 1), (1, 1), 1, 64, bias=False), nn.BatchNorm2d(1024), nn.ReLU(), ), nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias=False), nn.BatchNorm2d(1024), ), Lambda(lambda x: x), # Identity, ), LambdaReduce(lambda x, y: x + y), # CAddTable, nn.ReLU(), ), nn.Sequential( # Sequential, LambdaMap(lambda x: x, # ConcatTable, nn.Sequential( # Sequential, nn.Sequential( # Sequential, nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias=False), nn.BatchNorm2d(1024), nn.ReLU(), nn.Conv2d(1024, 1024, (3, 3), (1, 1), (1, 1), 1, 64, bias=False), nn.BatchNorm2d(1024), nn.ReLU(), ), nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias=False), nn.BatchNorm2d(1024), ), Lambda(lambda x: x), # Identity, ), LambdaReduce(lambda x, y: x + y), # CAddTable, nn.ReLU(), ), nn.Sequential( # Sequential, LambdaMap(lambda x: x, # ConcatTable, nn.Sequential( # Sequential, nn.Sequential( # Sequential, nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias=False), nn.BatchNorm2d(1024), nn.ReLU(), nn.Conv2d(1024, 
1024, (3, 3), (1, 1), (1, 1), 1, 64, bias=False), nn.BatchNorm2d(1024), nn.ReLU(), ), nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias=False), nn.BatchNorm2d(1024), ), Lambda(lambda x: x), # Identity, ), LambdaReduce(lambda x, y: x + y), # CAddTable, nn.ReLU(), ), nn.Sequential( # Sequential, LambdaMap(lambda x: x, # ConcatTable, nn.Sequential( # Sequential, nn.Sequential( # Sequential, nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias=False), nn.BatchNorm2d(1024), nn.ReLU(), nn.Conv2d(1024, 1024, (3, 3), (1, 1), (1, 1), 1, 64, bias=False), nn.BatchNorm2d(1024), nn.ReLU(), ), nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias=False), nn.BatchNorm2d(1024), ), Lambda(lambda x: x), # Identity, ), LambdaReduce(lambda x, y: x + y), # CAddTable, nn.ReLU(), ), nn.Sequential( # Sequential, LambdaMap(lambda x: x, # ConcatTable, nn.Sequential( # Sequential, nn.Sequential( # Sequential, nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias=False), nn.BatchNorm2d(1024), nn.ReLU(), nn.Conv2d(1024, 1024, (3, 3), (1, 1), (1, 1), 1, 64, bias=False), nn.BatchNorm2d(1024), nn.ReLU(), ), nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias=False), nn.BatchNorm2d(1024), ), Lambda(lambda x: x), # Identity, ), LambdaReduce(lambda x, y: x + y), # CAddTable, nn.ReLU(), ), nn.Sequential( # Sequential, LambdaMap(lambda x: x, # ConcatTable, nn.Sequential( # Sequential, nn.Sequential( # Sequential, nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias=False), nn.BatchNorm2d(1024), nn.ReLU(), nn.Conv2d(1024, 1024, (3, 3), (1, 1), (1, 1), 1, 64, bias=False), nn.BatchNorm2d(1024), nn.ReLU(), ), nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias=False), nn.BatchNorm2d(1024), ), Lambda(lambda x: x), # Identity, ), LambdaReduce(lambda x, y: x + y), # CAddTable, nn.ReLU(), ), nn.Sequential( # Sequential, LambdaMap(lambda x: x, # ConcatTable, nn.Sequential( # Sequential, nn.Sequential( # Sequential, nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias=False), 
nn.BatchNorm2d(1024), nn.ReLU(), nn.Conv2d(1024, 1024, (3, 3), (1, 1), (1, 1), 1, 64, bias=False), nn.BatchNorm2d(1024), nn.ReLU(), ), nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias=False), nn.BatchNorm2d(1024), ), Lambda(lambda x: x), # Identity, ), LambdaReduce(lambda x, y: x + y), # CAddTable, nn.ReLU(), ), nn.Sequential( # Sequential, LambdaMap(lambda x: x, # ConcatTable, nn.Sequential( # Sequential, nn.Sequential( # Sequential, nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias=False), nn.BatchNorm2d(1024), nn.ReLU(), nn.Conv2d(1024, 1024, (3, 3), (1, 1), (1, 1), 1, 64, bias=False), nn.BatchNorm2d(1024), nn.ReLU(), ), nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias=False), nn.BatchNorm2d(1024), ), Lambda(lambda x: x), # Identity, ), LambdaReduce(lambda x, y: x + y), # CAddTable, nn.ReLU(), ), ), nn.Sequential( # Sequential, nn.Sequential( # Sequential, LambdaMap(lambda x: x, # ConcatTable, nn.Sequential( # Sequential, nn.Sequential( # Sequential, nn.Conv2d(1024, 2048, (1, 1), (1, 1), (0, 0), 1, 1, bias=False), nn.BatchNorm2d(2048), nn.ReLU(), nn.Conv2d(2048, 2048, (3, 3), (2, 2), (1, 1), 1, 64, bias=False), nn.BatchNorm2d(2048), nn.ReLU(), ), nn.Conv2d(2048, 2048, (1, 1), (1, 1), (0, 0), 1, 1, bias=False), nn.BatchNorm2d(2048), ), nn.Sequential( # Sequential, nn.Conv2d(1024, 2048, (1, 1), (2, 2), (0, 0), 1, 1, bias=False), nn.BatchNorm2d(2048), ), ), LambdaReduce(lambda x, y: x + y), # CAddTable, nn.ReLU(), ), nn.Sequential( # Sequential, LambdaMap(lambda x: x, # ConcatTable, nn.Sequential( # Sequential, nn.Sequential( # Sequential, nn.Conv2d(2048, 2048, (1, 1), (1, 1), (0, 0), 1, 1, bias=False), nn.BatchNorm2d(2048), nn.ReLU(), nn.Conv2d(2048, 2048, (3, 3), (1, 1), (1, 1), 1, 64, bias=False), nn.BatchNorm2d(2048), nn.ReLU(), ), nn.Conv2d(2048, 2048, (1, 1), (1, 1), (0, 0), 1, 1, bias=False), nn.BatchNorm2d(2048), ), Lambda(lambda x: x), # Identity, ), LambdaReduce(lambda x, y: x + y), # CAddTable, nn.ReLU(), ), nn.Sequential( # 
Sequential, LambdaMap(lambda x: x, # ConcatTable, nn.Sequential( # Sequential, nn.Sequential( # Sequential, nn.Conv2d(2048, 2048, (1, 1), (1, 1), (0, 0), 1, 1, bias=False), nn.BatchNorm2d(2048), nn.ReLU(), nn.Conv2d(2048, 2048, (3, 3), (1, 1), (1, 1), 1, 64, bias=False), nn.BatchNorm2d(2048), nn.ReLU(), ), nn.Conv2d(2048, 2048, (1, 1), (1, 1), (0, 0), 1, 1, bias=False), nn.BatchNorm2d(2048), ), Lambda(lambda x: x), # Identity, ), LambdaReduce(lambda x, y: x + y), # CAddTable, nn.ReLU(), ), ), # nn.AvgPool2d((7, 7), (1, 1)), # Lambda(lambda x: x.view(x.size(0), -1)), # View, # nn.Sequential(Lambda(lambda x: x.view(1, -1) if 1 == len(x.size()) else x), nn.Linear(2048, 1000)), # Linear, ) self.deconv_layers = self._make_deconv_layer( 3, [256, 256, 256], [4, 4, 4], ) self.final_layer = nn.Conv2d( in_channels=256, out_channels=15, kernel_size=1, stride=1, padding=0) def _get_deconv_cfg(self, deconv_kernel, index): if deconv_kernel == 4: padding = 1 output_padding = 0 elif deconv_kernel == 3: padding = 1 output_padding = 1 elif deconv_kernel == 2: padding = 0 output_padding = 0 return deconv_kernel, padding, output_padding def _make_deconv_layer(self, num_layers, num_filters, num_kernels): assert num_layers == len(num_filters), \ 'ERROR: num_deconv_layers is different len(num_deconv_filters)' assert num_layers == len(num_kernels), \ 'ERROR: num_deconv_layers is different len(num_deconv_filters)' layers = [] for i in range(num_layers): kernel, padding, output_padding = \ self._get_deconv_cfg(num_kernels[i], i) planes = num_filters[i] layers.append( nn.ConvTranspose2d( in_channels=self.inplanes, out_channels=planes, kernel_size=kernel, stride=2, padding=padding, output_padding=output_padding, bias=self.deconv_with_bias)) layers.append(nn.BatchNorm2d(planes, momentum=0.1)) layers.append(nn.ReLU(inplace=True)) self.inplanes = planes return nn.Sequential(*layers) def forward(self, x): x = self.resnext_101_64x4d(x) x = self.deconv_layers(x) x = self.final_layer(x) return x def 
init_weights(self, pretrained=''): if os.path.isfile(pretrained): logger.info('=> init deconv weights from normal distribution') for name, m in self.deconv_layers.named_modules(): if isinstance(m, nn.ConvTranspose2d): logger.info('=> init {}.weight as normal(0, 0.001)'.format(name)) logger.info('=> init {}.bias as 0'.format(name)) nn.init.normal_(m.weight, std=0.001) if self.deconv_with_bias: nn.init.constant_(m.bias, 0) elif isinstance(m, nn.BatchNorm2d): logger.info('=> init {}.weight as 1'.format(name)) logger.info('=> init {}.bias as 0'.format(name)) nn.init.constant_(m.weight, 1) nn.init.constant_(m.bias, 0) logger.info('=> init final conv weights from normal distribution') for m in self.final_layer.modules(): if isinstance(m, nn.Conv2d): # nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu') logger.info('=> init {}.weight as normal(0, 0.001)'.format(name)) logger.info('=> init {}.bias as 0'.format(name)) nn.init.normal_(m.weight, std=0.001) nn.init.constant_(m.bias, 0) pretrained_state_dict = torch.load(pretrained) need_init_state_dict = {} logger.info('=> loading pretrained model {}'.format(pretrained)) for name, value in pretrained_state_dict.items(): need_init_state_dict['resnext_101_64x4d.{}'.format(name)] = value self.load_state_dict(need_init_state_dict, strict=False) else: logger.info('=> init weights from normal distribution') for m in self.modules(): if isinstance(m, nn.Conv2d): # nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu') nn.init.normal_(m.weight, std=0.001) # nn.init.constant_(m.bias, 0) elif isinstance(m, nn.BatchNorm2d): nn.init.constant_(m.weight, 1) nn.init.constant_(m.bias, 0) elif isinstance(m, nn.ConvTranspose2d): nn.init.normal_(m.weight, std=0.001) if self.deconv_with_bias: nn.init.constant_(m.bias, 0) def get_pose_net(is_train, **kwargs): pretrained = './models/pytorch/resnext_101_64x4d.pth' model = PoseResneXt() if is_train: model.init_weights(pretrained) return model def main(): model = 
PoseResneXt() device = torch.device('cuda:0') pretrained_dict = torch.load('../../models/pytorch/resnext_101_64x4d.pth') need_init_state_dict = {} for name, value in pretrained_dict.items(): need_init_state_dict['resnext_101_64x4d.{}'.format(name)] = value for name, value in need_init_state_dict.items(): print(name, ':', value) print(need_init_state_dict) model.load_state_dict(need_init_state_dict, strict=False) model.to(device) x = torch.rand(1, 3, 256, 192).to(device) x = model(x) print(x.size()) for name, parameters in model.named_parameters(): print(name, ':', parameters.size()) if __name__ == '__main__': main()
51.893617
113
0.332604
3,654
43,902
3.949097
0.041598
0.055994
0.047193
0.035204
0.852529
0.836452
0.8228
0.8228
0.808663
0.794941
0
0.121663
0.551228
43,902
845
114
51.95503
0.61075
0.062981
0
0.849246
0
0
0.014221
0.003079
0
0
0
0
0.002513
1
0.015075
false
0
0.007538
0.003769
0.038945
0.005025
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
1
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
8
53bf61129f5563225944909831928aee8b685683
181
py
Python
narwhallet/core/kcl/bip_utils/base58/__init__.py
Snider/narwhallet
0d528763c735f1e68b8264e302854d41e7cf1956
[ "MIT" ]
3
2021-12-29T11:25:13.000Z
2022-01-16T13:57:17.000Z
narwhallet/core/kcl/bip_utils/base58/__init__.py
Snider/narwhallet
0d528763c735f1e68b8264e302854d41e7cf1956
[ "MIT" ]
null
null
null
narwhallet/core/kcl/bip_utils/base58/__init__.py
Snider/narwhallet
0d528763c735f1e68b8264e302854d41e7cf1956
[ "MIT" ]
1
2022-01-16T13:57:20.000Z
2022-01-16T13:57:20.000Z
from narwhallet.core.kcl.bip_utils.base58.base58_ex import Base58ChecksumError from narwhallet.core.kcl.bip_utils.base58.base58 import Base58Alphabets, Base58Decoder, Base58Encoder
60.333333
101
0.878453
23
181
6.782609
0.565217
0.179487
0.230769
0.269231
0.525641
0.525641
0.525641
0.525641
0
0
0
0.093567
0.055249
181
2
102
90.5
0.818713
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
53c3e1acf176d51449326cb0091c6f54b4f61343
285,875
py
Python
tests/unit/gapic/datacatalog_v1beta1/test_data_catalog.py
steffnay/python-datacatalog
90a0be276e38e889a5086f8fd233d5b25e19965e
[ "Apache-2.0" ]
null
null
null
tests/unit/gapic/datacatalog_v1beta1/test_data_catalog.py
steffnay/python-datacatalog
90a0be276e38e889a5086f8fd233d5b25e19965e
[ "Apache-2.0" ]
null
null
null
tests/unit/gapic/datacatalog_v1beta1/test_data_catalog.py
steffnay/python-datacatalog
90a0be276e38e889a5086f8fd233d5b25e19965e
[ "Apache-2.0" ]
null
null
null
# -*- coding: utf-8 -*- # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import os import mock import packaging.version import grpc from grpc.experimental import aio import math import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule from google.api_core import client_options from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.datacatalog_v1beta1.services.data_catalog import ( DataCatalogAsyncClient, ) from google.cloud.datacatalog_v1beta1.services.data_catalog import DataCatalogClient from google.cloud.datacatalog_v1beta1.services.data_catalog import pagers from google.cloud.datacatalog_v1beta1.services.data_catalog import transports from google.cloud.datacatalog_v1beta1.services.data_catalog.transports.base import ( _GOOGLE_AUTH_VERSION, ) from google.cloud.datacatalog_v1beta1.types import common from google.cloud.datacatalog_v1beta1.types import datacatalog from google.cloud.datacatalog_v1beta1.types import gcs_fileset_spec from google.cloud.datacatalog_v1beta1.types import schema from google.cloud.datacatalog_v1beta1.types import search from google.cloud.datacatalog_v1beta1.types import table_spec from google.cloud.datacatalog_v1beta1.types import tags from 
google.cloud.datacatalog_v1beta1.types import timestamps from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import options_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.type import expr_pb2 # type: ignore import google.auth # TODO(busunkim): Once google-auth >= 1.25.0 is required transitively # through google-api-core: # - Delete the auth "less than" test cases # - Delete these pytest markers (Make the "greater than or equal to" tests the default). requires_google_auth_lt_1_25_0 = pytest.mark.skipif( packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), reason="This test requires google-auth < 1.25.0", ) requires_google_auth_gte_1_25_0 = pytest.mark.skipif( packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), reason="This test requires google-auth >= 1.25.0", ) def client_cert_source_callback(): return b"cert bytes", b"key bytes" # If default endpoint is localhost, then default mtls endpoint will be the same. # This method modifies the default endpoint so the client can produce a different # mtls endpoint for endpoint testing purposes. 
def modify_default_endpoint(client): return ( "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT ) def test__get_default_mtls_endpoint(): api_endpoint = "example.googleapis.com" api_mtls_endpoint = "example.mtls.googleapis.com" sandbox_endpoint = "example.sandbox.googleapis.com" sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" non_googleapi = "api.example.com" assert DataCatalogClient._get_default_mtls_endpoint(None) is None assert ( DataCatalogClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint ) assert ( DataCatalogClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint ) assert ( DataCatalogClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint ) assert ( DataCatalogClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint ) assert DataCatalogClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi @pytest.mark.parametrize("client_class", [DataCatalogClient, DataCatalogAsyncClient,]) def test_data_catalog_client_from_service_account_info(client_class): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: factory.return_value = creds info = {"valid": True} client = client_class.from_service_account_info(info) assert client.transport._credentials == creds assert isinstance(client, client_class) assert client.transport._host == "datacatalog.googleapis.com:443" @pytest.mark.parametrize( "transport_class,transport_name", [ (transports.DataCatalogGrpcTransport, "grpc"), (transports.DataCatalogGrpcAsyncIOTransport, "grpc_asyncio"), ], ) def test_data_catalog_client_service_account_always_use_jwt( transport_class, transport_name ): with mock.patch.object( service_account.Credentials, "with_always_use_jwt_access", create=True ) as use_jwt: creds = service_account.Credentials(None, None, None) transport = transport_class(credentials=creds, 
always_use_jwt_access=True) use_jwt.assert_called_once_with(True) with mock.patch.object( service_account.Credentials, "with_always_use_jwt_access", create=True ) as use_jwt: creds = service_account.Credentials(None, None, None) transport = transport_class(credentials=creds, always_use_jwt_access=False) use_jwt.assert_not_called() @pytest.mark.parametrize("client_class", [DataCatalogClient, DataCatalogAsyncClient,]) def test_data_catalog_client_from_service_account_file(client_class): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_file" ) as factory: factory.return_value = creds client = client_class.from_service_account_file("dummy/file/path.json") assert client.transport._credentials == creds assert isinstance(client, client_class) client = client_class.from_service_account_json("dummy/file/path.json") assert client.transport._credentials == creds assert isinstance(client, client_class) assert client.transport._host == "datacatalog.googleapis.com:443" def test_data_catalog_client_get_transport_class(): transport = DataCatalogClient.get_transport_class() available_transports = [ transports.DataCatalogGrpcTransport, ] assert transport in available_transports transport = DataCatalogClient.get_transport_class("grpc") assert transport == transports.DataCatalogGrpcTransport @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ (DataCatalogClient, transports.DataCatalogGrpcTransport, "grpc"), ( DataCatalogAsyncClient, transports.DataCatalogGrpcAsyncIOTransport, "grpc_asyncio", ), ], ) @mock.patch.object( DataCatalogClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataCatalogClient) ) @mock.patch.object( DataCatalogAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataCatalogAsyncClient), ) def test_data_catalog_client_client_options( client_class, transport_class, transport_name ): # Check that if channel is provided we won't create a new one. 
with mock.patch.object(DataCatalogClient, "get_transport_class") as gtc: transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) client = client_class(transport=transport) gtc.assert_not_called() # Check that if channel is provided via str we will create a new one. with mock.patch.object(DataCatalogClient, "get_transport_class") as gtc: client = client_class(transport=transport_name) gtc.assert_called() # Check the case api_endpoint is provided. options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, host="squid.clam.whelk", scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is # "never". with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class() patched.assert_called_once_with( credentials=None, credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is # "always". 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class() patched.assert_called_once_with( credentials=None, credentials_file=None, host=client.DEFAULT_MTLS_ENDPOINT, scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has # unsupported value. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError): client = client_class() # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. with mock.patch.dict( os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} ): with pytest.raises(ValueError): client = client_class() # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, ) @pytest.mark.parametrize( "client_class,transport_class,transport_name,use_client_cert_env", [ (DataCatalogClient, transports.DataCatalogGrpcTransport, "grpc", "true"), ( DataCatalogAsyncClient, transports.DataCatalogGrpcAsyncIOTransport, "grpc_asyncio", "true", ), (DataCatalogClient, transports.DataCatalogGrpcTransport, "grpc", "false"), ( DataCatalogAsyncClient, transports.DataCatalogGrpcAsyncIOTransport, "grpc_asyncio", "false", ), ], ) @mock.patch.object( DataCatalogClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataCatalogClient) ) 
@mock.patch.object( DataCatalogAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataCatalogAsyncClient), ) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) def test_data_catalog_client_mtls_env_auto( client_class, transport_class, transport_name, use_client_cert_env ): # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. # Check the case client_cert_source is provided. Whether client cert is used depends on # GOOGLE_API_USE_CLIENT_CERTIFICATE value. with mock.patch.dict( os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} ): options = client_options.ClientOptions( client_cert_source=client_cert_source_callback ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None expected_host = client.DEFAULT_ENDPOINT else: expected_client_cert_source = client_cert_source_callback expected_host = client.DEFAULT_MTLS_ENDPOINT patched.assert_called_once_with( credentials=None, credentials_file=None, host=expected_host, scopes=None, client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
with mock.patch.dict( os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} ): with mock.patch.object(transport_class, "__init__") as patched: with mock.patch( "google.auth.transport.mtls.has_default_client_cert_source", return_value=True, ): with mock.patch( "google.auth.transport.mtls.default_client_cert_source", return_value=client_cert_source_callback, ): if use_client_cert_env == "false": expected_host = client.DEFAULT_ENDPOINT expected_client_cert_source = None else: expected_host = client.DEFAULT_MTLS_ENDPOINT expected_client_cert_source = client_cert_source_callback patched.return_value = None client = client_class() patched.assert_called_once_with( credentials=None, credentials_file=None, host=expected_host, scopes=None, client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. with mock.patch.dict( os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} ): with mock.patch.object(transport_class, "__init__") as patched: with mock.patch( "google.auth.transport.mtls.has_default_client_cert_source", return_value=False, ): patched.return_value = None client = client_class() patched.assert_called_once_with( credentials=None, credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, ) @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ (DataCatalogClient, transports.DataCatalogGrpcTransport, "grpc"), ( DataCatalogAsyncClient, transports.DataCatalogGrpcAsyncIOTransport, "grpc_asyncio", ), ], ) def test_data_catalog_client_client_options_scopes( client_class, transport_class, transport_name ): # Check the case scopes are provided. 
options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=["1", "2"], client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, ) @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ (DataCatalogClient, transports.DataCatalogGrpcTransport, "grpc"), ( DataCatalogAsyncClient, transports.DataCatalogGrpcAsyncIOTransport, "grpc_asyncio", ), ], ) def test_data_catalog_client_client_options_credentials_file( client_class, transport_class, transport_name ): # Check the case credentials file is provided. options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", host=client.DEFAULT_ENDPOINT, scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, ) def test_data_catalog_client_client_options_from_dict(): with mock.patch( "google.cloud.datacatalog_v1beta1.services.data_catalog.transports.DataCatalogGrpcTransport.__init__" ) as grpc_transport: grpc_transport.return_value = None client = DataCatalogClient(client_options={"api_endpoint": "squid.clam.whelk"}) grpc_transport.assert_called_once_with( credentials=None, credentials_file=None, host="squid.clam.whelk", scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, ) def test_search_catalog( transport: str = "grpc", request_type=datacatalog.SearchCatalogRequest ): 
client = DataCatalogClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.search_catalog), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.SearchCatalogResponse( next_page_token="next_page_token_value", ) response = client.search_catalog(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == datacatalog.SearchCatalogRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.SearchCatalogPager) assert response.next_page_token == "next_page_token_value" def test_search_catalog_from_dict(): test_search_catalog(request_type=dict) def test_search_catalog_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataCatalogClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.search_catalog), "__call__") as call: client.search_catalog() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == datacatalog.SearchCatalogRequest() @pytest.mark.asyncio async def test_search_catalog_async( transport: str = "grpc_asyncio", request_type=datacatalog.SearchCatalogRequest ): client = DataCatalogAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. 
request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.search_catalog), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datacatalog.SearchCatalogResponse(next_page_token="next_page_token_value",) ) response = await client.search_catalog(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == datacatalog.SearchCatalogRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.SearchCatalogAsyncPager) assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio async def test_search_catalog_async_from_dict(): await test_search_catalog_async(request_type=dict) def test_search_catalog_flattened(): client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.search_catalog), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.SearchCatalogResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.search_catalog( scope=datacatalog.SearchCatalogRequest.Scope( include_org_ids=["include_org_ids_value"] ), query="query_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0].scope == datacatalog.SearchCatalogRequest.Scope( include_org_ids=["include_org_ids_value"] ) assert args[0].query == "query_value" def test_search_catalog_flattened_error(): client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.search_catalog( datacatalog.SearchCatalogRequest(), scope=datacatalog.SearchCatalogRequest.Scope( include_org_ids=["include_org_ids_value"] ), query="query_value", ) @pytest.mark.asyncio async def test_search_catalog_flattened_async(): client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.search_catalog), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.SearchCatalogResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datacatalog.SearchCatalogResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.search_catalog( scope=datacatalog.SearchCatalogRequest.Scope( include_org_ids=["include_org_ids_value"] ), query="query_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].scope == datacatalog.SearchCatalogRequest.Scope( include_org_ids=["include_org_ids_value"] ) assert args[0].query == "query_value" @pytest.mark.asyncio async def test_search_catalog_flattened_error_async(): client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): await client.search_catalog( datacatalog.SearchCatalogRequest(), scope=datacatalog.SearchCatalogRequest.Scope( include_org_ids=["include_org_ids_value"] ), query="query_value", ) def test_search_catalog_pager(): client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.search_catalog), "__call__") as call: # Set the response to a series of pages. call.side_effect = ( datacatalog.SearchCatalogResponse( results=[ search.SearchCatalogResult(), search.SearchCatalogResult(), search.SearchCatalogResult(), ], next_page_token="abc", ), datacatalog.SearchCatalogResponse(results=[], next_page_token="def",), datacatalog.SearchCatalogResponse( results=[search.SearchCatalogResult(),], next_page_token="ghi", ), datacatalog.SearchCatalogResponse( results=[search.SearchCatalogResult(), search.SearchCatalogResult(),], ), RuntimeError, ) metadata = () pager = client.search_catalog(request={}) assert pager._metadata == metadata results = [i for i in pager] assert len(results) == 6 assert all(isinstance(i, search.SearchCatalogResult) for i in results) def test_search_catalog_pages(): client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.search_catalog), "__call__") as call: # Set the response to a series of pages. 
call.side_effect = ( datacatalog.SearchCatalogResponse( results=[ search.SearchCatalogResult(), search.SearchCatalogResult(), search.SearchCatalogResult(), ], next_page_token="abc", ), datacatalog.SearchCatalogResponse(results=[], next_page_token="def",), datacatalog.SearchCatalogResponse( results=[search.SearchCatalogResult(),], next_page_token="ghi", ), datacatalog.SearchCatalogResponse( results=[search.SearchCatalogResult(), search.SearchCatalogResult(),], ), RuntimeError, ) pages = list(client.search_catalog(request={}).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @pytest.mark.asyncio async def test_search_catalog_async_pager(): client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.search_catalog), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. call.side_effect = ( datacatalog.SearchCatalogResponse( results=[ search.SearchCatalogResult(), search.SearchCatalogResult(), search.SearchCatalogResult(), ], next_page_token="abc", ), datacatalog.SearchCatalogResponse(results=[], next_page_token="def",), datacatalog.SearchCatalogResponse( results=[search.SearchCatalogResult(),], next_page_token="ghi", ), datacatalog.SearchCatalogResponse( results=[search.SearchCatalogResult(), search.SearchCatalogResult(),], ), RuntimeError, ) async_pager = await client.search_catalog(request={},) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: responses.append(response) assert len(responses) == 6 assert all(isinstance(i, search.SearchCatalogResult) for i in responses) @pytest.mark.asyncio async def test_search_catalog_async_pages(): client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( type(client.transport.search_catalog), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. call.side_effect = ( datacatalog.SearchCatalogResponse( results=[ search.SearchCatalogResult(), search.SearchCatalogResult(), search.SearchCatalogResult(), ], next_page_token="abc", ), datacatalog.SearchCatalogResponse(results=[], next_page_token="def",), datacatalog.SearchCatalogResponse( results=[search.SearchCatalogResult(),], next_page_token="ghi", ), datacatalog.SearchCatalogResponse( results=[search.SearchCatalogResult(), search.SearchCatalogResult(),], ), RuntimeError, ) pages = [] async for page_ in (await client.search_catalog(request={})).pages: pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token def test_create_entry_group( transport: str = "grpc", request_type=datacatalog.CreateEntryGroupRequest ): client = DataCatalogClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.create_entry_group), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.EntryGroup( name="name_value", display_name="display_name_value", description="description_value", ) response = client.create_entry_group(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == datacatalog.CreateEntryGroupRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, datacatalog.EntryGroup) assert response.name == "name_value" assert response.display_name == "display_name_value" assert response.description == "description_value" def test_create_entry_group_from_dict(): test_create_entry_group(request_type=dict) def test_create_entry_group_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataCatalogClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.create_entry_group), "__call__" ) as call: client.create_entry_group() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == datacatalog.CreateEntryGroupRequest() @pytest.mark.asyncio async def test_create_entry_group_async( transport: str = "grpc_asyncio", request_type=datacatalog.CreateEntryGroupRequest ): client = DataCatalogAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.create_entry_group), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datacatalog.EntryGroup( name="name_value", display_name="display_name_value", description="description_value", ) ) response = await client.create_entry_group(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == datacatalog.CreateEntryGroupRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, datacatalog.EntryGroup) assert response.name == "name_value" assert response.display_name == "display_name_value" assert response.description == "description_value" @pytest.mark.asyncio async def test_create_entry_group_async_from_dict(): await test_create_entry_group_async(request_type=dict) def test_create_entry_group_field_headers(): client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.CreateEntryGroupRequest() request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.create_entry_group), "__call__" ) as call: call.return_value = datacatalog.EntryGroup() client.create_entry_group(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio async def test_create_entry_group_field_headers_async(): client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.CreateEntryGroupRequest() request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.create_entry_group), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datacatalog.EntryGroup() ) await client.create_entry_group(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_create_entry_group_flattened(): client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.create_entry_group), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.EntryGroup() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_entry_group( parent="parent_value", entry_group_id="entry_group_id_value", entry_group=datacatalog.EntryGroup(name="name_value"), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0].parent == "parent_value" assert args[0].entry_group_id == "entry_group_id_value" assert args[0].entry_group == datacatalog.EntryGroup(name="name_value") def test_create_entry_group_flattened_error(): client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.create_entry_group( datacatalog.CreateEntryGroupRequest(), parent="parent_value", entry_group_id="entry_group_id_value", entry_group=datacatalog.EntryGroup(name="name_value"), ) @pytest.mark.asyncio async def test_create_entry_group_flattened_async(): client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.create_entry_group), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = datacatalog.EntryGroup() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datacatalog.EntryGroup() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.create_entry_group( parent="parent_value", entry_group_id="entry_group_id_value", entry_group=datacatalog.EntryGroup(name="name_value"), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].parent == "parent_value" assert args[0].entry_group_id == "entry_group_id_value" assert args[0].entry_group == datacatalog.EntryGroup(name="name_value") @pytest.mark.asyncio async def test_create_entry_group_flattened_error_async(): client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.create_entry_group( datacatalog.CreateEntryGroupRequest(), parent="parent_value", entry_group_id="entry_group_id_value", entry_group=datacatalog.EntryGroup(name="name_value"), ) def test_update_entry_group( transport: str = "grpc", request_type=datacatalog.UpdateEntryGroupRequest ): client = DataCatalogClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_entry_group), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = datacatalog.EntryGroup( name="name_value", display_name="display_name_value", description="description_value", ) response = client.update_entry_group(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == datacatalog.UpdateEntryGroupRequest() # Establish that the response is the type that we expect. assert isinstance(response, datacatalog.EntryGroup) assert response.name == "name_value" assert response.display_name == "display_name_value" assert response.description == "description_value" def test_update_entry_group_from_dict(): test_update_entry_group(request_type=dict) def test_update_entry_group_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataCatalogClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_entry_group), "__call__" ) as call: client.update_entry_group() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == datacatalog.UpdateEntryGroupRequest() @pytest.mark.asyncio async def test_update_entry_group_async( transport: str = "grpc_asyncio", request_type=datacatalog.UpdateEntryGroupRequest ): client = DataCatalogAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_entry_group), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datacatalog.EntryGroup( name="name_value", display_name="display_name_value", description="description_value", ) ) response = await client.update_entry_group(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == datacatalog.UpdateEntryGroupRequest() # Establish that the response is the type that we expect. assert isinstance(response, datacatalog.EntryGroup) assert response.name == "name_value" assert response.display_name == "display_name_value" assert response.description == "description_value" @pytest.mark.asyncio async def test_update_entry_group_async_from_dict(): await test_update_entry_group_async(request_type=dict) def test_update_entry_group_field_headers(): client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.UpdateEntryGroupRequest() request.entry_group.name = "entry_group.name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_entry_group), "__call__" ) as call: call.return_value = datacatalog.EntryGroup() client.update_entry_group(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "entry_group.name=entry_group.name/value",) in kw[ "metadata" ] @pytest.mark.asyncio async def test_update_entry_group_field_headers_async(): client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
request = datacatalog.UpdateEntryGroupRequest() request.entry_group.name = "entry_group.name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_entry_group), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datacatalog.EntryGroup() ) await client.update_entry_group(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "entry_group.name=entry_group.name/value",) in kw[ "metadata" ] def test_update_entry_group_flattened(): client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_entry_group), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.EntryGroup() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.update_entry_group( entry_group=datacatalog.EntryGroup(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0].entry_group == datacatalog.EntryGroup(name="name_value") assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) def test_update_entry_group_flattened_error(): client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): client.update_entry_group( datacatalog.UpdateEntryGroupRequest(), entry_group=datacatalog.EntryGroup(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio async def test_update_entry_group_flattened_async(): client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_entry_group), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.EntryGroup() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datacatalog.EntryGroup() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.update_entry_group( entry_group=datacatalog.EntryGroup(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].entry_group == datacatalog.EntryGroup(name="name_value") assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) @pytest.mark.asyncio async def test_update_entry_group_flattened_error_async(): client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): await client.update_entry_group( datacatalog.UpdateEntryGroupRequest(), entry_group=datacatalog.EntryGroup(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) def test_get_entry_group( transport: str = "grpc", request_type=datacatalog.GetEntryGroupRequest ): client = DataCatalogClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_entry_group), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.EntryGroup( name="name_value", display_name="display_name_value", description="description_value", ) response = client.get_entry_group(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == datacatalog.GetEntryGroupRequest() # Establish that the response is the type that we expect. assert isinstance(response, datacatalog.EntryGroup) assert response.name == "name_value" assert response.display_name == "display_name_value" assert response.description == "description_value" def test_get_entry_group_from_dict(): test_get_entry_group(request_type=dict) def test_get_entry_group_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataCatalogClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
    with mock.patch.object(type(client.transport.get_entry_group), "__call__") as call:
        client.get_entry_group()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == datacatalog.GetEntryGroupRequest()


@pytest.mark.asyncio
async def test_get_entry_group_async(
    transport: str = "grpc_asyncio", request_type=datacatalog.GetEntryGroupRequest
):
    client = DataCatalogAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.get_entry_group), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            datacatalog.EntryGroup(
                name="name_value",
                display_name="display_name_value",
                description="description_value",
            )
        )
        response = await client.get_entry_group(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == datacatalog.GetEntryGroupRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, datacatalog.EntryGroup)
    assert response.name == "name_value"
    assert response.display_name == "display_name_value"
    assert response.description == "description_value"


@pytest.mark.asyncio
async def test_get_entry_group_async_from_dict():
    await test_get_entry_group_async(request_type=dict)


def test_get_entry_group_field_headers():
    client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),)

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = datacatalog.GetEntryGroupRequest()
    request.name = "name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.get_entry_group), "__call__") as call:
        call.return_value = datacatalog.EntryGroup()
        client.get_entry_group(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]


@pytest.mark.asyncio
async def test_get_entry_group_field_headers_async():
    client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = datacatalog.GetEntryGroupRequest()
    request.name = "name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.get_entry_group), "__call__") as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            datacatalog.EntryGroup()
        )
        await client.get_entry_group(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]


def test_get_entry_group_flattened():
    client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),)

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.get_entry_group), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = datacatalog.EntryGroup()
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.get_entry_group(
            name="name_value",
            read_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0].name == "name_value"
        assert args[0].read_mask == field_mask_pb2.FieldMask(paths=["paths_value"])


def test_get_entry_group_flattened_error():
    client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),)

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.get_entry_group(
            datacatalog.GetEntryGroupRequest(),
            name="name_value",
            read_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
        )


@pytest.mark.asyncio
async def test_get_entry_group_flattened_async():
    client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.get_entry_group), "__call__") as call:
        # Designate an appropriate return value for the call.
        # NOTE(review): first assignment is immediately overwritten below —
        # redundant artifact of the code generator.
        call.return_value = datacatalog.EntryGroup()
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            datacatalog.EntryGroup()
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.get_entry_group(
            name="name_value",
            read_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0].name == "name_value"
        assert args[0].read_mask == field_mask_pb2.FieldMask(paths=["paths_value"])


@pytest.mark.asyncio
async def test_get_entry_group_flattened_error_async():
    client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        await client.get_entry_group(
            datacatalog.GetEntryGroupRequest(),
            name="name_value",
            read_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
        )


def test_delete_entry_group(
    transport: str = "grpc", request_type=datacatalog.DeleteEntryGroupRequest
):
    client = DataCatalogClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.delete_entry_group), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = None
        response = client.delete_entry_group(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == datacatalog.DeleteEntryGroupRequest()

    # Establish that the response is the type that we expect.
    assert response is None


def test_delete_entry_group_from_dict():
    test_delete_entry_group(request_type=dict)


def test_delete_entry_group_empty_call():
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = DataCatalogClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.delete_entry_group), "__call__"
    ) as call:
        client.delete_entry_group()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == datacatalog.DeleteEntryGroupRequest()


@pytest.mark.asyncio
async def test_delete_entry_group_async(
    transport: str = "grpc_asyncio", request_type=datacatalog.DeleteEntryGroupRequest
):
    client = DataCatalogAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.delete_entry_group), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        response = await client.delete_entry_group(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == datacatalog.DeleteEntryGroupRequest()

    # Establish that the response is the type that we expect.
    assert response is None


@pytest.mark.asyncio
async def test_delete_entry_group_async_from_dict():
    await test_delete_entry_group_async(request_type=dict)


def test_delete_entry_group_field_headers():
    client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),)

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = datacatalog.DeleteEntryGroupRequest()
    request.name = "name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.delete_entry_group), "__call__"
    ) as call:
        call.return_value = None
        client.delete_entry_group(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]


@pytest.mark.asyncio
async def test_delete_entry_group_field_headers_async():
    client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = datacatalog.DeleteEntryGroupRequest()
    request.name = "name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.delete_entry_group), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        await client.delete_entry_group(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]


def test_delete_entry_group_flattened():
    client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),)

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.delete_entry_group), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = None
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.delete_entry_group(name="name_value",)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0].name == "name_value"


def test_delete_entry_group_flattened_error():
    client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),)

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.delete_entry_group(
            datacatalog.DeleteEntryGroupRequest(), name="name_value",
        )


@pytest.mark.asyncio
async def test_delete_entry_group_flattened_async():
    client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.delete_entry_group), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        # NOTE(review): first assignment is immediately overwritten below —
        # redundant artifact of the code generator.
        call.return_value = None
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.delete_entry_group(name="name_value",)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0].name == "name_value"


@pytest.mark.asyncio
async def test_delete_entry_group_flattened_error_async():
    client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        await client.delete_entry_group(
            datacatalog.DeleteEntryGroupRequest(), name="name_value",
        )


def test_list_entry_groups(
    transport: str = "grpc", request_type=datacatalog.ListEntryGroupsRequest
):
    client = DataCatalogClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_entry_groups), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = datacatalog.ListEntryGroupsResponse(
            next_page_token="next_page_token_value",
        )
        response = client.list_entry_groups(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == datacatalog.ListEntryGroupsRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, pagers.ListEntryGroupsPager)
    assert response.next_page_token == "next_page_token_value"


def test_list_entry_groups_from_dict():
    test_list_entry_groups(request_type=dict)


def test_list_entry_groups_empty_call():
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = DataCatalogClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_entry_groups), "__call__"
    ) as call:
        client.list_entry_groups()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == datacatalog.ListEntryGroupsRequest()


@pytest.mark.asyncio
async def test_list_entry_groups_async(
    transport: str = "grpc_asyncio", request_type=datacatalog.ListEntryGroupsRequest
):
    client = DataCatalogAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_entry_groups), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            datacatalog.ListEntryGroupsResponse(
                next_page_token="next_page_token_value",
            )
        )
        response = await client.list_entry_groups(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == datacatalog.ListEntryGroupsRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, pagers.ListEntryGroupsAsyncPager)
    assert response.next_page_token == "next_page_token_value"


@pytest.mark.asyncio
async def test_list_entry_groups_async_from_dict():
    await test_list_entry_groups_async(request_type=dict)


def test_list_entry_groups_field_headers():
    client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),)

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = datacatalog.ListEntryGroupsRequest()
    request.parent = "parent/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_entry_groups), "__call__"
    ) as call:
        call.return_value = datacatalog.ListEntryGroupsResponse()
        client.list_entry_groups(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]


@pytest.mark.asyncio
async def test_list_entry_groups_field_headers_async():
    client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = datacatalog.ListEntryGroupsRequest()
    request.parent = "parent/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_entry_groups), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            datacatalog.ListEntryGroupsResponse()
        )
        await client.list_entry_groups(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]


def test_list_entry_groups_flattened():
    client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),)

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_entry_groups), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = datacatalog.ListEntryGroupsResponse()
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.list_entry_groups(parent="parent_value",)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0].parent == "parent_value"


def test_list_entry_groups_flattened_error():
    client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),)

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.list_entry_groups(
            datacatalog.ListEntryGroupsRequest(), parent="parent_value",
        )


@pytest.mark.asyncio
async def test_list_entry_groups_flattened_async():
    client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)

    # Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object( type(client.transport.list_entry_groups), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.ListEntryGroupsResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datacatalog.ListEntryGroupsResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.list_entry_groups(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].parent == "parent_value" @pytest.mark.asyncio async def test_list_entry_groups_flattened_error_async(): client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.list_entry_groups( datacatalog.ListEntryGroupsRequest(), parent="parent_value", ) def test_list_entry_groups_pager(): client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_entry_groups), "__call__" ) as call: # Set the response to a series of pages. 
call.side_effect = ( datacatalog.ListEntryGroupsResponse( entry_groups=[ datacatalog.EntryGroup(), datacatalog.EntryGroup(), datacatalog.EntryGroup(), ], next_page_token="abc", ), datacatalog.ListEntryGroupsResponse( entry_groups=[], next_page_token="def", ), datacatalog.ListEntryGroupsResponse( entry_groups=[datacatalog.EntryGroup(),], next_page_token="ghi", ), datacatalog.ListEntryGroupsResponse( entry_groups=[datacatalog.EntryGroup(), datacatalog.EntryGroup(),], ), RuntimeError, ) metadata = () metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_entry_groups(request={}) assert pager._metadata == metadata results = [i for i in pager] assert len(results) == 6 assert all(isinstance(i, datacatalog.EntryGroup) for i in results) def test_list_entry_groups_pages(): client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_entry_groups), "__call__" ) as call: # Set the response to a series of pages. call.side_effect = ( datacatalog.ListEntryGroupsResponse( entry_groups=[ datacatalog.EntryGroup(), datacatalog.EntryGroup(), datacatalog.EntryGroup(), ], next_page_token="abc", ), datacatalog.ListEntryGroupsResponse( entry_groups=[], next_page_token="def", ), datacatalog.ListEntryGroupsResponse( entry_groups=[datacatalog.EntryGroup(),], next_page_token="ghi", ), datacatalog.ListEntryGroupsResponse( entry_groups=[datacatalog.EntryGroup(), datacatalog.EntryGroup(),], ), RuntimeError, ) pages = list(client.list_entry_groups(request={}).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @pytest.mark.asyncio async def test_list_entry_groups_async_pager(): client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( type(client.transport.list_entry_groups), "__call__", new_callable=mock.AsyncMock, ) as call: # Set the response to a series of pages. call.side_effect = ( datacatalog.ListEntryGroupsResponse( entry_groups=[ datacatalog.EntryGroup(), datacatalog.EntryGroup(), datacatalog.EntryGroup(), ], next_page_token="abc", ), datacatalog.ListEntryGroupsResponse( entry_groups=[], next_page_token="def", ), datacatalog.ListEntryGroupsResponse( entry_groups=[datacatalog.EntryGroup(),], next_page_token="ghi", ), datacatalog.ListEntryGroupsResponse( entry_groups=[datacatalog.EntryGroup(), datacatalog.EntryGroup(),], ), RuntimeError, ) async_pager = await client.list_entry_groups(request={},) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: responses.append(response) assert len(responses) == 6 assert all(isinstance(i, datacatalog.EntryGroup) for i in responses) @pytest.mark.asyncio async def test_list_entry_groups_async_pages(): client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_entry_groups), "__call__", new_callable=mock.AsyncMock, ) as call: # Set the response to a series of pages. 
call.side_effect = ( datacatalog.ListEntryGroupsResponse( entry_groups=[ datacatalog.EntryGroup(), datacatalog.EntryGroup(), datacatalog.EntryGroup(), ], next_page_token="abc", ), datacatalog.ListEntryGroupsResponse( entry_groups=[], next_page_token="def", ), datacatalog.ListEntryGroupsResponse( entry_groups=[datacatalog.EntryGroup(),], next_page_token="ghi", ), datacatalog.ListEntryGroupsResponse( entry_groups=[datacatalog.EntryGroup(), datacatalog.EntryGroup(),], ), RuntimeError, ) pages = [] async for page_ in (await client.list_entry_groups(request={})).pages: pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token def test_create_entry( transport: str = "grpc", request_type=datacatalog.CreateEntryRequest ): client = DataCatalogClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_entry), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.Entry( name="name_value", linked_resource="linked_resource_value", display_name="display_name_value", description="description_value", type_=datacatalog.EntryType.TABLE, integrated_system=common.IntegratedSystem.BIGQUERY, gcs_fileset_spec=gcs_fileset_spec.GcsFilesetSpec( file_patterns=["file_patterns_value"] ), ) response = client.create_entry(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == datacatalog.CreateEntryRequest() # Establish that the response is the type that we expect. 
    assert isinstance(response, datacatalog.Entry)
    assert response.name == "name_value"
    assert response.linked_resource == "linked_resource_value"
    assert response.display_name == "display_name_value"
    assert response.description == "description_value"


def test_create_entry_from_dict():
    test_create_entry(request_type=dict)


def test_create_entry_empty_call():
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = DataCatalogClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.create_entry), "__call__") as call:
        client.create_entry()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == datacatalog.CreateEntryRequest()


@pytest.mark.asyncio
async def test_create_entry_async(
    transport: str = "grpc_asyncio", request_type=datacatalog.CreateEntryRequest
):
    client = DataCatalogAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.create_entry), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            datacatalog.Entry(
                name="name_value",
                linked_resource="linked_resource_value",
                display_name="display_name_value",
                description="description_value",
            )
        )
        response = await client.create_entry(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == datacatalog.CreateEntryRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, datacatalog.Entry)
    assert response.name == "name_value"
    assert response.linked_resource == "linked_resource_value"
    assert response.display_name == "display_name_value"
    assert response.description == "description_value"


@pytest.mark.asyncio
async def test_create_entry_async_from_dict():
    await test_create_entry_async(request_type=dict)


def test_create_entry_field_headers():
    client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),)

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = datacatalog.CreateEntryRequest()
    request.parent = "parent/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.create_entry), "__call__") as call:
        call.return_value = datacatalog.Entry()
        client.create_entry(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]


@pytest.mark.asyncio
async def test_create_entry_field_headers_async():
    client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = datacatalog.CreateEntryRequest()
    request.parent = "parent/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.create_entry), "__call__") as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.Entry())
        await client.create_entry(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]


def test_create_entry_flattened():
    client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),)

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.create_entry), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = datacatalog.Entry()
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.create_entry(
            parent="parent_value",
            entry_id="entry_id_value",
            entry=datacatalog.Entry(name="name_value"),
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0].parent == "parent_value"
        assert args[0].entry_id == "entry_id_value"
        assert args[0].entry == datacatalog.Entry(name="name_value")


def test_create_entry_flattened_error():
    client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),)

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.create_entry(
            datacatalog.CreateEntryRequest(),
            parent="parent_value",
            entry_id="entry_id_value",
            entry=datacatalog.Entry(name="name_value"),
        )


@pytest.mark.asyncio
async def test_create_entry_flattened_async():
    client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.create_entry), "__call__") as call:
        # Designate an appropriate return value for the call.
        # NOTE(review): first assignment is immediately overwritten below —
        # redundant artifact of the code generator.
        call.return_value = datacatalog.Entry()
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.Entry())
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
response = await client.create_entry( parent="parent_value", entry_id="entry_id_value", entry=datacatalog.Entry(name="name_value"), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].parent == "parent_value" assert args[0].entry_id == "entry_id_value" assert args[0].entry == datacatalog.Entry(name="name_value") @pytest.mark.asyncio async def test_create_entry_flattened_error_async(): client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.create_entry( datacatalog.CreateEntryRequest(), parent="parent_value", entry_id="entry_id_value", entry=datacatalog.Entry(name="name_value"), ) def test_update_entry( transport: str = "grpc", request_type=datacatalog.UpdateEntryRequest ): client = DataCatalogClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_entry), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.Entry( name="name_value", linked_resource="linked_resource_value", display_name="display_name_value", description="description_value", type_=datacatalog.EntryType.TABLE, integrated_system=common.IntegratedSystem.BIGQUERY, gcs_fileset_spec=gcs_fileset_spec.GcsFilesetSpec( file_patterns=["file_patterns_value"] ), ) response = client.update_entry(request) # Establish that the underlying gRPC stub method was called. 
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == datacatalog.UpdateEntryRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, datacatalog.Entry)
    assert response.name == "name_value"
    assert response.linked_resource == "linked_resource_value"
    assert response.display_name == "display_name_value"
    assert response.description == "description_value"


def test_update_entry_from_dict():
    """update_entry accepts a plain dict as the request object."""
    test_update_entry(request_type=dict)


def test_update_entry_empty_call():
    """Calling update_entry with no arguments still issues an RPC."""
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = DataCatalogClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.update_entry), "__call__") as call:
        client.update_entry()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == datacatalog.UpdateEntryRequest()


@pytest.mark.asyncio
async def test_update_entry_async(
    transport: str = "grpc_asyncio", request_type=datacatalog.UpdateEntryRequest
):
    """Async variant: update_entry sends the request and returns the Entry."""
    client = DataCatalogAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.update_entry), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            datacatalog.Entry(
                name="name_value",
                linked_resource="linked_resource_value",
                display_name="display_name_value",
                description="description_value",
            )
        )
        response = await client.update_entry(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == datacatalog.UpdateEntryRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, datacatalog.Entry)
    assert response.name == "name_value"
    assert response.linked_resource == "linked_resource_value"
    assert response.display_name == "display_name_value"
    assert response.description == "description_value"


@pytest.mark.asyncio
async def test_update_entry_async_from_dict():
    """Async variant: update_entry accepts a plain dict as the request."""
    await test_update_entry_async(request_type=dict)


def test_update_entry_field_headers():
    """update_entry sends the entry.name routing header with the request."""
    client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),)

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = datacatalog.UpdateEntryRequest()

    request.entry.name = "entry.name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.update_entry), "__call__") as call:
        call.return_value = datacatalog.Entry()
        client.update_entry(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "entry.name=entry.name/value",) in kw["metadata"]


@pytest.mark.asyncio
async def test_update_entry_field_headers_async():
    """Async variant: update_entry sends the entry.name routing header."""
    client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = datacatalog.UpdateEntryRequest()

    request.entry.name = "entry.name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.update_entry), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.Entry()) await client.update_entry(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "entry.name=entry.name/value",) in kw["metadata"] def test_update_entry_flattened(): client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_entry), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.Entry() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.update_entry( entry=datacatalog.Entry(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0].entry == datacatalog.Entry(name="name_value") assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) def test_update_entry_flattened_error(): client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): client.update_entry( datacatalog.UpdateEntryRequest(), entry=datacatalog.Entry(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio async def test_update_entry_flattened_async(): client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_entry), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.Entry() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.Entry()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.update_entry( entry=datacatalog.Entry(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].entry == datacatalog.Entry(name="name_value") assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) @pytest.mark.asyncio async def test_update_entry_flattened_error_async(): client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.update_entry( datacatalog.UpdateEntryRequest(), entry=datacatalog.Entry(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) def test_delete_entry( transport: str = "grpc", request_type=datacatalog.DeleteEntryRequest ): client = DataCatalogClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. 
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.delete_entry), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = None
        response = client.delete_entry(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == datacatalog.DeleteEntryRequest()

    # Establish that the response is the type that we expect.
    assert response is None


def test_delete_entry_from_dict():
    """delete_entry accepts a plain dict as the request object."""
    test_delete_entry(request_type=dict)


def test_delete_entry_empty_call():
    """Calling delete_entry with no arguments still issues an RPC."""
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = DataCatalogClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.delete_entry), "__call__") as call:
        client.delete_entry()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == datacatalog.DeleteEntryRequest()


@pytest.mark.asyncio
async def test_delete_entry_async(
    transport: str = "grpc_asyncio", request_type=datacatalog.DeleteEntryRequest
):
    """Async variant: delete_entry sends the request and returns None."""
    client = DataCatalogAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.delete_entry), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        response = await client.delete_entry(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == datacatalog.DeleteEntryRequest()

    # Establish that the response is the type that we expect.
    assert response is None


@pytest.mark.asyncio
async def test_delete_entry_async_from_dict():
    """Async variant: delete_entry accepts a plain dict as the request."""
    await test_delete_entry_async(request_type=dict)


def test_delete_entry_field_headers():
    """delete_entry sends the name routing header with the request."""
    client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),)

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = datacatalog.DeleteEntryRequest()

    request.name = "name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.delete_entry), "__call__") as call:
        call.return_value = None
        client.delete_entry(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]


@pytest.mark.asyncio
async def test_delete_entry_field_headers_async():
    """Async variant: delete_entry sends the name routing header."""
    client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = datacatalog.DeleteEntryRequest()

    request.name = "name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.delete_entry), "__call__") as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        await client.delete_entry(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
_, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_delete_entry_flattened(): client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_entry), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_entry(name="name_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0].name == "name_value" def test_delete_entry_flattened_error(): client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.delete_entry( datacatalog.DeleteEntryRequest(), name="name_value", ) @pytest.mark.asyncio async def test_delete_entry_flattened_async(): client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_entry), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.delete_entry(name="name_value",) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].name == "name_value" @pytest.mark.asyncio async def test_delete_entry_flattened_error_async(): client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.delete_entry( datacatalog.DeleteEntryRequest(), name="name_value", ) def test_get_entry(transport: str = "grpc", request_type=datacatalog.GetEntryRequest): client = DataCatalogClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_entry), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.Entry( name="name_value", linked_resource="linked_resource_value", display_name="display_name_value", description="description_value", type_=datacatalog.EntryType.TABLE, integrated_system=common.IntegratedSystem.BIGQUERY, gcs_fileset_spec=gcs_fileset_spec.GcsFilesetSpec( file_patterns=["file_patterns_value"] ), ) response = client.get_entry(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == datacatalog.GetEntryRequest() # Establish that the response is the type that we expect. 
    assert isinstance(response, datacatalog.Entry)
    assert response.name == "name_value"
    assert response.linked_resource == "linked_resource_value"
    assert response.display_name == "display_name_value"
    assert response.description == "description_value"


def test_get_entry_from_dict():
    """get_entry accepts a plain dict as the request object."""
    test_get_entry(request_type=dict)


def test_get_entry_empty_call():
    """Calling get_entry with no arguments still issues an RPC."""
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = DataCatalogClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.get_entry), "__call__") as call:
        client.get_entry()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == datacatalog.GetEntryRequest()


@pytest.mark.asyncio
async def test_get_entry_async(
    transport: str = "grpc_asyncio", request_type=datacatalog.GetEntryRequest
):
    """Async variant: get_entry sends the request and returns the Entry."""
    client = DataCatalogAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.get_entry), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            datacatalog.Entry(
                name="name_value",
                linked_resource="linked_resource_value",
                display_name="display_name_value",
                description="description_value",
            )
        )
        response = await client.get_entry(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == datacatalog.GetEntryRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, datacatalog.Entry)
    assert response.name == "name_value"
    assert response.linked_resource == "linked_resource_value"
    assert response.display_name == "display_name_value"
    assert response.description == "description_value"


@pytest.mark.asyncio
async def test_get_entry_async_from_dict():
    """Async variant: get_entry accepts a plain dict as the request."""
    await test_get_entry_async(request_type=dict)


def test_get_entry_field_headers():
    """get_entry sends the name routing header with the request."""
    client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),)

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = datacatalog.GetEntryRequest()

    request.name = "name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.get_entry), "__call__") as call:
        call.return_value = datacatalog.Entry()
        client.get_entry(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]


@pytest.mark.asyncio
async def test_get_entry_field_headers_async():
    """Async variant: get_entry sends the name routing header."""
    client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = datacatalog.GetEntryRequest()

    request.name = "name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.get_entry), "__call__") as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.Entry())
        await client.get_entry(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
_, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_get_entry_flattened(): client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_entry), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.Entry() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_entry(name="name_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0].name == "name_value" def test_get_entry_flattened_error(): client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.get_entry( datacatalog.GetEntryRequest(), name="name_value", ) @pytest.mark.asyncio async def test_get_entry_flattened_async(): client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_entry), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.Entry() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.Entry()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.get_entry(name="name_value",) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].name == "name_value" @pytest.mark.asyncio async def test_get_entry_flattened_error_async(): client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.get_entry( datacatalog.GetEntryRequest(), name="name_value", ) def test_lookup_entry( transport: str = "grpc", request_type=datacatalog.LookupEntryRequest ): client = DataCatalogClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.lookup_entry), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.Entry( name="name_value", linked_resource="linked_resource_value", display_name="display_name_value", description="description_value", type_=datacatalog.EntryType.TABLE, integrated_system=common.IntegratedSystem.BIGQUERY, gcs_fileset_spec=gcs_fileset_spec.GcsFilesetSpec( file_patterns=["file_patterns_value"] ), ) response = client.lookup_entry(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == datacatalog.LookupEntryRequest() # Establish that the response is the type that we expect. 
    assert isinstance(response, datacatalog.Entry)
    assert response.name == "name_value"
    assert response.linked_resource == "linked_resource_value"
    assert response.display_name == "display_name_value"
    assert response.description == "description_value"


def test_lookup_entry_from_dict():
    """lookup_entry accepts a plain dict as the request object."""
    test_lookup_entry(request_type=dict)


def test_lookup_entry_empty_call():
    """Calling lookup_entry with no arguments still issues an RPC."""
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = DataCatalogClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.lookup_entry), "__call__") as call:
        client.lookup_entry()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == datacatalog.LookupEntryRequest()


@pytest.mark.asyncio
async def test_lookup_entry_async(
    transport: str = "grpc_asyncio", request_type=datacatalog.LookupEntryRequest
):
    """Async variant: lookup_entry sends the request and returns the Entry."""
    client = DataCatalogAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.lookup_entry), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            datacatalog.Entry(
                name="name_value",
                linked_resource="linked_resource_value",
                display_name="display_name_value",
                description="description_value",
            )
        )
        response = await client.lookup_entry(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == datacatalog.LookupEntryRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, datacatalog.Entry)
    assert response.name == "name_value"
    assert response.linked_resource == "linked_resource_value"
    assert response.display_name == "display_name_value"
    assert response.description == "description_value"


@pytest.mark.asyncio
async def test_lookup_entry_async_from_dict():
    """Async variant: lookup_entry accepts a plain dict as the request."""
    await test_lookup_entry_async(request_type=dict)


def test_list_entries(
    transport: str = "grpc", request_type=datacatalog.ListEntriesRequest
):
    """list_entries sends the request and returns a ListEntriesPager."""
    client = DataCatalogClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.list_entries), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = datacatalog.ListEntriesResponse(
            next_page_token="next_page_token_value",
        )
        response = client.list_entries(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == datacatalog.ListEntriesRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, pagers.ListEntriesPager)
    assert response.next_page_token == "next_page_token_value"


def test_list_entries_from_dict():
    """list_entries accepts a plain dict as the request object."""
    test_list_entries(request_type=dict)


def test_list_entries_empty_call():
    """Calling list_entries with no arguments still issues an RPC."""
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = DataCatalogClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.list_entries), "__call__") as call:
        client.list_entries()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == datacatalog.ListEntriesRequest()


@pytest.mark.asyncio
async def test_list_entries_async(
    transport: str = "grpc_asyncio", request_type=datacatalog.ListEntriesRequest
):
    """Async variant: list_entries returns a ListEntriesAsyncPager."""
    client = DataCatalogAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.list_entries), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            datacatalog.ListEntriesResponse(next_page_token="next_page_token_value",)
        )
        response = await client.list_entries(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == datacatalog.ListEntriesRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, pagers.ListEntriesAsyncPager)
    assert response.next_page_token == "next_page_token_value"


@pytest.mark.asyncio
async def test_list_entries_async_from_dict():
    """Async variant: list_entries accepts a plain dict as the request."""
    await test_list_entries_async(request_type=dict)


def test_list_entries_field_headers():
    """list_entries sends the parent routing header with the request."""
    client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),)

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = datacatalog.ListEntriesRequest()

    request.parent = "parent/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.list_entries), "__call__") as call:
        call.return_value = datacatalog.ListEntriesResponse()
        client.list_entries(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]


@pytest.mark.asyncio
async def test_list_entries_field_headers_async():
    """Async variant: list_entries sends the parent routing header."""
    client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = datacatalog.ListEntriesRequest()

    request.parent = "parent/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.list_entries), "__call__") as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            datacatalog.ListEntriesResponse()
        )
        await client.list_entries(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]


def test_list_entries_flattened():
    """Flattened keyword arguments are copied into the ListEntriesRequest."""
    client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),)

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.list_entries), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = datacatalog.ListEntriesResponse()
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.list_entries(parent="parent_value",)

        # Establish that the underlying call was made with the expected
        # request object values.
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0].parent == "parent_value" def test_list_entries_flattened_error(): client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.list_entries( datacatalog.ListEntriesRequest(), parent="parent_value", ) @pytest.mark.asyncio async def test_list_entries_flattened_async(): client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_entries), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.ListEntriesResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datacatalog.ListEntriesResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.list_entries(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].parent == "parent_value" @pytest.mark.asyncio async def test_list_entries_flattened_error_async(): client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.list_entries( datacatalog.ListEntriesRequest(), parent="parent_value", ) def test_list_entries_pager(): client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_entries), "__call__") as call: # Set the response to a series of pages. 
call.side_effect = ( datacatalog.ListEntriesResponse( entries=[ datacatalog.Entry(), datacatalog.Entry(), datacatalog.Entry(), ], next_page_token="abc", ), datacatalog.ListEntriesResponse(entries=[], next_page_token="def",), datacatalog.ListEntriesResponse( entries=[datacatalog.Entry(),], next_page_token="ghi", ), datacatalog.ListEntriesResponse( entries=[datacatalog.Entry(), datacatalog.Entry(),], ), RuntimeError, ) metadata = () metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_entries(request={}) assert pager._metadata == metadata results = [i for i in pager] assert len(results) == 6 assert all(isinstance(i, datacatalog.Entry) for i in results) def test_list_entries_pages(): client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_entries), "__call__") as call: # Set the response to a series of pages. call.side_effect = ( datacatalog.ListEntriesResponse( entries=[ datacatalog.Entry(), datacatalog.Entry(), datacatalog.Entry(), ], next_page_token="abc", ), datacatalog.ListEntriesResponse(entries=[], next_page_token="def",), datacatalog.ListEntriesResponse( entries=[datacatalog.Entry(),], next_page_token="ghi", ), datacatalog.ListEntriesResponse( entries=[datacatalog.Entry(), datacatalog.Entry(),], ), RuntimeError, ) pages = list(client.list_entries(request={}).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @pytest.mark.asyncio async def test_list_entries_async_pager(): client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_entries), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. 
call.side_effect = ( datacatalog.ListEntriesResponse( entries=[ datacatalog.Entry(), datacatalog.Entry(), datacatalog.Entry(), ], next_page_token="abc", ), datacatalog.ListEntriesResponse(entries=[], next_page_token="def",), datacatalog.ListEntriesResponse( entries=[datacatalog.Entry(),], next_page_token="ghi", ), datacatalog.ListEntriesResponse( entries=[datacatalog.Entry(), datacatalog.Entry(),], ), RuntimeError, ) async_pager = await client.list_entries(request={},) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: responses.append(response) assert len(responses) == 6 assert all(isinstance(i, datacatalog.Entry) for i in responses) @pytest.mark.asyncio async def test_list_entries_async_pages(): client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_entries), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. 
call.side_effect = ( datacatalog.ListEntriesResponse( entries=[ datacatalog.Entry(), datacatalog.Entry(), datacatalog.Entry(), ], next_page_token="abc", ), datacatalog.ListEntriesResponse(entries=[], next_page_token="def",), datacatalog.ListEntriesResponse( entries=[datacatalog.Entry(),], next_page_token="ghi", ), datacatalog.ListEntriesResponse( entries=[datacatalog.Entry(), datacatalog.Entry(),], ), RuntimeError, ) pages = [] async for page_ in (await client.list_entries(request={})).pages: pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token def test_create_tag_template( transport: str = "grpc", request_type=datacatalog.CreateTagTemplateRequest ): client = DataCatalogClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.create_tag_template), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = tags.TagTemplate( name="name_value", display_name="display_name_value", ) response = client.create_tag_template(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == datacatalog.CreateTagTemplateRequest() # Establish that the response is the type that we expect. assert isinstance(response, tags.TagTemplate) assert response.name == "name_value" assert response.display_name == "display_name_value" def test_create_tag_template_from_dict(): test_create_tag_template(request_type=dict) def test_create_tag_template_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = DataCatalogClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.create_tag_template), "__call__" ) as call: client.create_tag_template() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == datacatalog.CreateTagTemplateRequest() @pytest.mark.asyncio async def test_create_tag_template_async( transport: str = "grpc_asyncio", request_type=datacatalog.CreateTagTemplateRequest ): client = DataCatalogAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.create_tag_template), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( tags.TagTemplate(name="name_value", display_name="display_name_value",) ) response = await client.create_tag_template(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == datacatalog.CreateTagTemplateRequest() # Establish that the response is the type that we expect. assert isinstance(response, tags.TagTemplate) assert response.name == "name_value" assert response.display_name == "display_name_value" @pytest.mark.asyncio async def test_create_tag_template_async_from_dict(): await test_create_tag_template_async(request_type=dict) def test_create_tag_template_field_headers(): client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
request = datacatalog.CreateTagTemplateRequest() request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.create_tag_template), "__call__" ) as call: call.return_value = tags.TagTemplate() client.create_tag_template(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio async def test_create_tag_template_field_headers_async(): client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.CreateTagTemplateRequest() request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.create_tag_template), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplate()) await client.create_tag_template(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_create_tag_template_flattened(): client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.create_tag_template), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = tags.TagTemplate() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_tag_template( parent="parent_value", tag_template_id="tag_template_id_value", tag_template=tags.TagTemplate(name="name_value"), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0].parent == "parent_value" assert args[0].tag_template_id == "tag_template_id_value" assert args[0].tag_template == tags.TagTemplate(name="name_value") def test_create_tag_template_flattened_error(): client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.create_tag_template( datacatalog.CreateTagTemplateRequest(), parent="parent_value", tag_template_id="tag_template_id_value", tag_template=tags.TagTemplate(name="name_value"), ) @pytest.mark.asyncio async def test_create_tag_template_flattened_async(): client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.create_tag_template), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = tags.TagTemplate() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplate()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.create_tag_template( parent="parent_value", tag_template_id="tag_template_id_value", tag_template=tags.TagTemplate(name="name_value"), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].parent == "parent_value" assert args[0].tag_template_id == "tag_template_id_value" assert args[0].tag_template == tags.TagTemplate(name="name_value") @pytest.mark.asyncio async def test_create_tag_template_flattened_error_async(): client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.create_tag_template( datacatalog.CreateTagTemplateRequest(), parent="parent_value", tag_template_id="tag_template_id_value", tag_template=tags.TagTemplate(name="name_value"), ) def test_get_tag_template( transport: str = "grpc", request_type=datacatalog.GetTagTemplateRequest ): client = DataCatalogClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_tag_template), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = tags.TagTemplate( name="name_value", display_name="display_name_value", ) response = client.get_tag_template(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == datacatalog.GetTagTemplateRequest() # Establish that the response is the type that we expect. assert isinstance(response, tags.TagTemplate) assert response.name == "name_value" assert response.display_name == "display_name_value" def test_get_tag_template_from_dict(): test_get_tag_template(request_type=dict) def test_get_tag_template_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. 
request == None and no flattened fields passed, work. client = DataCatalogClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_tag_template), "__call__") as call: client.get_tag_template() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == datacatalog.GetTagTemplateRequest() @pytest.mark.asyncio async def test_get_tag_template_async( transport: str = "grpc_asyncio", request_type=datacatalog.GetTagTemplateRequest ): client = DataCatalogAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_tag_template), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( tags.TagTemplate(name="name_value", display_name="display_name_value",) ) response = await client.get_tag_template(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == datacatalog.GetTagTemplateRequest() # Establish that the response is the type that we expect. assert isinstance(response, tags.TagTemplate) assert response.name == "name_value" assert response.display_name == "display_name_value" @pytest.mark.asyncio async def test_get_tag_template_async_from_dict(): await test_get_tag_template_async(request_type=dict) def test_get_tag_template_field_headers(): client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
request = datacatalog.GetTagTemplateRequest() request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_tag_template), "__call__") as call: call.return_value = tags.TagTemplate() client.get_tag_template(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_get_tag_template_field_headers_async(): client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.GetTagTemplateRequest() request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_tag_template), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplate()) await client.get_tag_template(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_get_tag_template_flattened(): client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_tag_template), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = tags.TagTemplate() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.get_tag_template(name="name_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0].name == "name_value" def test_get_tag_template_flattened_error(): client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.get_tag_template( datacatalog.GetTagTemplateRequest(), name="name_value", ) @pytest.mark.asyncio async def test_get_tag_template_flattened_async(): client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_tag_template), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = tags.TagTemplate() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplate()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.get_tag_template(name="name_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].name == "name_value" @pytest.mark.asyncio async def test_get_tag_template_flattened_error_async(): client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): await client.get_tag_template( datacatalog.GetTagTemplateRequest(), name="name_value", ) def test_update_tag_template( transport: str = "grpc", request_type=datacatalog.UpdateTagTemplateRequest ): client = DataCatalogClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_tag_template), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = tags.TagTemplate( name="name_value", display_name="display_name_value", ) response = client.update_tag_template(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == datacatalog.UpdateTagTemplateRequest() # Establish that the response is the type that we expect. assert isinstance(response, tags.TagTemplate) assert response.name == "name_value" assert response.display_name == "display_name_value" def test_update_tag_template_from_dict(): test_update_tag_template(request_type=dict) def test_update_tag_template_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataCatalogClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( type(client.transport.update_tag_template), "__call__" ) as call: client.update_tag_template() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == datacatalog.UpdateTagTemplateRequest() @pytest.mark.asyncio async def test_update_tag_template_async( transport: str = "grpc_asyncio", request_type=datacatalog.UpdateTagTemplateRequest ): client = DataCatalogAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_tag_template), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( tags.TagTemplate(name="name_value", display_name="display_name_value",) ) response = await client.update_tag_template(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == datacatalog.UpdateTagTemplateRequest() # Establish that the response is the type that we expect. assert isinstance(response, tags.TagTemplate) assert response.name == "name_value" assert response.display_name == "display_name_value" @pytest.mark.asyncio async def test_update_tag_template_async_from_dict(): await test_update_tag_template_async(request_type=dict) def test_update_tag_template_field_headers(): client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.UpdateTagTemplateRequest() request.tag_template.name = "tag_template.name/value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( type(client.transport.update_tag_template), "__call__" ) as call: call.return_value = tags.TagTemplate() client.update_tag_template(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", "tag_template.name=tag_template.name/value", ) in kw["metadata"] @pytest.mark.asyncio async def test_update_tag_template_field_headers_async(): client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.UpdateTagTemplateRequest() request.tag_template.name = "tag_template.name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_tag_template), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplate()) await client.update_tag_template(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", "tag_template.name=tag_template.name/value", ) in kw["metadata"] def test_update_tag_template_flattened(): client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_tag_template), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = tags.TagTemplate() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.update_tag_template( tag_template=tags.TagTemplate(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0].tag_template == tags.TagTemplate(name="name_value") assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) def test_update_tag_template_flattened_error(): client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.update_tag_template( datacatalog.UpdateTagTemplateRequest(), tag_template=tags.TagTemplate(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio async def test_update_tag_template_flattened_async(): client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_tag_template), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = tags.TagTemplate() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplate()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.update_tag_template( tag_template=tags.TagTemplate(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].tag_template == tags.TagTemplate(name="name_value") assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) @pytest.mark.asyncio async def test_update_tag_template_flattened_error_async(): client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.update_tag_template( datacatalog.UpdateTagTemplateRequest(), tag_template=tags.TagTemplate(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) def test_delete_tag_template( transport: str = "grpc", request_type=datacatalog.DeleteTagTemplateRequest ): client = DataCatalogClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.delete_tag_template), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None response = client.delete_tag_template(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == datacatalog.DeleteTagTemplateRequest() # Establish that the response is the type that we expect. assert response is None def test_delete_tag_template_from_dict(): test_delete_tag_template(request_type=dict) def test_delete_tag_template_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = DataCatalogClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.delete_tag_template), "__call__" ) as call: client.delete_tag_template() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == datacatalog.DeleteTagTemplateRequest() @pytest.mark.asyncio async def test_delete_tag_template_async( transport: str = "grpc_asyncio", request_type=datacatalog.DeleteTagTemplateRequest ): client = DataCatalogAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.delete_tag_template), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) response = await client.delete_tag_template(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == datacatalog.DeleteTagTemplateRequest() # Establish that the response is the type that we expect. assert response is None @pytest.mark.asyncio async def test_delete_tag_template_async_from_dict(): await test_delete_tag_template_async(request_type=dict) def test_delete_tag_template_field_headers(): client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.DeleteTagTemplateRequest() request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( type(client.transport.delete_tag_template), "__call__" ) as call: call.return_value = None client.delete_tag_template(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_delete_tag_template_field_headers_async(): client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.DeleteTagTemplateRequest() request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.delete_tag_template), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.delete_tag_template(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_delete_tag_template_flattened(): client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.delete_tag_template), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_tag_template( name="name_value", force=True, ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0].name == "name_value" assert args[0].force == True def test_delete_tag_template_flattened_error(): client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.delete_tag_template( datacatalog.DeleteTagTemplateRequest(), name="name_value", force=True, ) @pytest.mark.asyncio async def test_delete_tag_template_flattened_async(): client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.delete_tag_template), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.delete_tag_template(name="name_value", force=True,) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].name == "name_value" assert args[0].force == True @pytest.mark.asyncio async def test_delete_tag_template_flattened_error_async(): client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): await client.delete_tag_template( datacatalog.DeleteTagTemplateRequest(), name="name_value", force=True, ) def test_create_tag_template_field( transport: str = "grpc", request_type=datacatalog.CreateTagTemplateFieldRequest ): client = DataCatalogClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.create_tag_template_field), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = tags.TagTemplateField( name="name_value", display_name="display_name_value", is_required=True, order=540, ) response = client.create_tag_template_field(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == datacatalog.CreateTagTemplateFieldRequest() # Establish that the response is the type that we expect. assert isinstance(response, tags.TagTemplateField) assert response.name == "name_value" assert response.display_name == "display_name_value" assert response.is_required is True assert response.order == 540 def test_create_tag_template_field_from_dict(): test_create_tag_template_field(request_type=dict) def test_create_tag_template_field_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataCatalogClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( type(client.transport.create_tag_template_field), "__call__" ) as call: client.create_tag_template_field() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == datacatalog.CreateTagTemplateFieldRequest() @pytest.mark.asyncio async def test_create_tag_template_field_async( transport: str = "grpc_asyncio", request_type=datacatalog.CreateTagTemplateFieldRequest, ): client = DataCatalogAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.create_tag_template_field), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( tags.TagTemplateField( name="name_value", display_name="display_name_value", is_required=True, order=540, ) ) response = await client.create_tag_template_field(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == datacatalog.CreateTagTemplateFieldRequest() # Establish that the response is the type that we expect. assert isinstance(response, tags.TagTemplateField) assert response.name == "name_value" assert response.display_name == "display_name_value" assert response.is_required is True assert response.order == 540 @pytest.mark.asyncio async def test_create_tag_template_field_async_from_dict(): await test_create_tag_template_field_async(request_type=dict) def test_create_tag_template_field_field_headers(): client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
request = datacatalog.CreateTagTemplateFieldRequest() request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.create_tag_template_field), "__call__" ) as call: call.return_value = tags.TagTemplateField() client.create_tag_template_field(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio async def test_create_tag_template_field_field_headers_async(): client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.CreateTagTemplateFieldRequest() request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.create_tag_template_field), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( tags.TagTemplateField() ) await client.create_tag_template_field(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_create_tag_template_field_flattened(): client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.create_tag_template_field), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = tags.TagTemplateField() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_tag_template_field( parent="parent_value", tag_template_field_id="tag_template_field_id_value", tag_template_field=tags.TagTemplateField(name="name_value"), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0].parent == "parent_value" assert args[0].tag_template_field_id == "tag_template_field_id_value" assert args[0].tag_template_field == tags.TagTemplateField(name="name_value") def test_create_tag_template_field_flattened_error(): client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.create_tag_template_field( datacatalog.CreateTagTemplateFieldRequest(), parent="parent_value", tag_template_field_id="tag_template_field_id_value", tag_template_field=tags.TagTemplateField(name="name_value"), ) @pytest.mark.asyncio async def test_create_tag_template_field_flattened_async(): client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.create_tag_template_field), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = tags.TagTemplateField() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( tags.TagTemplateField() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.create_tag_template_field( parent="parent_value", tag_template_field_id="tag_template_field_id_value", tag_template_field=tags.TagTemplateField(name="name_value"), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].parent == "parent_value" assert args[0].tag_template_field_id == "tag_template_field_id_value" assert args[0].tag_template_field == tags.TagTemplateField(name="name_value") @pytest.mark.asyncio async def test_create_tag_template_field_flattened_error_async(): client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.create_tag_template_field( datacatalog.CreateTagTemplateFieldRequest(), parent="parent_value", tag_template_field_id="tag_template_field_id_value", tag_template_field=tags.TagTemplateField(name="name_value"), ) def test_update_tag_template_field( transport: str = "grpc", request_type=datacatalog.UpdateTagTemplateFieldRequest ): client = DataCatalogClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_tag_template_field), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = tags.TagTemplateField( name="name_value", display_name="display_name_value", is_required=True, order=540, ) response = client.update_tag_template_field(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == datacatalog.UpdateTagTemplateFieldRequest() # Establish that the response is the type that we expect. assert isinstance(response, tags.TagTemplateField) assert response.name == "name_value" assert response.display_name == "display_name_value" assert response.is_required is True assert response.order == 540 def test_update_tag_template_field_from_dict(): test_update_tag_template_field(request_type=dict) def test_update_tag_template_field_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataCatalogClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_tag_template_field), "__call__" ) as call: client.update_tag_template_field() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == datacatalog.UpdateTagTemplateFieldRequest() @pytest.mark.asyncio async def test_update_tag_template_field_async( transport: str = "grpc_asyncio", request_type=datacatalog.UpdateTagTemplateFieldRequest, ): client = DataCatalogAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_tag_template_field), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( tags.TagTemplateField( name="name_value", display_name="display_name_value", is_required=True, order=540, ) ) response = await client.update_tag_template_field(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == datacatalog.UpdateTagTemplateFieldRequest() # Establish that the response is the type that we expect. assert isinstance(response, tags.TagTemplateField) assert response.name == "name_value" assert response.display_name == "display_name_value" assert response.is_required is True assert response.order == 540 @pytest.mark.asyncio async def test_update_tag_template_field_async_from_dict(): await test_update_tag_template_field_async(request_type=dict) def test_update_tag_template_field_field_headers(): client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.UpdateTagTemplateFieldRequest() request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_tag_template_field), "__call__" ) as call: call.return_value = tags.TagTemplateField() client.update_tag_template_field(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_update_tag_template_field_field_headers_async(): client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
request = datacatalog.UpdateTagTemplateFieldRequest() request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_tag_template_field), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( tags.TagTemplateField() ) await client.update_tag_template_field(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_update_tag_template_field_flattened(): client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_tag_template_field), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = tags.TagTemplateField() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.update_tag_template_field( name="name_value", tag_template_field=tags.TagTemplateField(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0].name == "name_value" assert args[0].tag_template_field == tags.TagTemplateField(name="name_value") assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) def test_update_tag_template_field_flattened_error(): client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): client.update_tag_template_field( datacatalog.UpdateTagTemplateFieldRequest(), name="name_value", tag_template_field=tags.TagTemplateField(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio async def test_update_tag_template_field_flattened_async(): client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_tag_template_field), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = tags.TagTemplateField() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( tags.TagTemplateField() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.update_tag_template_field( name="name_value", tag_template_field=tags.TagTemplateField(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].name == "name_value" assert args[0].tag_template_field == tags.TagTemplateField(name="name_value") assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) @pytest.mark.asyncio async def test_update_tag_template_field_flattened_error_async(): client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): await client.update_tag_template_field( datacatalog.UpdateTagTemplateFieldRequest(), name="name_value", tag_template_field=tags.TagTemplateField(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) def test_rename_tag_template_field( transport: str = "grpc", request_type=datacatalog.RenameTagTemplateFieldRequest ): client = DataCatalogClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.rename_tag_template_field), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = tags.TagTemplateField( name="name_value", display_name="display_name_value", is_required=True, order=540, ) response = client.rename_tag_template_field(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == datacatalog.RenameTagTemplateFieldRequest() # Establish that the response is the type that we expect. assert isinstance(response, tags.TagTemplateField) assert response.name == "name_value" assert response.display_name == "display_name_value" assert response.is_required is True assert response.order == 540 def test_rename_tag_template_field_from_dict(): test_rename_tag_template_field(request_type=dict) def test_rename_tag_template_field_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataCatalogClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( type(client.transport.rename_tag_template_field), "__call__" ) as call: client.rename_tag_template_field() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == datacatalog.RenameTagTemplateFieldRequest() @pytest.mark.asyncio async def test_rename_tag_template_field_async( transport: str = "grpc_asyncio", request_type=datacatalog.RenameTagTemplateFieldRequest, ): client = DataCatalogAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.rename_tag_template_field), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( tags.TagTemplateField( name="name_value", display_name="display_name_value", is_required=True, order=540, ) ) response = await client.rename_tag_template_field(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == datacatalog.RenameTagTemplateFieldRequest() # Establish that the response is the type that we expect. assert isinstance(response, tags.TagTemplateField) assert response.name == "name_value" assert response.display_name == "display_name_value" assert response.is_required is True assert response.order == 540 @pytest.mark.asyncio async def test_rename_tag_template_field_async_from_dict(): await test_rename_tag_template_field_async(request_type=dict) def test_rename_tag_template_field_field_headers(): client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
request = datacatalog.RenameTagTemplateFieldRequest() request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.rename_tag_template_field), "__call__" ) as call: call.return_value = tags.TagTemplateField() client.rename_tag_template_field(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_rename_tag_template_field_field_headers_async(): client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.RenameTagTemplateFieldRequest() request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.rename_tag_template_field), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( tags.TagTemplateField() ) await client.rename_tag_template_field(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_rename_tag_template_field_flattened(): client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.rename_tag_template_field), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = tags.TagTemplateField() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.rename_tag_template_field( name="name_value", new_tag_template_field_id="new_tag_template_field_id_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0].name == "name_value" assert args[0].new_tag_template_field_id == "new_tag_template_field_id_value" def test_rename_tag_template_field_flattened_error(): client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.rename_tag_template_field( datacatalog.RenameTagTemplateFieldRequest(), name="name_value", new_tag_template_field_id="new_tag_template_field_id_value", ) @pytest.mark.asyncio async def test_rename_tag_template_field_flattened_async(): client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.rename_tag_template_field), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = tags.TagTemplateField() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( tags.TagTemplateField() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.rename_tag_template_field( name="name_value", new_tag_template_field_id="new_tag_template_field_id_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].name == "name_value" assert args[0].new_tag_template_field_id == "new_tag_template_field_id_value" @pytest.mark.asyncio async def test_rename_tag_template_field_flattened_error_async(): client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.rename_tag_template_field( datacatalog.RenameTagTemplateFieldRequest(), name="name_value", new_tag_template_field_id="new_tag_template_field_id_value", ) def test_delete_tag_template_field( transport: str = "grpc", request_type=datacatalog.DeleteTagTemplateFieldRequest ): client = DataCatalogClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.delete_tag_template_field), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None response = client.delete_tag_template_field(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == datacatalog.DeleteTagTemplateFieldRequest() # Establish that the response is the type that we expect. assert response is None def test_delete_tag_template_field_from_dict(): test_delete_tag_template_field(request_type=dict) def test_delete_tag_template_field_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = DataCatalogClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.delete_tag_template_field), "__call__" ) as call: client.delete_tag_template_field() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == datacatalog.DeleteTagTemplateFieldRequest() @pytest.mark.asyncio async def test_delete_tag_template_field_async( transport: str = "grpc_asyncio", request_type=datacatalog.DeleteTagTemplateFieldRequest, ): client = DataCatalogAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.delete_tag_template_field), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) response = await client.delete_tag_template_field(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == datacatalog.DeleteTagTemplateFieldRequest() # Establish that the response is the type that we expect. assert response is None @pytest.mark.asyncio async def test_delete_tag_template_field_async_from_dict(): await test_delete_tag_template_field_async(request_type=dict) def test_delete_tag_template_field_field_headers(): client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.DeleteTagTemplateFieldRequest() request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( type(client.transport.delete_tag_template_field), "__call__" ) as call: call.return_value = None client.delete_tag_template_field(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_delete_tag_template_field_field_headers_async(): client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.DeleteTagTemplateFieldRequest() request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.delete_tag_template_field), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.delete_tag_template_field(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_delete_tag_template_field_flattened(): client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.delete_tag_template_field), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.delete_tag_template_field( name="name_value", force=True, ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0].name == "name_value" assert args[0].force == True def test_delete_tag_template_field_flattened_error(): client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.delete_tag_template_field( datacatalog.DeleteTagTemplateFieldRequest(), name="name_value", force=True, ) @pytest.mark.asyncio async def test_delete_tag_template_field_flattened_async(): client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.delete_tag_template_field), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.delete_tag_template_field( name="name_value", force=True, ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].name == "name_value" assert args[0].force == True @pytest.mark.asyncio async def test_delete_tag_template_field_flattened_error_async(): client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): await client.delete_tag_template_field( datacatalog.DeleteTagTemplateFieldRequest(), name="name_value", force=True, ) def test_create_tag(transport: str = "grpc", request_type=datacatalog.CreateTagRequest): client = DataCatalogClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_tag), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = tags.Tag( name="name_value", template="template_value", template_display_name="template_display_name_value", column="column_value", ) response = client.create_tag(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == datacatalog.CreateTagRequest() # Establish that the response is the type that we expect. assert isinstance(response, tags.Tag) assert response.name == "name_value" assert response.template == "template_value" assert response.template_display_name == "template_display_name_value" def test_create_tag_from_dict(): test_create_tag(request_type=dict) def test_create_tag_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataCatalogClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.create_tag), "__call__") as call: client.create_tag() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == datacatalog.CreateTagRequest() @pytest.mark.asyncio async def test_create_tag_async( transport: str = "grpc_asyncio", request_type=datacatalog.CreateTagRequest ): client = DataCatalogAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_tag), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( tags.Tag( name="name_value", template="template_value", template_display_name="template_display_name_value", ) ) response = await client.create_tag(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == datacatalog.CreateTagRequest() # Establish that the response is the type that we expect. assert isinstance(response, tags.Tag) assert response.name == "name_value" assert response.template == "template_value" assert response.template_display_name == "template_display_name_value" @pytest.mark.asyncio async def test_create_tag_async_from_dict(): await test_create_tag_async(request_type=dict) def test_create_tag_field_headers(): client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.CreateTagRequest() request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.create_tag), "__call__") as call: call.return_value = tags.Tag() client.create_tag(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio async def test_create_tag_field_headers_async(): client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.CreateTagRequest() request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_tag), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.Tag()) await client.create_tag(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_create_tag_flattened(): client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_tag), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = tags.Tag() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_tag( parent="parent_value", tag=tags.Tag(name="name_value"), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0].parent == "parent_value" assert args[0].tag == tags.Tag(name="name_value") def test_create_tag_flattened_error(): client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.create_tag( datacatalog.CreateTagRequest(), parent="parent_value", tag=tags.Tag(name="name_value"), ) @pytest.mark.asyncio async def test_create_tag_flattened_async(): client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_tag), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = tags.Tag() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.Tag()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.create_tag( parent="parent_value", tag=tags.Tag(name="name_value"), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].parent == "parent_value" assert args[0].tag == tags.Tag(name="name_value") @pytest.mark.asyncio async def test_create_tag_flattened_error_async(): client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): await client.create_tag( datacatalog.CreateTagRequest(), parent="parent_value", tag=tags.Tag(name="name_value"), ) def test_update_tag(transport: str = "grpc", request_type=datacatalog.UpdateTagRequest): client = DataCatalogClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_tag), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = tags.Tag( name="name_value", template="template_value", template_display_name="template_display_name_value", column="column_value", ) response = client.update_tag(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == datacatalog.UpdateTagRequest() # Establish that the response is the type that we expect. assert isinstance(response, tags.Tag) assert response.name == "name_value" assert response.template == "template_value" assert response.template_display_name == "template_display_name_value" def test_update_tag_from_dict(): test_update_tag(request_type=dict) def test_update_tag_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataCatalogClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.update_tag), "__call__") as call: client.update_tag() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == datacatalog.UpdateTagRequest() @pytest.mark.asyncio async def test_update_tag_async( transport: str = "grpc_asyncio", request_type=datacatalog.UpdateTagRequest ): client = DataCatalogAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_tag), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( tags.Tag( name="name_value", template="template_value", template_display_name="template_display_name_value", ) ) response = await client.update_tag(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == datacatalog.UpdateTagRequest() # Establish that the response is the type that we expect. assert isinstance(response, tags.Tag) assert response.name == "name_value" assert response.template == "template_value" assert response.template_display_name == "template_display_name_value" @pytest.mark.asyncio async def test_update_tag_async_from_dict(): await test_update_tag_async(request_type=dict) def test_update_tag_field_headers(): client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.UpdateTagRequest() request.tag.name = "tag.name/value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.update_tag), "__call__") as call: call.return_value = tags.Tag() client.update_tag(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "tag.name=tag.name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_update_tag_field_headers_async(): client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.UpdateTagRequest() request.tag.name = "tag.name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_tag), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.Tag()) await client.update_tag(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "tag.name=tag.name/value",) in kw["metadata"] def test_update_tag_flattened(): client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_tag), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = tags.Tag() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.update_tag( tag=tags.Tag(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0].tag == tags.Tag(name="name_value") assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) def test_update_tag_flattened_error(): client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.update_tag( datacatalog.UpdateTagRequest(), tag=tags.Tag(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio async def test_update_tag_flattened_async(): client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_tag), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = tags.Tag() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.Tag()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.update_tag( tag=tags.Tag(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].tag == tags.Tag(name="name_value") assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) @pytest.mark.asyncio async def test_update_tag_flattened_error_async(): client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): await client.update_tag( datacatalog.UpdateTagRequest(), tag=tags.Tag(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) def test_delete_tag(transport: str = "grpc", request_type=datacatalog.DeleteTagRequest): client = DataCatalogClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_tag), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None response = client.delete_tag(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == datacatalog.DeleteTagRequest() # Establish that the response is the type that we expect. assert response is None def test_delete_tag_from_dict(): test_delete_tag(request_type=dict) def test_delete_tag_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataCatalogClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.delete_tag), "__call__") as call: client.delete_tag() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == datacatalog.DeleteTagRequest() @pytest.mark.asyncio async def test_delete_tag_async( transport: str = "grpc_asyncio", request_type=datacatalog.DeleteTagRequest ): client = DataCatalogAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_tag), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) response = await client.delete_tag(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == datacatalog.DeleteTagRequest() # Establish that the response is the type that we expect. assert response is None @pytest.mark.asyncio async def test_delete_tag_async_from_dict(): await test_delete_tag_async(request_type=dict) def test_delete_tag_field_headers(): client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.DeleteTagRequest() request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_tag), "__call__") as call: call.return_value = None client.delete_tag(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_delete_tag_field_headers_async(): client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.DeleteTagRequest() request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_tag), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.delete_tag(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_delete_tag_flattened(): client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_tag), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_tag(name="name_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0].name == "name_value" def test_delete_tag_flattened_error(): client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): client.delete_tag( datacatalog.DeleteTagRequest(), name="name_value", ) @pytest.mark.asyncio async def test_delete_tag_flattened_async(): client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_tag), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.delete_tag(name="name_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].name == "name_value" @pytest.mark.asyncio async def test_delete_tag_flattened_error_async(): client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.delete_tag( datacatalog.DeleteTagRequest(), name="name_value", ) def test_list_tags(transport: str = "grpc", request_type=datacatalog.ListTagsRequest): client = DataCatalogClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_tags), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.ListTagsResponse( next_page_token="next_page_token_value", ) response = client.list_tags(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == datacatalog.ListTagsRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListTagsPager) assert response.next_page_token == "next_page_token_value" def test_list_tags_from_dict(): test_list_tags(request_type=dict) def test_list_tags_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataCatalogClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_tags), "__call__") as call: client.list_tags() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == datacatalog.ListTagsRequest() @pytest.mark.asyncio async def test_list_tags_async( transport: str = "grpc_asyncio", request_type=datacatalog.ListTagsRequest ): client = DataCatalogAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_tags), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datacatalog.ListTagsResponse(next_page_token="next_page_token_value",) ) response = await client.list_tags(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == datacatalog.ListTagsRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListTagsAsyncPager) assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio async def test_list_tags_async_from_dict(): await test_list_tags_async(request_type=dict) def test_list_tags_field_headers(): client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.ListTagsRequest() request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_tags), "__call__") as call: call.return_value = datacatalog.ListTagsResponse() client.list_tags(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio async def test_list_tags_field_headers_async(): client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datacatalog.ListTagsRequest() request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_tags), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datacatalog.ListTagsResponse() ) await client.list_tags(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_list_tags_flattened(): client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_tags), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.ListTagsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_tags(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0].parent == "parent_value" def test_list_tags_flattened_error(): client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.list_tags( datacatalog.ListTagsRequest(), parent="parent_value", ) @pytest.mark.asyncio async def test_list_tags_flattened_async(): client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_tags), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.ListTagsResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datacatalog.ListTagsResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.list_tags(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].parent == "parent_value" @pytest.mark.asyncio async def test_list_tags_flattened_error_async(): client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.list_tags( datacatalog.ListTagsRequest(), parent="parent_value", ) def test_list_tags_pager(): client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_tags), "__call__") as call: # Set the response to a series of pages. call.side_effect = ( datacatalog.ListTagsResponse( tags=[tags.Tag(), tags.Tag(), tags.Tag(),], next_page_token="abc", ), datacatalog.ListTagsResponse(tags=[], next_page_token="def",), datacatalog.ListTagsResponse(tags=[tags.Tag(),], next_page_token="ghi",), datacatalog.ListTagsResponse(tags=[tags.Tag(), tags.Tag(),],), RuntimeError, ) metadata = () metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_tags(request={}) assert pager._metadata == metadata results = [i for i in pager] assert len(results) == 6 assert all(isinstance(i, tags.Tag) for i in results) def test_list_tags_pages(): client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_tags), "__call__") as call: # Set the response to a series of pages. 
call.side_effect = ( datacatalog.ListTagsResponse( tags=[tags.Tag(), tags.Tag(), tags.Tag(),], next_page_token="abc", ), datacatalog.ListTagsResponse(tags=[], next_page_token="def",), datacatalog.ListTagsResponse(tags=[tags.Tag(),], next_page_token="ghi",), datacatalog.ListTagsResponse(tags=[tags.Tag(), tags.Tag(),],), RuntimeError, ) pages = list(client.list_tags(request={}).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @pytest.mark.asyncio async def test_list_tags_async_pager(): client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_tags), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. call.side_effect = ( datacatalog.ListTagsResponse( tags=[tags.Tag(), tags.Tag(), tags.Tag(),], next_page_token="abc", ), datacatalog.ListTagsResponse(tags=[], next_page_token="def",), datacatalog.ListTagsResponse(tags=[tags.Tag(),], next_page_token="ghi",), datacatalog.ListTagsResponse(tags=[tags.Tag(), tags.Tag(),],), RuntimeError, ) async_pager = await client.list_tags(request={},) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: responses.append(response) assert len(responses) == 6 assert all(isinstance(i, tags.Tag) for i in responses) @pytest.mark.asyncio async def test_list_tags_async_pages(): client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_tags), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. 
call.side_effect = ( datacatalog.ListTagsResponse( tags=[tags.Tag(), tags.Tag(), tags.Tag(),], next_page_token="abc", ), datacatalog.ListTagsResponse(tags=[], next_page_token="def",), datacatalog.ListTagsResponse(tags=[tags.Tag(),], next_page_token="ghi",), datacatalog.ListTagsResponse(tags=[tags.Tag(), tags.Tag(),],), RuntimeError, ) pages = [] async for page_ in (await client.list_tags(request={})).pages: pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token def test_set_iam_policy( transport: str = "grpc", request_type=iam_policy_pb2.SetIamPolicyRequest ): client = DataCatalogClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) response = client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == iam_policy_pb2.SetIamPolicyRequest() # Establish that the response is the type that we expect. assert isinstance(response, policy_pb2.Policy) assert response.version == 774 assert response.etag == b"etag_blob" def test_set_iam_policy_from_dict(): test_set_iam_policy(request_type=dict) def test_set_iam_policy_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataCatalogClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: client.set_iam_policy() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == iam_policy_pb2.SetIamPolicyRequest() @pytest.mark.asyncio async def test_set_iam_policy_async( transport: str = "grpc_asyncio", request_type=iam_policy_pb2.SetIamPolicyRequest ): client = DataCatalogAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( policy_pb2.Policy(version=774, etag=b"etag_blob",) ) response = await client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == iam_policy_pb2.SetIamPolicyRequest() # Establish that the response is the type that we expect. assert isinstance(response, policy_pb2.Policy) assert response.version == 774 assert response.etag == b"etag_blob" @pytest.mark.asyncio async def test_set_iam_policy_async_from_dict(): await test_set_iam_policy_async(request_type=dict) def test_set_iam_policy_field_headers(): client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = iam_policy_pb2.SetIamPolicyRequest() request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: call.return_value = policy_pb2.Policy() client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] @pytest.mark.asyncio async def test_set_iam_policy_field_headers_async(): client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = iam_policy_pb2.SetIamPolicyRequest() request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) await client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] def test_set_iam_policy_from_dict_foreign(): client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = policy_pb2.Policy() response = client.set_iam_policy( request={ "resource": "resource_value", "policy": policy_pb2.Policy(version=774), } ) call.assert_called() def test_set_iam_policy_flattened(): client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = policy_pb2.Policy() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.set_iam_policy(resource="resource_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0].resource == "resource_value" def test_set_iam_policy_flattened_error(): client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.set_iam_policy( iam_policy_pb2.SetIamPolicyRequest(), resource="resource_value", ) @pytest.mark.asyncio async def test_set_iam_policy_flattened_async(): client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = policy_pb2.Policy() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.set_iam_policy(resource="resource_value",) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].resource == "resource_value" @pytest.mark.asyncio async def test_set_iam_policy_flattened_error_async(): client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.set_iam_policy( iam_policy_pb2.SetIamPolicyRequest(), resource="resource_value", ) def test_get_iam_policy( transport: str = "grpc", request_type=iam_policy_pb2.GetIamPolicyRequest ): client = DataCatalogClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) response = client.get_iam_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == iam_policy_pb2.GetIamPolicyRequest() # Establish that the response is the type that we expect. assert isinstance(response, policy_pb2.Policy) assert response.version == 774 assert response.etag == b"etag_blob" def test_get_iam_policy_from_dict(): test_get_iam_policy(request_type=dict) def test_get_iam_policy_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataCatalogClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: client.get_iam_policy() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == iam_policy_pb2.GetIamPolicyRequest() @pytest.mark.asyncio async def test_get_iam_policy_async( transport: str = "grpc_asyncio", request_type=iam_policy_pb2.GetIamPolicyRequest ): client = DataCatalogAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( policy_pb2.Policy(version=774, etag=b"etag_blob",) ) response = await client.get_iam_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == iam_policy_pb2.GetIamPolicyRequest() # Establish that the response is the type that we expect. assert isinstance(response, policy_pb2.Policy) assert response.version == 774 assert response.etag == b"etag_blob" @pytest.mark.asyncio async def test_get_iam_policy_async_from_dict(): await test_get_iam_policy_async(request_type=dict) def test_get_iam_policy_field_headers(): client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = iam_policy_pb2.GetIamPolicyRequest() request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: call.return_value = policy_pb2.Policy() client.get_iam_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] @pytest.mark.asyncio async def test_get_iam_policy_field_headers_async(): client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = iam_policy_pb2.GetIamPolicyRequest() request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) await client.get_iam_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] def test_get_iam_policy_from_dict_foreign(): client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = policy_pb2.Policy() response = client.get_iam_policy( request={ "resource": "resource_value", "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), } ) call.assert_called() def test_get_iam_policy_flattened(): client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = policy_pb2.Policy() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_iam_policy(resource="resource_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0].resource == "resource_value" def test_get_iam_policy_flattened_error(): client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.get_iam_policy( iam_policy_pb2.GetIamPolicyRequest(), resource="resource_value", ) @pytest.mark.asyncio async def test_get_iam_policy_flattened_async(): client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = policy_pb2.Policy() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.get_iam_policy(resource="resource_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].resource == "resource_value" @pytest.mark.asyncio async def test_get_iam_policy_flattened_error_async(): client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.get_iam_policy( iam_policy_pb2.GetIamPolicyRequest(), resource="resource_value", ) def test_test_iam_permissions( transport: str = "grpc", request_type=iam_policy_pb2.TestIamPermissionsRequest ): client = DataCatalogClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.test_iam_permissions), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = iam_policy_pb2.TestIamPermissionsResponse( permissions=["permissions_value"], ) response = client.test_iam_permissions(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() # Establish that the response is the type that we expect. assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) assert response.permissions == ["permissions_value"] def test_test_iam_permissions_from_dict(): test_test_iam_permissions(request_type=dict) def test_test_iam_permissions_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. 
request == None and no flattened fields passed, work. client = DataCatalogClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.test_iam_permissions), "__call__" ) as call: client.test_iam_permissions() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() @pytest.mark.asyncio async def test_test_iam_permissions_async( transport: str = "grpc_asyncio", request_type=iam_policy_pb2.TestIamPermissionsRequest, ): client = DataCatalogAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.test_iam_permissions), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( iam_policy_pb2.TestIamPermissionsResponse( permissions=["permissions_value"], ) ) response = await client.test_iam_permissions(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() # Establish that the response is the type that we expect. assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) assert response.permissions == ["permissions_value"] @pytest.mark.asyncio async def test_test_iam_permissions_async_from_dict(): await test_test_iam_permissions_async(request_type=dict) def test_test_iam_permissions_field_headers(): client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. 
Set these to a non-empty value. request = iam_policy_pb2.TestIamPermissionsRequest() request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.test_iam_permissions), "__call__" ) as call: call.return_value = iam_policy_pb2.TestIamPermissionsResponse() client.test_iam_permissions(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] @pytest.mark.asyncio async def test_test_iam_permissions_field_headers_async(): client = DataCatalogAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = iam_policy_pb2.TestIamPermissionsRequest() request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.test_iam_permissions), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( iam_policy_pb2.TestIamPermissionsResponse() ) await client.test_iam_permissions(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] def test_test_iam_permissions_from_dict_foreign(): client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.test_iam_permissions), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = iam_policy_pb2.TestIamPermissionsResponse() response = client.test_iam_permissions( request={ "resource": "resource_value", "permissions": ["permissions_value"], } ) call.assert_called() def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.DataCatalogGrpcTransport( credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = DataCatalogClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. transport = transports.DataCatalogGrpcTransport( credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = DataCatalogClient( client_options={"credentials_file": "credentials.json"}, transport=transport, ) # It is an error to provide scopes and a transport instance. transport = transports.DataCatalogGrpcTransport( credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = DataCatalogClient( client_options={"scopes": ["1", "2"]}, transport=transport, ) def test_transport_instance(): # A client may be instantiated with a custom transport instance. transport = transports.DataCatalogGrpcTransport( credentials=ga_credentials.AnonymousCredentials(), ) client = DataCatalogClient(transport=transport) assert client.transport is transport def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. 
transport = transports.DataCatalogGrpcTransport( credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel transport = transports.DataCatalogGrpcAsyncIOTransport( credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel @pytest.mark.parametrize( "transport_class", [transports.DataCatalogGrpcTransport, transports.DataCatalogGrpcAsyncIOTransport,], ) def test_transport_adc(transport_class): # Test default credentials are used if not provided. with mock.patch.object(google.auth, "default") as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class() adc.assert_called_once() def test_transport_grpc_default(): # A client should use the gRPC transport by default. client = DataCatalogClient(credentials=ga_credentials.AnonymousCredentials(),) assert isinstance(client.transport, transports.DataCatalogGrpcTransport,) def test_data_catalog_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.DataCatalogTransport( credentials=ga_credentials.AnonymousCredentials(), credentials_file="credentials.json", ) def test_data_catalog_base_transport(): # Instantiate the base transport. with mock.patch( "google.cloud.datacatalog_v1beta1.services.data_catalog.transports.DataCatalogTransport.__init__" ) as Transport: Transport.return_value = None transport = transports.DataCatalogTransport( credentials=ga_credentials.AnonymousCredentials(), ) # Every method on the transport should just blindly # raise NotImplementedError. 
methods = ( "search_catalog", "create_entry_group", "update_entry_group", "get_entry_group", "delete_entry_group", "list_entry_groups", "create_entry", "update_entry", "delete_entry", "get_entry", "lookup_entry", "list_entries", "create_tag_template", "get_tag_template", "update_tag_template", "delete_tag_template", "create_tag_template_field", "update_tag_template_field", "rename_tag_template_field", "delete_tag_template_field", "create_tag", "update_tag", "delete_tag", "list_tags", "set_iam_policy", "get_iam_policy", "test_iam_permissions", ) for method in methods: with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) @requires_google_auth_gte_1_25_0 def test_data_catalog_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( google.auth, "load_credentials_from_file", autospec=True ) as load_creds, mock.patch( "google.cloud.datacatalog_v1beta1.services.data_catalog.transports.DataCatalogTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.DataCatalogTransport( credentials_file="credentials.json", quota_project_id="octopus", ) load_creds.assert_called_once_with( "credentials.json", scopes=None, default_scopes=("https://www.googleapis.com/auth/cloud-platform",), quota_project_id="octopus", ) @requires_google_auth_lt_1_25_0 def test_data_catalog_base_transport_with_credentials_file_old_google_auth(): # Instantiate the base transport with a credentials file with mock.patch.object( google.auth, "load_credentials_from_file", autospec=True ) as load_creds, mock.patch( "google.cloud.datacatalog_v1beta1.services.data_catalog.transports.DataCatalogTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.DataCatalogTransport( 
credentials_file="credentials.json", quota_project_id="octopus", ) load_creds.assert_called_once_with( "credentials.json", scopes=("https://www.googleapis.com/auth/cloud-platform",), quota_project_id="octopus", ) def test_data_catalog_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( "google.cloud.datacatalog_v1beta1.services.data_catalog.transports.DataCatalogTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.DataCatalogTransport() adc.assert_called_once() @requires_google_auth_gte_1_25_0 def test_data_catalog_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) DataCatalogClient() adc.assert_called_once_with( scopes=None, default_scopes=("https://www.googleapis.com/auth/cloud-platform",), quota_project_id=None, ) @requires_google_auth_lt_1_25_0 def test_data_catalog_auth_adc_old_google_auth(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) DataCatalogClient() adc.assert_called_once_with( scopes=("https://www.googleapis.com/auth/cloud-platform",), quota_project_id=None, ) @pytest.mark.parametrize( "transport_class", [transports.DataCatalogGrpcTransport, transports.DataCatalogGrpcAsyncIOTransport,], ) @requires_google_auth_gte_1_25_0 def test_data_catalog_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. 
with mock.patch.object(google.auth, "default", autospec=True) as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class(quota_project_id="octopus", scopes=["1", "2"]) adc.assert_called_once_with( scopes=["1", "2"], default_scopes=("https://www.googleapis.com/auth/cloud-platform",), quota_project_id="octopus", ) @pytest.mark.parametrize( "transport_class", [transports.DataCatalogGrpcTransport, transports.DataCatalogGrpcAsyncIOTransport,], ) @requires_google_auth_lt_1_25_0 def test_data_catalog_transport_auth_adc_old_google_auth(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class(quota_project_id="octopus") adc.assert_called_once_with( scopes=("https://www.googleapis.com/auth/cloud-platform",), quota_project_id="octopus", ) @pytest.mark.parametrize( "transport_class,grpc_helpers", [ (transports.DataCatalogGrpcTransport, grpc_helpers), (transports.DataCatalogGrpcAsyncIOTransport, grpc_helpers_async), ], ) def test_data_catalog_transport_create_channel(transport_class, grpc_helpers): # If credentials and host are not provided, the transport class should use # ADC credentials. 
with mock.patch.object( google.auth, "default", autospec=True ) as adc, mock.patch.object( grpc_helpers, "create_channel", autospec=True ) as create_channel: creds = ga_credentials.AnonymousCredentials() adc.return_value = (creds, None) transport_class(quota_project_id="octopus", scopes=["1", "2"]) create_channel.assert_called_with( "datacatalog.googleapis.com:443", credentials=creds, credentials_file=None, quota_project_id="octopus", default_scopes=("https://www.googleapis.com/auth/cloud-platform",), scopes=["1", "2"], default_host="datacatalog.googleapis.com", ssl_credentials=None, options=[ ("grpc.max_send_message_length", -1), ("grpc.max_receive_message_length", -1), ], ) @pytest.mark.parametrize( "transport_class", [transports.DataCatalogGrpcTransport, transports.DataCatalogGrpcAsyncIOTransport], ) def test_data_catalog_grpc_transport_client_cert_source_for_mtls(transport_class): cred = ga_credentials.AnonymousCredentials() # Check ssl_channel_credentials is used if provided. with mock.patch.object(transport_class, "create_channel") as mock_create_channel: mock_ssl_channel_creds = mock.Mock() transport_class( host="squid.clam.whelk", credentials=cred, ssl_channel_credentials=mock_ssl_channel_creds, ) mock_create_channel.assert_called_once_with( "squid.clam.whelk:443", credentials=cred, credentials_file=None, scopes=None, ssl_credentials=mock_ssl_channel_creds, quota_project_id=None, options=[ ("grpc.max_send_message_length", -1), ("grpc.max_receive_message_length", -1), ], ) # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls # is used. 
with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: transport_class( credentials=cred, client_cert_source_for_mtls=client_cert_source_callback, ) expected_cert, expected_key = client_cert_source_callback() mock_ssl_cred.assert_called_once_with( certificate_chain=expected_cert, private_key=expected_key ) def test_data_catalog_host_no_port(): client = DataCatalogClient( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="datacatalog.googleapis.com" ), ) assert client.transport._host == "datacatalog.googleapis.com:443" def test_data_catalog_host_with_port(): client = DataCatalogClient( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="datacatalog.googleapis.com:8000" ), ) assert client.transport._host == "datacatalog.googleapis.com:8000" def test_data_catalog_grpc_transport_channel(): channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. transport = transports.DataCatalogGrpcTransport( host="squid.clam.whelk", channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" assert transport._ssl_channel_credentials == None def test_data_catalog_grpc_asyncio_transport_channel(): channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. transport = transports.DataCatalogGrpcAsyncIOTransport( host="squid.clam.whelk", channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" assert transport._ssl_channel_credentials == None # Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are # removed from grpc/grpc_asyncio transport constructor. 
@pytest.mark.parametrize( "transport_class", [transports.DataCatalogGrpcTransport, transports.DataCatalogGrpcAsyncIOTransport], ) def test_data_catalog_transport_channel_mtls_with_client_cert_source(transport_class): with mock.patch( "grpc.ssl_channel_credentials", autospec=True ) as grpc_ssl_channel_cred: with mock.patch.object( transport_class, "create_channel" ) as grpc_create_channel: mock_ssl_cred = mock.Mock() grpc_ssl_channel_cred.return_value = mock_ssl_cred mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel cred = ga_credentials.AnonymousCredentials() with pytest.warns(DeprecationWarning): with mock.patch.object(google.auth, "default") as adc: adc.return_value = (cred, None) transport = transport_class( host="squid.clam.whelk", api_mtls_endpoint="mtls.squid.clam.whelk", client_cert_source=client_cert_source_callback, ) adc.assert_called_once() grpc_ssl_channel_cred.assert_called_once_with( certificate_chain=b"cert bytes", private_key=b"key bytes" ) grpc_create_channel.assert_called_once_with( "mtls.squid.clam.whelk:443", credentials=cred, credentials_file=None, scopes=None, ssl_credentials=mock_ssl_cred, quota_project_id=None, options=[ ("grpc.max_send_message_length", -1), ("grpc.max_receive_message_length", -1), ], ) assert transport.grpc_channel == mock_grpc_channel assert transport._ssl_channel_credentials == mock_ssl_cred # Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are # removed from grpc/grpc_asyncio transport constructor. 
@pytest.mark.parametrize(
    "transport_class",
    [transports.DataCatalogGrpcTransport, transports.DataCatalogGrpcAsyncIOTransport],
)
def test_data_catalog_transport_channel_mtls_with_adc(transport_class):
    # Deprecated path: api_mtls_endpoint without an explicit client_cert_source.
    # SSL credentials should then come from google.auth's SslCredentials (ADC).
    fake_ssl_cred = mock.Mock()
    with mock.patch.multiple(
        "google.auth.transport.grpc.SslCredentials",
        __init__=mock.Mock(return_value=None),
        ssl_credentials=mock.PropertyMock(return_value=fake_ssl_cred),
    ):
        with mock.patch.object(
            transport_class, "create_channel"
        ) as grpc_create_channel:
            fake_channel = mock.Mock()
            grpc_create_channel.return_value = fake_channel
            fake_cred = mock.Mock()

            # Deprecated arguments must raise a DeprecationWarning.
            with pytest.warns(DeprecationWarning):
                transport = transport_class(
                    host="squid.clam.whelk",
                    credentials=fake_cred,
                    api_mtls_endpoint="mtls.squid.clam.whelk",
                    client_cert_source=None,
                )

            grpc_create_channel.assert_called_once_with(
                "mtls.squid.clam.whelk:443",
                credentials=fake_cred,
                credentials_file=None,
                scopes=None,
                ssl_credentials=fake_ssl_cred,
                quota_project_id=None,
                options=[
                    ("grpc.max_send_message_length", -1),
                    ("grpc.max_receive_message_length", -1),
                ],
            )
            assert transport.grpc_channel == fake_channel


def test_entry_path():
    # Building an entry resource name from its components.
    project = "squid"
    location = "clam"
    entry_group = "whelk"
    entry = "octopus"
    want = "projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}".format(
        project=project,
        location=location,
        entry_group=entry_group,
        entry=entry,
    )
    got = DataCatalogClient.entry_path(project, location, entry_group, entry)
    assert got == want


def test_parse_entry_path():
    want = {
        "project": "oyster",
        "location": "nudibranch",
        "entry_group": "cuttlefish",
        "entry": "mussel",
    }
    # Round-trip: a path built from these components parses back to them.
    path = DataCatalogClient.entry_path(**want)
    assert DataCatalogClient.parse_entry_path(path) == want


def test_entry_group_path():
    project = "winkle"
    location = "nautilus"
    entry_group = "scallop"
    want = "projects/{project}/locations/{location}/entryGroups/{entry_group}".format(
        project=project,
        location=location,
        entry_group=entry_group,
    )
    got = DataCatalogClient.entry_group_path(project, location, entry_group)
    assert got == want


def test_parse_entry_group_path():
    want = {
        "project": "abalone",
        "location": "squid",
        "entry_group": "clam",
    }
    # Round-trip: a path built from these components parses back to them.
    path = DataCatalogClient.entry_group_path(**want)
    assert DataCatalogClient.parse_entry_group_path(path) == want


def test_tag_path():
    project = "whelk"
    location = "octopus"
    entry_group = "oyster"
    entry = "nudibranch"
    tag = "cuttlefish"
    want = "projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}/tags/{tag}".format(
        project=project,
        location=location,
        entry_group=entry_group,
        entry=entry,
        tag=tag,
    )
    got = DataCatalogClient.tag_path(project, location, entry_group, entry, tag)
    assert got == want


def test_parse_tag_path():
    want = {
        "project": "mussel",
        "location": "winkle",
        "entry_group": "nautilus",
        "entry": "scallop",
        "tag": "abalone",
    }
    # Round-trip: a path built from these components parses back to them.
    path = DataCatalogClient.tag_path(**want)
    assert DataCatalogClient.parse_tag_path(path) == want


def test_tag_template_path():
    project = "squid"
    location = "clam"
    tag_template = "whelk"
    want = "projects/{project}/locations/{location}/tagTemplates/{tag_template}".format(
        project=project,
        location=location,
        tag_template=tag_template,
    )
    got = DataCatalogClient.tag_template_path(project, location, tag_template)
    assert got == want


def test_parse_tag_template_path():
    want = {
        "project": "octopus",
        "location": "oyster",
        "tag_template": "nudibranch",
    }
    # Round-trip: a path built from these components parses back to them.
    path = DataCatalogClient.tag_template_path(**want)
    assert DataCatalogClient.parse_tag_template_path(path) == want


def test_tag_template_field_path():
    project = "cuttlefish"
    location = "mussel"
    tag_template = "winkle"
    field = "nautilus"
    want = "projects/{project}/locations/{location}/tagTemplates/{tag_template}/fields/{field}".format(
        project=project,
        location=location,
        tag_template=tag_template,
        field=field,
    )
    got = DataCatalogClient.tag_template_field_path(
        project, location, tag_template, field
    )
    assert got == want


def test_parse_tag_template_field_path():
    want = {
        "project": "scallop",
        "location": "abalone",
        "tag_template": "squid",
        "field": "clam",
    }
    # Round-trip: a path built from these components parses back to them.
    path = DataCatalogClient.tag_template_field_path(**want)
    assert DataCatalogClient.parse_tag_template_field_path(path) == want


def test_common_billing_account_path():
    billing_account = "whelk"
    want = "billingAccounts/{billing_account}".format(
        billing_account=billing_account,
    )
    got = DataCatalogClient.common_billing_account_path(billing_account)
    assert got == want


def test_parse_common_billing_account_path():
    want = {
        "billing_account": "octopus",
    }
    # Round-trip: a path built from these components parses back to them.
    path = DataCatalogClient.common_billing_account_path(**want)
    assert DataCatalogClient.parse_common_billing_account_path(path) == want


def test_common_folder_path():
    folder = "oyster"
    want = "folders/{folder}".format(folder=folder,)
    got = DataCatalogClient.common_folder_path(folder)
    assert got == want


def test_parse_common_folder_path():
    want = {
        "folder": "nudibranch",
    }
    # Round-trip: a path built from these components parses back to them.
    path = DataCatalogClient.common_folder_path(**want)
    assert DataCatalogClient.parse_common_folder_path(path) == want


def test_common_organization_path():
    organization = "cuttlefish"
    want = "organizations/{organization}".format(organization=organization,)
    got = DataCatalogClient.common_organization_path(organization)
    assert got == want


def test_parse_common_organization_path():
    want = {
        "organization": "mussel",
    }
    # Round-trip: a path built from these components parses back to them.
    path = DataCatalogClient.common_organization_path(**want)
    assert DataCatalogClient.parse_common_organization_path(path) == want


def test_common_project_path():
    project = "winkle"
    want = "projects/{project}".format(project=project,)
    got = DataCatalogClient.common_project_path(project)
    assert got == want


def test_parse_common_project_path():
    want = {
        "project": "nautilus",
    }
    # Round-trip: a path built from these components parses back to them.
    path = DataCatalogClient.common_project_path(**want)
    assert DataCatalogClient.parse_common_project_path(path) == want


def test_common_location_path():
    project = "scallop"
    location = "abalone"
    want = "projects/{project}/locations/{location}".format(
        project=project,
        location=location,
    )
    got = DataCatalogClient.common_location_path(project, location)
    assert got == want


def test_parse_common_location_path():
    want = {
        "project": "squid",
        "location": "clam",
    }
    # Round-trip: a path built from these components parses back to them.
    path = DataCatalogClient.common_location_path(**want)
    assert DataCatalogClient.parse_common_location_path(path) == want


def test_client_withDEFAULT_CLIENT_INFO():
    # A user-supplied client_info must be forwarded to the transport's
    # _prep_wrapped_messages hook, both when the client builds the transport
    # and when the transport is constructed directly.
    client_info = gapic_v1.client_info.ClientInfo()
    with mock.patch.object(
        transports.DataCatalogTransport, "_prep_wrapped_messages"
    ) as prep:
        client = DataCatalogClient(
            credentials=ga_credentials.AnonymousCredentials(),
            client_info=client_info,
        )
        prep.assert_called_once_with(client_info)

    with mock.patch.object(
        transports.DataCatalogTransport, "_prep_wrapped_messages"
    ) as prep:
        transport_class = DataCatalogClient.get_transport_class()
        transport = transport_class(
            credentials=ga_credentials.AnonymousCredentials(),
            client_info=client_info,
        )
        prep.assert_called_once_with(client_info)
38.857551
119
0.692187
34,254
285,875
5.551527
0.014626
0.016197
0.02632
0.061316
0.959482
0.940898
0.929224
0.911602
0.892776
0.877084
0
0.004578
0.222891
285,875
7,356
120
38.862833
0.851406
0.228373
0
0.707767
0
0
0.078799
0.02628
0
0
0
0.000136
0.156834
1
0.047114
false
0
0.00764
0.000424
0.055178
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
53d18789a7cde6335ecf938aaae61ecf37a82c34
308
py
Python
temboo/core/Library/Dwolla/Contacts/__init__.py
jordanemedlock/psychtruths
52e09033ade9608bd5143129f8a1bfac22d634dd
[ "Apache-2.0" ]
7
2016-03-07T02:07:21.000Z
2022-01-21T02:22:41.000Z
temboo/core/Library/Dwolla/Contacts/__init__.py
jordanemedlock/psychtruths
52e09033ade9608bd5143129f8a1bfac22d634dd
[ "Apache-2.0" ]
null
null
null
temboo/core/Library/Dwolla/Contacts/__init__.py
jordanemedlock/psychtruths
52e09033ade9608bd5143129f8a1bfac22d634dd
[ "Apache-2.0" ]
8
2016-06-14T06:01:11.000Z
2020-04-22T09:21:44.000Z
from temboo.Library.Dwolla.Contacts.NearbyContacts import NearbyContacts, NearbyContactsInputSet, NearbyContactsResultSet, NearbyContactsChoreographyExecution from temboo.Library.Dwolla.Contacts.UserContacts import UserContacts, UserContactsInputSet, UserContactsResultSet, UserContactsChoreographyExecution
102.666667
158
0.909091
22
308
12.727273
0.636364
0.071429
0.121429
0.164286
0.221429
0
0
0
0
0
0
0
0.045455
308
2
159
154
0.952381
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
1
null
0
0
1
0
0
0
0
0
0
0
0
0
0
1
0
1
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
8
9900a325407f52e9b24fa30a9005f4a3d92b56b9
40,447
py
Python
dashboard/dashboard/pinpoint/models/job_test.py
BearerPipelineTest/catapult
3800a67cd916200046a50748893bbd0dcf3d7f4a
[ "BSD-3-Clause" ]
1,894
2015-04-17T18:29:53.000Z
2022-03-28T22:41:06.000Z
dashboard/dashboard/pinpoint/models/job_test.py
BearerPipelineTest/catapult
3800a67cd916200046a50748893bbd0dcf3d7f4a
[ "BSD-3-Clause" ]
4,640
2015-07-08T16:19:08.000Z
2019-12-02T15:01:27.000Z
dashboard/dashboard/pinpoint/models/job_test.py
atuchin-m/catapult
108ea3e2ec108e68216b1250a3d79cc642600294
[ "BSD-3-Clause" ]
698
2015-06-02T19:18:35.000Z
2022-03-29T16:57:15.000Z
# Copyright 2017 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from __future__ import print_function from __future__ import division from __future__ import absolute_import import datetime import mock import sys from tracing.value.diagnostics import generic_set from tracing.value.diagnostics import reserved_infos from dashboard.common import layered_cache from dashboard.common import utils from dashboard.models import histogram from dashboard.pinpoint.models import change from dashboard.pinpoint.models import errors from dashboard.pinpoint.models import job from dashboard.pinpoint.models import scheduler from dashboard.pinpoint import test # This is a very long file. # pylint: disable=too-many-lines _CHROMIUM_URL = 'https://chromium.googlesource.com/chromium/src' _COMMENT_STARTED = (u"""\U0001f4cd Pinpoint job started. https://testbed.example.com/job/1""") _COMMENT_COMPLETED_NO_COMPARISON = ( u"""<b>\U0001f4cd Job complete. See results below.</b> https://testbed.example.com/job/1""") _COMMENT_COMPLETED_NO_DIFFERENCES = ( u"""<b>\U0001f4cd Couldn't reproduce a difference.</b> https://testbed.example.com/job/1""") _COMMENT_COMPLETED_NO_DIFFERENCES_DUE_TO_FAILURE = ( u"""<b>\U0001f63f Job finished with errors.</b> https://testbed.example.com/job/1 One or both of the initial changes failed to produce any results. Perhaps the job is misconfigured or the tests are broken? See the job page for details.""") _COMMENT_FAILED = (u"""\U0001f63f Pinpoint job stopped with an error. https://testbed.example.com/job/1 Error string""") _COMMENT_CODE_REVIEW = (u"""\U0001f4cd Job complete. See results at: https://testbed.example.com/job/1""") def FakeCommitAsDict(commit_self): """Fake for Commit.AsDict. Returns a canned commit dict based on the Commit's git_hash, which must start with the prefix "git_hash_". 
Use like:: @mock.patch('dashboard.pinpoint.models.change.commit.Commit.AsDict', autospec=True) def testFoo(self, commit_as_dict): commit_as_dict.side_effect = FakeCommitAsDict ... """ git_hash = commit_self.git_hash n = git_hash[len('git_hash_'):] return { 'repository': 'chromium', 'git_hash': git_hash, 'url': 'https://example.com/repository/+/' + git_hash, 'author': 'author%s@chromium.org' % (n,), 'subject': 'Subject.', 'message': 'Subject.\n\nCommit message.', } @mock.patch.object(job.results2, 'GetCachedResults2', mock.MagicMock(return_value='http://foo')) class JobTest(test.TestCase): @mock.patch.object( job.timing_record, 'GetSimilarHistoricalTimings', mock.MagicMock( return_value=job.timing_record.EstimateResult( job.timing_record.Timings( datetime.timedelta(seconds=10), datetime.timedelta( seconds=5), datetime.timedelta( seconds=100)), ['try', 'linux']))) @mock.patch.object(job.scheduler, 'QueueStats', mock.MagicMock(return_value=[])) def testAsDictOptions_Estimate(self): j = job.Job.New((), (), bug_id=123456) d = j.AsDict([job.OPTION_ESTIMATE]) self.assertTrue('estimate' in d) self.assertEqual(d['estimate']['timings'][0], 10) self.assertEqual(d['estimate']['timings'][1], 5) self.assertEqual(d['estimate']['timings'][2], 100) self.assertEqual(d['estimate']['tags'], ['try', 'linux']) def testAsDictOptions_Inputs(self): j = job.Job.New((), (), bug_id=123456) d = j.AsDict([job.OPTION_INPUTS]) self.assertEqual(d['state'], []) @mock.patch.object(job.timing_record, 'GetSimilarHistoricalTimings', mock.MagicMock(return_value=None)) @mock.patch.object(job.scheduler, 'QueueStats', mock.MagicMock(return_value=[])) def testAsDictOptions_EstimateFails(self): j = job.Job.New((), (), bug_id=123456) d = j.AsDict([job.OPTION_ESTIMATE]) self.assertFalse('estimate' in d) class RetryTest(test.TestCase): def setUp(self): super(RetryTest, self).setUp() def testStarted_RecoverableError_BacksOff(self): j = job.Job.New((), (), comparison_mode='performance') j.Start() 
scheduler.Schedule(j) j.state.Explore = mock.MagicMock(side_effect=errors.RecoverableError(None)) j._Schedule = mock.MagicMock() j.put = mock.MagicMock() j.Fail = mock.MagicMock() j.Run() j.Run() j.Run() self.assertEqual(j._Schedule.call_args_list[0], mock.call(countdown=job._TASK_INTERVAL * 2)) self.assertEqual(j._Schedule.call_args_list[1], mock.call(countdown=job._TASK_INTERVAL * 4)) self.assertEqual(j._Schedule.call_args_list[2], mock.call(countdown=job._TASK_INTERVAL * 8)) self.assertFalse(j.Fail.called) j.Run() self.assertTrue(j.Fail.called) def testStarted_RecoverableError_Resets(self): j = job.Job.New((), (), comparison_mode='performance') j.Start() scheduler.Schedule(j) j.state.Explore = mock.MagicMock(side_effect=errors.RecoverableError(None)) j._Schedule = mock.MagicMock() j.put = mock.MagicMock() j.Fail = mock.MagicMock() j.Run() j.Run() j.Run() self.assertEqual(j._Schedule.call_args_list[0], mock.call(countdown=job._TASK_INTERVAL * 2)) self.assertEqual(j._Schedule.call_args_list[1], mock.call(countdown=job._TASK_INTERVAL * 4)) self.assertEqual(j._Schedule.call_args_list[2], mock.call(countdown=job._TASK_INTERVAL * 8)) self.assertFalse(j.Fail.called) j.state.Explore = mock.MagicMock() j.Run() self.assertEqual(0, j.retry_count) @mock.patch('dashboard.pinpoint.models.job_state.JobState.ChangesExamined', lambda _: 10) @mock.patch('dashboard.common.utils.ServiceAccountHttp', mock.MagicMock()) class BugCommentTest(test.TestCase): def setUp(self): super(BugCommentTest, self).setUp() self.add_bug_comment = mock.MagicMock() self.get_issue = mock.MagicMock() patcher = mock.patch('dashboard.services.issue_tracker_service.' 
'IssueTrackerService') issue_tracker_service = patcher.start() issue_tracker_service.return_value = mock.MagicMock( AddBugComment=self.add_bug_comment, GetIssue=self.get_issue) self.addCleanup(patcher.stop) def testNoBug(self): j = job.Job.New((), ()) j.Start() scheduler.Schedule(j) j.Run() self.ExecuteDeferredTasks('default') self.assertFalse(self.add_bug_comment.called) def testStarted(self): j = job.Job.New((), (), bug_id=123456) j.Start() self.ExecuteDeferredTasks('default') self.assertFalse(j.failed) self.add_bug_comment.assert_called_once_with( 123456, _COMMENT_STARTED, labels=mock.ANY, send_email=True, project='chromium') labels = self.add_bug_comment.call_args[1]['labels'] self.assertIn('Pinpoint-Job-Started', labels) self.assertNotIn('-Pinpoint-Job-Started', labels) def testCompletedNoComparison(self): j = job.Job.New((), (), bug_id=123456) scheduler.Schedule(j) j.Run() self.ExecuteDeferredTasks('default') self.assertFalse(j.failed) self.add_bug_comment.assert_called_once_with( 123456, _COMMENT_COMPLETED_NO_COMPARISON, labels=['Pinpoint-Tryjob-Completed'], project='chromium', ) def testCompletedNoDifference(self): j = job.Job.New((), (), bug_id=123456, comparison_mode='performance') scheduler.Schedule(j) j.Run() self.ExecuteDeferredTasks('default') self.assertFalse(j.failed) self.add_bug_comment.assert_called_once_with( 123456, _COMMENT_COMPLETED_NO_DIFFERENCES, labels=mock.ANY, status='WontFix', project='chromium', ) labels = self.add_bug_comment.call_args[1]['labels'] self.assertIn('Pinpoint-Job-Completed', labels) self.assertNotIn('-Pinpoint-Job-Completed', labels) self.assertIn('Pinpoint-No-Repro', labels) self.assertNotIn('-Pinpoint-No-Repro', labels) @mock.patch.object(job.job_state.JobState, 'FirstOrLastChangeFailed') @mock.patch.object(job.job_state.JobState, 'Differences') def testCompletedNoDifferenceDueToFailureAtOneChange( self, differences, first_or_last_change_failed): differences.return_value = [] first_or_last_change_failed.return_value = 
True j = job.Job.New((), (), bug_id=123456, comparison_mode='performance') scheduler.Schedule(j) j.Run() self.ExecuteDeferredTasks('default') self.assertFalse(j.failed) self.add_bug_comment.assert_called_once_with( 123456, _COMMENT_COMPLETED_NO_DIFFERENCES_DUE_TO_FAILURE, labels=mock.ANY, project='chromium', ) labels = self.add_bug_comment.call_args[1]['labels'] self.assertIn('Pinpoint-Job-Failed', labels) self.assertNotIn('-Pinpoint-Job-Failed', labels) @mock.patch('dashboard.pinpoint.models.change.commit.Commit.AsDict') @mock.patch.object(job.job_state.JobState, 'ResultValues') @mock.patch.object(job.job_state.JobState, 'Differences') def testCompletedWithCommit(self, differences, result_values, commit_as_dict): c = change.Change((change.Commit('chromium', 'git_hash'),)) differences.return_value = [(None, c)] result_values.side_effect = [0], [1.23456] commit_as_dict.return_value = { 'repository': 'chromium', 'git_hash': 'git_hash', 'url': 'https://example.com/repository/+/git_hash', 'author': 'author@chromium.org', 'subject': 'Subject.', 'message': 'Subject.\n\nCommit message.', } self.get_issue.return_value = {'status': 'Untriaged'} j = job.Job.New((), (), bug_id=123456, comparison_mode='performance') scheduler.Schedule(j) j.Run() self.ExecuteDeferredTasks('default') self.assertFalse(j.failed) self.add_bug_comment.assert_called_once_with( 123456, mock.ANY, status='Assigned', owner='author@chromium.org', labels=mock.ANY, cc_list=['author@chromium.org'], merge_issue=None, project='chromium') message = self.add_bug_comment.call_args[0][1] self.assertIn('Found a significant difference at 1 commit.', message) self.assertIn('<b>Subject.</b>', message) self.assertIn('https://example.com/repository/+/git_hash', message) labels = self.add_bug_comment.call_args[1]['labels'] self.assertIn('Pinpoint-Culprit-Found', labels) self.assertNotIn('-Pinpoint-Culprit-Found', labels) self.assertIn('Pinpoint-Job-Completed', labels) self.assertNotIn('-Pinpoint-Job-Completed', labels) 
@mock.patch('dashboard.pinpoint.models.change.commit.Commit.AsDict') @mock.patch.object(job.job_state.JobState, 'ResultValues') @mock.patch.object(job.job_state.JobState, 'Differences') def testCompletedMergeIntoExisting(self, differences, result_values, commit_as_dict): c = change.Change((change.Commit('chromium', 'git_hash'),)) differences.return_value = [(None, c)] result_values.side_effect = [0], [1.23456] commit_as_dict.return_value = { 'repository': 'chromium', 'git_hash': 'git_hash', 'author': 'author@chromium.org', 'subject': 'Subject.', 'url': 'https://example.com/repository/+/git_hash', 'message': 'Subject.\n\nCommit message.', } self.get_issue.return_value = { 'status': 'Untriaged', 'id': '111222', 'projectId': 'chromium' } layered_cache.SetExternal('commit_hash_git_hash', 'chromium:111222') j = job.Job.New((), (), bug_id=123456, comparison_mode='performance') scheduler.Schedule(j) j.Run() self.ExecuteDeferredTasks('default') self.assertFalse(j.failed) self.add_bug_comment.assert_called_once_with( 123456, mock.ANY, status='Assigned', owner='author@chromium.org', cc_list=[], labels=mock.ANY, merge_issue='111222', project='chromium') message = self.add_bug_comment.call_args[0][1] self.assertIn('Found a significant difference at 1 commit.', message) self.assertIn('https://example.com/repository/+/git_hash', message) labels = self.add_bug_comment.call_args[1]['labels'] self.assertIn('Pinpoint-Job-Completed', labels) self.assertNotIn('-Pinpoint-Job-Completed', labels) self.assertIn('Pinpoint-Culprit-Found', labels) self.assertNotIn('-Pinpoint-Culprit-Found', labels) @mock.patch('dashboard.pinpoint.models.change.commit.Commit.AsDict') @mock.patch.object(job.job_state.JobState, 'ResultValues') @mock.patch.object(job.job_state.JobState, 'Differences') def testCompletedSkipsMergeWhenDuplicate(self, differences, result_values, commit_as_dict): c = change.Change((change.Commit('chromium', 'git_hash'),)) differences.return_value = [(None, c)] 
result_values.side_effect = [0], [1.23456] commit_as_dict.return_value = { 'repository': 'chromium', 'git_hash': 'git_hash', 'author': 'author@chromium.org', 'subject': 'Subject.', 'url': 'https://example.com/repository/+/git_hash', 'message': 'Subject.\n\nCommit message.', } def _GetIssue(bug_id, project='chromium'): if bug_id == '111222': return {'status': 'Duplicate', 'projectId': project, 'id': '111222'} else: return {'status': 'Untriaged', 'projectId': project, 'id': str(bug_id)} self.get_issue.side_effect = _GetIssue layered_cache.SetExternal('commit_hash_git_hash', 'chromium:111222') j = job.Job.New((), (), bug_id=123456, comparison_mode='performance', project='chromium') scheduler.Schedule(j) j.Run() self.ExecuteDeferredTasks('default') self.assertFalse(j.failed) self.add_bug_comment.assert_called_once_with( 123456, mock.ANY, status='Assigned', owner='author@chromium.org', labels=mock.ANY, cc_list=['author@chromium.org'], merge_issue=None, project='chromium') message = self.add_bug_comment.call_args[0][1] self.assertIn('Found a significant difference at 1 commit.', message) self.assertIn('https://example.com/repository/+/git_hash', message) labels = self.add_bug_comment.call_args[1]['labels'] self.assertIn('Pinpoint-Job-Completed', labels) self.assertNotIn('-Pinpoint-Job-Completed', labels) self.assertIn('Pinpoint-Culprit-Found', labels) self.assertNotIn('-Pinpoint-Culprit-Found', labels) @mock.patch('dashboard.pinpoint.models.change.commit.Commit.AsDict') @mock.patch.object(job.job_state.JobState, 'ResultValues') @mock.patch.object(job.job_state.JobState, 'Differences') def testCompletedWithInvalidIssue(self, differences, result_values, commit_as_dict): c = change.Change((change.Commit('chromium', 'git_hash'),)) differences.return_value = [(None, c)] result_values.side_effect = [0], [1.23456] commit_as_dict.return_value = { 'repository': 'chromium', 'git_hash': 'git_hash', 'url': 'https://example.com/repository/+/git_hash', 'author': 'author@chromium.org', 
'subject': 'Subject.', 'message': 'Subject.\n\nCommit message.', } self.get_issue.return_value = None j = job.Job.New((), (), bug_id=123456, comparison_mode='performance') scheduler.Schedule(j) j.Run() self.ExecuteDeferredTasks('default') self.assertFalse(j.failed) self.assertFalse(self.add_bug_comment.called) @mock.patch('dashboard.pinpoint.models.change.commit.Commit.AsDict') @mock.patch.object(job.job_state.JobState, 'ResultValues') @mock.patch.object(job.job_state.JobState, 'Differences') def testCompletedWithCommitAndDocs(self, differences, result_values, commit_as_dict): c = change.Change((change.Commit('chromium', 'git_hash'),)) differences.return_value = [(None, c)] result_values.side_effect = [1.23456], [0] commit_as_dict.return_value = { 'repository': 'chromium', 'git_hash': 'git_hash', 'url': 'https://example.com/repository/+/git_hash', 'author': 'author@chromium.org', 'subject': 'Subject.', 'message': 'Subject.\n\nCommit message.', } self.get_issue.return_value = {'status': 'Untriaged'} j = job.Job.New((), (), bug_id=123456, comparison_mode='performance', tags={'test_path': 'master/bot/benchmark'}) diag_dict = generic_set.GenericSet([[u'Benchmark doc link', u'http://docs']]) diag = histogram.SparseDiagnostic( data=diag_dict.AsDict(), start_revision=1, end_revision=sys.maxsize, name=reserved_infos.DOCUMENTATION_URLS.name, test=utils.TestKey('master/bot/benchmark')) diag.put() scheduler.Schedule(j) j.Run() self.ExecuteDeferredTasks('default') self.assertFalse(j.failed) self.add_bug_comment.assert_called_once_with( 123456, mock.ANY, status='Assigned', owner='author@chromium.org', labels=mock.ANY, cc_list=['author@chromium.org'], merge_issue=None, project='chromium') message = self.add_bug_comment.call_args[0][1] self.assertIn('Found a significant difference at 1 commit.', message) self.assertIn('http://docs', message) self.assertIn('Benchmark doc link', message) labels = self.add_bug_comment.call_args[1]['labels'] self.assertIn('Pinpoint-Job-Completed', 
labels) self.assertNotIn('-Pinpoint-Job-Completed', labels) self.assertIn('Pinpoint-Culprit-Found', labels) self.assertNotIn('-Pinpoint-Culprit-Found', labels) @mock.patch('dashboard.pinpoint.models.change.patch.GerritPatch.AsDict') @mock.patch.object(job.job_state.JobState, 'ResultValues') @mock.patch.object(job.job_state.JobState, 'Differences') def testCompletedWithPatch(self, differences, result_values, patch_as_dict): commits = (change.Commit('chromium', 'git_hash'),) patch = change.GerritPatch('https://codereview.com', 672011, '2f0d5c7') c = change.Change(commits, patch) differences.return_value = [(None, c)] result_values.side_effect = [40], [20] patch_as_dict.return_value = { 'url': 'https://codereview.com/c/672011/2f0d5c7', 'author': 'author@chromium.org', 'subject': 'Subject.', 'message': 'Subject.\n\nCommit message.', } self.get_issue.return_value = {'status': 'Untriaged'} j = job.Job.New((), (), bug_id=123456, comparison_mode='performance') scheduler.Schedule(j) j.Run() self.ExecuteDeferredTasks('default') self.assertFalse(j.failed) self.add_bug_comment.assert_called_once_with( 123456, mock.ANY, status='Assigned', owner='author@chromium.org', labels=mock.ANY, cc_list=['author@chromium.org'], merge_issue=None, project='chromium') message = self.add_bug_comment.call_args[0][1] self.assertIn('Found a significant difference at 1 commit.', message) self.assertIn('https://codereview.com/c/672011/2f0d5c7', message) labels = self.add_bug_comment.call_args[1]['labels'] self.assertIn('Pinpoint-Culprit-Found', labels) self.assertNotIn('-Pinpoint-Culprit-Found', labels) @mock.patch('dashboard.pinpoint.models.change.patch.GerritPatch.AsDict') @mock.patch.object(job.job_state.JobState, 'ResultValues') @mock.patch.object(job.job_state.JobState, 'Differences') def testCompletedDoesReassign(self, differences, result_values, patch_as_dict): commits = (change.Commit('chromium', 'git_hash'),) patch = change.GerritPatch('https://codereview.com', 672011, '2f0d5c7') c = 
change.Change(commits, patch) c = change.Change(commits, patch) differences.return_value = [(None, c)] result_values.side_effect = [40], [20] patch_as_dict.return_value = { 'url': 'https://codereview.com/c/672011/2f0d5c7', 'author': 'author@chromium.org', 'subject': 'Subject.', 'message': 'Subject.\n\nCommit message.', } self.get_issue.return_value = { 'status': 'Assigned', 'owner': { 'email': 'some-author@somewhere.org' } } j = job.Job.New((), (), bug_id=123456, comparison_mode='performance') scheduler.Schedule(j) j.Run() self.ExecuteDeferredTasks('default') self.assertFalse(j.failed) self.add_bug_comment.assert_called_once_with( 123456, mock.ANY, owner='author@chromium.org', status=None, cc_list=['author@chromium.org', 'some-author@somewhere.org'], labels=mock.ANY, merge_issue=None, project='chromium') message = self.add_bug_comment.call_args[0][1] self.assertIn('Found a significant difference at 1 commit.', message) self.assertIn('https://codereview.com/c/672011/2f0d5c7', message) labels = self.add_bug_comment.call_args[1]['labels'] self.assertIn('Pinpoint-Job-Completed', labels) self.assertNotIn('-Pinpoint-Job-Completed', labels) self.assertIn('Pinpoint-Culprit-Found', labels) self.assertNotIn('-Pinpoint-Culprit-Found', labels) @mock.patch('dashboard.pinpoint.models.change.patch.GerritPatch.AsDict') @mock.patch.object(job.job_state.JobState, 'ResultValues') @mock.patch.object(job.job_state.JobState, 'Differences') def testCompletedDoesNotReopen(self, differences, result_values, patch_as_dict): commits = (change.Commit('chromium', 'git_hash'),) patch = change.GerritPatch('https://codereview.com', 672011, '2f0d5c7') c = change.Change(commits, patch) differences.return_value = [(None, c)] result_values.side_effect = [40], [20] patch_as_dict.return_value = { 'url': 'https://codereview.com/c/672011/2f0d5c7', 'author': 'author@chromium.org', 'subject': 'Subject.', 'message': 'Subject.\n\nCommit message.', } self.get_issue.return_value = {'status': 'Fixed'} j = 
job.Job.New((), (), bug_id=123456, comparison_mode='performance') scheduler.Schedule(j) j.Run() self.ExecuteDeferredTasks('default') self.assertFalse(j.failed) self.add_bug_comment.assert_called_once_with( 123456, mock.ANY, owner=None, status=None, cc_list=['author@chromium.org'], labels=mock.ANY, merge_issue=None, project='chromium') message = self.add_bug_comment.call_args[0][1] self.assertIn('10 revisions compared', message) labels = self.add_bug_comment.call_args[1]['labels'] self.assertIn('Pinpoint-Job-Completed', labels) self.assertNotIn('-Pinpoint-Job-Completed', labels) self.assertIn('Pinpoint-Culprit-Found', labels) self.assertNotIn('-Pinpoint-Culprit-Found', labels) @mock.patch('dashboard.pinpoint.models.change.commit.Commit.AsDict') @mock.patch.object(job.job_state.JobState, 'ResultValues') @mock.patch.object(job.job_state.JobState, 'Differences') def testCompletedMultipleDifferences(self, differences, result_values, commit_as_dict): c0 = change.Change((change.Commit('chromium', 'git_hash_0'),)) c1 = change.Change((change.Commit('chromium', 'git_hash_1'),)) c2 = change.Change((change.Commit('chromium', 'git_hash_2'),)) c2_5 = change.Change((change.Commit('chromium', 'git_hash_2_5'))) c3 = change.Change((change.Commit('chromium', 'git_hash_3'),)) change_map = {c0: [50], c1: [0], c2: [40], c2_5: [0], c3: []} differences.return_value = [(c0, c1), (c1, c2), (c2_5, c3)] result_values.side_effect = lambda c: change_map.get(c, []) commit_as_dict.side_effect = ( { 'repository': 'chromium', 'git_hash': 'git_hash_1', 'url': 'https://example.com/repository/+/git_hash_1', 'author': 'author1@chromium.org', 'subject': 'Subject.', 'message': 'Subject.\n\nCommit message.', }, { 'repository': 'chromium', 'git_hash': 'git_hash_2', 'url': 'https://example.com/repository/+/git_hash_2', 'author': 'author2@chromium.org', 'subject': 'Subject.', 'message': 'Subject.\n\nCommit message.', }, { 'repository': 'chromium', 'git_hash': 'git_hash_3', 'url': 
'https://example.com/repository/+/git_hash_3', 'author': 'author3@chromium.org', 'subject': 'Subject.', 'message': 'Subject.\n\nCommit message.', }, ) self.get_issue.return_value = {'status': 'Untriaged'} j = job.Job.New((), (), bug_id=123456, comparison_mode='performance') scheduler.Schedule(j) j.Run() self.ExecuteDeferredTasks('default') self.assertFalse(j.failed) # We now only CC folks from the top commit. self.add_bug_comment.assert_called_once_with( 123456, mock.ANY, status='Assigned', owner='author1@chromium.org', cc_list=['author1@chromium.org'], labels=mock.ANY, merge_issue=None, project='chromium') message = self.add_bug_comment.call_args[0][1] self.assertIn('Found significant differences at 2 commits.', message) self.assertIn('1. Subject.', message) self.assertIn('transitions from "no values" to "some values"', message) labels = self.add_bug_comment.call_args[1]['labels'] self.assertIn('Pinpoint-Job-Completed', labels) self.assertNotIn('-Pinpoint-Job-Completed', labels) self.assertIn('Pinpoint-Multiple-Culprits', labels) self.assertNotIn('-Pinpoint-Multiple-Culprits', labels) self.assertIn('Pinpoint-Multiple-MissingValues', labels) self.assertNotIn('-Pinpoint-Multiple-MissingValues', labels) @mock.patch('dashboard.pinpoint.models.change.commit.Commit.AsDict', autospec=True) @mock.patch.object(job.job_state.JobState, 'ResultValues') @mock.patch.object(job.job_state.JobState, 'Differences') def testCompletedMultipleDifferences_BlameAbsoluteLargest( self, differences, result_values, commit_as_dict): c1 = change.Change((change.Commit('chromium', 'git_hash_1'),)) c2 = change.Change((change.Commit('chromium', 'git_hash_2'),)) c3 = change.Change((change.Commit('chromium', 'git_hash_3'),)) change_map = {c1: [10], c2: [0], c3: [-100]} differences.return_value = [(None, c1), (c1, c2), (c2, c3)] result_values.side_effect = lambda c: change_map.get(c, []) commit_as_dict.side_effect = FakeCommitAsDict self.get_issue.return_value = {'status': 'Untriaged'} j = 
job.Job.New((), (), bug_id=123456, comparison_mode='performance') scheduler.Schedule(j) j.Run() self.ExecuteDeferredTasks('default') self.assertFalse(j.failed) # We now only CC folks from the top commit. self.add_bug_comment.assert_called_once_with( 123456, mock.ANY, status='Assigned', owner='author3@chromium.org', cc_list=['author3@chromium.org'], labels=mock.ANY, merge_issue=None, project='chromium') message = self.add_bug_comment.call_args[0][1] self.assertIn('Found significant differences at 2 commits.', message) self.assertIn('https://example.com/repository/+/git_hash_3', message) self.assertIn('https://example.com/repository/+/git_hash_2', message) self.assertIn('https://example.com/repository/+/git_hash_1', message) labels = self.add_bug_comment.call_args[1]['labels'] self.assertIn('Pinpoint-Job-Completed', labels) self.assertNotIn('-Pinpoint-Job-Completed', labels) self.assertIn('Pinpoint-Multiple-Culprits', labels) self.assertNotIn('-Pinpoint-Multiple-Culprits', labels) self.assertIn('Pinpoint-Multiple-MissingValues', labels) self.assertNotIn('-Pinpoint-Multiple-MissingValues', labels) @mock.patch('dashboard.pinpoint.models.change.commit.Commit.AsDict', autospec=True) @mock.patch.object(job.job_state.JobState, 'ResultValues') @mock.patch.object(job.job_state.JobState, 'Differences') def testCompletedMultipleDifferences_TenCulpritsCcTopTwo( self, differences, result_values, commit_as_dict): self.Parameterized_TestCompletedMultipleDifferences(10, 2, differences, result_values, commit_as_dict) @mock.patch('dashboard.pinpoint.models.change.commit.Commit.AsDict', autospec=True) @mock.patch.object(job.job_state.JobState, 'ResultValues') @mock.patch.object(job.job_state.JobState, 'Differences') def testCompletedMultipleDifferences_HundredCulpritsCcTopThree( self, differences, result_values, commit_as_dict): self.Parameterized_TestCompletedMultipleDifferences(100, 3, differences, result_values, commit_as_dict) def 
Parameterized_TestCompletedMultipleDifferences(self, number_culprits, expected_num_ccs, differences, result_values, commit_as_dict): changes = [ change.Change((change.Commit('chromium', 'git_hash_%d' % (i,)),)) for i in range(1, number_culprits + 1) ] # Return [(None,c1), (c1,c2), (c2,c3), ...] differences.return_value = zip([None] + changes, changes) # Ensure culprits are ordered by deriving change results values from commit # names. E.g.: # Change(git_hash_1) -> result_value=[1], # Change(git_hash_2) -> result_value=[4], # etc. def ResultValuesFromFakeGitHash(change_obj): if change_obj is None: return [0] v = int(change_obj.commits[0].git_hash[len('git_hash_'):]) return [v * v] # Square the value to ensure increasing deltas. result_values.side_effect = ResultValuesFromFakeGitHash commit_as_dict.side_effect = FakeCommitAsDict self.get_issue.return_value = {'status': 'Untriaged'} j = job.Job.New((), (), bug_id=123456, comparison_mode='performance') scheduler.Schedule(j) j.Run() self.ExecuteDeferredTasks('default') self.assertFalse(j.failed) expected_ccs = [ 'author%d@chromium.org' % (i,) for i in range(number_culprits, number_culprits - expected_num_ccs, -1) ] # We only CC folks from the top commits. 
self.add_bug_comment.assert_called_once_with( 123456, mock.ANY, status='Assigned', owner=expected_ccs[0], cc_list=sorted(expected_ccs), labels=mock.ANY, merge_issue=None, project='chromium') labels = self.add_bug_comment.call_args[1]['labels'] self.assertIn('Pinpoint-Job-Completed', labels) self.assertNotIn('-Pinpoint-Job-Completed', labels) self.assertIn('Pinpoint-Multiple-Culprits', labels) self.assertNotIn('-Pinpoint-Multiple-Culprits', labels) @mock.patch('dashboard.pinpoint.models.change.commit.Commit.AsDict') @mock.patch.object(job.job_state.JobState, 'ResultValues') @mock.patch.object(job.job_state.JobState, 'Differences') def testCompletedMultipleDifferences_NoDeltas(self, differences, result_values, commit_as_dict): """Regression test for http://crbug.com/1078680. Picks people to notify even when none of the differences have deltas (they are all transitions to/from "No values"). """ # Two differences, neither has deltas (50 -> No Values, No Values -> 50). c0 = change.Change((change.Commit('chromium', 'git_hash_0'),)) c1 = change.Change((change.Commit('chromium', 'git_hash_1'),)) c2 = change.Change((change.Commit('chromium', 'git_hash_2'),)) change_map = {c0: [50], c1: [], c2: [50]} differences.return_value = [(c0, c1), (c1, c2)] result_values.side_effect = lambda c: change_map.get(c, []) commit_as_dict.side_effect = ( { 'repository': 'chromium', 'git_hash': 'git_hash_1', 'url': 'https://example.com/repository/+/git_hash_1', 'author': 'author1@chromium.org', 'subject': 'Subject.', 'message': 'Subject.\n\nCommit message.', }, { 'repository': 'chromium', 'git_hash': 'git_hash_2', 'url': 'https://example.com/repository/+/git_hash_2', 'author': 'author2@chromium.org', 'subject': 'Subject.', 'message': 'Subject.\n\nCommit message.', }, ) self.get_issue.return_value = {'status': 'Untriaged'} j = job.Job.New((), (), bug_id=123456, comparison_mode='performance') scheduler.Schedule(j) j.Run() self.ExecuteDeferredTasks('default') self.assertFalse(j.failed) # Notifies 
the owner of the first change in the list of differences, seeing # as they are all equally small. self.add_bug_comment.assert_called_once_with( 123456, mock.ANY, status='Assigned', owner='author1@chromium.org', cc_list=['author1@chromium.org'], labels=mock.ANY, merge_issue=None, project='chromium') message = self.add_bug_comment.call_args[0][1] self.assertIn('Missing Values', message) self.assertIn('author1@chromium.org', message) self.assertIn('author2@chromium.org', message) labels = self.add_bug_comment.call_args[1]['labels'] self.assertIn('Pinpoint-Job-Completed', labels) self.assertNotIn('-Pinpoint-Job-Completed', labels) self.assertIn('Pinpoint-Multiple-MissingValues', labels) self.assertNotIn('-Pinpoint-Multiple-MissingValues', labels) self.assertNotIn('Pinpoint-Multiple-Culprits', labels) @mock.patch('dashboard.pinpoint.models.change.commit.Commit.AsDict') @mock.patch.object(job.job_state.JobState, 'ResultValues') @mock.patch.object(job.job_state.JobState, 'Differences') def testCompletedWithAutoroll(self, differences, result_values, commit_as_dict): c = change.Change((change.Commit('chromium', 'git_hash'),)) differences.return_value = [(None, c)] result_values.side_effect = [20], [30] commit_as_dict.return_value = { 'repository': 'chromium', 'git_hash': 'git_hash', 'url': 'https://example.com/repository/+/git_hash', 'author': 'chromium-autoroll@skia-public.iam.gserviceaccount.com', 'subject': 'Subject.', 'message': 'Subject.\n\nCommit message.\n\nTBR=sheriff@bar.com', } self.get_issue.return_value = {'status': 'Untriaged'} j = job.Job.New((), (), bug_id=123456, comparison_mode='performance') j.put() scheduler.Schedule(j) j.Run() self.ExecuteDeferredTasks('default') self.assertFalse(j.failed) self.add_bug_comment.assert_called_once_with( 123456, mock.ANY, status='Assigned', owner='sheriff@bar.com', cc_list=['chromium-autoroll@skia-public.iam.gserviceaccount.com'], labels=mock.ANY, merge_issue=None, project='chromium') message = 
self.add_bug_comment.call_args[0][1] self.assertIn('Found a significant difference at 1 commit.', message) self.assertIn('chromium-autoroll@skia-public.iam.gserviceaccount.com', message) labels = self.add_bug_comment.call_args[1]['labels'] self.assertIn('Pinpoint-Job-Completed', labels) self.assertNotIn('-Pinpoint-Job-Completed', labels) self.assertIn('Pinpoint-Culprit-Found', labels) self.assertNotIn('-Pinpoint-Culprit-Found', labels) @mock.patch('dashboard.pinpoint.models.change.commit.Commit.AsDict') @mock.patch.object(job.job_state.JobState, 'ResultValues') @mock.patch.object(job.job_state.JobState, 'Differences') def testCompletedWithAutorollCulpritButNotMostRecent(self, differences, result_values, commit_as_dict): """Regression test for http://crbug.com/1076756. When an autoroll has the biggest delta, assigns to its sheriff even when it is not the latest change. """ c0 = change.Change((change.Commit('chromium', 'git_hash_0'),)) c1 = change.Change((change.Commit('chromium', 'git_hash_1'),)) c2 = change.Change((change.Commit('chromium', 'git_hash_2'),)) change_map = {c0: [0], c1: [10], c2: [10]} differences.return_value = [(c0, c1), (c1, c2)] result_values.side_effect = lambda c: change_map.get(c, []) commit_as_dict.side_effect = ( { 'repository': 'chromium', 'git_hash': 'git_hash_1', 'url': 'https://example.com/repository/+/git_hash_1', 'author': 'chromium-autoroll@skia-public.iam.gserviceaccount.com', 'subject': 'Subject.', 'message': 'Subject.\n\nCommit message.\n\nTBR=sheriff@bar.com', }, { 'repository': 'chromium', 'git_hash': 'git_hash_2', 'url': 'https://example.com/repository/+/git_hash_2', 'author': 'author2@chromium.org', 'subject': 'Subject.', 'message': 'Subject.\n\nCommit message.', }, ) self.get_issue.return_value = {'status': 'Untriaged'} j = job.Job.New((), (), bug_id=123456, comparison_mode='performance') j.put() scheduler.Schedule(j) j.Run() self.ExecuteDeferredTasks('default') self.assertFalse(j.failed) 
self.add_bug_comment.assert_called_once_with( mock.ANY, mock.ANY, status='Assigned', owner='sheriff@bar.com', cc_list=['chromium-autoroll@skia-public.iam.gserviceaccount.com'], labels=mock.ANY, merge_issue=None, project='chromium') @mock.patch.object(job.job_state.JobState, 'ScheduleWork', mock.MagicMock(side_effect=AssertionError('Error string'))) def testFailed(self): j = job.Job.New((), (), bug_id=123456) scheduler.Schedule(j) with self.assertRaises(AssertionError): j.Run() self.ExecuteDeferredTasks('default') self.assertTrue(j.failed) self.add_bug_comment.assert_called_once_with( 123456, _COMMENT_FAILED, send_email=True, labels=mock.ANY, project='chromium') labels = self.add_bug_comment.call_args[1]['labels'] self.assertIn('Pinpoint-Job-Failed', labels) @mock.patch.object(job.job_state.JobState, 'ScheduleWork', mock.MagicMock(side_effect=AssertionError('Error string'))) def testFailed_ExceptionDetailsFieldAdded(self): j = job.Job.New((), (), bug_id=123456) scheduler.Schedule(j) with self.assertRaises(AssertionError): j.Run() j.exception = j.exception_details['traceback'] exception_details = job.Job.exception_details delattr(job.Job, 'exception_details') j.put() self.assertTrue(j.failed) self.assertFalse(hasattr(j, 'exception_details')) job.Job.exception_details = exception_details j = j.key.get(use_cache=False) self.assertTrue(j.failed) self.assertTrue(hasattr(j, 'exception_details')) self.assertEqual(j.exception, j.exception_details['traceback']) self.assertTrue( j.exception_details['message'] in j.exception.splitlines()[-1]) @mock.patch('dashboard.services.gerrit_service.PostChangeComment') def testCompletedUpdatesGerrit(self, post_change_comment): j = job.Job.New((), (), gerrit_server='https://review.com', gerrit_change_id='123456') scheduler.Schedule(j) j.Run() self.ExecuteDeferredTasks('default') post_change_comment.assert_called_once_with('https://review.com', '123456', _COMMENT_CODE_REVIEW)
40.650251
80
0.654288
4,713
40,447
5.446849
0.087418
0.022905
0.019088
0.032449
0.806396
0.778544
0.757041
0.73994
0.734642
0.725838
0
0.02175
0.202017
40,447
994
81
40.691147
0.773609
0.03538
0
0.720137
0
0
0.239746
0.073105
0
0
0
0
0.175199
1
0.03868
false
0
0.018203
0
0.065984
0.001138
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
070c463e415a60eb8c9789475679afb0da7d7a46
126
py
Python
testfixtures/tests/mock.py
foobacca/testfixtures
c4f893ec0ac40ffa998d97152639536f54c8436d
[ "MIT" ]
null
null
null
testfixtures/tests/mock.py
foobacca/testfixtures
c4f893ec0ac40ffa998d97152639536f54c8436d
[ "MIT" ]
null
null
null
testfixtures/tests/mock.py
foobacca/testfixtures
c4f893ec0ac40ffa998d97152639536f54c8436d
[ "MIT" ]
null
null
null
from __future__ import absolute_import try: from unittest.mock import Mock, call except: from mock import Mock, call
18
40
0.761905
18
126
5.055556
0.5
0.21978
0.307692
0.395604
0
0
0
0
0
0
0
0
0.198413
126
6
41
21
0.90099
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.6
0
0.6
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
8
0721b6dbafd1d53e43b607c01ad3e42757f983a0
2,603
py
Python
python/taichi/lang/simt/warp.py
TiGeekMan/taichi
8fdd3b8e5aabf6b2c8f33ff06994e24d50942c3a
[ "MIT" ]
null
null
null
python/taichi/lang/simt/warp.py
TiGeekMan/taichi
8fdd3b8e5aabf6b2c8f33ff06994e24d50942c3a
[ "MIT" ]
null
null
null
python/taichi/lang/simt/warp.py
TiGeekMan/taichi
8fdd3b8e5aabf6b2c8f33ff06994e24d50942c3a
[ "MIT" ]
null
null
null
from taichi._lib import core as _ti_core from taichi.lang import expr def all_nonzero(mask, predicate): return expr.Expr( _ti_core.insert_internal_func_call( "cuda_all_sync_i32", expr.make_expr_group(mask, predicate), False)) def any_nonzero(mask, predicate): return expr.Expr( _ti_core.insert_internal_func_call( "cuda_any_sync_i32", expr.make_expr_group(mask, predicate), False)) def unique(): # TODO pass def ballot(predicate): return expr.Expr( _ti_core.insert_internal_func_call("cuda_ballot_i32", expr.make_expr_group(predicate), False)) def shfl_sync_i32(mask, val, offset): return expr.Expr( _ti_core.insert_internal_func_call( # lane offset is 31 for warp size 32 "cuda_shfl_sync_i32", expr.make_expr_group(mask, val, offset, 31), False)) def shfl_sync_f32(mask, val, offset): return expr.Expr( _ti_core.insert_internal_func_call( # lane offset is 31 for warp size 32 "cuda_shfl_sync_f32", expr.make_expr_group(mask, val, offset, 31), False)) def shfl_down_i32(mask, val, offset): return expr.Expr( _ti_core.insert_internal_func_call( "cuda_shfl_down_sync_i32", # lane offset is 31 for warp size 32 expr.make_expr_group(mask, val, offset, 31), False)) def shfl_up_i32(mask, val, offset): return expr.Expr( _ti_core.insert_internal_func_call( "cuda_shfl_up_sync_i32", # lane offset is 0 for warp size 32 expr.make_expr_group(mask, val, offset, 0), False)) def shfl_up_f32(mask, val, offset): return expr.Expr( _ti_core.insert_internal_func_call( "cuda_shfl_up_sync_f32", # lane offset is 0 for warp size 32 expr.make_expr_group(mask, val, offset, 0), False)) def shfl_xor_i32(mask, val, offset): return expr.Expr( _ti_core.insert_internal_func_call( "cuda_shfl_xor_sync_i32", expr.make_expr_group(mask, val, offset, 31), False)) def match_any(): # TODO pass def match_all(): # TODO pass def active_mask(): # TODO pass def sync(): # TODO pass __all__ = [ 'all_nonzero', 'any_nonzero', 'unique', 'ballot', 'shfl_i32', 'shfl_up_i32', 'shfl_down_i32', 'match_any', 'match_all', 'active_mask', 
'sync', ]
22.833333
79
0.601614
347
2,603
4.146974
0.135447
0.058374
0.108409
0.100069
0.781098
0.76025
0.76025
0.76025
0.750521
0.750521
0
0.035418
0.305801
2,603
113
80
23.035398
0.76093
0.075682
0
0.452055
0
0
0.113152
0.036326
0
0
0
0.00885
0
1
0.191781
false
0.068493
0.027397
0.123288
0.342466
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
1
0
0
0
7
07299889be07d6f0bad9846a46ebe318c7678da6
18,409
py
Python
FirstPersonDriver/__init__.py
juso40/bl2sdk_Mods
6c4a358b6cd2cb73e4feadcfc197c98eb6632396
[ "MIT" ]
16
2019-12-10T23:11:32.000Z
2022-03-05T17:45:09.000Z
FirstPersonDriver/__init__.py
juso40/bl2sdk_Mods
6c4a358b6cd2cb73e4feadcfc197c98eb6632396
[ "MIT" ]
9
2021-01-07T11:17:58.000Z
2022-03-31T11:18:13.000Z
FirstPersonDriver/__init__.py
juso40/bl2sdk_Mods
6c4a358b6cd2cb73e4feadcfc197c98eb6632396
[ "MIT" ]
35
2019-09-13T23:46:45.000Z
2022-03-05T17:45:11.000Z
import unrealsdk from unrealsdk import * from . import bl2tools from ..ModMenu import EnabledSaveType, Hook, KeybindManager, ModTypes, SDKMod class FPDriver(SDKMod): Name = "First Person Driver" Version = "2.0" Description = f"Experience all vehicles in first person. Toggle between 3rd and 1st person using by default '5'." Author = "Juso" Types = ModTypes.Gameplay SaveEnabledState = EnabledSaveType.LoadOnMainMenu def __init__(self): self.is_first_person = True self.Keybinds = [KeybindManager.Keybind("Driver Cam", "Five")] self.settings = { "mercenary": {"GD_Runner_Streaming.CameraDefs.Camera_DriverSeat": ["CameraOffset 0", "CameraPitchDownOffset -20", "CameraPitchUpOffset 50", "BaseCameraPosition (Z=65)", "bScaleDistanceWithSpeed False", ], "GD_BanditTechnical.CameraDefs.Camera_DriverSeat": ["CameraPitchDownOffset -10", "BaseCameraPosition (X=-1,Y=0,Z=70)", "CameraPitchUpOffset 50", "CameraOffset 0", "bScaleDistanceWithSpeed False", ], "GD_Sage_FanBoat.CameraDefs.Camera_DriverSeat": ["CameraOffset 0", "CameraPitchDownOffset -50", "CameraPitchUpOffset 45", "BaseCameraPosition (X=15,Y=-3,Z=60)", "bScaleDistanceWithSpeed False", ], "GD_Orchid_Hovercraft.CameraDefs.Camera_DriverSeat": ["CameraOffset 0", "CameraPitchDownOffset 0", "CameraPitchUpOffset 45", "BaseCameraPosition (X=10,Y=0,Z=75)", "bScaleDistanceWithSpeed False", ]}, "assassin": {"GD_Runner_Streaming.CameraDefs.Camera_DriverSeat": ["CameraOffset 0", "CameraPitchDownOffset -20", "CameraPitchUpOffset 50", "BaseCameraPosition (Z=65)", "bScaleDistanceWithSpeed False", ], "GD_BanditTechnical.CameraDefs.Camera_DriverSeat": ["CameraPitchDownOffset -10", "BaseCameraPosition (X=-1,Y=0,Z=70)", "CameraPitchUpOffset 50", "CameraOffset 0", "bScaleDistanceWithSpeed False", ], "GD_Sage_FanBoat.CameraDefs.Camera_DriverSeat": ["CameraOffset 0", "CameraPitchDownOffset -50", "CameraPitchUpOffset 45", "BaseCameraPosition (X=15,Y=-3,Z=60)", "bScaleDistanceWithSpeed False", ], "GD_Orchid_Hovercraft.CameraDefs.Camera_DriverSeat": 
["CameraOffset 0", "CameraPitchDownOffset 0", "CameraPitchUpOffset 45", "BaseCameraPosition (X=10,Y=0,Z=75)", "bScaleDistanceWithSpeed False", ]}, "lilac": {"GD_Runner_Streaming.CameraDefs.Camera_DriverSeat": ["CameraOffset 0", "CameraPitchDownOffset -20", "CameraPitchUpOffset 50", "BaseCameraPosition (Z=65)", "bScaleDistanceWithSpeed False", ], "GD_BanditTechnical.CameraDefs.Camera_DriverSeat": ["CameraPitchDownOffset -10", "BaseCameraPosition (X=-1,Y=0,Z=70)", "CameraPitchUpOffset 50", "CameraOffset 0", "bScaleDistanceWithSpeed False", ], "GD_Sage_FanBoat.CameraDefs.Camera_DriverSeat": ["CameraOffset 0", "CameraPitchDownOffset -50", "CameraPitchUpOffset 45", "BaseCameraPosition (X=15,Y=-3,Z=60)", "bScaleDistanceWithSpeed False", ], "GD_Orchid_Hovercraft.CameraDefs.Camera_DriverSeat": ["CameraOffset 0", "CameraPitchDownOffset 0", "CameraPitchUpOffset 45", "BaseCameraPosition (X=10,Y=0,Z=75)", "bScaleDistanceWithSpeed False", ]}, "siren": {"GD_Runner_Streaming.CameraDefs.Camera_DriverSeat": ["CameraOffset 0", "CameraPitchDownOffset -20", "CameraPitchUpOffset 50", "BaseCameraPosition (Z=65)", "bScaleDistanceWithSpeed False", ], "GD_BanditTechnical.CameraDefs.Camera_DriverSeat": ["CameraPitchDownOffset -10", "BaseCameraPosition (X=-1,Y=0,Z=70)", "CameraPitchUpOffset 50", "CameraOffset 0", "bScaleDistanceWithSpeed False", ], "GD_Sage_FanBoat.CameraDefs.Camera_DriverSeat": ["CameraOffset 0", "CameraPitchDownOffset -50", "CameraPitchUpOffset 45", "BaseCameraPosition (X=15,Y=-3,Z=60)", "bScaleDistanceWithSpeed False", ], "GD_Orchid_Hovercraft.CameraDefs.Camera_DriverSeat": ["CameraOffset 0", "CameraPitchDownOffset 0", "CameraPitchUpOffset 45", "BaseCameraPosition (X=10,Y=0,Z=80)", "bScaleDistanceWithSpeed False", ]}, "soldier": {"GD_Runner_Streaming.CameraDefs.Camera_DriverSeat": ["CameraOffset 0", "CameraPitchDownOffset -20", "CameraPitchUpOffset 50", "BaseCameraPosition (Z=65)", "bScaleDistanceWithSpeed False", ], "GD_BanditTechnical.CameraDefs.Camera_DriverSeat": 
["CameraPitchDownOffset -10", "BaseCameraPosition (X=-1,Y=0,Z=70)", "CameraPitchUpOffset 50", "CameraOffset 0", "bScaleDistanceWithSpeed False", ], "GD_Sage_FanBoat.CameraDefs.Camera_DriverSeat": ["CameraOffset 0", "CameraPitchDownOffset -50", "CameraPitchUpOffset 45", "BaseCameraPosition (X=15,Y=-3,Z=60)", "bScaleDistanceWithSpeed False", ], "GD_Orchid_Hovercraft.CameraDefs.Camera_DriverSeat": ["CameraOffset 0", "CameraPitchDownOffset 0", "CameraPitchUpOffset 45", "BaseCameraPosition (X=10,Y=0,Z=75)", "bScaleDistanceWithSpeed False", ]}, "tulip": {"GD_Runner_Streaming.CameraDefs.Camera_DriverSeat": ["CameraOffset 0", "CameraPitchDownOffset -20", "CameraPitchUpOffset 50", "BaseCameraPosition (Z=65)", "bScaleDistanceWithSpeed False", ], "GD_BanditTechnical.CameraDefs.Camera_DriverSeat": ["CameraPitchDownOffset -10", "BaseCameraPosition (X=-1,Y=0,Z=70)", "CameraPitchUpOffset 50", "CameraOffset 0", "bScaleDistanceWithSpeed False", ], "GD_Sage_FanBoat.CameraDefs.Camera_DriverSeat": ["CameraOffset 0", "CameraPitchDownOffset -50", "CameraPitchUpOffset 45", "BaseCameraPosition (X=15,Y=-3,Z=60)", "bScaleDistanceWithSpeed False", ], "GD_Orchid_Hovercraft.CameraDefs.Camera_DriverSeat": ["CameraOffset 0", "CameraPitchDownOffset 0", "CameraPitchUpOffset 45", "BaseCameraPosition (X=10,Y=0,Z=75)", "bScaleDistanceWithSpeed False", ]}, "default": {"GD_Runner_Streaming.CameraDefs.Camera_DriverSeat": ["CameraOffset -800", "CameraPitchDownOffset 0", "CameraPitchUpOffset 0", "BaseCameraPosition (Z=200)", "bScaleDistanceWithSpeed True", ], "GD_BanditTechnical.CameraDefs.Camera_DriverSeat": ["CameraPitchDownOffset 0", "BaseCameraPosition (X=40,Y=0,Z=300)", "CameraPitchUpOffset 100", "CameraOffset -1300", "bScaleDistanceWithSpeed True", ], "GD_Sage_FanBoat.CameraDefs.Camera_DriverSeat": ["CameraOffset -1100", "CameraPitchDownOffset 0", "CameraPitchUpOffset -250", "BaseCameraPosition (X=0,Y=0,Z=300)", "bScaleDistanceWithSpeed True", ], "GD_Orchid_Hovercraft.CameraDefs.Camera_DriverSeat": 
["CameraOffset -1100", "CameraPitchDownOffset 0", "CameraPitchUpOffset -250", "BaseCameraPosition (X=0,Y=0,Z=300)", "bScaleDistanceWithSpeed True", ]} } @Hook("WillowGame.VehicleSpawnStationTerminal.UnlockForOtherUsers") def EndLoad(self, caller: unrealsdk.UObject, function: unrealsdk.UFunction, params: unrealsdk.FStruct) -> bool: self.calc_driver_cam() return True def GameInputPressed(self, bind: KeybindManager.Keybind): if bind.Name == "Driver Cam": self.is_first_person = not self.is_first_person self.calc_driver_cam() def calc_driver_cam(self): pc = bl2tools.get_player_controller() if pc and pc.Pawn: vh = bl2tools.get_obj_path_name(pc.CharacterClass).lower() if not self.is_first_person: vh = "default" for char, data in self.settings.items(): if char not in vh: continue for cam, attrs in data.items(): if not unrealsdk.FindObject("PassengerCameraDefinition", cam): continue for att in attrs: bl2tools.console_command(f"set {cam} {att}") unrealsdk.RegisterMod(FPDriver())
83.298643
117
0.296485
748
18,409
7.161765
0.181818
0.083629
0.135897
0.148964
0.7605
0.753033
0.741086
0.725033
0.725033
0.725033
0
0.041134
0.647401
18,409
220
118
83.677273
0.784163
0
0
0.742718
0
0
0.272964
0.142919
0
0
0
0
0
1
0.019417
false
0.004854
0.019417
0
0.07767
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
1
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
075e89a339167cf61754d0c2c9247a3e72efb366
11,965
py
Python
ctpbee/data_handle/generator.py
tbmilk/ctpbee
03f32493962578612e1116b84443a0e351cbc8d8
[ "MIT" ]
null
null
null
ctpbee/data_handle/generator.py
tbmilk/ctpbee
03f32493962578612e1116b84443a0e351cbc8d8
[ "MIT" ]
null
null
null
ctpbee/data_handle/generator.py
tbmilk/ctpbee
03f32493962578612e1116b84443a0e351cbc8d8
[ "MIT" ]
null
null
null
# encoding: UTF-8 from copy import deepcopy from typing import Iterable, Callable from pandas._typing import FuncType from ctpbee.signals import common_signals from ctpbee.constant import BarData, TickData, EVENT_BAR from ctpbee.constant import Event class DataGenerator: def __init__(self, app): self.app = app self.XMIN = app.config.get("XMIN") self.last_entity: {int: None} = {x: None for x in self.XMIN} self.last_datetime = {x: None for x in self.XMIN} def update_tick(self, tick: TickData): bar = self.resample(tick) for x in bar: event = Event(type=EVENT_BAR, data=x) common_signals.bar_signal.send(event) def resample(self, tick_data: TickData) -> BarData or None: data = [] for frq, last in self.last_entity.items(): if last is None: self.last_entity[frq] = BarData( datetime=tick_data.datetime, high_price=tick_data.last_price, low_price=tick_data.last_price, close_price=tick_data.last_price, open_price=tick_data.last_price, interval=frq, volume=0, first_volume=tick_data.volume, local_symbol=tick_data.local_symbol, ) self.last_datetime[frq] = tick_data.datetime else: if frq != 1 and tick_data.datetime.minute % frq == 0 and abs( (tick_data.datetime - self.last_datetime[frq]).seconds) >= 60: temp = deepcopy(last) if self.check_tick(tick_data): temp.high_price = max(temp.high_price, tick_data.last_price) temp.low_price = min(temp.high_price, tick_data.last_price) temp.close_price = tick_data.last_price temp.volume += max(tick_data.volume - temp.first_volume, 0) self.last_entity[frq] = None else: self.last_entity[frq] = BarData( datetime=tick_data.datetime, high_price=tick_data.last_price, low_price=tick_data.last_price, close_price=tick_data.last_price, open_price=tick_data.last_price, interval=frq, volume=0, first_volume=tick_data.volume, local_symbol=tick_data.local_symbol ) self.last_datetime[frq] = tick_data.datetime data.append(temp) elif frq != 1: self.last_entity[frq].high_price = max(self.last_entity[frq].high_price, tick_data.last_price) 
self.last_entity[frq].low_price = min(self.last_entity[frq].low_price, tick_data.last_price) self.last_entity[frq].close_price = tick_data.last_price self.last_entity[frq].volume += max(tick_data.volume - self.last_entity[frq].first_volume, 0) self.last_entity[frq].first_volume = tick_data.volume # 处理一分钟的k线 if frq == 1 and tick_data.datetime.second == 0 and \ abs((tick_data.datetime - self.last_datetime[frq]).seconds) > 10: temp = deepcopy(last) if self.check_tick(tick_data): temp.high_price = max(temp.high_price, tick_data.last_price) temp.low_price = min(temp.high_price, tick_data.last_price) temp.close_price = tick_data.last_price temp.volume += max(tick_data.volume - temp.first_volume, 0) self.last_entity[frq] = None else: self.last_entity[frq] = BarData( datetime=tick_data.datetime, high_price=tick_data.last_price, low_price=tick_data.last_price, close_price=tick_data.last_price, open_price=tick_data.last_price, interval=frq, volume=0, first_volume=tick_data.volume, local_symbol=tick_data.local_symbol ) self.last_datetime[frq] = tick_data.datetime data.append(temp) elif frq == 1: self.last_entity[frq].high_price = max(self.last_entity[frq].high_price, tick_data.last_price) self.last_entity[frq].low_price = min(self.last_entity[frq].low_price, tick_data.last_price) self.last_entity[frq].close_price = tick_data.last_price self.last_entity[frq].volume += max(tick_data.volume - self.last_entity[frq].first_volume, 0) self.last_entity[frq].first_volume = tick_data.volume return data @staticmethod def check_tick(T: TickData): if (T.datetime.hour == 10 and T.datetime.minute == 15) or \ (T.datetime.hour == 11 and T.datetime.minute == 30) or \ (T.datetime.hour == 15 and T.datetime.minute == 0) or \ (T.datetime.hour == 23 and T.datetime.minute == 0): return True return False def __del__(self): for x in self.last_entity.values(): event = Event(type=EVENT_BAR, data=x) common_signals.bar_signal.send(event) self.last_entity.clear() class HighKlineSupporter: def 
__init__(self, code: str, callback: Callable, interval: Iterable, data: dict): assert code in data.keys() assert data[code].get("time") is not None if not isinstance(callback, Callable): raise TypeError("callable should be a function") self.callback = callback self.code = code self.last_entity: {int: None} = {x: None for x in interval} self.last_datetime = {x: None for x in interval} self._data = data def update_tick(self, tick: TickData): bar = self.resample(tick) for x in bar: self.callback(x) def resample(self, tick_data: TickData) -> BarData or None: data = [] for frq, last in self.last_entity.items(): if last is None: self.last_entity[frq] = BarData( datetime=tick_data.datetime, high_price=tick_data.last_price, low_price=tick_data.last_price, close_price=tick_data.last_price, open_price=tick_data.last_price, interval=frq, volume=0, first_volume=tick_data.volume, local_symbol=tick_data.local_symbol, ) self.last_datetime[frq] = tick_data.datetime else: if frq != 1 and tick_data.datetime.minute % frq == 0 and abs( (tick_data.datetime - self.last_datetime[frq]).seconds) >= 60: temp = deepcopy(last) if self.check_tick(tick_data): temp.high_price = max(temp.high_price, tick_data.last_price) temp.low_price = min(temp.high_price, tick_data.last_price) temp.close_price = tick_data.last_price temp.volume += max(tick_data.volume - temp.first_volume, 0) self.last_entity[frq] = None else: self.last_entity[frq] = BarData( datetime=tick_data.datetime, high_price=tick_data.last_price, low_price=tick_data.last_price, close_price=tick_data.last_price, open_price=tick_data.last_price, interval=frq, volume=0, first_volume=tick_data.volume, local_symbol=tick_data.local_symbol ) self.last_datetime[frq] = tick_data.datetime data.append(temp) elif frq != 1: self.last_entity[frq].high_price = max(self.last_entity[frq].high_price, tick_data.last_price) self.last_entity[frq].low_price = min(self.last_entity[frq].low_price, tick_data.last_price) self.last_entity[frq].close_price = 
tick_data.last_price self.last_entity[frq].volume += max(tick_data.volume - self.last_entity[frq].first_volume, 0) self.last_entity[frq].first_volume = tick_data.volume # 处理一分钟的k线 if frq == 1 and tick_data.datetime.second == 0 and \ abs((tick_data.datetime - self.last_datetime[frq]).seconds) > 10: temp = deepcopy(last) if self.check_tick(tick_data): temp.high_price = max(temp.high_price, tick_data.last_price) temp.low_price = min(temp.high_price, tick_data.last_price) temp.close_price = tick_data.last_price temp.volume += max(tick_data.volume - temp.first_volume, 0) self.last_entity[frq] = None else: self.last_entity[frq] = BarData( datetime=tick_data.datetime, high_price=tick_data.last_price, low_price=tick_data.last_price, close_price=tick_data.last_price, open_price=tick_data.last_price, interval=frq, volume=0, first_volume=tick_data.volume, local_symbol=tick_data.local_symbol ) self.last_datetime[frq] = tick_data.datetime data.append(temp) elif frq == 1: self.last_entity[frq].high_price = max(self.last_entity[frq].high_price, tick_data.last_price) self.last_entity[frq].low_price = min(self.last_entity[frq].low_price, tick_data.last_price) self.last_entity[frq].close_price = tick_data.last_price self.last_entity[frq].volume += max(tick_data.volume - self.last_entity[frq].first_volume, 0) self.last_entity[frq].first_volume = tick_data.volume return data def __del__(self): self.last_entity.clear() def check_tick(self, T: TickData): h = self._data[self.code]["time"].get("night") if h is not None: """ 处理夜盘 """ hour, minute, second = [int(x) for x in h[0][-1].split(":")] if hour >= 24: hour = hour - 24 if (T.datetime.hour == 10 and T.datetime.minute == 15) or \ (T.datetime.hour == 11 and T.datetime.minute == 30) or \ (T.datetime.hour == 15 and T.datetime.minute == 0) or \ (T.datetime.hour == hour and T.datetime.minute == minute): # make night kline true return True else: """ 处理白天 """ if (T.datetime.hour == 10 and T.datetime.minute == 15) or \ (T.datetime.hour == 11 
and T.datetime.minute == 30) or \ (T.datetime.hour == 15 and T.datetime.minute == 0): return True return False
50.273109
114
0.525366
1,368
11,965
4.349415
0.082602
0.131765
0.115294
0.137143
0.86
0.86
0.86
0.85395
0.844874
0.834622
0
0.010493
0.386711
11,965
237
115
50.485232
0.800354
0.004597
0
0.805556
0
0
0.003956
0
0
0
0
0
0.009259
1
0.046296
false
0
0.027778
0
0.115741
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
ab375f6c1611ce3c3660591e4ec881a2bc3e601d
9,765
py
Python
tests/test_required_types.py
hunter-packages/fruit
71d9ada48f7bf1749ce2889250955404582a7c6b
[ "Apache-2.0" ]
1
2018-08-29T11:10:35.000Z
2018-08-29T11:10:35.000Z
tests/test_required_types.py
hunter-packages/fruit
71d9ada48f7bf1749ce2889250955404582a7c6b
[ "Apache-2.0" ]
1
2018-08-29T11:29:53.000Z
2018-08-29T11:29:53.000Z
tests/test_required_types.py
hunter-packages/fruit
71d9ada48f7bf1749ce2889250955404582a7c6b
[ "Apache-2.0" ]
2
2020-10-01T04:19:30.000Z
2021-07-01T07:50:22.000Z
#!/usr/bin/env python3 # Copyright 2016 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS-IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from fruit_test_common import * COMMON_DEFINITIONS = ''' #include "test_common.h" struct X; struct Annotation1 {}; using XAnnot1 = fruit::Annotated<Annotation1, X>; ''' def test_required_success(): source = ''' struct X { virtual void foo() = 0; virtual ~X() = default; }; using XFactory = std::function<std::unique_ptr<X>()>; struct Y { XFactory xFactory; INJECT(Y(XFactory xFactory)) : xFactory(xFactory) { } void doStuff() { xFactory()->foo(); } }; fruit::Component<fruit::Required<XFactory>, Y> getYComponent() { return fruit::createComponent(); } struct XImpl : public X { INJECT(XImpl()) = default; void foo() override {} }; fruit::Component<XFactory> getXFactoryComponent() { return fruit::createComponent() .bind<X, XImpl>(); } fruit::Component<Y> getComponent() { return fruit::createComponent() .install(getYComponent) .install(getXFactoryComponent); } int main() { fruit::Injector<Y> injector(getComponent); Y* y(injector); y->doStuff(); } ''' expect_success(COMMON_DEFINITIONS, source) def test_required_annotated_success(): source = ''' struct X { virtual void foo() = 0; virtual ~X() = default; }; using XFactory = std::function<std::unique_ptr<X>()>; using XFactoryAnnot = fruit::Annotated<Annotation1, XFactory>; struct Y { XFactory xFactory; INJECT(Y(ANNOTATED(Annotation1, XFactory) xFactory)) : xFactory(xFactory) { } void doStuff() { xFactory()->foo(); } }; 
fruit::Component<fruit::Required<XFactoryAnnot>, Y> getYComponent() { return fruit::createComponent(); } struct XImpl : public X { INJECT(XImpl()) = default; void foo() override {} }; fruit::Component<XFactoryAnnot> getXFactoryComponent() { return fruit::createComponent() .bind<fruit::Annotated<Annotation1, X>, fruit::Annotated<Annotation1, XImpl>>(); } fruit::Component<Y> getComponent() { return fruit::createComponent() .install(getYComponent) .install(getXFactoryComponent); } int main() { fruit::Injector<Y> injector(getComponent); Y* y(injector); y->doStuff(); } ''' expect_success(COMMON_DEFINITIONS, source) def test_required_forward_declared_success(): source = ''' struct X; using XFactory = std::function<std::unique_ptr<X>()>; struct Y { XFactory xFactory; INJECT(Y(XFactory xFactory)) : xFactory(xFactory) { } void doStuff(); }; fruit::Component<fruit::Required<XFactory>, Y> getYComponent() { return fruit::createComponent(); } fruit::Component<XFactory> getXFactoryComponent(); fruit::Component<Y> getComponent() { return fruit::createComponent() .install(getYComponent) .install(getXFactoryComponent); } int main() { fruit::Injector<Y> injector(getComponent); Y* y(injector); y->doStuff(); } // We define X as late as possible, to make sure that all the above compiles even if X is only forward-declared. 
struct X { virtual void foo() = 0; virtual ~X() = default; }; void Y::doStuff() { xFactory()->foo(); } struct XImpl : public X { INJECT(XImpl()) = default; void foo() override {} }; fruit::Component<XFactory> getXFactoryComponent() { return fruit::createComponent() .bind<X, XImpl>(); } ''' expect_success(COMMON_DEFINITIONS, source) def test_required_annotated_forward_declared_success(): source = ''' struct X; using XFactory = std::function<std::unique_ptr<X>()>; using XFactoryAnnot = fruit::Annotated<Annotation1, XFactory>; struct Y { XFactory xFactory; INJECT(Y(ANNOTATED(Annotation1, XFactory) xFactory)) : xFactory(xFactory) { } void doStuff(); }; fruit::Component<fruit::Required<XFactoryAnnot>, Y> getYComponent() { return fruit::createComponent(); } fruit::Component<XFactoryAnnot> getXFactoryComponent(); fruit::Component<Y> getComponent() { return fruit::createComponent() .install(getYComponent) .install(getXFactoryComponent); } int main() { fruit::Injector<Y> injector(getComponent); Y* y(injector); y->doStuff(); } // We define X as late as possible, to make sure that all the above compiles even if X is only forward-declared. 
struct X { virtual void foo() = 0; virtual ~X() = default; }; void Y::doStuff() { xFactory()->foo(); } struct XImpl : public X { INJECT(XImpl()) = default; void foo() override {} }; fruit::Component<XFactoryAnnot> getXFactoryComponent() { return fruit::createComponent() .bind<fruit::Annotated<Annotation1, X>, fruit::Annotated<Annotation1, XImpl>>(); } ''' expect_success(COMMON_DEFINITIONS, source) def test_required_const_forward_declared_success(): source = ''' struct X; using XFactory = std::function<std::unique_ptr<X>()>; struct Y { XFactory xFactory; INJECT(Y(XFactory xFactory)) : xFactory(xFactory) { } void doStuff(); }; fruit::Component<fruit::Required<const XFactory>, Y> getYComponent() { return fruit::createComponent(); } fruit::Component<const XFactory> getXFactoryComponent(); fruit::Component<Y> getComponent() { return fruit::createComponent() .install(getYComponent) .install(getXFactoryComponent); } int main() { fruit::Injector<Y> injector(getComponent); Y* y(injector); y->doStuff(); } // We define X as late as possible, to make sure that all the above compiles even if X is only forward-declared. 
struct X { virtual void foo() = 0; virtual ~X() = default; }; void Y::doStuff() { xFactory()->foo(); } struct XImpl : public X { INJECT(XImpl()) = default; void foo() override {} }; fruit::Component<const XFactory> getXFactoryComponent() { return fruit::createComponent() .bind<X, XImpl>(); } ''' expect_success(COMMON_DEFINITIONS, source) def test_required_const_annotated_forward_declared_success(): source = ''' struct X; using XFactory = std::function<std::unique_ptr<X>()>; using ConstXFactoryAnnot = fruit::Annotated<Annotation1, const XFactory>; struct Y { XFactory xFactory; INJECT(Y(ANNOTATED(Annotation1, XFactory) xFactory)) : xFactory(xFactory) { } void doStuff(); }; fruit::Component<fruit::Required<ConstXFactoryAnnot>, Y> getYComponent() { return fruit::createComponent(); } fruit::Component<ConstXFactoryAnnot> getXFactoryComponent(); fruit::Component<Y> getComponent() { return fruit::createComponent() .install(getYComponent) .install(getXFactoryComponent); } int main() { fruit::Injector<Y> injector(getComponent); Y* y(injector); y->doStuff(); } // We define X as late as possible, to make sure that all the above compiles even if X is only forward-declared. struct X { virtual void foo() = 0; virtual ~X() = default; }; void Y::doStuff() { xFactory()->foo(); } struct XImpl : public X { INJECT(XImpl()) = default; void foo() override {} }; fruit::Component<ConstXFactoryAnnot> getXFactoryComponent() { return fruit::createComponent() .bind<fruit::Annotated<Annotation1, X>, fruit::Annotated<Annotation1, XImpl>>(); } ''' expect_success(COMMON_DEFINITIONS, source) if __name__== '__main__': main(__file__)
32.121711
120
0.543574
870
9,765
6.033333
0.151724
0.073157
0.08916
0.022862
0.857878
0.855973
0.855973
0.843018
0.828539
0.826824
0
0.004672
0.342448
9,765
303
121
32.227723
0.812802
0.060727
0
0.778626
0
0.015267
0.91199
0.251256
0
0
0
0
0
1
0.022901
false
0
0.003817
0
0.09542
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
8
ab4efb56d781e1820a417347df39c95f5a12674f
9,976
py
Python
python/reduction/definitions.py
apaszke/iree-llvm-sandbox
cf04a17076c0d842f47adb655998929ad7ccbd43
[ "Apache-2.0" ]
null
null
null
python/reduction/definitions.py
apaszke/iree-llvm-sandbox
cf04a17076c0d842f47adb655998929ad7ccbd43
[ "Apache-2.0" ]
null
null
null
python/reduction/definitions.py
apaszke/iree-llvm-sandbox
cf04a17076c0d842f47adb655998929ad7ccbd43
[ "Apache-2.0" ]
null
null
null
import sys, time from typing import Any, List, Mapping, Optional, Sequence import numpy as np from mlir.ir import * from mlir.dialects import arith, builtin, linalg, scf, std from mlir.dialects.linalg.opdsl.lang import * from ..core.compilation import attach_inplaceable_attributes, attach_passthrough from ..core.problem_definition import * from ..core.utils import * # TODO: Orthogonal configuration object. avx512 = True ################################################################################ ### Row Reduction 2D ################################################################################ # Op def: ( m, k ) # Iters: ({Par(), Red()}) # I O # Layout: {{m, k}, {m}} # TODO: fold OpDSL definition and inferences into ProblemDefinition. @linalg_structured_op def row_reduction_2d( A=TensorDef(T, S.M, S.N), B=TensorDef(T, S.M, output=True)): domain(D.m, D.n) B[D.m] += A[D.m, D.n] class RowReduction2DProblem(ProblemDefinition): """ Problem definition for a single fill + row_reduction_2d problem.""" def shapes_builder(self, sizes: Mapping[str, Any]) -> List[List[int]]: """Shape builder function. Given a mapping between dimension names / op attributes and their numeric values, return the list of lists of shapes of the FuncOp operands. The FuncOp is responsible for distinguishing between input operands and results. """ M, K = sizes["M"], sizes["K"] return [[M, K], [M]] def gflop_count_builder(self, sizes: Mapping[str, Any]) -> float: """GFlop builder function. Given a mapping between dimension names / op attributes and their numeric values, return the number of GFlops computed. """ M, K = sizes["M"], sizes["K"] return float(M * K) / float(1e9) def gbyte_count_builder(self, sizes: Mapping[str, Any], types: Sequence[np.dtype]) -> float: """GByte builder function. Given a mapping between dimension names / op attributes and their numeric values, and a list of data types, return the number of GBytes read or written. 
""" M, K = sizes["M"], sizes["K"] inp_np_type, out_np_type = types return float(M * K * np.dtype(inp_np_type).itemsize + M * np.dtype(out_np_type).itemsize) / float(1e9) def tensors_np_builder(self, sizes: Mapping[str, Any], types: Sequence[np.dtype]) -> List[np.dtype]: """NumPy tensors building function. Given a mapping between dimension names / op attributes and their numeric values, and a list of NumPy elemental types, return constructed NP values of shapes given by `shape_builder` and specified elemental types. """ shapes = self.shapes_builder(sizes) tensors = [ realign(np.random.rand(*s).astype(t), byte_alignment=64) for s, t in zip(shapes, types) ] # Uncomment to simplify debugging. # tensors = [ # realign(np.arange(1, np.prod(s) + 1).reshape(s).astype(t), \ # byte_alignment=64) \ # for s, t in zip(shapes, types) # ] tensors[len(tensors) - 1].fill(0.) return tensors def check_np(self, A: np.dtype, B: np.dtype) -> None: """NumPy checking function. Given a list of NumPy values, check the precomputed results matches those of the expected reference implementation. """ if not np.allclose(B, np.sum(A, axis=1)): delta = B - np.sum(A, axis=1) max_abs_delta = max(delta.max(), delta.min(), key=abs) raise Exception(f"max_abs_delta: {max_abs_delta} -> FAILURE ") def types_mlir_builder(self, sizes: Mapping[str, Any], types: Sequence[Type]) -> List[Type]: """ MLIR types builder. Given a mapping between dimension names / op attributes and their numeric values, and a list of elemental MLIR types, return MLIR tensor types of the shape expected by the function. """ shapes = self.shapes_builder(sizes) return [RankedTensorType.get(s, t) for s, t in zip(shapes, types)] def build_problem_under_context_manager( self, name: str, types: Sequence[Type]) -> builtin.FuncOp: """MLIR problem builder. Given a list of MLIR shaped types, build and return the MLIR FuncOp that implements the desired computation on those types. 
""" global avx512 # Actual benchmarked function called under entry_point_name. func = builtin.FuncOp(name, (types, types[-1:])) # TODO: need something much more flexible to add func argument attributes. attach_inplaceable_attributes(func, inplaceable=[False, True]) attach_passthrough(func, [StringAttr.get('noinline')], avx512=avx512) output_elem_type = types[-1].element_type with InsertionPoint(func.add_entry_block()): zero = arith.ConstantOp(output_elem_type, 0.0) tensor_zero = linalg.FillOp(output=func.arguments[1], value=zero) result = row_reduction_2d(func.arguments[0], outs=[tensor_zero]) # linalg.matmul returns a Value instead of OpView, so we have to manually # wrap it in a list here. std.ReturnOp([result]) return func ################################################################################ ### Column Reduction 2D ################################################################################ # Op def: ( m, k ) # Iters: ({Red(), Par()}) # I O # Layout: {{m, k}, {k}} # TODO: fold OpDSL definition and inferences into ProblemDefinition. @linalg_structured_op def column_reduction_2d( A=TensorDef(T, S.M, S.N), B=TensorDef(T, S.N, output=True)): domain(D.m, D.n) B[D.n] += A[D.m, D.n] class ColumnReduction2DProblem(ProblemDefinition): """ Problem definition for a single fill + row_reduction_2d problem.""" def shapes_builder(self, sizes: Mapping[str, Any]) -> List[List[int]]: """Shape builder function. Given a mapping between dimension names / op attributes and their numeric values, return the list of lists of shapes of the FuncOp operands. The FuncOp is responsible for distinguishing between input operands and results. """ M, K = sizes["M"], sizes["K"] return [[M, K], [K]] def gflop_count_builder(self, sizes: Mapping[str, Any]) -> float: """GFlop builder function. Given a mapping between dimension names / op attributes and their numeric values, return the number of GFlops computed. 
""" M, K = sizes["M"], sizes["K"] return float(M * K) / float(1e9) def gbyte_count_builder(self, sizes: Mapping[str, Any], types: Sequence[np.dtype]) -> float: """GByte builder function. Given a mapping between dimension names / op attributes and their numeric values, and a list of data types, return the number of GBytes read or written. """ M, K = sizes["M"], sizes["K"] inp_np_type, out_np_type = types return float(M * K * np.dtype(inp_np_type).itemsize + K * np.dtype(out_np_type).itemsize) / float(1e9) def tensors_np_builder(self, sizes: Mapping[str, Any], types: Sequence[np.dtype]) -> List[np.dtype]: """NumPy tensors building function. Given a mapping between dimension names / op attributes and their numeric values, and a list of NumPy elemental types, return constructed NP values of shapes given by `shape_builder` and specified elemental types. """ shapes = self.shapes_builder(sizes) tensors = [ realign(np.random.rand(*s).astype(t), byte_alignment=64) for s, t in zip(shapes, types) ] # Uncomment to simplify debugging. # tensors = [ # realign(np.arange(1, np.prod(s) + 1).reshape(s).astype(t), \ # byte_alignment=64) \ # for s, t in zip(shapes, types) # ] tensors[len(tensors) - 1].fill(0.) return tensors def check_np(self, A: np.dtype, B: np.dtype) -> None: """NumPy checking function. Given a list of NumPy values, check the precomputed results matches those of the expected reference implementation. """ if not np.allclose(B, np.sum(A, axis=0)): delta = B - np.sum(A, axis=0) max_abs_delta = max(delta.max(), delta.min(), key=abs) raise Exception(f"max_abs_delta: {max_abs_delta} -> FAILURE ") def types_mlir_builder(self, sizes: Mapping[str, Any], types: Sequence[Type]) -> List[Type]: """ MLIR types builder. Given a mapping between dimension names / op attributes and their numeric values, and a list of elemental MLIR types, return MLIR tensor types of the shape expected by the function. 
""" shapes = self.shapes_builder(sizes) return [RankedTensorType.get(s, t) for s, t in zip(shapes, types)] def build_problem_under_context_manager( self, name: str, types: Sequence[Type]) -> builtin.FuncOp: """MLIR problem builder. Given a list of MLIR shaped types, build and return the MLIR FuncOp that implements the desired computation on those types. """ global avx512 # Actual benchmarked function called under entry_point_name. func = builtin.FuncOp(name, (types, types[-1:])) # TODO: need something much more flexible to add func argument attributes. attach_inplaceable_attributes(func, inplaceable=[False, True]) attach_passthrough(func, [StringAttr.get('noinline')], avx512=avx512) output_elem_type = types[-1].element_type with InsertionPoint(func.add_entry_block()): zero = arith.ConstantOp(output_elem_type, 0.0) tensor_zero = linalg.FillOp(output=func.arguments[1], value=zero) result = column_reduction_2d(func.arguments[0], outs=[tensor_zero]) # linalg.matmul returns a Value instead of OpView, so we have to manually # wrap it in a list here. std.ReturnOp([result]) return func
37.787879
80
0.633821
1,331
9,976
4.664914
0.166041
0.005154
0.025769
0.037043
0.930907
0.927686
0.918666
0.911258
0.911258
0.904171
0
0.009214
0.227546
9,976
263
81
37.931559
0.796522
0.398557
0
0.722222
0
0
0.021468
0
0
0
0
0.011407
0
1
0.148148
false
0.027778
0.083333
0
0.361111
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
8
db45ddfe3300f3b28b6bf8e3b8df9da72516da3d
62,380
py
Python
mms/threshold.py
BridgelessAlexQiu/Mask-Disease-Multilayer
0ef0137e9d607f6f30c7c4e0b19c4cf833bff35e
[ "MIT" ]
null
null
null
mms/threshold.py
BridgelessAlexQiu/Mask-Disease-Multilayer
0ef0137e9d607f6f30c7c4e0b19c4cf833bff35e
[ "MIT" ]
null
null
null
mms/threshold.py
BridgelessAlexQiu/Mask-Disease-Multilayer
0ef0137e9d607f6f30c7c4e0b19c4cf833bff35e
[ "MIT" ]
null
null
null
""" Author: Zirou Qiu Last modfied: 10/31/2020 Description: This module consists of simulations of the spread of multiple contigions on a single network under the threshold model. """ #--------------------------- Imports ------------------------------# import numpy as np import mms.utility as mu from scipy import sparse #----------------------- Funciton Defintions ----------------------# def isolate_threshold_count(A, B, T, k, r = 0): """ Description ----------- This function simulate the spread of multiple contigions on a single network where each contagion has 2 states (0 or 1). Contagions are not interrelated. Parameters ---------- A: scipy array, int {0, 1} The adjacency matrix of G. A is sparse B: scipy array, int {0, 1} The initial configuration matrix where $B_{vj}$ is the state value of vertex v for contagion j. B is sparse T: numpy array, int The threshold matrix where $T_{vj}$ is the threshold of vertex v for contagion j. k: int The number of system iterations r: float, optional The recovery probability. In each iteration, each vertex has a probability r changing the state to 0 for each contigion. 
Returns ------- B: numpy array The final configuration """ # Make all 1s along the diagonal of A (since we are considering the closed neighborhood) #np.fill_diagonal(A, 1) # The recovery probability recovery = False if r != 0: recovery = True # The main loop for i in range(k): # matrix operation B_last = B B = A @ B - T #B = np.matmul(A, B_last) - T # update states B[B >= 0] = 1 B[B < 0] = 0 # If a recovery probability is set if recovery: B[np.random.rand(*B.shape) < r] = 0 # if fixed point if np.array_equal(B, B_last): print("A fixed point is reached at iteration {}".format(i)) return B print("Max number of iteratios reached") return B ########################################################################################### def correlate_threshold_weight(A, B, T, W, k, r = 0): """ Description ----------- This function simulate the spread of multiple contigions on a single network where each contagion has 2 states (0 or 1). Contagions are interrelated as described by the thrid model. Parameters ---------- A: numpy array, int {0, 1} The adjacency matrix of G. A is sparse B: numpy array, int {0, 1} The initial configuration matrix where $B_{vj}$ is the state value of vertex v for contagion j. B is sparse T: numpy array, int The threshold matrix where $T_{vj}$ is the threshold of vertex v for contagion j. W: numpy array, float [0, 1] The weight matrix where $W_{ij}$ is the weight of contagion j w.r.t contagion i k: int The number of system iterations r: float, optional The recovery probability. In each iteration, each vertex has a probability r changing the state to 0 for each contigion. 
Returns ------- B: numpy array The final configuration """ # Make all 1s along the diagonal of A (since we are considering the closed neighborhood) #A.setdiag(1) # The recovery probability recovery = False if r != 0: recovery = True # Take the transpose of the weight matrix W = np.transpose(W) # The main loop for i in range(k): # matrix operation B_last = B #B = np.linalg.multi_dot([A, B_last, W]) - T B = A @ B_last @ W - T # update states B[B >= 0] = 1 B[B < 0] = 0 # If a recovery probability is set if recovery: B[np.random.rand(*B.shape) < r] = 0 # if fixed point if np.array_equal(B, B_last): print("A fixed point is reached at iteration {}".format(i)) return B #h = hpy() #print(h.heap()) print("Max number of iteratios reached") return B def correlate_threshold_density(A, B, T, d, k): """ Description ----------- This function simulate the spread of multiple contigions on a single network where each contagion has 2 states (0 or 1). Contagions interrelated as described by the second model. Parameters ---------- A: numpy array, int {0, 1} The adjacency matrix of G. A is sparse B: numpy array, int {0, 1} The initial configuration matrix where $B_{vj}$ is the state value of vertex v for contagion j. B is sparse T: numpy array, int The threshold matrix where $T_{vj}$ is the threshold of vertex v for contagion j. 
d: numpy array, int The density vector k: int The number of system iterations Returns ------- B: numpy array The final configuration """ # Compute the reciprocal d_bar = np.transpose( np.reciprocal(d.astype(float)) ) # Make sure that d is a column vector # The number of contagions c = np.shape(T)[1] # k * 1 ones one = np.ones((c, 1), dtype = 'float') # The main loop for i in range(k): B_last = B # Compute M M = B @ one @ d_bar #M = np.linalg.multi_dot([B, one, d_bar]) M[M >= 1.0] = 1.0 M[M < 1.0] = 0.0 #B = np.matmul(A, M) - T B = A @ M - T # update states B[B >= 0.0] = 1.0 B[B < 0.0] = 0.0 # if fixed point if np.array_equal(B, B_last): print("A fixed point is reached at iteration {}".format(i)) return B print("Max number of iteratios reached") return B def covid_mask(A_1, A_2, D_inverse, a_1, t_2, t_3, b_1, b_2, p, alpha, beta, r, k): """ Description ----------- This funciton simulate the spread of two contagions on two different networks, where contagions are correlated as described in the project report. Parameters ---------- A_1: n x n scipy sparse matrix, int {0, 1} The adjacency matrix of the social layer A_2: n x n scipy sparse matrix, int {0, 1} The adjacency matrix of the disease layer D_inverse: n x n scipy sparse matrix, float [0, 1] The inversed diagonal matrix of the social layer. a_1: n x 1 scipy sparse matrix, int {0, 1} (a_1)_i = 1 if the person i is prosocial, and (a_1)_i = 0 otherwise. t_2: n x 1 numpy array, float [0, 1] (t_2)_i is threshold percentage of neighbors who wear masks for person i to wear a mask in the next iteration. t_3: n x 1 numpy array, float [0, 1] (t_3)_i is the threshold percentage of the overall infection of the population for person i to wear a mask in the next iteration. b_1: n x 1 scipy sparse matrix, int {0, 1} (b_1)_i = 1 if the person i wears a mask at the current iteration. 
b_2: n x 1 scipy sparse matrix, int {0, 1} (b_2)_1 = 1 if the person i is infected by the disease at the current iteration p: float [0, 1] Transimission probability of the disease alpha: The damping factor on p when the person himself wears a mask. beta: The damping factor on p when a neighbor of a person wears a mask. r: Recovery probability. k: The maximum number of time-steps. """ # Keep track of the dynamic: {time: [# of masks, # of infections]} # dynamic = {} # Compute the degree fraction matrix F = D_inverse @ A_1 F = sparse.csr_matrix(F) # The number of vertices n = np.shape(A_1)[0] # The one and zero vectors one = np.ones((n, 1), dtype = 'float') zero = np.zeros((n, 1), dtype = 'float') # The recovery vector: b_3 b_3 = np.zeros((n, 1), dtype = 'float') # Initially, no one has recovered. # The susceptible vector: b_4 b_4 = -b_2 - b_3 b_4[b_4 == 0.0] = 1.0 b_4[b_4 < 0.0] = 0.0 # The largest fraction of infection reached throughout the time max_frac = 0.0 # The number of days the infection lasts days = 0 # total number of mask wearings (sum over all days) total_mask = 0 # mask vector thoughout the time mask_vector = [] #new # infection vector infection_vector = [] #new # The main loop for i in range(k): days += 1 mask_vector.append(float(np.count_nonzero(b_1) / n)) #new infection_vector.append(float(np.count_nonzero(b_2) / n)) #new # dynamic[i] = [np.count_nonzero(b_1), np.count_nonzero(b_2), np.count_nonzero(b_3), np.count_nonzero(b_4)] # need b_1_last to update the state of the second contagion b_1_last = b_1 b_4_last = b_4 b_2_last = b_2 # The fraction of total number of infections a_3 = np.count_nonzero(b_2) / float(n) # Update the max_frac if a_3 > max_frac: max_frac = a_3 # determine if the overall faction of infection exceed the threshold l_3 = -(t_3 - a_3) # Note that I cannot do a_3 - t_3 since a_3 is not a vector l_3[l_3 >= 0.0] = 1.0 l_3[l_3 < 0.0] = 0.0 # l3 = t_3 <= a_3 # Determine if the fraction of neighbors with wear face masks exceeds a 
threshold l_2 = F @ b_1_last - t_2 # sparse? l_2[l_2 >= 0.0] = 1.0 l_2[l_2 < 0.0] = 0.0 # l_2 = (F @ b_1_last) >= t_2 WORTH TRYING! # Update the mask state b_1 b_1 = a_1 + l_2 + l_3 # logical operation? b_1[b_1 >= 1.0] = 1.0 # sparse? #b_1 = np.logical_or(np.logical_or(a_1, l_2), l_3) WORTH TRYING total_mask += np.count_nonzero(b_1) # The # of infected neighbors of each v d = A_2 @ b_2_last # The # of infected neighbors with mask d_2 = A_2 @ np.multiply(b_1_last, b_2_last) # Very important to pass b_1_last here # The # of infected neighbors without mask d_1 = d - d_2 # Only susceptibles (b_4) can be infected #--------------------------------------------------# # h1 : the probability of not getting infected from neighbors who do not wear masks (1 - p or 1 - alpha p) temp = one - (b_1 * (1.0 - alpha)) # IMPORTANT: b_1_last vs b_1 (syn vs asyn) h_1 = one - (temp * p) # h2: contains the probability of not getting infected from neighbors who wear masks (1 - beta p or 1 - alpha beta p) h_2 = one - (temp * beta * p) temp = np.multiply(np.power(h_1, d_1), np.power(h_2, d_2)) q = np.multiply(b_4, one - temp) #--------------------------------------------------# # Has to flatten q to pass it to the binomial funciton q_f = q.flatten() # Compute newly infected nodes newly_infected = np.reshape(np.random.binomial(1, q_f), (-1 ,1)) # Computer R0 (do this before recovery) # R_0 = np.count_nonzero(newly_infected) / np.count_nonzero(b_2) # Recovery rr = np.random.choice([0, 1], size = (n, 1), p=[1.0 - r, r]) b_3 = np.logical_and(b_2, rr) + b_3 # update b_3 b_2 = b_2 - rr b_2[b_2 == -1] = 0.0 # Update b_2 b_2 = newly_infected + b_2 # Update the susceptible vector b_4 = -b_2 - b_3 b_4[b_4 == 0.0] = 1.0 b_4[b_4 < 0.0] = 0.0 # A fixed point is reached under zero infection if np.array_equal(b_2, zero): # print("A fixed point is reached at iteration {}".format(i)) average_mask = float(total_mask / days) return round(float(np.count_nonzero(b_3) / n), 4), round(max_frac, 4), days, 
round(float(average_mask / n), 4) #return infection_vector, mask_vector average_mask = float(total_mask / days) return round(float(np.count_nonzero(b_3) / n), 4), round(max_frac, 4), days, round(float(average_mask / n), 4) #return infection_vector, mask_vector def covid_mask_sym_fear(A_1, A_2, D_inverse, a_1, t_2, t_3, b_1, b_2, p, alpha, beta, r, k, sym_ratio): """ Description ----------- This funciton simulate the spread of two contagions on two different networks, where contagions are correlated as described in the project report. Parameters ---------- A_1: n x n scipy sparse matrix, int {0, 1} The adjacency matrix of the social layer A_2: n x n scipy sparse matrix, int {0, 1} The adjacency matrix of the disease layer D_inverse: n x n scipy sparse matrix, float [0, 1] The inversed diagonal matrix of the social layer. a_1: n x 1 scipy sparse matrix, int {0, 1} (a_1)_i = 1 if the person i is prosocial, and (a_1)_i = 0 otherwise. t_2: n x 1 numpy array, float [0, 1] (t_2)_i is threshold percentage of neighbors who wear masks for person i to wear a mask in the next iteration. t_3: n x 1 numpy array, float [0, 1] (t_3)_i is the threshold percentage of the overall infection of the population for person i to wear a mask in the next iteration. b_1: n x 1 scipy sparse matrix, int {0, 1} (b_1)_i = 1 if the person i wears a mask at the current iteration. b_2: n x 1 scipy sparse matrix, int {0, 1} (b_2)_1 = 1 if the person i is infected by the disease at the current iteration p: float [0, 1] Transimission probability of the disease alpha: The damping factor on p when the person himself wears a mask. beta: The damping factor on p when a neighbor of a person wears a mask. r: Recovery probability. k: The maximum number of time-steps. 
""" # Keep track of the dynamic: {time: [# of masks, # of infections]} # dynamic = {} # Compute the degree fraction matrix F = D_inverse @ A_1 F = sparse.csr_matrix(F) # The number of vertices n = np.shape(A_1)[0] # The one and zero vectors one = np.ones((n, 1), dtype = 'float') zero = np.zeros((n, 1), dtype = 'float') # The recovery vector: b_3 b_3 = np.zeros((n, 1), dtype = 'float') # Initially, no one has recovered. # The susceptible vector: b_4 b_4 = -b_2 - b_3 b_4[b_4 == 0.0] = 1.0 b_4[b_4 < 0.0] = 0.0 # The largest fraction of infection reached throughout the time max_frac = 0.0 # The number of days the infection lasts days = 0 # total number of mask wearings (sum over all days) total_mask = 0 # mask vector thoughout the time mask_vector = [] #new # infection vector infection_vector = [] #new # The main loop for i in range(k): days += 1 mask_vector.append(float(np.count_nonzero(b_1) / n)) #new infection_vector.append(float(np.count_nonzero(b_2) / n)) #new # dynamic[i] = [np.count_nonzero(b_1), np.count_nonzero(b_2), np.count_nonzero(b_3), np.count_nonzero(b_4)] # need b_1_last to update the state of the second contagion b_1_last = b_1 b_4_last = b_4 b_2_last = b_2 # The fraction of total number of infections a_3 = np.count_nonzero(b_2) / float(n) # Update the max_frac if a_3 > max_frac: max_frac = a_3 # determine if the overall faction of infection exceed the threshold l_3 = -(t_3 - sym_ratio * a_3) # Note that I cannot do a_3 - t_3 since a_3 is not a vector l_3[l_3 >= 0.0] = 1.0 l_3[l_3 < 0.0] = 0.0 # l3 = t_3 <= a_3 # Determine if the fraction of neighbors with wear face masks exceeds a threshold l_2 = F @ b_1_last - t_2 # sparse? l_2[l_2 >= 0.0] = 1.0 l_2[l_2 < 0.0] = 0.0 # l_2 = (F @ b_1_last) >= t_2 WORTH TRYING! # Update the mask state b_1 b_1 = a_1 + l_2 + l_3 # logical operation? b_1[b_1 >= 1.0] = 1.0 # sparse? 
#b_1 = np.logical_or(np.logical_or(a_1, l_2), l_3) WORTH TRYING total_mask += np.count_nonzero(b_1) # The # of infected neighbors of each v d = A_2 @ b_2_last # The # of infected neighbors with mask d_2 = A_2 @ np.multiply(b_1_last, b_2_last) # Very important to pass b_1_last here # The # of infected neighbors without mask d_1 = d - d_2 # Only susceptibles (b_4) can be infected #--------------------------------------------------# # h1 : the probability of not getting infected from neighbors who do not wear masks (1 - p or 1 - alpha p) temp = one - (b_1 * (1.0 - alpha)) # IMPORTANT: b_1_last vs b_1 (syn vs asyn) h_1 = one - (temp * p) # h2: contains the probability of not getting infected from neighbors who wear masks (1 - beta p or 1 - alpha beta p) h_2 = one - (temp * beta * p) temp = np.multiply(np.power(h_1, d_1), np.power(h_2, d_2)) q = np.multiply(b_4, one - temp) #--------------------------------------------------# # Has to flatten q to pass it to the binomial funciton q_f = q.flatten() # Compute newly infected nodes newly_infected = np.reshape(np.random.binomial(1, q_f), (-1 ,1)) # Computer R0 (do this before recovery) # R_0 = np.count_nonzero(newly_infected) / np.count_nonzero(b_2) # Recovery rr = np.random.choice([0, 1], size = (n, 1), p=[1.0 - r, r]) b_3 = np.logical_and(b_2, rr) + b_3 # update b_3 b_2 = b_2 - rr b_2[b_2 == -1] = 0.0 # Update b_2 b_2 = newly_infected + b_2 # Update the susceptible vector b_4 = -b_2 - b_3 b_4[b_4 == 0.0] = 1.0 b_4[b_4 < 0.0] = 0.0 # A fixed point is reached under zero infection if np.array_equal(b_2, zero): # print("A fixed point is reached at iteration {}".format(i)) average_mask = float(total_mask / days) return round(float(np.count_nonzero(b_3) / n), 4), round(max_frac, 4), days, round(float(average_mask / n), 4) #return infection_vector, mask_vector average_mask = float(total_mask / days) return round(float(np.count_nonzero(b_3) / n), 4), round(max_frac, 4), days, round(float(average_mask / n), 4) #return 
infection_vector, mask_vector def covid_mask_peak_diff(A_1, A_2, D_inverse, a_1, t_2, t_3, b_1, b_2, p, alpha, beta, r, k): """ Description ----------- This funciton simulate the spread of two contagions on two different networks, where contagions are correlated as described in the project report. Parameters ---------- A_1: n x n scipy sparse matrix, int {0, 1} The adjacency matrix of the social layer A_2: n x n scipy sparse matrix, int {0, 1} The adjacency matrix of the disease layer D_inverse: n x n scipy sparse matrix, float [0, 1] The inversed diagonal matrix of the social layer. a_1: n x 1 scipy sparse matrix, int {0, 1} (a_1)_i = 1 if the person i is prosocial, and (a_1)_i = 0 otherwise. t_2: n x 1 numpy array, float [0, 1] (t_2)_i is threshold percentage of neighbors who wear masks for person i to wear a mask in the next iteration. t_3: n x 1 numpy array, float [0, 1] (t_3)_i is the threshold percentage of the overall infection of the population for person i to wear a mask in the next iteration. b_1: n x 1 scipy sparse matrix, int {0, 1} (b_1)_i = 1 if the person i wears a mask at the current iteration. b_2: n x 1 scipy sparse matrix, int {0, 1} (b_2)_1 = 1 if the person i is infected by the disease at the current iteration p: float [0, 1] Transimission probability of the disease alpha: The damping factor on p when the person himself wears a mask. beta: The damping factor on p when a neighbor of a person wears a mask. r: Recovery probability. k: The maximum number of time-steps. """ # Keep track of the dynamic: {time: [# of masks, # of infections]} # dynamic = {} # Compute the degree fraction matrix F = D_inverse @ A_1 F = sparse.csr_matrix(F) # The number of vertices n = np.shape(A_1)[0] # The one and zero vectors one = np.ones((n, 1), dtype = 'float') zero = np.zeros((n, 1), dtype = 'float') # The recovery vector: b_3 b_3 = np.zeros((n, 1), dtype = 'float') # Initially, no one has recovered. 
# The susceptible vector: b_4 b_4 = -b_2 - b_3 b_4[b_4 == 0.0] = 1.0 b_4[b_4 < 0.0] = 0.0 # The largest fraction of infection reached throughout the time max_frac_1 = 0.0 # The second largest fraction of infection reached throughout the time max_frac_2 = 0.0 # The time where the largest infection occurs peak_time_1 = 0 # The time where the second largest infection occurs peak_time_2 = 0 # The number of days the infection lasts days = 0 # total number of mask wearings (sum over all days) total_mask = 0 # mask vector thoughout the time mask_vector = [] #new # infection vector infection_vector = [] #new # The main loop for i in range(k): days += 1 mask_vector.append(float(np.count_nonzero(b_1) / n)) #new infection_vector.append(float(np.count_nonzero(b_2) / n)) #new # dynamic[i] = [np.count_nonzero(b_1), np.count_nonzero(b_2), np.count_nonzero(b_3), np.count_nonzero(b_4)] # need b_1_last to update the state of the second contagion b_1_last = b_1 b_4_last = b_4 b_2_last = b_2 # The fraction of total number of infections a_3 = np.count_nonzero(b_2) / float(n) # Update the max_frac if a_3 > max_frac_1: max_frac_2 = max_frac_1 peak_time_2 = peak_time_1 max_frac_1 = a_3 peak_time_1 = i # determine if the overall faction of infection exceed the threshold l_3 = -(t_3 - a_3) # Note that I cannot do a_3 - t_3 since a_3 is not a vector l_3[l_3 >= 0.0] = 1.0 l_3[l_3 < 0.0] = 0.0 # l3 = t_3 <= a_3 # Determine if the fraction of neighbors with wear face masks exceeds a threshold l_2 = F @ b_1_last - t_2 # sparse? l_2[l_2 >= 0.0] = 1.0 l_2[l_2 < 0.0] = 0.0 # l_2 = (F @ b_1_last) >= t_2 WORTH TRYING! # Update the mask state b_1 b_1 = a_1 + l_2 + l_3 # logical operation? b_1[b_1 >= 1.0] = 1.0 # sparse? 
#b_1 = np.logical_or(np.logical_or(a_1, l_2), l_3) WORTH TRYING total_mask += np.count_nonzero(b_1) # The # of infected neighbors of each v d = A_2 @ b_2_last # The # of infected neighbors with mask d_2 = A_2 @ np.multiply(b_1_last, b_2_last) # Very important to pass b_1_last here # The # of infected neighbors without mask d_1 = d - d_2 # Only susceptibles (b_4) can be infected #--------------------------------------------------# # h1 : the probability of not getting infected from neighbors who do not wear masks (1 - p or 1 - alpha p) temp = one - (b_1 * (1.0 - alpha)) # IMPORTANT: b_1_last vs b_1 (syn vs asyn) h_1 = one - (temp * p) # h2: contains the probability of not getting infected from neighbors who wear masks (1 - beta p or 1 - alpha beta p) h_2 = one - (temp * beta * p) temp = np.multiply(np.power(h_1, d_1), np.power(h_2, d_2)) q = np.multiply(b_4, one - temp) #--------------------------------------------------# # Has to flatten q to pass it to the binomial funciton q_f = q.flatten() # Compute newly infected nodes newly_infected = np.reshape(np.random.binomial(1, q_f), (-1 ,1)) # Computer R0 (do this before recovery) # R_0 = np.count_nonzero(newly_infected) / np.count_nonzero(b_2) # Recovery rr = np.random.choice([0, 1], size = (n, 1), p=[1.0 - r, r]) b_3 = np.logical_and(b_2, rr) + b_3 # update b_3 b_2 = b_2 - rr b_2[b_2 == -1] = 0.0 # Update b_2 b_2 = newly_infected + b_2 # Update the susceptible vector b_4 = -b_2 - b_3 b_4[b_4 == 0.0] = 1.0 b_4[b_4 < 0.0] = 0.0 # A fixed point is reached under zero infection if np.array_equal(b_2, zero): return peak_time return peak_time def covid_mask_control(A_1, A_2, D_inverse, a_1, t_2, t_3, b_1, b_2, p, alpha, beta, r, k): """ Description ----------- This funciton simulate the spread of two contagions on two different networks, where contagions are correlated as described in the project report. 
Parameters ---------- A_1: n x n scipy sparse matrix, int {0, 1} The adjacency matrix of the social layer A_2: n x n scipy sparse matrix, int {0, 1} The adjacency matrix of the disease layer D_inverse: n x n scipy sparse matrix, float [0, 1] The inversed diagonal matrix of the social layer. a_1: n x 1 scipy sparse matrix, int {0, 1} (a_1)_i = 1 if the person i is prosocial, and (a_1)_i = 0 otherwise. t_2: n x 1 numpy array, float [0, 1] (t_2)_i is threshold percentage of neighbors who wear masks for person i to wear a mask in the next iteration. t_3: n x 1 numpy array, float [0, 1] (t_3)_i is the threshold percentage of the overall infection of the population for person i to wear a mask in the next iteration. b_1: n x 1 scipy sparse matrix, int {0, 1} (b_1)_i = 1 if the person i wears a mask at the current iteration. b_2: n x 1 scipy sparse matrix, int {0, 1} (b_2)_1 = 1 if the person i is infected by the disease at the current iteration p: float [0, 1] Transimission probability of the disease alpha: The damping factor on p when the person himself wears a mask. beta: The damping factor on p when a neighbor of a person wears a mask. r: Recovery probability. k: The maximum number of time-steps. """ # Keep track of the dynamic: {time: [# of masks, # of infections]} # dynamic = {} # Compute the degree fraction matrix F = D_inverse @ A_1 F = sparse.csr_matrix(F) # The number of vertices n = np.shape(A_1)[0] # The one and zero vectors one = np.ones((n, 1), dtype = 'float') zero = np.zeros((n, 1), dtype = 'float') # The recovery vector: b_3 b_3 = np.zeros((n, 1), dtype = 'float') # Initially, no one has recovered. 
# The susceptible vector: b_4 b_4 = -b_2 - b_3 b_4[b_4 == 0.0] = 1.0 b_4[b_4 < 0.0] = 0.0 # The largest fraction of infection reached throughout the time max_frac = 0.0 # The number of days the infection lasts days = 0 # total number of mask wearings (sum over all days) total_mask = 0 # mask vector thoughout the time mask_vector = [] #new # infection vector infection_vector = [] #new # The main loop for i in range(k): days += 1 mask_vector.append(float(np.count_nonzero(b_1) / n)) #new infection_vector.append(float(np.count_nonzero(b_2) / n)) #new # dynamic[i] = [np.count_nonzero(b_1), np.count_nonzero(b_2), np.count_nonzero(b_3), np.count_nonzero(b_4)] # need b_1_last to update the state of the second contagion b_1_last = b_1 b_4_last = b_4 b_2_last = b_2 # The fraction of total number of infections a_3 = np.count_nonzero(b_2) / float(n) # Update the max_frac if a_3 > max_frac: max_frac = a_3 # determine if the overall faction of infection exceed the threshold l_3 = -(t_3 - a_3) # Note that I cannot do a_3 - t_3 since a_3 is not a vector l_3[l_3 >= 0.0] = 1.0 l_3[l_3 < 0.0] = 0.0 # l3 = t_3 <= a_3 # Determine if the fraction of neighbors with wear face masks exceeds a threshold l_2 = F @ b_1_last - t_2 # sparse? l_2[l_2 >= 0.0] = 1.0 l_2[l_2 < 0.0] = 0.0 # l_2 = (F @ b_1_last) >= t_2 WORTH TRYING! # Update the mask state b_1 b_1 = a_1 + l_2 + l_3 # logical operation? b_1[b_1 >= 1.0] = 1.0 # sparse? 
#b_1 = np.logical_or(np.logical_or(a_1, l_2), l_3) WORTH TRYING total_mask += np.count_nonzero(b_1) # The # of infected neighbors of each v d = A_2 @ b_2_last # The # of infected neighbors with mask d_2 = A_2 @ np.multiply(b_1_last, b_2_last) # Very important to pass b_1_last here # The # of infected neighbors without mask d_1 = d - d_2 # Only susceptibles (b_4) can be infected #--------------------------------------------------# # h1 : the probability of not getting infected from neighbors who do not wear masks (1 - p or 1 - alpha p) temp = one - (b_1 * (1.0 - alpha)) # IMPORTANT: b_1_last vs b_1 (syn vs asyn) h_1 = one - (temp * p) # h2: contains the probability of not getting infected from neighbors who wear masks (1 - beta p or 1 - alpha beta p) h_2 = one - (temp * beta * p) temp = np.multiply(np.power(h_1, d_1), np.power(h_2, d_2)) q = np.multiply(b_4, one - temp) #--------------------------------------------------# # Has to flatten q to pass it to the binomial funciton q_f = q.flatten() # Compute newly infected nodes newly_infected = np.reshape(np.random.binomial(1, q_f), (-1 ,1)) # Computer R0 (do this before recovery) # R_0 = np.count_nonzero(newly_infected) / np.count_nonzero(b_2) # Recovery rr = np.random.choice([0, 1], size = (n, 1), p=[1.0 - r, r]) b_3 = np.logical_and(b_2, rr) + b_3 # update b_3 b_2 = b_2 - rr b_2[b_2 == -1] = 0.0 # Update b_2 b_2 = newly_infected + b_2 # Update the susceptible vector b_4 = -b_2 - b_3 b_4[b_4 == 0.0] = 1.0 b_4[b_4 < 0.0] = 0.0 # The control happens here if float(np.count_nonzero(b_2) / n) < float(np.count_nonzero(b_2_last) / n): a_1 = b_1 # Mask wearing people continue to wear masks # A fixed point is reached under zero infection if np.array_equal(b_2, zero): # print("A fixed point is reached at iteration {}".format(i)) average_mask = float(total_mask / days) # return round(float(np.count_nonzero(b_3) / n), 4), round(max_frac, 4), days, round(float(average_mask / n), 4) return infection_vector, mask_vector average_mask 
def covid_mask_prob_social(A_1, A_2, D_inverse, a_1, t_2, t_3, b_1, b_2, p,
                           alpha, beta, r, k, g_peer, g_fear):
    """Simulate a masking contagion and a disease contagion on two layers,
    where mask adoption is probabilistic (logistic in peer pressure and fear).

    Parameters
    ----------
    A_1 : n x n scipy sparse matrix, int {0, 1}
        Adjacency matrix of the social layer.
    A_2 : n x n scipy sparse matrix, int {0, 1}
        Adjacency matrix of the disease layer.
    D_inverse : n x n scipy sparse matrix, float [0, 1]
        Inverse degree (diagonal) matrix of the social layer.
    a_1 : n x 1 array-like, int {0, 1}
        (a_1)_i = 1 iff person i is prosocial (always masks).
    t_2 : n x 1 numpy array, float [0, 1]
        Per-person threshold on the fraction of masked neighbors.
    t_3 : n x 1 numpy array, float [0, 1]
        Per-person threshold on the overall infected fraction.
    b_1, b_2 : n x 1 array-like, int {0, 1}
        Current mask state and current infection state.
    p : float [0, 1]
        Disease transmission probability.
    alpha, beta : float
        Damping on p when the person / a neighbor wears a mask.
    r : float [0, 1]
        Recovery probability.
    k : int
        Maximum number of time-steps.
    g_peer, g_fear : float
        Growth rates of the logistic response to peer pressure and to fear.

    Returns
    -------
    tuple
        (fraction recovered, peak infected fraction, days elapsed,
        average masked fraction per day), each rounded to 4 decimals.
    """
    g_2 = g_peer
    g_3 = g_fear
    # Degree-fraction matrix: row i holds 1/deg(i) on i's social neighbors.
    F = D_inverse @ A_1
    F = sparse.csr_matrix(F)
    n = np.shape(A_1)[0]
    one = np.ones((n, 1), dtype='float')
    zero = np.zeros((n, 1), dtype='float')
    # Recovered vector; initially no one has recovered.
    b_3 = np.zeros((n, 1), dtype='float')
    # Susceptible vector: 1 where neither infected nor recovered.
    b_4 = -b_2 - b_3
    b_4[b_4 == 0.0] = 1.0
    b_4[b_4 < 0.0] = 0.0
    max_frac = 0.0          # peak infected fraction observed so far
    days = 0                # number of simulated days
    total_mask = 0          # sum of daily mask counts
    mask_vector = []        # masked fraction per day
    infection_vector = []   # infected fraction per day
    for i in range(k):
        days += 1
        mask_vector.append(float(np.count_nonzero(b_1) / n))
        infection_vector.append(float(np.count_nonzero(b_2) / n))
        # Snapshot last states: the disease step must see the previous masks.
        b_1_last = b_1
        b_4_last = b_4
        b_2_last = b_2
        # Overall infected fraction.
        a_3 = np.count_nonzero(b_2) / float(n)
        if a_3 > max_frac:
            max_frac = a_3
        # Fear response: P(mask) = 1 / (1 + e^{-g_fear (a_3 - t_3)}).
        # Written as -(t_3 - a_3) because a_3 is a scalar, t_3 a vector.
        p_3 = -(t_3 - a_3)
        p_3 = -g_3 * p_3
        p_3 = one + np.exp(p_3)
        p_3 = np.reciprocal(p_3)
        p_3 = p_3.flatten()
        l_3 = np.reshape(np.random.binomial(1, p_3), (-1, 1))
        # Peer pressure: P(mask) = 1 / (1 + e^{-g_peer (c - t_2)}) where c is
        # the fraction of masked neighbors.
        p_2 = F @ b_1_last - t_2
        p_2 = -g_2 * p_2
        p_2 = one + np.exp(p_2)
        p_2 = np.reciprocal(p_2)
        p_2 = p_2.flatten()
        l_2 = np.reshape(np.random.binomial(1, p_2), (-1, 1))
        # Mask update: prosocial OR peer-pressured OR fearful.
        b_1 = a_1 + l_2 + l_3
        b_1[b_1 >= 1.0] = 1.0
        total_mask += np.count_nonzero(b_1)
        # Infected-neighbor counts: total, masked, unmasked.
        d = A_2 @ b_2_last
        d_2 = A_2 @ np.multiply(b_1_last, b_2_last)  # must use b_1_last here
        d_1 = d - d_2
        # Per-node escape probabilities; b_1 (not b_1_last) makes the
        # protective effect of one's own new mask synchronous.
        temp = one - (b_1 * (1.0 - alpha))
        h_1 = one - (temp * p)            # unmasked neighbor: 1-p or 1-alpha*p
        h_2 = one - (temp * beta * p)     # masked neighbor: 1-beta*p or 1-alpha*beta*p
        temp = np.multiply(np.power(h_1, d_1), np.power(h_2, d_2))
        # Only susceptibles can be infected.
        q = np.multiply(b_4, one - temp)
        q_f = q.flatten()  # binomial wants a flat probability vector
        newly_infected = np.reshape(np.random.binomial(1, q_f), (-1, 1))
        # Recovery draws.
        rr = np.random.choice([0, 1], size=(n, 1), p=[1.0 - r, r])
        b_3 = np.logical_and(b_2, rr) + b_3
        b_2 = b_2 - rr
        b_2[b_2 == -1] = 0.0
        b_2 = newly_infected + b_2
        # Refresh the susceptible vector.
        b_4 = -b_2 - b_3
        b_4[b_4 == 0.0] = 1.0
        b_4[b_4 < 0.0] = 0.0
        # Fixed point: zero infection.
        if np.array_equal(b_2, zero):
            average_mask = float(total_mask / days)
            return round(float(np.count_nonzero(b_3) / n), 4), round(max_frac, 4), days, round(float(average_mask / n), 4)
    # Bug fix: average_mask was previously assigned only inside the
    # fixed-point branch, so exhausting k steps raised NameError here.
    average_mask = float(total_mask / days)
    return round(float(np.count_nonzero(b_3) / n), 4), round(max_frac, 4), days, round(float(average_mask / n), 4)
def covid_mask_habit(A_1, A_2, D_inverse, a_1, t_2, t_3, b_1, b_2, p,
                     alpha, beta, r, k, habit_p):
    """Simulate masking and disease spread on two layers with habit formation:
    each current mask wearer independently keeps the mask next step with
    probability habit_p, in addition to the threshold-based adoption rules.

    Parameters
    ----------
    A_1 : n x n scipy sparse matrix, int {0, 1}
        Adjacency matrix of the social layer.
    A_2 : n x n scipy sparse matrix, int {0, 1}
        Adjacency matrix of the disease layer.
    D_inverse : n x n scipy sparse matrix, float [0, 1]
        Inverse degree (diagonal) matrix of the social layer.
    a_1 : n x 1 array-like, int {0, 1}
        (a_1)_i = 1 iff person i is prosocial (always masks).
    t_2 : n x 1 numpy array, float [0, 1]
        Per-person threshold on the fraction of masked neighbors.
    t_3 : n x 1 numpy array, float [0, 1]
        Per-person threshold on the overall infected fraction.
    b_1, b_2 : n x 1 array-like, int {0, 1}
        Current mask state and current infection state.
    p : float [0, 1]
        Disease transmission probability.
    alpha, beta : float
        Damping on p when the person / a neighbor wears a mask.
    r : float [0, 1]
        Recovery probability.
    k : int
        Maximum number of time-steps.
    habit_p : float [0, 1]
        Probability that a current mask wearer keeps masking out of habit.

    Returns
    -------
    tuple
        (fraction recovered, peak infected fraction, days elapsed,
        average masked fraction per day), each rounded to 4 decimals.
    """
    # Degree-fraction matrix of the social layer.
    F = D_inverse @ A_1
    F = sparse.csr_matrix(F)
    n = np.shape(A_1)[0]
    one = np.ones((n, 1), dtype='float')
    zero = np.zeros((n, 1), dtype='float')
    # Recovered vector; initially no one has recovered.
    b_3 = np.zeros((n, 1), dtype='float')
    # Susceptible vector: 1 where neither infected nor recovered.
    b_4 = -b_2 - b_3
    b_4[b_4 == 0.0] = 1.0
    b_4[b_4 < 0.0] = 0.0
    max_frac = 0.0          # peak infected fraction observed so far
    days = 0                # number of simulated days
    total_mask = 0          # sum of daily mask counts
    mask_vector = []        # masked fraction per day
    infection_vector = []   # infected fraction per day
    for i in range(k):
        days += 1
        mask_vector.append(float(np.count_nonzero(b_1) / n))
        infection_vector.append(float(np.count_nonzero(b_2) / n))
        # Snapshot last states: the disease step must see the previous masks.
        b_1_last = b_1
        b_4_last = b_4
        b_2_last = b_2
        # Habit formation: each current wearer keeps the mask w.p. habit_p.
        l_habit = np.random.choice([0, 1], size=(n, 1), p=[1.0 - habit_p, habit_p])
        l_habit = np.multiply(l_habit, b_1_last)
        # Overall infected fraction.
        a_3 = np.count_nonzero(b_2) / float(n)
        if a_3 > max_frac:
            max_frac = a_3
        # Fear threshold: written as -(t_3 - a_3) since a_3 is a scalar.
        l_3 = -(t_3 - a_3)
        l_3[l_3 >= 0.0] = 1.0
        l_3[l_3 < 0.0] = 0.0
        # Peer threshold: fraction of masked neighbors vs t_2.
        l_2 = F @ b_1_last - t_2
        l_2[l_2 >= 0.0] = 1.0
        l_2[l_2 < 0.0] = 0.0
        # Mask update: prosocial OR peers OR fear OR habit.
        b_1 = a_1 + l_2 + l_3 + l_habit
        b_1[b_1 >= 1.0] = 1.0
        total_mask += np.count_nonzero(b_1)
        # Infected-neighbor counts: total, masked, unmasked.
        d = A_2 @ b_2_last
        d_2 = A_2 @ np.multiply(b_1_last, b_2_last)  # must use b_1_last here
        d_1 = d - d_2
        # Per-node escape probabilities (b_1 = synchronous self-protection).
        temp = one - (b_1 * (1.0 - alpha))
        h_1 = one - (temp * p)            # unmasked neighbor
        h_2 = one - (temp * beta * p)     # masked neighbor
        temp = np.multiply(np.power(h_1, d_1), np.power(h_2, d_2))
        # Only susceptibles can be infected.
        q = np.multiply(b_4, one - temp)
        q_f = q.flatten()
        newly_infected = np.reshape(np.random.binomial(1, q_f), (-1, 1))
        # Recovery draws.
        rr = np.random.choice([0, 1], size=(n, 1), p=[1.0 - r, r])
        b_3 = np.logical_and(b_2, rr) + b_3
        b_2 = b_2 - rr
        b_2[b_2 == -1] = 0.0
        b_2 = newly_infected + b_2
        # Refresh the susceptible vector.
        b_4 = -b_2 - b_3
        b_4[b_4 == 0.0] = 1.0
        b_4[b_4 < 0.0] = 0.0
        # Fixed point: zero infection.
        if np.array_equal(b_2, zero):
            average_mask = float(total_mask / days)
            return round(float(np.count_nonzero(b_3) / n), 4), round(max_frac, 4), days, round(float(average_mask / n), 4)
    # Bug fix: average_mask was previously assigned only inside the
    # fixed-point branch, so exhausting k steps raised NameError here.
    average_mask = float(total_mask / days)
    return round(float(np.count_nonzero(b_3) / n), 4), round(max_frac, 4), days, round(float(average_mask / n), 4)
def covid_mask_strategy_game(A_1, A_2, D_inverse, a_1, t_2_1, t_2_2, t_3,
                             b_1, b_2, p, alpha, beta, r, k):
    """Simulate masking and disease spread on two layers where peer-driven
    masking follows a band rule: a person masks only when the fraction of
    masked neighbors lies in the half-open band [t_2_1, t_2_2).

    Parameters
    ----------
    A_1 : n x n scipy sparse matrix, int {0, 1}
        Adjacency matrix of the social layer.
    A_2 : n x n scipy sparse matrix, int {0, 1}
        Adjacency matrix of the disease layer.
    D_inverse : n x n scipy sparse matrix, float [0, 1]
        Inverse degree (diagonal) matrix of the social layer.
    a_1 : n x 1 array-like, int {0, 1}
        (a_1)_i = 1 iff person i is prosocial (always masks).
    t_2_1 : n x 1 numpy array, float [0, 1]
        Lower peer-fraction threshold (inclusive).
    t_2_2 : n x 1 numpy array, float [0, 1]
        Upper peer-fraction threshold (exclusive).
    t_3 : n x 1 numpy array, float [0, 1]
        Per-person threshold on the overall infected fraction.
    b_1, b_2 : n x 1 array-like, int {0, 1}
        Current mask state and current infection state.
    p : float [0, 1]
        Disease transmission probability.
    alpha, beta : float
        Damping on p when the person / a neighbor wears a mask.
    r : float [0, 1]
        Recovery probability.
    k : int
        Maximum number of time-steps.

    Returns
    -------
    (list, list)
        Per-day infected fractions and per-day masked fractions.
    """
    # Degree-fraction matrix of the social layer.
    F = D_inverse @ A_1
    F = sparse.csr_matrix(F)
    n = np.shape(A_1)[0]
    one = np.ones((n, 1), dtype='float')
    zero = np.zeros((n, 1), dtype='float')
    # Recovered vector; initially no one has recovered.
    b_3 = np.zeros((n, 1), dtype='float')
    # Susceptible vector: 1 where neither infected nor recovered.
    b_4 = -b_2 - b_3
    b_4[b_4 == 0.0] = 1.0
    b_4[b_4 < 0.0] = 0.0
    max_frac = 0.0          # peak infected fraction observed so far
    days = 0                # number of simulated days
    total_mask = 0          # sum of daily mask counts
    mask_vector = []        # masked fraction per day
    infection_vector = []   # infected fraction per day
    for i in range(k):
        days += 1
        mask_vector.append(float(np.count_nonzero(b_1) / n))
        infection_vector.append(float(np.count_nonzero(b_2) / n))
        # Snapshot last states: the disease step must see the previous masks.
        b_1_last = b_1
        b_4_last = b_4
        b_2_last = b_2
        # Overall infected fraction.
        a_3 = np.count_nonzero(b_2) / float(n)
        if a_3 > max_frac:
            max_frac = a_3
        # Fear threshold: written as -(t_3 - a_3) since a_3 is a scalar.
        l_3 = -(t_3 - a_3)
        l_3[l_3 >= 0.0] = 1.0
        l_3[l_3 < 0.0] = 0.0
        # Band rule on the masked-neighbor fraction T:
        # l_2 = 1 iff t_2_1 <= T < t_2_2.
        T = F @ b_1_last
        l_2_1 = T - t_2_1
        l_2_1[l_2_1 >= 0.0] = 1.0
        l_2_1[l_2_1 < 0.0] = 0.0
        l_2_2 = t_2_2 - T
        l_2_2[l_2_2 > 0.0] = 1.0
        l_2_2[l_2_2 <= 0.0] = 0.0
        l_2 = np.multiply(l_2_1, l_2_2)
        # Mask update: prosocial OR in-band peers OR fear.
        b_1 = a_1 + l_2 + l_3
        b_1[b_1 >= 1.0] = 1.0
        total_mask += np.count_nonzero(b_1)
        # Infected-neighbor counts: total, masked, unmasked.
        d = A_2 @ b_2_last
        d_2 = A_2 @ np.multiply(b_1_last, b_2_last)  # must use b_1_last here
        d_1 = d - d_2
        # Per-node escape probabilities (b_1 = synchronous self-protection).
        temp = one - (b_1 * (1.0 - alpha))
        h_1 = one - (temp * p)            # unmasked neighbor
        h_2 = one - (temp * beta * p)     # masked neighbor
        temp = np.multiply(np.power(h_1, d_1), np.power(h_2, d_2))
        # Only susceptibles can be infected.
        q = np.multiply(b_4, one - temp)
        q_f = q.flatten()
        newly_infected = np.reshape(np.random.binomial(1, q_f), (-1, 1))
        # Recovery draws.
        rr = np.random.choice([0, 1], size=(n, 1), p=[1.0 - r, r])
        b_3 = np.logical_and(b_2, rr) + b_3
        b_2 = b_2 - rr
        b_2[b_2 == -1] = 0.0
        b_2 = newly_infected + b_2
        # Refresh the susceptible vector.
        b_4 = -b_2 - b_3
        b_4[b_4 == 0.0] = 1.0
        b_4[b_4 < 0.0] = 0.0
        # Fixed point: zero infection. (The dead average_mask assignment the
        # original computed here was removed; this variant returns the
        # per-day trajectories, not the summary tuple.)
        if np.array_equal(b_2, zero):
            return infection_vector, mask_vector
    return infection_vector, mask_vector
def covid_mask_asymptomatic(A_1, A_2, D_inverse, a_1, t_2, t_3, b_1, b_2, p,
                            alpha, beta, r, k):
    """Simulate masking and disease spread on two layers where symptomatic
    new cases self-isolate: each new infection is asymptomatic (and keeps
    circulating) with probability 0.415, otherwise it moves straight to the
    removed compartment.

    Parameters
    ----------
    A_1 : n x n scipy sparse matrix, int {0, 1}
        Adjacency matrix of the social layer.
    A_2 : n x n scipy sparse matrix, int {0, 1}
        Adjacency matrix of the disease layer.
    D_inverse : n x n scipy sparse matrix, float [0, 1]
        Inverse degree (diagonal) matrix of the social layer.
    a_1 : n x 1 array-like, int {0, 1}
        (a_1)_i = 1 iff person i is prosocial (always masks).
    t_2 : n x 1 numpy array, float [0, 1]
        Per-person threshold on the fraction of masked neighbors.
    t_3 : n x 1 numpy array, float [0, 1]
        Per-person threshold on the overall infected fraction.
    b_1, b_2 : n x 1 array-like, int {0, 1}
        Current mask state and current infection state.
    p : float [0, 1]
        Disease transmission probability.
    alpha, beta : float
        Damping on p when the person / a neighbor wears a mask.
    r : float [0, 1]
        Recovery probability.
    k : int
        Maximum number of time-steps.

    Returns
    -------
    tuple
        (fraction removed, peak infected fraction, days elapsed,
        average masked fraction per day), each rounded to 4 decimals.
    """
    # Degree-fraction matrix of the social layer.
    F = D_inverse @ A_1
    F = sparse.csr_matrix(F)
    n = np.shape(A_1)[0]
    one = np.ones((n, 1), dtype='float')
    zero = np.zeros((n, 1), dtype='float')
    # Removed vector; initially no one has recovered/isolated.
    b_3 = np.zeros((n, 1), dtype='float')
    # Susceptible vector: 1 where neither infected nor removed.
    b_4 = -b_2 - b_3
    b_4[b_4 == 0.0] = 1.0
    b_4[b_4 < 0.0] = 0.0
    max_frac = 0.0          # peak infected fraction observed so far
    days = 0                # number of simulated days
    total_mask = 0          # sum of daily mask counts
    mask_vector = []        # masked fraction per day
    infection_vector = []   # infected fraction per day
    for i in range(k):
        days += 1
        mask_vector.append(float(np.count_nonzero(b_1) / n))
        infection_vector.append(float(np.count_nonzero(b_2) / n))
        # Snapshot last states: the disease step must see the previous masks.
        b_1_last = b_1
        b_4_last = b_4
        b_2_last = b_2
        # Overall infected fraction.
        a_3 = np.count_nonzero(b_2) / float(n)
        if a_3 > max_frac:
            max_frac = a_3
        # Fear threshold: written as -(t_3 - a_3) since a_3 is a scalar.
        l_3 = -(t_3 - a_3)
        l_3[l_3 >= 0.0] = 1.0
        l_3[l_3 < 0.0] = 0.0
        # Peer threshold: fraction of masked neighbors vs t_2.
        l_2 = F @ b_1_last - t_2
        l_2[l_2 >= 0.0] = 1.0
        l_2[l_2 < 0.0] = 0.0
        # Mask update: prosocial OR peers OR fear.
        b_1 = a_1 + l_2 + l_3
        b_1[b_1 >= 1.0] = 1.0
        total_mask += np.count_nonzero(b_1)
        # Infected-neighbor counts: total, masked, unmasked.
        d = A_2 @ b_2_last
        d_2 = A_2 @ np.multiply(b_1_last, b_2_last)  # must use b_1_last here
        d_1 = d - d_2
        # Per-node escape probabilities (b_1 = synchronous self-protection).
        temp = one - (b_1 * (1.0 - alpha))
        h_1 = one - (temp * p)            # unmasked neighbor
        h_2 = one - (temp * beta * p)     # masked neighbor
        temp = np.multiply(np.power(h_1, d_1), np.power(h_2, d_2))
        # Only susceptibles can be infected.
        q = np.multiply(b_4, one - temp)
        q_f = q.flatten()
        newly_infected = np.reshape(np.random.binomial(1, q_f), (-1, 1))
        # P(asymptomatic) = 0.35; P(symptomatic self-isolates) = 0.9, so a
        # new case keeps circulating with probability 1 - 0.65 * 0.9 = 0.415.
        aym = np.random.choice([0, 1], size=(n, 1), p=[1.0 - 0.415, 0.415])
        aym = np.multiply(newly_infected, aym)
        sym = newly_infected - aym
        # Recovery draws; symptomatic new cases jump straight to removed.
        rr = np.random.choice([0, 1], size=(n, 1), p=[1.0 - r, r])
        b_3 = np.logical_and(b_2, rr) + b_3 + sym
        b_2 = b_2 - rr
        b_2[b_2 == -1] = 0.0
        b_2 = aym + b_2
        # Refresh the susceptible vector.
        b_4 = -b_2 - b_3
        b_4[b_4 == 0.0] = 1.0
        b_4[b_4 < 0.0] = 0.0
        # Fixed point: zero infection.
        if np.array_equal(b_2, zero):
            average_mask = float(total_mask / days)
            return round(float(np.count_nonzero(b_3) / n), 4), round(max_frac, 4), days, round(float(average_mask / n), 4)
    # Bug fix: average_mask was previously assigned only inside the
    # fixed-point branch, so exhausting k steps raised NameError here.
    average_mask = float(total_mask / days)
    return round(float(np.count_nonzero(b_3) / n), 4), round(max_frac, 4), days, round(float(average_mask / n), 4)
the threshold l_3 = -(t_3 - a_3) # Note that I cannot do a_3 - t_3 since a_3 is not a vector l_3[l_3 >= 0.0] = 1.0 l_3[l_3 < 0.0] = 0.0 #l3 = t_3 <= a_3 # Determine if the fraction of neighbors with wear face masks exceeds a threshold l_2 = F @ b_1_last - t_2 # sparse? l_2[l_2 >= 0.0] = 1.0 l_2[l_2 < 0.0] = 0.0 #l_2 = (F @ b_1_last) >= t_2 WORTH TRYING! # Update the mask state b_1 b_1 = a_1 + l_2 + l_3 # logical operation? b_1[b_1 >= 1.0] = 1.0 # sparse? #b_1 = np.logical_or(np.logical_or(a_1, l_2), l_3) WORTH TRYING # The # of infected neighbors of each v d = A @ b_2_last # The # of infected neighbors with mask d_2 = A @ np.multiply(b_1_last, b_2_last) # Very important to pass b_1_last here # The # of infected neighbors without mask d_1 = d - d_2 #Only susceptibles (b_4) can be infected #--------------------------------------------------# # h1 : the probability of not getting infected from neighbors who do not wear masks (1 - p or 1 - alpha p) temp = one - (b_1 * (1.0 - alpha)) # IMPORTANT!!!!! b_1_last? (syn vs asyn) h_1 = one - (temp * p) # h2: contains the probability of not getting infected from neighbors who wear masks (1 - beta p or 1 - alpha beta p) h_2 = one - (temp * beta * p) temp = np.multiply(np.power(h_1, d_1), np.power(h_2, d_2)) q = np.multiply(b_4, one - temp) #--------------------------------------------------# # Has to flatten q to pass it to the binomial funciton q_f = q.flatten() # Compute newly infected nodes newly_infected = np.reshape(np.random.binomial(1, q_f), (-1 ,1)) # Recovery rr = np.random.choice([0, 1], size = (n, 1), p=[1.0 - r, r]) b_2 = b_2 - rr b_2[b_2 == -1] = 0.0 # Update b_2 b_2 = newly_infected + b_2 # Update the susceptible vector b_4 = -b_2 - b_3 b_4[b_4 == 0.0] = 1.0 b_4[b_4 < 0.0] = 0.0 # Determine if a fixed point it reached # if np.array_equal(b_2, b_2_last):
33.957539
125
0.559907
10,046
62,380
3.305594
0.032053
0.011684
0.040051
0.039749
0.952
0.943989
0.940376
0.935497
0.932607
0.929113
0
0.050182
0.330106
62,380
1,836
126
33.976035
0.744496
0.534017
0
0.870175
0
0
0.013914
0
0
0
0
0
0
1
0.021053
false
0
0.005263
0
0.064912
0.010526
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
db489edbf6ffe9cb65bea0ea1a10002559149b6a
26,669
py
Python
chembl_webservices/tests.py
chembl/chembl_new_webservices
59fb52665cc1fc09a495d9a8c118687ddd0ad781
[ "Apache-2.0" ]
16
2015-02-20T15:54:56.000Z
2022-03-04T15:33:11.000Z
chembl_webservices/tests.py
chembl/chembl_webservices_2
59fb52665cc1fc09a495d9a8c118687ddd0ad781
[ "Apache-2.0" ]
144
2015-02-18T22:14:18.000Z
2022-03-07T13:01:20.000Z
chembl_webservices/tests.py
chembl/chembl_new_webservices
59fb52665cc1fc09a495d9a8c118687ddd0ad781
[ "Apache-2.0" ]
5
2015-03-03T12:58:29.000Z
2020-11-03T21:16:20.000Z
class ActivityResourceTest(ResourceTestCase):
    """Smoke tests for the activity resource: the list, detail, and
    multiple-id endpoints must each return valid JSON and XML."""

    def setUp(self):
        super(ActivityResourceTest, self).setUp()
        self.resource_class = ActivityResource
        # Pull two rows from the resource queryset to build sample URLs.
        sample = self.resource_class._meta.queryset._clone()[0:2]
        first, second = sample[0], sample[1]
        endpoint = BASE_URL + self.resource_class._meta.resource_name
        self.resource_url = endpoint
        self.detail_url = endpoint + '/%s' % first.pk
        self.multiple_url = endpoint + '/set/%s;%s' % (first.pk, second.pk)

    def test_get_list_json(self):
        response = self.api_client.get(self.resource_url, format='json')
        self.assertValidJSONResponse(response)

    def test_get_list_xml(self):
        response = self.api_client.get(self.resource_url, format='xml')
        self.assertValidXMLResponse(response)

    def test_get_detail_json(self):
        response = self.api_client.get(self.detail_url, format='json')
        self.assertValidJSONResponse(response)

    def test_get_detail_xml(self):
        response = self.api_client.get(self.detail_url, format='xml')
        self.assertValidXMLResponse(response)

    def test_get_multiple_json(self):
        response = self.api_client.get(self.multiple_url, format='json')
        self.assertValidJSONResponse(response)

    def test_get_multiple_xml(self):
        response = self.api_client.get(self.multiple_url, format='xml')
        self.assertValidXMLResponse(response)
'/%s' % first.chembl_id self.multiple_url = self.resource_url + '/set/%s;%s' % (first.chembl_id, second.chembl_id) def test_get_list_json(self): resp = self.api_client.get(self.resource_url, format='json') self.assertValidJSONResponse(resp) def test_get_list_xml(self): resp = self.api_client.get(self.resource_url, format='xml') self.assertValidXMLResponse(resp) def test_get_detail_json(self): resp = self.api_client.get(self.detail_url, format='json') self.assertValidJSONResponse(resp) def test_get_detail_xml(self): resp = self.api_client.get(self.detail_url, format='xml') self.assertValidXMLResponse(resp) def test_get_multiple_json(self): resp = self.api_client.get(self.multiple_url, format='json') self.assertValidJSONResponse(resp) def test_get_multiple_xml(self): resp = self.api_client.get(self.multiple_url, format='xml') self.assertValidXMLResponse(resp) #----------------------------------------------------------------------------------------------------------------------- class AtcResourceTest(ResourceTestCase): def setUp(self): super(AtcResourceTest, self).setUp() self.resource_class = AtcResource first_two = self.resource_class._meta.queryset._clone()[0:2] first = first_two[0] second = first_two[1] self.resource_url = BASE_URL + self.resource_class._meta.resource_name self.detail_url = self.resource_url + '/%s' % first.pk self.multiple_url = self.resource_url + '/set/%s;%s' % (first.pk, second.pk) def test_get_list_json(self): resp = self.api_client.get(self.resource_url, format='json') self.assertValidJSONResponse(resp) def test_get_list_xml(self): resp = self.api_client.get(self.resource_url, format='xml') self.assertValidXMLResponse(resp) def test_get_detail_json(self): resp = self.api_client.get(self.detail_url, format='json') self.assertValidJSONResponse(resp) def test_get_detail_xml(self): resp = self.api_client.get(self.detail_url, format='xml') self.assertValidXMLResponse(resp) def test_get_multiple_json(self): resp = 
class BindingSiteResourceTest(ResourceTestCase):
    """Smoke tests for the binding-site resource: the list, detail, and
    multiple-id endpoints must each return valid JSON and XML."""

    def setUp(self):
        super(BindingSiteResourceTest, self).setUp()
        self.resource_class = BindingSiteResource
        # Pull two rows from the resource queryset to build sample URLs.
        sample = self.resource_class._meta.queryset._clone()[0:2]
        first, second = sample[0], sample[1]
        endpoint = BASE_URL + self.resource_class._meta.resource_name
        self.resource_url = endpoint
        self.detail_url = endpoint + '/%s' % first.pk
        self.multiple_url = endpoint + '/set/%s;%s' % (first.pk, second.pk)

    def test_get_list_json(self):
        response = self.api_client.get(self.resource_url, format='json')
        self.assertValidJSONResponse(response)

    def test_get_list_xml(self):
        response = self.api_client.get(self.resource_url, format='xml')
        self.assertValidXMLResponse(response)

    def test_get_detail_json(self):
        response = self.api_client.get(self.detail_url, format='json')
        self.assertValidJSONResponse(response)

    def test_get_detail_xml(self):
        response = self.api_client.get(self.detail_url, format='xml')
        self.assertValidXMLResponse(response)

    def test_get_multiple_json(self):
        response = self.api_client.get(self.multiple_url, format='json')
        self.assertValidJSONResponse(response)

    def test_get_multiple_xml(self):
        response = self.api_client.get(self.multiple_url, format='xml')
        self.assertValidXMLResponse(response)
self.resource_class._meta.resource_name self.detail_url = self.resource_url + '/%s' % first.pk self.multiple_url = self.resource_url + '/set/%s;%s' % (first.pk, second.pk) def test_get_list_json(self): resp = self.api_client.get(self.resource_url, format='json') self.assertValidJSONResponse(resp) def test_get_list_xml(self): resp = self.api_client.get(self.resource_url, format='xml') self.assertValidXMLResponse(resp) def test_get_detail_json(self): resp = self.api_client.get(self.detail_url, format='json') self.assertValidJSONResponse(resp) def test_get_detail_xml(self): resp = self.api_client.get(self.detail_url, format='xml') self.assertValidXMLResponse(resp) def test_get_multiple_json(self): resp = self.api_client.get(self.multiple_url, format='json') self.assertValidJSONResponse(resp) def test_get_multiple_xml(self): resp = self.api_client.get(self.multiple_url, format='xml') self.assertValidXMLResponse(resp) #----------------------------------------------------------------------------------------------------------------------- class CellLineResourceTest(ResourceTestCase): def setUp(self): super(CellLineResourceTest, self).setUp() self.resource_class = CellLineResource first_two = self.resource_class._meta.queryset._clone()[0:2] first = first_two[0] second = first_two[1] self.resource_url = BASE_URL + self.resource_class._meta.resource_name self.detail_url = self.resource_url + '/%s' % first.pk self.multiple_url = self.resource_url + '/set/%s;%s' % (first.pk, second.pk) def test_get_list_json(self): resp = self.api_client.get(self.resource_url, format='json') self.assertValidJSONResponse(resp) def test_get_list_xml(self): resp = self.api_client.get(self.resource_url, format='xml') self.assertValidXMLResponse(resp) def test_get_detail_json(self): resp = self.api_client.get(self.detail_url, format='json') self.assertValidJSONResponse(resp) def test_get_detail_xml(self): resp = self.api_client.get(self.detail_url, format='xml') self.assertValidXMLResponse(resp) 
def test_get_multiple_json(self): resp = self.api_client.get(self.multiple_url, format='json') self.assertValidJSONResponse(resp) def test_get_multiple_xml(self): resp = self.api_client.get(self.multiple_url, format='xml') self.assertValidXMLResponse(resp) #----------------------------------------------------------------------------------------------------------------------- class DocsResourceTest(ResourceTestCase): def setUp(self): super(DocsResourceTest, self).setUp() self.resource_class = DocsResource first_two = self.resource_class._meta.queryset._clone()[0:2] first = first_two[0] second = first_two[1] self.resource_url = BASE_URL + self.resource_class._meta.resource_name self.detail_url = self.resource_url + '/%s' % first.chembl_id self.multiple_url = self.resource_url + '/set/%s;%s' % (first.chembl_id, second.chembl_id) def test_get_list_json(self): resp = self.api_client.get(self.resource_url, format='json') self.assertValidJSONResponse(resp) def test_get_list_xml(self): resp = self.api_client.get(self.resource_url, format='xml') self.assertValidXMLResponse(resp) def test_get_detail_json(self): resp = self.api_client.get(self.detail_url, format='json') self.assertValidJSONResponse(resp) def test_get_detail_xml(self): resp = self.api_client.get(self.detail_url, format='xml') self.assertValidXMLResponse(resp) def test_get_multiple_json(self): resp = self.api_client.get(self.multiple_url, format='json') self.assertValidJSONResponse(resp) def test_get_multiple_xml(self): resp = self.api_client.get(self.multiple_url, format='xml') self.assertValidXMLResponse(resp) #----------------------------------------------------------------------------------------------------------------------- class ImageResourceTest(ResourceTestCase): def setUp(self): super(ImageResourceTest, self).setUp() self.resource_class = ImageResource first_two = self.resource_class._meta.queryset._clone()[0:2] first = first_two[0] self.resource_url = BASE_URL + 
self.resource_class._meta.resource_name self.detail_url = self.resource_url + '/%s' % first.molecule.chembl_id def test_get_image_json(self): resp = self.api_client.get(self.detail_url + '.json', format='json') self.assertValidJSONResponse(resp) def test_get_image_svg(self): resp = self.api_client.get(self.detail_url + '.svg', format='svg') self.assertHttpOK(resp) self.assertTrue(resp['Content-Type'].startswith('image/svg+xml')) def test_get_detail_png(self): resp = self.api_client.get(self.detail_url + '.png', format='png') self.assertHttpOK(resp) #----------------------------------------------------------------------------------------------------------------------- class MechanismResourceTest(ResourceTestCase): def setUp(self): super(MechanismResourceTest, self).setUp() self.resource_class = MechanismResource first_two = self.resource_class._meta.queryset._clone()[0:2] first = first_two[0] second = first_two[1] self.resource_url = BASE_URL + self.resource_class._meta.resource_name self.detail_url = self.resource_url + '/%s' % first.pk self.multiple_url = self.resource_url + '/set/%s;%s' % (first.pk, second.pk) def test_get_list_json(self): resp = self.api_client.get(self.resource_url, format='json') self.assertValidJSONResponse(resp) def test_get_list_xml(self): resp = self.api_client.get(self.resource_url, format='xml') self.assertValidXMLResponse(resp) def test_get_detail_json(self): resp = self.api_client.get(self.detail_url, format='json') self.assertValidJSONResponse(resp) def test_get_detail_xml(self): resp = self.api_client.get(self.detail_url, format='xml') self.assertValidXMLResponse(resp) def test_get_multiple_json(self): resp = self.api_client.get(self.multiple_url, format='json') self.assertValidJSONResponse(resp) def test_get_multiple_xml(self): resp = self.api_client.get(self.multiple_url, format='xml') self.assertValidXMLResponse(resp) 
#----------------------------------------------------------------------------------------------------------------------- class MoleculeResourceTest(ResourceTestCase): def setUp(self): super(MoleculeResourceTest, self).setUp() self.resource_class = MoleculeResource first_two = self.resource_class._meta.queryset._clone()[0:2] first = first_two[0] second = first_two[1] self.resource_url = BASE_URL + self.resource_class._meta.resource_name self.detail_url = self.resource_url + '/%s' % first.chembl_id self.multiple_url = self.resource_url + '/set/%s;%s' % (first.chembl_id, second.chembl_id) def test_get_list_json(self): resp = self.api_client.get(self.resource_url, format='json') self.assertValidJSONResponse(resp) def test_get_list_xml(self): resp = self.api_client.get(self.resource_url, format='xml') self.assertValidXMLResponse(resp) def test_get_detail_json(self): resp = self.api_client.get(self.detail_url, format='json') self.assertValidJSONResponse(resp) def test_get_detail_xml(self): resp = self.api_client.get(self.detail_url, format='xml') self.assertValidXMLResponse(resp) def test_get_multiple_json(self): resp = self.api_client.get(self.multiple_url, format='json') self.assertValidJSONResponse(resp) def test_get_multiple_xml(self): resp = self.api_client.get(self.multiple_url, format='xml') self.assertValidXMLResponse(resp) #----------------------------------------------------------------------------------------------------------------------- class MoleculeFormsResourceTest(ResourceTestCase): def setUp(self): super(MoleculeFormsResourceTest, self).setUp() self.resource_class = MoleculeFormsResource first_two = self.resource_class._meta.queryset._clone()[0:2] first = first_two[0] second = first_two[1] self.resource_url = BASE_URL + self.resource_class._meta.resource_name self.detail_url = self.resource_url + '/%s' % first.molecule.chembl_id self.multiple_url = self.resource_url + '/set/%s;%s' % (first.molecule.chembl_id, second.molecule.chembl_id) def 
test_get_list_json(self): resp = self.api_client.get(self.resource_url, format='json') self.assertValidJSONResponse(resp) def test_get_list_xml(self): resp = self.api_client.get(self.resource_url, format='xml') self.assertValidXMLResponse(resp) def test_get_detail_json(self): resp = self.api_client.get(self.detail_url, format='json') self.assertValidJSONResponse(resp) def test_get_detail_xml(self): resp = self.api_client.get(self.detail_url, format='xml') self.assertValidXMLResponse(resp) def test_get_multiple_json(self): resp = self.api_client.get(self.multiple_url, format='json') self.assertValidJSONResponse(resp) def test_get_multiple_xml(self): resp = self.api_client.get(self.multiple_url, format='xml') self.assertValidXMLResponse(resp) #----------------------------------------------------------------------------------------------------------------------- class ProteinClassResourceTest(ResourceTestCase): def setUp(self): super(ProteinClassResourceTest, self).setUp() self.resource_class = ProteinClassResource first_two = self.resource_class._meta.queryset._clone()[0:2] first = first_two[0] second = first_two[1] self.resource_url = BASE_URL + self.resource_class._meta.resource_name self.detail_url = self.resource_url + '/%s' % first.pk self.multiple_url = self.resource_url + '/set/%s;%s' % (first.pk, second.pk) def test_get_list_json(self): resp = self.api_client.get(self.resource_url, format='json') self.assertValidJSONResponse(resp) def test_get_list_xml(self): resp = self.api_client.get(self.resource_url, format='xml') self.assertValidXMLResponse(resp) def test_get_detail_json(self): resp = self.api_client.get(self.detail_url, format='json') self.assertValidJSONResponse(resp) def test_get_detail_xml(self): resp = self.api_client.get(self.detail_url, format='xml') self.assertValidXMLResponse(resp) def test_get_multiple_json(self): resp = self.api_client.get(self.multiple_url, format='json') self.assertValidJSONResponse(resp) def test_get_multiple_xml(self): 
resp = self.api_client.get(self.multiple_url, format='xml') self.assertValidXMLResponse(resp) #----------------------------------------------------------------------------------------------------------------------- class SimilarityResourceTest(ResourceTestCase): def setUp(self): super(SimilarityResourceTest, self).setUp() self.resource_class = SimilarityResource self.resource_url = BASE_URL + self.resource_class._meta.resource_name self.detail_url = self.resource_url + '/%s/%s' % ('COc1ccc2[C@@H]3[C@H](COc2c1)C(C)(C)OC4=C3C(=O)C(=O)C5=C4OC(C)(C)[C@@H]6COc7cc(OC)ccc7[C@H]56', 70) def test_get_detail_json(self): resp = self.api_client.get(self.detail_url, format='json') self.assertValidJSONResponse(resp) def test_get_detail_xml(self): resp = self.api_client.get(self.detail_url, format='xml') self.assertValidXMLResponse(resp) #----------------------------------------------------------------------------------------------------------------------- class SourceResourceTest(ResourceTestCase): def setUp(self): super(SourceResourceTest, self).setUp() self.resource_class = SourceResource first_two = self.resource_class._meta.queryset._clone()[0:2] first = first_two[0] second = first_two[1] self.resource_url = BASE_URL + self.resource_class._meta.resource_name self.detail_url = self.resource_url + '/%s' % first.pk self.multiple_url = self.resource_url + '/set/%s;%s' % (first.pk, second.pk) def test_get_list_json(self): resp = self.api_client.get(self.resource_url, format='json') self.assertValidJSONResponse(resp) def test_get_list_xml(self): resp = self.api_client.get(self.resource_url, format='xml') self.assertValidXMLResponse(resp) def test_get_detail_json(self): resp = self.api_client.get(self.detail_url, format='json') self.assertValidJSONResponse(resp) def test_get_detail_xml(self): resp = self.api_client.get(self.detail_url, format='xml') self.assertValidXMLResponse(resp) def test_get_multiple_json(self): resp = self.api_client.get(self.multiple_url, format='json') 
self.assertValidJSONResponse(resp) def test_get_multiple_xml(self): resp = self.api_client.get(self.multiple_url, format='xml') self.assertValidXMLResponse(resp) #----------------------------------------------------------------------------------------------------------------------- class StatusResourceTest(ResourceTestCase): def setUp(self): super(StatusResourceTest, self).setUp() self.resource_class = StatusResource self.resource_url = BASE_URL + self.resource_class._meta.resource_name def test_get_list_json(self): resp = self.api_client.get(self.resource_url, format='json') self.assertValidJSONResponse(resp) def test_get_list_xml(self): resp = self.api_client.get(self.resource_url, format='xml') self.assertValidXMLResponse(resp) #----------------------------------------------------------------------------------------------------------------------- class SubstructureResourceTest(ResourceTestCase): def setUp(self): super(SubstructureResourceTest, self).setUp() self.resource_class = SubstructureResource self.resource_url = BASE_URL + self.resource_class._meta.resource_name self.detail_url = self.resource_url + '/%s' % 'CN(CCCN)c1cccc2ccccc12' def test_get_detail_json(self): resp = self.api_client.get(self.detail_url, format='json') self.assertValidJSONResponse(resp) def test_get_detail_xml(self): resp = self.api_client.get(self.detail_url, format='xml') self.assertValidXMLResponse(resp) #----------------------------------------------------------------------------------------------------------------------- class TargetResourceTest(ResourceTestCase): def setUp(self): super(TargetResourceTest, self).setUp() self.resource_class = TargetResource first_two = self.resource_class._meta.queryset._clone()[0:2] first = first_two[0] second = first_two[1] self.resource_url = BASE_URL + self.resource_class._meta.resource_name self.detail_url = self.resource_url + '/%s' % first.chembl_id self.multiple_url = self.resource_url + '/set/%s;%s' % (first.chembl_id, second.chembl_id) def 
test_get_list_json(self): resp = self.api_client.get(self.resource_url, format='json') self.assertValidJSONResponse(resp) def test_get_list_xml(self): resp = self.api_client.get(self.resource_url, format='xml') self.assertValidXMLResponse(resp) def test_get_detail_json(self): resp = self.api_client.get(self.detail_url, format='json') self.assertValidJSONResponse(resp) def test_get_detail_xml(self): resp = self.api_client.get(self.detail_url, format='xml') self.assertValidXMLResponse(resp) def test_get_multiple_json(self): resp = self.api_client.get(self.multiple_url, format='json') self.assertValidJSONResponse(resp) def test_get_multiple_xml(self): resp = self.api_client.get(self.multiple_url, format='xml') self.assertValidXMLResponse(resp) #----------------------------------------------------------------------------------------------------------------------- class TargetComponentsResourceTest(ResourceTestCase): def setUp(self): super(TargetComponentsResourceTest, self).setUp() self.resource_class = TargetComponentsResource first_two = self.resource_class._meta.queryset._clone()[0:2] first = first_two[0] second = first_two[1] self.resource_url = BASE_URL + self.resource_class._meta.resource_name self.detail_url = self.resource_url + '/%s' % first.pk self.multiple_url = self.resource_url + '/set/%s;%s' % (first.pk, second.pk) def test_get_list_json(self): resp = self.api_client.get(self.resource_url, format='json') self.assertValidJSONResponse(resp) def test_get_list_xml(self): resp = self.api_client.get(self.resource_url, format='xml') self.assertValidXMLResponse(resp) def test_get_detail_json(self): resp = self.api_client.get(self.detail_url, format='json') self.assertValidJSONResponse(resp) def test_get_detail_xml(self): resp = self.api_client.get(self.detail_url, format='xml') self.assertValidXMLResponse(resp) def test_get_multiple_json(self): resp = self.api_client.get(self.multiple_url, format='json') self.assertValidJSONResponse(resp) def 
test_get_multiple_xml(self): resp = self.api_client.get(self.multiple_url, format='xml') self.assertValidXMLResponse(resp) #----------------------------------------------------------------------------------------------------------------------- class ChemblIdLookupResourceTest(ResourceTestCase): def setUp(self): super(ChemblIdLookupResourceTest, self).setUp() self.resource_class = ChemblIdLookupResource first_two = self.resource_class._meta.queryset._clone()[0:2] first = first_two[0] second = first_two[1] self.resource_url = BASE_URL + self.resource_class._meta.resource_name self.detail_url = self.resource_url + '/%s' % first.pk self.multiple_url = self.resource_url + '/set/%s;%s' % (first.pk, second.pk) def test_get_list_json(self): resp = self.api_client.get(self.resource_url, format='json') self.assertValidJSONResponse(resp) def test_get_list_xml(self): resp = self.api_client.get(self.resource_url, format='xml') self.assertValidXMLResponse(resp) def test_get_detail_json(self): resp = self.api_client.get(self.detail_url, format='json') self.assertValidJSONResponse(resp) def test_get_detail_xml(self): resp = self.api_client.get(self.detail_url, format='xml') self.assertValidXMLResponse(resp) def test_get_multiple_json(self): resp = self.api_client.get(self.multiple_url, format='json') self.assertValidJSONResponse(resp) def test_get_multiple_xml(self): resp = self.api_client.get(self.multiple_url, format='xml') self.assertValidXMLResponse(resp) #-----------------------------------------------------------------------------------------------------------------------
40.653963
157
0.630545
3,083
26,669
5.196886
0.035031
0.103358
0.06179
0.092685
0.903133
0.832168
0.831045
0.830733
0.82755
0.822931
0
0.00378
0.176722
26,669
656
158
40.653963
0.725952
0.089242
0
0.849372
0
0.002092
0.029913
0.004697
0
0
0
0
0.209205
1
0.246862
false
0
0.006276
0
0.292887
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
8
db49ff5d92d4cd7b72ce3adaa008689354d23b63
212
py
Python
hodolbot/models/__init__.py
solar0037/hodolbot
f758375efce2dede58d920d41cab4a8ad38d1d58
[ "MIT" ]
null
null
null
hodolbot/models/__init__.py
solar0037/hodolbot
f758375efce2dede58d920d41cab4a8ad38d1d58
[ "MIT" ]
3
2021-08-02T01:59:04.000Z
2021-08-02T01:59:15.000Z
hodolbot/models/__init__.py
solar0037/hodolbot
f758375efce2dede58d920d41cab4a8ad38d1d58
[ "MIT" ]
null
null
null
from hodolbot.models.covid19 import Covid19Model from hodolbot.models.ranking import ProgrammingModel, AnimeModel from hodolbot.models.stock import StockModel from hodolbot.models.developer import DeveloperModel
42.4
64
0.877358
25
212
7.44
0.52
0.258065
0.387097
0
0
0
0
0
0
0
0
0.020513
0.080189
212
4
65
53
0.933333
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
db58e1da519e33370e5c245f3fca29ec04d81657
108
py
Python
json_edit/komand_json_edit/actions/__init__.py
emartin-merrill-r7/insightconnect-plugins
a589745dbcc9f01d3e601431e77ab7221a84c117
[ "MIT" ]
1
2020-03-18T09:14:55.000Z
2020-03-18T09:14:55.000Z
json_edit/komand_json_edit/actions/__init__.py
OSSSP/insightconnect-plugins
846758dab745170cf1a8c146211a8bea9592e8ff
[ "MIT" ]
null
null
null
json_edit/komand_json_edit/actions/__init__.py
OSSSP/insightconnect-plugins
846758dab745170cf1a8c146211a8bea9592e8ff
[ "MIT" ]
null
null
null
# GENERATED BY KOMAND SDK - DO NOT EDIT from .delete.action import Delete from .update.action import Update
27
39
0.787037
17
108
5
0.705882
0.282353
0
0
0
0
0
0
0
0
0
0
0.157407
108
3
40
36
0.934066
0.342593
0
0
1
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
db677f5e02762b6dd7ce6864e5efbd7993e85651
230
py
Python
atcodertools/tools/templates/__init__.py
koba-e964/atcoder-tools
9a7221af631a141afccfcb6e26a017cd797ceb47
[ "MIT" ]
null
null
null
atcodertools/tools/templates/__init__.py
koba-e964/atcoder-tools
9a7221af631a141afccfcb6e26a017cd797ceb47
[ "MIT" ]
null
null
null
atcodertools/tools/templates/__init__.py
koba-e964/atcoder-tools
9a7221af631a141afccfcb6e26a017cd797ceb47
[ "MIT" ]
null
null
null
import os DEFAULT_TEMPLATE_DIR_PATH = os.path.dirname(os.path.abspath(__file__)) def get_default_template_path(lang: str): return os.path.abspath(os.path.join(DEFAULT_TEMPLATE_DIR_PATH, "default_template.{}".format(lang)))
28.75
103
0.791304
35
230
4.8
0.457143
0.357143
0.214286
0.261905
0
0
0
0
0
0
0
0
0.078261
230
7
104
32.857143
0.792453
0
0
0
0
0
0.082609
0
0
0
0
0
0
1
0.25
false
0
0.25
0.25
0.75
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
7
dbbba0959ba20daea8aa8eec1f24d31592a47e6b
104
py
Python
karp5/util/__init__.py
spraakbanken/karp-backend-v5
bfca9d0f29a1243ee8d817c6a7db8b30a7da1097
[ "MIT" ]
4
2018-01-09T10:20:22.000Z
2019-11-21T12:26:56.000Z
karp5/util/__init__.py
spraakbanken/karp-backend-v5
bfca9d0f29a1243ee8d817c6a7db8b30a7da1097
[ "MIT" ]
44
2018-03-23T13:59:13.000Z
2022-03-29T06:03:17.000Z
karp5/util/__init__.py
spraakbanken/karp-backend-v5
bfca9d0f29a1243ee8d817c6a7db8b30a7da1097
[ "MIT" ]
2
2018-01-07T12:08:32.000Z
2019-08-21T08:05:17.000Z
from .text import escape_control # from .text import control_escape from .text import unescape_control
20.8
34
0.826923
15
104
5.533333
0.4
0.289157
0.506024
0
0
0
0
0
0
0
0
0
0.134615
104
4
35
26
0.922222
0.307692
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
9183d3b03c088712ce048c7face5340dcf54af97
613,291
py
Python
python/phonenumbers/geodata/locale.py
AdaSupport/python-phonenumbers
8cfd93dadc6006cdf1bb3d24a779cd002e1c6661
[ "Apache-2.0" ]
null
null
null
python/phonenumbers/geodata/locale.py
AdaSupport/python-phonenumbers
8cfd93dadc6006cdf1bb3d24a779cd002e1c6661
[ "Apache-2.0" ]
null
null
null
python/phonenumbers/geodata/locale.py
AdaSupport/python-phonenumbers
8cfd93dadc6006cdf1bb3d24a779cd002e1c6661
[ "Apache-2.0" ]
1
2020-12-14T11:39:53.000Z
2020-12-14T11:39:53.000Z
"""Locale information. Holds a map from ISO 3166-1 country code (e.g. GB) to a dict. Each dict maps from an ISO 639-1 language code (e.g. ja) to the country's name in that language. Generated from java.util.Locale, generation info: java.version=1.8.0_51 java.vendor=Oracle Corporation os.name=Mac OS X os.arch=x86_64 os.version=10.14.6 Auto-generated file, do not edit by hand. """ from ..util import u LOCALE_DATA = { 'AD': {'aa':'Andorra','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0391\u03bd\u03b4\u03cc\u03c1\u03b1'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Andorre','fy':'*aa','ga':u('And\u00f3ra'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Andora','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30a2\u30f3\u30c9\u30e9'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc548\ub3c4\ub77c'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*aa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*id','ps':'*aa',
'pt':'*aa','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0410\u043d\u0434\u043e\u0440\u0440\u0430'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0410\u043d\u0434\u043e\u0440\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e2d\u0e31\u0e19\u0e14\u0e2d\u0e23\u0e4c\u0e23\u0e32'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'*aa','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u5b89\u9053\u5c14'),'zu':'*aa',}, 'AE': {'aa':'United Arab Emirates','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':u('\u0627\u0644\u0625\u0645\u0627\u0631\u0627\u062a'),'as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':u('Uni\u00f3 dels Emirats \u00c0rabs'),'ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Vereinigte Arabische Emirate','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0397\u03bd\u03c9\u03bc\u03ad\u03bd\u03b1 \u0391\u03c1\u03b1\u03b2\u03b9\u03ba\u03ac \u0395\u03bc\u03b9\u03c1\u03ac\u03c4\u03b1'),'en':'*aa','eo':'*aa','es':u('Emiratos \u00c1rabes Unidos'),'et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Emirats Arabes Unis','fy':'*aa','ga':u('Aontas na n\u00c9im\u00edr\u00edochta\u00ed Arabacha'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Uni Emirat Arab','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'Emirati Arabi 
Uniti','iu':'*aa','iw':'*aa','ja':u('\u30a2\u30e9\u30d6\u9996\u9577\u56fd\u9023\u90a6'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc544\ub78d\uc5d0\ubbf8\ub9ac\ud2b8'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'Emiriah Arab Bersatu','mt':u('Emirati G\u0127arab Maqg\u0127uda'),'my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Verenigde Arabische Emiraten','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'Zjednoczone Emiraty Arabskie','ps':'*aa','pt':u('Emirados \u00c1rabes Unidos'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u041e\u0431\u044a\u0435\u0434\u0438\u043d\u0435\u043d\u043d\u044b\u0435 \u0410\u0440\u0430\u0431\u0441\u043a\u0438\u0435 \u042d\u043c\u0438\u0440\u0430\u0442\u044b'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0423\u0458\u0435\u0434\u0438\u045a\u0435\u043d\u0438 \u0410\u0440\u0430\u043f\u0441\u043a\u0438 \u0415\u043c\u0438\u0440\u0430\u0442\u0438'),'ss':'*aa','st':'*aa','su':'*aa','sv':u('F\u00f6renade Arabemiraten'),'sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e2a\u0e2b\u0e23\u0e31\u0e10\u0e2d\u0e32\u0e2b\u0e23\u0e31\u0e1a\u0e40\u0e2d\u0e21\u0e34\u0e40\u0e23\u0e15\u0e2a\u0e4c'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('C\u00e1c Ti\u1ec3u V\u01b0\u01a1ng qu\u1ed1c A-r\u1eadp Th\u1ed1ng 
nh\u1ea5t'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u963f\u62c9\u4f2f\u8054\u5408\u914b\u957f\u56fd'),'zu':'*aa',}, 'AF': {'aa':'Afghanistan','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'Afganistan','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0391\u03c6\u03b3\u03b1\u03bd\u03b9\u03c3\u03c4\u03ac\u03bd'),'en':'*aa','eo':'*aa','es':u('Afganist\u00e1n'),'et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':u('An Afganast\u00e1in'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30a2\u30d5\u30ac\u30cb\u30b9\u30bf\u30f3'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc544\ud504\uac00\ub2c8\uc2a4\ud0c4'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*ca','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*ca','ps':'*aa','pt':u('Afeganist\u00e3o'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0410\u0444\u0433\u0430\u043d\u0438\u0441\u0442\u0430\u043d'),'rw':'*aa','sa':'*aa','sc':'
*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0410\u0432\u0433\u0430\u043d\u0438\u0441\u0442\u0430\u043d'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e2d\u0e31\u0e1f\u0e01\u0e32\u0e19\u0e34\u0e2a\u0e16\u0e32\u0e19'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('\u00c1p-ga-ni-xtan'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u963f\u5bcc\u6c57'),'zu':'*aa',}, 'AG': {'aa':'Antigua and Barbuda','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Antigua und Barbuda','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0391\u03bd\u03c4\u03af\u03b3\u03ba\u03bf\u03c5\u03b1 \u03ba\u03b1\u03b9 \u039c\u03c0\u03b1\u03c1\u03bc\u03c0\u03bf\u03cd\u03bd\u03c4\u03b1'),'en':'*aa','eo':'*aa','es':'Antigua y Barbuda','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Antigua et Barbuda','fy':'*aa','ga':'Antigua agus Barbuda','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Antigua dan Barbuda','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'Antigua e Barbuda','iu':'*aa','iw':'*aa','ja':u('\u30a2\u30f3\u30c1\u30b0\u30a2\u30d0\u30fc\u30d6\u30fc\u30c0'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc564\ud2f0\uac00 
\ubc14\ubd80\ub2e4'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*id','mt':'*aa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Antigua en Barbuda','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':u('Ant\u00edgua e Barbuda'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':'*aa','rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':'*aa','ss':'*aa','st':'*aa','su':'*aa','sv':'Antigua och Barbuda','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':'*aa','ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('An-ti-gu-a v\u00e0 Ba-bu-\u0111a'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u5b89\u63d0\u74dc\u548c\u5df4\u5e03\u8fbe'),'zu':'*aa',}, 'AI': 
{'aa':'Anguilla','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0391\u03bd\u03b3\u03ba\u03bf\u03c5\u03af\u03bb\u03b1'),'en':'*aa','eo':'*aa','es':'Anguila','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':'*aa','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30a2\u30f3\u30ae\u30e9'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc548\uae38\ub77c'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'Angwilla','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'*aa','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0410\u043d\u0433\u0443\u0438\u043b\u0430'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':'*aa','ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e2d\u0e31\u0e19\u0e01\u0e34\u0e25\u0e48\u0e32'),'ti':'
*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'*aa','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u5b89\u572d\u62c9'),'zu':'*aa',}, 'AL': {'aa':'Albania','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':u('Alb\u00e0nia'),'ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Albanien','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0391\u03bb\u03b2\u03b1\u03bd\u03af\u03b1'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Albanie','fy':'*aa','ga':u('An Alb\u00e1in'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30a2\u30eb\u30d0\u30cb\u30a2'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc54c\ubc14\ub2c8\uc544'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'Albanija','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':u('Albani\u00eb'),'nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':u('Alb\u00e2nia'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u
0410\u043b\u0431\u0430\u043d\u0438\u044f'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':u('Shqip\u00ebria'),'sr':u('\u0410\u043b\u0431\u0430\u043d\u0438\u0458\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*de','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e41\u0e2d\u0e25\u0e40\u0e1a\u0e40\u0e19\u0e35\u0e22'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'An-ba-ni','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u963f\u5c14\u5df4\u5c3c\u4e9a'),'zu':'*aa',}, 'AM': {'aa':'Armenia','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':u('Arm\u00e8nia'),'ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Armenien','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0391\u03c1\u03bc\u03b5\u03bd\u03af\u03b1'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':u('Arm\u00e9nie'),'fy':'*aa','ga':u('An 
Airm\u00e9in'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30a2\u30eb\u30e1\u30cb\u30a2'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc544\ub974\uba54\ub2c8\uc544'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'Armenja','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':u('Armeni\u00eb'),'nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':u('Arm\u00eania'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0410\u0440\u043c\u0435\u043d\u0438\u044f'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0410\u0440\u043c\u0435\u043d\u0438\u0458\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*de','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e2d\u0e32\u0e23\u0e4c\u0e21\u0e35\u0e40\u0e19\u0e35\u0e22'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('\u00c1c-m\u00ea-ni-a'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u4e9a\u7f8e\u5c3c\u4e9a'),'zu':'*aa',}, 'AN': {'aa':'Netherlands 
Antilles','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'Antilles Holandeses','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':u('Niederl\u00e4ndische Antillen'),'dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039f\u03bb\u03bb\u03b1\u03bd\u03b4\u03b9\u03ba\u03ad\u03c2 \u0391\u03bd\u03c4\u03af\u03bb\u03bb\u03b5\u03c2'),'en':'*aa','eo':'*aa','es':'Antillas Holandesas','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':u('Antilles N\u00e9erlandaises'),'fy':'*aa','ga':u('Antill\u00ed na h\u00cdsilt\u00edre'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Antilles Belanda','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'Antille Olandesi','iu':'*aa','iw':'*aa','ja':u('\u30aa\u30e9\u30f3\u30c0\u9818\u30a2\u30f3\u30c6\u30a3\u30eb\u8af8\u5cf6'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ub124\ub35c\ub780\ub4dc\ub839 \uc548\ud2f8\ub808\uc2a4'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':u('Antilles Olandi\u017ci'),'my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Nederlandse Antillen','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'Antyle Holenderskie','ps':'*aa','pt':'Antilhas 
Holandesas','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u041d\u0438\u0434\u0435\u0440\u043b\u0430\u043d\u0434\u0441\u043a\u0438\u0435 \u0410\u043d\u0442\u0438\u043b\u044c\u0441\u043a\u0438\u0435 \u043e\u0441\u0442\u0440\u043e\u0432\u0430'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0425\u043e\u043b\u0430\u043d\u0434\u0441\u043a\u0438 \u0410\u043d\u0442\u0438\u043b\u0438'),'ss':'*aa','st':'*aa','su':'*aa','sv':u('Nederl\u00e4ndska Antillerna '),'sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e40\u0e19\u0e40\u0e18\u0e2d\u0e23\u0e4c\u0e41\u0e25\u0e19\u0e14\u0e4c\u0e41\u0e2d\u0e19\u0e17\u0e34\u0e25\u0e25\u0e4c'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'*aa','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u8377\u5c5e\u5b89\u7684\u5217\u65af\u7fa4\u5c9b'),'zu':'*aa',}, 'AO': 
{'aa':'Angola','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0391\u03bd\u03b3\u03ba\u03cc\u03bb\u03b1'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':u('Ang\u00f3la'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30a2\u30f3\u30b4\u30e9'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc559\uace8\ub77c'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*aa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'*aa','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0410\u043d\u0433\u043e\u043b\u0430'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':'*ru','ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e2d\u0e31\u0e19\u0e42\u0e01\u0e25\u0e32'),'ti':'*aa','tk':'*aa','tl':'*a
a','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('\u0102ng-g\u00f4-la'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u5b89\u54e5\u62c9'),'zu':'*aa',}, 'AQ': {'aa':'Antarctica','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Antarktis','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0391\u03bd\u03c4\u03b1\u03c1\u03ba\u03c4\u03b9\u03ba\u03ae'),'en':'*aa','eo':'*aa','es':u('Ant\u00e1rtida'),'et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Antarctique','fy':'*aa','ga':'An Antartaice','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Antarktika','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'Antartide','iu':'*aa','iw':'*aa','ja':u('\u5357\u6975'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ub0a8\uadf9'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'Antartika','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'*es','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':'*aa','rw':'*aa','sa':'*aa
','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':'*aa','ss':'*aa','st':'*aa','su':'*aa','sv':'*de','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':'*aa','ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'*aa','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u5357\u6781\u6d32'),'zu':'*aa',}, 'AR': {'aa':'Argentina','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Argentinien','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0391\u03c1\u03b3\u03b5\u03bd\u03c4\u03b9\u03bd\u03ae'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Argentine','fy':'*aa','ga':u('An 
Airgint\u00edn'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30a2\u30eb\u30bc\u30f3\u30c1\u30f3'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc544\ub974\ud5e8\ud2f0\ub098'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':u('Ar\u0121entina'),'my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':u('Argentini\u00eb'),'nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'Argentyna','ps':'*aa','pt':'*aa','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0410\u0440\u0433\u0435\u043d\u0442\u0438\u043d\u0430'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':'*ru','ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e2d\u0e32\u0e23\u0e4c\u0e40\u0e08\u0e19\u0e15\u0e34\u0e19\u0e48\u0e32'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('\u00c1c-hen-ti-na'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u963f\u6839\u5ef7'),'zu':'*aa',}, 'AS': {'aa':'American 
Samoa','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Amerikanisch-Samoa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0391\u03bc\u03b5\u03c1\u03b9\u03ba\u03b1\u03bd\u03b9\u03ba\u03ae \u03a3\u03b1\u03bc\u03cc\u03b1'),'en':'*aa','eo':'*aa','es':'Samoa Americana','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':u('Samoa am\u00e9ricaines'),'fy':'*aa','ga':u('Sam\u00f3 Meirice\u00e1nach'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Samoa Amerika','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'Samoa americane','iu':'*aa','iw':'*aa','ja':u('\u30a2\u30e1\u30ea\u30ab\u30f3\u30b5\u30e2\u30a2'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ubbf8\uad6d\ub839 \uc0ac\ubaa8\uc544'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'Samoa Amerikana','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Amerikaans 
Samoa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'*es','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':'*aa','rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':'*aa','ss':'*aa','st':'*aa','su':'*aa','sv':'Amerikanska Samoa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':'*aa','ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'*aa','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u4e1c\u8428\u6469\u4e9a'),'zu':'*aa',}, 'AT': {'aa':'Austria','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':u('\u00c2ustria'),'ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':u('\u00d6sterreich'),'dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0391\u03c5\u03c3\u03c4\u03c1\u03af\u03b1'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Autriche','fy':'*aa','ga':'An 
Ostair','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30aa\u30fc\u30b9\u30c8\u30ea\u30a2'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc624\uc2a4\ud2b8\ub9ac\uc544'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'Awstrija','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Oostenrijk','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':u('\u00c1ustria'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0410\u0432\u0441\u0442\u0440\u0438\u044f'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0410\u0443\u0441\u0442\u0440\u0438\u0458\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':u('\u00d6sterrike'),'sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e2d\u0e2d\u0e2a\u0e40\u0e15\u0e23\u0e35\u0e22'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('\u00c1o'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u5965\u5730\u5229'),'zu':'*aa',}, 'AU': 
{'aa':'Australia','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':u('Austr\u00e0lia'),'ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Australien','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0391\u03c5\u03c3\u03c4\u03c1\u03b1\u03bb\u03af\u03b1'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Australie','fy':'*aa','ga':u('An Astr\u00e1il'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30aa\u30fc\u30b9\u30c8\u30e9\u30ea\u30a2'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc624\uc2a4\ud2b8\ub808\uc77c\ub9ac\uc544'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'Awstralja','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':u('Australi\u00eb'),'nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':u('Austr\u00e1lia'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0410\u0432\u0441\u0442\u0440\u0430\u043b\u0438\u044f'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0410\u0443\u0441\u0442\u0440\u04
30\u043b\u0438\u0458\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*de','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e2d\u0e2d\u0e2a\u0e40\u0e15\u0e23\u0e40\u0e25\u0e35\u0e22'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('\u00dac'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u6fb3\u5927\u5229\u4e9a'),'zu':'*aa',}, 'AW': {'aa':'Aruba','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0391\u03c1\u03bf\u03cd\u03bc\u03c0\u03b1'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':'*aa','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30a2\u30eb\u30d0\u5cf6'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc544\ub8e8\ubc14'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*aa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','o
r':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'*aa','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0410\u0440\u0443\u0431\u0430'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':'*ru','ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e2d\u0e32\u0e23\u0e39\u0e1a\u0e32'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'*aa','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u963f\u9c81\u5df4'),'zu':'*aa',}, 'AX': {'aa':u('\u00c5land Islands'),'ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Aaland-Inseln','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039d\u03ae\u03c3\u03bf\u03b9 Aland'),'en':'*aa','eo':'*aa','es':'Islas Aland','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':u('\u00celes \u00c5land'),'fy':'*aa','ga':'*aa','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'Isole Aland','iu':'*aa','iw':'*aa','ja':u('\u30aa\u30fc\u30e9\u30f3\u30c9\u8af8\u5cf6'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc62c\ub780\ub4dc 
\uc81c\ub3c4'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*aa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Alandeilanden','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'Ilhas Aland','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':'*aa','rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0410\u043b\u0430\u043d\u0434\u0441\u043a\u0430 \u043e\u0441\u0442\u0440\u0432\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':u('\u00c5land'),'sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':'*aa','ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'*aa','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u5965\u5170\u7fa4\u5c9b'),'zu':'*aa',}, 'AZ': {'aa':'Azerbaijan','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'Azerbaidjan','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Aserbaidschan','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0391\u03b6\u03b5\u03c1\u03bc\u03c0\u03b1\u03ca\u03c4\u03b6\u03ac\u03bd'),'en':'*aa','eo':'*aa','es':u('Azerbaiy\u00e1n'),'et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':u('Azerba\u00efdjan'),'fy':'*aa','ga':u('An 
Asarbaise\u00e1in'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'Azerbaigian','iu':'*aa','iw':'*aa','ja':u('\u30a2\u30bc\u30eb\u30d0\u30a4\u30b8\u30e3\u30f3'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc544\uc81c\ub974\ubc14\uc774\uc794'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':u('A\u017cerbaj\u0121an'),'my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Azerbeidzjan','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':u('Azerbejd\u017can'),'ps':'*aa','pt':u('Azerbaij\u00e3o'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0410\u0437\u0435\u0440\u0431\u0430\u0439\u0434\u0436\u0430\u043d'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0410\u0437\u0435\u0440\u0431\u0435\u0458\u045f\u0430\u043d'),'ss':'*aa','st':'*aa','su':'*aa','sv':'Azerbadjan','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e2d\u0e32\u0e40\u0e0b\u0e2d\u0e23\u0e4c\u0e44\u0e1a\u0e08\u0e31\u0e19'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('Ai-d\u00e9c-bai-gian'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u963f\u585e\u62dc\u7586'),'zu':'*aa',}, 'BA': {'aa':'Bosnia and 
Herzegovina','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':u('B\u00f2snia i Hercegovina'),'ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Bosnien und Herzegowina','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0392\u03bf\u03c3\u03bd\u03af\u03b1 - \u0395\u03c1\u03b6\u03b5\u03b3\u03bf\u03b2\u03af\u03bd\u03b7'),'en':'*aa','eo':'*aa','es':'Bosnia y Hercegovina','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':u('Bosnie-Herz\u00e9govine'),'fy':'*aa','ga':u('An Bhoisnia-Heirseagaiv\u00e9in'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Bosnia dan Herzegovina','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'Bosnia-Erzegovina','iu':'*aa','iw':'*aa','ja':u('\u30dc\u30b9\u30cb\u30a2\u30fb\u30d8\u30eb\u30c4\u30a7\u30b4\u30d3\u30ca'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ubcf4\uc2a4\ub2c8\uc544 \ud5e4\ub974\uccb4\uace0\ube44\ub098'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*id','mt':u('Bo\u017cnija \u0126er\u017cegovina'),'my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':u('Bosni\u00eb en Herzegovina'),'nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':u('Bo\u015bnia i 
Hercegowina'),'ps':'*aa','pt':u('B\u00f3snia-Herzegovina'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0411\u043e\u0441\u043d\u0438\u044f \u0438 \u0413\u0435\u0440\u0446\u0435\u0433\u043e\u0432\u0438\u043d\u0430'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0411\u043e\u0441\u043d\u0430 \u0438 \u0425\u0435\u0440\u0446\u0435\u0433\u043e\u0432\u0438\u043d\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'Bosnien och Herzegovina','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e1a\u0e2d\u0e2a\u0e40\u0e19\u0e35\u0e22 \u0e41\u0e25\u0e30 \u0e40\u0e2e\u0e34\u0e23\u0e4c\u0e0b\u0e42\u0e01\u0e27\u0e34\u0e40\u0e19\u0e35\u0e22'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('B\u00f4-xni-a H\u00e9c-x\u00ea-g\u00f4-vi-na'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u6ce2\u65af\u5c3c\u4e9a\u548c\u9ed1\u5c71\u5171\u548c\u56fd'),'zu':'*aa',}, 'BB': 
{'aa':'Barbados','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039c\u03c0\u03b1\u03c1\u03bc\u03c0\u03ac\u03bd\u03c4\u03bf\u03c2'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Barbade','fy':'*aa','ga':u('Barbad\u00f3s'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30d0\u30eb\u30d0\u30c9\u30b9'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ubc14\ubca0\uc774\ub3c4\uc2a4'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*aa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'*aa','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0411\u0430\u0440\u0431\u0430\u0434\u043e\u0441'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':'*ru','ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e1a\u0e32\
u0e23\u0e4c\u0e1a\u0e32\u0e14\u0e2d\u0e2a'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('B\u00e1c-ba-\u0111\u1ed1t'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u5df4\u5df4\u591a\u65af'),'zu':'*aa',}, 'BD': {'aa':'Bangladesh','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'Bangla Desh','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Bangladesch','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039c\u03c0\u03b1\u03bd\u03b3\u03ba\u03bb\u03b1\u03bd\u03c4\u03ad\u03c2'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':u('An 
Bhanglaid\u00e9is'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30d0\u30f3\u30b0\u30e9\u30c7\u30b7\u30e5'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ubc29\uae00\ub77c\ub370\uc2dc'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'Bangladexx','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'Bangladesz','ps':'*aa','pt':'*aa','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0411\u0430\u043d\u0433\u043b\u0430\u0434\u0435\u0448'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':'*ru','ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e1a\u0e31\u0e07\u0e04\u0e25\u0e32\u0e40\u0e17\u0e28'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('B\u0103ng-la-\u0111\u00e9t'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u5b5f\u52a0\u62c9'),'zu':'*aa',}, 'BE': 
{'aa':'Belgium','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':u('B\u00e8lgica'),'ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Belgien','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0392\u03ad\u03bb\u03b3\u03b9\u03bf'),'en':'*aa','eo':'*aa','es':u('B\u00e9lgica'),'et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'Belgia','fj':'*aa','fo':'*aa','fr':'Belgique','fy':'*aa','ga':'An Bheilg','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*fi','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*fi','io':'*aa','is':'*aa','it':'Belgio','iu':'*aa','iw':'*aa','ja':u('\u30d9\u30eb\u30ae\u30fc'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ubca8\uae30\uc5d0'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':u('Bel\u0121ju'),'my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':u('Belgi\u00eb'),'nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*fi','ps':'*aa','pt':'*es','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0411\u0435\u043b\u044c\u0433\u0438\u044f'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0411\u0435\u043b\u0433\u0438\u0458\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*de','sw':'*aa','ta':'*aa','te':'
*aa','tg':'*aa','th':u('\u0e40\u0e1a\u0e25\u0e40\u0e22\u0e35\u0e48\u0e22\u0e21'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('B\u1ec9'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u6bd4\u5229\u65f6'),'zu':'*aa',}, 'BF': {'aa':'Burkina Faso','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039c\u03c0\u03bf\u03c5\u03c1\u03ba\u03af\u03bd\u03b1 \u03a6\u03ac\u03c3\u03bf'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':u('Buirc\u00edne 
Fas\u00f3'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30d6\u30eb\u30ad\u30ca\u30d5\u30a1\u30bd'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ubd80\ub974\ud0a4\ub098\ud30c\uc18c'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*aa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'Burquina Faso','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0411\u0443\u0440\u043a\u0438\u043d\u0430-\u0424\u0430\u0441\u043e'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0411\u0443\u0440\u043a\u0438\u043d\u0430 \u0424\u0430\u0441\u043e'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e40\u0e1a\u0e2d\u0e23\u0e4c\u0e01\u0e34\u0e19\u0e32\u0e1f\u0e32\u0e42\u0e0b'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('Bu\u1ed1c-ki-na Pha-x\u00f4'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u5e03\u57fa\u7eb3\u6cd5\u7d22'),'zu':'*aa',}, 'BG': 
{'aa':'Bulgaria','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':u('\u0411\u044a\u043b\u0433\u0430\u0440\u0438\u044f'),'bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':u('Bulg\u00e0ria'),'ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Bulgarien','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0392\u03bf\u03c5\u03bb\u03b3\u03b1\u03c1\u03af\u03b1'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Bulgarie','fy':'*aa','ga':u('An Bhulg\u00e1ir'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30d6\u30eb\u30ac\u30ea\u30a2'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ubd88\uac00\ub9ac\uc544'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'Bulgarija','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Bulgarije','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':u('Bu\u0142garia'),'ps':'*aa','pt':u('Bulg\u00e1ria'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0411\u043e\u043b\u0433\u0430\u0440\u0438\u044f'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0411\u0443\u0433\u0
430\u0440\u0441\u043a\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*de','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e1a\u0e31\u0e25\u0e41\u0e01\u0e40\u0e23\u0e35\u0e22'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'Bun-ga-ri','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u4fdd\u52a0\u5229\u4e9a'),'zu':'*aa',}, 'BH': {'aa':'Bahrain','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':u('\u0627\u0644\u0628\u062d\u0631\u064a\u0646'),'as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039c\u03c0\u03b1\u03c7\u03c1\u03ad\u03b9\u03bd'),'en':'*aa','eo':'*aa','es':u('Bahr\u00e1in'),'et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':u('Bahre\u00efn'),'fy':'*aa','ga':u('Bair\u00e9in'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30d0\u30fc\u30ec\u30fc\u30f3'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ubc14\ub808\uc778'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':u('Ba\u0127rajn'),'my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl
':'Bahrein','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'Bahrajn','ps':'*aa','pt':'*aa','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0411\u0430\u0445\u0440\u0435\u0439\u043d'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0411\u0430\u0445\u0440\u0435\u0438\u043d'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e1a\u0e32\u0e2b\u0e4c\u0e40\u0e23\u0e19'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'Ba-ren','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u5df4\u6797'),'zu':'*aa',}, 'BI': {'aa':'Burundi','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039c\u03c0\u03bf\u03c5\u03c1\u03bf\u03cd\u03bd\u03c4\u03b9'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':u('An 
Bhur\u00fain'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30d6\u30eb\u30f3\u30b8'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ubd80\ub8ec\ub514'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*aa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'*aa','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0411\u0443\u0440\u0443\u043d\u0434\u0438'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':'*ru','ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e1a\u0e39\u0e23\u0e31\u0e19\u0e14\u0e34'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('Bu-run-\u0111i'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u5e03\u9686\u8fea'),'zu':'*aa',}, 'BJ': 
{'aa':'Benin','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039c\u03c0\u03ad\u03bd\u03b9\u03bd'),'en':'*aa','eo':'*aa','es':u('Ben\u00edn'),'et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':'Beinin','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30d9\u30cb\u30f3'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ubca0\ub139'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*aa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'*aa','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0411\u0435\u043d\u0438\u043d'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':'*ru','ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e40\u0e1a\u0e19\u0e34\u0e19'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa'
,'ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('B\u00ea-nanh'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u8d1d\u5b81'),'zu':'*aa',}, 'BL': {'aa':u('Saint Barth\u00e9lemy'),'ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':'*aa','en':'*aa','eo':'*aa','es':u('San Bartolom\u00e9'),'et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':u('Saint-Barth\u00e9lemy'),'fy':'*aa','ga':'*aa','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30b5\u30f3\u30d0\u30eb\u30c6\u30eb\u30df\u30fc'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc0dd \ubc14\ub974\ud154\ub808\ubbf8'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*aa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':u('S\u00e3o 
Bartolomeu'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':'*aa','rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':'*aa','ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':'*aa','ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'*aa','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u5723\u5df4\u6cf0\u52d2\u7c73\u5c9b'),'zu':'*aa',}, 'BM': {'aa':'Bermuda','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'Bermudes','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0392\u03b5\u03c1\u03bc\u03bf\u03cd\u03b4\u03b5\u03c2'),'en':'*aa','eo':'*aa','es':'Bermudas','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*ca','fy':'*aa','ga':u('Beirmi\u00fada'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30d0\u30fc\u30df\u30e5\u30fc\u30c0\u8af8\u5cf6'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ubc84\ubba4\ub2e4'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa',
'mr':'*aa','ms':'*aa','mt':'*aa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'Bermudy','ps':'*aa','pt':'*es','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0411\u0435\u0440\u043c\u0443\u0434\u0441\u043a\u0438\u0435 \u043e\u0441\u0442\u0440\u043e\u0432\u0430'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0411\u0435\u0440\u043c\u0443\u0434\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e40\u0e1a\u0e2d\u0e23\u0e4c\u0e21\u0e34\u0e27\u0e14\u0e49\u0e32'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'*aa','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u767e\u6155\u5927'),'zu':'*aa',}, 'BN': {'aa':'Brunei','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039c\u03c0\u03c1\u03bf\u03c5\u03bd\u03ad\u03b9 
\u039d\u03c4\u03b1\u03c1\u03bf\u03c5\u03c3\u03b1\u03bb\u03ac\u03bc'),'en':'*aa','eo':'*aa','es':u('Brun\u00e9i'),'et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':u('Br\u00fain\u00e9'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30d6\u30eb\u30cd\u30a4'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ube0c\ub8e8\ub098\uc774'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'Brunej','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'*aa','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0411\u0440\u0443\u043d\u0435\u0439'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0411\u0440\u0443\u043d\u0435\u0458'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e1a\u0e23\u0e39\u0e44\u0e19'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('Bru-n\u00e2y'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u6587\u83b1'),'zu':'*aa',}, 'BO': 
{'aa':'Bolivia','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':u('Bol\u00edvia'),'ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Bolivien','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0392\u03bf\u03bb\u03b9\u03b2\u03af\u03b1'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Bolivie','fy':'*aa','ga':'An Bholaiv','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30dc\u30ea\u30d3\u30a2'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ubcfc\ub9ac\ube44\uc544'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'Bolivja','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'Boliwia','ps':'*aa','pt':'*ca','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0411\u043e\u043b\u0438\u0432\u0438\u044f'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0411\u043e\u043b\u0438\u0432\u0438\u0458\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*a
a','th':u('\u0e42\u0e1a\u0e25\u0e34\u0e40\u0e27\u0e35\u0e22'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('B\u00f4-li-vi-a'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u73bb\u5229\u7ef4\u4e9a'),'zu':'*aa',}, 'BQ': {'aa':'Bonaire, Sint Eustatius and Saba','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Bonaire, Sint Eustatius und Saba','dv':'*aa','dz':'*aa','ee':'*aa','el':'*aa','en':'*aa','eo':'*aa','es':'Bonaire, San Eustaquio y Saba','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Bonaire, Saint-Eustache et Saba','fy':'*aa','ga':'*aa','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'Bonaire, Sint Eustatius e Saba','iu':'*aa','iw':'*aa','ja':u('\u30dc\u30cd\u30fc\u30eb\u3001\u30b7\u30f3\u30c8\u30e6\u30fc\u30b9\u30bf\u30c6\u30a3\u30a6\u30b9\u304a\u3088\u3073\u30b5\u30d0'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ubcf4\ub124\ub974, \uc2e0\ud2b8\uc720\uc2a4\ud0c0\ud2f0\uc6b0\uc2a4, \uc0ac\ubc14 
\uc81c\ub3c4'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*aa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'*aa','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':'*aa','rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':'*aa','ss':'*aa','st':'*aa','su':'*aa','sv':'Bonaire, Saint Eustatius och Saba','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':'*aa','ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'*aa','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u535a\u5948\u5c14\u5c9b, \u5723\u5c24\u65af\u7279\u6b47\u65af\u5c9b\u548c\u8428\u5df4\u5c9b'),'zu':'*aa',}, 'BR': {'aa':'Brazil','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'Brasil','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Brasilien','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0392\u03c1\u03b1\u03b6\u03b9\u03bb\u03af\u03b1'),'en':'*aa','eo':'*aa','es':'*ca','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'Brasilia','fj':'*aa','fo':'*aa','fr':u('Br\u00e9sil'),'fy':'*aa','ga':u('An 
Bhrasa\u00edl'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'Brasile','iu':'*aa','iw':'*aa','ja':u('\u30d6\u30e9\u30b8\u30eb'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ube0c\ub77c\uc9c8'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':u('Bra\u017cil'),'my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':u('Brazili\u00eb'),'nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'Brazylia','ps':'*aa','pt':'*ca','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0411\u0440\u0430\u0437\u0438\u043b\u0438\u044f'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0411\u0440\u0430\u0455\u0438\u043b'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*de','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e1a\u0e23\u0e32\u0e0b\u0e34\u0e25'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'Bra-xin','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u5df4\u897f'),'zu':'*aa',}, 'BS': 
{'aa':'Bahamas','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'Bahames','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039c\u03c0\u03b1\u03c7\u03ac\u03bc\u03b5\u03c2'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':u('Na Bah\u00e1ma\u00ed'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30d0\u30cf\u30de'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ubc14\ud558\ub9c8'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':u('Ba\u0127amas'),'my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':u('Bahama\u2019s'),'nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'Bahamy','ps':'*aa','pt':'*aa','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0411\u0430\u0433\u0430\u043c\u0441\u043a\u0438\u0435 
\u043e\u0441\u0442\u0440\u043e\u0432\u0430'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0411\u0430\u0445\u0430\u043c\u0438'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e1a\u0e32\u0e2e\u0e32\u0e21\u0e32\u0e2a'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'Ba-ha-ma','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u5df4\u54c8\u9a6c'),'zu':'*aa',}, 'BT': {'aa':'Bhutan','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039c\u03c0\u03bf\u03c5\u03c4\u03ac\u03bd'),'en':'*aa','eo':'*aa','es':u('But\u00e1n'),'et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Bhoutan','fy':'*aa','ga':u('An 
Bh\u00fat\u00e1in'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30d6\u30fc\u30bf\u30f3'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ubd80\ud0c4'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'Butan','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':u('But\u00e3o'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0411\u0443\u0442\u0430\u043d'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':'*ru','ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e20\u0e39\u0e10\u0e32\u0e19'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'*aa','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u4e0d\u4e39'),'zu':'*aa',}, 'BV': {'aa':'Bouvet 
Island','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Bouvet-Insel','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039d\u03ae\u03c3\u03bf\u03c2 \u039c\u03c0\u03bf\u03c5\u03b2\u03ad'),'en':'*aa','eo':'*aa','es':'Isla Bouvet','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':u('\u00cele Bouvet'),'fy':'*aa','ga':u('Oile\u00e1in Bouvet'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Kepulauan Bouvet','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'Isola di Bouvet','iu':'*aa','iw':'*aa','ja':u('\u30d6\u30fc\u30d9\u5cf6'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ubd80\ubca0\uc774 \uc12c'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*aa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Bouveteiland','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'Ilha 
Bouvet','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':'*aa','rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0411\u0443\u0432\u0435'),'ss':'*aa','st':'*aa','su':'*aa','sv':u('Bouvet\u00f6n'),'sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':'*aa','ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'*aa','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u5e03\u97e6\u5c9b'),'zu':'*aa',}, 'BW': {'aa':'Botswana','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Botsuana','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039c\u03c0\u03bf\u03c4\u03c3\u03bf\u03c5\u03ac\u03bd\u03b1'),'en':'*aa','eo':'*aa','es':'*de','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':u('An 
Bhotsu\u00e1in'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30dc\u30c4\u30ef\u30ca'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ubcf4\uce20\uc640\ub098'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*aa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'*de','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0411\u043e\u0442\u0441\u0432\u0430\u043d\u0430'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0411\u043e\u0446\u0432\u0430\u043d\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e1a\u0e2d\u0e15\u0e2a\u0e27\u0e32\u0e19\u0e32'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('B\u1ed1t-xoa-na'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u535a\u8328\u74e6\u7eb3'),'zu':'*aa',}, 'BY': 
{'aa':'Belarus','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':u('\u0411\u0435\u043b\u0430\u0440\u0443\u0441\u044c'),'bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':u('Bielor\u00fassia'),'ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039b\u03b5\u03c5\u03ba\u03bf\u03c1\u03c9\u03c3\u03af\u03b1'),'en':'*aa','eo':'*aa','es':'Bielorrusia','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':u('Bi\u00e9lo-Russie'),'fy':'*aa','ga':u('An Bhealar\u00fais'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Belarusia','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'Bielorussia','iu':'*aa','iw':'*aa','ja':u('\u30d9\u30e9\u30eb\u30fc\u30b7'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ubca8\ub77c\ub8e8\uc2a4'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'Bjelorussja','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Wit-Rusland','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':u('Bia\u0142oru\u015b'),'ps':'*aa','pt':'*aa','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':'*be','rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0411\u0435\u043b\u043e\u0440\u044
3\u0441\u0438\u0458\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'Vitryssland','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e40\u0e1a\u0e25\u0e25\u0e32\u0e23\u0e31\u0e2a'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('B\u00ea-la-r\u00fat'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u767d\u4fc4\u7f57\u65af'),'zu':'*aa',}, 'BZ': {'aa':'Belize','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039c\u03c0\u03b5\u03bb\u03af\u03b6'),'en':'*aa','eo':'*aa','es':'Belice','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':u('B\u00e9lize'),'fy':'*aa','ga':u('An 
Bheil\u00eds'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30d9\u30ea\u30fc\u30ba'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ubca8\ub9ac\uc988'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':u('Beli\u017ce'),'my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'*aa','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0411\u0435\u043b\u0438\u0437'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0411\u0435\u043b\u0438\u0441\u0435'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e40\u0e1a\u0e25\u0e34\u0e0b'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('B\u00ea-li-x\u00ea'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u4f2f\u91cc\u5179'),'zu':'*aa',}, 'CA': 
{'aa':'Canada','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':u('Canad\u00e0'),'ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Kanada','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039a\u03b1\u03bd\u03b1\u03b4\u03ac\u03c2'),'en':'*aa','eo':'*aa','es':u('Canad\u00e1'),'et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*de','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':'Ceanada','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*de','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*de','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30ab\u30ca\u30c0'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uce90\ub098\ub2e4'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*de','mt':'*de','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*de','ps':'*aa','pt':'*es','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u041a\u0430\u043d\u0430\u0434\u0430'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':'*ru','ss':'*aa','st':'*aa','su':'*aa','sv':'*de','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e41\u0e04\u0e19\u0e32\u0e14\u0e32'),'ti':'*aa','tk':'*aa','t
l':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('Ca-na-\u0111a'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u52a0\u62ff\u5927'),'zu':'*aa',}, 'CC': {'aa':'Cocos Islands','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Kokos-Inseln','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039d\u03ae\u03c3\u03bf\u03b9 \u039a\u03cc\u03ba\u03bf\u03c2 (\u039a\u03ae\u03bb\u03b9\u03bd\u03b3\u03ba)'),'en':'*aa','eo':'*aa','es':'Islas Cocos','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':u('\u00celes Cocos'),'fy':'*aa','ga':u('Oile\u00e1in Cocos (Keeling)'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Kepulauan Cocos','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'Isole Cocos','iu':'*aa','iw':'*aa','ja':u('\u30b3\u30b3\u30b9\u8af8\u5cf6'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ucf54\ucf54\uc2a4 \uad70\ub3c4'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'Cocos (Keeling) 
Islands','mt':'*ms','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Cocoseilanden','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'Ilhas Coco','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':'*aa','rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u041a\u043e\u043a\u043e\u0441\u043e\u0432\u0430 \u041e\u0441\u0442\u0440\u0432\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':u('Cocos\u00f6arna'),'sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':'*aa','ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'*aa','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u79d1\u5e93\u65af\u7fa4\u5c9b'),'zu':'*aa',}, 'CD': {'aa':'The Democratic Republic Of Congo','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Demokratische Republik Kongo','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039a\u03bf\u03bd\u03b3\u03ba\u03cc, \u039b\u03b1\u03ca\u03ba\u03ae \u0394\u03b7\u03bc\u03bf\u03ba\u03c1\u03b1\u03c4\u03af\u03b1 \u03c4\u03bf\u03c5'),'en':'*aa','eo':'*aa','es':u('Rep\u00fablica Democr\u00e1tica del Congo'),'et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':u('R\u00e9publique d\u00e9mocratique du Congo'),'fy':'*aa','ga':u('Poblacht Dhaonlathach an 
Chong\u00f3'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Republik Demokratik Kongo','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'Repubblica democratica del Congo','iu':'*aa','iw':'*aa','ja':u('\u30b3\u30f3\u30b4\u6c11\u4e3b\u5171\u548c\u56fd'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ucf69\uace0 \ubbfc\uc8fc \uacf5\ud654\uad6d'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'Democratic Republic of the Congo','mt':'*ms','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Congo-Kinshasa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'*nl','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':'*aa','rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0414\u0435\u043c\u043e\u043a\u0440\u0430\u0442\u0441\u043a\u0430 \u0440\u0435\u043f\u0443\u0431\u043b\u0438\u043a\u0430 \u041a\u043e\u043d\u0433\u043e'),'ss':'*aa','st':'*aa','su':'*aa','sv':'Demokratiska republiken Kongo','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':'*aa','ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'*aa','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u521a\u679c\u6c11\u4e3b\u5171\u548c\u56fd'),'zu':'*aa',}, 'CF': {'aa':'Central African 
Republic','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':u('Rep\u00fablica Centrafricana'),'ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Zentralafrikanische Republik','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039a\u03b5\u03bd\u03c4\u03c1\u03bf\u03b1\u03c6\u03c1\u03b9\u03ba\u03b1\u03bd\u03b9\u03ba\u03ae \u0394\u03b7\u03bc\u03bf\u03ba\u03c1\u03b1\u03c4\u03af\u03b1'),'en':'*aa','eo':'*aa','es':u('Rep\u00fablica Centroafricana'),'et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':u('R\u00e9publique Centre-Africaine'),'fy':'*aa','ga':u('Poblacht na hAfraice L\u00e1ir'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Republik Afrika Tengah','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'Repubblica Centrafricana','iu':'*aa','iw':'*aa','ja':u('\u4e2d\u592e\u30a2\u30d5\u30ea\u30ab\u5171\u548c\u56fd'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc911\uc559 \uc544\ud504\ub9ac\uce74'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*id','mt':u('Repubblika Afrikana \u010aentrali'),'my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Centraal-Afrikaanse Republiek','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':u('Republika 
\u015arodkowoafryka\u0144ska'),'ps':'*aa','pt':u('Rep\u00fablica Centro-Africana'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0426\u0435\u043d\u0442\u0440\u0430\u043b\u044c\u043d\u043e\u0430\u0444\u0440\u0438\u043a\u0430\u043d\u0441\u043a\u0430\u044f \u0420\u0435\u0441\u043f\u0443\u0431\u043b\u0438\u043a\u0430'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0426\u0435\u043d\u0442\u0440\u0430\u043b\u043d\u043e \u0410\u0444\u0440\u0438\u0447\u043a\u0430 \u0420\u0435\u043f\u0443\u0431\u043b\u0438\u043a\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'Centralafrikanska republiken','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e2a\u0e32\u0e18\u0e32\u0e23\u0e13\u0e23\u0e31\u0e10\u0e41\u0e2d\u0e1f\u0e23\u0e34\u0e01\u0e32\u0e01\u0e25\u0e32\u0e07'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('C\u1ed9ng h\u00f2a Trung Phi'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u4e2d\u975e\u5171\u548c\u56fd'),'zu':'*aa',}, 'CG': {'aa':'Congo','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Kongo','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039a\u03bf\u03bd\u03b3\u03ba\u03cc'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':u('An 
Cong\u00f3'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*de','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*de','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30b3\u30f3\u30b4'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ucf69\uace0'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*de','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*de','ps':'*aa','pt':'Congo - Brazzaville','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u041a\u043e\u043d\u0433\u043e'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':'*ru','ss':'*aa','st':'*aa','su':'*aa','sv':'*de','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e04\u0e2d\u0e07\u0e42\u0e01'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('C\u00f4ng-g\u00f4'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u521a\u679c'),'zu':'*aa',}, 'CH': 
{'aa':'Switzerland','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Schweiz','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0395\u03bb\u03b2\u03b5\u03c4\u03af\u03b1'),'en':'*aa','eo':'*aa','es':'Suiza','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'Sveitsi','fj':'*aa','fo':'*aa','fr':'Suisse','fy':'*aa','ga':u('An Eilv\u00e9is'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Swiss','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'Svizzera','iu':'*aa','iw':'*aa','ja':u('\u30b9\u30a4\u30b9'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc2a4\uc704\uc2a4'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*it','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Zwitserland','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'Szwajcaria','ps':'*aa','pt':u('Su\u00ed\u00e7a'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0428\u0432\u0435\u0439\u0446\u0430\u0440\u0438\u044f'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0428\u0432\u0430\u0458\u0446\u0430\u0440\u0441\u043a\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*
de','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e2a\u0e27\u0e34\u0e2a\u0e40\u0e0b\u0e2d\u0e23\u0e4c\u0e41\u0e25\u0e19\u0e14\u0e4c'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('Th\u1ee5y S\u0129'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u745e\u58eb'),'zu':'*aa',}, 'CI': {'aa':u('C\u00f4te d\'Ivoire'),'ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'Costa d\'Ivori','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':u('Elfenbeink\u00fcste'),'dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0391\u03ba\u03c4\u03ae \u0395\u03bb\u03b5\u03c6\u03b1\u03bd\u03c4\u03cc\u03b4\u03bf\u03bd\u03c4\u03bf\u03c2'),'en':'*aa','eo':'*aa','es':'Costa de Marfil','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':u('An C\u00f3sta Eabhair'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Pantai Gading','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'Costa d\'Avorio','iu':'*aa','iw':'*aa','ja':u('\u30b3\u30fc\u30c8\u30b8\u30dc\u30a2\u30fc\u30eb'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ucf54\ud2b8\ub514\ubd80\uc640\ub974'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*id','mt':u('Kosta 
ta\u2019 l-Avorju'),'my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Ivoorkust','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':u('Wybrze\u017ce Ko\u015bci S\u0142oniowej'),'ps':'*aa','pt':'Costa do Marfim','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u041a\u043e\u0442-\u0434\'\u0418\u0432\u0443\u0430\u0440'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u041e\u0431\u0430\u043b\u0430 \u0421\u043b\u043e\u043d\u043e\u0432\u0430\u0447\u0435'),'ss':'*aa','st':'*aa','su':'*aa','sv':'Elfenbenskusten','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e1d\u0e31\u0e48\u0e07\u0e17\u0e30\u0e40\u0e25\u0e44\u0e2d\u0e27\u0e2d\u0e23\u0e34'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('B\u1edd Bi\u1ec3n Ng\u00e0'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u8c61\u7259\u6d77\u5cb8'),'zu':'*aa',}, 'CK': {'aa':'Cook Islands','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Cook-Inseln','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039d\u03ae\u03c3\u03bf\u03b9 \u039a\u03bf\u03c5\u03ba'),'en':'*aa','eo':'*aa','es':'Islas Cook','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':u('\u00celes Cook'),'fy':'*aa','ga':u('Oile\u00e1in 
Cook'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Kepulauan Cook','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'Isole Cook','iu':'*aa','iw':'*aa','ja':u('\u30af\u30c3\u30af\u8af8\u5cf6'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ucfe1 \uc81c\ub3c4'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*aa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Cookeilanden','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'Ilhas Cook','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':'*aa','rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':'*aa','ss':'*aa','st':'*aa','su':'*aa','sv':u('Cook\u00f6arna'),'sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':'*aa','ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'*aa','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u5e93\u514b\u7fa4\u5c9b'),'zu':'*aa',}, 'CL': 
{'aa':'Chile','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'Xile','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u03a7\u03b9\u03bb\u03ae'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Chili','fy':'*aa','ga':'An tSile','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*fr','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*fr','io':'*aa','is':'*aa','it':'Cile','iu':'*aa','iw':'*aa','ja':u('\u30c1\u30ea'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uce60\ub808'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*it','mt':u('\u010aili'),'my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*fr','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'*aa','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0427\u0438\u043b\u0438'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0427\u0438\u043b\u0435'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e0a\u0e34\u0e25\u0e35'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa',
'ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('Chi-l\u00ea'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u667a\u5229'),'zu':'*aa',}, 'CM': {'aa':'Cameroon','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'Camerun','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Kamerun','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039a\u03b1\u03bc\u03b5\u03c1\u03bf\u03cd\u03bd'),'en':'*aa','eo':'*aa','es':u('Camer\u00fan'),'et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Cameroun','fy':'*aa','ga':u('Camar\u00fan'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*de','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*de','io':'*aa','is':'*aa','it':'*ca','iu':'*aa','iw':'*aa','ja':u('\u30ab\u30e1\u30eb\u30fc\u30f3'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uce74\uba54\ub8ec'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*de','mt':'*de','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Kameroen','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*de','ps':'*aa','pt':u('Rep\u00fablica dos 
Camar\u00f5es'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u041a\u0430\u043c\u0435\u0440\u0443\u043d'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':'*ru','ss':'*aa','st':'*aa','su':'*aa','sv':'*de','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e04\u0e32\u0e40\u0e21\u0e23\u0e39\u0e19'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('Ca-m\u01a1-run'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u5580\u9ea6\u9686'),'zu':'*aa',}, 'CN': {'aa':'China','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'Xina','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039a\u03af\u03bd\u03b1'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'Kiina','fj':'*aa','fo':'*aa','fr':'Chine','fy':'*aa','ga':u('An 
tS\u00edn'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Cina','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'*id','iu':'*aa','iw':'*aa','ja':u('\u4e2d\u83ef\u4eba\u6c11\u5171\u548c\u56fd'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc911\uad6d'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*id','mt':u('\u010aina'),'my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'Chiny','ps':'*aa','pt':'*aa','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u041a\u0438\u0442\u0430\u0439'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u041a\u0438\u043d\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'Kina','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e08\u0e35\u0e19'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('Trung Qu\u1ed1c'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u4e2d\u56fd'),'zu':'*aa',}, 'CO': 
{'aa':'Colombia','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':u('Col\u00f2mbia'),'ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Kolumbien','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039a\u03bf\u03bb\u03bf\u03bc\u03b2\u03af\u03b1'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Colombie','fy':'*aa','ga':u('An Chol\u00f3im'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Kolombia','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30b3\u30ed\u30f3\u30d3\u30a2'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ucf5c\ub86c\ube44\uc544'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'Kolumbja','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'Kolumbia','ps':'*aa','pt':u('Col\u00f4mbia'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u041a\u043e\u043b\u0443\u043c\u0431\u0438\u044f'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u041a\u043e\u043b\u0443\u043c\u0431\u0438\u0458\u0430'),'ss':'*aa','st':'*aa','su':'*
aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e42\u0e04\u0e25\u0e31\u0e21\u0e40\u0e1a\u0e35\u0e22'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('C\u00f4-l\u00f4m-bi-a'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u54e5\u4f26\u6bd4\u4e9a'),'zu':'*aa',}, 'CR': {'aa':'Costa Rica','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039a\u03cc\u03c3\u03c4\u03b1 \u03a1\u03af\u03ba\u03b1'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':u('C\u00f3sta R\u00edce'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Kosta 
Rika','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30b3\u30b9\u30bf\u30ea\u30ab'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ucf54\uc2a4\ud0c0\ub9ac\uce74'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*id','mt':'*id','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'Kostaryka','ps':'*aa','pt':'*aa','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u041a\u043e\u0441\u0442\u0430-\u0420\u0438\u043a\u0430'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u041a\u043e\u0441\u0442\u0430\u0440\u0438\u043a\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e04\u0e2d\u0e2a\u0e15\u0e32\u0e23\u0e34\u0e01\u0e49\u0e32'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('C\u1ed1t-xta Ri-ca'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u54e5\u65af\u8fbe\u9ece\u52a0'),'zu':'*aa',}, 'CU': 
{'aa':'Cuba','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Kuba','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039a\u03bf\u03cd\u03b2\u03b1'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':u('C\u00faba'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*de','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*de','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30ad\u30e5\u30fc\u30d0'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ucfe0\ubc14'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*de','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*de','ps':'*aa','pt':'*aa','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u041a\u0443\u0431\u0430'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':'*ru','ss':'*aa','st':'*aa','su':'*aa','sv':'*de','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e04\u0e34\u0e27\u0e1a\u0e32'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa
','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'Cu Ba','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u53e4\u5df4'),'zu':'*aa',}, 'CV': {'aa':'Cape Verde','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'Cap Verd','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Kap Verde','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039d\u03ae\u03c3\u03bf\u03b9 \u03a0\u03c1\u03ac\u03c3\u03b9\u03bd\u03bf\u03c5 \u0391\u03ba\u03c1\u03c9\u03c4\u03b7\u03c1\u03af\u03bf\u03c5'),'en':'*aa','eo':'*aa','es':'Cabo Verde','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Cap Vert','fy':'*aa','ga':'Rinn Verde','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Tanjung Verde','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'Capo Verde','iu':'*aa','iw':'*aa','ja':u('\u30ab\u30fc\u30dc\u30d9\u30eb\u30c7'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uae4c\ubf40\ubca0\ub974\ub370'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'Kape Verde','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':u('Kaapverdi\u00eb'),'nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':u('Republika Zielonego 
Przyl\u0105dka'),'ps':'*aa','pt':'*es','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0417\u0435\u043b\u0435\u043d\u044b\u0439 \u041c\u044b\u0441'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u041a\u0430\u043f\u0435 \u0412\u0435\u0440\u0434\u0435'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*de','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e40\u0e04\u0e1e\u0e40\u0e27\u0e2d\u0e23\u0e4c\u0e14'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('C\u00e1p-ve'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u4f5b\u5f97\u89d2'),'zu':'*aa',}, 'CW': {'aa':u('Cura\u00e7ao'),'ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':'*aa','en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':'*aa','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30ad\u30e5\u30e9\u30bd\u30fc'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ud034\ub77c\uc18c'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa
','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*aa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'*aa','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':'*aa','rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':'*aa','ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':'*aa','ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'*aa','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u5e93\u62c9\u7d22\u5c9b'),'zu':'*aa',}, 'CX': {'aa':'Christmas Island','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Weihnachtsinsel','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039d\u03ae\u03c3\u03bf\u03c2 \u03a7\u03c1\u03b9\u03c3\u03c4\u03bf\u03c5\u03b3\u03ad\u03bd\u03bd\u03c9\u03bd'),'en':'*aa','eo':'*aa','es':'Isla Christmas','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':u('\u00cele Christmas'),'fy':'*aa','ga':u('Oile\u00e1n na Nollag'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Pulau 
Christmas','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'Isola di Natale','iu':'*aa','iw':'*aa','ja':u('\u30af\u30ea\u30b9\u30de\u30b9\u5cf6'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ud06c\ub9ac\uc2a4\ub9c8\uc2a4 \uc12c'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*aa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Christmaseiland','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'Ilhas Natal','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':'*aa','rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0411\u043e\u0436\u0438\u045b\u043d\u043e \u041e\u0441\u0442\u0440\u0432\u043e'),'ss':'*aa','st':'*aa','su':'*aa','sv':u('Jul\u00f6n'),'sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':'*aa','ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'*aa','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u5723\u8bde\u5c9b'),'zu':'*aa',}, 'CY': 
{'aa':'Cyprus','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'Xipre','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Zypern','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039a\u03cd\u03c0\u03c1\u03bf\u03c2'),'en':'*aa','eo':'*aa','es':'Chipre','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Chypre','fy':'*aa','ga':'An Chipir','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Siprus','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'Cipro','iu':'*aa','iw':'*aa','ja':u('\u30ad\u30d7\u30ed\u30b9'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc0ac\uc774\ud504\ub7ec\uc2a4'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'Kibris','mt':u('\u010aipru'),'my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'Cypr','ps':'*aa','pt':'*es','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u041a\u0438\u043f\u0440'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u041a\u0438\u043f\u0430\u0440'),'ss':'*aa','st':'*aa','su':'*aa','sv':'Cypern','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e44\u0e0b\u0e1b\u0e2
3\u0e31\u0e2a'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('S\u00edp'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u585e\u6d66\u8def\u65af'),'zu':'*aa',}, 'CZ': {'aa':'Czech Republic','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':u('Rep\u00fablica Txeca'),'ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':u('\u010cesk\u00e1 republika'),'cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Tschechische Republik','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u03a4\u03c3\u03b5\u03c7\u03af\u03b1'),'en':'*aa','eo':'*aa','es':'Chequia','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'Tsekin tasavalta','fj':'*aa','fo':'*aa','fr':u('R\u00e9publique Tch\u00e8que'),'fy':'*aa','ga':'Poblacht na Seice','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Republik Ceko','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'Repubblica Ceca','iu':'*aa','iw':'*aa','ja':u('\u30c1\u30a7\u30b3'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uccb4\ucf54'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'Republik Czech','mt':u('Repubblika 
\u010aeka'),'my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':u('Tsjechi\u00eb'),'nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'Republika Czeska','ps':'*aa','pt':u('Rep\u00fablica Tcheca'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0427\u0435\u0445\u0438\u044f'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0427\u0435\u0448\u043a\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'Tjeckiska republiken','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e2a\u0e32\u0e18\u0e32\u0e23\u0e13\u0e23\u0e31\u0e10\u0e40\u0e0a\u0e47\u0e04'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('C\u1ed9ng h\u00f2a S\u00e9c'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u6377\u514b\u5171\u548c\u56fd'),'zu':'*aa',}, 'DE': {'aa':'Germany','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'Alemanya','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Deutschland','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0393\u03b5\u03c1\u03bc\u03b1\u03bd\u03af\u03b1'),'en':'*aa','eo':'*aa','es':'Alemania','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'Saksa','fj':'*aa','fo':'*aa','fr':'Allemagne','fy':'*aa','ga':u('An 
Ghearm\u00e1in'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Jerman','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'Germania','iu':'*aa','iw':'*aa','ja':u('\u30c9\u30a4\u30c4'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ub3c5\uc77c'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*id','mt':u('\u0120ermanja'),'my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Duitsland','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'Niemcy','ps':'*aa','pt':'Alemanha','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0413\u0435\u0440\u043c\u0430\u043d\u0438\u044f'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u041d\u0435\u043c\u0430\u0447\u043a\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'Tyskland','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e40\u0e22\u0e2d\u0e23\u0e21\u0e19\u0e35'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('\u0110\u1ee9c'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u5fb7\u56fd'),'zu':'*aa',}, 'DJ': 
{'aa':'Djibouti','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Dschibuti','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u03a4\u03b6\u03b9\u03bc\u03c0\u03bf\u03c5\u03c4\u03af'),'en':'*aa','eo':'*aa','es':'Yibuti','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':'*aa','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Jibouti','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'Gibuti','iu':'*aa','iw':'*aa','ja':u('\u30b8\u30d6\u30c1'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc9c0\ubd80\ud2f0'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*id','mt':u('\u0120ibuti'),'my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':u('D\u017cibuti'),'ps':'*aa','pt':'Djibuti','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0414\u0436\u0438\u0431\u0443\u0442\u0438'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u040f\u0438\u0431\u0443\u0442\u0438'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','
th':u('\u0e14\u0e34\u0e42\u0e1a\u0e15\u0e34'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'Gi-bu-ti','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u5409\u5e03\u63d0'),'zu':'*aa',}, 'DK': {'aa':'Denmark','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'Dinamarca','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'Danmark','de':u('D\u00e4nemark'),'dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0394\u03b1\u03bd\u03af\u03b1'),'en':'*aa','eo':'*aa','es':'*ca','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'Tanska','fj':'*aa','fo':'*aa','fr':'Danemark','fy':'*aa','ga':'An Danmhairg','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'Danimarca','iu':'*aa','iw':'*aa','ja':u('\u30c7\u30f3\u30de\u30fc\u30af'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ub374\ub9c8\ud06c'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'Danimarka','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Denemarken','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'Dania','ps':'*aa','pt':'*ca','qu':'*aa','rm':'*
aa','rn':'*aa','ro':'*aa','ru':u('\u0414\u0430\u043d\u0438\u044f'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0414\u0430\u043d\u0441\u043a\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*da','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e40\u0e14\u0e19\u0e21\u0e32\u0e23\u0e4c\u0e01'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('\u0110an M\u1ea1ch'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u4e39\u9ea6'),'zu':'*aa',}, 'DM': {'aa':'Dominica','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039d\u03c4\u03bf\u03bc\u03af\u03bd\u03b9\u03ba\u03b1'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Dominique','fy':'*aa','ga':'Doiminice','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Dominika','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30c9\u30df\u30cb\u30ab\u56fd'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ub3c4\ubbf8\ub2c8\uce74'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*
aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*id','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*id','ps':'*aa','pt':'*aa','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0414\u043e\u043c\u0438\u043d\u0438\u043a\u0430'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':'*ru','ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e42\u0e14\u0e21\u0e34\u0e19\u0e34\u0e01\u0e49\u0e32'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'*aa','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u591a\u7c73\u5c3c\u52a0\u8054\u90a6'),'zu':'*aa',}, 'DO': {'aa':'Dominican Republic','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':u('Rep\u00fablica Dominicana'),'ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Dominikanische Republik','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0394\u03bf\u03bc\u03b9\u03bd\u03b9\u03ba\u03b1\u03bd\u03ae \u0394\u03b7\u03bc\u03bf\u03ba\u03c1\u03b1\u03c4\u03af\u03b1'),'en':'*aa','eo':'*aa','es':'*ca','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':u('R\u00e9publique Dominicaine'),'fy':'*aa','ga':'An Phoblacht 
Dhoiminiceach','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Republik Dominika','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'Repubblica Dominicana','iu':'*aa','iw':'*aa','ja':u('\u30c9\u30df\u30cb\u30ab\u5171\u548c\u56fd'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ub3c4\ubbf8\ub2c8\uce74 \uacf5\ud654\uad6d'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'Republik Dominican','mt':'Republikka Domenikana','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Dominicaanse Republiek','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'Republika Dominikany','ps':'*aa','pt':'*ca','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0414\u043e\u043c\u0438\u043d\u0438\u043a\u0430\u043d\u0441\u043a\u0430\u044f \u0420\u0435\u0441\u043f\u0443\u0431\u043b\u0438\u043a\u0430'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0414\u043e\u043c\u0438\u043d\u0438\u043a\u0430\u043d\u0441\u043a\u0430 \u0420\u0435\u043f\u0443\u0431\u043b\u0438\u043a\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'Dominikanska 
republiken','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e2a\u0e32\u0e18\u0e32\u0e23\u0e13\u0e23\u0e31\u0e10\u0e42\u0e14\u0e21\u0e34\u0e19\u0e34\u0e01\u0e31\u0e19'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'*aa','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u591a\u7c73\u5c3c\u52a0\u5171\u548c\u56fd'),'zu':'*aa',}, 'DZ': {'aa':'Algeria','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':u('\u0627\u0644\u062c\u0632\u0627\u0626\u0631'),'as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':u('Alg\u00e8ria'),'ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Algerien','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0391\u03bb\u03b3\u03b5\u03c1\u03af\u03b1'),'en':'*aa','eo':'*aa','es':'Argelia','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':u('Alg\u00e9rie'),'fy':'*aa','ga':u('An 
Ailg\u00e9ir'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30a2\u30eb\u30b8\u30a7\u30ea\u30a2'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc54c\uc81c\ub9ac'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'Aljazair','mt':u('Al\u0121erija'),'my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Algerije','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':u('Arg\u00e9lia'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0410\u043b\u0436\u0438\u0440'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':'*ru','ss':'*aa','st':'*aa','su':'*aa','sv':'Algeriet','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e41\u0e2d\u0e25\u0e08\u0e35\u0e40\u0e23\u0e35\u0e22'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('An-gi\u00ea-ri'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u963f\u5c14\u53ca\u5229\u4e9a'),'zu':'*aa',}, 'EC': 
{'aa':'Ecuador','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'Equador','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0399\u03c3\u03b7\u03bc\u03b5\u03c1\u03b9\u03bd\u03cc\u03c2'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Equateur','fy':'*aa','ga':u('Eacuad\u00f3r'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Ekuador','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30a8\u30af\u30a2\u30c9\u30eb'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc5d0\ucfe0\uc544\ub3c4\ub974'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'Ekwador','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*mt','ps':'*aa','pt':'*ca','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u042d\u043a\u0432\u0430\u0434\u043e\u0440'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0415\u043a\u0432\u0430\u0434\u043e\u0440'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','
te':'*aa','tg':'*aa','th':u('\u0e40\u0e2d\u0e01\u0e27\u0e32\u0e14\u0e2d\u0e23\u0e4c'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('\u00ca-cu-a-\u0111o'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u5384\u74dc\u591a\u5c14'),'zu':'*aa',}, 'EE': {'aa':'Estonia','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':u('Est\u00f2nia'),'ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Estland','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0395\u03c3\u03b8\u03bf\u03bd\u03af\u03b1'),'en':'*aa','eo':'*aa','es':'*aa','et':'Eesti','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Estonie','fy':'*aa','ga':u('An 
East\u00f3in'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30a8\u30b9\u30c8\u30cb\u30a2'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc5d0\uc2a4\ud1a0\ub2c8\uc544'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'Estonja','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*de','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':u('Est\u00f4nia'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u042d\u0441\u0442\u043e\u043d\u0438\u044f'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0415\u0441\u0442\u043e\u043d\u0438\u0458\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*de','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e40\u0e2d\u0e2a\u0e42\u0e15\u0e40\u0e19\u0e35\u0e22'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('E-xt\u00f4-ni-a'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u7231\u6c99\u5c3c\u4e9a'),'zu':'*aa',}, 'EG': 
{'aa':'Egypt','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':u('\u0645\u0635\u0631'),'as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'Egipte','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':u('\u00c4gypten'),'dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0391\u03af\u03b3\u03c5\u03c0\u03c4\u03bf\u03c2'),'en':'*aa','eo':'*aa','es':'Egipto','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Egypte','fy':'*aa','ga':u('An \u00c9igipt'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Mesir','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'Egitto','iu':'*aa','iw':'*aa','ja':u('\u30a8\u30b8\u30d7\u30c8'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc774\uc9d1\ud2b8'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*id','mt':u('E\u0121ittu'),'my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*fr','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'Egipt','ps':'*aa','pt':'Egito','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0415\u0433\u0438\u043f\u0442'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0415\u0433\u0438\u043f\u0430\u0442'),'ss':'*aa','st':'*aa','su':'*aa','sv':'Egypten','sw':'*aa','ta':'*aa','te'
:'*aa','tg':'*aa','th':u('\u0e2d\u0e35\u0e22\u0e34\u0e1b\u0e15\u0e4c'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('Ai C\u1eadp'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u57c3\u53ca'),'zu':'*aa',}, 'EH': {'aa':'Western Sahara','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':u('S\u00e0hara Occidental'),'ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Westsahara','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0394\u03c5\u03c4\u03b9\u03ba\u03ae \u03a3\u03b1\u03c7\u03ac\u03c1\u03b1'),'en':'*aa','eo':'*aa','es':'Sahara Occidental','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*es','fy':'*aa','ga':u('An Sah\u00e1ra Thiar'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Sahara Barat','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'Sahara Occidentale','iu':'*aa','iw':'*aa','ja':u('\u897f\u30b5\u30cf\u30e9'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc11c\uc0ac\ud558\ub77c'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*id','mt':'Sahara tal-Punent','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Westelijke 
Sahara','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'Sahara Zachodnia','ps':'*aa','pt':'Saara Ocidental','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0417\u0430\u043f\u0430\u0434\u043d\u0430\u044f \u0421\u0430\u0445\u0430\u0440\u0430'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0417\u0430\u043f\u0430\u0434\u043d\u0430 \u0421\u0430\u0445\u0430\u0440\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':u('V\u00e4stra Sahara'),'sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e0b\u0e32\u0e2e\u0e32\u0e23\u0e48\u0e32\u0e15\u0e30\u0e27\u0e31\u0e19\u0e15\u0e01'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('T\u00e2y Sahara'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u897f\u6492\u54c8\u62c9'),'zu':'*aa',}, 'ER': 
{'aa':'Eritrea','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0395\u03c1\u03c5\u03b8\u03c1\u03b1\u03af\u03b1'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':u('Erythr\u00e9e'),'fy':'*aa','ga':'*aa','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30a8\u30ea\u30c8\u30ea\u30a2'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc5d0\ub9ac\ud2b8\ub9ac\uc544'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'Eritreja','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'Erytrea','ps':'*aa','pt':'Eritreia','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u042d\u0440\u0438\u0442\u0440\u0435\u044f'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0415\u0440\u0438\u0442\u0440\u0435\u0458\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa'
,'tg':'*aa','th':u('\u0e2d\u0e34\u0e23\u0e34\u0e17\u0e23\u0e35'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('\u00ca-ri-t\u01a1-r\u00ea-a'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u5384\u91cc\u7279\u5c3c\u4e9a'),'zu':'*aa',}, 'ES': {'aa':'Spain','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'Espanya','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Spanien','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0399\u03c3\u03c0\u03b1\u03bd\u03af\u03b1'),'en':'*aa','eo':'*aa','es':u('Espa\u00f1a'),'et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'Espanja','fj':'*aa','fo':'*aa','fr':'Espagne','fy':'*aa','ga':u('An 
Sp\u00e1inn'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Spanyol','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'Spagna','iu':'*aa','iw':'*aa','ja':u('\u30b9\u30da\u30a4\u30f3'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc2a4\ud398\uc778'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'Sepanyol','mt':'Spanja','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Spanje','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'Hiszpania','ps':'*aa','pt':'Espanha','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0418\u0441\u043f\u0430\u043d\u0438\u044f'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0428\u043f\u0430\u043d\u0438\u0458\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*de','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e2a\u0e40\u0e1b\u0e19'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('T\u00e2y Ban Nha'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u897f\u73ed\u7259'),'zu':'*aa',}, 'ET': 
{'aa':'Ethiopia','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':u('Eti\u00f2pia'),'ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':u('\u00c4thiopien'),'dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0391\u03b9\u03b8\u03b9\u03bf\u03c0\u03af\u03b1'),'en':'*aa','eo':'*aa','es':u('Etiop\u00eda'),'et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Ethiopie','fy':'*aa','ga':u('An Aet\u00f3ip'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'Etiopia','iu':'*aa','iw':'*aa','ja':u('\u30a8\u30c1\u30aa\u30d4\u30a2'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc774\ub514\uc624\ud53c\uc544'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'Etijopja','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':u('Ethiopi\u00eb'),'nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*it','ps':'*aa','pt':u('Eti\u00f3pia'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u042d\u0444\u0438\u043e\u043f\u0438\u044f'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0415\u0442\u0438\u043e\u043f\u0438\u0458\u0430'),'ss':'*aa',
'st':'*aa','su':'*aa','sv':'Etiopien','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e40\u0e2d\u0e18\u0e34\u0e42\u0e2d\u0e40\u0e1b\u0e35\u0e22'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('\u00ca-ti-\u00f4-pi-a'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u57c3\u585e\u4fc4\u6bd4\u4e9a'),'zu':'*aa',}, 'FI': {'aa':'Finland','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':u('Finl\u00e0ndia'),'ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Finnland','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u03a6\u03b9\u03bd\u03bb\u03b1\u03bd\u03b4\u03af\u03b1'),'en':'*aa','eo':'*aa','es':'Finlandia','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'Suomi','fj':'*aa','fo':'*aa','fr':'Finlande','fy':'*aa','ga':'An 
Fhionlainn','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*es','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*es','io':'*aa','is':'*aa','it':'*es','iu':'*aa','iw':'*aa','ja':u('\u30d5\u30a3\u30f3\u30e9\u30f3\u30c9'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ud540\ub780\ub4dc'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'Finlandja','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*es','ps':'*aa','pt':u('Finl\u00e2ndia'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0424\u0438\u043d\u043b\u044f\u043d\u0434\u0438\u044f'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0424\u0438\u043d\u0441\u043a\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e1f\u0e34\u0e19\u0e41\u0e25\u0e19\u0e14\u0e4c'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('Ph\u1ea7n Lan'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u82ac\u5170'),'zu':'*aa',}, 'FJ': 
{'aa':'Fiji','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Fidschi','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u03a6\u03af\u03c4\u03b6\u03b9'),'en':'*aa','eo':'*aa','es':'Fiyi','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Fidji','fy':'*aa','ga':u('Fids\u00ed'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'Figi','iu':'*aa','iw':'*aa','ja':u('\u30d5\u30a3\u30b8\u30fc'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ud53c\uc9c0'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':u('Fi\u0121i'),'my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':u('Fid\u017ci'),'ps':'*aa','pt':'*aa','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0424\u0438\u0434\u0436\u0438'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0424\u0438\u045f\u0438'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e1f\u0e34\u0e08\u0e34'),'ti':'*aa','tk':'*aa','t
l':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'Phi-gi','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u6590\u6d4e'),'zu':'*aa',}, 'FK': {'aa':'Falkland Islands','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Falkland-Inseln','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039d\u03ae\u03c3\u03bf\u03b9 \u03a6\u03ce\u03ba\u03bb\u03b1\u03bd\u03c4'),'en':'*aa','eo':'*aa','es':'Islas Malvinas','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':u('\u00celes Malouines'),'fy':'*aa','ga':u('Oile\u00e1in Fh\u00e1clainne'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Kepulauan Falkland','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'Isole Falkland','iu':'*aa','iw':'*aa','ja':u('\u30d5\u30a9\u30fc\u30af\u30e9\u30f3\u30c9\u8af8\u5cf6'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ud3ec\ud074\ub79c\ub4dc 
\uc81c\ub3c4'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*aa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Falklandeilanden','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'Ilhas Malvinas','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':'*aa','rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0424\u043e\u043b\u043a\u043b\u0430\u043d\u0434\u0441\u043a\u0430 \u041e\u0441\u0442\u0440\u0432\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':u('Falklands\u00f6arna'),'sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':'*aa','ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'*aa','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u5bcc\u514b\u5170\u7fa4\u5c9b'),'zu':'*aa',}, 'FM': {'aa':'Micronesia','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':u('Micron\u00e8sia'),'ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Mikronesien','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039c\u03b9\u03ba\u03c1\u03bf\u03bd\u03b7\u03c3\u03af\u03b1, \u039f\u03bc\u03cc\u03c3\u03c0\u03bf\u03bd\u03b4\u03b5\u03c2 \u03a0\u03bf\u03bb\u03b9\u03c4\u03b5\u03af\u03b5\u03c2 
\u03c4\u03b7\u03c2'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':u('Micron\u00e9sie'),'fy':'*aa','ga':u('An Mhicrin\u00e9is'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Mikronesia','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30df\u30af\u30ed\u30cd\u30b7\u30a2'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ub9c8\uc774\ud06c\ub85c\ub124\uc2dc\uc544'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'Mikronesja','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':u('Micronesi\u00eb'),'nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'Mikronezja','ps':'*aa','pt':u('Micron\u00e9sia'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u041c\u0438\u043a\u0440\u043e\u043d\u0435\u0437\u0438\u044f'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u041c\u0438\u043a\u0440\u043e\u043d\u0435\u0437\u0438\u0458\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*de','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e44\u0e21\u0e42\u0e04\u0e23\u0e19\u0e34\u0e40\u0e0b\u0e35\u0e22'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('Mi-cr\u00f4-n\u00ea-xi-a'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'
*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u5bc6\u514b\u7f57\u5c3c\u897f\u4e9a'),'zu':'*aa',}, 'FO': {'aa':'Faroe Islands','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':u('F\u00e4r\u00f6er-Inseln'),'dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039d\u03ae\u03c3\u03bf\u03b9 \u03a6\u03b5\u03c1\u03cc\u03b5\u03c2'),'en':'*aa','eo':'*aa','es':'Islas Feroe','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':u('\u00celes F\u00e9ro\u00e9'),'fy':'*aa','ga':u('Oile\u00e1in Fhar\u00f3'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Kepulauan Faroe','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':u('Isole F\u00e6roer'),'iu':'*aa','iw':'*aa','ja':u('\u30d5\u30a7\u30ed\u30fc\u8af8\u5cf6'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ud398\ub85c \uc81c\ub3c4'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':u('G\u017cejjer Faroe'),'my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':u('Faer\u00f6er'),'nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'Ilhas 
Faroe','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':'*aa','rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0424\u0430\u0440\u0441\u043a\u0430 \u041e\u0441\u0442\u0440\u0432\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':u('F\u00e4r\u00f6arna'),'sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':'*aa','ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'*aa','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u6cd5\u7f57\u7fa4\u5c9b'),'zu':'*aa',}, 'FR': {'aa':'France','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':u('Fran\u00e7a'),'ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Frankreich','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0393\u03b1\u03bb\u03bb\u03af\u03b1'),'en':'*aa','eo':'*aa','es':'Francia','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'Ranska','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':'An 
Fhrainc','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Perancis','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'*es','iu':'*aa','iw':'*aa','ja':u('\u30d5\u30e9\u30f3\u30b9'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ud504\ub791\uc2a4'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*id','mt':'Franza','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Frankrijk','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'Francja','ps':'*aa','pt':'*ca','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0424\u0440\u0430\u043d\u0446\u0438\u044f'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0424\u0440\u0430\u043d\u0446\u0443\u0441\u043a\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'Frankrike','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e1d\u0e23\u0e31\u0e48\u0e07\u0e40\u0e28\u0e2a'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('Ph\u00e1p'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u6cd5\u56fd'),'zu':'*aa',}, 'GA': 
{'aa':'Gabon','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Gabun','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0393\u03ba\u03b1\u03bc\u03c0\u03cc\u03bd'),'en':'*aa','eo':'*aa','es':u('Gab\u00f3n'),'et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':u('An Ghab\u00fain'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30ac\u30dc\u30f3'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uac00\ubd09'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*aa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':u('Gab\u00e3o'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0413\u0430\u0431\u043e\u043d'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':'*ru','ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e01\u0e32\u0e1a\u0e2d\u0e19'),'ti':'*aa','tk':'*aa','tl':'*aa','t
n':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('Ga-b\u00f4ng'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u52a0\u84ec'),'zu':'*aa',}, 'GB': {'aa':'United Kingdom','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'Regne Unit','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':u('Vereinigtes K\u00f6nigreich'),'dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0397\u03bd\u03c9\u03bc\u03ad\u03bd\u03bf \u0392\u03b1\u03c3\u03af\u03bb\u03b5\u03b9\u03bf'),'en':'*aa','eo':'*aa','es':'Reino Unido','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'Iso-Britannia','fj':'*aa','fo':'*aa','fr':'Royaume-Uni','fy':'*aa','ga':u('An R\u00edocht Aontaithe'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Inggris Raya','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'Regno Unito','iu':'*aa','iw':'*aa','ja':u('\u30a4\u30ae\u30ea\u30b9'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc601\uad6d'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'Ingilterra','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Verenigd 
Koninkrijk','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'Wielka Brytania','ps':'*aa','pt':'*es','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0421\u043e\u0435\u0434\u0438\u043d\u0435\u043d\u043d\u043e\u0435 \u041a\u043e\u0440\u043e\u043b\u0435\u0432\u0441\u0442\u0432\u043e'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0412\u0435\u043b\u0438\u043a\u0430 \u0411\u0440\u0438\u0442\u0430\u043d\u0438\u0458\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'Storbritannien','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e2a\u0e2b\u0e23\u0e32\u0e0a\u0e2d\u0e32\u0e13\u0e32\u0e08\u0e31\u0e01\u0e23'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('V\u01b0\u01a1ng qu\u1ed1c Anh'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u82f1\u56fd'),'zu':'*aa',}, 'GD': 
{'aa':'Grenada','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0393\u03c1\u03b5\u03bd\u03ac\u03b4\u03b1'),'en':'*aa','eo':'*aa','es':'Granada','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Grenade','fy':'*aa','ga':'*aa','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30b0\u30ec\u30ca\u30c0'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uadf8\ub808\ub098\ub2e4'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*aa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'*es','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':'*aa','rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0413\u0440\u0435\u043d\u0430\u0434\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':'*aa','ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*
aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('Gr\u00ea-na-\u0111a'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u683c\u6797\u7eb3\u8fbe'),'zu':'*aa',}, 'GE': {'aa':'Georgia','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':u('Ge\u00f2rgia'),'ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Georgien','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0393\u03b5\u03c9\u03c1\u03b3\u03af\u03b1'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':u('G\u00e9orgie'),'fy':'*aa','ga':'An tSeoirsia','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30b0\u30eb\u30b8\u30a2'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uadf8\ub8e8\uc9c0\uc57c'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':u('\u0120or\u0121ja'),'my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':u('Georgi\u00eb'),'nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'Gruzja','ps':'*aa','pt':u('Ge\u00f3rgia'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0413\u0440\u0443\u04
37\u0438\u044f'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0413\u0440\u0443\u0437\u0438\u0458\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*de','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e08\u0e2d\u0e23\u0e4c\u0e40\u0e08\u0e35\u0e22'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'Gru-di-a','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u683c\u9c81\u5409\u4e9a'),'zu':'*aa',}, 'GF': {'aa':'French Guiana','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'Guaiana Francesa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':u('Franz\u00f6sisch-Guayana'),'dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0393\u03b1\u03bb\u03bb\u03b9\u03ba\u03ae \u0393\u03bf\u03c5\u03b9\u03ac\u03bd\u03b1'),'en':'*aa','eo':'*aa','es':'Guayana Francesa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':u('Guyane fran\u00e7aise'),'fy':'*aa','ga':u('An Ghu\u00e1in Fhrancach'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Guyana Perancis','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'Guayana Francese','iu':'*aa','iw':'*aa','ja':u('\u4ecf\u9818\u30ae\u30a2\u30ca'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ud504\ub791\uc2a4\ub839 
\uae30\uc544\ub098'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':u('Gujana Fran\u010bi\u017ca'),'my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Frans-Guyana','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'Gujana Francuska','ps':'*aa','pt':'Guiana Francesa','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0424\u0440\u0430\u043d\u0446\u0443\u0437\u0441\u043a\u0430\u044f \u0413\u0432\u0438\u043d\u0435\u044f'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0424\u0440\u0430\u043d\u0446\u0443\u0441\u043a\u0430 \u0413\u0432\u0430\u0458\u0430\u043d\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'Franska Guyana','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e40\u0e1f\u0e23\u0e47\u0e19\u0e0a\u0e01\u0e34\u0e27\u0e19\u0e48\u0e32'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'*aa','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u6cd5\u5c5e\u572d\u4e9a\u90a3'),'zu':'*aa',}, 'GG': 
{'aa':'Guernsey','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':'*aa','en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Guernesey','fy':'*aa','ga':'*aa','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30ac\u30fc\u30f3\u30b8\u30fc'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uac74\uc9c0'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*aa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'*aa','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':'*aa','rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':'*aa','ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':'*aa','ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa'
,'ve':'*aa','vi':'*aa','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u683c\u6069\u897f\u5c9b'),'zu':'*aa',}, 'GH': {'aa':'Ghana','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0393\u03ba\u03ac\u03bd\u03b1'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':u('G\u00e1na'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30ac\u30fc\u30ca'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uac00\ub098'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'Gana','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'*mt','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0413\u0430\u043d\u0430'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':'*ru','ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa'
,'ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e01\u0e32\u0e19\u0e48\u0e32'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'Gha-na','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u52a0\u7eb3'),'zu':'*aa',}, 'GI': {'aa':'Gibraltar','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0393\u03b9\u03b2\u03c1\u03b1\u03bb\u03c4\u03ac\u03c1'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':u('Giobr\u00e1ltar'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'Gibilterra','iu':'*aa','iw':'*aa','ja':u('\u30b8\u30d6\u30e9\u30eb\u30bf\u30eb'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc9c0\ube0c\ub864\ud130'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*aa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'*aa
','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':'*aa','rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0413\u0438\u0431\u0440\u0430\u043b\u0442\u0430\u0440'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':'*aa','ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'*aa','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u76f4\u5e03\u7f57\u9640'),'zu':'*aa',}, 'GL': {'aa':'Greenland','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':u('Gr\u00f6nland'),'dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0393\u03c1\u03bf\u03b9\u03bb\u03b1\u03bd\u03b4\u03af\u03b1'),'en':'*aa','eo':'*aa','es':'Groenlandia','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Groenland','fy':'*aa','ga':'An 
Ghraonlainn','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*es','iu':'*aa','iw':'*aa','ja':u('\u30b0\u30ea\u30fc\u30f3\u30e9\u30f3\u30c9'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uadf8\ub9b0\ub79c\ub4dc'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'Grinlandja','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*fr','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':u('Gro\u00eanlandia'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':'*aa','rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0413\u0440\u0435\u043d\u043b\u0430\u043d\u0434'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*de','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':'*aa','ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'*aa','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u683c\u9675\u5170'),'zu':'*aa',}, 'GM': 
{'aa':'Gambia','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':u('G\u00e0mbia'),'ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0393\u03ba\u03ac\u03bc\u03c0\u03b9\u03b1'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Gambie','fy':'*aa','ga':'An Ghaimbia','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30ac\u30f3\u30d3\u30a2'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uac10\ube44\uc544'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'Gambja','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':u('G\u00e2mbia'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0413\u0430\u043c\u0431\u0438\u044f'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0413\u0430\u043c\u0431\u0438\u0458\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e41\u
0e01\u0e21\u0e40\u0e1a\u0e35\u0e22'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('G\u0103m-bi-a'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u5188\u6bd4\u4e9a'),'zu':'*aa',}, 'GN': {'aa':'Guinea','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0393\u03bf\u03c5\u03b9\u03bd\u03ad\u03b1'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':u('Guin\u00e9e'),'fy':'*aa','ga':'An Ghuine','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30ae\u30cb\u30a2'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uae30\ub2c8'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'Gineja','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Guinee','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'Gwinea','ps':'*aa','pt':u('Guin\u00e9'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u(
'\u0413\u0432\u0438\u043d\u0435\u044f'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0413\u0432\u0438\u043d\u0435\u0458\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e01\u0e34\u0e27\u0e19\u0e35'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('Ghi-n\u00ea'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u51e0\u5185\u4e9a'),'zu':'*aa',}, 'GP': {'aa':'Guadeloupe','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0393\u03bf\u03c5\u03b1\u03b4\u03b5\u03bb\u03bf\u03cd\u03c0\u03b7'),'en':'*aa','eo':'*aa','es':'Guadalupe','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':u('Guadal\u00faip'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'Guadalupa','iu':'*aa','iw':'*aa','ja':u('\u30b0\u30a2\u30c9\u30eb\u30fc\u30d7'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uacfc\ub2ec\ub85c\ud504'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*
aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'Gwadelupe','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'Gwadelupa','ps':'*aa','pt':'*es','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0413\u0432\u0430\u0434\u0435\u043b\u0443\u043f\u0430'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0413\u0432\u0430\u0434\u0435\u043b\u0443\u043f\u0435'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e01\u0e31\u0e27\u0e40\u0e14\u0e2d\u0e25\u0e39\u0e1b'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'*aa','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u74dc\u5fb7\u7f57\u666e\u5c9b'),'zu':'*aa',}, 'GQ': {'aa':'Equatorial Guinea','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'Guinea Equatorial','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':u('\u00c4quatorial-Guinea'),'dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0399\u03c3\u03b7\u03bc\u03b5\u03c1\u03b9\u03bd\u03ae \u0393\u03bf\u03c5\u03b9\u03bd\u03ad\u03b1'),'en':'*aa','eo':'*aa','es':'Guinea Ecuatorial','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':u('Guin\u00e9e Equatoriale'),'fy':'*aa','ga':u('An Ghuine 
Mhe\u00e1nchriosach'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Guinea Khatulistiwa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'Guinea Equatoriale','iu':'*aa','iw':'*aa','ja':u('\u8d64\u9053\u30ae\u30cb\u30a2'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc801\ub3c4 \uae30\ub2c8'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'Ginea Ekwatorjali','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Equatoriaal-Guinea','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':u('Gwinea R\u00f3wnikowa'),'ps':'*aa','pt':u('Guin\u00e9 Equatorial'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u042d\u043a\u0432\u0430\u0442\u043e\u0440\u0438\u0430\u043b\u044c\u043d\u0430\u044f \u0413\u0432\u0438\u043d\u0435\u044f'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0415\u043a\u0432\u0430\u0442\u043e\u0440\u0438\u0458\u0430\u043b\u043d\u0430 \u0413\u0432\u0438\u043d\u0435\u0458\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'Ekvatorialguinea','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e40\u0e2d\u0e04\u0e27\u0e32\u0e42\u0e17\u0e40\u0e23\u0e35\u0e22\u0e25\u0e01\u0e34\u0e19\u0e35'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('Ghi-n\u00ea 
X\u00edch-\u0111\u1ea1o'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u8d64\u9053\u51e0\u5185\u4e9a'),'zu':'*aa',}, 'GR': {'aa':'Greece','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':u('Gr\u00e8cia'),'ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Griechenland','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0395\u03bb\u03bb\u03ac\u03b4\u03b1'),'en':'*aa','eo':'*aa','es':'Grecia','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'Kreikka','fj':'*aa','fo':'*aa','fr':u('Gr\u00e8ce'),'fy':'*aa','ga':u('An Ghr\u00e9ig'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Yunani','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'*es','iu':'*aa','iw':'*aa','ja':u('\u30ae\u30ea\u30b7\u30a2'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uadf8\ub9ac\uc2a4'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*id','mt':u('Gre\u010bja'),'my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Griekenland','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'Grecja','ps':'*aa','pt':u('Gr\u00e9cia'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0413\u0440\u0435\u0446\u0438\u044f'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl
':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0413\u0440\u0447\u043a\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'Grekland','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e01\u0e23\u0e35\u0e0b'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('Hy L\u1ea1p'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u5e0c\u814a'),'zu':'*aa',}, 'GS': {'aa':'South Georgia And The South Sandwich Islands','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':u('S\u00fcd-Georgia und die s\u00fcdlichen Sandwich-Inseln'),'dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039d\u03cc\u03c4\u03b9\u03b1 \u0393\u03b5\u03c9\u03c1\u03b3\u03af\u03b1 \u03ba\u03b1\u03b9 \u039d\u03ae\u03c3\u03bf\u03b9 \u039d\u03cc\u03c4\u03b9\u03b5\u03c2 \u03a3\u03ac\u03bd\u03c4\u03bf\u03c5\u03b9\u03c4\u03c2'),'en':'*aa','eo':'*aa','es':'Islas Georgia del Sur y Sandwich del Sur','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':u('G\u00e9orgie du Sud et \u00eeles Sandwich du Sud'),'fy':'*aa','ga':u('An tSeoirsia Theas agus Oile\u00e1in Sandwich Theas'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Georgia Selatan dan Kepulauan Sandwich Selatan','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'Georgia del sud e isole Sandwich 
meridionali','iu':'*aa','iw':'*aa','ja':u('\u30b5\u30a6\u30b9\u30b8\u30e7\u30fc\u30b8\u30a2\u5cf6\u30fb\u30b5\u30a6\u30b9\u30b5\u30f3\u30c9\u30a6\u30a3\u30c3\u30c1\u5cf6'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc0ac\uc6b0\uc2a4 \uc870\uc9c0\uc544 \ubc0f \uc0ac\uc6b0\uc2a4 \uc0cc\ub4dc\uc704\uce58 \uc81c\ub3c4'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'South Georgia and the South Sandwich Islands','mt':'*ms','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':u('Zuid-Georgi\u00eb en Zuidelijke Sandwicheilanden'),'nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':u('Ge\u00f3rgia do Sul e Ilhas Sandwich do Sul'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':'*aa','rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0408\u0443\u0436\u043d\u0430 \u040f\u043e\u0440\u045f\u0438\u0458\u0430 \u0438 \u0408\u0443\u0436\u043d\u0430 \u0421\u0435\u043d\u0434\u0432\u0438\u0447 \u041e\u0441\u0442\u0440\u0432\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':u('Sydgeorgien och Sydsandwich\u00f6arna'),'sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':'*aa','ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'*aa','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u5357\u4e54\u6cbb\u4e9a\u5c9b\u548c\u5357\u6851\u5fb7\u97e6\u5947\u5c9b'),'zu':'*aa',}, 'GT': 
{'aa':'Guatemala','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0393\u03bf\u03c5\u03b1\u03c4\u03b5\u03bc\u03ac\u03bb\u03b1'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':'Guatamala','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30b0\u30a2\u30c6\u30de\u30e9'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uacfc\ud14c\ub9d0\ub77c'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'Gwatemala','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*mt','ps':'*aa','pt':'*aa','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0413\u0432\u0430\u0442\u0435\u043c\u0430\u043b\u0430'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':'*ru','ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e01\u0e31\u0e27\u0e4
0\u0e15\u0e21\u0e32\u0e25\u0e32'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('Goa-t\u00ea-ma-la'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u5371\u5730\u9a6c\u62c9'),'zu':'*aa',}, 'GU': {'aa':'Guam','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0393\u03ba\u03bf\u03c5\u03ac\u03bc'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':'*aa','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30b0\u30a2\u30e0'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uad0c'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'Gwam','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'*aa','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':'*aa','rw':'*aa','sa':'*aa','sc':'*aa','sd':
'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0413\u0443\u0430\u043c'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':'*aa','ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'*aa','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u5173\u5c9b'),'zu':'*aa',}, 'GW': {'aa':'Guinea-Bissau','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'Guinea Bissau','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0393\u03bf\u03c5\u03b9\u03bd\u03ad\u03b1-\u039c\u03c0\u03b9\u03c3\u03ac\u03bf\u03c5'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':u('Guin\u00e9e-Bissau'),'fy':'*aa','ga':'An 
Ghuine-Bhissau','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*ca','iu':'*aa','iw':'*aa','ja':u('\u30ae\u30cb\u30a2\u30d3\u30b5\u30a6'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uae30\ub124\ube44\uc3d8'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*ca','mt':'Ginea-Bissaw','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Guinee-Bissau','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'Gwinea Bissau','ps':'*aa','pt':u('Guin\u00e9 Bissau'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0413\u0432\u0438\u043d\u0435\u044f-\u0411\u0438\u0441\u0430\u0443'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0413\u0432\u0438\u043d\u0435\u0458\u0430-\u0411\u0438\u0441\u0430\u043e'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e01\u0e34\u0e27\u0e19\u0e35-\u0e1a\u0e34\u0e2a\u0e42\u0e0b'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('Ghi-n\u00ea B\u00edt-xao'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u51e0\u5185\u4e9a\u6bd4\u7ecd\u5171\u548c\u56fd'),'zu':'*aa',}, 'GY': 
{'aa':'Guyana','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0393\u03bf\u03c5\u03b9\u03ac\u03bd\u03b1'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':u('An Ghu\u00e1in'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30ac\u30a4\u30a2\u30ca'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uac00\uc774\uc544\ub098'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'Gujana','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*mt','ps':'*aa','pt':'Guiana','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0413\u0430\u0439\u0430\u043d\u0430'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0413\u0432\u0430\u0458\u0430\u043d\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e01\u0e39\u0e22
\u0e32\u0e19\u0e48\u0e32'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'Guy-a-na','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u572d\u4e9a\u90a3'),'zu':'*aa',}, 'HK': {'aa':'Hong Kong','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Hongkong','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u03a7\u03bf\u03bd\u03b3\u03ba \u039a\u03bf\u03bd\u03b3\u03ba, \u0395\u03b9\u03b4\u03b9\u03ba\u03ae \u0394\u03b9\u03bf\u03b9\u03ba\u03b7\u03c4\u03b9\u03ba\u03ae \u03a0\u03b5\u03c1\u03b9\u03c6\u03ad\u03c1\u03b5\u03b9\u03b1 \u03c4\u03b7\u03c2 \u039a\u03af\u03bd\u03b1\u03c2'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Hong-Kong','fy':'*aa','ga':'*aa','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Hong Kong S.A.R., Cina','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u9999\u6e2f'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ud64d\ucf69'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'Hong Kong S.A.R., China','mt':u('\u0126ong Kong S.A.R., 
\u010aina'),'my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Hongkong SAR van China','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':u('Hong Kong, Regi\u00e3o Admin. Especial da China'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0413\u043e\u043d\u043a\u043e\u043d\u0433'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0425\u043e\u043d\u0433 \u041a\u043e\u043d\u0433 (\u0421. \u0410. \u0420. \u041a\u0438\u043d\u0430)'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*de','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e2e\u0e48\u0e2d\u0e07\u0e01\u0e07'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'*aa','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':'*ja','zu':'*aa',}, 'HM': {'aa':'Heard Island And McDonald Islands','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Heard- und McDonald-Inseln','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039d\u03ae\u03c3\u03bf\u03b9 \u03a7\u03b5\u03c1\u03bd\u03c4 \u03ba\u03b1\u03b9 \u039c\u03b1\u03ba\u03bd\u03c4\u03cc\u03bd\u03b1\u03bb\u03bd\u03c4'),'en':'*aa','eo':'*aa','es':'Islas Heard y McDonald','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':u('\u00cele Heard et \u00eeles McDonald'),'fy':'*aa','ga':u('Oile\u00e1n Heard agus Oile\u00e1in 
McDonald'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Pulau Heard dan Kepulauan McDonald','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'Isole Heard e McDonald','iu':'*aa','iw':'*aa','ja':u('\u30cf\u30fc\u30c9\u30fb\u30de\u30af\u30c9\u30ca\u30eb\u30c9\u8af8\u5cf6'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ud5c8\ub4dc \uc12c \ubc0f \ub9e5\ub3c4\ub110\ub4dc \uc81c\ub3c4'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'Heard Island and McDonald Islands','mt':'*ms','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Heard- en McDonaldeilanden','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'Ilha Heard e Ilhas McDonald','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':'*aa','rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0425\u0435\u0440\u0434 \u0438 \u041c\u0435\u043a\u0434\u043e\u043d\u0430\u043b\u0434 \u041e\u0441\u0442\u0440\u0432\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':u('Heard\u00f6ch 
McDonald\u00f6arna'),'sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':'*aa','ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'*aa','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u8d6b\u5fb7\u548c\u9ea6\u514b\u5510\u7eb3\u7fa4\u5c9b'),'zu':'*aa',}, 'HN': {'aa':'Honduras','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'Hondures','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039f\u03bd\u03b4\u03bf\u03cd\u03c1\u03b1'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':u('Hond\u00faras'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30db\u30f3\u30b8\u30e5\u30e9\u30b9'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc628\ub450\ub77c\uc2a4'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':u('\u0126onduras'),'my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'
*aa','pl':'*aa','ps':'*aa','pt':'*aa','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0413\u043e\u043d\u0434\u0443\u0440\u0430\u0441'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0425\u043e\u043d\u0434\u0443\u0440\u0430\u0441'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e2e\u0e2d\u0e19\u0e14\u0e39\u0e23\u0e31\u0e2a'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('H\u00f4n-\u0111u-r\u00e1t'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u6d2a\u90fd\u62c9\u65af'),'zu':'*aa',}, 'HR': {'aa':'Croatia','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':u('Cro\u00e0cia'),'ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Kroatien','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039a\u03c1\u03bf\u03b1\u03c4\u03af\u03b1'),'en':'*aa','eo':'*aa','es':'Croacia','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Croatie','fy':'*aa','ga':u('An 
Chr\u00f3it'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'Hrvatska','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Kroasia','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'Croazia','iu':'*aa','iw':'*aa','ja':u('\u30af\u30ed\u30a2\u30c1\u30a2'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ud06c\ub85c\uc544\ud2f0\uc544'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'Kroazja','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':u('Kroati\u00eb'),'nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'Chorwacja','ps':'*aa','pt':u('Cro\u00e1cia'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0425\u043e\u0440\u0432\u0430\u0442\u0438\u044f'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0425\u0440\u0432\u0430\u0442\u0441\u043a\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*de','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e42\u0e04\u0e23\u0e40\u0e2d\u0e40\u0e0a\u0e35\u0e22'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('Cr\u00f4-a-ti-a'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u514b\u7f57\u5730\u4e9a'),'zu':'*aa',}, 'HT': 
{'aa':'Haiti','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':u('Hait\u00ed'),'ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0391\u03ca\u03c4\u03ae'),'en':'*aa','eo':'*aa','es':'*ca','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':u('Ha\u00efti'),'fy':'*aa','ga':u('H\u00e1it\u00ed'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30cf\u30a4\u30c1'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ud558\uc774\ud2f0'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':u('\u0126aiti'),'my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*fr','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'*aa','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0413\u0430\u0438\u0442\u0438'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0425\u0430\u0438\u0442\u0438'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e44\u0e2e\u0e15\u0e35'),'ti':'*aa','tk'
:'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'Ha-i-ti','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u6d77\u5730'),'zu':'*aa',}, 'HU': {'aa':'Hungary','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'Hongria','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Ungarn','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039f\u03c5\u03b3\u03b3\u03b1\u03c1\u03af\u03b1'),'en':'*aa','eo':'*aa','es':u('Hungr\u00eda'),'et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Hongrie','fy':'*aa','ga':u('An Ung\u00e1ir'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':u('Magyarorsz\u00e1g'),'hy':'*aa','hz':'*aa','ia':'*aa','id':'Hungaria','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'Ungheria','iu':'*aa','iw':'*aa','ja':u('\u30cf\u30f3\u30ac\u30ea\u30fc'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ud5dd\uac00\ub9ac'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'Hungari','mt':'Ungerija','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Hongarije','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':u('W\u0119gry'),'ps':'*aa','pt':'Hungria','qu':'*aa','rm':'*aa','rn':'*aa','ro
':'*aa','ru':u('\u0412\u0435\u043d\u0433\u0440\u0438\u044f'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u041c\u0430\u0452\u0430\u0440\u0441\u043a\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'Ungern','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e2e\u0e31\u0e07\u0e01\u0e32\u0e23\u0e35'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'Hung-ga-ri','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u5308\u7259\u5229'),'zu':'*aa',}, 'ID': {'aa':'Indonesia','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':u('Indon\u00e8sia'),'ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Indonesien','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0399\u03bd\u03b4\u03bf\u03bd\u03b7\u03c3\u03af\u03b1'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':u('Indon\u00e9sie'),'fy':'*aa','ga':u('An 
Indin\u00e9is'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30a4\u30f3\u30c9\u30cd\u30b7\u30a2'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc778\ub3c4\ub124\uc2dc\uc544'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':u('Indone\u017cja'),'my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':u('Indonesi\u00eb'),'nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'Indonezja','ps':'*aa','pt':u('Indon\u00e9sia'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0418\u043d\u0434\u043e\u043d\u0435\u0437\u0438\u044f'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0418\u043d\u0434\u043e\u043d\u0435\u0437\u0438\u0458\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*de','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e2d\u0e34\u0e19\u0e42\u0e14\u0e19\u0e35\u0e40\u0e0b\u0e35\u0e22'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('Nam D\u01b0\u01a1ng'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u5370\u5ea6\u5c3c\u897f\u4e9a'),'zu':'*aa',}, 'IE': 
{'aa':'Ireland','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'Irlanda','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Irland','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0399\u03c1\u03bb\u03b1\u03bd\u03b4\u03af\u03b1'),'en':'*aa','eo':'*aa','es':'*ca','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'Irlanti','fj':'*aa','fo':'*aa','fr':'Irlande','fy':'*aa','ga':u('\u00c9ire'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Irlandia','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'*ca','iu':'*aa','iw':'*aa','ja':u('\u30a2\u30a4\u30eb\u30e9\u30f3\u30c9'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc544\uc77c\ub79c\ub4dc'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*ca','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Ierland','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*id','ps':'*aa','pt':'*ca','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0418\u0440\u043b\u0430\u043d\u0434\u0438\u044f'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0418\u0440\u0441\u043a\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*de','sw':'*aa','ta':'*aa','te':'*aa','tg':
'*aa','th':u('\u0e44\u0e2d\u0e23\u0e4c\u0e41\u0e25\u0e19\u0e14\u0e4c'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'Ai-len','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u7231\u5c14\u5170'),'zu':'*aa',}, 'IL': {'aa':'Israel','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0399\u03c3\u03c1\u03b1\u03ae\u03bb'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':u('Isra\u00ebl'),'fy':'*aa','ga':'Iosrael','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':u('\u05d9\u05e9\u05e8\u05d0\u05dc'),'hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'Israele','iu':'*aa','iw':'*he','ja':u('\u30a4\u30b9\u30e9\u30a8\u30eb'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc774\uc2a4\ub77c\uc5d8'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':u('I\u017crael'),'my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*fr','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'Izrael','ps'
:'*aa','pt':'*aa','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0418\u0437\u0440\u0430\u0438\u043b\u044c'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0418\u0437\u0440\u0430\u0435\u043b'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e2d\u0e34\u0e2a\u0e23\u0e32\u0e40\u0e2d\u0e25'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'I-xra-en','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u4ee5\u8272\u5217'),'zu':'*aa',}, 'IM': {'aa':'Isle Of Man','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Isle of Man','dv':'*aa','dz':'*aa','ee':'*aa','el':'*aa','en':'*aa','eo':'*aa','es':'Isla de Man','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Ile de Man','fy':'*aa','ga':'*aa','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'Isola di Man','iu':'*aa','iw':'*aa','ja':u('\u30de\u30f3\u5cf6'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ub9e8 
\uc12c'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*aa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'Ilha de Man','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':'*aa','rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':'*aa','ss':'*aa','st':'*aa','su':'*aa','sv':'*de','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':'*aa','ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'*aa','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u66fc\u5c9b'),'zu':'*aa',}, 'IN': {'aa':'India','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':u('\u00cdndia'),'ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Indien','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0399\u03bd\u03b4\u03af\u03b1'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Inde','fy':'*aa','ga':'An 
India','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':u('\u092d\u093e\u0930\u0924'),'ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30a4\u30f3\u30c9'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc778\ub3c4'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'Hindia','mt':'Indja','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'Indie','ps':'*aa','pt':'*ca','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0418\u043d\u0434\u0438\u044f'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0418\u043d\u0434\u0438\u0458\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*de','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e2d\u0e34\u0e19\u0e40\u0e14\u0e35\u0e22'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('\u1ea4n \u0110\u1ed9'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u5370\u5ea6'),'zu':'*aa',}, 'IO': {'aa':'British Indian Ocean 
Territory','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Britische Territorien im Indischen Ozean','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0392\u03c1\u03b5\u03c4\u03b1\u03bd\u03b9\u03ba\u03ac \u0388\u03b4\u03ac\u03c6\u03b7 \u0399\u03bd\u03b4\u03b9\u03ba\u03bf\u03cd \u03a9\u03ba\u03b5\u03b1\u03bd\u03bf\u03cd'),'en':'*aa','eo':'*aa','es':u('Territorio Brit\u00e1nico del Oc\u00e9ano \u00cdndico'),'et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':u('Territoires britanniques de l\'Oc\u00e9an Indien'),'fy':'*aa','ga':u('Cr\u00edocha Briotanacha an Aig\u00e9in Indiagh'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'Territorio britannico dell\'Oceano Indiano','iu':'*aa','iw':'*aa','ja':u('\u82f1\u9818\u30a4\u30f3\u30c9\u6d0b\u5730\u57df'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc601\uc778\ub3c4 \uc81c\ub3c4'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*aa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Britse Gebieden in de Indische 
Oceaan','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':u('Territ\u00f3rio Brit\u00e2nico do Oceano \u00cdndico'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':'*aa','rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':'*aa','ss':'*aa','st':'*aa','su':'*aa','sv':'Brittiska territoriet i Indiska Oceanen','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':'*aa','ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'*aa','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u82f1\u5c5e\u5370\u5ea6\u6d0b\u9886\u5730'),'zu':'*aa',}, 'IQ': {'aa':'Iraq','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':u('\u0627\u0644\u0639\u0631\u0627\u0642'),'as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Irak','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0399\u03c1\u03ac\u03ba'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*de','fy':'*aa','ga':u('An 
Iar\u00e1ic'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30a4\u30e9\u30af'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc774\ub77c\ud06c'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*aa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*de','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*de','ps':'*aa','pt':'Iraque','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0418\u0440\u0430\u043a'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':'*ru','ss':'*aa','st':'*aa','su':'*aa','sv':'*de','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e2d\u0e34\u0e23\u0e31\u0e01'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('I-r\u1eafc'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u4f0a\u62c9\u514b'),'zu':'*aa',}, 'IR': 
{'aa':'Iran','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0399\u03c1\u03ac\u03bd, \u0399\u03c3\u03bb\u03b1\u03bc\u03b9\u03ba\u03ae \u0394\u03b7\u03bc\u03bf\u03ba\u03c1\u03b1\u03c4\u03af\u03b1 \u03c4\u03bf\u03c5'),'en':'*aa','eo':'*aa','es':u('Ir\u00e1n'),'et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':u('An Iar\u00e1in'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30a4\u30e9\u30f3'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc774\ub780'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*aa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':u('Ir\u00e3'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0418\u0440\u0430\u043d'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':'*ru','ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa'
,'te':'*aa','tg':'*aa','th':u('\u0e2d\u0e34\u0e2b\u0e23\u0e48\u0e32\u0e19'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'I-ran','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u4f0a\u6717'),'zu':'*aa',}, 'IS': {'aa':'Iceland','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':u('Isl\u00e0ndia'),'ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Island','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0399\u03c3\u03bb\u03b1\u03bd\u03b4\u03af\u03b1'),'en':'*aa','eo':'*aa','es':'Islandia','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Islande','fy':'*aa','ga':u('An 
\u00cdoslainn'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*es','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*es','io':'*aa','is':u('\u00cdsland'),'it':'Islanda','iu':'*aa','iw':'*aa','ja':u('\u30a2\u30a4\u30b9\u30e9\u30f3\u30c9'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc544\uc774\uc2ac\ub780\ub4dc'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*it','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'IJsland','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*es','ps':'*aa','pt':u('Isl\u00e2ndia'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0418\u0441\u043b\u0430\u043d\u0434\u0438\u044f'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0418\u0441\u043b\u0430\u043d\u0434'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*de','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e44\u0e2d\u0e0b\u0e41\u0e25\u0e19\u0e14\u0e4c'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('Ai-x\u01a1-len'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u51b0\u5c9b'),'zu':'*aa',}, 'IT': 
{'aa':'Italy','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':u('It\u00e0lia'),'ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Italien','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0399\u03c4\u03b1\u03bb\u03af\u03b1'),'en':'*aa','eo':'*aa','es':'Italia','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*es','fj':'*aa','fo':'*aa','fr':'Italie','fy':'*aa','ga':u('An Iod\u00e1il'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Itali','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'*es','iu':'*aa','iw':'*aa','ja':u('\u30a4\u30bf\u30ea\u30a2'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc774\ud0c8\ub9ac\uc544'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*id','mt':'Italja','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':u('Itali\u00eb'),'nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':u('W\u0142ochy'),'ps':'*aa','pt':u('It\u00e1lia'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0418\u0442\u0430\u043b\u0438\u044f'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0418\u0442\u0430\u043b\u0438\u0458\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*de','sw':'*aa','ta':'*aa','t
e':'*aa','tg':'*aa','th':u('\u0e2d\u0e34\u0e15\u0e32\u0e25\u0e35'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('\u00dd'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u610f\u5927\u5229'),'zu':'*aa',}, 'JE': {'aa':'Jersey','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':'*aa','en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':'*aa','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30b8\u30e3\u30fc\u30b8\u30fc'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc800\uc9c0'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*aa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'*aa','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':'*aa','rw':'*aa','sa':'*aa','sc':'*aa','sd':'
*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':'*aa','ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':'*aa','ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'*aa','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u6cfd\u897f\u5c9b'),'zu':'*aa',}, 'JM': {'aa':'Jamaica','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Jamaika','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u03a4\u03b6\u03b1\u03bc\u03ac\u03b9\u03ba\u03b1'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':u('Jama\u00efque'),'fy':'*aa','ga':u('Iam\u00e1ice'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*de','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*de','io':'*aa','is':'*aa','it':'Giamaica','iu':'*aa','iw':'*aa','ja':u('\u30b8\u30e3\u30de\u30a4\u30ab'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc790\uba54\uc774\uce74'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*de','mt':u('\u0120amajka'),'my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa',
'no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'Jamajka','ps':'*aa','pt':'*aa','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u042f\u043c\u0430\u0439\u043a\u0430'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0408\u0430\u043c\u0430\u0458\u043a\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e08\u0e32\u0e44\u0e21\u0e01\u0e49\u0e32'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'Ha-mai-ca','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u7259\u4e70\u52a0'),'zu':'*aa',}, 'JO': {'aa':'Jordan','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':u('\u0627\u0644\u0623\u0631\u062f\u0646'),'as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':u('Jord\u00e0nia'),'ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Jordanien','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0399\u03bf\u03c1\u03b4\u03b1\u03bd\u03af\u03b1'),'en':'*aa','eo':'*aa','es':'Jordania','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Jordanie','fy':'*aa','ga':u('An 
Iord\u00e1in'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Yordania','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'Giordania','iu':'*aa','iw':'*aa','ja':u('\u30e8\u30eb\u30c0\u30f3'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc694\ub974\ub2e8'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':u('\u0120ordan'),'my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':u('Jordani\u00eb'),'nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':u('Jord\u00e2nia'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0418\u043e\u0440\u0434\u0430\u043d\u0438\u044f'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0408\u043e\u0440\u0434\u0430\u043d'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*de','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e08\u0e2d\u0e23\u0e4c\u0e41\u0e14\u0e19'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('Gi\u00f3c-\u0111a-ni'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u7ea6\u65e6'),'zu':'*aa',}, 'JP': 
{'aa':'Japan','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':u('Jap\u00f3'),'ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0399\u03b1\u03c0\u03c9\u03bd\u03af\u03b1'),'en':'*aa','eo':'*aa','es':u('Jap\u00f3n'),'et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'Japani','fj':'*aa','fo':'*aa','fr':'Japon','fy':'*aa','ga':u('An tSeap\u00e1in'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Jepang','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'Giappone','iu':'*aa','iw':'*aa','ja':u('\u65e5\u672c'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc77c\ubcf8'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'Jepun','mt':u('\u0120appun'),'my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'Japonia','ps':'*aa','pt':u('Jap\u00e3o'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u042f\u043f\u043e\u043d\u0438\u044f'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0408\u0430\u043f\u0430\u043d'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('
\u0e0d\u0e35\u0e48\u0e1b\u0e38\u0e48\u0e19'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('Nh\u1eadt B\u1ea3n'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':'*ja','zu':'*aa',}, 'KE': {'aa':'Kenya','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Kenia','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039a\u03ad\u03bd\u03c5\u03b1'),'en':'*aa','eo':'*aa','es':'*de','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':u('An Ch\u00e9inia'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30b1\u30cb\u30a2'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ucf00\ub0d0'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'Kenja','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*de','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*de','ps':'*aa','pt':u('Qu\u00eania'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u041a\u0435\u043d\u043
8\u044f'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u041a\u0435\u043d\u0438\u0458\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e40\u0e04\u0e19\u0e22\u0e48\u0e32'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('K\u00ea-ni-a'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u80af\u5c3c\u4e9a'),'zu':'*aa',}, 'KG': {'aa':'Kyrgyzstan','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'Kirgizistan','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Kirgistan','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039a\u03b9\u03c1\u03b3\u03b9\u03b6\u03af\u03b1'),'en':'*aa','eo':'*aa','es':u('Kirguizist\u00e1n'),'et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':u('An 
Chirgeast\u00e1in'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'Kirghizistan','iu':'*aa','iw':'*aa','ja':u('\u30ad\u30eb\u30ae\u30b9\u30bf\u30f3'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ud0a4\ub974\uae30\uc2a4\uc2a4\ud0c4'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*de','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':u('Kirgizi\u00eb'),'nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*de','ps':'*aa','pt':u('Quirguist\u00e3o'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u041a\u0438\u0440\u0433\u0438\u0437\u0441\u0442\u0430\u043d'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':'*ru','ss':'*aa','st':'*aa','su':'*aa','sv':'Kirgisistan','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e40\u0e04\u0e2d\u0e23\u0e4c\u0e01\u0e34\u0e2a\u0e16\u0e32\u0e19'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('C\u01b0-r\u01a1-g\u01b0-xtan'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u5409\u5c14\u5409\u514b\u65af\u5766'),'zu':'*aa',}, 'KH': 
{'aa':'Cambodia','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'Cambodja','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Kambodscha','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039a\u03b1\u03bc\u03c0\u03cc\u03c4\u03b6\u03b7'),'en':'*aa','eo':'*aa','es':'Camboya','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Cambodge','fy':'*aa','ga':u('An Chamb\u00f3id'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Kamboja','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'Cambogia','iu':'*aa','iw':'*aa','ja':u('\u30ab\u30f3\u30dc\u30b8\u30a2'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uce84\ubcf4\ub514\uc544'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'Kemboja','mt':'Kambodja','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*ca','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':u('Kambod\u017ca'),'ps':'*aa','pt':'Camboja','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u041a\u0430\u043c\u0431\u043e\u0434\u0436\u0430'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u041a\u0430\u043c\u0431\u043e\u045f\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv
':'*mt','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e01\u0e31\u0e21\u0e1e\u0e39\u0e0a\u0e32'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'Campuchia','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u67ec\u57d4\u5be8'),'zu':'*aa',}, 'KI': {'aa':'Kiribati','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039a\u03b9\u03c1\u03b9\u03bc\u03c0\u03ac\u03c4\u03b9'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':u('Cireabait\u00ed'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30ad\u30ea\u30d0\u30b9'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ud0a4\ub9ac\ubc14\uc2dc'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*aa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','
ps':'*aa','pt':'Quiribati','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u041a\u0438\u0440\u0438\u0431\u0430\u0442\u0438'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':'*ru','ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e04\u0e34\u0e23\u0e35\u0e1a\u0e32\u0e15\u0e34'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'Ki-ri-ba-ti','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u57fa\u91cc\u5df4\u65af'),'zu':'*aa',}, 'KM': {'aa':'Comoros','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'Comores','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Komoren','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039a\u03bf\u03bc\u03cc\u03c1\u03b5\u03c2'),'en':'*aa','eo':'*aa','es':'*ca','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*ca','fy':'*aa','ga':u('Oile\u00e1in 
Chom\u00f3ra'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Komoros','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'Comore','iu':'*aa','iw':'*aa','ja':u('\u30b3\u30e2\u30ed'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ucf54\ubaa8\ub974'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*id','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Comoren','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'Komory','ps':'*aa','pt':'*ca','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u041a\u043e\u043c\u043e\u0440\u043e\u0441'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u041a\u043e\u043c\u043e\u0440\u0441\u043a\u0430 \u041e\u0441\u0442\u0440\u0432\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'Komorerna','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e42\u0e04\u0e42\u0e21\u0e23\u0e2d\u0e2a'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('C\u00f4-m\u00f4'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u79d1\u6469\u7f57'),'zu':'*aa',}, 'KN': {'aa':'Saint Kitts And 
Nevis','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Saint Kitts und Nevis','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u03a3\u03b1\u03b9\u03bd\u03c4 \u039a\u03b9\u03c4\u03c2 \u03ba\u03b1\u03b9 \u039d\u03ad\u03b2\u03b9\u03c2'),'en':'*aa','eo':'*aa','es':u('San Crist\u00f3bal y Nieves'),'et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':u('Saint-Christophe-et-Ni\u00e9v\u00e8s'),'fy':'*aa','ga':'Saint Kitts agus Nevis','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Saint Kitts dan Nevis','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'Saint Kitts e Nevis','iu':'*aa','iw':'*aa','ja':u('\u30bb\u30f3\u30c8\u30af\u30ea\u30b9\u30c8\u30d5\u30a1\u30fc\u30fb\u30cd\u30a4\u30d3\u30b9'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc138\uc778\ud2b8 \ud06c\ub9ac\uc2a4\ud1a0\ud37c \ub2c8\ube44\uc2a4'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*id','mt':'Saint Kitts and Nevis','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Saint Kitts en Nevis','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':u('S\u00e3o Cristov\u00e3o e 
Nevis'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':'*aa','rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0421\u0435\u043d\u0442 \u041a\u0438\u0442\u0441 \u0438 \u041d\u0435\u0432\u0438\u0441'),'ss':'*aa','st':'*aa','su':'*aa','sv':'Saint Kitts och Nevis','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':'*aa','ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('Xan-k\u00edt v\u00e0 N\u00ea-vi'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u5723\u57fa\u8328\u548c\u5c3c\u7ef4\u65af'),'zu':'*aa',}, 'KP': {'aa':'North Korea','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'Corea del Nord','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Nordkorea','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039a\u03bf\u03c1\u03ad\u03b1, \u0392\u03cc\u03c1\u03b5\u03b9\u03b1'),'en':'*aa','eo':'*aa','es':'Corea del Norte','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':u('Cor\u00e9e du Nord'),'fy':'*aa','ga':u('An Ch\u00f3ir\u00e9 Thuaidh'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Korea 
Utara','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'*ca','iu':'*aa','iw':'*aa','ja':u('\u671d\u9bae\u6c11\u4e3b\u4e3b\u7fa9\u4eba\u6c11\u5171\u548c\u56fd'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ubd81\ud55c'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'Utara Korea','mt':u('Koreja ta\u2019 Fuq'),'my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Noord-Korea','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':u('Korea P\u00f3\u0142nocna'),'ps':'*aa','pt':'Coreia do Norte','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0421\u0435\u0432\u0435\u0440\u043d\u0430\u044f \u041a\u043e\u0440\u0435\u044f'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0421\u0435\u0432\u0435\u0440\u043d\u0430 \u041a\u043e\u0440\u0435\u0458\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*de','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e40\u0e01\u0e32\u0e2b\u0e25\u0e35\u0e40\u0e2b\u0e19\u0e37\u0e2d'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('B\u1eafc Tri\u1ec1u Ti\u00ean'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u671d\u9c9c'),'zu':'*aa',}, 'KR': {'aa':'South 
Korea','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'Corea del Sud','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':u('S\u00fcdkorea'),'dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039a\u03bf\u03c1\u03ad\u03b1, \u039d\u03cc\u03c4\u03b9\u03b1'),'en':'*aa','eo':'*aa','es':'Corea del Sur','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'Korea','fj':'*aa','fo':'*aa','fr':u('Cor\u00e9e du Sud'),'fy':'*aa','ga':u('An Ch\u00f3ir\u00e9 Theas'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Korea Selatan','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'*ca','iu':'*aa','iw':'*aa','ja':u('\u5927\u97d3\u6c11\u56fd'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ub300\ud55c\ubbfc\uad6d'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'Selatan Korea','mt':u('Koreja t\u2019Isfel'),'my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Zuid-Korea','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':u('Korea Po\u0142udniowa'),'ps':'*aa','pt':'Coreia do Sul','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u042e\u0436\u043d\u0430\u044f 
\u041a\u043e\u0440\u0435\u044f'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0408\u0443\u0436\u043d\u0430 \u041a\u043e\u0440\u0435\u0458\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'Sydkorea','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e40\u0e01\u0e32\u0e2b\u0e25\u0e35\u0e43\u0e15\u0e49'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('H\u00e0n Qu\u1ed1c'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u97e9\u56fd'),'zu':'*aa',}, 'KW': {'aa':'Kuwait','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':u('\u0627\u0644\u0643\u0648\u064a\u062a'),'as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039a\u03bf\u03c5\u03b2\u03ad\u03b9\u03c4'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Koweit','fy':'*aa','ga':u('Cu\u00e1it'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30af\u30a6\u30a7\u30fc\u30c8'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ucfe0\uc6e8\uc774\ud2b8'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv
':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'Kuwajt','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Koeweit','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'Kuwejt','ps':'*aa','pt':'*aa','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u041a\u0443\u0432\u0435\u0439\u0442'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u041a\u0443\u0432\u0430\u0458\u0442'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e04\u0e39\u0e40\u0e27\u0e15'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('C\u00f4-o\u00e9t'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u79d1\u5a01\u7279'),'zu':'*aa',}, 'KY': {'aa':'Cayman Islands','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Kaiman-Inseln','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039d\u03ae\u03c3\u03bf\u03b9 \u039a\u03ad\u03b9\u03bc\u03b1\u03bd'),'en':'*aa','eo':'*aa','es':u('Islas Caim\u00e1n'),'et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':u('\u00celes Ca\u00efmans'),'fy':'*aa','ga':u('Oile\u00e1in Cayman'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Kepulauan 
Kayman','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'Isole Cayman','iu':'*aa','iw':'*aa','ja':u('\u30b1\u30a4\u30de\u30f3\u8af8\u5cf6'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ucf00\uc774\ub9e8 \uc81c\ub3c4'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*aa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Caymaneilanden','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'Ilhas Caiman','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':'*aa','rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u041a\u0430\u0458\u043c\u0430\u043d\u0441\u043a\u0430 \u041e\u0441\u0442\u0440\u0432\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':u('Cayman\u00f6arna'),'sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':'*aa','ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'*aa','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u5f00\u66fc\u7fa4\u5c9b'),'zu':'*aa',}, 'KZ': 
{'aa':'Kazakhstan','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Kasachstan','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039a\u03b1\u03b6\u03b1\u03ba\u03c3\u03c4\u03ac\u03bd'),'en':'*aa','eo':'*aa','es':u('Kazajst\u00e1n'),'et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':u('An Chasacst\u00e1in'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30ab\u30b6\u30d5\u30b9\u30bf\u30f3'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uce74\uc790\ud750\uc2a4\ud0c4'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':u('Ka\u017cakstan'),'my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Kazachstan','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*nl','ps':'*aa','pt':u('Casaquist\u00e3o'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u041a\u0430\u0437\u0430\u0445\u0441\u0442\u0430\u043d'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':'*ru','ss':'*aa','st':'*aa','su':'*aa','sv':'Kazaksta
n','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e04\u0e32\u0e0b\u0e31\u0e04\u0e2a\u0e16\u0e32\u0e19'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('Ka-d\u1eafc-xtan'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u54c8\u8428\u514b\u65af\u5766'),'zu':'*aa',}, 'LA': {'aa':'Laos','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039b\u03b1\u03c4\u03b9\u03bd\u03b9\u03ba\u03ae \u0391\u03bc\u03b5\u03c1\u03b9\u03ba\u03ae'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':'*aa','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30e9\u30aa\u30b9'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ub77c\uc624\uc2a4'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*aa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa'
,'pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':u('Rep\u00fablica Popular Democr\u00e1tica do Laos'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u041b\u0430\u043e\u0441'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':'*ru','ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e25\u0e32\u0e27'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('L\u00e0o'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u8001\u631d'),'zu':'*aa',}, 'LB': {'aa':'Lebanon','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':u('\u0644\u0628\u0646\u0627\u0646'),'as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':u('L\u00edban'),'ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Libanon','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039b\u03af\u03b2\u03b1\u03bd\u03bf\u03c2'),'en':'*aa','eo':'*aa','es':u('L\u00edbano'),'et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Liban','fy':'*aa','ga':u('An 
Liob\u00e1in'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'Libano','iu':'*aa','iw':'*aa','ja':u('\u30ec\u30d0\u30ce\u30f3'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ub808\ubc14\ub17c'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'Lubnan','mt':'Libanu','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*de','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*fr','ps':'*aa','pt':'*es','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u041b\u0438\u0432\u0430\u043d'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u041b\u0438\u0431\u0430\u043d'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*de','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e40\u0e25\u0e1a\u0e32\u0e19\u0e2d\u0e19'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('Li-b\u0103ng'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u9ece\u5df4\u5ae9'),'zu':'*aa',}, 'LC': {'aa':'Saint 
Lucia','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'St. Lucia','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0391\u03b3\u03af\u03b1 \u039b\u03bf\u03c5\u03ba\u03af\u03b1'),'en':'*aa','eo':'*aa','es':'Santa Lucia','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Sainte-Lucie','fy':'*aa','ga':'*aa','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Santa Lusia','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'Sainte Lucia','iu':'*aa','iw':'*aa','ja':u('\u30bb\u30f3\u30c8\u30eb\u30b7\u30a2'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc138\uc778\ud2b8 \ub8e8\uc2dc\uc544'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':u('Santa Lu\u010bija'),'my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':u('Santa L\u00facia'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':'*aa','rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0421\u0435\u043d\u0442 
\u041b\u0443\u0446\u0438\u0458\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':'*aa','ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'Xan Lu-xi','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u5723\u5362\u897f\u4e9a'),'zu':'*aa',}, 'LI': {'aa':'Liechtenstein','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039b\u03b9\u03c7\u03c4\u03b5\u03bd\u03c3\u03c4\u03ac\u03b9\u03bd'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':u('Lichtinst\u00e9in'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30ea\u30d2\u30c6\u30f3\u30b7\u30e5\u30bf\u30a4\u30f3'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ub9ac\ud788\ud150\uc288\ud0c0\uc778'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*aa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa',
'ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'*aa','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u041b\u0438\u0445\u0442\u0435\u043d\u0448\u0442\u0435\u0439\u043d'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u041b\u0438\u0445\u0442\u0435\u043d\u0448\u0442\u0430\u0458\u043d'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e44\u0e25\u0e40\u0e17\u0e19\u0e2a\u0e44\u0e15\u0e19\u0e4c'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('Lich-ten-xt\u00ean'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u5217\u652f\u6566\u58eb\u767b'),'zu':'*aa',}, 'LK': {'aa':'Sri Lanka','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u03a3\u03c1\u03b9 \u039b\u03ac\u03bd\u03ba\u03b1'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':u('Sr\u00ed 
Lanca'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30b9\u30ea\u30e9\u30f3\u30ab'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc2a4\ub9ac\ub791\uce74'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*aa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'*aa','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0428\u0440\u0438-\u041b\u0430\u043d\u043a\u0430'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0428\u0440\u0438 \u041b\u0430\u043d\u043a\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e28\u0e23\u0e35\u0e25\u0e31\u0e07\u0e01\u0e32'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'Xri Lan-ca','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u65af\u91cc\u5170\u5361'),'zu':'*aa',}, 'LR': 
{'aa':'Liberia','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':u('Lib\u00e8ria'),'ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039b\u03b9\u03b2\u03b5\u03c1\u03af\u03b1'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':u('An Lib\u00e9ir'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30ea\u30d9\u30ea\u30a2'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ub77c\uc774\ubca0\ub9ac\uc544'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'Liberja','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':u('Lib\u00e9ria'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u041b\u0438\u0431\u0435\u0440\u0438\u044f'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u041b\u0438\u0431\u0435\u0440\u0438\u0458\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*
aa','tg':'*aa','th':u('\u0e25\u0e34\u0e40\u0e1a\u0e2d\u0e23\u0e4c\u0e40\u0e25\u0e35\u0e22'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('Li-b\u00ea-ri-a'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u5229\u6bd4\u91cc\u4e9a'),'zu':'*aa',}, 'LS': {'aa':'Lesotho','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039b\u03b5\u03c3\u03cc\u03c4\u03bf'),'en':'*aa','eo':'*aa','es':'Lesoto','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':u('Leos\u00f3ta'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30ec\u30bd\u30c8'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ub808\uc18c\ud1a0'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*es','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*es','ps':'*aa','pt':'*es','qu':'*a
a','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u041b\u0435\u0441\u043e\u0442\u043e'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':'*ru','ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e40\u0e25\u0e42\u0e0b\u0e42\u0e17'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('L\u00ea-x\u00f4-th\u00f4'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u83b1\u7d22\u6258'),'zu':'*aa',}, 'LT': {'aa':'Lithuania','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':u('Litu\u00e0nia'),'ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Litauen','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039b\u03b9\u03b8\u03bf\u03c5\u03b1\u03bd\u03af\u03b1'),'en':'*aa','eo':'*aa','es':'Lituania','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Lithuanie','fy':'*aa','ga':u('An 
Liotu\u00e1in'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*es','iu':'*aa','iw':'*aa','ja':u('\u30ea\u30c8\u30a2\u30cb\u30a2'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ub9ac\ud22c\uc544\ub2c8\uc544'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'Lietuva','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'Litwanja','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Litouwen','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'Litwa','ps':'*aa','pt':u('Litu\u00e2nia'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u041b\u0438\u0442\u0432\u0430'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u041b\u0438\u0442\u0432\u0430\u043d\u0438\u0458\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*de','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e25\u0e34\u0e40\u0e17\u0e2d\u0e23\u0e4c\u0e40\u0e19\u0e35\u0e22'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'Li-tu-a-ni-a','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u7acb\u9676\u5b9b'),'zu':'*aa',}, 'LU': 
{'aa':'Luxembourg','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'Luxemburg','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*ca','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039b\u03bf\u03c5\u03be\u03b5\u03bc\u03b2\u03bf\u03cd\u03c1\u03b3\u03bf'),'en':'*aa','eo':'*aa','es':'Luxemburgo','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':'Lucsamburg','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'Lussemburgo','iu':'*aa','iw':'*aa','ja':u('\u30eb\u30af\u30bb\u30f3\u30d6\u30eb\u30af'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ub8e9\uc148\ubd80\ub974\ud06c'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'Luksembourg','mt':'Lussemburgu','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*ca','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'Luksemburg','ps':'*aa','pt':'*es','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u041b\u044e\u043a\u0441\u0435\u043c\u0431\u0443\u0440\u0433'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u041b\u0443\u043a\u0441\u0435\u043c\u0431\u04
43\u0440\u0433'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*ca','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e25\u0e31\u0e01\u0e0b\u0e4c\u0e40\u0e0b\u0e21\u0e40\u0e1a\u0e2d\u0e23\u0e4c\u0e01'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('L\u00fac-x\u0103m-bua'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u5362\u68ee\u5821'),'zu':'*aa',}, 'LV': {'aa':'Latvia','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':u('Let\u00f2nia'),'ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Lettland','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039b\u03b5\u03c4\u03bf\u03bd\u03af\u03b1'),'en':'*aa','eo':'*aa','es':'Letonia','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Lettonie','fy':'*aa','ga':'An 
Laitvia','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'Lettonia','iu':'*aa','iw':'*aa','ja':u('\u30e9\u30c8\u30d3\u30a2'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ub77c\ud2b8\ube44\uc544'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'Latvija','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'Latvja','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Letland','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':u('\u0141otwa'),'ps':'*aa','pt':u('Let\u00f4nia'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u041b\u0430\u0442\u0432\u0438\u044f'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u041b\u0435\u0442\u043e\u043d\u0438\u0458\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*de','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e25\u0e32\u0e15\u0e40\u0e27\u0e35\u0e22'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('L\u00e1t-vi-a'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u62c9\u8131\u7ef4\u4e9a'),'zu':'*aa',}, 'LY': 
{'aa':'Libya','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':u('\u0644\u064a\u0628\u064a\u0627'),'as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':u('L\u00edbia'),'ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Libyen','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039b\u03b9\u03b2\u03cd\u03b7'),'en':'*aa','eo':'*aa','es':'Libia','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Libye','fy':'*aa','ga':'An Libia','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*es','iu':'*aa','iw':'*aa','ja':u('\u30ea\u30d3\u30a2'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ub9ac\ube44\uc544'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'Libja','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':u('Libi\u00eb'),'nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*es','ps':'*aa','pt':'*ca','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u041b\u0438\u0432\u0438\u044f'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u041b\u0438\u0431\u0438\u0458\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*de','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e25\u0e3
4\u0e40\u0e1a\u0e35\u0e22'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'Li-bi','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u5229\u6bd4\u4e9a'),'zu':'*aa',}, 'MA': {'aa':'Morocco','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':u('\u0627\u0644\u0645\u063a\u0631\u0628'),'as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'Marroc','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Marokko','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039c\u03b1\u03c1\u03cc\u03ba\u03bf'),'en':'*aa','eo':'*aa','es':'Marruecos','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Maroc','fy':'*aa','ga':u('Marac\u00f3'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Maroko','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'Marocco','iu':'*aa','iw':'*aa','ja':u('\u30e2\u30ed\u30c3\u30b3'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ubaa8\ub85c\ucf54'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'Maghribi','mt':'Marokk','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*de','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*id','ps':'*aa','pt':'Marrocos','qu':'*aa','rm':'*
aa','rn':'*aa','ro':'*aa','ru':u('\u041c\u0430\u0440\u043e\u043a\u043a\u043e'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u041c\u0430\u0440\u043e\u043a\u043e'),'ss':'*aa','st':'*aa','su':'*aa','sv':'Marocko','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e42\u0e21\u0e23\u0e2d\u0e04\u0e42\u0e04'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('Ma-r\u1ed1c'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u6469\u6d1b\u54e5'),'zu':'*aa',}, 'MC': {'aa':'Monaco','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':u('M\u00f2naco'),'ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039c\u03bf\u03bd\u03b1\u03ba\u03cc'),'en':'*aa','eo':'*aa','es':u('M\u00f3naco'),'et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':u('Monac\u00f3'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30e2\u30ca\u30b3'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ubaa8\ub098\ucf54'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa
','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'Monako','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*mt','ps':'*aa','pt':u('M\u00f4naco'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u041c\u043e\u043d\u0430\u043a\u043e'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':'*ru','ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e42\u0e21\u0e19\u0e32\u0e42\u0e04'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('M\u00f4-na-c\u00f4'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u6469\u7eb3\u54e5'),'zu':'*aa',}, 'MD': {'aa':'Moldova','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':u('Mold\u00e0via'),'ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Moldau','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039c\u03bf\u03bb\u03b4\u03b1\u03b2\u03af\u03b1, \u0394\u03b7\u03bc\u03bf\u03ba\u03c1\u03b1\u03c4\u03af\u03b1 \u03c4\u03b7\u03c2'),'en':'*aa','eo':'*aa','es':'Moldavia','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Moldavie','fy':'*aa','ga':u('An 
Mhold\u00f3iv'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*es','iu':'*aa','iw':'*aa','ja':u('\u30e2\u30eb\u30c9\u30d0'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ubab0\ub3c4\ubc14'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'Maldova','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':u('Moldavi\u00eb'),'nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':u('Mo\u0142dawia'),'ps':'*aa','pt':u('Mold\u00e1via'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u041c\u043e\u043b\u0434\u043e\u0432\u0430'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u041c\u043e\u043b\u0434\u0430\u0432\u0438\u0458\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'Moldavien','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e42\u0e21\u0e25\u0e42\u0e14\u0e27\u0e32'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('M\u00f4n-\u0111\u00f4-va'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u6469\u5c14\u591a\u74e6'),'zu':'*aa',}, 'ME': 
{'aa':'Montenegro','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':'*aa','en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':u('Mont\u00e9n\u00e9gro'),'fy':'*aa','ga':'*aa','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30e2\u30f3\u30c6\u30cd\u30b0\u30ed'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ubaac\ud14c\ub124\uadf8\ub85c'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*aa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'*aa','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':'*aa','rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':'*aa','ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':'*aa','ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug
':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'*aa','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u9ed1\u5c71'),'zu':'*aa',}, 'MF': {'aa':'Saint Martin','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'St. Martin','dv':'*aa','dz':'*aa','ee':'*aa','el':'*aa','en':'*aa','eo':'*aa','es':u('San Mart\u00edn'),'et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Saint-Martin','fy':'*aa','ga':'*aa','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30b5\u30f3\u30de\u30eb\u30bf\u30f3'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc0dd \ub9c8\ub974\ud0f1'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*aa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':u('S\u00e3o 
Martinho'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':'*aa','rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':'*aa','ss':'*aa','st':'*aa','su':'*aa','sv':'*fr','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':'*aa','ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'*aa','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u5723\u9a6c\u4e01'),'zu':'*aa',}, 'MG': {'aa':'Madagascar','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Madagaskar','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039c\u03b1\u03b4\u03b1\u03b3\u03b1\u03c3\u03ba\u03ac\u03c1\u03b7'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':'*aa','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*de','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*de','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30de\u30c0\u30ac\u30b9\u30ab\u30eb'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ub9c8\ub2e4\uac00\uc2a4\uce74\ub974'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':
'*de','mt':'*de','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*de','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*de','ps':'*aa','pt':'*aa','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u041c\u0430\u0434\u0430\u0433\u0430\u0441\u043a\u0430\u0440'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':'*ru','ss':'*aa','st':'*aa','su':'*aa','sv':'*de','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e21\u0e32\u0e14\u0e32\u0e01\u0e32\u0e2a\u0e01\u0e49\u0e32'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('Ma-\u0111a-g\u00e1t-xca'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u9a6c\u8fbe\u52a0\u65af\u52a0'),'zu':'*aa',}, 'MH': {'aa':'Marshall Islands','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Marshall-Inseln','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039d\u03ae\u03c3\u03bf\u03b9 \u039c\u03ac\u03c1\u03c3\u03b1\u03bb'),'en':'*aa','eo':'*aa','es':'Islas Marshall','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':u('\u00celes Marshall'),'fy':'*aa','ga':u('Oile\u00e1in Marshall'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Kepulauan Marshall','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'Isole 
Marshall','iu':'*aa','iw':'*aa','ja':u('\u30de\u30fc\u30b7\u30e3\u30eb\u8af8\u5cf6'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ub9c8\uc15c \uc81c\ub3c4'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*id','mt':u('G\u017cejjer ta\u2019 Marshall'),'my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Marshalleilanden','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'Ilhas Marshall','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':'*aa','rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u041c\u0430\u0440\u0448\u0430\u043b\u0441\u043a\u0430 \u041e\u0441\u0442\u0440\u0432\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':u('Marshall\u00f6arna'),'sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':'*aa','ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('Qu\u1ea7n \u0111\u1ea3o M\u00e1c-san'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u9a6c\u7ecd\u5c14\u7fa4\u5c9b'),'zu':'*aa',}, 'MK': 
{'aa':'Macedonia','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':u('Maced\u00f2nia'),'ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Mazedonien','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u03a0\u0393\u0394 \u039c\u03b1\u03ba\u03b5\u03b4\u03bf\u03bd\u03af\u03b1\u03c2'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':u('Mac\u00e9doine'),'fy':'*aa','ga':u('An Mhacad\u00f3in'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30de\u30b1\u30c9\u30cb\u30a2'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ub9c8\ucf00\ub3c4\ub2c8\uc544\uc5b4'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':u('\u041c\u0430\u043a\u0435\u0434\u043e\u043d\u0438\u0458\u0430'),'ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':u('Ma\u010bedonja'),'my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':u('Macedoni\u00eb'),'nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':u('Maced\u00f4nia'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u041c\u0430\u043a\u0435\u0434\u043e\u043d\u0438\u044f'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','
sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':'*mk','ss':'*aa','st':'*aa','su':'*aa','sv':'Makedonien','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e41\u0e21\u0e0b\u0e35\u0e42\u0e14\u0e40\u0e19\u0e35\u0e22'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('Ma-x\u00ea-\u0111\u00f4-ni-a'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u9a6c\u5176\u987f\u738b\u56fd'),'zu':'*aa',}, 'ML': {'aa':'Mali','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039c\u03ac\u03bb\u03b9'),'en':'*aa','eo':'*aa','es':u('Mal\u00ed'),'et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':u('Mail\u00ed'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30de\u30ea'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ub9d0\ub9ac'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*aa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':
'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'*aa','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u041c\u0430\u043b\u0438'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':'*ru','ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e21\u0e32\u0e25\u0e35'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'Ma-li','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u9a6c\u91cc'),'zu':'*aa',}, 'MM': {'aa':'Myanmar','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039c\u03b9\u03b1\u03bd\u03bc\u03ac\u03c1'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':'Maenmar','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30df\u30e3\u30f3\u30de\u30fc'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ubbf8\uc580\ub9c8'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa
','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'Mjanmar','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':u('Mianmar [Birm\u00e2nia]'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u041c\u044c\u044f\u043d\u043c\u0430'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u041c\u0438\u0458\u0430\u043d\u043c\u0430\u0440'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e2a\u0e2b\u0e20\u0e32\u0e1e\u0e1e\u0e21\u0e48\u0e32'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'Mi-an-ma','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u7f05\u7538'),'zu':'*aa',}, 'MN': {'aa':'Mongolia','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':u('Mong\u00f2lia'),'ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Mongolei','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039c\u03bf\u03b3\u03b3\u03bf\u03bb\u03af\u03b1'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Mongolie','fy':'*aa','ga':u('An 
Mhong\u00f3il'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30e2\u30f3\u30b4\u30eb'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ubabd\uace8'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'Mongolja','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':u('Mongoli\u00eb'),'nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':u('Mong\u00f3lia'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u041c\u043e\u043d\u0433\u043e\u043b\u0438\u044f'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u041c\u043e\u043d\u0433\u043e\u043b\u0438\u0458\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'Mongoliet','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e21\u0e2d\u0e07\u0e42\u0e01\u0e40\u0e25\u0e35\u0e22'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('M\u00f4ng C\u1ed5'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u8499\u53e4'),'zu':'*aa',}, 'MO': 
{'aa':'Macao','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039c\u03b1\u03ba\u03ac\u03bf, \u0395\u03b9\u03b4\u03b9\u03ba\u03ae \u0394\u03b9\u03bf\u03b9\u03ba\u03b7\u03c4\u03b9\u03ba\u03ae \u03a0\u03b5\u03c1\u03b9\u03c6\u03ad\u03c1\u03b5\u03b9\u03b1 \u03c4\u03b7\u03c2 \u039a\u03af\u03bd\u03b1\u03c2'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':'*aa','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Makao S.A.R. Cina','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30de\u30ab\u30aa'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ub9c8\uce74\uc624'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'Macao S.A.R., China','mt':'*ms','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Macao SAR van China','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':u('Macau, Regi\u00e3o Admin. 
Especial da China'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':'*aa','rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u041c\u0430\u043a\u0430\u043e (\u0421. \u0410. \u0420. \u041a\u0438\u043d\u0430)'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':'*aa','ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'*aa','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u6fb3\u95e8\u7279\u533a'),'zu':'*aa',}, 'MP': {'aa':'Northern Mariana Islands','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':u('N\u00f6rdliche Mariannen-Inseln'),'dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039d\u03ae\u03c3\u03bf\u03b9 \u0392\u03cc\u03c1\u03b5\u03b9\u03b5\u03c2 \u039c\u03b1\u03c1\u03b9\u03ac\u03bd\u03b5\u03c2'),'en':'*aa','eo':'*aa','es':'Islas Marianas del Norte','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':u('\u00celes Mariannes du Nord'),'fy':'*aa','ga':u('Oile\u00e1in Mariana Thuaidh'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Kepualuan Mariana Utara','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'Isole Marianne 
settentrionali','iu':'*aa','iw':'*aa','ja':u('\u5317\u30de\u30ea\u30a2\u30ca\u8af8\u5cf6'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ubd81\ub9c8\ub9ac\uc544\ub098 \uc81c\ub3c4'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':u('G\u017cejjer Marjana ta\u2019 Fuq'),'my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Noordelijke Marianeneilanden','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'Ilhas Marianas do Norte','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':'*aa','rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0421\u0435\u0432\u0435\u0440\u043d\u0430 \u041c\u0430\u0440\u0438\u0458\u0430\u043d\u0441\u043a\u0430 \u041e\u0441\u0442\u0440\u0432\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'Nordmarianerna','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':'*aa','ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'*aa','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u7f8e\u5c5e\u5317\u9a6c\u91cc\u4e9a\u7eb3\u7fa4\u5c9b'),'zu':'*aa',}, 'MQ': 
{'aa':'Martinique','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'Martinica','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039c\u03b1\u03c1\u03c4\u03b9\u03bd\u03af\u03ba\u03b1'),'en':'*aa','eo':'*aa','es':'*ca','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':'*aa','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*ca','iu':'*aa','iw':'*aa','ja':u('\u30de\u30eb\u30c6\u30a3\u30cb\u30fc\u30af\u5cf6'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ub9d0\ud2f0\ub2c8\ud06c'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'Martinik','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'Martynika','ps':'*aa','pt':'*ca','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u041c\u0430\u0440\u0442\u0438\u043d\u0438\u043a\u0430'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u041c\u0430\u0440\u0442\u0438\u043d\u0438\u043a'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa
','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e21\u0e32\u0e23\u0e4c\u0e15\u0e34\u0e19\u0e34\u0e01'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'*aa','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u9a6c\u63d0\u5c3c\u514b\u5c9b'),'zu':'*aa',}, 'MR': {'aa':'Mauritania','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':u('Maurit\u00e0nia'),'ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Mauretanien','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039c\u03b1\u03c5\u03c1\u03b9\u03c4\u03b1\u03bd\u03af\u03b1'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Mauritanie','fy':'*aa','ga':u('An 
Mharat\u00e1in'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30e2\u30fc\u30ea\u30bf\u30cb\u30a2'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ubaa8\ub9ac\ud0c0\ub2c8'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'Mawritanja','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':u('Mauritani\u00eb'),'nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'Mauretania','ps':'*aa','pt':u('Maurit\u00e2nia'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u041c\u0430\u0432\u0440\u0438\u0442\u0430\u043d\u0438\u044f'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u041c\u0430\u0443\u0440\u0438\u0442\u0430\u043d\u0438\u0458\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*de','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e21\u0e2d\u0e23\u0e34\u0e17\u0e32\u0e40\u0e19\u0e35\u0e22'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('M\u00f4-ri-ta-ni'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u6bdb\u91cc\u5854\u5c3c\u4e9a'),'zu':'*aa',}, 'MS': 
{'aa':'Montserrat','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039c\u03bf\u03bd\u03c3\u03b5\u03c1\u03ac\u03c4'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':'Montsarat','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30e2\u30f3\u30c8\u30bb\u30e9\u30c8\u5cf6'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ubaac\ud2b8\uc138\ub77c\ud2b8'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*aa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'*aa','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u041c\u043e\u043d\u0442\u0441\u0435\u0440\u0430\u0442'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u041c\u043e\u043d\u0441\u0435\u0440\u0430\u0442'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te'
:'*aa','tg':'*aa','th':u('\u0e21\u0e2d\u0e19\u0e15\u0e4c\u0e40\u0e0b\u0e2d\u0e23\u0e32\u0e15'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'*aa','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u8499\u7279\u585e\u62c9\u7fa4\u5c9b'),'zu':'*aa',}, 'MT': {'aa':'Malta','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039c\u03ac\u03bb\u03c4\u03b1'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Malte','fy':'*aa','ga':u('M\u00e1lta'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30de\u30eb\u30bf'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ubab0\ud0c0'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*aa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'*aa','qu':'*aa','rm':'*aa','rn
':'*aa','ro':'*aa','ru':u('\u041c\u0430\u043b\u044c\u0442\u0430'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u041c\u0430\u043b\u0442\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e21\u0e31\u0e25\u0e15\u0e49\u0e32'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'Man-ta','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u9a6c\u8033\u4ed6'),'zu':'*aa',}, 'MU': {'aa':'Mauritius','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'Maurici','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039c\u03b1\u03c5\u03c1\u03af\u03ba\u03b9\u03bf\u03c2'),'en':'*aa','eo':'*aa','es':'Mauricio','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Maurice','fy':'*aa','ga':u('Oile\u00e1n 
Mhuir\u00eds'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'Maurizio','iu':'*aa','iw':'*aa','ja':u('\u30e2\u30fc\u30ea\u30b7\u30e3\u30b9'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ubaa8\ub9ac\uc154\uc2a4'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'Mawrizju','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':u('Maur\u00edcio'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u041c\u0430\u0432\u0440\u0438\u043a\u0438\u0439'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u041c\u0430\u0443\u0440\u0438\u0446\u0438\u0458\u0443\u0441'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e21\u0e2d\u0e23\u0e34\u0e40\u0e15\u0e35\u0e22\u0e2a'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('M\u00f4-ri-x\u01a1'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u6bdb\u91cc\u6c42\u65af'),'zu':'*aa',}, 'MV': 
{'aa':'Maldives','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Maldiven','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039c\u03b1\u03bb\u03b4\u03af\u03b2\u03b5\u03c2'),'en':'*aa','eo':'*aa','es':'Maldivas','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':u('Mhaildiv\u00ed'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'Maldive','iu':'*aa','iw':'*aa','ja':u('\u30e2\u30eb\u30c7\u30a3\u30d6'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ubab0\ub514\ube0c'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'Maldiv','mt':'*aa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*de','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'*es','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':'*aa','rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u041c\u0430\u043b\u0434\u0438\u0432\u0438'),'ss':'*aa','st':'*aa','su':'*aa','sv':'Maldiverna','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':'*aa','ti':'*aa','tk':'*aa','tl
':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('Man-\u0111i-v\u01a1'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u9a6c\u5c14\u4ee3\u592b'),'zu':'*aa',}, 'MW': {'aa':'Malawi','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039c\u03b1\u03bb\u03ac\u03bf\u03c5\u03b9'),'en':'*aa','eo':'*aa','es':'Malaui','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':u('An Mhal\u00e1iv'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30de\u30e9\u30a6\u30a4'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ub9d0\ub77c\uc704'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*aa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'*aa','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':'*aa','rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*a
a','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u041c\u0430\u043b\u0430\u0432\u0438'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':'*aa','ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'Ma-la-uy','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u9a6c\u62c9\u7ef4'),'zu':'*aa',}, 'MX': {'aa':'Mexico','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':u('M\u00e8xic'),'ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Mexiko','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039c\u03b5\u03be\u03b9\u03ba\u03cc'),'en':'*aa','eo':'*aa','es':u('M\u00e9xico'),'et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Mexique','fy':'*aa','ga':'Meicsiceo','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'Messico','iu':'*aa','iw':'*aa','ja':u('\u30e1\u30ad\u30b7\u30b3'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uba55\uc2dc\ucf54'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'Meksiko','mt':'Messiku','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*a
a','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'Meksyk','ps':'*aa','pt':'*es','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u041c\u0435\u043a\u0441\u0438\u043a\u0430'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u041c\u0435\u043a\u0441\u0438\u043a\u043e'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*de','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e41\u0e21\u0e47\u0e01\u0e0b\u0e34\u0e42\u0e01'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('M\u00ea-hi-c\u00f4'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u58a8\u897f\u54e5'),'zu':'*aa',}, 'MY': {'aa':'Malaysia','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':u('Mal\u00e0isia'),'ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039c\u03b1\u03bb\u03b1\u03b9\u03c3\u03af\u03b1'),'en':'*aa','eo':'*aa','es':'Malasia','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Malaisie','fy':'*aa','ga':'An 
Mhalaeisia','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30de\u30ec\u30fc\u30b7\u30a2'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ub9d0\ub808\uc774\uc9c0\uc544'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'Malasja','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':u('Maleisi\u00eb'),'nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'Malezja','ps':'*aa','pt':u('Mal\u00e1sia'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u041c\u0430\u043b\u0430\u0439\u0437\u0438\u044f'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u041c\u0430\u043b\u0435\u0437\u0438\u0458\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e21\u0e32\u0e40\u0e25\u0e40\u0e0b\u0e35\u0e22'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'Ma-lay-xi-a','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u9a6c\u6765\u897f\u4e9a'),'zu':'*aa',}, 'MZ': 
{'aa':'Mozambique','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':u('Mo\u00e7ambic'),'ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Mosambik','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039c\u03bf\u03b6\u03b1\u03bc\u03b2\u03af\u03ba\u03b7'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':u('M\u00f3saimb\u00edc'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'Mozambico','iu':'*aa','iw':'*aa','ja':u('\u30e2\u30b6\u30f3\u30d3\u30fc\u30af'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ubaa8\uc7a0\ube44\ud06c'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'Mozambik','mt':u('Mo\u017cambik'),'my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*ms','ps':'*aa','pt':u('Mo\u00e7ambique'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u041c\u043e\u0437\u0430\u043c\u0431\u0438\u043a'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':'*ru','ss':'*aa','st':'*aa','su':'*aa','sv':'*pt','sw':'*aa','ta':
'*aa','te':'*aa','tg':'*aa','th':u('\u0e42\u0e21\u0e41\u0e0b\u0e21\u0e1a\u0e34\u0e04'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('M\u00f4-d\u0103m-b\u00edch'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u83ab\u6851\u6bd4\u514b'),'zu':'*aa',}, 'NA': {'aa':'Namibia','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':u('Nam\u00edbia'),'ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039d\u03b1\u03bc\u03af\u03bc\u03c0\u03b9\u03b1'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Namibie','fy':'*aa','ga':'An 
Namaib','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30ca\u30df\u30d3\u30a2'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ub098\ubbf8\ube44\uc544'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'Namibja','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':u('Namibi\u00eb'),'nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'*ca','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u041d\u0430\u043c\u0438\u0431\u0438\u044f'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u041d\u0430\u043c\u0438\u0431\u0438\u0458\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e19\u0e32\u0e21\u0e34\u0e40\u0e1a\u0e35\u0e22'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'Nam-mi-bi-a','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u7eb3\u7c73\u6bd4\u4e9a'),'zu':'*aa',}, 'NC': {'aa':'New 
Caledonia','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':u('Nova Caled\u00f2nia'),'ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Neukaledonien','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039d\u03ad\u03b1 \u039a\u03b1\u03bb\u03b7\u03b4\u03bf\u03bd\u03af\u03b1'),'en':'*aa','eo':'*aa','es':'Nueva Caledonia','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':u('Nouvelle-Cal\u00e9donie'),'fy':'*aa','ga':u('An Nua-Chalad\u00f3in'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Kaledonia Baru','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'Nuova Caledonia','iu':'*aa','iw':'*aa','ja':u('\u30cb\u30e5\u30fc\u30ab\u30ec\u30c9\u30cb\u30a2'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ub274 \uce7c\ub808\ub3c4\ub2c8\uc544'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*aa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':u('Nieuw-Caledoni\u00eb'),'nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'Nowa Kaledonia','ps':'*aa','pt':u('Nova Caled\u00f4nia'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u041d\u043e\u0432\u0430\u044f 
\u041a\u0430\u043b\u0435\u0434\u043e\u043d\u0438\u044f'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u041d\u043e\u0432\u0430 \u041a\u0430\u043b\u0435\u0434\u043e\u043d\u0438\u0458\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'Nya Kaledonien','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e19\u0e34\u0e27\u0e04\u0e32\u0e25\u0e34\u0e42\u0e14\u0e40\u0e19\u0e35\u0e22'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'*aa','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u65b0\u514b\u91cc\u591a\u5c3c\u4e9a\u7fa4\u5c9b'),'zu':'*aa',}, 'NE': {'aa':'Niger','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':u('N\u00edger'),'ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039d\u03af\u03b3\u03b7\u03c1'),'en':'*aa','eo':'*aa','es':'*ca','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':u('An 
N\u00edgir'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30cb\u30b8\u30a7\u30fc\u30eb'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ub2c8\uc81c\ub974'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':u('Ni\u0121er'),'my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'*ca','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u041d\u0438\u0433\u0435\u0440'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':'*ru','ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e44\u0e19\u0e40\u0e08\u0e2d\u0e23\u0e4c'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('Ni-gi\u00ea'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u5c3c\u65e5\u5c14'),'zu':'*aa',}, 'NF': {'aa':'Norfolk 
Island','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Norfolk-Insel','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039d\u03ae\u03c3\u03bf\u03c2 \u039d\u03cc\u03c1\u03c6\u03bf\u03bb\u03ba'),'en':'*aa','eo':'*aa','es':'Isla Norfolk','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':u('\u00cele Norfolk'),'fy':'*aa','ga':u('Oile\u00e1n Norfolk'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Kepulauan Norfolk','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'Isola Norfolk','iu':'*aa','iw':'*aa','ja':u('\u30ce\u30fc\u30d5\u30a9\u30fc\u30af\u5cf6'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ub178\ud37d \uc12c'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*aa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Norfolkeiland','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'Ilha Norfolk','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':'*aa','rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u041d\u043e\u0440\u0444\u043e\u043b\u043a 
\u041e\u0441\u0442\u0440\u0432\u043e'),'ss':'*aa','st':'*aa','su':'*aa','sv':u('Norfolk\u00f6n'),'sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':'*aa','ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'*aa','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u8bfa\u798f\u514b\u5c9b'),'zu':'*aa',}, 'NG': {'aa':'Nigeria','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':u('Nig\u00e8ria'),'ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039d\u03b9\u03b3\u03b7\u03c1\u03af\u03b1'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':u('Nig\u00e9ria'),'fy':'*aa','ga':u('An 
Nig\u00e9ir'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30ca\u30a4\u30b8\u30a7\u30ea\u30a2'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ub098\uc774\uc9c0\ub9ac\uc544'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':u('Ni\u0121erja'),'my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'*fr','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u041d\u0438\u0433\u0435\u0440\u0438\u044f'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u041d\u0438\u0433\u0435\u0440\u0438\u0458\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e44\u0e19\u0e08\u0e35\u0e40\u0e23\u0e35\u0e22'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('Ni-gi\u00ea-ri-a'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u5c3c\u65e5\u5229\u4e9a'),'zu':'*aa',}, 'NI': 
{'aa':'Nicaragua','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039d\u03b9\u03ba\u03b1\u03c1\u03ac\u03b3\u03bf\u03c5\u03b1'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':'Nicearagua','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30cb\u30ab\u30e9\u30b0\u30a2'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ub2c8\uce74\ub77c\uacfc'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'Nikaragwa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'Nikaragua','ps':'*aa','pt':u('Nicar\u00e1gua'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u041d\u0438\u043a\u0430\u0440\u0430\u0433\u0443\u0430'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u041d\u0438\u043a\u0430\u0440\u0430\u0433\u0432\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*aa',
'sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e19\u0e34\u0e04\u0e32\u0e23\u0e32\u0e01\u0e31\u0e27'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'Ni-ca-ra-goa','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u5c3c\u52a0\u62c9\u74dc'),'zu':'*aa',}, 'NL': {'aa':'Netherlands','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':u('Pa\u00efsos Baixos'),'ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Niederlande','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039f\u03bb\u03bb\u03b1\u03bd\u03b4\u03af\u03b1'),'en':'*aa','eo':'*aa','es':'Holanda','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'Alankomaat','fj':'*aa','fo':'*aa','fr':'Pays-Bas','fy':'*aa','ga':u('An \u00cdsilt\u00edr'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'Paesi 
Bassi','iu':'*aa','iw':'*aa','ja':u('\u30aa\u30e9\u30f3\u30c0'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ub124\ub35c\ub780\ub4dc'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'Belanda','mt':'Olanda','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Nederland','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'Holandia','ps':'*aa','pt':'*es','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u041d\u0438\u0434\u0435\u0440\u043b\u0430\u043d\u0434\u044b'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0425\u043e\u043b\u0430\u043d\u0434\u0438\u0458\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':u('Nederl\u00e4nderna'),'sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e40\u0e19\u0e40\u0e18\u0e2d\u0e23\u0e4c\u0e41\u0e25\u0e19\u0e14\u0e4c'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('H\u00e0 Lan'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u8377\u5170'),'zu':'*aa',}, 'NO': 
{'aa':'Norway','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'Noruega','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Norwegen','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039d\u03bf\u03c1\u03b2\u03b7\u03b3\u03af\u03b1'),'en':'*aa','eo':'*aa','es':'*ca','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'Norja','fj':'*aa','fo':'*aa','fr':u('Norv\u00e8ge'),'fy':'*aa','ga':'An Iorua','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Norwegia','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'Norvegia','iu':'*aa','iw':'*aa','ja':u('\u30ce\u30eb\u30a6\u30a7\u30fc'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ub178\ub974\uc6e8\uc774'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':u('Norve\u0121ja'),'my':'*aa','na':'*aa','nb':'Norge','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Noorwegen','nn':'*nb','no':'*nb','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*id','ps':'*aa','pt':'*ca','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u041d\u043e\u0440\u0432\u0435\u0433\u0438\u044f'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u041d\u043e\u0440\u0432\u0435\u0448\u043a\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*nb','
sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e19\u0e2d\u0e23\u0e4c\u0e40\u0e27\u0e22\u0e4c'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'Na Uy','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u632a\u5a01'),'zu':'*aa',}, 'NP': {'aa':'Nepal','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039d\u03b5\u03c0\u03ac\u03bb'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':u('N\u00e9pal'),'fy':'*aa','ga':'Neipeal','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30cd\u30d1\u30fc\u30eb'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ub124\ud314'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*aa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'*aa','qu':'*aa','rm':'*aa','rn':'*aa'
,'ro':'*aa','ru':u('\u041d\u0435\u043f\u0430\u043b'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':'*ru','ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e40\u0e19\u0e1b\u0e32\u0e25'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('N\u00ea-pan'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u5c3c\u6cca\u5c14'),'zu':'*aa',}, 'NR': {'aa':'Nauru','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039d\u03b1\u03bf\u03cd\u03c1\u03bf\u03c5'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':u('N\u00e1r\u00fa'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30ca\u30a6\u30eb'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ub098\uc6b0\ub8e8'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*aa','my':'*aa','n
a':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'*aa','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':'*aa','rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u041d\u0430\u0443\u0440\u0443'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':'*aa','ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'*aa','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u7459\u9c81'),'zu':'*aa',}, 'NU': {'aa':'Niue','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039d\u03b9\u03bf\u03cd\u03b5'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':'*aa','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30cb\u30a6\u30a8\u5cf6'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ub2c8\uc6b0\uc5d0'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*
aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*aa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'*aa','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u041d\u0438\u044e'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u041d\u0438\u0443\u0435'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e19\u0e35\u0e22\u0e39'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'*aa','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u7ebd\u57c3\u5c9b'),'zu':'*aa',}, 'NZ': {'aa':'New Zealand','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'Nova Zelanda','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Neuseeland','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039d\u03ad\u03b1 \u0396\u03b7\u03bb\u03b1\u03bd\u03b4\u03af\u03b1'),'en':'*aa','eo':'*aa','es':'Nueva Zelanda','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':u('Nouvelle-Z\u00e9lande'),'fy':'*aa','ga':u('An Nua-Sh\u00e9alainn'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Selandia 
Baru','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'Nuova Zelanda','iu':'*aa','iw':'*aa','ja':u('\u30cb\u30e5\u30fc\u30b8\u30fc\u30e9\u30f3\u30c9'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ub274\uc9c8\ub79c\ub4dc'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*aa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Nieuw-Zeeland','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'Nowa Zelandia','ps':'*aa','pt':u('Nova Zel\u00e2ndia'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u041d\u043e\u0432\u0430\u044f \u0417\u0435\u043b\u0430\u043d\u0434\u0438\u044f'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u041d\u043e\u0432\u0438 \u0417\u0435\u043b\u0430\u043d\u0434'),'ss':'*aa','st':'*aa','su':'*aa','sv':'Nya Zeeland','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e19\u0e34\u0e27\u0e0b\u0e35\u0e41\u0e25\u0e19\u0e14\u0e4c'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('Niu Di-l\u00e2n'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u65b0\u897f\u5170'),'zu':'*aa',}, 'OM': {'aa':'Oman','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':u('\u0633\u0644\u0637\u0646\u0629 
\u0639\u0645\u0627\u0646'),'as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039f\u03bc\u03ac\u03bd'),'en':'*aa','eo':'*aa','es':u('Om\u00e1n'),'et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':'*aa','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30aa\u30de\u30fc\u30f3'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc624\ub9cc'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*aa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':u('Om\u00e3'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u041e\u043c\u0430\u043d'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':'*ru','ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e42\u0e2d\u0e21\u0e32\u0e19'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':
'*aa','uz':'*aa','ve':'*aa','vi':u('\u00d4-man'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u963f\u66fc'),'zu':'*aa',}, 'PA': {'aa':'Panama','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':u('Panam\u00e0'),'ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u03a0\u03b1\u03bd\u03b1\u03bc\u03ac\u03c2'),'en':'*aa','eo':'*aa','es':u('Panam\u00e1'),'et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':'*aa','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30d1\u30ca\u30de'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ud30c\ub098\ub9c8'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*aa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'*es','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u041f\u0430\u043d\u0430\u043c\u0430'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':'*r
u','ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e1b\u0e32\u0e19\u0e32\u0e21\u0e32'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'Pa-na-ma','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u5df4\u62ff\u9a6c'),'zu':'*aa',}, 'PE': {'aa':'Peru','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':u('Per\u00fa'),'ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u03a0\u03b5\u03c1\u03bf\u03cd'),'en':'*aa','eo':'*aa','es':'*ca','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':u('P\u00e9rou'),'fy':'*aa','ga':u('Peiri\u00fa'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':u('Per\u00f9'),'iu':'*aa','iw':'*aa','ja':u('\u30da\u30eb\u30fc'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ud398\ub8e8'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*aa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','p
l':'*aa','ps':'*aa','pt':'*aa','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u041f\u0435\u0440\u0443'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':'*ru','ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e40\u0e1b\u0e23\u0e39'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('P\u00ea-ru'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u79d8\u9c81'),'zu':'*aa',}, 'PF': {'aa':'French Polynesia','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':u('Polin\u00e8sia Francesa'),'ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':u('Franz\u00f6sisch-Polynesien'),'dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0393\u03b1\u03bb\u03bb\u03b9\u03ba\u03ae \u03a0\u03bf\u03bb\u03c5\u03bd\u03b7\u03c3\u03af\u03b1'),'en':'*aa','eo':'*aa','es':'Polinesia Francesa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':u('Polyn\u00e9sie Fran\u00e7aise'),'fy':'*aa','ga':u('An Pholain\u00e9is Fhrancach'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Polynesia Perancis','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'Polinesia Francese','iu':'*aa','iw':'*aa','ja':u('\u4ecf\u9818\u30dd\u30ea\u30cd\u30b7\u30a2'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ud504\ub791\uc2a4\ub839 
\ud3f4\ub9ac\ub124\uc2dc\uc544'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':u('Polinesja Fran\u010bi\u017ca'),'my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':u('Frans-Polynesi\u00eb'),'nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'Polinezja Francuska','ps':'*aa','pt':u('Polin\u00e9sia Francesa'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0424\u0440\u0430\u043d\u0446\u0443\u0437\u0441\u043a\u0430\u044f \u041f\u043e\u043b\u0438\u043d\u0435\u0437\u0438\u044f'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0424\u0440\u0430\u043d\u0446\u0443\u0441\u043a\u0430 \u041f\u043e\u043b\u0438\u043d\u0435\u0437\u0438\u0458\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'Franska Polynesien','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e40\u0e1f\u0e23\u0e47\u0e19\u0e0a\u0e42\u0e1e\u0e25\u0e34\u0e19\u0e35\u0e40\u0e0b\u0e35\u0e22'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'*aa','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u6cd5\u5c5e\u73bb\u5229\u5c3c\u897f\u4e9a'),'zu':'*aa',}, 'PG': {'aa':'Papua New Guinea','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'Papua Nova 
Guinea','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Papua-Neuguinea','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u03a0\u03b1\u03c0\u03bf\u03cd\u03b1 - \u039d\u03ad\u03b1 \u0393\u03bf\u03c5\u03b9\u03bd\u03ad\u03b1'),'en':'*aa','eo':'*aa','es':u('Pap\u00faa New Guinea'),'et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':u('Papouasie-Nouvelle-Guin\u00e9e'),'fy':'*aa','ga':'Nua-Ghuine Phapua','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Papua Nugini','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'Papua Nuova Guinea','iu':'*aa','iw':'*aa','ja':u('\u30d1\u30d7\u30a2\u30cb\u30e5\u30fc\u30ae\u30cb\u30a2'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ud30c\ud478\uc544\ub274\uae30\ub2c8'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':u('Papwa-Ginea \u0120dida'),'my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Papoea-Nieuw-Guinea','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'Papua Nowa Gwinea','ps':'*aa','pt':u('Papua-Nova Guin\u00e9'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u041f\u0430\u043f\u0443\u0430 - \u041d\u043e\u0432\u0430\u044f \u0413\u0432\u0438\u043d\u0435\u044f'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u041f\u0430\u043f\u0443\u0430 \u041d\u043e\u0432\u0430 
\u0413\u0432\u0438\u043d\u0435\u0458\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'Papua Nya Guinea','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e1b\u0e32\u0e1b\u0e31\u0e27\u0e19\u0e34\u0e27\u0e01\u0e35\u0e19\u0e35'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('Pa-pu-a Niu Ghi-n\u00ea'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u5df4\u5e03\u4e9a\u65b0\u51e0\u5185\u4e9a'),'zu':'*aa',}, 'PH': {'aa':'Philippines','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'Filipines','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Philippinen','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u03a6\u03b9\u03bb\u03b9\u03c0\u03c0\u03af\u03bd\u03b5\u03c2'),'en':'*aa','eo':'*aa','es':'Filipinas','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':u('Na hOile\u00e1in 
Fhilip\u00edneacha'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Filipina','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'Filippine','iu':'*aa','iw':'*aa','ja':u('\u30d5\u30a3\u30ea\u30d4\u30f3'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ud544\ub9ac\ud540'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*id','mt':'Filippini','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Filipijnen','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'Filipiny','ps':'*aa','pt':'*es','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0424\u0438\u043b\u0438\u043f\u043f\u0438\u043d\u044b'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0424\u0438\u043b\u0438\u043f\u0438\u043d\u0438'),'ss':'*aa','st':'*aa','su':'*aa','sv':'Filippinerna','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e1f\u0e34\u0e25\u0e34\u0e1b\u0e1b\u0e34\u0e19\u0e2a\u0e4c'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'Phi-lip-pin','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u83f2\u5f8b\u5bbe'),'zu':'*aa',}, 'PK': 
{'aa':'Pakistan','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u03a0\u03b1\u03ba\u03b9\u03c3\u03c4\u03ac\u03bd'),'en':'*aa','eo':'*aa','es':u('Paquist\u00e1n'),'et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':u('An Phacast\u00e1in'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30d1\u30ad\u30b9\u30bf\u30f3'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ud30c\ud0a4\uc2a4\ud0c4'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*aa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':u('Paquist\u00e3o'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u041f\u0430\u043a\u0438\u0441\u0442\u0430\u043d'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':'*ru','ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e1b\u
0e32\u0e01\u0e35\u0e2a\u0e16\u0e32\u0e19'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'Pa-ki-xtan','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u5df4\u57fa\u65af\u5766'),'zu':'*aa',}, 'PL': {'aa':'Poland','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':u('Pol\u00f2nia'),'ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Polen','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u03a0\u03bf\u03bb\u03c9\u03bd\u03af\u03b1'),'en':'*aa','eo':'*aa','es':'Polonia','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'Puola','fj':'*aa','fo':'*aa','fr':'Pologne','fy':'*aa','ga':'An Pholainn','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Polandia','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'*es','iu':'*aa','iw':'*aa','ja':u('\u30dd\u30fc\u30e9\u30f3\u30c9'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ud3f4\ub780\ub4dc'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'Polonja','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*de','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'Polska','ps':'*aa','pt':u('Pol\u00f4nia'),'qu':'*
aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u041f\u043e\u043b\u044c\u0448\u0430'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u041f\u043e\u0459\u0441\u043a\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*de','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e42\u0e1b\u0e41\u0e25\u0e19\u0e14\u0e4c'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'Ba Lan','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u6ce2\u5170'),'zu':'*aa',}, 'PM': {'aa':'Saint Pierre And Miquelon','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'St. 
Pierre und Miquelon','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u03a3\u03b1\u03b9\u03bd\u03c4 \u03a0\u03b9\u03ad\u03c1 \u03ba\u03b1\u03b9 \u039c\u03b9\u03ba\u03b5\u03bb\u03cc\u03bd'),'en':'*aa','eo':'*aa','es':u('San Pedro y Miquel\u00f3n'),'et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Saint-Pierre-et-Miquelon','fy':'*aa','ga':'Saint Pierre agus Miquelon','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Saint Pierre dan Miquelon','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'Saint Pierre e Miquelon','iu':'*aa','iw':'*aa','ja':u('\u30b5\u30f3\u30d4\u30a8\u30fc\u30eb\u5cf6\u30fb\u30df\u30af\u30ed\u30f3\u5cf6'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc138\uc778\ud2b8 \ud53c\uc5d0\ub974 \ubbf8\ucf08\ub860'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'Saint Pierre and Miquelon','mt':'*ms','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Saint Pierre en Miquelon','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'*it','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':'*aa','rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0421\u0435\u043d \u041f\u0458\u0435\u0440 \u0438 \u041c\u0438\u043a\u0435\u043b\u043e\u043d'),'ss':'*aa','st':'*aa','su':'*aa','sv':'Saint Pierre och 
Miquelon','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':'*aa','ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'*aa','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u5723\u76ae\u57c3\u5c14\u548c\u5bc6\u514b\u9686\u7fa4\u5c9b'),'zu':'*aa',}, 'PN': {'aa':'Pitcairn','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u03a0\u03af\u03c4\u03ba\u03b5\u03c1\u03bd'),'en':'*aa','eo':'*aa','es':'Islas Pitcairn','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':'*aa','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30d4\u30c8\u30b1\u30a2\u30f3\u5cf6'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ud54f\ucf00\uc5b8 
\uc81c\ub3c4'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*aa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'*aa','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':'*aa','rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u041f\u0438\u0442\u043a\u0435\u0440\u043d'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':'*aa','ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'*aa','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u76ae\u7279\u514b\u6069\u5c9b'),'zu':'*aa',}, 'PR': {'aa':'Puerto Rico','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u03a0\u03bf\u03c5\u03ad\u03c1\u03c4\u03bf \u03a1\u03af\u03ba\u03bf'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Porto Rico','fy':'*aa','ga':u('Port\u00f3 
R\u00edce'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Puerto Riko','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30d7\u30a8\u30eb\u30c8\u30ea\u30b3'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ud478\uc5d0\ub974\ud1a0\ub9ac\ucf54'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*aa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'Portoryko','ps':'*aa','pt':'*fr','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u041f\u0443\u044d\u0440\u0442\u043e-\u0420\u0438\u043a\u043e'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u041f\u043e\u0440\u0442\u043e \u0420\u0438\u043a\u043e'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e40\u0e1b\u0e2d\u0e23\u0e4c\u0e42\u0e15\u0e23\u0e34\u0e42\u0e01'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'*aa','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u6ce2\u591a\u9ece\u54e5'),'zu':'*aa',}, 'PS': 
{'aa':'Palestine','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':u('Pal\u00e4stina'),'dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u03a0\u03b1\u03bb\u03b1\u03b9\u03c3\u03c4\u03b9\u03bd\u03b9\u03b1\u03ba\u03ac \u0395\u03b4\u03ac\u03c6\u03b7'),'en':'*aa','eo':'*aa','es':'Palestina','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':u('Na Cr\u00edocha Pailist\u00edneacha'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Otoritas Palestina','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'*es','iu':'*aa','iw':'*aa','ja':u('\u30d1\u30ec\u30b9\u30c1\u30ca'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ud314\ub808\uc2a4\ud0c0\uc778'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'Palestinian Territory','mt':'*ms','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Palestijns Gebied','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':u('Territ\u00f3rios 
palestinos'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':'*aa','rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u041f\u0430\u043b\u0435\u0441\u0442\u0438\u043d\u0441\u043a\u0430 \u0442\u0435\u0440\u0438\u0442\u043e\u0440\u0438\u0458\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*es','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':'*aa','ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'*aa','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u5df4\u52d2\u65af\u5766'),'zu':'*aa',}, 'PT': {'aa':'Portugal','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u03a0\u03bf\u03c1\u03c4\u03bf\u03b3\u03b1\u03bb\u03af\u03b1'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'Portugali','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':u('An 
Phortaing\u00e9il'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Portugis','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'Portogallo','iu':'*aa','iw':'*aa','ja':u('\u30dd\u30eb\u30c8\u30ac\u30eb'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ud3ec\ub974\ud22c\uce7c'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'Feringgi','mt':'Portugall','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'Portugalia','ps':'*aa','pt':'*aa','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u041f\u043e\u0440\u0442\u0443\u0433\u0430\u043b\u0438\u044f'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u041f\u043e\u0440\u0442\u0443\u0433\u0430\u043b'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e42\u0e1b\u0e15\u0e38\u0e01\u0e31\u0e25'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('B\u1ed3 \u0110\u00e0o Nha'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u8461\u8404\u7259'),'zu':'*aa',}, 'PW': 
{'aa':'Palau','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u03a0\u03b1\u03bb\u03ac\u03bf\u03c5'),'en':'*aa','eo':'*aa','es':'Palaos','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Belau','fy':'*aa','ga':'*aa','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30d1\u30e9\u30aa'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ud314\ub77c\uc6b0'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*aa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'*aa','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':'*aa','rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u041f\u0430\u043b\u0430\u0443'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':'*aa','ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa
','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'*aa','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u5e15\u52b3'),'zu':'*aa',}, 'PY': {'aa':'Paraguay','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'Paraguai','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u03a0\u03b1\u03c1\u03b1\u03b3\u03bf\u03c5\u03ac\u03b7'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':'Paragua','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30d1\u30e9\u30b0\u30a2\u30a4'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ud30c\ub77c\uacfc\uc774'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'Paragwaj','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*mt','ps':'*aa','pt':'*ca','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u041f\u0430\u0440\u0430\u0433\u0432\u0430\u0439'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*a
a','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u041f\u0430\u0440\u0430\u0433\u0432\u0430\u0458'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e1b\u0e32\u0e23\u0e32\u0e01\u0e27\u0e31\u0e22'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'Pa-ra-goay','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u5df4\u62c9\u572d'),'zu':'*aa',}, 'QA': {'aa':'Qatar','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':u('\u0642\u0637\u0631'),'as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Katar','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039a\u03b1\u03c4\u03ac\u03c1'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':'Catar','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30ab\u30bf\u30fc\u30eb'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uce74\ud0c0\ub974'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*aa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa'
,'no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*de','ps':'*aa','pt':'*ga','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u041a\u0430\u0442\u0430\u0440'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':'*ru','ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e01\u0e32\u0e15\u0e32\u0e23\u0e4c'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'Ca-ta','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u5361\u5854\u5c14'),'zu':'*aa',}, 'RE': {'aa':'Reunion','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u03a1\u03b5\u03cb\u03bd\u03b9\u03cc\u03bd'),'en':'*aa','eo':'*aa','es':u('Reuni\u00f3n'),'et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':u('La 
R\u00e9union'),'fy':'*aa','ga':u('R\u00e9union'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*ga','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*ga','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30ec\u30e6\u30cb\u30aa\u30f3'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ub9ac\uc720\ub2c8\uc5b8'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*ga','mt':'*ga','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*ga','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':u('Reuni\u00e3o'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':'*aa','rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0420\u0435\u0438\u043d\u0438\u043e\u043d'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*ga','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':'*aa','ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'*aa','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u7559\u5c3c\u6c6a\u5c9b'),'zu':'*aa',}, 'RO': 
{'aa':'Romania','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':u('Rum\u00e4nien'),'dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u03a1\u03bf\u03c5\u03bc\u03b1\u03bd\u03af\u03b1'),'en':'*aa','eo':'*aa','es':'Rumania','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Roumanie','fy':'*aa','ga':u('An R\u00f3m\u00e1in'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30eb\u30fc\u30de\u30cb\u30a2'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ub8e8\ub9c8\ub2c8\uc544'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'Rumanija','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':u('Roemeni\u00eb'),'nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'Rumunia','ps':'*aa','pt':u('Rom\u00eania'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':u('Rom\u00e2nia'),'ru':u('\u0420\u0443\u043c\u044b\u043d\u0438\u044f'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0420\u0443\u043c\u0443\u043d\u0438\u0458\u0430'),'ss':'*aa','st':'*aa',
'su':'*aa','sv':'*de','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e23\u0e39\u0e40\u0e21\u0e40\u0e19\u0e35\u0e22'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'Ru-ma-ni','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u7f57\u9a6c\u5c3c\u4e9a'),'zu':'*aa',}, 'RS': {'aa':'Serbia','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Serbien','dv':'*aa','dz':'*aa','ee':'*aa','el':'*aa','en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Serbie','fy':'*aa','ga':'*aa','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30bb\u30eb\u30d3\u30a2'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc138\ub974\ube44\uc544'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*aa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':u('Servi\u00eb'),'nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':u('S\u00e9rvi
a'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':'*aa','rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':'*aa','ss':'*aa','st':'*aa','su':'*aa','sv':'*de','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':'*aa','ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'*aa','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u585e\u5c14\u7ef4\u4e9a'),'zu':'*aa',}, 'RU': {'aa':'Russia','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':u('R\u00fassia'),'ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Russland','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u03a1\u03c9\u03c3\u03af\u03b1'),'en':'*aa','eo':'*aa','es':'Rusia','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':u('Ven\u00e4j\u00e4'),'fj':'*aa','fo':'*aa','fr':'Russie','fy':'*aa','ga':u('C\u00f3naidhm na 
R\u00faise'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*es','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*es','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30ed\u30b7\u30a2'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ub7ec\uc2dc\uc544'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'Russja','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Rusland','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'Rosja','ps':'*aa','pt':'*ca','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0420\u043e\u0441\u0441\u0438\u044f'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0420\u0443\u0441\u0438\u0458\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'Ryssland','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e23\u0e31\u0e2a\u0e40\u0e0b\u0e35\u0e22'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'Nga','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u4fc4\u7f57\u65af'),'zu':'*aa',}, 'RW': 
{'aa':'Rwanda','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Ruanda','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u03a1\u03bf\u03c5\u03ac\u03bd\u03c4\u03b1'),'en':'*aa','eo':'*aa','es':'*de','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':'*de','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*de','iu':'*aa','iw':'*aa','ja':u('\u30eb\u30ef\u30f3\u30c0'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ub974\uc644\ub2e4'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*aa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'*de','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0420\u0443\u0430\u043d\u0434\u0430'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':'*ru','ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e23\u0e32\u0e27\u0e31\u0e25\u0e14\u0e32'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':
'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('Ru-an-\u0111a'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u5362\u65fa\u8fbe'),'zu':'*aa',}, 'SA': {'aa':'Saudi Arabia','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':u('\u0627\u0644\u0633\u0639\u0648\u062f\u064a\u0629'),'as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':u('Ar\u00e0bia Saud\u00ed'),'ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Saudi-Arabien','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u03a3\u03b1\u03bf\u03c5\u03b4\u03b9\u03ba\u03ae \u0391\u03c1\u03b1\u03b2\u03af\u03b1'),'en':'*aa','eo':'*aa','es':'Arabia Saudita','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Arabie Saoudite','fy':'*aa','ga':u('An Araib Sh\u00e1dach'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Arab Saudi','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'*es','iu':'*aa','iw':'*aa','ja':u('\u30b5\u30a6\u30b8\u30a2\u30e9\u30d3\u30a2'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc0ac\uc6b0\ub514\uc544\ub77c\ube44\uc544'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*id','mt':u('G\u0127arabja 
Sawdita'),'my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':u('Saoedi-Arabi\u00eb'),'nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'Arabia Saudyjska','ps':'*aa','pt':u('Ar\u00e1bia Saudita'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0421\u0430\u0443\u0434\u043e\u0432\u0441\u043a\u0430\u044f \u0410\u0440\u0430\u0432\u0438\u044f'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0421\u0430\u0443\u0434\u0438\u0458\u0441\u043a\u0430 \u0410\u0440\u0430\u0431\u0438\u0458\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'Saudiarabien','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e0b\u0e32\u0e2d\u0e38\u0e14\u0e34\u0e2d\u0e32\u0e23\u0e30\u0e40\u0e1a\u0e35\u0e22'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('A-r\u1eadp X\u00ea-\u00fat'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u6c99\u7279\u963f\u62c9\u4f2f'),'zu':'*aa',}, 'SB': {'aa':'Solomon Islands','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Solomon-Inseln','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039d\u03ae\u03c3\u03bf\u03b9 \u03a3\u03bf\u03bb\u03bf\u03bc\u03ce\u03bd\u03c4\u03bf\u03c2'),'en':'*aa','eo':'*aa','es':u('Islas Salom\u00f3n'),'et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':u('\u00celes Salomon'),'fy':'*aa','ga':u('Oile\u00e1in 
Solomon'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Kepulauan Solomon','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'Isole Solomon','iu':'*aa','iw':'*aa','ja':u('\u30bd\u30ed\u30e2\u30f3\u8af8\u5cf6'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc194\ub85c\ubaac \uc81c\ub3c4'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*id','mt':'*aa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Salomonseilanden','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':u('Ilhas Salom\u00e3o'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':'*aa','rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0421\u043e\u043b\u043e\u043c\u043e\u043d\u0441\u043a\u0430 \u041e\u0441\u0442\u0440\u0432\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':u('Salomon\u00f6arna'),'sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':'*aa','ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('Qu\u1ea7n \u0111\u1ea3o X\u00f4-l\u00f4-m\u00f4ng'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u6240\u7f57\u95e8\u7fa4\u5c9b'),'zu':'*aa',}, 'SC': 
{'aa':'Seychelles','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Seychellen','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u03a3\u03b5\u03cb\u03c7\u03ad\u03bb\u03bb\u03b5\u03c2'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':u('Na S\u00e9is\u00e9il'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30bb\u30a4\u30b7\u30a7\u30eb'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc250\uc774\uc258'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*aa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*de','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'Seszele','ps':'*aa','pt':'*aa','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0421\u0435\u0439\u0448\u0435\u043b\u044c\u0441\u043a\u0438\u0435 
\u041e\u0441\u0442\u0440\u043e\u0432\u0430'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0421\u0435\u0458\u0448\u0435\u043b\u0438'),'ss':'*aa','st':'*aa','su':'*aa','sv':'Seychellerna','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e40\u0e0b\u0e22\u0e4c\u0e41\u0e0a\u0e25\u0e25\u0e4c'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('X\u00e2y-sen'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u585e\u820c\u5c14\u7fa4\u5c9b'),'zu':'*aa',}, 'SD': {'aa':'Sudan','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':u('\u0627\u0644\u0633\u0648\u062f\u0627\u0646'),'as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u03a3\u03bf\u03c5\u03b4\u03ac\u03bd'),'en':'*aa','eo':'*aa','es':u('Sud\u00e1n'),'et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Soudan','fy':'*aa','ga':u('An 
tS\u00fad\u00e1in'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30b9\u30fc\u30c0\u30f3'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc218\ub2e8'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*aa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Soedan','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':u('Sud\u00e3o'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0421\u0443\u0434\u0430\u043d'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':'*ru','ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e0b\u0e39\u0e14\u0e32\u0e19'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('Xu-\u0111\u0103ng'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u82cf\u4e39'),'zu':'*aa',}, 'SE': 
{'aa':'Sweden','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':u('Su\u00e8cia'),'ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Schweden','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u03a3\u03bf\u03c5\u03b7\u03b4\u03af\u03b1'),'en':'*aa','eo':'*aa','es':'Suecia','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'Ruotsi','fj':'*aa','fo':'*aa','fr':u('Su\u00e8de'),'fy':'*aa','ga':'An tSualainn','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'Svezia','iu':'*aa','iw':'*aa','ja':u('\u30b9\u30a6\u30a7\u30fc\u30c7\u30f3'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc2a4\uc6e8\ub374'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':u('\u017bvezja'),'my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Zweden','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'Szwecja','ps':'*aa','pt':u('Su\u00e9cia'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0428\u0432\u0435\u0446\u0438\u044f'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0428\u0432\u0435\u0434\u0441\u043a\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'Sverige','sw':'*
aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e2a\u0e27\u0e35\u0e40\u0e14\u0e19'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('Th\u1ee5y \u0110i\u1ec3n'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u745e\u5178'),'zu':'*aa',}, 'SG': {'aa':'Singapore','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'Singapur','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*ca','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u03a3\u03b9\u03b3\u03ba\u03b1\u03c0\u03bf\u03cd\u03c1\u03b7'),'en':'*aa','eo':'*aa','es':'*ca','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Singapour','fy':'*aa','ga':u('Singeap\u00f3r'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Singapura','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30b7\u30f3\u30ac\u30dd\u30fc\u30eb'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc2f1\uac00\ud3ec\ub974'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*id','mt':'Singapor','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa
':'*aa','pi':'*aa','pl':'*ca','ps':'*aa','pt':'Cingapura','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0421\u0438\u043d\u0433\u0430\u043f\u0443\u0440'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':'*ru','ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e2a\u0e34\u0e07\u0e04\u0e42\u0e1b\u0e23\u0e4c'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'Xin-ga-po','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u65b0\u52a0\u5761'),'zu':'*aa',}, 'SH': {'aa':'Saint Helena','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'St. 
Helena','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0391\u03b3\u03af\u03b1 \u0395\u03bb\u03ad\u03bd\u03b7'),'en':'*aa','eo':'*aa','es':'Santa Helena','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':u('Sainte-H\u00e9l\u00e8ne'),'fy':'*aa','ga':u('San H\u00e9ilin'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'Sant\'Elena','iu':'*aa','iw':'*aa','ja':u('\u30bb\u30f3\u30c8\u30d8\u30ec\u30ca\u5cf6'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc138\uc778\ud2b8 \ud5ec\ub808\ub098'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*aa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Sint-Helena','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'*es','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':'*aa','rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0421\u0432\u0435\u0442\u0430 
\u0408\u0435\u043b\u0435\u043d\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':'*aa','ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'*aa','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u5723\u8d6b\u52d2\u62ff\u5c9b'),'zu':'*aa',}, 'SI': {'aa':'Slovenia','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':u('Eslov\u00e8nia'),'ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Slowenien','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u03a3\u03bb\u03bf\u03b2\u03b5\u03bd\u03af\u03b1'),'en':'*aa','eo':'*aa','es':'Eslovenia','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':u('Slov\u00e9nie'),'fy':'*aa','ga':u('An 
tSl\u00f3v\u00e9in'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30b9\u30ed\u30d9\u30cb\u30a2'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc2ac\ub85c\ubca0\ub2c8\uc544'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'Slovenja','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':u('Sloveni\u00eb'),'nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':u('S\u0142owenia'),'ps':'*aa','pt':u('Eslov\u00eania'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0421\u043b\u043e\u0432\u0435\u043d\u0438\u044f'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'Slovenija','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0421\u043b\u043e\u0432\u0435\u043d\u0438\u0458\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'Slovenien','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e2a\u0e42\u0e25\u0e27\u0e34\u0e40\u0e19\u0e35\u0e22'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('Xl\u00f4-ven-ni-a'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u65af\u6d1b\u6587\u5c3c\u4e9a'),'zu':'*aa',}, 'SJ': {'aa':'Svalbard And Jan 
Mayen','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Svalbard und Jan Mayen','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039d\u03ae\u03c3\u03bf\u03b9 \u03a3\u03b2\u03ac\u03bb\u03bc\u03c0\u03b1\u03c1 \u03ba\u03b1\u03b9 \u0393\u03b9\u03b1\u03bd \u039c\u03b1\u03b3\u03b9\u03ad\u03bd'),'en':'*aa','eo':'*aa','es':'Svalbard y Jan Mayen','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Svalbard et Jan Mayen','fy':'*aa','ga':'Svalbard agus Jan Mayen','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Svalbard dan Jan Mayen','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'Svalbard e Jan Mayen','iu':'*aa','iw':'*aa','ja':u('\u30b9\u30d0\u30fc\u30eb\u30d0\u30eb\u8af8\u5cf6\u30fb\u30e4\u30f3\u30de\u30a4\u30a8\u30f3\u5cf6'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc2a4\ubc1c\ubc14\ub974 \ubc0f \uc580\ub9c8\uc6ec'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'Svalbard and Jan Mayen','mt':'*ms','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Svalbard en Jan 
Mayen','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'*it','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':'*aa','rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0421\u0432\u0430\u043b\u0431\u0430\u0440\u0434 \u0438 \u0408\u0430\u043d\u043c\u0430\u0458\u0435\u043d \u041e\u0441\u0442\u0440\u0432\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'Svalbard och Jan Mayen','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':'*aa','ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'*aa','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u65af\u74e6\u5c14\u5df4\u7279\u548c\u626c\u9a6c\u5ef6\u5c9b'),'zu':'*aa',}, 'SK': {'aa':'Slovakia','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':u('Eslov\u00e0quia'),'ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Slowakei','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u03a3\u03bb\u03bf\u03b2\u03b1\u03ba\u03af\u03b1'),'en':'*aa','eo':'*aa','es':'Eslovaquia','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Slovaquie','fy':'*aa','ga':u('An 
tSl\u00f3vaic'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'Slovacchia','iu':'*aa','iw':'*aa','ja':u('\u30b9\u30ed\u30d0\u30ad\u30a2'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc2ac\ub85c\ubc14\ud0a4\uc544'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'Slovakkja','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Slowakije','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':u('S\u0142owacja'),'ps':'*aa','pt':u('Eslov\u00e1quia'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0421\u043b\u043e\u0432\u0430\u043a\u0438\u044f'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':u('Slovensk\u00e1 republika'),'sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0421\u043b\u043e\u0432\u0430\u0447\u043a\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'Slovakien','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e2a\u0e42\u0e25\u0e27\u0e32\u0e40\u0e01\u0e35\u0e22'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('Xl\u00f4-va-ki-a'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u65af\u6d1b\u4f10\u514b'),'zu':'*aa',}, 'SL': {'aa':'Sierra 
Leone','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u03a3\u03b9\u03ad\u03c1\u03b1 \u039b\u03b5\u03cc\u03bd\u03b5'),'en':'*aa','eo':'*aa','es':'Sierra Leona','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':'Siarra Leon','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30b7\u30a8\u30e9\u30ec\u30aa\u30cd'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc2dc\uc5d0\ub77c\ub9ac\uc628'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'Siera Leon','mt':'*aa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'Serra Leoa','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0421\u044c\u0435\u0440\u0440\u0430-\u041b\u0435\u043e\u043d\u0435'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0421\u0438\u0458\u0435\u0440\u0430 
\u041b\u0435\u043e\u043d\u0435'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e40\u0e0b\u0e35\u0e22\u0e23\u0e4c\u0e23\u0e48\u0e32\u0e25\u0e35\u0e2d\u0e2d\u0e19'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('Xi-\u00ea-ra L\u00ea-\u00f4n'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u585e\u62c9\u91cc\u6602'),'zu':'*aa',}, 'SM': {'aa':'San Marino','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0386\u03b3\u03b9\u03bf\u03c2 \u039c\u03b1\u03c1\u03af\u03bd\u03bf\u03c2'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Saint-Marin','fy':'*aa','ga':u('San 
Mair\u00edne'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30b5\u30f3\u30de\u30ea\u30ce'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc0b0\ub9c8\ub9ac\ub178'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*aa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'*aa','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':'*aa','rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0421\u0430\u043d \u041c\u0430\u0440\u0438\u043d\u043e'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':'*aa','ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('Xan Ma-ri-n\u00f4'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u5723\u9a6c\u529b\u8bfa'),'zu':'*aa',}, 'SN': 
{'aa':'Senegal','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u03a3\u03b5\u03bd\u03b5\u03b3\u03ac\u03bb\u03b7'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':u('S\u00e9n\u00e9gal'),'fy':'*aa','ga':u('An tSeineag\u00e1il'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30bb\u30cd\u30ac\u30eb'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc138\ub124\uac08'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*aa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'*aa','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0421\u0435\u043d\u0435\u0433\u0430\u043b'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':'*ru','ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e0b\u0e34\u0e19\u0e35\u0e01\u0e31\
u0e25'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('X\u00ea-n\u00ea-gan'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u585e\u5185\u52a0\u5c14'),'zu':'*aa',}, 'SO': {'aa':'Somalia','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':u('Som\u00e0lia'),'ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u03a3\u03bf\u03bc\u03b1\u03bb\u03af\u03b1'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Somalie','fy':'*aa','ga':u('An tSom\u00e1il'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30bd\u30de\u30ea\u30a2'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc18c\ub9d0\ub9ac\uc544'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'Somalja','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':u('Somali\u00eb'),'nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':u('Som\u00e1lia'),'qu':'*aa','rm':'*aa','rn'
:'*aa','ro':'*aa','ru':u('\u0421\u043e\u043c\u0430\u043b\u0438'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0421\u043e\u043c\u0430\u043b\u0438\u0458\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e42\u0e0b\u0e21\u0e32\u0e40\u0e25\u0e35\u0e22'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('X\u00f4-ma-li'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u7d22\u9a6c\u91cc'),'zu':'*aa',}, 'SR': {'aa':'Suriname','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'Surinam','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u03a3\u03bf\u03c5\u03c1\u03b9\u03bd\u03ac\u03bc'),'en':'*aa','eo':'*aa','es':'*ca','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':'Suranam','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30b9\u30ea\u30ca\u30e0'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc218\ub9ac\ub0a8'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','
mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*ca','mt':'*ca','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*ca','ps':'*aa','pt':'*aa','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0421\u0443\u0440\u0438\u043d\u0430\u043c'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':'*ru','ss':'*aa','st':'*aa','su':'*aa','sv':'*ca','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e0b\u0e39\u0e23\u0e34\u0e19\u0e32\u0e21\u0e34'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'Xu-ri-nam','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u82cf\u91cc\u5357'),'zu':'*aa',}, 'SS': {'aa':'South Sudan','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':u('S\u00fcdsudan'),'dv':'*aa','dz':'*aa','ee':'*aa','el':'*aa','en':'*aa','eo':'*aa','es':u('Sud\u00e1n del Sur'),'et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Sud-Soudan','fy':'*aa','ga':'*aa','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'Sudan del 
Sud','iu':'*aa','iw':'*aa','ja':u('\u5357\u30b9\u30fc\u30c0\u30f3'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ub0a8\uc218\ub2e8'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*aa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'*aa','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':'*aa','rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':'*aa','ss':'*aa','st':'*aa','su':'*aa','sv':'Sydsudan','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':'*aa','ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'*aa','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u5357\u82cf\u4e39'),'zu':'*aa',}, 'ST': {'aa':'Sao Tome And Principe','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Sao Tome und Principe','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u03a3\u03ac\u03bf \u03a4\u03bf\u03bc\u03ad \u03ba\u03b1\u03b9 \u03a0\u03c1\u03af\u03bd\u03c3\u03b9\u03c0\u03b5'),'en':'*aa','eo':'*aa','es':u('Santo Tom\u00e9 y 
Pr\u00edncipe'),'et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':u('Sao Tom\u00e9 et Principe'),'fy':'*aa','ga':'Sao Tome agus Principe','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Sao Tome dan Principe','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':u('S\u00e3o Tom\u00e9 e Principe'),'iu':'*aa','iw':'*aa','ja':u('\u30b5\u30f3\u30c8\u30e1\u30fb\u30d7\u30ea\u30f3\u30b7\u30da'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc0c1\ud22c\uba54 \ud504\ub9b0\uc2dc\ud398'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*id','mt':'Sao Tome and Principe','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':u('Sao Tom\u00e9 en Principe'),'nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':u('S\u00e3o Tom\u00e9 e Pr\u00edncipe'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':'*aa','rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0421\u0430\u043e \u0422\u043e\u043c\u0435 \u0438 \u041f\u0440\u0438\u043d\u0446\u0438\u043f\u0438'),'ss':'*aa','st':'*aa','su':'*aa','sv':u('S\u00e3o Tom\u00e9 och Pr\u00edncipe'),'sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':'*aa','ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('Xao T\u00f4-m\u00ea v\u00e0 
Prin-xi-p\u00ea'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u5723\u591a\u7f8e\u548c\u666e\u6797\u897f\u6bd4'),'zu':'*aa',}, 'SV': {'aa':'El Salvador','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0395\u03bb \u03a3\u03b1\u03bb\u03b2\u03b1\u03b4\u03cc\u03c1'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':u('An tSalvad\u00f3ir'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30a8\u30eb\u30b5\u30eb\u30d0\u30c9\u30eb'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc5d8\uc0b4\ubc14\ub3c4\ub974'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*aa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'Salwador','ps':'*aa','pt':'*aa','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0421\u0430\u043b\u044c\u0432\u0430\u0434\u043e\u0440'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','
si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0421\u0430\u043b\u0432\u0430\u0434\u043e\u0440'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e40\u0e2d\u0e25\u0e0b\u0e32\u0e27\u0e32\u0e14\u0e2d\u0e23\u0e4c'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('En-san-va-\u0111o'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u8428\u5c14\u74e6\u591a'),'zu':'*aa',}, 'SX': {'aa':'Sint Maarten (Dutch part)','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':u('Sint Maarten (Niederl\u00e4ndischer Teil)'),'dv':'*aa','dz':'*aa','ee':'*aa','el':'*aa','en':'*aa','eo':'*aa','es':u('San Mart\u00edn (regi\u00f3n holandesa)'),'et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':u('Saint-Martin (partie n\u00e9erlandaise)'),'fy':'*aa','ga':'*aa','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'Sint Maarten (parte 
olandese)','iu':'*aa','iw':'*aa','ja':u('\u30b7\u30f3\u30c8\u30de\u30fc\u30eb\u30c6\u30f3(\u30aa\u30e9\u30f3\u30c0\u9818)'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc2e0\ud2b8\ub9c8\ub974\ud150(\ub124\ub35c\ub780\ub4dc\ub839)'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*aa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'*aa','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':'*aa','rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':'*aa','ss':'*aa','st':'*aa','su':'*aa','sv':u('Sint Maarten (nederl\u00e4ndska delen)'),'sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':'*aa','ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'*aa','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u8377\u5c5e\u5723\u9a6c\u4e01\u5c9b'),'zu':'*aa',}, 'SY': {'aa':'Syria','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':u('\u0633\u0648\u0631\u064a\u0627'),'as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':u('S\u00edria'),'ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Syrien','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u03a3\u03c5\u03c1\u03af\u03b1, 
\u0391\u03c1\u03b1\u03b2\u03b9\u03ba\u03ae \u0394\u03b7\u03bc\u03bf\u03ba\u03c1\u03b1\u03c4\u03af\u03b1 \u03c4\u03b7\u03c2'),'en':'*aa','eo':'*aa','es':'Siria','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Syrie','fy':'*aa','ga':'An tSiria','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*es','iu':'*aa','iw':'*aa','ja':u('\u30b7\u30ea\u30a2'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc2dc\ub9ac\uc544'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'Sirja','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':u('Syri\u00eb'),'nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'*ca','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0421\u0438\u0440\u0438\u044f'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0421\u0438\u0440\u0438\u0458\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*de','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e0b\u0e35\u0e40\u0e23\u0e35\u0e22'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'Xi-ri','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u53d9\u5229\u4e9a'),'zu':'*aa',}, 'SZ': 
{'aa':'Swaziland','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':u('Swazil\u00e0ndia'),'ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Swasiland','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u03a3\u03bf\u03c5\u03b1\u03b6\u03b9\u03bb\u03ac\u03bd\u03b4\u03b7'),'en':'*aa','eo':'*aa','es':'Suazilandia','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':'An tSuasalainn','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30b9\u30ef\u30b8\u30e9\u30f3\u30c9'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc2a4\uc640\uc9c8\ub79c\ub4dc'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':u('Swa\u017ciland'),'my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'Suazi','ps':'*aa','pt':u('Suazil\u00e2ndia'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0421\u0432\u0430\u0437\u0438\u043b\u0435\u043d\u0434'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':'*ru','ss':'*aa','st':'*aa','su':'*aa','sv':'*
aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e2a\u0e27\u0e32\u0e0b\u0e34\u0e41\u0e25\u0e19\u0e14\u0e4c'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'Xoa-di-len','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u65af\u5a01\u58eb\u5170'),'zu':'*aa',}, 'TC': {'aa':'Turks And Caicos Islands','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Turks- und Caicos-Inseln','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039d\u03ae\u03c3\u03bf\u03b9 \u03a4\u03b5\u03c1\u03ba\u03c2 \u03ba\u03b1\u03b9 \u039a\u03ac\u03b9\u03ba\u03bf\u03c2'),'en':'*aa','eo':'*aa','es':'Islas Turcas y Caicos','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':u('\u00celes Turks et Caicos'),'fy':'*aa','ga':u('Oile\u00e1in Turks agus Caicos'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'Isole Turks e Caicos','iu':'*aa','iw':'*aa','ja':u('\u30bf\u30fc\u30af\u30b9\u8af8\u5cf6\u30fb\u30ab\u30a4\u30b3\u30b9\u8af8\u5cf6'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ud130\ud06c\uc2a4 \ucf00\uc774\ucee4\uc2a4 
\uc81c\ub3c4'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'Turks and Caicos Islands','mt':'*ms','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Turks- en Caicoseilanden','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'Ilhas Turks e Caicos','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':'*aa','rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0422\u0443\u0440\u043a\u0441 \u0438 \u041a\u0430\u0458\u043a\u043e\u0441 \u041e\u0441\u0442\u0440\u0432\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':u('Turks- och Caicos\u00f6arna'),'sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':'*aa','ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'*aa','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u7279\u514b\u65af\u7fa4\u5c9b\u548c\u51ef\u79d1\u65af\u7fa4\u5c9b'),'zu':'*aa',}, 'TD': 
{'aa':'Chad','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'Txad','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Tschad','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u03a4\u03c3\u03b1\u03bd\u03c4'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Tchad','fy':'*aa','ga':'Sead','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'Ciad','iu':'*aa','iw':'*aa','ja':u('\u30c1\u30e3\u30c9'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ucc28\ub4dc'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'Cad','mt':u('\u010aad'),'my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Tsjaad','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'Czad','ps':'*aa','pt':'Chade','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0427\u0430\u0434'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':'*ru','ss':'*aa','st':'*aa','su':'*aa','sv':'*fr','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e0a\u0e32\u0e14'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa'
,'tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('S\u00e1t'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u4e4d\u5f97'),'zu':'*aa',}, 'TF': {'aa':'French Southern Territories','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'Territoris Meridionals Francesos ??','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':u('Franz\u00f6sische S\u00fcdgebiete'),'dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0393\u03b1\u03bb\u03bb\u03b9\u03ba\u03ac \u039d\u03cc\u03c4\u03b9\u03b1 \u0395\u03b4\u03ac\u03c6\u03b7'),'en':'*aa','eo':'*aa','es':'Territorios Franceses del Sur','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':u('Territoires Fran\u00e7ais du Sud'),'fy':'*aa','ga':u('Cr\u00edocha Francacha Theas'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'Territori Francesi d\'Oltremare','iu':'*aa','iw':'*aa','ja':u('\u30d5\u30e9\u30f3\u30b9\u9818\u6975\u5357\u8af8\u5cf6'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ud504\ub791\uc2a4 \ub0a8\ubd80 \uc9c0\ubc29'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':u('Territorji Fran\u010bi\u017ci ta\u2019 Nofsinhar'),'my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Franse 
Gebieden in de zuidelijke Indische Oceaan','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'Francuskie Terytoria Zamorskie','ps':'*aa','pt':u('Territ\u00f3rios Franceses do Sul'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0444\u0440\u0430\u043d\u0446\u0443\u0437\u0441\u043a\u0438\u0435 \u044e\u0436\u043d\u044b\u0435 \u0442\u0435\u0440\u0440\u0438\u0442\u043e\u0440\u0438\u0438'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0424\u0440\u0430\u043d\u0446\u0443\u0441\u043a\u0435 \u0408\u0443\u0436\u043d\u0435 \u0422\u0435\u0440\u0438\u0442\u043e\u0440\u0438\u0458\u0435'),'ss':'*aa','st':'*aa','su':'*aa','sv':u('Franska s\u00f6dra territorierna'),'sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e2d\u0e32\u0e13\u0e32\u0e40\u0e02\u0e15\u0e17\u0e32\u0e07\u0e43\u0e15\u0e49\u0e02\u0e2d\u0e07\u0e1d\u0e23\u0e31\u0e48\u0e07\u0e40\u0e28\u0e2a'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'*aa','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u6cd5\u5c5e\u5357\u7279\u7acb\u5c3c\u8fbe'),'zu':'*aa',}, 'TG': 
{'aa':'Togo','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u03a4\u03cc\u03b3\u03ba\u03bf'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':u('T\u00f3ga'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30c8\u30fc\u30b4'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ud1a0\uace0'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*aa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'*aa','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0422\u043e\u0433\u043e'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':'*ru','ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e42\u0e15\u0e42\u0e01'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa',
'tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('T\u00f4-g\u00f4'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u591a\u54e5'),'zu':'*aa',}, 'TH': {'aa':'Thailand','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':u('Tail\u00e0ndia'),'ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u03a4\u03b1\u03ca\u03bb\u03ac\u03bd\u03b4\u03b7'),'en':'*aa','eo':'*aa','es':'Tailandia','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':u('Tha\u00eflande'),'fy':'*aa','ga':u('An T\u00e9alainn'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'Thailandia','iu':'*aa','iw':'*aa','ja':u('\u30bf\u30a4'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ud0dc\uad6d'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'Tajlandja','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'Tajlandia','ps':'*aa','pt':u('Tail\u00e2ndia'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0422\u0430\u0438\u043b\u0430\u043d\u0434'),'rw':'*aa','sa':'*aa','sc':'*aa','
sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0422\u0430\u0458\u043b\u0430\u043d\u0434'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e1b\u0e23\u0e30\u0e40\u0e17\u0e28\u0e44\u0e17\u0e22'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('Th\u00e1i Lan'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u6cf0\u56fd'),'zu':'*aa',}, 'TJ': {'aa':'Tajikistan','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'Tadjikistan','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Tadschikistan','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u03a4\u03b1\u03c4\u03b6\u03b9\u03ba\u03b9\u03c3\u03c4\u03ac\u03bd'),'en':'*aa','eo':'*aa','es':u('Tayikist\u00e1n'),'et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*ca','fy':'*aa','ga':u('An 
T\u00e1ids\u00edceast\u00e1in'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'Tagikistan','iu':'*aa','iw':'*aa','ja':u('\u30bf\u30b8\u30ad\u30b9\u30bf\u30f3'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ud0c0\uc9c0\ud0a4\uc2a4\ud0c4'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*ca','mt':u('Ta\u0121ikistan'),'my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Tadzjikistan','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':u('Tad\u017cykistan'),'ps':'*aa','pt':u('Tadjiquist\u00e3o'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0422\u0430\u0434\u0436\u0438\u043a\u0438\u0441\u0442\u0430\u043d'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0422\u0430\u045f\u0438\u043a\u0438\u0441\u0442\u0430\u043d'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*nl','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e17\u0e32\u0e08\u0e34\u0e01\u0e34\u0e2a\u0e16\u0e32\u0e19'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('T\u00e1t-gi-ki-xtan'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u5854\u5409\u514b\u65af\u5766'),'zu':'*aa',}, 'TK': 
{'aa':'Tokelau','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u03a4\u03bf\u03ba\u03b5\u03bb\u03ac\u03bf\u03c5'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':u('T\u00f3cal\u00e1'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30c8\u30b1\u30e9\u30a6\u8af8\u5cf6'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ud1a0\ucf08\ub77c\uc6b0'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'Tokelaw','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'*aa','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0422\u043e\u043a\u0435\u043b\u0430\u0443'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':'*ru','ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e42\u0e17\u0e01\u0e34\u0e42\u0e2
5'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'*aa','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u8054\u5408\u7fa4\u5c9b'),'zu':'*aa',}, 'TL': {'aa':'Timor-Leste','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0391\u03bd\u03b1\u03c4\u03bf\u03bb\u03b9\u03ba\u03cc \u03a4\u03b9\u03bc\u03cc\u03c1'),'en':'*aa','eo':'*aa','es':'Timor Oriental','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':u('T\u00edom\u00f3r-Leste'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'Timor Leste','iu':'*aa','iw':'*aa','ja':u('\u6771\u30c6\u30a3\u30e2\u30fc\u30eb'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ub3d9\ud2f0\ubaa8\ub974'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'Timor 
tal-Lvant','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Oost-Timor','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'*aa','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':'*aa','rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0422\u0438\u043c\u043e\u0440-\u041b\u0435\u0441\u0442\u0435'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':'*aa','ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'*aa','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u4e1c\u5e1d\u6c76'),'zu':'*aa',}, 'TM': {'aa':'Turkmenistan','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u03a4\u03bf\u03c5\u03c1\u03ba\u03bc\u03b5\u03bd\u03b9\u03c3\u03c4\u03ac\u03bd'),'en':'*aa','eo':'*aa','es':u('Turkmenist\u00e1n'),'et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':u('Turkm\u00e9nistan'),'fy':'*aa','ga':u('An 
Tuircm\u00e9anast\u00e1in'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30c8\u30eb\u30af\u30e1\u30cb\u30b9\u30bf\u30f3'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ud22c\ub974\ud06c\uba54\ub2c8\uc2a4\ud0c4'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*aa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':u('Turcomenist\u00e3o'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0422\u0443\u0440\u043a\u043c\u0435\u043d\u0438\u0441\u0442\u0430\u043d'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':'*ru','ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e40\u0e15\u0e34\u0e23\u0e4c\u0e01\u0e40\u0e21\u0e19\u0e34\u0e2a\u0e16\u0e32\u0e19'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('Tu\u1ed1c-m\u00ea-ni-xtan'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u571f\u5e93\u66fc\u65af\u5766'),'zu':'*aa',}, 'TN': 
{'aa':'Tunisia','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':u('\u062a\u0648\u0646\u0633'),'as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':u('Tun\u00edsia'),'ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Tunesien','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u03a4\u03c5\u03bd\u03b7\u03c3\u03af\u03b1'),'en':'*aa','eo':'*aa','es':u('T\u00fanez'),'et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Tunisie','fy':'*aa','ga':u('An T\u00fain\u00e9is'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30c1\u30e5\u30cb\u30b8\u30a2'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ud280\ub2c8\uc9c0'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':u('Tune\u017c'),'my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':u('Tunesi\u00eb'),'nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'Tunezja','ps':'*aa','pt':'*ca','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0422\u0443\u043d\u0438\u0441'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':'*ru','ss':'*aa','st':'*aa','su':'*aa','sv':'Tunisien','sw':'*aa','ta':'*aa','te':'*aa
','tg':'*aa','th':u('\u0e15\u0e39\u0e19\u0e34\u0e40\u0e0b\u0e35\u0e22'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'Tuy-ni-di','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u7a81\u5c3c\u65af'),'zu':'*aa',}, 'TO': {'aa':'Tonga','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u03a4\u03cc\u03bd\u03b3\u03ba\u03b1'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':'*aa','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30c8\u30f3\u30ac'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ud1b5\uac00'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*aa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'*aa','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0422\u043e\u
043d\u0433\u0430'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':'*ru','ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e17\u0e2d\u0e07\u0e01\u0e49\u0e32'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('T\u00f4ng-ga'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u6c64\u52a0'),'zu':'*aa',}, 'TR': {'aa':'Turkey','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'Turquia','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':u('T\u00fcrkei'),'dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u03a4\u03bf\u03c5\u03c1\u03ba\u03af\u03b1'),'en':'*aa','eo':'*aa','es':u('Turqu\u00eda'),'et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'Turkki','fj':'*aa','fo':'*aa','fr':'Turquie','fy':'*aa','ga':'An 
Tuirc','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'Turchia','iu':'*aa','iw':'*aa','ja':u('\u30c8\u30eb\u30b3'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ud130\ud0a4'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'Turki','mt':'Turkija','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Turkije','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'Turcja','ps':'*aa','pt':'*ca','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0422\u0443\u0440\u0446\u0438\u044f'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0422\u0443\u0440\u0441\u043a\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'Turkiet','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e15\u0e38\u0e23\u0e01\u0e35'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':u('T\u00fcrkiye'),'ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('Th\u1ed5 Nh\u0129 K\u1ef3'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u571f\u8033\u5176'),'zu':'*aa',}, 'TT': {'aa':'Trinidad and 
Tobago','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'Trinitat i Tobago','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Trinidad und Tobago','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u03a4\u03c1\u03b9\u03bd\u03b9\u03b4\u03ac\u03b4 \u03ba\u03b1\u03b9 \u03a4\u03bf\u03bc\u03c0\u03ac\u03b3\u03ba\u03bf'),'en':'*aa','eo':'*aa','es':'Trinidad y Tobago','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':u('Trinit\u00e9-et-Tobago'),'fy':'*aa','ga':u('Oile\u00e1in na Tr\u00edon\u00f3ide agus Tob\u00e1ga'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Trinidad dan Tobago','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'Trinidad e Tobago','iu':'*aa','iw':'*aa','ja':u('\u30c8\u30ea\u30cb\u30c0\u30fc\u30c9\u30fb\u30c8\u30d0\u30b4'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ud2b8\ub9ac\ub2c8\ub2e4\ub4dc \ud1a0\ubc14\uace0'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*id','mt':'Trinidad u Tobago','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Trinidad en Tobago','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'Trynidad i Tobago','ps':'*aa','pt':'*it','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0422\u0440\u0438\u043d\u0438\u0434\u0430\u0434 
\u0438 \u0422\u043e\u0431\u0430\u0433\u043e'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':'*ru','ss':'*aa','st':'*aa','su':'*aa','sv':'Trinidad och Tobago','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e17\u0e23\u0e34\u0e19\u0e34\u0e41\u0e14\u0e14 \u0e41\u0e25\u0e30\u0e42\u0e17\u0e1a\u0e32\u0e42\u0e01'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('Tri-ni-\u0111\u00e1t v\u00e0 T\u00f4-ba-g\u00f4'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u7279\u7acb\u5c3c\u8fbe\u548c\u591a\u5df4\u54e5'),'zu':'*aa',}, 'TV': {'aa':'Tuvalu','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u03a4\u03bf\u03c5\u03b2\u03b1\u03bb\u03bf\u03cd'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':u('Tuval\u00fa'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30c4\u30d0\u30eb'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ud22c\ubc1c\ub8e8'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt'
:'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*aa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'*aa','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':'*aa','rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0422\u0443\u0432\u0430\u043b\u0443'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':'*aa','ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'Tu-va-lu','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u56fe\u74e6\u5362'),'zu':'*aa',}, 'TW': {'aa':'Taiwan','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u03a4\u03b1\u03ca\u03b2\u03ac\u03bd'),'en':'*aa','eo':'*aa','es':u('Taiw\u00e1n'),'et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':u('An 
T\u00e9av\u00e1in'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u53f0\u6e7e'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ub300\ub9cc'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'Tajwan','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*mt','ps':'*aa','pt':'*aa','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0422\u0430\u0439\u0432\u0430\u043d\u044c'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0422\u0430\u0458\u0432\u0430\u043d'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e44\u0e15\u0e49\u0e2b\u0e27\u0e31\u0e19'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('\u0110\u00e0i Loan'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u53f0\u6e7e\u5730\u533a'),'zu':'*aa',}, 'TZ': 
{'aa':'Tanzania','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':u('Tanz\u00e0nia'),'ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Tansania','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u03a4\u03b1\u03bd\u03b6\u03b1\u03bd\u03af\u03b1'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Tanzanie','fy':'*aa','ga':u('An Tans\u00e1in'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30bf\u30f3\u30b6\u30cb\u30a2'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ud0c4\uc790\ub2c8\uc544'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':u('Tan\u017canija'),'my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':u('Tanz\u00e2nia'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0422\u0430\u043d\u0437\u0430\u043d\u0438\u044f'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0422\u0430\u043d\u0437\u0430\u043d\u0438\u0458\u0430'),'ss':'*aa','st':'*aa','su':'*aa
','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e17\u0e32\u0e19\u0e0b\u0e32\u0e40\u0e19\u0e35\u0e22'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'Tan-da-ni-a','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u5766\u6851\u5c3c\u4e9a'),'zu':'*aa',}, 'UA': {'aa':'Ukraine','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':u('Ucra\u00efna'),'ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039f\u03c5\u03ba\u03c1\u03b1\u03bd\u03af\u03b1'),'en':'*aa','eo':'*aa','es':'Ucrania','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':u('An 
\u00dacr\u00e1in'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Ukraina','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'Ucraina','iu':'*aa','iw':'*aa','ja':u('\u30a6\u30af\u30e9\u30a4\u30ca'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc6b0\ud06c\ub77c\uc774\ub098'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*id','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':u('Oekra\u00efne'),'nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*id','ps':'*aa','pt':u('Ucr\u00e2nia'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0423\u043a\u0440\u0430\u0438\u043d\u0430'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0423\u043a\u0440\u0430\u0458\u0438\u043d\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*id','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e22\u0e39\u0e40\u0e04\u0e23\u0e19'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':u('\u0423\u043a\u0440\u0430\u0457\u043d\u0430'),'ur':'*aa','uz':'*aa','ve':'*aa','vi':'U-crai-na','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u4e4c\u514b\u5170'),'zu':'*aa',}, 'UG': 
{'aa':'Uganda','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039f\u03c5\u03b3\u03ba\u03ac\u03bd\u03c4\u03b1'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Ouganda','fy':'*aa','ga':'*aa','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30a6\u30ac\u30f3\u30c0'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc6b0\uac04\ub2e4'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*aa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Oeganda','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'*aa','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0423\u0433\u0430\u043d\u0434\u0430'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':'*ru','ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e2d\u0e39\u0e01\u0e32\u0e19\u0e14\u0e32'),'ti':'*aa','tk':'*aa','tl':
'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('U-gan-\u0111a'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u4e4c\u5e72\u8fbe'),'zu':'*aa',}, 'UM': {'aa':'United States Minor Outlying Islands','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':u('\u00dcbrige Inseln im Pazifik der USA'),'dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0391\u03c0\u03bf\u03bc\u03b1\u03ba\u03c1\u03c5\u03c3\u03bc\u03ad\u03bd\u03b5\u03c2 \u039d\u03b7\u03c3\u03af\u03b4\u03b5\u03c2 \u03c4\u03c9\u03bd \u0397\u03bd\u03c9\u03bc\u03ad\u03bd\u03c9\u03bd \u03a0\u03bf\u03bb\u03b9\u03c4\u03b5\u03b9\u03ce\u03bd'),'en':'*aa','eo':'*aa','es':'Islas menores alejadas de los Estados Unidos','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':u('D\u00e9pendances am\u00e9ricaines du Pacifique'),'fy':'*aa','ga':u('Mion-Oile\u00e1in Imeallacha S.A.M.'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'Isole minori lontane degli Stati Uniti','iu':'*aa','iw':'*aa','ja':u('\u7c73\u9818\u592a\u5e73\u6d0b\u8af8\u5cf6'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ubbf8\uad6d\ub839 
\uad70\ub3c4'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*aa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Amerikaanse kleinere afgelegen eilanden','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'Ilhas Menores Distantes dos Estados Unidos','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':'*aa','rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u041c\u0430\u045a\u0430 \u0443\u0434\u0430\u0459\u0435\u043d\u0430 \u043e\u0441\u0442\u0440\u0432\u0430 \u0421\u0410\u0414'),'ss':'*aa','st':'*aa','su':'*aa','sv':u('F\u00f6renta staternas mindre \u00f6ar i Oceanien och V\u00e4stindien'),'sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':'*aa','ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'*aa','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u7f8e\u5c5e\u5c0f\u5965\u7279\u5170\u7fa4\u5c9b'),'zu':'*aa',}, 'US': {'aa':'United States','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'Estats Units','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Vereinigte Staaten von Amerika','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0397\u03bd\u03c9\u03bc\u03ad\u03bd\u03b5\u03c2 
\u03a0\u03bf\u03bb\u03b9\u03c4\u03b5\u03af\u03b5\u03c2'),'en':'*aa','eo':'*aa','es':'Estados Unidos','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'Yhdysvallat','fj':'*aa','fo':'*aa','fr':'Etats-Unis','fy':'*aa','ga':u('St\u00e1it Aontaithe Mheirice\u00e1'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':u('\u0938\u0902\u092f\u0941\u0915\u094d\u0924 \u0930\u093e\u091c\u094d\u092f \u0905\u092e\u0947\u0930\u093f\u0915\u093e'),'ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Amerika Serikat','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'Stati Uniti','iu':'*aa','iw':'*aa','ja':u('\u30a2\u30e1\u30ea\u30ab\u5408\u8846\u56fd'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ubbf8\uad6d'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'Amerika Syarikat','mt':'*it','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Verenigde Staten','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'Stany Zjednoczone Ameryki','ps':'*aa','pt':'*es','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0421\u043e\u0435\u0434\u0438\u043d\u0435\u043d\u043d\u044b\u0435 \u0428\u0442\u0430\u0442\u044b'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0421\u0458\u0435\u0434\u0438\u045a\u0435\u043d\u0435 \u0410\u043c\u0435\u0440\u0438\u0447\u043a\u0435 \u0414\u0440\u0436\u0430\u0432\u0435'),'ss':'*aa','st':'*aa','su':'*aa','sv':u('Amerikas F\u00f6renta 
Stater'),'sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e2a\u0e2b\u0e23\u0e31\u0e10\u0e2d\u0e40\u0e21\u0e23\u0e34\u0e01\u0e32'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('Hoa K\u1ef3'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u7f8e\u56fd'),'zu':'*aa',}, 'UY': {'aa':'Uruguay','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'Uruguai','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039f\u03c5\u03c1\u03bf\u03c5\u03b3\u03bf\u03c5\u03ac\u03b7'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':'Urugua','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30a6\u30eb\u30b0\u30a2\u30a4'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc6b0\ub8e8\uacfc\uc774'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'Urugwaj','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*a
a','pa':'*aa','pi':'*aa','pl':'*mt','ps':'*aa','pt':'*ca','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0423\u0440\u0443\u0433\u0432\u0430\u0439'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0423\u0440\u0443\u0433\u0432\u0430\u0458'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e2d\u0e38\u0e23\u0e39\u0e01\u0e27\u0e31\u0e22'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'U-ru-goay','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u4e4c\u62c9\u572d'),'zu':'*aa',}, 'UZ': {'aa':'Uzbekistan','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Usbekistan','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039f\u03c5\u03b6\u03bc\u03c0\u03b5\u03ba\u03b9\u03c3\u03c4\u03ac\u03bd'),'en':'*aa','eo':'*aa','es':u('Uzbekist\u00e1n'),'et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':u('Ouzb\u00e9kistan'),'fy':'*aa','ga':u('\u00daisb\u00e9iceast\u00e1in'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30a6\u30ba\u30d9\u30ad\u30b9\u30bf\u30f3'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc6b0\uc988\ubca0\ud0a4\uc2a4\ud0c4'),'
kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':u('U\u017cbekistan'),'my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Oezbekistan','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':u('Uzbequist\u00e3o'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0423\u0437\u0431\u0435\u043a\u0438\u0441\u0442\u0430\u043d'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':'*ru','ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e2d\u0e38\u0e0b\u0e40\u0e1a\u0e01\u0e34\u0e2a\u0e16\u0e32\u0e19'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('U-d\u01a1-b\u00ea-ki-xtan'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u4e4c\u5179\u522b\u514b\u65af\u5766'),'zu':'*aa',}, 'VA': {'aa':'Vatican','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':u('Vatic\u00e0'),'ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Vatikanstadt','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0391\u03b3\u03af\u03b1 \u0388\u03b4\u03c1\u03b1 (\u0392\u03b1\u03c4\u03b9\u03ba\u03b1\u03bd\u03cc)'),'en':'*aa','eo':'*aa','es':'Ciudad del 
Vaticano','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':u('An Chathaoir Naofa (St\u00e1t Chathair na Vatac\u00e1ine)'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Vatikan','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':u('Citt\u00e0 del Vaticano'),'iu':'*aa','iw':'*aa','ja':u('\u30d0\u30c1\u30ab\u30f3'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ubc14\ud2f0\uce78'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*id','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Vaticaanstad','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'Watykan','ps':'*aa','pt':'Vaticano','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0412\u0430\u0442\u0438\u043a\u0430\u043d'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':'*ru','ss':'*aa','st':'*aa','su':'*aa','sv':'Vatikanen','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e27\u0e32\u0e15\u0e34\u0e01\u0e31\u0e19'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('Va-ti-c\u0103ng'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u68b5\u8482\u5188'),'zu':'*aa',}, 'VC': {'aa':'Saint Vincent And The 
Grenadines','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'St. Vincent und die Grenadinen','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0386\u03b3\u03b9\u03bf\u03c2 \u0392\u03b9\u03ba\u03ad\u03bd\u03c4\u03b9\u03bf\u03c2 \u03ba\u03b1\u03b9 \u0393\u03c1\u03b5\u03bd\u03b1\u03b4\u03af\u03bd\u03b5\u03c2'),'en':'*aa','eo':'*aa','es':'San Vicente y las Granadinas','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Saint-Vincent-et-les Grenadines','fy':'*aa','ga':'Saint Vincent agus na Grenadines','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Saint Vincent dan Grenadines','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'Saint Vincent e Grenadine','iu':'*aa','iw':'*aa','ja':u('\u30bb\u30f3\u30c8\u30d3\u30f3\u30bb\u30f3\u30c8\u304a\u3088\u3073\u30b0\u30ec\u30ca\u30c7\u30a3\u30fc\u30f3\u8af8\u5cf6'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc138\uc778\ud2b8 \ube48\uc13c\ud2b8 \uadf8\ub808\ub098\ub518\uc2a4'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*id','mt':'Saint Vincent and the Grenadines','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Saint Vincent en de 
Grenadines','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':u('S\u00e3o Vicente e Granadinas'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':'*aa','rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0421\u0435\u043d\u0442 \u0412\u0438\u043d\u0441\u0435\u043d\u0442 \u0438 \u0413\u0440\u0435\u043d\u0430\u0434\u0438\u043d\u0438'),'ss':'*aa','st':'*aa','su':'*aa','sv':'Saint Vincent och Grenadinerna','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':'*aa','ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('Xan Vin-xen v\u00e0 Gr\u00ea-na-din'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u5723\u6587\u68ee\u7279\u548c\u683c\u6797\u7eb3\u4e01\u65af'),'zu':'*aa',}, 'VE': 
{'aa':'Venezuela','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':u('Vene\u00e7uela'),'ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0392\u03b5\u03bd\u03b5\u03b6\u03bf\u03c5\u03ad\u03bb\u03b1'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':u('V\u00e9n\u00e9zuela'),'fy':'*aa','ga':u('Veinis\u00e9ala'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30d9\u30cd\u30ba\u30a8\u30e9'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ubca0\ub124\uc218\uc5d8\ub77c'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'Venezwela','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'Wenezuela','ps':'*aa','pt':'*aa','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0412\u0435\u043d\u0435\u0441\u0443\u044d\u043b\u0430'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0412\u0435\u043d\u0435\u0446\u0443\u0435\u043b\u0430'),'ss':'*aa',
'st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e40\u0e27\u0e40\u0e19\u0e0b\u0e39\u0e40\u0e2d\u0e25\u0e48\u0e32'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('V\u00ea-n\u00ea-zu-\u00ea-la'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u59d4\u5185\u745e\u62c9'),'zu':'*aa',}, 'VG': {'aa':'British Virgin Islands','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':u('Illes Verges Brit\u00e0niques'),'ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Britische Jungferninseln','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0392\u03c1\u03b5\u03c4\u03b1\u03bd\u03b9\u03ba\u03ad\u03c2 \u03a0\u03b1\u03c1\u03b8\u03ad\u03bd\u03bf\u03b9 \u039d\u03ae\u03c3\u03bf\u03b9'),'en':'*aa','eo':'*aa','es':u('Islas V\u00edrgenes Brit\u00e1nicas'),'et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Iles Vierges Britanniques','fy':'*aa','ga':u('Oile\u00e1in Bhriotanacha na Maighdean'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Kepulauan British Virgin','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'Isole Vergini (GB)','iu':'*aa','iw':'*aa','ja':u('\u82f1\u9818\u30d0\u30fc\u30b8\u30f3\u8af8\u5cf6'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc601\uad6d\ub839 \ubc84\uc9c4 
\uc544\uc77c\ub79c\ub4dc'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*aa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Britse Maagdeneilanden','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'Dziewicze Wyspy Brytyjskie','ps':'*aa','pt':u('Ilhas Virgens Brit\u00e2nicas'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0411\u0440\u0438\u0442\u0430\u043d\u0441\u043a\u0438\u0435 \u0412\u0438\u0440\u0433\u0438\u043d\u0441\u043a\u0438\u0435 \u043e\u0441\u0442\u0440\u043e\u0432\u0430'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0411\u0440\u0438\u0442\u0430\u043d\u0441\u043a\u0430 \u0414\u0435\u0432\u0438\u0447\u0430\u043d\u0441\u043a\u0430 \u041e\u0441\u0442\u0440\u0432\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'Brittiska Virgin Islands','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e1a\u0e23\u0e34\u0e17\u0e34\u0e0a\u0e40\u0e27\u0e2d\u0e23\u0e4c\u0e08\u0e34\u0e19\u0e44\u0e2d\u0e2a\u0e4c\u0e41\u0e25\u0e19\u0e14\u0e4c'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'*aa','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u82f1\u5c5e\u7ef4\u4eac\u7fa4\u5c9b'),'zu':'*aa',}, 'VI': {'aa':'U.S. 
Virgin Islands','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'Illes Verges dels USA','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Amerikanische Jungferninseln','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0391\u03bc\u03b5\u03c1\u03b9\u03ba\u03b1\u03bd\u03b9\u03ba\u03ad\u03c2 \u03a0\u03b1\u03c1\u03b8\u03ad\u03bd\u03bf\u03b9 \u039d\u03ae\u03c3\u03bf\u03b9'),'en':'*aa','eo':'*aa','es':u('Islas V\u00edrgenes Americanas'),'et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':u('Iles Vierges Am\u00e9ricaines'),'fy':'*aa','ga':u('Oile\u00e1in na Maighdean S.A.M.'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Kepulauan U.S. 
Virgin','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'Isole Vergini (USA)','iu':'*aa','iw':'*aa','ja':u('\u7c73\u9818\u30d0\u30fc\u30b8\u30f3\u8af8\u5cf6'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ubbf8\uad6d\ub839 \ubc84\uc9c4 \uc544\uc77c\ub79c\ub4dc'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*aa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Amerikaanse Maagdeneilanden','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':u('Dziewicze Wyspy Stan\u00f3w Zjednoczonych'),'ps':'*aa','pt':'Ilhas Virgens dos EUA','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0412\u0438\u0440\u0433\u0438\u043d\u0441\u043a\u0438\u0435 \u043e\u0441\u0442\u0440\u043e\u0432\u0430 \u0421\u043e\u0435\u0434\u0438\u043d\u0435\u043d\u043d\u044b\u0445 \u0428\u0442\u0430\u0442\u043e\u0432'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0421.\u0410.\u0414. 
\u0414\u0435\u0432\u0438\u0447\u0430\u043d\u0441\u043a\u0430 \u041e\u0441\u0442\u0440\u0432\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'Amerikanska Virgin Islands','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e22\u0e39\u0e40\u0e2d\u0e2a\u0e40\u0e27\u0e2d\u0e23\u0e4c\u0e08\u0e34\u0e19\u0e44\u0e2d\u0e2a\u0e4c\u0e41\u0e25\u0e19\u0e14\u0e4c'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'*aa','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u7f8e\u5c5e\u7ef4\u4eac\u7fa4\u5c9b'),'zu':'*aa',}, 'VN': {'aa':'Vietnam','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0392\u03b9\u03b5\u03c4\u03bd\u03ac\u03bc'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':u('V\u00edtneam'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30d9\u30c8\u30ca\u30e0'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ubca0\ud2b8\ub0a8'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms'
:'*aa','mt':'Vjetnam','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'Wietnam','ps':'*aa','pt':u('Vietn\u00e3'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0412\u044c\u0435\u0442\u043d\u0430\u043c'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0412\u0438\u0458\u0435\u0442\u043d\u0430\u043c'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e40\u0e27\u0e35\u0e22\u0e14\u0e19\u0e32\u0e21'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('Vi\u1ec7t Nam'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u8d8a\u5357'),'zu':'*aa',}, 'VU': 
{'aa':'Vanuatu','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0392\u03b1\u03bd\u03bf\u03c5\u03ac\u03c4\u03bf\u03c5'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':u('Vanuat\u00fa'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30d0\u30cc\u30a2\u30c4'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ubc14\ub204\uc544\ud22c'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'Vanwatu','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'*aa','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0412\u0430\u043d\u0443\u0430\u0442\u0443'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':'*ru','ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e27\u0e32\u0e19\u0e31\u0e27\u0e15\u0e39'),
'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'Va-nu-a-tu','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u74e6\u52aa\u963f\u56fe'),'zu':'*aa',}, 'WF': {'aa':'Wallis And Futuna','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Wallis und Futuna','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039d\u03ae\u03c3\u03bf\u03b9 \u039f\u03c5\u03b1\u03bb\u03bb\u03af\u03c2 \u03ba\u03b1\u03b9 \u03a6\u03bf\u03c5\u03c4\u03bf\u03c5\u03bd\u03ac'),'en':'*aa','eo':'*aa','es':'Wallis y Futuna','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Wallis-et-Futuna','fy':'*aa','ga':u('Oile\u00e1in Vail\u00eds agus Fut\u00fana'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Wallis dan Futuna','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'Wallis e Futuna','iu':'*aa','iw':'*aa','ja':u('\u30ef\u30ea\u30b9\u30fb\u30d5\u30c6\u30e5\u30ca\u8af8\u5cf6'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc70c\ub9ac\uc2a4 \ud478\ud22c\ub098'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'Wallis and 
Futuna','mt':'*ms','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Wallis en Futuna','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'*it','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':'*aa','rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0412\u0430\u043b\u0438\u0441 \u0438 \u0424\u0443\u0442\u0443\u043d\u0430 \u041e\u0441\u0442\u0440\u0432\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'Wallis och Futuna','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':'*aa','ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'*aa','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u74e6\u5229\u65af\u7fa4\u5c9b\u548c\u5bcc\u56fe\u7eb3\u7fa4\u5c9b'),'zu':'*aa',}, 'WS': 
{'aa':'Samoa','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u03a3\u03b1\u03bc\u03cc\u03b1'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':u('Sam\u00f3'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u897f\u30b5\u30e2\u30a2'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc0ac\ubaa8\uc544'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'*aa','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'*aa','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':'*aa','rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0421\u0430\u043c\u043e\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':'*aa','ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':
'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'Xa-moa','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':'*zh','zu':'*aa',}, 'YE': {'aa':'Yemen','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':u('\u0627\u0644\u064a\u0645\u0646'),'as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'Iemen','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Jemen','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u03a5\u03b5\u03bc\u03ad\u03bd\u03b7'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':u('Y\u00e9men'),'fy':'*aa','ga':u('\u00c9imin'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Yaman','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30a4\u30a8\u30e1\u30f3'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc608\uba58'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*id','mt':'*de','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*de','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*de','ps':'*aa','pt':u('I\u00eamen'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0419\u0435\u043c\u0435\u043d'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*
aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0408\u0435\u043c\u0435\u043d'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*de','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e40\u0e22\u0e40\u0e21\u0e19'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('Y-\u00ea-men'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u4e5f\u95e8'),'zu':'*aa',}, 'YT': {'aa':'Mayotte','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'*aa','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039c\u03b1\u03b3\u03b9\u03cc\u03c4'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':'*aa','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'Mayotta','iu':'*aa','iw':'*aa','ja':u('\u30de\u30e8\u30c3\u30c8\u5cf6'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ub9c8\uc694\ud2f0'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':'Majotte','my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','o
c':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':'*aa','qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u041c\u0430\u0439\u043e\u0442\u0442\u0435'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u041c\u0430\u0458\u043e\u0442\u0435'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e21\u0e32\u0e22\u0e2d\u0e15'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'*aa','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u9a6c\u7ea6\u7279\u5c9b'),'zu':'*aa',}, 'ZA': {'aa':'South Africa','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':u('Sud-\u00e0frica'),'ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':u('S\u00fcdafrika'),'dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u039d\u03cc\u03c4\u03b9\u03b1 \u0391\u03c6\u03c1\u03b9\u03ba\u03ae'),'en':'*aa','eo':'*aa','es':u('Sud\u00e1frica'),'et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Afrique du Sud','fy':'*aa','ga':'An Afraic Theas','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'Afrika 
Selatan','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*id','io':'*aa','is':'*aa','it':'Sudafrica','iu':'*aa','iw':'*aa','ja':u('\u5357\u30a2\u30d5\u30ea\u30ab'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\ub0a8\uc544\ud504\ub9ac\uce74'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*id','mt':u('Afrika t\u2019Isfel'),'my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'Zuid-Afrika','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':u('Republika Po\u0142udniowej Afryki'),'ps':'*aa','pt':u('\u00c1frica do Sul'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u042e\u0436\u043d\u0430\u044f \u0410\u0444\u0440\u0438\u043a\u0430'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0408\u0443\u0436\u043d\u043e\u0430\u0444\u0440\u0438\u0447\u043a\u0430 \u0420\u0435\u043f\u0443\u0431\u043b\u0438\u043a\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'Sydafrika','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e41\u0e2d\u0e1f\u0e23\u0e34\u0e01\u0e32\u0e43\u0e15\u0e49'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':'Nam Phi','vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u5357\u975e'),'zu':'*aa',}, 'ZM': 
{'aa':'Zambia','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':u('Z\u00e0mbia'),'ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Sambia','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0396\u03ac\u03bc\u03c0\u03b9\u03b1'),'en':'*aa','eo':'*aa','es':'*aa','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'Zambie','fy':'*aa','ga':'An tSaimbia','gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30b6\u30f3\u30d3\u30a2'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc7a0\ube44\uc544'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':u('\u017bambja'),'my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':u('Z\u00e2mbia'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0417\u0430\u043c\u0431\u0438\u044f'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':u('\u0417\u0430\u043c\u0431\u0438\u0458\u0430'),'ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0
e41\u0e0b\u0e21\u0e40\u0e1a\u0e35\u0e22'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('D\u0103m-bi-a'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u8d5e\u6bd4\u4e9a'),'zu':'*aa',}, 'ZW': {'aa':'Zimbabwe','ab':'*aa','ae':'*aa','af':'*aa','ak':'*aa','am':'*aa','an':'*aa','ar':'*aa','as':'*aa','av':'*aa','ay':'*aa','az':'*aa','ba':'*aa','be':'*aa','bg':'*aa','bh':'*aa','bi':'*aa','bm':'*aa','bn':'*aa','bo':'*aa','br':'*aa','bs':'*aa','ca':'*aa','ce':'*aa','ch':'*aa','co':'*aa','cr':'*aa','cs':'*aa','cu':'*aa','cv':'*aa','cy':'*aa','da':'*aa','de':'Simbabwe','dv':'*aa','dz':'*aa','ee':'*aa','el':u('\u0396\u03b9\u03bc\u03c0\u03ac\u03bc\u03c0\u03bf\u03c5\u03b5'),'en':'*aa','eo':'*aa','es':'Zimbabue','et':'*aa','eu':'*aa','fa':'*aa','ff':'*aa','fi':'*aa','fj':'*aa','fo':'*aa','fr':'*aa','fy':'*aa','ga':u('An tSiomb\u00e1ib'),'gd':'*aa','gl':'*aa','gn':'*aa','gu':'*aa','gv':'*aa','ha':'*aa','he':'*aa','hi':'*aa','ho':'*aa','hr':'*aa','ht':'*aa','hu':'*aa','hy':'*aa','hz':'*aa','ia':'*aa','id':'*aa','ie':'*aa','ig':'*aa','ii':'*aa','ik':'*aa','in':'*aa','io':'*aa','is':'*aa','it':'*aa','iu':'*aa','iw':'*aa','ja':u('\u30b8\u30f3\u30d0\u30d6\u30a8'),'ji':'*aa','jv':'*aa','ka':'*aa','kg':'*aa','ki':'*aa','kj':'*aa','kk':'*aa','kl':'*aa','km':'*aa','kn':'*aa','ko':u('\uc9d0\ubc14\ube0c\uc6e8'),'kr':'*aa','ks':'*aa','ku':'*aa','kv':'*aa','kw':'*aa','ky':'*aa','la':'*aa','lb':'*aa','lg':'*aa','li':'*aa','ln':'*aa','lo':'*aa','lt':'*aa','lu':'*aa','lv':'*aa','mg':'*aa','mh':'*aa','mi':'*aa','mk':'*aa','ml':'*aa','mn':'*aa','mo':'*aa','mr':'*aa','ms':'*aa','mt':u('\u017bimbabwe'),'my':'*aa','na':'*aa','nb':'*aa','nd':'*aa','ne':'*aa','ng':'*aa','nl':'*aa','nn':'*aa','no':'*aa','nr':'*aa','nv':'*aa','ny':'*aa','oc':'*aa','oj':'*aa','om':'*aa','or':'*aa','os':'*aa','pa':'*aa','pi':'*aa','pl':'*aa','ps':'*aa','pt':u('Z
imb\u00e1bue'),'qu':'*aa','rm':'*aa','rn':'*aa','ro':'*aa','ru':u('\u0417\u0438\u043c\u0431\u0430\u0431\u0432\u0435'),'rw':'*aa','sa':'*aa','sc':'*aa','sd':'*aa','se':'*aa','sg':'*aa','si':'*aa','sk':'*aa','sl':'*aa','sm':'*aa','sn':'*aa','so':'*aa','sq':'*aa','sr':'*ru','ss':'*aa','st':'*aa','su':'*aa','sv':'*aa','sw':'*aa','ta':'*aa','te':'*aa','tg':'*aa','th':u('\u0e0b\u0e34\u0e21\u0e1a\u0e32\u0e1a\u0e40\u0e27'),'ti':'*aa','tk':'*aa','tl':'*aa','tn':'*aa','to':'*aa','tr':'*aa','ts':'*aa','tt':'*aa','tw':'*aa','ty':'*aa','ug':'*aa','uk':'*aa','ur':'*aa','uz':'*aa','ve':'*aa','vi':u('Dim-ba-bu-\u00ea'),'vo':'*aa','wa':'*aa','wo':'*aa','xh':'*aa','yi':'*aa','yo':'*aa','za':'*aa','zh':u('\u6d25\u5df4\u5e03\u97e6'),'zu':'*aa',}, }
2,296.970037
3,269
0.434086
108,948
613,291
2.443533
0.03096
0.003771
0.005657
0.007513
0.837757
0.82784
0.820102
0.810144
0.801242
0.790153
0
0.059328
0.004357
613,291
266
3,270
2,305.605263
0.376652
0.000629
0
0
1
0.806324
0.520713
0.100919
0
0
0
0
0
1
0
false
0
0.003953
0
0.003953
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
1
1
1
0
0
0
1
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
11
530455503b3ac22e6287f98b0751f1ea2011f19a
528,032
py
Python
adi_study_watch/nrf5_sdk_15.2.0/adi_study_watch/cli/m2m2/tools/CLI.py
ArrowElectronics/Vital-Signs-Monitoring
ba43fe9a116d94170561433910fd7bffba5726e7
[ "Unlicense" ]
5
2021-06-13T17:11:19.000Z
2021-12-01T18:20:38.000Z
adi_study_watch/nrf5_sdk_15.2.0/adi_study_watch/cli/m2m2/tools/CLI.py
ArrowElectronics/Vital-Signs-Monitoring
ba43fe9a116d94170561433910fd7bffba5726e7
[ "Unlicense" ]
null
null
null
adi_study_watch/nrf5_sdk_15.2.0/adi_study_watch/cli/m2m2/tools/CLI.py
ArrowElectronics/Vital-Signs-Monitoring
ba43fe9a116d94170561433910fd7bffba5726e7
[ "Unlicense" ]
1
2022-01-08T15:01:44.000Z
2022-01-08T15:01:44.000Z
try: import cmd, struct, serial, ctypes, threading, Queue, binascii, sys, glob, subprocess, errno import datetime, time, os import math import socket import m2m2_server from cobs import cobs from m2m2_common import * import colorama as cr import array as arr import tqdm import re from cli_utils import * except ImportError as e: print "Oops, looks like you're missing a Python module!" print "Try installing it with pip: `pip install MODULE_NAME`" print "Error message is: {}".format(e) print "Exiting..." raise ImportError(e) class LowTouch(): User_File = '' User_File_name = "USER_INPUT_CONFIG.LOG" DCB_File_name = "GEN_BLK_DCB_CONFIG.LOG" Startcmd = False Startcmdlen = 0 Startcmdcount = 0 Stopcmd = False Stopcmdlen = 0 Stopcmdcount = 0 Enable_lowtouch = False enable_csv_logs = 0 fs_file_pagechunk_list = [] lowtouch = LowTouch() class verboser(): msg_formatters = { 0:None, # A placeholder for not printing anything 1:{"fmt":"{}", "help":"Regular old prints"}, 2:{"fmt":cr.Back.WHITE + cr.Fore.BLACK + "{}", "help":"More chatter (i.e. Which command was just run)"}, 3:{"fmt":cr.Back.CYAN + cr.Fore.BLACK + "{}", "help": "Underlying transaction info (i.e. 
raw packet data)"}, 4:{"fmt":cr.Back.MAGENTA + cr.Fore.CYAN + "{}", "help": "CLI Interior workings (threads starting, sockets opened, etc)"}, } err_formatter = cr.Back.RED + cr.Fore.GREEN + "ERR: {}" level = 1 console_socket = None console = None port = 1069 def __init__(self, console_level=2): self.console_level = console_level self.dvt_ver = None self.clk_calib_val = None def __del__(self): self.stop_console() def write(self, msg, level = 1): if ((self.level == 0) or (level == 0)): return if level <= self.level: outstr = "" whitespace = "" whitespace = msg[:len(msg) - len(msg.lstrip())] msg_str = msg[len(msg) - len(msg.lstrip()):] output_str = whitespace + self.msg_formatters[level]["fmt"].format(msg_str) if level >= self.console_level and self.console_socket != None: self.console_write(output_str) else: print output_str def console_write(self, text, level = 1): if self.console != None: try: self.console_socket.send(text) except socket.error as e: if e.errno == errno.ENETRESET or e.errno == errno.ECONNABORTED or e.errno == errno.ECONNRESET: self.err("Socket error: {}".format(e), force_print=True) self.err("Attempting to restart the console...", force_print=True) self.stop_console() self.start_console() else: self.err("Socket error: {}".format(e), force_print=True) def err(self, msg, level = 1, force_print=False): if lowtouch.Enable_lowtouch == True: print(" skip sending Command to device...") return None output_str = self.err_formatter.format(msg) if self.console_socket != None and self.level >= self.console_level and not force_print: self.console_write(output_str) else: print output_str def set_level(self, level): self.level = level if level >= self.console_level and self.console == None: self.start_console() elif level <= self.console_level and self.console_socket != None: self.stop_console() def start_console(self): if "nt" in os.name: self.console = subprocess.Popen("start python.exe console_window.py {}".format(self.port), shell=True) else: self.console = 
subprocess.Popen("exec xterm -e \"python console_window.py {}\"".format(self.port), shell=True) try: self.console_socket = socket.socket() self.console_socket.connect(('localhost', self.port)) except socket.error as e: self.err("Couldn't open the socket to the secondary console: {}".format(e)) def stop_console(self): self.console_socket.close() self.console.terminate() self.console_socket = None self.console = None class m2m2_shell(cmd.Cmd): cr.init(autoreset=True) intro = 'This is the m2m2 UART shell. Type "help" or "?" to list commands.\n' prompt = cr.Fore.GREEN + '#>' # Make error messages stand out more. vrb = verboser() m2m2_server = None sock_map = {} tx_q = Queue.Queue() rx_q = Queue.Queue() dispatcher_map = {} for addr in vars(M2M2_ADDR_ENUM_t).keys(): if addr.count("__") <= 0: addr_val = vars(M2M2_ADDR_ENUM_t)[addr] dispatcher_map[addr_val] = Queue.Queue() # A dictionary of useful/common command sequences to be executed. The 'commands' key contains a list of CLI commands to be run for the sequence. 
quickstarts = { # "adpd4000": { "commands":["clockCalibration", "setSlot 1 1 1 0x04", "sub radpd1 add", "sensor adpd4000 start"], # "help":"Setup the ADPD in 32 bit summation mode with the default DCFG."}, "adpd4000": { "commands":[ "loadAdpdCfg 40", "clockCalibration", "adpdAGCControl 1:1", "sensor adpd4000 start", "sub radpd6 add"], "help":"Setup the ADPD in 32 bit summation mode with the default DCFG for green LED."}, "ecg4k": { "commands":[ "loadAdpdCfg 40","SetEcg4kLcfg 0:300", "clockCalibration", "controlECGElectrodeSwitch 4k_sw 1", "sensor adpd4000 start", "sub radpd1 add"], "help":"Setup the ADPD for measuring ecg in slot A on DVT1/2/3 Watch"}, "ecg4k_dvt1": { "commands":["write_dcb_config adpd4000 ADPD4K_ECG_DVT1.dcfg", "loadAdpdCfg 40", "clockCalibration", "controlECGElectrodeSwitch 4k_sw 1", "sensor adpd4000 start", "sub radpd1 add"], "help":"Setup the ADPD for measuring ecg in slot A on DVT1 Watch"}, "ecg4k_dvt2": { "commands":["write_dcb_config adpd4000 ADPD4K_ECG_DVT2.dcfg", "loadAdpdCfg 40","clockCalibration", "controlECGElectrodeSwitch 4k_sw 1", "sensor adpd4000 start", "sub radpd1 add"], "help":"Setup the ADPD for measuring ecg in slot A on DVT2/3 Watch"}, "adpd4000_g": { "commands":["loadAdpdCfg 40", "clockCalibration", "adpdAGCControl 1:1", "sensor adpd4000 start", "sub radpd6 add"], "help":"Setup the ADPD in 32 bit summation mode with the default DCFG for green LED."}, "adpd4000_r": { "commands":["loadAdpdCfg 41", "clockCalibration", "adpdAGCControl 2:1", "sensor adpd4000 start", "sub radpd7 add"], "help":"Setup the ADPD in 32 bit summation mode with the default DCFG for Red LED."}, "adpd4000_ir": { "commands":["loadAdpdCfg 42", "clockCalibration", "adpdAGCControl 3:1", "sensor adpd4000 start", "sub radpd8 add"], "help":"Setup the ADPD in 32 bit summation mode with the default DCFG for IR LED."}, "adpd4000_b": { "commands":["loadAdpdCfg 43", "clockCalibration", "adpdAGCControl 4:1", "sensor adpd4000 start", "sub radpd9 add"], "help":"Setup the 
ADPD in 32 bit summation mode with the default DCFG for Blue LED."}, "adpd4000_g_agc_off": { "commands":["loadAdpdCfg 40", "clockCalibration", "adpdAGCControl 1:0", "sensor adpd4000 start", "sub radpd6 add", "plot radpd6"], "help":"Setup the ADPD in 32 bit summation mode with the default DCFG for Green LED."}, "adpd4000_r_agc_off": { "commands":["loadAdpdCfg 41", "clockCalibration", "adpdAGCControl 2:0", "sensor adpd4000 start", "sub radpd7 add", "plot radpd7"], "help":"Setup the ADPD in 32 bit summation mode with the default DCFG for Red LED."}, "adpd4000_ir_agc_off": { "commands":["loadAdpdCfg 42", "clockCalibration", "adpdAGCControl 3:0", "sensor adpd4000 start", "sub radpd8 add", "plot radpd8"], "help":"Setup the ADPD in 32 bit summation mode with the default DCFG for IR LED."}, "adpd4000_b_agc_off": { "commands":["loadAdpdCfg 43", "clockCalibration", "adpdAGCControl 4:0", "sensor adpd4000 start", "sub radpd9 add", "plot radpd9"], "help":"Setup the ADPD in 32 bit summation mode with the default DCFG for Blue LED."}, "adpd4000_g_r": { "commands":["create_adpd4k_dcfg 6:4 7:5", "loadAdpdCfg 40", "clockCalibration", "adpdAGCControl 1:1 2:1", "sensor adpd4000 start", "sub radpd6 add","sub radpd7 add", "plot radpd6", "plot radpd7"], "help":"Setup the ADPD in 32 bit summation mode with the default DCFG for green + red LED."}, "adpd4000_g_ir": { "commands":["create_adpd4k_dcfg 6:4 8:6", "loadAdpdCfg 40", "clockCalibration", "adpdAGCControl 1:1 3:1", "sensor adpd4000 start", "sub radpd6 add","sub radpd8 add", "plot radpd6", "plot radpd8"], "help":"Setup the ADPD in 32 bit summation mode with the default DCFG for green + ir LED."}, "adpd4000_r_ir": { "commands":["create_adpd4k_dcfg 7:5 8:6", "loadAdpdCfg 40", "clockCalibration", "adpdAGCControl 2:1 3:1", "sensor adpd4000 start", "sub radpd7 add","sub radpd8 add", "plot radpd7", "plot radpd8"], "help":"Setup the ADPD in 32 bit summation mode with the default DCFG for red+ir LED."}, "adpd4000_g_b": { 
"commands":["create_adpd4k_dcfg 6:4 9:7", "loadAdpdCfg 40", "clockCalibration", "adpdAGCControl 1:1 4:1", "sensor adpd4000 start", "sub radpd6 add","sub radpd9 add", "plot radpd6", "plot radpd9"], "help":"Setup the ADPD in 32 bit summation mode with the default DCFG for green+blue LED."}, "adpd4000_r_b": { "commands":["create_adpd4k_dcfg 7:5 9:7", "loadAdpdCfg 40", "clockCalibration", "adpdAGCControl 2:1 4:1", "sensor adpd4000 start", "sub radpd7 add","sub radpd9 add", "plot radpd7", "plot radpd9"], "help":"Setup the ADPD in 32 bit summation mode with the default DCFG for red+blue LED."}, "adpd4000_ir_b": { "commands":["create_adpd4k_dcfg 8:6 9:7", "loadAdpdCfg 40", "clockCalibration", "adpdAGCControl 3:1 4:1", "sensor adpd4000 start", "sub radpd8 add","sub radpd9 add", "plot radpd8", "plot radpd9"], "help":"Setup the ADPD in 32 bit summation mode with the default DCFG for ir+blue LED."}, "adpd4000_g_r_ir": { "commands":["create_adpd4k_dcfg 6:4 7:5 8:6", "loadAdpdCfg 40", "adpdAGCControl 1:1 2:1 3:1", "clockCalibration", "sensor adpd4000 start", "sub radpd6 add", "sub radpd7 add", "sub radpd8 add", "plot radpd6", "plot radpd7", "plot radpd8"], "help":"Setup the ADPD in 32 bit summation mode with the default DCFG for green+red+ir LED."}, "adpd4000_r_ir_b": { "commands":["create_adpd4k_dcfg 7:5 8:6 9:7", "loadAdpdCfg 40", "clockCalibration", "adpdAGCControl 2:1 3:1 4:1", "sensor adpd4000 start", "sub radpd7 add","sub radpd8 add","sub radpd9 add", "plot radpd7", "plot radpd8", "plot radpd9"], "help":"Setup the ADPD in 32 bit summation mode with the default DCFG for red+ir+blue LED."}, "adpd4000_g_r_b": { "commands":["create_adpd4k_dcfg 6:4 7:5 9:7", "loadAdpdCfg 40", "clockCalibration", "adpdAGCControl 1:1 2:1 4:1", "sensor adpd4000 start", "sub radpd6 add", "sub radpd7 add","sub radpd9 add", "plot radpd6", "plot radpd7", "plot radpd9"], "help":"Setup the ADPD in 32 bit summation mode with the default DCFG for green+red+blue LED."}, "adpd4000_g_ir_b": { 
"commands":["create_adpd4k_dcfg 6:4 8:6 9:7", "loadAdpdCfg 40", "clockCalibration", "adpdAGCControl 1:1 3:1 4:1", "sensor adpd4000 start", "sub radpd6 add", "sub radpd8 add","sub radpd9 add", "plot radpd6", "plot radpd8", "plot radpd9"], "help":"Setup the ADPD in 32 bit summation mode with the default DCFG for green+ir+blue LED."}, "ctr": { "commands":["loadAdpdCfg 6", "clockCalibration", "setSlot 0x00:0x14", "getCtrValue"], "help":"Get ctr value."}, "adxl": { "commands":["sensor adxl start","sub radxl add"], "help":"Start ADXL"}, "ad7156": { "commands":["sensor ad7156 start","sub rad7156 add"], "help":"Start AD7156"}, "plot-adpd4000": {"commands":["quickstart adpd4000", "plot radpd6"], "help":"Quickstarts the ADPD and starts a plot of the raw ADPD SlotF."}, "plot-ecg4000": {"commands":["quickstart ecg4000", "plot radpd1"], "help":"Quickstarts the ADPD and starts a plot of the raw ECG from SlotA."}, "plot-adpd4000_g": {"commands":["quickstart adpd4000_g", "plot radpd6"], "help":"Quickstarts the ADPD and starts a plot of the raw ADPD SlotF."}, "plot-adpd4000_r": {"commands":["quickstart adpd4000_r", "plot radpd7"], "help":"Quickstarts the ADPD and starts a plot of the raw ADPD SlotG."}, "plot-adpd4000_ir": {"commands":["quickstart adpd4000_ir", "plot radpd8"], "help":"Quickstarts the ADPD and starts a plot of the raw ADPD SlotH."}, "plot-adpd4000_b": {"commands":["quickstart adpd4000_b", "plot radpd9"], "help":"Quickstarts the ADPD and starts a plot of the raw ADPD SlotI."}, "temperature": { "commands":["create_adpd4k_dcfg 4:2 5:3", "loadAdpdCfg 40", "sensor temperature start", "sub rtemperature add"], "help":"Start Temperature"}, "temperature_C_D_J_K_L": { "commands":["create_adpd4k_dcfg 3:2 4:2 5:3 10:2 11:2 12:2", "loadAdpdCfg 40", "sensor temperature start", "sub rtempr3 add", "sub rtemperature add","sub rtempr10 add","sub rtempr11 add","sub rtempr12 add"], "help":"Start Temperature slot C,D,J,K,L"}, "temperature_C_D_J_K_L_uc1": { "commands":["loadAdpdUCDcfg 
6","tempr_lcfg_write 0x01 0x0E1C","reg adpd4000 w 0x0010:0x0B00","reg adpd4000 w 0x0150:0x0004", "reg adpd4000 w 0x0230:0x0004","reg adpd4000 w 0x0250:0x0004", "reg adpd4000 w 0x0270:0x0004", "reg adpd4000 w 0x0143:0x5A40", "reg adpd4000 w 0x0163:0x5A40","reg adpd4000 w 0x0183:0x5A40", "reg adpd4000 w 0x0223:0x5A40","reg adpd4000 w 0x0243:0x5A40", "reg adpd4000 w 0x0263:0x5A40","sensor temperature start", "sub rtempr3 add", "sub rtemperature add","sub rtempr10 add","sub rtempr11 add","sub rtempr12 add"], "help":"Start Temperature slot C,D,J,K,L dvt1"}, "start_log_temperature_C_D_J_K_L_uc1": { "commands":["loadAdpdUCDcfg 6","tempr_lcfg_write 0x01 0x0E1C","reg adpd4000 w 0x0010:0x0B00","reg adpd4000 w 0x0150:0x0004", "reg adpd4000 w 0x0230:0x0004","reg adpd4000 w 0x0250:0x0004", "reg adpd4000 w 0x0270:0x0004", "reg adpd4000 w 0x0143:0x5A40", "reg adpd4000 w 0x0163:0x5A40","reg adpd4000 w 0x0183:0x5A40", "reg adpd4000 w 0x0223:0x5A40","reg adpd4000 w 0x0243:0x5A40", "reg adpd4000 w 0x0263:0x5A40","fs_sub rtempr3 add", "fs_sub rtemperature add","fs_sub rtempr10 add","fs_sub rtempr11 add","fs_sub rtempr12 add","sensor temperature start","fs_log start"], "help":"Start Temperature slot C,D,J,K,L dvt1"}, "temperature_C_D": { "commands":["create_adpd4k_dcfg 3:2 4:2 5:3", "loadAdpdCfg 40", "sensor temperature start", "sub rtempr3 add", "sub rtemperature add"], "help":"Start Temperature slot C and D"}, "ped": { "commands":["sensor adxl start", "sensor ped start", "sub rped add"], "help":"Starts the Pedometer."}, "sqi_ext":{ "commands":["plot rsqi", "SQISetSlot 6", "set_adpd_ext_datastream_odr 100", "sensor sqi start","sub rsqi add","send_ext_adpd_datastream 11173863_ADPDAppStream_SlotFChannel1.csv 6 2", "sub rsqi remove", "sensor sqi stop"], "help":"send external sqi data stream"}, "sqi_green": { "commands":["plot radpd6","plot rsqi","loadAdpdCfg 40", "reg w adpd4000 0x0D:0x2710", "clockCalibration","SQISetSlot 6","sensor sqi start","sub rsqi add","adpdAGCControl 1:1","sensor 
adpd4000 start","sub radpd6 add"], "help":"Starts the SQI with Green LED on slot F of ADPD4000 at 100Hz"}, "sqi_mm": { "commands":["plot radpd1","plot rsqi", "sensor sqi start","sub rsqi add","sub radpd1 add","sensor adpd4000 start"], "help":"Starts the SQI with Green LED on slot F of ADPD4000 at 100Hz"}, "sqi_mm_test": { "commands":["sub radpd1 add","sub radpd4 add","sub radpd5 add","sensor sqi start","sub rtemperature add", "sub rsqi add","plot rsqi","plot radpd1","plot rtemperature","sensor adpd4000 start"], "help":"Starts the SQI with Green LED on slot F of ADPD4000 at 100Hz"}, "sqi_green_50": { "commands":["plot radpd6","plot rsqi","loadAdpdCfg 40", "reg w adpd4000 0x0D:0x4E20", "clockCalibration 6","SQISetSlot 6","sensor sqi start","sub rsqi add","adpdAGCControl 1:1","sensor adpd4000 start","sub radpd6 add"], "help":"Starts the SQI with Green LED on slot F of ADPD4000 at 50Hz"}, "sqi_green_25": { "commands":["plot radpd6","plot rsqi","loadAdpdCfg 40", "reg w adpd4000 0x0D:0x9C40", "clockCalibration","SQISetSlot 6","sensor sqi start","sub rsqi add","adpdAGCControl 1:1","sensor adpd4000 start","sub radpd6 add"], "help":"Starts the SQI with Green LED on slot F of ADPD4000 at 25Hz"}, "sqi_ppg":{ "commands":["plot rsyncppg","plot rppg", "plot rsqi", "loadAdpdCfg 40", "clockCalibration", "setPpgLcfg 40", "SQISetSlot 6","sensor sqi start", "sub rsqi add", "sensor ppg start","sub rppg add"], "help":"starts SQI along with PPG stream"}, "start_log_sqi_green": { "commands":["loadAdpdCfg 40","reg w adpd4000 0x0D:0x2710", "clockCalibration", "SQISetSlot 6", "fs_sub rsqi add", "fs_sub radpd6 add","sensor sqi start", "sensor adpd4000 start", "fs_log start"], "help":"log the SQI data with Green LED on slot F of ADPD4000 at 100Hz"}, "sqi_agc_off_green": { "commands":["plot radpd6","plot rsqi","loadAdpdCfg 40","reg w adpd4000 0x0D:0x2710","clockCalibration","SQISetSlot 6","sensor sqi start","sub rsqi add","adpdAGCControl 1:0","sensor adpd4000 start","sub radpd6 add"], 
"help":"Starts the SQI with Green LED on slot F of ADPD4000 at 100Hz"}, "sqi_red": { "commands":["plot radpd7","plot rsqi","loadAdpdCfg 41","reg w adpd4000 0x0D:0x2710","clockCalibration","SQISetSlot 7","sensor sqi start","sub rsqi add","adpdAGCControl 2:1","sensor adpd4000 start","sub radpd7 add"], "help":"Starts the SQI with Red LED on slot G of ADPD4000 at 100Hz"}, "start_log_sqi_red": { "commands":["loadAdpdCfg 41","reg w adpd4000 0x0D:0x2710", "clockCalibration", "SQISetSlot 7", "fs_sub rsqi add", "fs_sub radpd7 add","sensor sqi start", "sensor adpd4000 start", "fs_log start"], "help":"log the SQI data with Red LED on slot G of ADPD4000 at 100Hz"}, "sqi_agc_off_red": { "commands":["plot radpd7","plot rsqi","loadAdpdCfg 41","reg w adpd4000 0x0D:0x2710","clockCalibration","SQISetSlot 7","sensor sqi start","sub rsqi add","adpdAGCControl 2:0","sensor adpd4000 start","sub radpd7 add"], "help":"Starts the SQI with Red LED on slot G of ADPD4000 at 100Hz"}, "sqi_ir": { "commands":["plot radpd8","plot rsqi","loadAdpdCfg 42","reg w adpd4000 0x0D:0x2710","clockCalibration","SQISetSlot 8","sensor sqi start","sub rsqi add","adpdAGCControl 3:1","sensor adpd4000 start","sub radpd8 add"], "help":"Starts the SQI with IR LED on slot H of ADPD4000 at 100Hz"}, "start_log_sqi_ir": { "commands":["loadAdpdCfg 42","reg w adpd4000 0x0D:0x2710", "clockCalibration", "SQISetSlot 8", "fs_sub rsqi add", "fs_sub radpd8 add","sensor sqi start", "sensor adpd4000 start", "fs_log start"], "help":"log the SQI data with IR LED on slot H of ADPD4000 at 100Hz"}, "sqi_agc_off_ir": { "commands":["plot radpd8","plot rsqi","loadAdpdCfg 42","reg w adpd4000 0x0D:0x2710","clockCalibration","SQISetSlot 8","sensor sqi start","sub rsqi add","adpdAGCControl 3:0","sensor adpd4000 start","sub radpd8 add"], "help":"Starts the SQI with IR LED on slot H of ADPD4000 at 100Hz"}, "sqi_blue": { "commands":["plot radpd9","plot rsqi","loadAdpdCfg 43","reg w adpd4000 0x0D:0x2710","clockCalibration","SQISetSlot 9","sensor 
sqi start","sub rsqi add","adpdAGCControl 4:1","sensor adpd4000 start","sub radpd9 add"], "help":"Starts the SQI with Blue LED on slot I of ADPD4000 at 100Hz"}, "start_log_sqi_blue": { "commands":["loadAdpdCfg 43","reg w adpd4000 0x0D:0x2710", "clockCalibration", "SQISetSlot 9", "fs_sub rsqi add", "fs_sub radpd9 add","sensor sqi start", "sensor adpd4000 start", "fs_log start"], "help":"log the SQI data with Blue LED on slot I of ADPD4000 at 100Hz"}, "sqi_agc_off_blue": { "commands":["plot radpd9","plot rsqi","loadAdpdCfg 43","reg w adpd4000 0x0D:0x2710","clockCalibration","SQISetSlot 9","sensor sqi start","sub rsqi add","adpdAGCControl 4:0","sensor adpd4000 start","sub radpd9 add"], "help":"Starts the SQI with Blue LED on slot I of ADPD4000 at 100Hz"}, "start_log_adp": { "commands":["fs_sub radp add", "fs_log start"], "help":"Starts logging the Battery info."}, "stop_log_adp": { "commands":["fs_sub radp remove","fs_log stop"], "help":"Stops logging the Battery info."}, "start_log_adxl": { "commands":["fs_sub radxl add","sensor adxl start","fs_log start"], "help":"Starts logging the ADXL."}, "start_reg_read_adxl": { "commands":["reg r adxl 0x00","reg r adxl 0x01","reg r adxl 0x02", "reg r adxl 0x03","reg r adxl 0x08","reg r adxl 0x09", "reg r adxl 0x0A","reg r adxl 0x0B","reg r adxl 0x0C","reg r adxl 0x0D","reg r adxl 0x0E","reg r adxl 0x0F", # "reg r adxl 0x0A","reg r adxl 0x0B","reg r adxl 0x0C","reg r adxl 0x0D","reg r adxl 0x0E","reg r adxl 0x0F", "reg r adxl 0x10","reg r adxl 0x11","reg r adxl 0x12","reg r adxl 0x13","reg r adxl 0x14","reg r adxl 0x15", "reg r adxl 0x16","reg r adxl 0x17","reg r adxl 0x1F","reg r adxl 0x20","reg r adxl 0x21","reg r adxl 0x22", "reg r adxl 0x23","reg r adxl 0x24","reg r adxl 0x25","reg r adxl 0x26","reg r adxl 0x27","reg r adxl 0x28", "reg r adxl 0x29","reg r adxl 0x2A","reg r adxl 0x2B","reg r adxl 0x2C","reg r adxl 0x2D","reg r adxl 0x2E", ], "help":"Starts logging the ADXL."}, "start_log_adxl_252": { 
"commands":["setDateTime","getDateTime", "quickstart start_log_adxl","reg w adxl 0x2C:0x98","delay 20","quickstop stop_log_adxl", "quickstart start_log_adxl","reg w adxl 0x2C:0x99","delay 20","quickstop stop_log_adxl", "quickstart start_log_adxl","reg w adxl 0x2C:0x9A","delay 20","quickstop stop_log_adxl", "quickstart start_log_adxl","reg w adxl 0x2C:0x9B","delay 20","quickstop stop_log_adxl", "quickstart start_log_adxl","reg w adxl 0x2C:0x9C","delay 20","quickstop stop_log_adxl", "quickstart start_log_adxl","reg w adxl 0x2C:0x9D","delay 20","quickstop stop_log_adxl" # "quickstart adxl","delay 20","quickstop adxl" ], "help":"Takes 1 min log of ADXL in the external trigger mode."}, "start_log_ext_adxl": { "commands":["fs_sub radxl add","sensor adxl start","fs_log start","delay 60","reg w adxl 0x2C:0x98", "sensor adxl stop", "sensor adxl start","reg w adxl 0x2C:0x99","delay 60", "sensor adxl stop", "sensor adxl start","reg w adxl 0x2C:0x9A","delay 60", "sensor adxl stop", "sensor adxl start","reg w adxl 0x2C:0x9B","delay 60", "sensor adxl stop", "sensor adxl start","reg w adxl 0x2C:0x9C","delay 60", "sensor adxl stop", "sensor adxl start","reg w adxl 0x2C:0x9D","delay 60", "sensor adxl stop", "sensor adxl start","reg w adxl 0x2C:0x9E","delay 60", "sensor adxl stop", "fs_sub radxl remove", "fs_log stop"], "help":"Starts logging the ADXL."}, "start_log_adpd4000_g": { "commands":["loadAdpdCfg 40", "clockCalibration", "adpdAGCControl 1:1", "fs_sub radpd6 add","sensor adpd4000 start","fs_log start"], "help":"Starts logging the ADPD4000."}, "start_log_adpd4000_r": { "commands":["loadAdpdCfg 41", "clockCalibration", "adpdAGCControl 2:1", "fs_sub radpd7 add","sensor adpd4000 start","fs_log start"], "help":"Starts logging the ADPD4000."}, "start_log_adpd4000_ir": { "commands":["loadAdpdCfg 42", "clockCalibration", "adpdAGCControl 3:1", "fs_sub radpd8 add","sensor adpd4000 start","fs_log start"], "help":"Starts logging the ADPD4000."}, "start_log_adpd4000_b": { 
"commands":["loadAdpdCfg 43", "clockCalibration", "adpdAGCControl 4:1", "fs_sub radpd9 add","sensor adpd4000 start","fs_log start"], "help":"Starts logging the ADPD4000."}, "start_log_adpd4000_g_append": { "commands":["loadAdpdCfg 40", "clockCalibration", "adpdAGCControl 1:1", "fs_sub radpd6 add","sensor adpd4000 start","fs_log_append"], "help":"Starts logging the ADPD4000."}, "start_log_ppg": { "commands":["loadAdpdCfg 40", "clockCalibration","setPpgLcfg 40", "lcfgPpgWrite 0x4 0x1210", "fs_sub rppg add", "sensor ppg start", "fs_log start"], "help":"Starts logging the PPG with Static AGC enabled"}, "start_log_ppg_dynamic_agc": { "commands":["loadAdpdCfg 40", "clockCalibration","setPpgLcfg 40", "fs_sub rppg add","fs_sub rstatic_agc add", "fs_sub rdynamic_agc add", "sensor ppg start", "fs_log start"], "help":"Starts logging the PPG with Static+Dynamic AGC enabled"}, "start_log_hrv": { "commands":["loadAdpdCfg 40", "clockCalibration", "setPpgLcfg 40", "fs_sub rppg add","fs_sub rstatic_agc add", "fs_sub rdynamic_agc add", "fs_sub rhrv add", "sensor ppg start","fs_log start"], "help":"Starts logging the PPG+HRV Stream"}, "start_log_temperature": { "commands":["create_adpd4k_dcfg 4:2 5:3", "loadAdpdCfg 40", "fs_sub rtemperature add", "sensor temperature start","fs_log start"], "help":"Start Temperature"}, "start_log_ecg": { "commands":["lcfgEcgWrite 0:100","fs_sub recg add","sensor ecg start","fs_log start"], "help":"Start ecg"}, "start_log_ecg_1500": { "commands":["lcfgEcgWrite 0:1500","fs_sub recg add","sensor ecg start","fs_log start"], "help":"log ecg @ 1500Hz"}, "start_log_ecg_1600": { "commands":["lcfgEcgWrite 0:1600","fs_sub recg add","sensor ecg start","fs_log start"], "help":"log ecg @ 1600Hz"}, "start_log_ecg_2000": { "commands":["lcfgEcgWrite 0:2000","fs_sub recg add","sensor ecg start","fs_log start"], "help":"log ecg @ 2000Hz"}, "start_log_ped": {"commands":["fs_sub rped add","sensor adxl start", "sensor ped start","fs_log start"], "help":"Starts the 
Pedometer. logging"}, "start_log_adpd4000_r_adxl": {"commands":["loadAdpdCfg 41", "clockCalibration","fs_sub radpd7 add","fs_sub radxl add","fs_log start","sensor adpd4000 start","sensor adxl start"], "help":"Starts the ADPD4000_r, ADXL logging"}, ###Testing ECG4K in UC3 in place of ECG from AD5940### "uc3_ecg4k_dvt2": { "commands":["write_dcb_config adpd4000 TEST_ECG4K_DVT2_UC3.dcfg", "loadAdpdCfg 40", "setPpgLcfg 40", "clockCalibration", "setUCHREnab 1 6", "controlECGElectrodeSwitch 4k_sw 1", "sub rstatic_agc add", "sub rdynamic_agc add", "sub rppg add", "sub rsqi add", "sub radpd1 add", "sub radpd6 add", "sub radxl add", "sub rtemperature add", "sensor adxl start", "SQISetSlot 6", "sensor sqi start", "adpdAGCControl 1:1", "sensor adpd4000 start", "sensor temperature start"], "help":"Starts streaming UC3 - ECG4K@300Hz, Adxl@50Hz, SQI, Adpd@100Hz, Temperature"}, "ecg4k_eda_dvt2": { "commands":["quickstart ecg4k_dvt2", "quickstart eda"], "help":"Running ECG from ADPD4K + EDA"}, ###################################################### #"start_log_mv_uc1": { "commands":["loadAdpdUCDcfg 1", "clockCalibration","fs_sub radpd6 add","fs_sub radxl add","fs_sub rtemperature add","adpdAGCControl 1:1","sensor adpd4000 start","sensor adxl start","sensor temperature start","fs_log start"], "start_log_mv_uc1": { "commands":["loadAdpdUCDcfg 1","setPpgLcfg 40", "loadPpgUCLcfg 1", "clockCalibration", "setUCHREnab 1 6", "fs_sub rstatic_agc add", "fs_sub rdynamic_agc add", "fs_sub rppg add", "fs_sub radpd6 add","fs_sub radxl add","fs_sub rtemperature add","adpdAGCControl 1:1","sensor adpd4000 start","sensor adxl start","sensor temperature start","fs_log start"], "help":"Starts logging for MV UC1 - Adpd@500Hz, Adxl, HR, Temperature"}, "start_log_mv_uc1_without_hr": { "commands":["loadAdpdUCDcfg 1","clockCalibration", "fs_sub rstatic_agc add", "fs_sub radpd6 add","fs_sub radxl add","fs_sub rtemperature add","adpdAGCControl 1:1","sensor adpd4000 start","sensor adxl start","sensor 
temperature start","fs_log start"], "help":"Starts logging for MV UC1 - Adpd@500Hz, Adxl,Temperature"}, "start_log_mv_uc1_without_hr_recalibration": { "commands":["loadAdpdUCDcfg 1","clockCalibration", "fs_sub rstatic_agc add", "fs_sub radpd6 add","fs_sub radxl add","fs_sub rtemperature add","adpdAGCControl 1:1","sensor adpd4000 start","sensor adxl start","sensor temperature start","fs_log start","delay 15","sensor temperature stop","sensor adpd4000 stop","delay 15","adpdAGCControl 1:1","sensor adpd4000 start","sensor temperature start"], "help":"Starts logging for MV UC1 - Adpd@500Hz, Adxl,Temperature"}, "start_log_mv_uc2": { "commands":["loadAdpdUCDcfg 2","setPpgLcfg 40", "loadPpgUCLcfg 2", "clockCalibration","setUCHREnab 1 6","fs_sub rstatic_agc add", "fs_sub rdynamic_agc add", "fs_sub rppg add", "fs_sub rsqi add", "fs_sub radpd6 add","fs_sub radxl add","fs_sub reda add","fs_sub rtemperature add","lcfgEdaWrite 0:30","lcfgEdaWrite 2:1","sensor eda start","sensor adxl start","SQISetSlot 6","sensor sqi start","adpdAGCControl 1:1","sensor adpd4000 start","sensor temperature start","fs_log start"], "help":"Starts logging for MV UC2 - Eda@30Hz, Adxl, SQI, Adpd@100Hz, HR, Temperature"}, "start_log_mv_uc3": { "commands":["loadAdpdUCDcfg 3","setPpgLcfg 40", "loadPpgUCLcfg 3", "clockCalibration","setUCHREnab 1 6","fs_sub rstatic_agc add","fs_sub rdynamic_agc add", "fs_sub rppg add", "fs_sub rsqi add","fs_sub radpd6 add","fs_sub radxl add","fs_sub recg add","fs_sub rtemperature add","lcfgEcgWrite 0:250","sensor ecg start","SQISetSlot 6","sensor sqi start","adpdAGCControl 1:1","sensor adpd4000 start","sensor adxl start","sensor temperature start","fs_log start"], "help":"Starts logging for MV UC3 - Ecg@250Hz, SQI, Adpd@100Hz, HR, Adxl, Temperature"}, "start_log_mv_uc4": { "commands":["loadAdpdUCDcfg 4","setPpgLcfg 40", "loadPpgUCLcfg 4", "clockCalibration","fs_sub rppg add","fs_sub rstatic_agc add","fs_sub rdynamic_agc add", "fs_sub rsqi add","fs_sub recg add","fs_sub 
rtemperature add","lcfgEcgWrite 0:1000","sensor ecg start","SQISetSlot 6","sensor sqi start","sensor ppg start","sensor temperature start","fs_log start"], "help":"Starts logging for MV UC4 - Ecg@1000Hz, SQI, ppg, Temperature"}, "start_log_mv_uc5": { "commands":["loadAdpdUCDcfg 5","setPpgLcfg 40", "loadPpgUCLcfg 5","clockCalibration","setUCHREnab 1 6","fs_sub rstatic_agc add", "fs_sub rdynamic_agc add", "fs_sub rppg add", "fs_sub rsqi add","fs_sub radpd6 add","fs_sub radpd7 add","fs_sub radpd8 add","fs_sub radpd9 add","fs_sub radxl add","SQISetSlot 6","sensor sqi start","adpdAGCControl 0:1","sensor adpd4000 start","sensor adxl start","fs_log start"], "help":"Starts logging for MV UC5 - 4 LED Slots at 100Hz, SQI, HR, Adxl"}, "start_log_mv_uc5_recalibration": { "commands":["loadAdpdUCDcfg 5","setPpgLcfg 40", "loadPpgUCLcfg 5","clockCalibration","setUCHREnab 1 6","fs_sub rstatic_agc add", "fs_sub rdynamic_agc add", "fs_sub rppg add", "fs_sub rsqi add","fs_sub radpd6 add","fs_sub radpd7 add","fs_sub radpd8 add","fs_sub radpd9 add","fs_sub radxl add","SQISetSlot 6","sensor sqi start","adpdAGCControl 0:1","sensor adpd4000 start","sensor adxl start","fs_log start","delay 15","sensor temperature stop","sensor adpd4000 stop","delay 15","adpdAGCControl 0:1","sensor adpd4000 start","sensor temperature start"], "help":"Starts logging for MV UC5 - 4 LED Slots at 100Hz, SQI, HR, Adxl"}, "start_log_mv_uc5_without_hr": { "commands":["loadAdpdUCDcfg 5","setPpgLcfg 40","clockCalibration","fs_sub rstatic_agc add","fs_sub rsqi add","fs_sub radpd6 add","fs_sub radpd7 add","fs_sub radpd8 add","fs_sub radpd9 add","fs_sub radxl add","SQISetSlot 6","sensor sqi start","adpdAGCControl 0:1","sensor adpd4000 start","sensor adxl start","fs_log start"], "help":"Starts logging for MV UC5 - 4 LED Slots at 100Hz, SQI, Adxl"}, "start_log_mv_uc5_without_hr_recalibration": { "commands":["loadAdpdUCDcfg 5","clockCalibration","fs_sub rstatic_agc add","fs_sub rsqi add","fs_sub radpd6 add","fs_sub radpd7 
add","fs_sub radpd8 add","fs_sub radpd9 add","fs_sub radxl add","SQISetSlot 6","sensor sqi start","adpdAGCControl 0:1","sensor adpd4000 start","sensor adxl start","fs_log start","delay 15","sensor temperature stop","sensor adpd4000 stop","delay 15","adpdAGCControl 0:1","sensor adpd4000 start","sensor temperature start"], "help":"Starts logging for MV UC5 - 4 LED Slots at 100Hz, SQI, Adxl"}, "start_log_nk_uc": { "commands":["loadAdpdCfg 40","clockCalibration","fs_sub rtemperature add","fs_sub radpd6 add","fs_sub radxl add","fs_sub reda add","adpdAGCControl 1:1","sensor temperature start","set_eda_dcb_lcfg","sensor eda start","sensor adxl start","sensor adpd4000 start","fs_log_append"], "help":"Starts logging for NK UC - Eda@8Hz, Adxl, Adpd@500Hz, Temperature"}, "start_log_nk_uc_set_bat_thresh": { "commands":["loadAdpdCfg 40","clockCalibration","fs_sub rtemperature add","fs_sub radpd6 add","fs_sub radxl add","fs_sub reda add","adpdAGCControl 1:1","sensor temperature start","set_eda_dcb_lcfg","sensor eda start","sensor adxl start","sensor adpd4000 start","fs_log_append","setBatteryThreshold 87 88"], "help":"Starts logging for NK UC - Eda@8Hz, Adxl, Adpd@500Hz, Temperature after setting Battery threshold"}, "start_log_mv_uc6": { "commands":["loadAdpdUCDcfg 6","setPpgLcfg 40", "loadPpgUCLcfg 3", "clockCalibration","setUCHREnab 1 6","fs_sub rstatic_agc add", "fs_sub rdynamic_agc add", "fs_sub rppg add", "fs_sub rsqi add","fs_sub radpd6 add","fs_sub radxl add","fs_sub rbia add","fs_sub rtemperature add","lcfgBiaWrite 0:20","sensor bia start","SQISetSlot 6","sensor sqi start","adpdAGCControl 1:1","sensor adpd4000 start","sensor adxl start","sensor temperature start","fs_log start"], "help":"Starts logging for MV UC6 - 4 LED Slots at 100Hz, SQI, HR, Adxl, Bia at 20 Hz"}, "ppg": { "commands":["loadAdpdCfg 40", "clockCalibration", "setPpgLcfg 40", "lcfgPpgWrite 0x4 0x1210", "sensor ppg start", "sub rppg add"], "help":"Starts the PPG application with Static AGC enabled"}, 
"ppg_dynamic_agc": { "commands":["loadAdpdCfg 40", "clockCalibration","setPpgLcfg 40", "sensor ppg start", "sub rppg add","sub rstatic_agc add", "sub rdynamic_agc add"], "help":"Starts the PPG application with Static+Dynamic AGC enabled"}, "hrv":{ "commands":["loadAdpdCfg 40", "clockCalibration", "setPpgLcfg 40", "sensor ppg start","sub rppg add","sub rstatic_agc add", "sub rdynamic_agc add", "sub rhrv add"], "help":"starts PPG+HRV stream with Static+Dynamic AGC enabled"}, "periodic_ppg": { "commands":["loadAdpdCfg 40","clockCalibration","setPpgLcfg 40","lcfgPpgWrite 6 0x000F001E", "sensor ppg start", "sub rppg add"], "help":"Starts Duty cycle based periodic PPG, Ton=15sec,Toff=30sec"}, "ecg": {"commands": ["lcfgEcgWrite 0:100", "sensor ecg start", "sub recg add"], "help": "Start ECG"}, "ecg_dcb": {"commands": ["set_ecg_dcb_lcfg", "sensor ecg start", "sub recg add"], "help": "Start ECG, writes lcfg from DCB if present"}, "eda": {"commands":["lcfgEdaWrite 0:4","lcfgEdaWrite 2:2", "sensor eda start", "sub reda add"], "help":"Starts the eda."}, "tst_issue_330_1": {"commands":["fs_log start","fs_log stop"], "help":"Open and close "}, "tst_issue_330_2": {"commands":["pattern_write 16384 0 1 1 1"], "help":"pattern write with 2 pages "}, "eda_4": {"commands":["lcfgEdaWrite 0:4","lcfgEdaWrite 2:2","sensor eda start","sub reda add"], "help":"Starts the eda."}, "eda_8": {"commands":["lcfgEdaWrite 0:8","lcfgEdaWrite 2:2","sensor eda start","sub reda add"], "help":"Starts the eda."}, "eda_16": {"commands":["lcfgEdaWrite 0:16","lcfgEdaWrite 2:2","sensor eda start","sub reda add"], "help":"Starts the eda."}, "eda_25": {"commands":["lcfgEdaWrite 0:25","lcfgEdaWrite 2:1","sensor eda start","sub reda add"], "help":"Starts the eda."}, "eda_30": {"commands":["lcfgEdaWrite 0:30","lcfgEdaWrite 2:1","sensor eda start","sub reda add"], "help":"Starts the eda."}, "use_case_2": {"commands":["quickstart start_log_eda","quickstart start_log_adxl","quickstart start_log_adpd4000_g","quickstart 
start_log_temperature"], "help":"Starts the use case 2."}, "eda_dcb": {"commands":["set_eda_dcb_lcfg","sub reda add","sensor eda start"], "help":"Starts the eda, writes the lcfg from DCB if present"}, "plot-eda": {"commands":["quickstart eda", "plot reda"], "help":"Quickstarts the EDA and starts a plot of the EDA data."}, "start_log_eda": {"commands":["fs_sub reda add","sensor eda start","delay 5","fs_log start"], "help":"Start eda"}, "bia": {"commands": ["sensor bia start","sub rbia add"], "help":"Starts BIA."}, "bcm": {"commands": ["lcfgBiaWrite 5:1","sensor bia start","sub rbia add"], "help":"Starts BIA and BCM."}, "plot-bia": { "commands":["quickstart bia", "plot rbia"], "help":"Quickstarts the BIA and starts a plot of the Bia data."}, "plot-ped": { "commands":["quickstart ped", "plot rped"], "help":"Quickstarts the Pedometer and starts a plot of the Ped count data."}, "start_log_bia": {"commands":["fs_sub rbia add","sensor bia start","fs_log start"], "help":"Start bia logging"}, "start_log_bcm": {"commands":["lcfgBiaWrite 5:1","fs_sub rbia add","sensor bia start","fs_log start"], "help":"Start bia and bcm logging"}, "start_range_sweep_bia": {"commands":["lcfgBiaWrite 18:1","lcfgBiaWrite 20:1000","lcfgBiaWrite 21:50000","lcfgBiaWrite 22:100","lcfgBiaWrite 23:1","lcfgBiaWrite 24:0","quickstart bia"], "help":"Start bia with sweep range"}, "start_fixed_sweep_bia": {"commands":["lcfgBiaWrite 19:1","lcfgBiaWrite 25:100000","quickstart bia"], "help":"Start bia"}, "start_stop_adpd4k": {"commands":["quickstart adpd4000","quickstop adpd4000"], "help":"Start - stop tests addp4k "}, "start_stop_230": {"commands":["quickstart adxl","quickstart adpd4000","quickstart eda", "sensor temperature start","sub rtemperature add","quickstop adxl","quickstop adpd4000","quickstop eda", "sub rtemperature remove","sensor temperature stop"], "help":"Start - stop tests addp4k "}, "mv_uc1_streaming_start": {"commands":["loadAdpdCfg 40","reg w adpd4000 
0x0D:0x07D0","clockCalibration","adpdAGCControl 1:1","sensor adpd4000 start","quickstart adpd_reg_tab_update","sub radpd6 add", "delay 1", "sensor adxl start","sub radxl add","sensor temperature start","sub rtemperature add"], "help":"Start MV UC1 streaming "}, "adpd500Hz_stream_start_stop": {"commands":["loadAdpdCfg 40","reg w adpd4000 0x0D:0x07D0","clockCalibration","adpdAGCControl 1:1","sensor adpd4000 start","quickstart adpd_reg_tab_update","sub radpd6 add", "delay 2", "quickstop adpd4000"], "help":"Start adpd at 500Hz streaming "}, "mv_uc1_245_issue": {"commands":["write_dcb_config adpd4000 UseCase1.dcfg","adpdAGCControl 1:1","sensor adpd4000 start","quickstart adpd_reg_tab_update","sub radpd6 add","sensor adxl start","sub radxl add","sensor temperature start","sub rtemperature add","delay 1","quickstop mv_uc1_streaming_stop","delete_dcb_config adpd4000","sensor adpd4000 start","sensor adpd4000 stop","loadAdpdCfg 40","reg w adpd4000 0x0D:0x07D0","clockCalibration","adpdAGCControl 1:1","sensor adpd4000 start","quickstart adpd_reg_tab_update","sub radpd6 add","sensor adxl start","sub radxl add","sensor temperature start","sub rtemperature add","delay 1","quickstop mv_uc1_streaming_stop"], "help":"Start MV UC1 245 issue commands"}, "mv_uc1_245_issue_200Hz": {"commands":["write_dcb_config adpd4000 UseCase1_200Hz.dcfg","adpdAGCControl 1:1","sensor adpd4000 start","quickstart adpd_reg_tab_update","sub radpd6 add","sensor adxl start","sub radxl add","sensor temperature start","sub rtemperature add","delay 1","quickstop mv_uc1_streaming_stop","delete_dcb_config adpd4000","sensor adpd4000 start","sensor adpd4000 stop","loadAdpdCfg 40","reg w adpd4000 0x0D:0x1388","clockCalibration","adpdAGCControl 1:1","sensor adpd4000 start","quickstart adpd_reg_tab_update","sub radpd6 add","sensor adxl start","sub radxl add","sensor temperature start","sub rtemperature add","delay 1","quickstop mv_uc1_streaming_stop"], "help":"Start MV UC1 245 issue commands"}, 
"mv_uc1_245_issue_300Hz": {"commands":["write_dcb_config adpd4000 UseCase1_300Hz.dcfg","adpdAGCControl 1:1","sensor adpd4000 start","quickstart adpd_reg_tab_update","sub radpd6 add","sensor adxl start","sub radxl add","sensor temperature start","sub rtemperature add","delay 1","quickstop mv_uc1_streaming_stop","delete_dcb_config adpd4000","sensor adpd4000 start","sensor adpd4000 stop","loadAdpdCfg 40","reg w adpd4000 0x0D:0x0D05","clockCalibration","adpdAGCControl 1:1","sensor adpd4000 start","quickstart adpd_reg_tab_update","sub radpd6 add","sensor adxl start","sub radxl add","sensor temperature start","sub rtemperature add","delay 1","quickstop mv_uc1_streaming_stop"], "help":"Start MV UC1 245 issue commands"}, "mv_uc1_245_issue_wo_dcb": {"commands":["loadAdpdCfg 40","reg w adpd4000 0x0D:0x07D0","clockCalibration","adpdAGCControl 1:1","sensor adpd4000 start","quickstart adpd_reg_tab_update","sub radpd6 add","sensor adxl start","sub radxl add","sensor temperature start","sub rtemperature add","delay 1","quickstop mv_uc1_streaming_stop","loadAdpdCfg 40","reg w adpd4000 0x0D:0x07D0","clockCalibration","adpdAGCControl 1:1","sensor adpd4000 start","quickstart adpd_reg_tab_update","sub radpd6 add","sensor adxl start","sub radxl add","sensor temperature start","sub rtemperature add","delay 1","quickstop mv_uc1_streaming_stop"], "help":"Start MV UC1 245 issue commands"}, "adpd_dcb_test": {"commands":["write_dcb_config adpd4000 adpd4000_dcb_test1.dcfg","delay 4","read_dcb_config adpd4000","delay 4","compare_cfg_files adpd4000_dcb_test1.dcfg adpd4000_dcb_get.dcfg","delete_dcb_config adpd4000","delay 4"], "help":"Starts the adpd dcb test."}, "adxl_dcb_test": {"commands":["write_dcb_config adxl adxl_dcb_test1.dcfg","delay 4","read_dcb_config adxl","delay 4","compare_cfg_files adxl_dcb_test1.dcfg adxl_dcb_get.dcfg","delete_dcb_config adxl","delay 4"], "help":"Starts the adxl dcb test."}, "ppg_dcb_test": {"commands":["write_dcb_config ppg ppg_dcb.lcfg","delay 
4","read_dcb_config ppg","delay 4","compare_cfg_files ppg_dcb.lcfg ppg_dcb_get.lcfg","delete_dcb_config ppg","delay 4"], "help":"Starts the ppg dcb test."}, "ecg_dcb_test": {"commands":["write_dcb_config ecg ecg_dcb.lcfg","delay 2","read_dcb_config ecg","delay 2","compare_cfg_files ecg_dcb.lcfg ecg_dcb_get.lcfg","delete_dcb_config ecg","delay 4"], "help":"Starts the ecg dcb test."}, "eda_dcb_test": {"commands":["write_dcb_config eda eda_dcb.lcfg","delay 2","read_dcb_config eda","delay 2","compare_cfg_files eda_dcb.lcfg eda_dcb_get.lcfg","delete_dcb_config eda","delay 4"], "help":"Starts the eda dcb test."}, "dcb_test": {"commands":["quickstart adpd_dcb_test","quickstart adxl_dcb_test","quickstart ecg_dcb_test","quickstart eda_dcb_test"], "help":"Starts the dcb test."}, "temp_delete_dcb_test": {"commands":["delete_dcb_config adpd4000","create_adpd4k_dcfg 4:2 5:3","loadAdpdCfg 40","delay 5","sub rtemperature remove", "sensor temperature stop","reg r adpd4000 0x0170 0x0190"], "help":"Starts the temp del dcb test."}, "temp_write_dcb_test": {"commands":["write_dcb_config adpd4000 adpd4000_dcb_temp.dcfg","loadAdpdCfg 40","create_adpd4k_dcfg 4:2 5:3", "loadAdpdCfg 40","delay 5","sub rtemperature remove","sensor temperature stop","reg r adpd4000 0x0170 0x0190"], "help":"Starts the temp write dcb test."}, "temp_dcb_test": {"commands": ["quickstart temp_delete_dcb_test","quickstart temp_write_dcb_test","quickstart temp_delete_dcb_test"], "help":"Starts the temp robot dcb test."}, "combined_dcb_write_adpd_test": {"commands": ["delete_dcb_config adpd4000","delay 2","loadAdpdCfg 40","clockCalibration","sub radpd6 add","delay 5","sub radpd6 remove","sensor adpd4000 stop","reg r adpd4000 0x01B0", "write_dcb_config adpd4000 adpd4000_dcb_test1.dcfg","delay 2","loadAdpdCfg 40","clockCalibration","sensor adpd4000 start","sub radpd6 add","delay 5","reg r adpd4000 0x01B0", "delay 2","sub radpd6 remove","sensor adpd4000 stop"], "help":"Starts the adpd robot combined test."}, 
"combined_dcb_write_adxl_test": {"commands": ["delete_dcb_config adxl","delay 2","sensor adxl start","sub radxl add","delay 5","delay 4","reg r adxl 0x020 0x2C 0x2E","sub radxl remove","sensor adxl stop", "write_dcb_config adxl adxl_dcb_test1.dcfg","delay 2","sensor adxl start","sub radxl add","delay 5","reg r adxl 0x20 0x2C 0x2E", "sub radxl remove","sensor adxl stop"], "help":"Starts the adxl robot combined test."}, "combined_dcb_write_ecg_test": {"commands": ["delete_dcb_config ecg","set_ecg_dcb_lcfg","sensor ecg start","sub recg add","delay 10","sub recg remove","sensor ecg stop", "write_dcb_config ecg ecg_dcb.lcfg","delay 2","set_ecg_dcb_lcfg","sensor ecg start","sub recg add","delay 10", "sub recg remove","sensor ecg stop","lcfgEcgRead 0"], "help":"Starts the ecg robot combined test."}, "combined_dcb_write_eda_test": {"commands": ["delete_dcb_config eda","set_eda_dcb_lcfg","sensor eda start","sub reda add","delay 10","sub recg remove","sensor eda stop","lcfgEdaRead 0","lcfgEdaRead 2", "write_dcb_config eda eda_dcb.lcfg","delay 2","set_eda_dcb_lcfg","sensor eda start","sub reda add","delay 10", "sub reda remove","sensor eda stop","lcfgEdaRead 0","lcfgEdaRead 2"], "help":"Starts the eda robot combined test."}, "combined_dcb_write_ppg_test": {"commands": ["delete_dcb_config ppg","loadAdpdCfg 40","clockCalibration","setPpgLcfg 40","sensor ppg start","sub rppg add","sub rppg remove","sensor ppg stop","lcfgPpgCheck 40 ppg_dcb.lcfg", "write_dcb_config ppg ppg_dcb.lcfg","delay 5","loadAdpdCfg 40","clockCalibration","setPpgLcfg 40","sensor ppg start","sub rppg add", "sub rppg remove","sensor ppg stop","lcfgPpgCheck 40 ppg_dcb.lcfg"], "help":"Starts the ppg robot combined test."}, "combined_dcb_test": {"commands": ["quickstart combined_dcb_adpd_test","quickstart combined_dcb_adxl_test","quickstart combined_dcb_ecg_test","quickstart combined_dcb_eda_test","quickstart combined_dcb_ppg_test"], "help":"Starts the robot combined test."}, "combined_dcb_ad7156_test": 
{"commands": ["delete_dcb_config ad7156","delay 2","loadAd7156Cfg","reg r ad7156 0xA 0xB", "write_dcb_config ad7156 ad7156_dcb_test.dcfg","delay 2","loadAd7156Cfg","reg r ad7156 0xA 0xB", "delete_dcb_config ad7156","delay 2","loadAd7156Cfg","reg r ad7156 0xA 0xB"], "help":"Starts the ad7156 robot combined test."}, "combined_del_adpd_dcb_test": {"commands": ["delete_dcb_config adpd4000 ","delay 2","loadAdpdCfg 40","clockCalibration","sensor adpd4000 start", "sub radpd6 add","delay 5","sub radpd6 remove","sensor adpd4000 stop","reg r adpd4000 0x01B0"], "help":"Starts the deletion adpd dcb robot combined test."}, "combined_del_adxl_dcb_test": {"commands": ["delete_dcb_config adxl","delay 2","sensor adxl start","sub radxl add","delay 5","reg r adxl 0x020 0x2C 0x2E", "sub radxl remove","sensor adxl stop"], "help":"Starts the deletion adxl dcb robot combined test."}, "combined_del_ppg_dcb_test": {"commands": ["delete_dcb_config ppg","loadAdpdCfg 40","clockCalibration","setPpgLcfg 40", "sensor ppg start","sub rppg add","delay 5","sub rppg remove", "sensor ppg stop","lcfgPpgCheck 40 ppg_dcb.lcfg"], "help":"Starts the deletion ppg dcb robot combined test."}, "combined_del_ecg_dcb_test": {"commands": ["delete_dcb_config ecg","set_ecg_dcb_lcfg","sensor ecg start","sub recg add","delay 10", "sub recg remove","sensor ecg stop","lcfgEcgRead 0"], "help":"Starts the deletion ecg dcb robot combined test."}, "combined_del_eda_dcb_test": {"commands": ["delete_dcb_config eda","set_eda_dcb_lcfg","sensor eda start","sub reda add","delay 10", "sub reda remove","sensor eda stop","lcfgEdaRead 0","lcfgEdaRead 2"], "help":"Starts the deletion eda dcb robot combined test."}, "combined_dcb_adpd_test": {"commands": ["quickstart combined_dcb_write_adpd_test","quickstart combined_del_adpd_dcb_test"], "help":"Starts adpd dcb robot combined test."}, "combined_dcb_adxl_test": {"commands": ["quickstart combined_dcb_write_adxl_test","quickstart combined_del_adxl_dcb_test"], "help":"Starts adxl dcb 
robot combined test."}, "combined_dcb_ppg_test": {"commands": ["quickstart combined_dcb_write_ppg_test","quickstart combined_del_ppg_dcb_test"], "help":"Starts ppg dcb robot combined test."}, "combined_dcb_ecg_test": {"commands": ["quickstart combined_dcb_write_ecg_test","quickstart combined_del_ecg_dcb_test"], "help":"Starts ecg dcb robot combined test."}, "combined_dcb_eda_test": {"commands": ["quickstart combined_dcb_write_eda_test","quickstart combined_del_eda_dcb_test"], "help":"Starts eda dcb robot combined test."}, "adxl_dcb_test_2_chunk1": {"commands": ["delete_dcb_config adxl","delay 2","sensor adxl start","sub radxl add","delay 4","reg r adxl 0x020 0x2C 0x2E", "sub radxl remove","sensor adxl stop"], "help":"Starts the adxl dcb test 2 chunk1 robot test."}, "adxl_dcb_test_2_chunk2": {"commands": ["write_dcb_config adxl adxl_dcb_test1.dcfg","delay 2","sensor adxl start","sub radxl add","delay 5","reg r adxl 0x020 0x2C 0x2E", "sub radxl remove","sensor adxl stop","delay 2"], "help":"Starts the adxl dcb test 2 chunk3 robot test."}, "adxl_dcb_test_2_chunk3": {"commands": ["write_dcb_config adxl adxl_dcb_test2.dcfg","delay 2","sensor adxl start","sub radxl add","delay 4","reg r adxl 0x020 0x2C 0x2E", "sub radxl remove","sensor adxl stop"], "help":"Starts the adxl dcb test 2 chunk3 robot test."}, "adxl_dcb_test_2_chunk4": {"commands": ["delete_dcb_config adxl ","delay 2","sensor adxl start","sub radxl add","delay 4","reg r adxl 0x020 0x2C 0x2E", "sub radxl remove","sensor adxl stop"], "help":"Starts the adxl dcb test 2 chunk4 robot test."}, "adxl_dcb_test_2": {"commands": ["quickstart adxl_dcb_test_2_chunk1","quickstart adxl_dcb_test_2_chunk2","quickstart adxl_dcb_test_2_chunk3","quickstart adxl_dcb_test_2_chunk4"], "help":"Starts the adxl dcb test 2 robot test."}, "ppg_status_check_robot_test": {"commands": ["sensor ppg start","sub rppg add","sub rppg remove","sensor ppg stop","status ppg","sub rppg add","sub rppg add","sub rppg add", "status ppg","sub rppg 
remove","sub rppg remove","status ppg","sub rppg remove","status ppg","sensor ppg start","sensor ppg start", "sensor ppg start","status ppg","sensor ppg stop","sensor ppg stop","status ppg","sensor ppg stop","status ppg"], "help":"Starts the ppg robot test."}, #"adpd_reg_tab_update": {"commands":["reg r adpd4000 0x00000 0x0001 0x0002 0x0003 0x0004 0x0005 0x0006 0x0007 0x0008 0x0009", "delay 1", "reg r adpd4000 0x000a 0x000b 0x000c 0x000d 0x000e 0x000F 0x0010 0x0011 0x0012 0x0013", "delay 1", "reg r adpd4000 0x0014 0x0015 0x0016 0x0017 0x0018 0x0019 0x001a 0x001b 0x001c 0x001d", "delay 1", "reg r adpd4000 0x001E 0x0020 0x0021 0x0022 0x0023 0x0024 0x0025 0x002E 0x002F 0x0030", "delay 1", "reg r adpd4000 0x0031 0x0032 0x0033 0x0034 0x0035 0x0036 0x0037 0x0038 0x0039 0x003A", "delay 1", "reg r adpd4000 0x003B 0x003C 0x003D 0x003E 0x003F 0x0040 0x0041 0x0042 0x0043 0x0044", "delay 1", "reg r adpd4000 0x0045 0x0046 0x0047 0x0048 0x0049 0x004A 0x004B 0x004C 0x004D 0x004E", "delay 1", "reg r adpd4000 0x004F 0x0050 0x0051 0x0052 0x0053 0x0054 0x0055 0x0056 0x0057 0x0058", "delay 1", "reg r adpd4000 0x0059 0x005A 0x005B 0x005C 0x005D 0x005E 0x005F 0x0060 0x0061 0x0062", "delay 1", "reg r adpd4000 0x0063 0x0064 0x0065 0x0066 0x0067 0x0068 0x0069 0x006A 0x006B 0x006C", "delay 1", "reg r adpd4000 0x006D 0x006E 0x006F 0x0070 0x0071 0x0072 0x0073 0x0074 0x0075 0x0076", "delay 1", "reg r adpd4000 0x0077 0x0078 0x0079 0x007A 0x007B 0x007C 0x007D 0x007E 0x007F 0x0080", "delay 1", "reg r adpd4000 0x0081 0x0082 0x0083 0x0084 0x0085 0x0086 0x0087 0x0088 0x0089 0x008A", "delay 1", "reg r adpd4000 0x008B 0x008C 0x008D 0x008E 0x008F 0x00A0 0x00A1 0x00A2 0x00A3 0x00A4", "delay 1", "reg r adpd4000 0x00A5 0x00A6 0x00A7 0x00A8 0x00A9 0x00AA 0x00AB 0x00AC 0x00AD 0x00AE", "delay 1", "reg r adpd4000 0x00AF 0x00B0 0x00B1 0x00B2 0x00B3 0x00B4 0x00B5 0x00B6 0x00B7 0x00B8", "delay 1", "reg r adpd4000 0x0100 0x0101 0x0102 0x0103 0x0104 0x0105 0x0106 0x0107 0x0108 0x0109", "delay 1", "reg r adpd4000 
0x010A 0x010B 0x010C 0x010D 0x010E 0x010F 0x0110 0x0111 0x0112 0x0113" , "reg r adpd4000 0x0114 0x0115 0x0116 0x0117 0x0120 0x0121 0x0122 0x0123 0x0124 0x0125", "delay 1", "reg r adpd4000 0x0126 0x0127 0x0128 0x0129 0x012A 0x012B 0x012C 0x012D 0x012E 0x012F", "delay 1", "reg r adpd4000 0x0130 0x0131 0x0132 0x0133 0x0134 0x0135 0x0136 0x0137 0x0140 0x0141", "delay 1", "reg r adpd4000 0x0142 0x0143 0x0144 0x0145 0x0146 0x0147 0x0148 0x0149 0x014A 0x014B", "delay 1", "reg r adpd4000 0x014C 0x014D 0x014E 0x014F 0x0150 0x0151 0x0152 0x0153 0x0154 0x0155", "delay 1", "reg r adpd4000 0x0156 0x0157 0x0160 0x0161 0x0162 0x0163 0x0164 0x0165 0x0166 0x0167", "delay 1", "reg r adpd4000 0x0168 0x0169 0x016A 0x016B 0x016C 0x016D 0x016E 0x016F 0x0170 0x0171", "delay 1", "reg r adpd4000 0x0172 0x0173 0x0174 0x0175 0x0176 0x0177 0x0180 0x0181 0x0182 0x0183", "delay 1", "reg r adpd4000 0x0184 0x0185 0x0186 0x0187 0x0188 0x0189 0x018A 0x018B 0x018C 0x018D", "delay 1", "reg r adpd4000 0x018E 0x018F 0x0190 0x0191 0x0192 0x0193 0x0194 0x0195 0x0196 0x0197", "delay 1", "reg r adpd4000 0x01A0 0x01A1 0x01A2 0x01A3 0x01A4 0x01A5 0x01A6 0x01A7 0x01A8 0x01A9", "delay 1", "reg r adpd4000 0x01AA 0x01AB 0x01AC 0x01AD 0x01AE 0x01AF 0x01B0 0x01B1 0x01B2 0x01B3", "delay 1", "reg r adpd4000 0x01B4 0x01B5 0x01B6 0x01B7 0x01C0 0x01C1 0x01C2 0x01C3 0x01C4 0x01C5", "delay 1", "reg r adpd4000 0x01C6 0x01C7 0x01C8 0x01C9 0x01CA 0x01CB 0x01CC 0x01CD 0x01CE 0x01CF", "delay 1", "reg r adpd4000 0x01D0 0x01D1 0x01D2 0x01D3 0x01D4 0x01D5 0x01D6 0x01D7 0x01E0 0x01E1", "delay 1", "reg r adpd4000 0x01E2 0x01E3 0x01E4 0x01E5 0x01E6 0x01E7 0x01E8 0x01E9 0x01EA 0x01EB", "delay 1", "reg r adpd4000 0x01EC 0x01ED 0x01EE 0x01EF 0x01F0 0x01F1 0x01F2 0x01F3 0x01F4 0x01F5", "delay 1", "reg r adpd4000 0x01F6 0x01F7 0x0200 0x0201 0x0202 0x0203 0x0204 0x0205 0x0206 0x0207", "delay 1", "reg r adpd4000 0x0208 0x0209 0x020A 0x020B 0x020C 0x020D 0x020E 0x020F 0x0210 0x0211", "delay 1", "reg r adpd4000 0x0212 0x0213 0x0214 0x0215 
0x0216 0x0217 0x0220 0x0221 0x0222 0x0223", "delay 1", "reg r adpd4000 0x0224 0x0225 0x0226 0x0227 0x0228 0x0229 0x022A 0x022B 0x022C 0x022D", "delay 1", "reg r adpd4000 0x022E 0x022F 0x0230 0x0231 0x0232 0x0233 0x0234 0x0235 0x0236 0x0237", "delay 1", "reg r adpd4000 0x0240 0x0241 0x0242 0x0243 0x0244 0x0245 0x0246 0x0247 0x0248 0x0249", "delay 1", "reg r adpd4000 0x024A 0x024B 0x024C 0x024D 0x024E 0x024F 0x0250 0x0251 0x0252 0x0253", "delay 1", "reg r adpd4000 0x0254 0x0255 0x0256 0x0257 0x0260 0x0261 0x0262 0x0263 0x0264 0x0265", "delay 1", "reg r adpd4000 0x0266 0x0267 0x0268 0x0269 0x026A 0x026B 0x026C 0x026D 0x026E 0x026F", "delay 1", "reg r adpd4000 0x0270 0x0271 0x0272 0x0273 0x0274 0x0275 0x0276 0x0277"], "adpd_reg_tab_update": {"commands":["reg r adpd4000 0x00000 0x0001 0x0002 0x0003 0x0004 0x0005 0x0006 0x0007 0x0008 0x0009", "reg r adpd4000 0x000a 0x000b 0x000c 0x000d 0x000e 0x000F 0x0010 0x0011 0x0012 0x0013", "reg r adpd4000 0x0014 0x0015 0x0016 0x0017 0x0018 0x0019 0x001a 0x001b 0x001c 0x001d", "reg r adpd4000 0x001E 0x0020 0x0021 0x0022 0x0023 0x0024 0x0025 0x002E 0x002F 0x0030", "reg r adpd4000 0x0031 0x0032 0x0033 0x0034 0x0035 0x0036 0x0037 0x0038 0x0039 0x003A", "reg r adpd4000 0x003B 0x003C 0x003D 0x003E 0x003F 0x0040 0x0041 0x0042 0x0043 0x0044", "reg r adpd4000 0x0045 0x0046 0x0047 0x0048 0x0049 0x004A 0x004B 0x004C 0x004D 0x004E", "reg r adpd4000 0x004F 0x0050 0x0051 0x0052 0x0053 0x0054 0x0055 0x0056 0x0057 0x0058", "reg r adpd4000 0x0059 0x005A 0x005B 0x005C 0x005D 0x005E 0x005F 0x0060 0x0061 0x0062", "reg r adpd4000 0x0063 0x0064 0x0065 0x0066 0x0067 0x0068 0x0069 0x006A 0x006B 0x006C", "reg r adpd4000 0x006D 0x006E 0x006F 0x0070 0x0071 0x0072 0x0073 0x0074 0x0075 0x0076", "reg r adpd4000 0x0077 0x0078 0x0079 0x007A 0x007B 0x007C 0x007D 0x007E 0x007F 0x0080", "reg r adpd4000 0x0081 0x0082 0x0083 0x0084 0x0085 0x0086 0x0087 0x0088 0x0089 0x008A", "reg r adpd4000 0x008B 0x008C 0x008D 0x008E 0x008F 0x00A0 0x00A1 0x00A2 0x00A3 0x00A4", "reg r 
adpd4000 0x00A5 0x00A6 0x00A7 0x00A8 0x00A9 0x00AA 0x00AB 0x00AC 0x00AD 0x00AE", "reg r adpd4000 0x00AF 0x00B0 0x00B1 0x00B2 0x00B3 0x00B4 0x00B5 0x00B6 0x00B7 0x00B8", "reg r adpd4000 0x0100 0x0101 0x0102 0x0103 0x0104 0x0105 0x0106 0x0107 0x0108 0x0109", "reg r adpd4000 0x010A 0x010B 0x010C 0x010D 0x010E 0x010F 0x0110 0x0111 0x0112 0x0113" , "reg r adpd4000 0x0114 0x0115 0x0116 0x0117 0x0120 0x0121 0x0122 0x0123 0x0124 0x0125", "reg r adpd4000 0x0126 0x0127 0x0128 0x0129 0x012A 0x012B 0x012C 0x012D 0x012E 0x012F", "reg r adpd4000 0x0130 0x0131 0x0132 0x0133 0x0134 0x0135 0x0136 0x0137 0x0140 0x0141", "reg r adpd4000 0x0142 0x0143 0x0144 0x0145 0x0146 0x0147 0x0148 0x0149 0x014A 0x014B", "reg r adpd4000 0x014C 0x014D 0x014E 0x014F 0x0150 0x0151 0x0152 0x0153 0x0154 0x0155", "reg r adpd4000 0x0156 0x0157 0x0160 0x0161 0x0162 0x0163 0x0164 0x0165 0x0166 0x0167", "reg r adpd4000 0x0168 0x0169 0x016A 0x016B 0x016C 0x016D 0x016E 0x016F 0x0170 0x0171", "reg r adpd4000 0x0172 0x0173 0x0174 0x0175 0x0176 0x0177 0x0180 0x0181 0x0182 0x0183", "reg r adpd4000 0x0184 0x0185 0x0186 0x0187 0x0188 0x0189 0x018A 0x018B 0x018C 0x018D", "reg r adpd4000 0x018E 0x018F 0x0190 0x0191 0x0192 0x0193 0x0194 0x0195 0x0196 0x0197", "reg r adpd4000 0x01A0 0x01A1 0x01A2 0x01A3 0x01A4 0x01A5 0x01A6 0x01A7 0x01A8 0x01A9", "reg r adpd4000 0x01AA 0x01AB 0x01AC 0x01AD 0x01AE 0x01AF 0x01B0 0x01B1 0x01B2 0x01B3", "reg r adpd4000 0x01B4 0x01B5 0x01B6 0x01B7 0x01C0 0x01C1 0x01C2 0x01C3 0x01C4 0x01C5", "reg r adpd4000 0x01C6 0x01C7 0x01C8 0x01C9 0x01CA 0x01CB 0x01CC 0x01CD 0x01CE 0x01CF", "reg r adpd4000 0x01D0 0x01D1 0x01D2 0x01D3 0x01D4 0x01D5 0x01D6 0x01D7 0x01E0 0x01E1", "reg r adpd4000 0x01E2 0x01E3 0x01E4 0x01E5 0x01E6 0x01E7 0x01E8 0x01E9 0x01EA 0x01EB", "reg r adpd4000 0x01EC 0x01ED 0x01EE 0x01EF 0x01F0 0x01F1 0x01F2 0x01F3 0x01F4 0x01F5", "reg r adpd4000 0x01F6 0x01F7 0x0200 0x0201 0x0202 0x0203 0x0204 0x0205 0x0206 0x0207", "reg r adpd4000 0x0208 0x0209 0x020A 0x020B 0x020C 0x020D 0x020E 
0x020F 0x0210 0x0211", "reg r adpd4000 0x0212 0x0213 0x0214 0x0215 0x0216 0x0217 0x0220 0x0221 0x0222 0x0223", "reg r adpd4000 0x0224 0x0225 0x0226 0x0227 0x0228 0x0229 0x022A 0x022B 0x022C 0x022D", "reg r adpd4000 0x022E 0x022F 0x0230 0x0231 0x0232 0x0233 0x0234 0x0235 0x0236 0x0237", "reg r adpd4000 0x0240 0x0241 0x0242 0x0243 0x0244 0x0245 0x0246 0x0247 0x0248 0x0249", "reg r adpd4000 0x024A 0x024B 0x024C 0x024D 0x024E 0x024F 0x0250 0x0251 0x0252 0x0253", "reg r adpd4000 0x0254 0x0255 0x0256 0x0257 0x0260 0x0261 0x0262 0x0263 0x0264 0x0265", "reg r adpd4000 0x0266 0x0267 0x0268 0x0269 0x026A 0x026B 0x026C 0x026D 0x026E 0x026F", "reg r adpd4000 0x0270 0x0271 0x0272 0x0273 0x0274 0x0275 0x0276 0x0277"], "help":"ADPD Reg Table update - 448 registers are updated"}, #"start_config_log_test": { "commands":[ "fs_config_log start","quickstart start_log_syncppg","quickstart start_log_ecg","quickstart start_log_eda","quickstart start_log_temperature","quickstop stop_log_syncppg","quickstop stop_log_ecg","quickstop stop_log_eda","quickstop stop_log_temperature","fs_config_log_file write","fs_config_log stop", "pm_activate_touch_sensor"], "start_config_log_test": { "commands":[ "fs_config_log start","quickstart start_log_ecg","quickstop stop_log_ecg","fs_config_log_file write","fs_config_log stop", "pm_activate_touch_sensor"], "help":"Starts logging pre-defined user config data into DCFG sectors of LFS"}, "config_write_test": { "commands":[ "fs_config_log start","quickstart start_log_adpd4000_g","quickstop stop_log_adpd4000_g","fs_config_log_file write","fs_config_log stop"], "help":"Starts creation of pre-defined user config data into DCFG sectors of LFS"}, "nand_config_file_create_mv_uc1": { "commands":[ "fs_config_log start","quickstart start_log_mv_uc1", "quickstop stop_log_mv_uc1","fs_config_log_file write","fs_config_log stop"], "help":"Starts creation of MV UC1 log commands, as user config file into DCFG sectors of LFS"}, "nand_config_file_create_mv_uc2": { 
"commands":[ "fs_config_log start","quickstart start_log_mv_uc2", "quickstop stop_log_mv_uc2","fs_config_log_file write","fs_config_log stop"], "help":"Starts creation of MV UC2 log commands, as user config file into DCFG sectors of LFS"}, "nand_config_file_create_mv_uc3": { "commands":[ "fs_config_log start","quickstart start_log_mv_uc3", "quickstop stop_log_mv_uc3","fs_config_log_file write","fs_config_log stop"], "help":"Starts creation of MV UC3 log commands, as user config file into DCFG sectors of LFS"}, "nand_config_file_create_mv_uc4": { "commands":[ "fs_config_log start","quickstart start_log_mv_uc4", "quickstop stop_log_mv_uc4","fs_config_log_file write","fs_config_log stop"], "help":"Starts creation of MV UC4 log commands, as user config file into DCFG sectors of LFS"}, "nand_config_file_create_mv_uc5": { "commands":[ "fs_config_log start","quickstart start_log_mv_uc5", "quickstop stop_log_mv_uc5","fs_config_log_file write","fs_config_log stop"], "help":"Starts creation of MV UC5 log commands, as user config file into DCFG sectors of LFS"}, "nand_config_file_create_mv_uc6": { "commands":[ "fs_config_log start","quickstart start_log_mv_uc6", "quickstop stop_log_mv_uc6","fs_config_log_file write","fs_config_log stop"], "help":"Starts creation of MV UC6 log commands, as user config file into DCFG sectors of LFS"}, "nand_config_file_create_nk_uc": { "commands":[ "fs_config_log start","quickstart start_log_nk_uc", "quickstop stop_log_nk_uc","fs_config_log_file write","fs_config_log stop"], "help":"Starts creation of MV UC2 log commands, as user config file into DCFG sectors of LFS"}, "nand_config_file_create_nk_uc_set_bat_thresh": { "commands":[ "fs_config_log start","quickstart start_log_nk_uc_set_bat_thresh", "quickstop stop_log_nk_uc","fs_config_log_file write","fs_config_log stop"], "help":"Starts creation of MV UC2 log commands, as user config file into DCFG sectors of LFS"}, "eda_freq_seq_test": {"commands":["quickstart eda_4","delay 7","quickstop 
eda","quickstart eda_8","delay 7","quickstop eda", "quickstart eda_4","delay 7","quickstop eda","quickstart eda_16","delay 7","quickstop eda", "quickstart eda_8","delay 7","quickstop eda", "quickstart eda_4","delay 7","quickstop eda", "quickstart eda_16","delay 7","quickstop eda"], "help":"Start EDA frquency change test: 4->8->4->16->8->4->16"}, "gen_blk_dcb_file_create_test1": { "commands":[ "create_gen_blk_dcb start", "quickstart start_log_adpd4000_r_adxl", "quickstop stop_log_adpd4000_r_adxl", "gen_blk_dcb_file_create write", "create_gen_blk_dcb stop"], "help":"Starts the creation of file with adpd4000_r_adxl log commands, that will be put in General Block DCB, which will be used for LT application; Copies this file to dcb_cfg folder as gen_blk_dcb.lcfg"}, "gen_blk_dcb_file_create_test2": { "commands":[ "create_gen_blk_dcb start", "quickstart start_log_eda", "quickstop stop_log_eda", "gen_blk_dcb_file_create write", "create_gen_blk_dcb stop"], "help":"Starts the creation of file with EDA log commands, that will be put in General Block DCB, which will be used for LT application; Copies this file to dcb_cfg folder as gen_blk_dcb.lcfg"}, "gen_blk_dcb_file_create_test3": { "commands":[ "create_gen_blk_dcb start", "quickstart start_log_adpd4000_r", "quickstop stop_log_adpd4000_r", "gen_blk_dcb_file_create write", "create_gen_blk_dcb stop"], "help":"Starts the creation of file with adpd4000_r log commands, that will be put in General Block DCB, which will be used for LT application; Copies this file to dcb_cfg folder as gen_blk_dcb.lcfg"}, "gen_blk_dcb_file_create_test4": { "commands":[ "create_gen_blk_dcb start", "quickstart start_log_ecg", "quickstop stop_log_ecg", "gen_blk_dcb_file_create write", "create_gen_blk_dcb stop"], "help":"Starts the creation of file with ECG log commands, that will be put in General Block DCB, which will be used for LT application; Copies this file to dcb_cfg folder as gen_blk_dcb.lcfg"}, "gen_blk_dcb_file_create_mv_uc1": { "commands":[ 
"create_gen_blk_dcb start", "quickstart start_log_mv_uc1", "quickstop stop_log_mv_uc1", "gen_blk_dcb_file_create write", "create_gen_blk_dcb stop"], "help":"Starts the creation of file with MV UC1 log commands, that will be put in General Block DCB, which will be used for LT application; Copies this file to dcb_cfg folder as gen_blk_dcb.lcfg"}, "gen_blk_dcb_file_create_mv_uc2": { "commands":[ "create_gen_blk_dcb start", "quickstart start_log_mv_uc2", "quickstop stop_log_mv_uc2", "gen_blk_dcb_file_create write", "create_gen_blk_dcb stop"], "help":"Starts the creation of file with MV UC2 log commands, that will be put in General Block DCB, which will be used for LT application; Copies this file to dcb_cfg folder as gen_blk_dcb.lcfg"}, "gen_blk_dcb_file_create_mv_uc3": { "commands":[ "create_gen_blk_dcb start", "quickstart start_log_mv_uc3", "quickstop stop_log_mv_uc3", "gen_blk_dcb_file_create write", "create_gen_blk_dcb stop"], "help":"Starts the creation of file with MV UC3 log commands, that will be put in General Block DCB, which will be used for LT application; Copies this file to dcb_cfg folder as gen_blk_dcb.lcfg"}, "gen_blk_dcb_file_create_mv_uc4": { "commands":[ "create_gen_blk_dcb start", "quickstart start_log_mv_uc4", "quickstop stop_log_mv_uc4", "gen_blk_dcb_file_create write", "create_gen_blk_dcb stop"], "help":"Starts the creation of file with MV UC4 log commands, that will be put in General Block DCB, which will be used for LT application; Copies this file to dcb_cfg folder as gen_blk_dcb.lcfg"}, "gen_blk_dcb_file_create_mv_uc5": { "commands":[ "create_gen_blk_dcb start", "quickstart start_log_mv_uc5", "quickstop stop_log_mv_uc5", "gen_blk_dcb_file_create write", "create_gen_blk_dcb stop"], "help":"Starts the creation of file with MV UC5 log commands, that will be put in General Block DCB, which will be used for LT application; Copies this file to dcb_cfg folder as gen_blk_dcb.lcfg"}, "start_dcb_low_touch_test": { "commands":[ "quickstart 
gen_blk_dcb_file_create_test", "write_dcb_config lt_dcb_config gen_blk_dcb.lcfg", "pm_activate_touch_sensor"], "help":"Generate DCB file, write DCB, Start Low touch with DCB configurations"}, "ppg_dark_test": {"commands":["delete_dcb_config adpd4000","write_dcb_config adpd4000 ppg_dark_test.dcfg","toggleSaveCSV","quickstart adpd4000","plot radpd6","delay 5","quickstop adpd4000","toggleSaveCSV","delete_dcb_config adpd4000"], "help":"Quickstarts the PPG dark test for 5 secs and saves the PPG data as CSV file"}, "ppg_static_agc_dis_50Hz_test": {"commands":["loadAdpdCfg 40","clockCalibration","setPpgLcfg 40","lcfgPpgWrite 0x4 0x1010 0x7 0x32", "sensor ppg start", "sub rppg add", "plot rppg", "plot rsyncppg", "delay 120", "quickstop ppg"], "help":"Quickstarts the PPG with changes in lcfg to disable STATIC AGC, run ADPD at 50Hz "}, "ppg_static_agc_en_50Hz_test": {"commands": ["loadAdpdCfg 40","clockCalibration","setPpgLcfg 40","lcfgPpgWrite 0x4 0x1210 0x7 0x32", "sensor ppg start", "sub rppg add", "plot rppg", "plot rsyncppg","delay 120", "quickstop ppg"], "help":"Quickstarts the PPG with changes in lcfg to enable STATIC AGC, run ADPD at 50Hz "}, "ppg_static_agc_en_100Hz_test": {"commands": ["loadAdpdCfg 40","clockCalibration","setPpgLcfg 40","lcfgPpgWrite 0x4 0x1210 0x7 0x64", "sensor ppg start", "sub rppg add", "plot rppg", "plot rsyncppg","delay 120", "quickstop ppg"], "help":"Quickstarts the PPG with changes in lcfg to enable STATIC AGC, run ADPD at 100Hz "}, "ppg_static_agc_en_500Hz_test": {"commands": ["loadAdpdCfg 40","clockCalibration","setPpgLcfg 40","lcfgPpgWrite 0x4 0x1210 0x7 0x1F4", "sensor ppg start", "sub rppg add", "plot rppg", "plot rsyncppg","delay 120", "quickstop ppg"], "help":"Quickstarts the PPG with changes in lcfg to enable STATIC AGC, run ADPD at 500Hz "}, "ppg_static_agc_dis_100Hz_test": {"commands": ["loadAdpdCfg 40","clockCalibration","setPpgLcfg 40","lcfgPpgWrite 0x4 0x1010 0x7 0x64", "sensor ppg start", "sub rppg add", "plot rppg", "plot 
rsyncppg","delay 120", "quickstop ppg"], "help":"Quickstarts the PPG with changes in lcfg to disable STATIC AGC, run ADPD at 100Hz "}, "ppg_static_agc_dis_500Hz_test": {"commands": ["loadAdpdCfg 40","clockCalibration","setPpgLcfg 40","lcfgPpgWrite 0x4 0x1010 0x7 0x1F4", "sensor ppg start", "sub rppg add", "plot rppg", "plot rsyncppg","delay 120", "quickstop ppg"], "help":"Quickstarts the PPG with changes in lcfg to disable STATIC AGC, run ADPD at 500Hz "}, "ppg_static_agc_en_50Hz_recalibrate_test": {"commands": ["loadAdpdCfg 40","clockCalibration","setPpgLcfg 40","lcfgPpgWrite 0x4 0x1210 0x7 0x32", "sensor ppg start", "sub rppg add", "plot rppg", "plot rsyncppg","delay 5", "sensor ppg stop", "sensor ppg start", "delay 15", "quickstop ppg"], "help":"Quickstarts the PPG with changes in lcfg to enable STATIC AGC, run ADPD at 50Hz, run for 5secs, Do AGC recalibrate, run for 15 sec"}, "ppg_static_agc_on": {"commands": ["setPpgLcfg 40","lcfgPpgWrite 0x4 0x1210", "sensor ppg start", "sub rppg add", "plot rsyncppg","plot rppg"], "help":"Turns PPG static AGC ON in ppg lcfg- ppg app start to follow, after this command"}, "ppg_static_agc_off": {"commands": ["setPpgLcfg 40","lcfgPpgWrite 0x4 0x1010", "sensor ppg start", "sub rppg add", "plot rsyncppg","plot rppg"], "help":"Turns PPG static AGC OFF in ppg lcfg- ppg app start to follow, after this command"}, "ppg_static_agc_on_off_test": {"commands": ["loadAdpdCfg 40","clockCalibration","setPpgLcfg 40","lcfgPpgWrite 0x4 0x1210", "sensor ppg start", "sub rppg add", "plot rppg", "plot rsyncppg","delay 10","sub rppg remove","sensor ppg stop","setPpgLcfg 40","lcfgPpgWrite 0x4 0x1010","sub rppg add","sensor ppg start","delay 20","quickstop ppg"], "help":"Turns PPG static AGC ON, starts ppg app, stops ppg app, Turns PPG static AGC OFF, starts ppg app, stops ppg app"}, "mwl_view": {"commands": ["loadAdpdCfg 44","clockCalibration", "adpdAGCControl 0:1", "sensor adpd4000 start", "sub radpd6 add","sub radpd7 add","sub radpd8 add","sub 
radpd9 add", "plot radpd6", "plot radpd7","plot radpd8","plot radpd9"], "help":"Opens MWL view with Green, Red, IR, Blue LED from Slot F, G, H, I of ADPD4000"}, "mwl_view_agc_off": {"commands": ["loadAdpdCfg 44","clockCalibration","adpdAGCControl 0:0", "sensor adpd4000 start", "sub radpd6 add","sub radpd7 add","sub radpd8 add","sub radpd9 add","plot radpd6", "plot radpd7","plot radpd8","plot radpd9"], "help":"Opens MWL view with Green, Red, IR, Blue LED from Slot F, G, H, I of ADPD4000, with static AGC OFF"}, "eda_dcfg_test": {"commands":[ "edadcfg_write 1","loadEdaDcfg","edadcfg_read","lcfgEdaWrite 0:4","sub add reda","sensor eda start"], "help":"DCFG reg commands usage for EDA app and start sensor"}, "eda_reg_read_write_test":{"commands":["LDOControl 3 1","reg r eda 0x0400","reg r eda 0x0404","reg r eda 0x20D0","reg w eda 0x20D0:0x3000C1","reg r eda 0x20D0","LDOControl 3 0"], "help": "eda register read write test"}, "running_eda_reg_read_test" : {"commands":["quickstart eda", "reg r eda 0x0400","reg r eda 0x0404","quickstop eda"], "help": "eda register read test while eda is running"}, ################################################################################################################### #### Commands for Slot Switching, to be used only when Watch is loaded with FW built with "SLOT_SELECT" macro #### ################################################################################################################# "adpd4000_g_A": { "commands":["create_adpd4k_dcfg 1:4", "loadAdpdCfg 40", "reg w adpd4000 0x0D:0x4E20", "clockCalibration","adpdAGCControl 1:1", "sensor adpd4000 start", "sub radpd1 add"], "help":"starts adpd4k with the DCFG for green LED in slot-A, with Static AGC ON"}, "adpd4000_g_B": { "commands":["create_adpd4k_dcfg 2:4", "loadAdpdCfg 40", "reg w adpd4000 0x0D:0x4E20", "clockCalibration", "adpdAGCControl 1:1", "sensor adpd4000 start", "sub radpd2 add"], "help":"starts adpd4k with the DCFG for green LED in slot-B, with Static AGC ON"}, 
"adpd4000_r_A": { "commands":["create_adpd4k_dcfg 1:5", "loadAdpdCfg 40", "reg w adpd4000 0x0D:0x4E20", "clockCalibration", "adpdAGCControl 2:1", "sensor adpd4000 start", "sub radpd1 add"], "help":"starts adpd4k with the DCFG for red LED in slot-A, with Static AGC ON"}, "adpd4000_r_B": { "commands":["create_adpd4k_dcfg 2:5", "loadAdpdCfg 40", "reg w adpd4000 0x0D:0x4E20", "clockCalibration", "adpdAGCControl 2:1", "sensor adpd4000 start", "sub radpd2 add"], "help":"starts adpd4k with the DCFG for red LED in slot-B, with Static AGC ON"}, "adpd4000_ir_A": { "commands":["create_adpd4k_dcfg 1:6", "loadAdpdCfg 40", "reg w adpd4000 0x0D:0x4E20", "clockCalibration", "adpdAGCControl 3:1", "sensor adpd4000 start", "sub radpd1 add"], "help":"starts adpd4k with the DCFG for ir LED in slot-A, with Static AGC ON"}, "adpd4000_ir_B": { "commands":["create_adpd4k_dcfg 2:6", "loadAdpdCfg 40", "reg w adpd4000 0x0D:0x4E20", "clockCalibration", "adpdAGCControl 3:1", "sensor adpd4000 start", "sub radpd2 add"], "help":"starts adpd4k with the DCFG for ir LED in slot-B, with Static AGC ON"}, "adpd4000_b_A": { "commands":["create_adpd4k_dcfg 1:7", "loadAdpdCfg 40", "reg w adpd4000 0x0D:0x4E20", "clockCalibration", "adpdAGCControl 4:1", "sensor adpd4000 start", "sub radpd1 add"], "help":"starts adpd4k with the DCFG for blue LED in slot-A, with Static AGC ON"}, "adpd4000_b_B": { "commands":["create_adpd4k_dcfg 2:7", "loadAdpdCfg 40", "reg w adpd4000 0x0D:0x4E20", "clockCalibration", "adpdAGCControl 4:1", "sensor adpd4000 start", "sub radpd2 add"], "help":"starts adpd4k with the DCFG for blue LED in slot-B, with Static AGC ON"}, "adpd4000_g_A_agc_off": { "commands":["create_adpd4k_dcfg 1:4", "loadAdpdCfg 40", "clockCalibration", "reg w adpd4000 0x0D:0x4E20", "sensor adpd4000 start", "sub radpd1 add"], "help":"starts adpd4k with the DCFG for green LED in slot-A, with Static AGC OFF"}, "adpd4000_g_B_agc_off": { "commands":["create_adpd4k_dcfg 2:4", "loadAdpdCfg 40", "clockCalibration", "reg w 
adpd4000 0x0D:0x4E20", "sensor adpd4000 start", "sub radpd2 add"], "help":"starts adpd4k with the DCFG for green LED in slot-B, with Static AGC OFF"}, "adpd4000_r_A_agc_off": { "commands":["create_adpd4k_dcfg 1:5", "loadAdpdCfg 40", "clockCalibration", "reg w adpd4000 0x0D:0x4E20", "sensor adpd4000 start", "sub radpd1 add"], "help":"starts adpd4k with the DCFG for red LED in slot-A, with Static AGC OFF"}, "adpd4000_r_B_agc_off": { "commands":["create_adpd4k_dcfg 2:5", "loadAdpdCfg 40", "clockCalibration", "reg w adpd4000 0x0D:0x4E20", "sensor adpd4000 start", "sub radpd2 add"], "help":"starts adpd4k with the DCFG for red LED in slot-B, with Static AGC OFF"}, "adpd4000_ir_A_agc_off": { "commands":["create_adpd4k_dcfg 1:6", "loadAdpdCfg 40", "clockCalibration", "reg w adpd4000 0x0D:0x4E20", "sensor adpd4000 start", "sub radpd1 add"], "help":"starts adpd4k with the DCFG for ir LED in slot-A, with Static AGC OFF"}, "adpd4000_ir_B_agc_off": { "commands":["create_adpd4k_dcfg 2:6", "loadAdpdCfg 40", "clockCalibration","reg w adpd4000 0x0D:0x4E20", "sensor adpd4000 start", "sub radpd2 add"], "help":"starts adpd4k with the DCFG for ir LED in slot-B, with Static AGC OFF"}, "adpd4000_b_A_agc_off": { "commands":["create_adpd4k_dcfg 1:7", "loadAdpdCfg 40", "clockCalibration","reg w adpd4000 0x0D:0x4E20", "sensor adpd4000 start", "sub radpd1 add"], "help":"starts adpd4k with the DCFG for blue LED in slot-A, with Static AGC OFF"}, "adpd4000_b_B_agc_off": { "commands":["create_adpd4k_dcfg 2:7", "loadAdpdCfg 40", "clockCalibration","reg w adpd4000 0x0D:0x4E20", "sensor adpd4000 start", "sub radpd2 add"], "help":"starts adpd4k with the DCFG for blue LED in slot-B, with Static AGC OFF"}, "ppg_A": {"commands":["create_adpd4k_dcfg 1:1", "loadAdpdCfg 40", "clockCalibration", "setPpgLcfg 40", "sensor ppg start", "sub rppg add"], "help": "starts ppg in slot A, with Static AGC ON"}, "ppg_B": {"commands":["create_adpd4k_dcfg 2:1", "loadAdpdCfg 40", "clockCalibration", "setPpgLcfg 40", 
"sensor ppg start", "sub rppg add"], "help": "starts ppg in slot B, with Static AGC ON"}, "ppg_C": {"commands":["create_adpd4k_dcfg 3:1", "loadAdpdCfg 40", "clockCalibration", "setPpgLcfg 40", "sensor ppg start", "sub rppg add"], "help": "starts ppg in slot C, with Static AGC ON"}, "ppg_F": {"commands":["create_adpd4k_dcfg 6:1", "loadAdpdCfg 40", "clockCalibration", "setPpgLcfg 40", "sensor ppg start", "sub rppg add"], "help": "starts ppg in slot F, with Static AGC ON"}, "ppg_A_agc_off": {"commands":["create_adpd4k_dcfg 1:1", "loadAdpdCfg 40", "clockCalibration", "setPpgLcfg 40", "lcfgPpgWrite 0x4 0x1010", "sensor ppg start", "sub rppg add"], "help": "starts ppg in slot A"}, "ppg_B_agc_off": {"commands":["create_adpd4k_dcfg 2:1", "loadAdpdCfg 40", "clockCalibration", "setPpgLcfg 40", "lcfgPpgWrite 0x4 0x1010", "sensor ppg start", "sub rppg add"], "help": "starts ppg in slot B"}, "ppg_C_agc_off": {"commands":["create_adpd4k_dcfg 3:1", "loadAdpdCfg 40", "clockCalibration", "setPpgLcfg 40", "lcfgPpgWrite 0x4 0x1010", "sensor ppg start", "sub rppg add"], "help": "starts ppg in slot C"}, "ppg_F_agc_off": {"commands":["create_adpd4k_dcfg 6:1", "loadAdpdCfg 40", "clockCalibration", "setPpgLcfg 40", "lcfgPpgWrite 0x4 0x1010", "sensor ppg start", "sub rppg add"], "help": "starts ppg in slot F"}, "temp_AB": { "commands":["create_adpd4k_dcfg 1:2 2:3", "loadAdpdCfg 40", "sensor temperature start", "sub rtemperature add"], "help":"Start Temperature in slot A&B"}, "temp_BC": { "commands":["create_adpd4k_dcfg 2:2 3:3", "loadAdpdCfg 40", "sensor temperature start", "sub rtemperature add"], "help":"Start Temperature in slot B&C"}, "temp_DE": { "commands":["create_adpd4k_dcfg 4:2 5:3", "loadAdpdCfg 40", "sensor temperature start", "sub rtemperature add"], "help":"Start Temperature in slot D&E"}, "ecg4k_A": { "commands":[ "controlECGElectrodeSwitch 4k_sw 1", "create_adpd4k_dcfg 1:0", "loadAdpdCfg 40","SetEcg4kLcfg 0:300", "clockCalibration", "sensor adpd4000 start", "sub radpd1 
add"], "help":"Setup the ADPD for measuring ecg in slot A."}, "ppg_temp_ABC_agc_off": {"commands":["create_adpd4k_dcfg 1:1 2:2 3:3", "loadAdpdCfg 40", "clockCalibration", "setPpgLcfg 40","lcfgPpgWrite 0x4 0x1010", "sensor ppg start", "sub rppg add", "sensor temperature start", "sub rtemperature add"], "help": "starts ppg in Slot-A and temp using slot B and C"}, "temp_ppg_ABC_agc_off": {"commands":["create_adpd4k_dcfg 1:2 2:3 3:1", "loadAdpdCfg 40", "clockCalibration", "setPpgLcfg 40","lcfgPpgWrite 0x4 0x1010", "sensor ppg start", "sub rppg add", "sensor temperature start", "sub rtemperature add"], "help": "starts ppg in Slot-C and temp using slot A and B"}, "ecg4k_ppg_temp_agc_off": {"commands":["controlECGElectrodeSwitch 4k_sw 1", "create_adpd4k_dcfg 1:0 2:1 3:2 4:3", "loadAdpdCfg 40", "SetEcg4kLcfg 0:300", "clockCalibration", "setPpgLcfg 40","lcfgPpgWrite 0x4 0x1010", "sensor ppg start", "sub rppg add", "delay 1","sensor adpd4000 start", "sub radpd1 add", "delay 1", "sensor temperature start", "sub rtemperature add"], "help": "starts ecg in Slot A, ppg in Slot-B and temp using slot C and D"}, "ecg4k_temp_ppg_agc_off": {"commands":["controlECGElectrodeSwitch 4k_sw 1", "create_adpd4k_dcfg 1:0 2:2 3:3 4:1", "loadAdpdCfg 40", "SetEcg4kLcfg 0:300", "clockCalibration", "setPpgLcfg 40","lcfgPpgWrite 0x4 0x1010", "sensor ppg start", "sub rppg add", "delay 1","sensor adpd4000 start", "sub radpd1 add", "delay 1", "sensor temperature start", "sub rtemperature add"], "help": "starts ecg in Slot A, ppg in Slot-D and temp using slot B and C"}, "ecg4k_temp": {"commands":["controlECGElectrodeSwitch 4k_sw 1", "create_adpd4k_dcfg 1:0 2:2 3:3", "loadAdpdCfg 40", "SetEcg4kLcfg 0:300", "clockCalibration", "sensor adpd4000 start", "sub radpd1 add", "sensor temperature start", "sub rtemperature add"], "help": "starts ecg in Slot A, and temp using slot B and C"}, "ecg4k_ppg_agc_off": {"commands":["controlECGElectrodeSwitch 4k_sw 1", "create_adpd4k_dcfg 1:0 2:1", "loadAdpdCfg 40", 
"SetEcg4kLcfg 0:300", "clockCalibration", "setPpgLcfg 40","lcfgPpgWrite 0x4 0x1010", "sensor ppg start", "sub rppg add", "sensor adpd4000 start", "sub radpd1 add"], "help": "starts ecg in Slot A and ppg in Slot-B"}, "ppg_temp_ABC": {"commands":["create_adpd4k_dcfg 1:1 2:2 3:3", "loadAdpdCfg 40", "clockCalibration", "setPpgLcfg 40", "sensor ppg start", "sub rppg add", "sensor temperature start", "sub rtemperature add"], "help": "starts ppg in Slot-A and temp using slot B and C"}, "temp_ppg_ABC": {"commands":["create_adpd4k_dcfg 1:2 2:3 3:1", "loadAdpdCfg 40", "clockCalibration", "setPpgLcfg 40", "sensor ppg start", "sub rppg add", "sensor temperature start", "sub rtemperature add"], "help": "starts ppg in Slot-C and temp using slot A and B"}, "ecg4k_ppg_temp": {"commands":["controlECGElectrodeSwitch 4k_sw 1", "create_adpd4k_dcfg 1:0 2:1 3:2 4:3", "loadAdpdCfg 40", "SetEcg4kLcfg 0:300", "clockCalibration", "setPpgLcfg 40", "sensor ppg start", "sub rppg add", "delay 1","sensor adpd4000 start", "sub radpd1 add", "delay 1", "sensor temperature start", "sub rtemperature add"], "help": "starts ecg in Slot A, ppg in Slot-B and temp using slot C and D"}, "ecg4k_temp_ppg": {"commands":["controlECGElectrodeSwitch 4k_sw 1", "create_adpd4k_dcfg 1:0 2:2 3:3 4:1", "loadAdpdCfg 40", "SetEcg4kLcfg 0:300", "clockCalibration", "setPpgLcfg 40", "sensor ppg start", "sub rppg add", "delay 1","sensor adpd4000 start", "sub radpd1 add", "delay 1", "sensor temperature start", "sub rtemperature add"], "help": "starts ecg in Slot A, ppg in Slot-D and temp using slot B and C"}, "ecg4k_ppg": {"commands":["controlECGElectrodeSwitch 4k_sw 1", "create_adpd4k_dcfg 0:0 1:1", "loadAdpdCfg 40", "SetEcg4kLcfg 0:300", "clockCalibration", "setPpgLcfg 40", "sensor ppg start", "sub rppg add", "sensor adpd4000 start", "sub radpd1 add"], "help": "starts ecg in Slot A and ppg in Slot-B"}, "mwl_view_ABCD": {"commands": ["create_adpd4k_dcfg 1:4 2:5 3:6 4:7", "loadAdpdCfg 40", "clockCalibration", "reg w adpd4000 
0x0D:0x4E20","sensor adpd4000 start", "sub radpd1 add","sub radpd2 add","sub radpd3 add","sub radpd4 add", "plot radpd1", "plot radpd2","plot radpd3","plot radpd4"], "help":"Opens MWL view with Green, Red, IR, Blue LED from Slot A, B, C, D of ADPD4000"}, "mwl_view_ABCD_agc_off": {"commands": ["create_adpd4k_dcfg 1:4 2:5 3:6 4:7", "loadAdpdCfg 40", "clockCalibration", "reg w adpd4000 0x0D:0x4E20", "adpdAGCControl 0:0", "sensor adpd4000 start","sub radpd1 add","sub radpd2 add","sub radpd3 add","sub radpd4 add", "plot radpd1", "plot radpd2","plot radpd3","plot radpd4"], "help":"Opens MWL view with Green, Red, IR, Blue LED from Slot A, B, C, D of ADPD4000"}, "mwl_view_FGHI": {"commands": ["create_adpd4k_dcfg 6:4 7:5 8:6 9:7", "loadAdpdCfg 40", "clockCalibration", "reg w adpd4000 0x0D:0x4E20", "sensor adpd4000 start","sub radpd6 add","sub radpd7 add","sub radpd8 add","sub radpd9 add", "plot radpd6", "plot radpd7","plot radpd8","plot radpd9"], "help":"Opens MWL view with Green, Red, IR, Blue LED from Slot F, G, H, I of ADPD4000"}, "mwl_view_FGHI_agc_off": {"commands": ["create_adpd4k_dcfg 6:4 7:5 8:6 9:7", "loadAdpdCfg 40", "clockCalibration", "reg w adpd4000 0x0D:0x4E20", "adpdAGCControl 0:0", "sensor adpd4000 start","sub radpd6 add","sub radpd7 add","sub radpd8 add","sub radpd9 add", "plot radpd6", "plot radpd7","plot radpd8","plot radpd9"], "help":"Opens MWL view with Green, Red, IR, Blue LED from Slot F, G, H, I of ADPD4000"}, "uc_hr_enab_adpd50_adxl50": {"commands": ["setPpgLcfg 40","sub rppg add", "setUCHREnab 1 6", "plot rppg", "plot radpd6", "plot radxl", "loadAdpdCfg 40", "clockCalibration", "adpdAGCControl 1:1", "sensor adpd4000 start", "sub radpd6 add", "sensor adxl start","sub radxl add"], "help":"Starts UC HR enable test"}, "uc_hr_enab_adpd50": {"commands": ["sub rppg add", "setUCHREnab 1 6", "loadAdpdCfg 40", "clockCalibration", "adpdAGCControl 1:1", "sensor adpd4000 start", "sub radpd6 add"], "help":"Starts UC HR enable test only with ADPD"}, 
"start_log_uc_hr_enab_adpd50": {"commands": ["fs_sub rppg add", "setUCHREnab 1 6", "loadAdpdCfg 40", "clockCalibration", "adpdAGCControl 1:1", "sensor adpd4000 start", "fs_sub radpd6 add","fs_log start"], "help":"Starts UC HR enable test only with ADPD"}, "uc_hr_enab_adpd100_adxl50": {"commands": ["setPpgLcfg 40","sub rppg add", "setUCHREnab 1 6", "plot rppg", "plot radpd6", "plot radxl", "loadAdpdCfg 40", "reg w adpd4000 0x0D:0x2710", "clockCalibration", "adpdAGCControl 1:1", "sensor adpd4000 start", "sub radpd6 add", "quickstart adxl"], "help":"Starts UC HR enable test"}, "uc_hr_enab_adpd500_adxl50": {"commands": ["setPpgLcfg 40","sub rppg add", "setUCHREnab 1 6", "plot rppg", "plot radpd6", "plot radxl", "loadAdpdCfg 40", "reg w adpd4000 0x0D:0x07D0", "clockCalibration", "adpdAGCControl 1:1", "sensor adpd4000 start", "sub radpd6 add", "quickstart adxl"], "help":"Starts UC HR enable test"}, "uc_hr_enab_adpd50_adxl100": {"commands": ["setPpgLcfg 40","sub rppg add", "setUCHREnab 1 6", "plot rppg", "plot radpd6", "plot radxl", "loadAdpdCfg 40", "clockCalibration", "adpdAGCControl 1:1", "sensor adpd4000 start", "sub radpd6 add", "quickstart adxl","reg w adxl 0x2C:0x9B"], "help":"Starts UC HR enable test"}, "uc_hr_enab_adpd500_adxl100": {"commands": ["setPpgLcfg 40","sub rppg add", "setUCHREnab 1 6", "plot rppg", "plot radpd6", "plot radxl", "loadAdpdCfg 40", "reg w adpd4000 0x0D:0x07D0", "clockCalibration", "adpdAGCControl 1:1", "sensor adpd4000 start", "sub radpd6 add", "quickstart adxl","reg w adxl 0x2C:0x9B"], "help":"Starts UC HR enable test"}, "uc_hr_enab_adpd100_adxl100": {"commands": ["setPpgLcfg 40","sub rppg add", "setUCHREnab 1 6", "plot rppg", "plot radpd6", "plot radxl", "loadAdpdCfg 40", "reg w adpd4000 0x0D:0x02710", "clockCalibration", "adpdAGCControl 1:1", "sensor adpd4000 start", "sub radpd6 add", "quickstart adxl","reg w adxl 0x2C:0x9B"], "help":"Starts UC HR enable test"}, "start_stream_mv_uc1_1": { "commands":["setPpgLcfg 40","sub rppg add", 
"setUCHREnab 1 6","loadAdpdUCDcfg 1", "clockCalibration","adpdAGCControl 1:1","sensor adpd4000 start","sub radpd6 add","sensor adxl start","sub radxl add","sensor temperature start","sub rtemperature add"], "help":"Starts streaming for MV UC1 - UC HR, Adpd@500Hz, Adxl@50Hz, Temperature"}, "start_stream_mv_uc2_1": { "commands":["loadAdpdUCDcfg 2","clockCalibration","lcfgEdaWrite 0:30","lcfgEdaWrite 2:1","sensor eda start","sub reda add","sensor adxl start","sub radxl add","SQISetSlot 6","sensor sqi start","sub rsqi add","setPpgLcfg 40","sub rppg add", "setUCHREnab 1 6","adpdAGCControl 1:1","sensor adpd4000 start","sub radpd6 add","sensor temperature start","sub rtemperature add"], "help":"Starts streaming for MV UC2 - Eda@30Hz, Adxl, UC HR, SQI, Adpd@100Hz, Temperature"}, "start_stream_mv_uc2_2": { "commands":["loadAdpdUCDcfg 2","clockCalibration","lcfgEdaWrite 0:30","lcfgEdaWrite 2:1","sensor eda start","sub reda add","sensor adxl start","sub radxl add","SQISetSlot 6","sensor sqi start","sub rsqi add","setPpgLcfg 40","sub rppg add", "setUCHREnab 1 6","adpdAGCControl 1:1","sensor adpd4000 start","sensor temperature start"], "help":"Starts streaming for MV UC2(modified for testing) - Eda@30Hz, Adxl, UC HR, SQI, Adpd@100Hz, Temperature"}, "start_stream_mv_uc3_1": { "commands":["lcfgEcgWrite 0:250","sensor ecg start","sub recg add","loadAdpdUCDcfg 3", "clockCalibration","reg w adpd4000 0x0D:0x2710","SQISetSlot 6","sensor sqi start","sub rsqi add","setPpgLcfg 40","sub rppg add", "setUCHREnab 1 6","adpdAGCControl 1:1","sensor adpd4000 start","sub radpd6 add","sensor adxl start","sub radxl add","sensor temperature start","sub rtemperature add"], "help":"Starts streaming for MV UC3 - Ecg@250Hz, UC HR, SQI, Adpd@100Hz, Adxl, Temperature"}, "start_stream_mv_uc4_1": { "commands":["lcfgEcgWrite 0:1000","sensor ecg start","sub recg add","loadAdpdUCDcfg 4", "clockCalibration","setPpgLcfg 40","SQISetSlot 6","sensor sqi start", "sub rsqi add","sensor ppg start","sub rppg 
add","sensor temperature start","sub rtemperature add"], "help":"Starts streaming for MV UC4 - Ecg@1000Hz, SQI, ppg, Temperature"}, "start_stream_mv_uc5_1": { "commands":["loadAdpdUCDcfg 5","reg w adpd4000 0x0D:0x2710","clockCalibration","SQISetSlot 6","sensor sqi start","sub rsqi add","setPpgLcfg 40","sub rppg add", "setUCHREnab 1 6","adpdAGCControl 0:1","sensor adpd4000 start","sub radpd6 add","sub radpd7 add","sub radpd8 add","sub radpd9 add","sensor adxl start","sub radxl add",], "help":"Starts streaming for MV UC5 - 4 LED Slots at 100Hz, SQI, UC HR, Adxl"}, "start_stream_mv_uc6_1": { "commands":["lcfgBiaWrite 0:20","sensor bia start","sub rbia add","loadAdpdUCDcfg 6", "clockCalibration","reg w adpd4000 0x0D:0x2710","SQISetSlot 6","sensor sqi start","sub rsqi add","setPpgLcfg 40","sub rppg add", "setUCHREnab 1 6","adpdAGCControl 1:1","sensor adpd4000 start","sub radpd6 add","sensor adxl start","sub radxl add","sensor temperature start","sub rtemperature add"], "help":"Starts streaming for MV UC6 - Bia@20Hz, UC HR, SQI, Adpd@100Hz, Adxl, Temperature"}, "start_log_mv_uc4_1": { "commands":["fs_log start","fs_sub rppg add","fs_sub recg add","fs_sub rtemperature add"], "help":"Start MV UC4 start log cmd sequence, in b/w streaming"}, "start_nk_uc_log_dcb": { "commands":["delete_config_file","fs_format","setDateTime","LTAppTuning dcb 3","write_dcb_config adpd4000 cust4_dvt2_adpd_dcb.dcfg","write_dcb_config adxl cust4_adxl_dcb.dcfg","write_dcb_config eda cust4_eda_dcb.lcfg","quickstart nand_config_file_create_nk_uc","write_dcb_config user0_config cust4_user0_blk_dcb.lcfg"], "help":"Start NK UC log with user0 config app DCB"}, "start_nk_uc_log_dcb_set_bat_thresh": { "commands":["delete_config_file","fs_format","setDateTime","LTAppTuning dcb 3","write_dcb_config adpd4000 cust4_dvt2_adpd_dcb.dcfg","write_dcb_config adxl cust4_adxl_dcb.dcfg","write_dcb_config eda cust4_eda_dcb.lcfg","quickstart nand_config_file_create_nk_uc_set_bat_thresh","write_dcb_config user0_config 
cust4_user0_blk_dcb.lcfg"], "help":"Start NK UC log with user0 config app DCB after setting batter thresholds"}, "start_adxl_interval_logging": { "commands":["lcfgLTAppWrite 0x4 4","lcfgUser0ConfigAppWrite 0x5 0xA","lcfgUser0ConfigAppWrite 0x6 0xA","lcfgUser0ConfigAppWrite 0x7 0xA","quickstart start_log_adxl","delay 60","quickstop stop_log_adxl"], "help":"Start adxl app interval based logging"}, "start_temp_interval_logging": { "commands":["lcfgLTAppWrite 0x4 4","lcfgUser0ConfigAppWrite 0x8 0xA","lcfgUser0ConfigAppWrite 0x9 0xA","lcfgUser0ConfigAppWrite 0xA 0xA","quickstart start_log_temperature","delay 60","quickstop stop_log_temperature"], "help":"Start temp app interval based logging"}, "start_adpd_interval_logging": { "commands":["lcfgLTAppWrite 0x4 4","lcfgUser0ConfigAppWrite 0xB 0xA","lcfgUser0ConfigAppWrite 0xC 0xA","lcfgUser0ConfigAppWrite 0xD 0xA","quickstart start_log_adpd4000_g","delay 60","quickstop stop_log_adpd4000_g"], "help":"Start adpd app interval based logging"}, "start_eda_interval_logging": { "commands":["lcfgLTAppWrite 0x4 4","lcfgUser0ConfigAppWrite 0xE 0x0","lcfgUser0ConfigAppWrite 0xF 0x14","lcfgUser0ConfigAppWrite 0x10 0x5","quickstart start_log_eda","delay 60","quickstop stop_log_eda"], "help":"Start eda app interval based logging"}, "start_eda_cont_stream_in_bw_nk_uc": { "commands":["delete_config_file","fs_format","setDateTime","LTAppTuning dcb 3","write_dcb_config adpd4000 cust4_dvt2_adpd_dcb.dcfg","write_dcb_config adxl cust4_adxl_dcb.dcfg","write_dcb_config eda cust4_eda_dcb.lcfg","quickstart nand_config_file_create_nk_uc","write_dcb_config user0_config cust4_user0_blk_dcb.lcfg","bypass_user0_timings 1", "plot reda","quickstart eda"], "help":"Start EDA continuous streams in between NK UC log with user0 config app DCB"},} # A dictionary of useful/common command sequences to be executed for stopping applications/sensors. The 'commands' key contains a list of CLI commands to be run for the sequence. 
quickstops = { "adpd4000": { "commands":["sub radpd6 remove", "sensor adpd4000 stop"], "help":"Stops the ADPD application and unsubscribes it."}, "ecg4k": { "commands":["sub radpd1 remove", "sensor adpd4000 stop", "controlECGElectrodeSwitch 4k_sw 0", "LDOControl 3 0"], "help":"Stops and Unsubscribes the ADPD that is measuring ecg in slot A on DVT1/2/3 Watch"}, "ecg4k_dvt1": { "commands":["sub radpd1 remove", "sensor adpd4000 stop", "controlECGElectrodeSwitch 4k_sw 0", "LDOControl 3 0", "delete_dcb_config adpd4000"], "help":"Stops and Unsubscribes the ADPD that is measuring ecg in slot A on DVT1 Watch"}, "ecg4k_dvt2": { "commands":["sub radpd1 remove", "sensor adpd4000 stop", "controlECGElectrodeSwitch 4k_sw 0", "LDOControl 3 0", "delete_dcb_config adpd4000"], "help":"Stops and Unsubscribes the ADPD that is measuring ecg in slot A on DVT2 Watch"}, "adpd4000_g": { "commands":["sub radpd6 remove", "sensor adpd4000 stop"], "help":"Stops the ADPD application and unsubscribes it."}, "adpd4000_r": { "commands":["sub radpd7 remove", "sensor adpd4000 stop"], "help":"Stops the ADPD application and unsubscribes it."}, "adpd4000_ir": { "commands":["sub radpd8 remove", "sensor adpd4000 stop"], "help":"Stops the ADPD application and unsubscribes it."}, "adpd4000_b": { "commands":["sub radpd9 remove", "sensor adpd4000 stop"], "help":"Stops the ADPD application and unsubscribes it."}, "adpd4000_g_agc_off": { "commands":["sub radpd6 remove", "sensor adpd4000 stop"], "help":"Stops the ADPD application and unsubscribes it."}, "adpd4000_r_agc_off": { "commands":["sub radpd7 remove", "sensor adpd4000 stop"], "help":"Stops the ADPD application and unsubscribes it."}, "adpd4000_ir_agc_off": { "commands":["sub radpd8 remove", "sensor adpd4000 stop"], "help":"Stops the ADPD application and unsubscribes it."}, "adpd4000_b_agc_off": { "commands":["sub radpd9 remove", "sensor adpd4000 stop"], "help":"Stops the ADPD application and unsubscribes it."}, "adpd4000_g_r": { "commands":["sub radpd6 
remove", "sub radpd7 remove", "sensor adpd4000 stop"], "help":"Stops the ADPD application and unsubscribes it."}, "adpd4000_g_ir": { "commands":["sub radpd6 remove", "sub radpd8 remove", "sensor adpd4000 stop"], "help":"Stops the ADPD application and unsubscribes it."}, "adpd4000_r_ir": { "commands":["sub radpd7 remove", "sub radpd8 remove", "sensor adpd4000 stop"], "help":"Stops the ADPD application and unsubscribes it."}, "adpd4000_g_b": { "commands":["sub radpd6 remove", "sub radpd9 remove", "sensor adpd4000 stop"], "help":"Stops the ADPD application and unsubscribes it."}, "adpd4000_r_b": { "commands":["sub radpd7 remove", "sub radpd9 remove", "sensor adpd4000 stop"], "help":"Stops the ADPD application and unsubscribes it."}, "adpd4000_ir_b": { "commands":["sub radpd8 remove", "sub radpd9 remove", "sensor adpd4000 stop"], "help":"Stops the ADPD application and unsubscribes it."}, "adpd4000_g_r_ir": { "commands":["sub radpd6 remove", "sub radpd7 remove", "sub radpd8 remove", "sensor adpd4000 stop"], "help":"Stops the ADPD application and unsubscribes it."}, "adpd4000_r_ir_b": { "commands":["sub radpd7 remove", "sub radpd8 remove", "sub radpd9 remove", "sensor adpd4000 stop"], "help":"Stops the ADPD application and unsubscribes it."}, "adpd4000_g_r_b": { "commands":["sub radpd6 remove", "sub radpd7 remove", "sub radpd9 remove", "sensor adpd4000 stop"], "help":"Stops the ADPD application and unsubscribes it."}, "adpd4000_g_ir_b": { "commands":["sub radpd6 remove", "sub radpd8 remove", "sub radpd9 remove", "sensor adpd4000 stop"], "help":"Stops the ADPD application and unsubscribes it."}, "adxl": {"commands":["sub radxl remove", "sensor adxl stop"], "help":"Stop ADXL"}, "ad7156": {"commands":["sub rad7156 remove", "sensor ad7156 stop"], "help": "Stop AD7156"}, "use_case_2": {"commands":["quickstop stop_log_eda","quickstop stop_log_adxl","quickstop stop_log_temperature","quickstop stop_log_adpd4000_g",], "help":"Stops the use case 2."}, "mv_uc1_streaming_stop": 
{"commands":["sub rtemperature remove","sensor temperature stop","sub radxl remove","sensor adxl stop","sub radpd6 remove","sensor adpd4000 stop"], "help":"Stop MV UC1 streaming "}, "temperature": { "commands":["sub rtemperature remove", "sensor temperature stop"], "help":"Stop Temperature"}, "temperature_C_D_J_K_L": { "commands":["sub rtempr3 r", "sub rtemperature r","sub rtempr10 r","sub rtempr11 r","sub rtempr12 r","sensor temperature stop"], "help":"Start Temperature"}, "temperature_C_D_J_K_L_uc1": { "commands":["reg adpd4000 w 0x0143:0x0000", "reg adpd4000 w 0x0163:0x0000","reg adpd4000 w 0x0183:0x0000", "reg adpd4000 w 0x0223:0x0000","reg adpd4000 w 0x0243:0x0000", "reg adpd4000 w 0x0263:0x0000","sub rtempr3 r", "sub rtemperature r","sub rtempr10 r","sub rtempr11 r","sub rtempr12 r","sensor temperature stop"], "help":"Start Temperature"}, "stop_log_temperature_C_D_J_K_L_uc1": { "commands":["reg adpd4000 w 0x0143:0x0000", "reg adpd4000 w 0x0163:0x0000","reg adpd4000 w 0x0183:0x0000", "reg adpd4000 w 0x0223:0x0000","reg adpd4000 w 0x0243:0x0000", "reg adpd4000 w 0x0263:0x0000","fs_sub rtempr3 r", "fs_sub rtemperature r","fs_sub rtempr10 r","fs_sub rtempr11 r","fs_sub rtempr12 r","sensor temperature stop","fs_log stop"], "help":"Stop logging Temperature"}, "temperature_C_D": { "commands":["sub rtemperature r","sub rtempr3 r","sensor temperature stop"], "help":"Start Temperature"}, "ped": { "commands":["sensor ped stop", "sub rped remove", "sensor adxl stop"], "help":"Stops the Pedometer application, unsubscribes it and disables the ADXL sensor."}, "sqi_green": { "commands":["sub radpd6 remove","sensor adpd4000 stop","sub rsqi remove","sensor sqi stop"], "help":"Stops the SQI application, un-subscribes it and disables the ADPD sensor running @100Hz"}, "sqi_mm": { "commands":["sub radpd1 remove","sensor adpd4000 stop","sub rsqi remove","sensor sqi stop"], "help":"Stops the SQI application, un-subscribes it and disables the ADPD sensor running @100Hz"}, 
"sqi_green_50": { "commands":["sub radpd6 remove","sensor adpd4000 stop","sub rsqi remove","sensor sqi stop"], "help":"Stops the SQI application, un-subscribes it and disables the ADPD sensor running @50Hz"}, "sqi_green_25": { "commands":["sub radpd6 remove","sensor adpd4000 stop","sub rsqi remove","sensor sqi stop"], "help":"Stops the SQI application, un-subscribes it and disables the ADPD sensor running @25Hz"}, "sqi_ppg": { "commands":["sub rppg remove","sensor ppg stop","sub rsqi remove","sensor sqi stop"], "help":"Stops the SQI & PPG application, & un-subscribes them"}, "stop_log_sqi_green": { "commands":["fs_sub radpd6 remove", "fs_sub rsqi remove", "sensor adpd4000 stop", "sensor sqi stop","fs_log stop"], "help":"log the SQI data with Green LED on slot F of ADPD4000 at 100Hz"}, "sqi_agc_off_green": { "commands":["sub radpd6 remove","sensor adpd4000 stop","sub rsqi remove","sensor sqi stop"], "help":"Stops the SQI application, un-subscribes it and disables the ADPD sensor."}, "sqi_red": { "commands":["sub radpd7 remove","sensor adpd4000 stop","sub rsqi remove","sensor sqi stop"], "help":"Stops the SQI application, un-subscribes it and disables the ADPD sensor."}, "stop_log_sqi_red": { "commands":["fs_sub radpd7 remove", "fs_sub rsqi remove", "sensor adpd4000 stop", "sensor sqi stop","fs_log stop"], "help":"Stops the logging of SQI data with Red LED on slot G of ADPD4000 at 100Hz"}, "sqi_agc_off_red": { "commands":["sub radpd7 remove","sensor adpd4000 stop","sub rsqi remove","sensor sqi stop"], "help":"Stops the SQI application, un-subscribes it and disables the ADPD sensor."}, "sqi_ir": { "commands":["sub radpd8 remove","sensor adpd4000 stop","sub rsqi remove","sensor sqi stop"], "help":"Stops the SQI application, un-subscribes it and disables the ADPD sensor."}, "stop_log_sqi_ir": { "commands":["fs_sub radpd8 remove", "fs_sub rsqi remove", "sensor adpd4000 stop", "sensor sqi stop","fs_log stop"], "help":"log the SQI data with IR LED on slot H of ADPD4000 at 
100Hz"}, "sqi_agc_off_ir": { "commands":["sub radpd8 remove","sensor adpd4000 stop","sub rsqi remove","sensor sqi stop"], "help":"Stops the SQI application, un-subscribes it and disables the ADPD sensor."}, "sqi_blue": { "commands":["sub radpd9 remove","sensor adpd4000 stop","sub rsqi remove","sensor sqi stop"], "help":"Stops the SQI application, un-subscribes it and disables the ADPD sensor."}, "stop_log_sqi_blue": { "commands":["fs_sub radpd9 remove", "fs_sub rsqi remove", "sensor adpd4000 stop", "sensor sqi stop","fs_log stop"], "help":"log the SQI data with Blue LED on slot I of ADPD4000 at 100Hz"}, "sqi_agc_off_blue": { "commands":["sub radpd9 remove","sensor adpd4000 stop","sub rsqi remove","sensor sqi stop"], "help":"Stops the SQI application, un-subscribes it and disables the ADPD sensor."}, "stop_log_adxl": {"commands":["fs_sub radxl remove", "sensor adxl stop","fs_log stop"], "help":"Stops logging the ADXL"}, "stop_log_adp": {"commands": ["fs_sub radp remove", "fs_log stop"], "help": "Stops logging the Battery info."}, "ppg": { "commands":["sub rppg remove", "sensor ppg stop"], "help":"Stops the PPG library with Unsubscribes it."}, "ppg_dynamic_agc": { "commands":["sub rstatic_agc remove","sub rdynamic_agc remove", "sub rppg remove", "sensor ppg stop"], "help":"Stops the PPG library with Unsubscribes it."}, "hrv": { "commands":["sub rhrv remove","sub rstatic_agc remove", "sub rdynamic_agc remove", "sub rppg remove", "sensor ppg stop"], "help":"Stops the PPG+HRV stream"}, "periodic_ppg": { "commands":["sub rppg remove", "sensor ppg stop"], "help":"Stops the PPG library with Unsubscribes it."}, "stop_log_adpd4000_g": { "commands":["sensor adpd4000 stop","fs_sub radpd6 remove","fs_log stop"], "help":"Stops logging the ADPD_g data"}, "stop_log_adpd4000_r": { "commands":["sensor adpd4000 stop","fs_sub radpd7 remove","fs_log stop"], "help":"Stops logging the ADPD_r data "}, "stop_log_adpd4000_ir": { "commands":["sensor adpd4000 stop","fs_sub radpd8 
remove","fs_log stop"], "help":"Stops logging the ADPD_ir data"}, "stop_log_adpd4000_b": { "commands":["sensor adpd4000 stop","fs_sub radpd9 remove","fs_log stop"], "help":"Stops logging the ADPD_b data"}, "stop_log_ppg": { "commands":["sensor ppg stop","fs_sub rppg remove","fs_log stop"], "help":"Stops logging the PPG data"}, "stop_log_ppg_dynamic_agc": { "commands":["sensor ppg stop","fs_sub rstatic_agc remove", "fs_sub rdynamic_agc remove", "fs_sub rppg remove", "fs_log stop"], "help":"Stops logging the PPG data"}, "stop_log_hrv": { "commands":["sensor ppg stop","fs_sub rstatic_agc remove", "fs_sub rdynamic_agc remove", "fs_sub rhrv remove", "fs_sub rppg remove","fs_log stop"], "help":"Stops logging the PPG+HRV data"}, "stop_log_temperature": { "commands":["sensor temperature stop","fs_sub rtemperature remove","fs_log stop"], "help":"Stop Temperature"}, "stop_log_ecg": { "commands":["sensor ecg stop","fs_sub recg remove","fs_log stop"], "help":"Stop ecg"}, "stop_log_ecg_1500": { "commands":["sensor ecg stop","fs_sub recg remove","fs_log stop"], "help":"Stop ecg"}, "stop_log_ecg_1600": { "commands":["sensor ecg stop","fs_sub recg remove","fs_log stop"], "help":"Stop ecg"}, "stop_log_ecg_2000": { "commands":["sensor ecg stop","fs_sub recg remove","fs_log stop"], "help":"Stop ecg"}, "ecg": {"commands": ["sub recg remove", "sensor ecg stop"], "help": "Stop ECG"}, "ecg_dcb": {"commands": ["sub recg remove", "sensor ecg stop"], "help": "Stop ECG"}, "eda": {"commands":["sub reda remove", "sensor eda stop"], "help":"Stops the EDA application and unsubscribes it."}, "eda_dcb": {"commands":["sub reda remove", "sensor eda stop"], "help":"Stops the EDA application and unsubscribes it."}, "stop_log_eda": {"commands":["sensor eda stop","fs_sub reda remove","fs_log stop"], "help":"Start eda"}, "stop_log_bia": {"commands":["sensor bia stop","fs_sub rbia remove","fs_log stop"], "help":"Stop BIA logging"}, "stop_log_bcm": {"commands":["lcfgBiaWrite 5:0","sensor bia stop","fs_sub 
rbia remove","fs_log stop"], "help":"Stops BIA and BCM logging"}, "stop_log_ped":{"commands":["sensor ped stop", "sensor adxl stop","fs_sub rped remove","fs_log stop"], "help":"Stops the Pedometer application, unsubscribes it and disables the ADXL sensor."}, "stop_log_adpd4000_r_adxl": {"commands":["fs_sub radpd7 remove","fs_sub radxl remove","fs_log stop","sensor adpd4000 stop","sensor adxl stop"], "help":"Stops the ADPD4000_r, ADXL logging"}, ###Testing ECG4K in UC3 in place of ECG from AD5940### "uc3_ecg4k_dvt2": { "commands":["sub rtemperature remove", "sub radxl remove", "sub radpd6 remove", "sub radpd1 remove", "sub rsqi remove", "sub rppg remove", "sub rdynamic_agc remove", "sub rstatic_agc remove", "sensor temperature stop", "sensor adxl stop", "sensor adpd4000 stop", "sensor sqi stop", "controlECGElectrodeSwitch 4k_sw 0", "LDOControl 3 0", "delete_dcb_config adpd4000"], "help":"Stops streaming for UC3 - ECG4K, SQI, Adpd, Adxl, Temperature"}, "ecg4k_eda_dvt2": { "commands":["sub radpd1 remove", "sensor adpd4000 stop", "controlECGElectrodeSwitch 4k_sw 0", "delete_dcb_config adpd4000", "quickstop eda"], "help":"Running ECG from ADPD4K + EDA"}, ##################################################### #"stop_log_mv_uc1": { "commands":["sensor temperature stop","sensor adxl stop","sensor adpd4000 stop","fs_sub radpd6 remove","fs_sub radxl remove","fs_sub rtemperature remove","fs_log stop"], "stop_log_mv_uc1": { "commands":["sensor temperature stop","sensor adxl stop","sensor adpd4000 stop", "setUCHREnab 0 6","fs_sub rstatic_agc remove", "fs_sub rdynamic_agc remove", "fs_sub rppg remove", "fs_sub radpd6 remove","fs_sub radxl remove","fs_sub rtemperature remove","fs_log stop"], "help":"Stops logging for MV UC1 - Adpd, Adxl, HR, Temperature"}, "stop_log_mv_uc1_without_hr": { "commands":["sensor temperature stop","sensor adxl stop","sensor adpd4000 stop","fs_sub rstatic_agc remove","fs_sub radpd6 remove","fs_sub radxl remove","fs_sub rtemperature remove","fs_log 
stop"], "help":"Stops logging for MV UC1 - Adpd, Adxl,Temperature"}, "stop_log_mv_uc2": { "commands":["sensor temperature stop","sensor adpd4000 stop","sensor sqi stop","sensor adxl stop","sensor eda stop","setUCHREnab 0 6","fs_sub rstatic_agc remove", "fs_sub rdynamic_agc remove", "fs_sub rppg remove","fs_sub radpd6 remove","fs_sub rsqi remove","fs_sub radxl remove","fs_sub reda remove","fs_sub rtemperature remove","fs_log stop"], "help":"Stops logging for MV UC2 - Eda, Adxl, SQI, Adpd, HR, Temperature"}, "stop_log_mv_uc3": { "commands":["sensor temperature stop","sensor adxl stop","sensor adpd4000 stop","sensor sqi stop","sensor ecg stop","setUCHREnab 0 6","fs_sub rstatic_agc remove", "fs_sub rdynamic_agc remove", "fs_sub rppg remove","fs_sub radpd6 remove","fs_sub rsqi remove","fs_sub radxl remove","fs_sub recg remove","fs_sub rtemperature remove","fs_log stop"], "help":"Stops logging for MV UC3 - Ecg, SQI, Adpd, HR, Adxl, Temperature"}, "stop_log_mv_uc4": { "commands":["sensor temperature stop","sensor ppg stop","sensor sqi stop","sensor ecg stop","fs_sub rppg remove","fs_sub rsqi remove","fs_sub rstatic_agc remove", "fs_sub rdynamic_agc remove", "fs_sub recg remove","fs_sub rtemperature remove","fs_log stop"], "help":"Stops logging for MV UC4 - Ecg, ppg, SQI,Temperature"}, "stop_log_mv_uc5": { "commands":["sensor adxl stop","sensor adpd4000 stop","sensor sqi stop","setUCHREnab 0 6","fs_sub rppg remove","fs_sub radpd6 remove","fs_sub rstatic_agc remove", "fs_sub rdynamic_agc remove", "fs_sub rsqi remove","fs_sub radpd7 remove","fs_sub radpd8 remove","fs_sub radpd9 remove","fs_sub radxl remove","fs_log stop"], "help":"Stops logging for MV UC5- 4 LED Slots at 100Hz, HR, SQI, ADXL"}, "stop_log_mv_uc5_without_hr": { "commands":["sensor adxl stop","sensor adpd4000 stop","sensor sqi stop","fs_sub radpd6 remove", "fs_sub rstatic_agc remove", "fs_sub rsqi remove","fs_sub radpd7 remove","fs_sub radpd8 remove","fs_sub radpd9 remove","fs_sub radxl remove","fs_log stop"], 
"help":"Stops logging for MV UC5- 4 LED Slots at 100Hz, HR, SQI, ADXL"}, "stop_log_mv_uc6": { "commands":["sensor temperature stop","sensor adxl stop","sensor adpd4000 stop","sensor sqi stop","sensor bia stop","setUCHREnab 0 6", "fs_sub rstatic_agc remove","fs_sub rdynamic_agc remove", "fs_sub rppg remove","fs_sub radpd6 remove","fs_sub rsqi remove","fs_sub radxl remove","fs_sub rbia remove","fs_sub rtemperature remove","fs_log stop"], "help":"Stops logging for MV UC6 - Bia, SQI, Adpd, HR, Adxl, Temperature"}, "stop_log_nk_uc": { "commands":["sensor temperature stop","sensor adpd4000 stop","sensor adxl stop","sensor eda stop","fs_sub radpd6 remove","fs_sub radxl remove","fs_sub reda remove","fs_sub rtemperature remove","fs_log stop"], "help":"Stops logging for NK UC - Eda, Adxl, Adpd, Temperature"}, "bia": {"commands": ["sub rbia remove", "sensor bia stop"], "help": "Stop BIA"}, "bcm": {"commands": ["lcfgBiaWrite 5:0","sub rbia remove", "sensor bia stop"], "help": "Stop BIA and BCM"}, "mwl_view": {"commands": ["sub radpd6 remove","sub radpd7 remove","sub radpd8 remove","sub radpd9 remove","sensor adpd4000 stop"], "help":"Opens MWL view with Green, Red IR, Blue LED from Slot F, G, H, I of ADPD4000"}, "mwl_view_agc_off": {"commands": ["sub radpd6 remove","sub radpd7 remove","sub radpd8 remove","sub radpd9 remove","sensor adpd4000 stop"], "help":"Opens MWL view with Green, Red IR, Blue LED from Slot F, G, H, I of ADPD4000"}, "ppg_static_agc_on": { "commands":["sub rppg remove", "sensor ppg stop"], "help":"Stops the PPG library with Unsubscribes it."}, "ppg_static_agc_off": { "commands":["sub rppg remove", "sensor ppg stop"], "help":"Stops the PPG library with Unsubscribes it."}, ################################################################################################################### #### Commands for Slot Switching, to be used only when Watch is loaded with FW built with "SLOT_SELECT" macro #### 
################################################################################################################# "adpd4000_g_A": { "commands":["sub radpd1 remove", "sensor adpd4000 stop", "disable_adpd4k_slots"], "help":"Stops the ADPD application, unsubscribes it and disables the slot"}, "adpd4000_g_B": { "commands":["sub radpd2 remove", "sensor adpd4000 stop", "disable_adpd4k_slots"], "help":"Stops the ADPD application, unsubscribes it and disables the slot"}, "adpd4000_r_A": { "commands":["sub radpd1 remove", "sensor adpd4000 stop", "disable_adpd4k_slots"], "help":"Stops the ADPD application, unsubscribes it and disables the slot"}, "adpd4000_r_B": { "commands":["sub radpd2 remove", "sensor adpd4000 stop", "disable_adpd4k_slots"], "help":"Stops the ADPD application, unsubscribes it and disables the slot"}, "adpd4000_ir_A": { "commands":["sub radpd1 remove", "sensor adpd4000 stop", "disable_adpd4k_slots"], "help":"Stops the ADPD application, unsubscribes it and disables the slot"}, "adpd4000_ir_B": { "commands":["sub radpd2 remove", "sensor adpd4000 stop", "disable_adpd4k_slots"], "help":"Stops the ADPD application, unsubscribes it and disables the slot"}, "adpd4000_b_A": { "commands":["sub radpd1 remove", "sensor adpd4000 stop", "disable_adpd4k_slots"], "help":"Stops the ADPD application, unsubscribes it and disables the slot"}, "adpd4000_b_B": { "commands":["sub radpd2 remove", "sensor adpd4000 stop", "disable_adpd4k_slots"], "help":"Stops the ADPD application, unsubscribes it and disables the slot"}, "adpd4000_g_A_agc_off": { "commands":["sub radpd1 remove", "sensor adpd4000 stop", "disable_adpd4k_slots"], "help":"Stops the ADPD application, unsubscribes it and disables the slot"}, "adpd4000_g_B_agc_off": { "commands":["sub radpd2 remove", "sensor adpd4000 stop", "disable_adpd4k_slots"], "help":"Stops the ADPD application, unsubscribes it and disables the slot"}, "adpd4000_r_A_agc_off": { "commands":["sub radpd1 remove", "sensor adpd4000 stop", 
"disable_adpd4k_slots"], "help":"Stops the ADPD application, unsubscribes it and disables the slot"}, "adpd4000_r_B_agc_off": { "commands":["sub radpd2 remove", "sensor adpd4000 stop", "disable_adpd4k_slots"], "help":"Stops the ADPD application, unsubscribes it and disables the slot"}, "adpd4000_ir_A_agc_off": { "commands":["sub radpd1 remove", "sensor adpd4000 stop", "disable_adpd4k_slots"], "help":"Stops the ADPD application, unsubscribes it and disables the slot"}, "adpd4000_ir_B_agc_off": { "commands":["sub radpd2 remove", "sensor adpd4000 stop", "disable_adpd4k_slots"], "help":"Stops the ADPD application, unsubscribes it and disables the slot"}, "adpd4000_b_A_agc_off": { "commands":["sub radpd1 remove", "sensor adpd4000 stop", "disable_adpd4k_slots"], "help":"Stops the ADPD application, unsubscribes it and disables the slot"}, "adpd4000_b_B_agc_off": { "commands":["sub radpd2 remove", "sensor adpd4000 stop", "disable_adpd4k_slots"], "help":"Stops the ADPD application, unsubscribes it and disables the slot"}, "temp_AB": { "commands":["sub rtemperature remove", "sensor temperature stop","disable_adpd4k_slots"], "help":"Stops Temperature app and disables the slot"}, "temp_BC": { "commands":["sub rtemperature remove", "sensor temperature stop","disable_adpd4k_slots"], "help":"Stops Temperature app and disables the slot"}, "temp_DE": { "commands":["sub rtemperature remove", "sensor temperature stop","disable_adpd4k_slots"], "help":"Stops Temperature app and disables the slot"}, "ppg_A": { "commands":["sub rppg remove", "sensor ppg stop", "disable_adpd4k_slots"], "help":"Stops the PPG library with Unsubscribes it."}, "ppg_B": { "commands":["sub rppg remove", "sensor ppg stop", "disable_adpd4k_slots"], "help":"Stops the PPG library with Unsubscribes it."}, "ppg_C": { "commands":["sub rppg remove", "sensor ppg stop", "disable_adpd4k_slots"], "help":"Stops the PPG library with Unsubscribes it."}, "ppg_F": { "commands":["sub rppg remove", "sensor ppg stop", 
"disable_adpd4k_slots"], "help":"Stops the PPG library with Unsubscribes it."}, "ppg_A_agc_off": { "commands":["sub rppg remove", "sensor ppg stop", "disable_adpd4k_slots"], "help":"Stops the PPG library with Unsubscribes it."}, "ppg_B_agc_off": { "commands":["sub rppg remove", "sensor ppg stop", "disable_adpd4k_slots"], "help":"Stops the PPG library with Unsubscribes it."}, "ppg_C_agc_off": { "commands":["sub rppg remove", "sensor ppg stop", "disable_adpd4k_slots"], "help":"Stops the PPG library with Unsubscribes it."}, "ppg_F_agc_off": { "commands":["sub rppg remove", "sensor ppg stop", "disable_adpd4k_slots"], "help":"Stops the PPG library with Unsubscribes it."}, "ecg4k_A": { "commands":["sub radpd1 remove", "sensor adpd4000 stop", "controlECGElectrodeSwitch 4k_sw 0", "disable_adpd4k_slots"], "help":"Stops and Unsubscribes the ADPD that is measuring ecg in slot A."}, "ecg4k_ppg_temp": { "commands":["sub radpd1 remove", "sensor adpd4000 stop", "controlECGElectrodeSwitch 4k_sw 0", "sub rppg remove", "sensor ppg stop", "sub rtemperature remove", "sensor temperature stop", "disable_adpd4k_slots"], "help":"Stops ecg4k, ppg and temperature app and disables the slot"}, "ecg4k_temp_ppg": { "commands":["sub radpd1 remove", "sensor adpd4000 stop", "controlECGElectrodeSwitch 4k_sw 0", "sub rtemperature remove", "sensor temperature stop", "sub rppg remove", "sensor ppg stop", "disable_adpd4k_slots"], "help":"Stops ecg4k, ppg and temperature app and disables the slot"}, "ecg4k_ppg": { "commands":["sub radpd1 remove", "sensor adpd4000 stop", "controlECGElectrodeSwitch 4k_sw 0", "sub rppg remove", "sensor ppg stop", "disable_adpd4k_slots"], "help":"Stops ecg4k, ppg and temperature app and disables the slot"}, "ecg4k_temp": { "commands":["sub radpd1 remove", "sensor adpd4000 stop", "controlECGElectrodeSwitch 4k_sw 0", "sub rtemperature remove", "sensor temperature stop", "disable_adpd4k_slots"], "help":"Stops ecg4k and temperature app and disables the slot"}, "ppg_temp_ABC": { 
"commands":["sub rppg remove", "sensor ppg stop", "sub rtemperature remove", "sensor temperature stop", "disable_adpd4k_slots"], "help":"Stops ppg and temperature app and disables the slot"}, "temp_ppg_ABC": { "commands":["sub rtemperature remove", "sensor temperature stop", "sub rppg remove", "sensor ppg stop", "disable_adpd4k_slots"], "help":"Stops ppg and temperature app and disables the slot"}, "ecg4k_ppg_temp_agc_off": { "commands":[ "sub radpd1 remove", "sensor adpd4000 stop", "controlECGElectrodeSwitch 4k_sw 0", "sub rppg remove", "sensor ppg stop", "sub rtemperature remove", "sensor temperature stop", "disable_adpd4k_slots"], "help":"Stops ecg4k, ppg and temperature app and disables the slot"}, "ecg4k_temp_ppg_agc_off": { "commands":["sub radpd1 remove", "sensor adpd4000 stop", "controlECGElectrodeSwitch 4k_sw 0", "sub rtemperature remove", "sensor temperature stop", "sub rppg remove", "sensor ppg stop", "disable_adpd4k_slots"], "help":"Stops ecg4k, ppg and temperature app and disables the slot"}, "ecg4k_ppg_agc_off": { "commands":[ "sub radpd1 remove", "sensor adpd4000 stop", "controlECGElectrodeSwitch 4k_sw 0", "sub rppg remove", "sensor ppg stop", "disable_adpd4k_slots"], "help":"Stops ecg4k, ppg and temperature app and disables the slot"}, "ppg_temp_ABC_agc_off": { "commands":["sub rppg remove", "sensor ppg stop", "sub rtemperature remove", "sensor temperature stop", "disable_adpd4k_slots"], "help":"Stops ppg and temperature app and disables the slot"}, "temp_ppg_ABC_agc_off": { "commands":["sub rtemperature remove", "sensor temperature stop", "sub rppg remove", "sensor ppg stop", "disable_adpd4k_slots"], "help":"Stops ppg and temperature app and disables the slot"}, "mwl_view_ABCD": {"commands": [ "sub radpd1 remove","sub radpd2 remove","sub radpd3 remove","sub radpd4 remove", "sensor adpd4000 stop", "disable_adpd4k_slots"], "help":"Opens MWL view with Green, Red IR, Blue LED from Slot A, B, C, D of ADPD4000"}, "mwl_view_ABCD_agc_off": {"commands": [ 
"sub radpd1 remove","sub radpd2 remove","sub radpd3 remove","sub radpd4 remove", "sensor adpd4000 stop", "disable_adpd4k_slots"], "help":"Opens MWL view with Green, Red IR, Blue LED from Slot A, B, C, D of ADPD4000"}, "mwl_view_FGHI": {"commands": [ "sub radpd6 remove","sub radpd7 remove","sub radpd8 remove","sub radpd9 remove", "sensor adpd4000 stop", "disable_adpd4k_slots"], "help":"Opens MWL view with Green, Red IR, Blue LED from Slot F, G, H, I of ADPD4000"}, "mwl_view_FGHI_agc_off": {"commands": ["sub radpd6 remove","sub radpd7 remove","sub radpd8 remove","sub radpd9 remove", "sensor adpd4000 stop", "disable_adpd4k_slots"], "help":"Opens MWL view with Green, Red IR, Blue LED from Slot F, G, H, I of ADPD4000"}, "uc_hr_enab": {"commands": ["sub rppg remove","quickstop adpd4000", "quickstop adxl", "setUCHREnab 0 6"], "help":"Stops UC HR enable test"}, "uc_hr_disable_adpd": {"commands": ["sub radpd6 remove","sub rppg remove","quickstop adpd4000", "setUCHREnab 0 6"], "help":"Stops UC HR enable test only with ADPD"}, "stop_log_uc_hr_disable_adpd": {"commands": ["fs_sub radpd6 remove","fs_sub rppg remove","quickstop adpd4000", "setUCHREnab 0 6","fs_log stop"], "help":"Stops UC HR enable test only with ADPD"}, "stop_stream_mv_uc1_1": { "commands":["sub rppg remove","sensor temperature stop","sensor adxl stop","sensor adpd4000 stop","sub radpd6 remove","sub radxl remove","sub rtemperature remove", "setUCHREnab 0 6"], "help":"Stops streaming for MV UC1 - UC HR, Adpd@500Hz, Adxl@50Hz, Temperature"}, "stop_stream_mv_uc2_1": { "commands":["sub rppg remove","sensor temperature stop","sensor adpd4000 stop","sensor adxl stop","sensor eda stop","sub radpd6 remove","sub radxl remove","sub reda remove","sub rtemperature remove", "setUCHREnab 0 6","sub rsqi remove","sensor sqi stop"], "help":"Stops streaming for MV UC2 - Eda@30Hz, Adxl, UC HR, SQI, Adpd@100Hz, Temperature"}, "stop_stream_mv_uc2_2": { "commands":["sub rppg remove","sensor temperature stop","sensor adpd4000 
stop","sensor adxl stop","sensor eda stop","sub radxl remove","sub reda remove", "setUCHREnab 0 6","sub rsqi remove","sensor sqi stop"], "help":"Stops streaming for MV UC2 - Eda@30Hz, Adxl, UC HR, SQI, Adpd@100Hz, Temperature"}, "stop_stream_mv_uc3_1": { "commands":["sub rppg remove","sensor temperature stop","sensor adxl stop","sensor adpd4000 stop","sensor ecg stop","sub radpd6 remove","sub radxl remove","sub recg remove","sub rtemperature remove", "setUCHREnab 0 6","sub rsqi remove","sensor sqi stop"], "help":"Stops streaming for MV UC3 - Ecg@250Hz, UC HR, SQI, Adpd@100Hz, Adxl, Temperature"}, "stop_stream_mv_uc4_1": { "commands":["sensor temperature stop","sensor ppg stop","sensor ecg stop","sub rppg remove","sub recg remove","sub rtemperature remove","sub rsqi remove","sensor sqi stop"], "help":"Stops streaming for MV UC4 - Ecg@1000Hz, SQI, ppg, Temperature"}, "stop_stream_mv_uc5_1": { "commands":["sub rppg remove","sensor adxl stop","sensor adpd4000 stop","sub radpd6 remove","sub radpd7 remove","sub radpd8 remove","sub radpd9 remove","sub radxl remove", "setUCHREnab 0 6","sub rsqi remove","sensor sqi stop"], "help":"Stops streaming for MV UC5- 4 LED Slots at 100Hz, SQI, UC HR, Adxl"}, "stop_stream_mv_uc6_1": { "commands":["sub rppg remove","sensor temperature stop","sensor adxl stop","sensor adpd4000 stop","sensor bia stop","sub radpd6 remove","sub radxl remove","sub rbia remove","sub rtemperature remove", "setUCHREnab 0 6","sub rsqi remove","sensor sqi stop"], "help":"Stops streaming for MV UC6 - Bia@20Hz, UC HR, SQI, Adpd@100Hz, Adxl, Temperature"}, "stop_log_mv_uc4_1": { "commands":["fs_sub rppg remove","fs_sub recg remove","fs_sub rtemperature remove","fs_log stop"], "help":"Give the MV UC4 stop stream cmd sequence"}, "stop_nk_uc_log_fw": { "commands":["delete_config_file"], "help":"Stop NK UC log"}, "stop_nk_uc_log_dcb": { "commands":["delete_config_file", "delete_dcb_config lt_app_lcfg", "delete_dcb_config user0_config"], "help":"Stop NK UC log"}, 
"stop_eda_cont_stream_in_bw_nk_uc": { "commands":["delete_config_file", "delete_dcb_config lt_app_lcfg", "delete_dcb_config user0_config","bypass_user0_timings 0","quickstop eda"], "help":"Stop EDA continuous streams in between NK UC log with user0 config app DCB"},} def precmd(self, line): """ This function overrides the cmd.Cmd base class method. It gets called after every command that the user gives. """ if (not "connect" in line) and (self.m2m2_server == None): self.vrb.write("NOTE: Not connected to a serial device!") self.vrb.console_write(self.prompt + cr.Style.RESET_ALL + line) return line def postcmd(self, stop, line): """ This function overrides the cmd.Cmd base class method. It gets called after every command that the user gives. :param stop: :param line: :return: """ return None ''' if type(stop) == tuple: if len(stop) > 0: return stop[0] else: return None else: return stop ''' def do_flush(self, arg): """ Flush all of the CLI's message queues. This is useful when you receive messages that aren't handled by a command. For example: if a command times out but a response is eventually received, then the next time that command is run, the previous response will be in the queue and will be returned instead of the newer response. """ for addr in self.dispatcher_map: q = self.dispatcher_map[addr].queue name = self._get_enum_name(M2M2_ADDR_ENUM_t, addr) self.vrb.write("Flushing Queue with {} items: {}({})".format(len(q), name, addr), 2) q.clear() @cli_logger def do_connect_usb(self, arg): """ Connect to serial device over USB CDC. Provide a serial device identifier appropriate for your platform (COMX for Windows, /dev/ttyX for Linux and OSX). 
#>connect_usb COM7 """ args = self._parse_args(arg, 1) if args == None: return try: self.vrb.write("Starting serial interface threads...", 4) self.m2m2_server = m2m2_server.m2m2_uart_server(self.rx_q, self.tx_q, self.vrb) #if not self.m2m2_server.connect(args[0], int(args[1])): if not self.m2m2_server.connect(args[0]): self.vrb.write("Failed to connect to the serial port!") return self.vrb.write("Starting dispatcher thread...", 4) dispatcher_thread = threading.Thread(target=self._dispatcher, args=[self.dispatcher_map, self.sock_map]) dispatcher_thread.setDaemon(True) dispatcher_thread.start() except serial.serialutil.SerialException as e: self.vrb.err("Error opening the serial device!") self.vrb.err("You might be connected already, or have given an incorrect serial device identifier.") self.vrb.err("Error was:\n\t{}".format(e)) return set_cli_addr(M2M2_ADDR_ENUM_t.M2M2_ADDR_APP_CLI) self.onecmd("getVersion") self._check_dvt_version() def do_connect_dongle(self, arg): """ Connect to remote device via BLE through BLE Dongle COM Port. Provide a serial device identifier appropriate for your platform (COMX for Windows, /dev/ttyX for Linux and OSX). 
#>connect_dongle COM8 """ args = self._parse_args(arg, 1) if args == None: return try: self.vrb.write("Starting serial interface threads...", 4) self.m2m2_server = m2m2_server.m2m2_uart_server(self.rx_q, self.tx_q, self.vrb) #if not self.m2m2_server.connect(args[0], int(args[1])): if not self.m2m2_server.connect(args[0]): self.vrb.write("Failed to connect to the serial port!") return self.vrb.write("Starting dispatcher thread...", 4) dispatcher_thread = threading.Thread(target=self._dispatcher, args=[self.dispatcher_map, self.sock_map]) dispatcher_thread.setDaemon(True) dispatcher_thread.start() except serial.serialutil.SerialException as e: self.vrb.err("Error opening the serial device!") self.vrb.err("You might be connected already, or have given an incorrect serial device identifier.") self.vrb.err("Error was:\n\t{}".format(e)) return set_cli_addr(M2M2_ADDR_ENUM_t.M2M2_ADDR_APP_CLI_BLE) self.onecmd("getVersion") self._check_dvt_version() def _check_dvt_version(self): global clk_calib_val err_stat, chip_id = self.do_getChipID('2') if chip_id == 0xc0: print("DVT1 Watch Connected") self.dvt_ver = "dvt1" self.clk_calib_val = 6 else: print("DVT2 Watch Connected") self.dvt_ver = "dvt2" self.clk_calib_val = 2 def help_connect(self): print "Connect to a serial device." print "Provide a serial device identifier appropriate for your platform (COMX for Windows, /dev/ttyX for Linux and OSX)." print "Example usage for usb connection with Watch: \n\t#>connect_usb COM7\n" print "Example usage for ble connection with Watch: \n\t#>connect_dongle COM7\n" print "Wait a moment, searching for available serial ports..." 
# Find a list of available serial ports result = [] if sys.platform.startswith('win'): ports = ['COM%s' % (i + 1) for i in range(256)] elif sys.platform.startswith('linux') or sys.platform.startswith('cygwin'): # this excludes your current terminal "/dev/tty" ports = glob.glob('/dev/tty[A-Za-z]*') elif sys.platform.startswith('darwin'): ports = glob.glob('/dev/tty.*') else: raise EnvironmentError('Unsupported platform') for port in ports: try: s = serial.Serial(port) s.close() result.append(port) except (OSError, serial.SerialException): pass print "Available serial ports are:" for p in result: print "==> {}".format(p) def do_msg_verbose(self, arg): args = self._parse_args(arg, 1) if args == None: return try: lvl = int(args[0]) self.vrb.set_level(lvl) except: self.vrb.err("Invalid argument!", 1) def help_msg_verbose(self): print "Sets the verbosity level of the CLI. Valid levels are from 0-4." print "Verbosity levels are cumulative, meaning that a level of 3 will" print "also print messages from levels 1 and 2." print "Setting a verbosity level >= {} will open a separate window that".format(self.vrb.console_level) print "will display the prints." print "In general, the different levels are used for:" print "The different levels are printed with different formatting:" for i in range(1, 5): print " " + self.vrb.msg_formatters[i]["fmt"].format("Level {}: {}".format(i, self.vrb.msg_formatters[i]["help"])) print "Note that error messages and help text are not affected by this setting." def do_raw_msg(self, arg): """ Send a raw packet. Bytes are specified in hex, with a colon separating each byte. Note that there is no receive method associated with this command. If verbosity is disabled, or there is no other handler, then there will be no handling of a received response. 
#>raw_msg AB:CD:EF:AB """ args = self._parse_args(arg, 1) if args == None: return data = binascii.unhexlify(args[0].replace(":", "")) self._send_packet(data) def do_exit(self, arg): """Exit the shell.""" try: self.m2m2_server.quit() except: pass quit() def do_plot(self, arg): global enable_csv_logs args = self._parse_args(arg, 1) if args == None: return for a in args: if a in stream_name_map: app_name = a address = stream_name_map[a]["application"] stream = stream_name_map[a]["stream"] else: address = None if address == None: self.vrb.err("Incorrect usage! You did not provide a valid stream.") return s = socket.socket() if "nt" in os.name: # If this is on windows, run the plotter automatically. Portnumber = self._get_free_port() if(Portnumber!= None): plotter_path = os.path.join(os.path.abspath(__file__), '../plotter.py') os.system("start cmd /k python {} {} {} {}".format(plotter_path, app_name,Portnumber,enable_csv_logs)) time.sleep(0) else: self.vrb.err("Could not find a free socket for plotting") return else: self.vrb.write("Automatic plot starting is not yet implemented on non-Windows platforms! Feel free to do it yourself ;)") for i in range(5): try: self.vrb.write("Connecting to the plotter...", 4) s.connect(('localhost',Portnumber)) self.sock_map[stream] = s self.vrb.write("Successfully connected to the plotter!", 4) return except socket.error: self.vrb.err("Count not connect to the plotter! 
Retrying {}...".format(5-i)) time.sleep(2) self.vrb.err("Failed to connect to the plotter!") def help_plot(self): device_str_list = "" for device in stream_name_map: application_name = self._get_enum_name(M2M2_ADDR_ENUM_t, int(stream_name_map[device]["application"])) if application_name == None: application_name = stream_name_map[device]["application"] stream_name = self._get_enum_name(M2M2_ADDR_ENUM_t, int(stream_name_map[device]["stream"])) if stream_name == None: stream_name = stream_name_map[device]["stream"] device_str_list += "\n'{}':{}:{}\n\t{}\n".format(device, application_name, stream_name, stream_name_map[device]["help"]) print "Plot a data stream." print "Note that you must start the stream separately." print print "Available streams: ('name':application:stream){}".format(device_str_list) print "-----------------------------------------------" print "Usage:" print " #>plot [stream]" print print " #>plot radpd1" print " #>plot radxl" print " #>plot rppg" print " #>plot reda" print " #>plot recg" print def do_getVersion(self, arg): """ Get the system application version information. Gets both the PS and PM versions. #>getVersion """ args = self._parse_args(arg, 0) if args == None: return version = None version = self._get_version(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM) if version != None: self._print_version_pkt(version) try: fw_ver_dict = {'major': int(version.payload.major), 'minor': int(version.payload.minor), 'patch': int(version.payload.patch), 'info': cast(version.payload.verstr, c_char_p).value, 'date': cast(version.payload.str, c_char_p).value.split('|')[-1]} err_stat = 0 except: fw_ver_dict = {} err_stat = 1 else: self.vrb.err("Timed out waiting for the PM version response.") fw_ver_dict = {} err_stat = 1 return err_stat, fw_ver_dict def do_getMcuVersion(self, arg): """ Get the MCU type information (M3/M4). 
#>getMcuVersion """ msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM, m2m2_pm_sys_mcu_version_t()) msg.payload.command = M2M2_PM_SYS_COMMAND_ENUM_t.M2M2_PM_SYS_COMMAND_GET_MCU_VERSION_REQ self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM, m2m2_pm_sys_mcu_version_t(), 10) if reply_msg != None: status = self._get_enum_name(M2M2_PM_SYS_MCU_TYPE_ENUM_t, reply_msg.payload.mcu) self.vrb.write(" Status: '{}'".format(status)) else: self.vrb.err("Get MCU version failed!") def do_ping(self, arg): """ Ping a system, to start getting a particular pkt_size every 20 ms. The approximate round-trip response time is recorded to calculate the throughput of BLE/USB. #>ping [system] [number of pings] [pkt size] #>ping PM 10 70 Pkt size Min Value: should be greater than or equal to 15 Pkt size Max Value: should be less than or equal to 244 """ if len(arg.split()) == 4: arg_len = 4 else: arg_len = 3 args = self._parse_args(arg, arg_len) enable_pong_print = True if args == None: self.vrb.write("Incorrect usage! 
Please check help.") return for a in args: if "pm" in a.lower(): addr = M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM try: num_pings = int(args[1]) pkt_sz = int(args[2]) if len(args) >= 4: enable_pong_print = eval(args[3]) else: enable_pong_print = True if (pkt_sz < 15) | (pkt_sz >= 245): self.vrb.write("Incorrect pkt size, Min Value: should be greater than or equal to 15; Max value: must be less than or equal to 244") return except: return self.vrb.write("Starting ping for count:{} pkt sz:{}".format(num_pings, pkt_sz)) missed_seq_no_list = [] total_time = 0 total_bytes = 0 src_addr = get_cli_addr() retry_cnt = 0 tx_pkt = m2m2_packet(addr, m2m2_app_common_ping_t()) tx_pkt.header.src = src_addr tx_pkt.header.dest = addr tx_pkt.header.length = pkt_sz tx_pkt.payload.command = M2M2_APP_COMMON_CMD_ENUM_t.M2M2_APP_COMMON_CMD_PING_REQ tx_pkt.payload.sequence_num = num_pings self._send_packet(tx_pkt) total_time = time.time() for i in range(num_pings): start_time = time.clock() rx_pkt = self._get_packet(addr, m2m2_app_common_ping_t(), 2) if rx_pkt == None : missed_seq_no_list.append(i+1) self.vrb.err("Request timed out for rx pkt {}!".format(i+1)) self.vrb.write("Request timed out for rx pkt {}!".format(i+1)) self.vrb.write("Missed packet sequence numbers: {}!".format(missed_seq_no_list)) retry_cnt += 1 if retry_cnt == 3: return elif rx_pkt.payload.sequence_num != i + 1: missed_seq_no_list.append(i+1) retry_cnt += 1 self.vrb.write("Incorrect ping response received! 
Expected: {} Got: {}".format(i + 1, rx_pkt.payload.sequence_num)) self.vrb.write("Missed packet sequence numbers: {}!".format(missed_seq_no_list)) if retry_cnt == 3: return else: elapsed_time = time.clock() - start_time if enable_pong_print: self.vrb.write("\tpong# {}: took {}s".format(i, elapsed_time), 1) total_bytes+=(pkt_sz) i += 1 total_time = time.time() - total_time self.vrb.write("Missed packet sequence numbers: {}!".format(missed_seq_no_list)) self.vrb.write("Total Bytes: {} byte Total time: {}sec!".format(total_bytes,total_time)) throughput = float(total_bytes)/total_time self.vrb.write("Throughput from CLI: {}byte/sec!".format(throughput)) self.vrb.write("Throughput theoretical: {}byte/sec!".format(pkt_sz*1000/20)) ping_results_dict = {'missed_pkt_seq_num': missed_seq_no_list, 'total_bytes': total_bytes, 'total_time': total_time, 'throughput_cli': throughput, 'throughput_theoretical': pkt_sz * 1000 / 20} return 0, ping_results_dict def do_getAdpdVersion(self, arg): """ Get the ADPD application version information. #>getAdpdVersion """ version = self._get_version(M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADPD4000) if version != None: self._print_version_pkt(version) else: self.vrb.err("Timed out waiting for the ADPD version response.") def help_loadAdpdCfg(self): enum_dict = self.get_enum_fields_vals(M2M2_SENSOR_ADPD_DEVICE_ID_ENUM_t) for key in enum_dict: print("{}::{}".format(key, enum_dict[key])) def get_enum_fields_vals(self,enum): # Save a copy of the dictionary so that we can remove items from it (can't remove items from a dictionary while iterating over that dictionary) d = dict(vars(enum)) for item in vars(enum): # Remove the internal variables that are used by the language (i.e. the __name__ field), which by convention start and end with double underscores if item.startswith("__") and item.endswith("__"): del(d[item]) return d def do_testCmd(self, arg): """ Issue a test command. 
#>testCmd [1] [2] [3] #>testCmd 1 0 """ args = self._parse_args(arg, 1) version = None msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADPD4000, m2m2_sensor_adpd_testcommand_resp_t()) msg.payload.command = M2M2_SENSOR_ADPD_COMMAND_ENUM_t.M2M2_SENSOR_ADPD_COMMAND_DO_TEST1_REQ msg.payload.retdata[0] = int(args[0]) self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADPD4000, m2m2_sensor_adpd_testcommand_resp_t(), 20) if reply_msg != None: status = self._get_enum_name(M2M2_APP_COMMON_STATUS_ENUM_t, reply_msg.payload.status) self.vrb.write(" Data1 '{}'".format(hex(reply_msg.payload.retdata[0])), 2) self.vrb.write(" Data2 '{}'".format(reply_msg.payload.retdata[1]), 2) self.vrb.write(" Data3 '{}'".format(reply_msg.payload.retdata[2]), 2) else: self.vrb.err("Test command failed!") def do_loadAdpdCfg(self, arg): args = self._parse_args(arg,1) if args == None: self.vrb.err("No configuration specified, loading the default ADPD4000 config", 2) return msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADPD4000, m2m2_sensor_adpd_resp_t()) msg.payload.command = M2M2_SENSOR_ADPD_COMMAND_ENUM_t.M2M2_SENSOR_ADPD_COMMAND_LOAD_CFG_REQ if args == None: msg.payload.deviceid = M2M2_SENSOR_ADPD_DEVICE_ID_ENUM_t.M2M2_SENSOR_ADPD4000_DEVICE_4000_G else: try: msg.payload.deviceid = int(args[0]) except ValueError: self._LoadCfg(".\\cfgs\\"+args[0]) return if (msg.payload.deviceid != M2M2_SENSOR_ADPD_DEVICE_ID_ENUM_t.M2M2_SENSOR_ADPD4000_DEVICE_4000_B) and (msg.payload.deviceid != M2M2_SENSOR_ADPD_DEVICE_ID_ENUM_t.M2M2_SENSOR_ADPD4000_DEVICE_4000_R) \ and (msg.payload.deviceid != M2M2_SENSOR_ADPD_DEVICE_ID_ENUM_t.M2M2_SENSOR_ADPD4000_DEVICE_4000_IR) and (msg.payload.deviceid != M2M2_SENSOR_ADPD_DEVICE_ID_ENUM_t.M2M2_SENSOR_ADPD4000_DEVICE_4000_G) \ and (msg.payload.deviceid != M2M2_SENSOR_ADPD_DEVICE_ID_ENUM_t.M2M2_SENSOR_ADPD4000_DEVICE_4000_G_R_IR_B): self.vrb.write("Invalid device type") return self._send_packet(msg) time.sleep(3) reply_msg = 
self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADPD4000, m2m2_sensor_adpd_resp_t(), 10) if reply_msg != None: status = self._get_enum_name(M2M2_APP_COMMON_STATUS_ENUM_t, reply_msg.payload.status) if reply_msg != None: self.vrb.write("Loaded adpd device configuration:", 2) self.vrb.write(" Device: '{}'".format(int(reply_msg.payload.deviceid)), 2) self.vrb.write(" Status: '{}'".format(status)) else: self.vrb.err("Loading Adpd device configuration failed!") def do_getCtrValue(self, arg): """ Get ctr value of a reflective object #>getCtrValue """ address = M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADPD4000 stream = M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADPD_STREAM1 msg = m2m2_packet(address, m2m2_sensor_adpd_resp_t()) msg.payload.command = M2M2_SENSOR_ADPD_COMMAND_ENUM_t.M2M2_SENSOR_ADPD_COMMAND_GET_CTR_REQ msg.payload.stream = stream self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADPD4000, m2m2_sensor_adpd_resp_t(), 20) if reply_msg != None: status = self._get_enum_name(M2M2_APP_COMMON_STATUS_ENUM_t, reply_msg.payload.status) self.vrb.write("Get CTR Value:", 2) self.vrb.write(" Status: '{}'".format(status)) self.vrb.write(" CTR =: '{}'".format(int(reply_msg.payload.retdata[0])), 2) else: self.vrb.err("Get CTR Value failed!") def do_getSlot(self, arg): """ Get the Adpd device slots. Return slot num, its data format. 
#>getSlot : show all the slots #>getSlot slot_num : show only this slot """ args = self._parse_args(arg, 1) if args == None: i_start = 1 i_stop = 13 else: i_start = int(args[0]) i_stop = int(args[0]) + 1 msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADPD4000, m2m2_sensor_adpd4000_slot_resp_t()) msg.payload.command = M2M2_SENSOR_ADPD_COMMAND_ENUM_t.M2M2_SENSOR_ADPD_COMMAND_GET_SLOT_REQ for i in range(i_start, i_stop): msg.payload.slot_num = i self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADPD4000, m2m2_sensor_adpd4000_slot_resp_t(), 20) if reply_msg != None: slotNum = reply_msg.payload.slot_num slotActive = reply_msg.payload.slot_enable slotChannel = reply_msg.payload.channel_num slotSz = reply_msg.payload.slot_format self.vrb.write(" Slot Num:='{}' Enable='{}' Channel='{}' Format='{}'".format(slotNum, slotActive, slotChannel, hex(slotSz)), 2) if reply_msg != None: status = self._get_enum_name(M2M2_APP_COMMON_STATUS_ENUM_t, reply_msg.payload.status) self.vrb.write(" Status: '{}'".format(status)) def do_setSlot(self, arg): """ Set the Adpd device slot. 
Slot settings are: Slot Size = 0IDS #>setSlot SLOT_NUM ENABLE CHANNEL_NUM SLOT_FORMAT #>SLOT_FORMAT 0IDS """ args = self._parse_args(arg, 4) if args == None: self.vrb.err("No slot settings supplied") return msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADPD4000, m2m2_sensor_adpd4000_slot_resp_t()) msg.payload.command = M2M2_SENSOR_ADPD_COMMAND_ENUM_t.M2M2_SENSOR_ADPD_COMMAND_SET_SLOT_REQ msg.payload.slot_num = int(args[0]) msg.payload.slot_enable = int(args[1]) msg.payload.channel_num = int(args[2]) msg.payload.slot_format = int(args[3], 16) self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADPD4000, m2m2_sensor_adpd4000_slot_resp_t(), 30) if reply_msg != None: status = self._get_enum_name(M2M2_APP_COMMON_STATUS_ENUM_t, reply_msg.payload.status) slotNum = reply_msg.payload.slot_num slotActive = reply_msg.payload.slot_enable slotChannel = reply_msg.payload.channel_num slotSz = reply_msg.payload.slot_format self.vrb.write(" Slot Num='{}', Enable='{}', Channel= '{}', Format='{}'".format(slotNum, slotActive, slotChannel, hex(slotSz)), 2) self.vrb.write(" Status: '{}'".format(status)) else: self.vrb.err("Slot setting failed!") def do_loadAdxlCfg(self, arg): """ Load the ADXL device configuration. 
The argument is the device ID to choose the dcfg file: '362' for ADXL362 #>loadAdxlCfg [device id] #>loadAdxlCfg 362 """ args = self._parse_args(arg, 1) if args == None: self.vrb.write("No configuration specified, loading the default ADXL362 config", 2) msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADXL, m2m2_sensor_adxl_resp_t()) msg.payload.command = M2M2_SENSOR_ADXL_COMMAND_ENUM_t.M2M2_SENSOR_ADXL_COMMAND_LOAD_CFG_REQ if args == None: msg.payload.deviceid = M2M2_SENSOR_ADXL_DEVICE_ID_ENUM_t.M2M2_SENSOR_ADXL_DEVICE_362 else: msg.payload.deviceid = int(args[0]) if msg.payload.deviceid != M2M2_SENSOR_ADXL_DEVICE_ID_ENUM_t.M2M2_SENSOR_ADXL_DEVICE_362: self.vrb.write("Invalid device type") return self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADXL, m2m2_sensor_adxl_resp_t(), 10) if reply_msg != None: status = self._get_enum_name(M2M2_APP_COMMON_STATUS_ENUM_t, reply_msg.payload.status) self.vrb.write("Loaded adxl device configuration:", 2) self.vrb.write(" Device: '{}'".format(int(reply_msg.payload.deviceid)), 2) self.vrb.write(" Status: '{}'".format(status)) else: self.vrb.err("Loading Adxl device configuration failed!") def do_adxl_self_test(self, arg): """ Do ADXL362 self test #>adxl_self_test """ msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADXL, m2m2_sensor_adxl_resp_t()) msg.payload.command = M2M2_SENSOR_ADXL_COMMAND_ENUM_t.M2M2_SENSOR_ADXL_COMMAND_SELF_TEST_REQ self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADXL, m2m2_sensor_adxl_resp_t(), 10) if reply_msg != None: status = self._get_enum_name(M2M2_APP_COMMON_STATUS_ENUM_t, reply_msg.payload.status) self.vrb.write("ADXL self test Done", 2) self.vrb.write(" Status: '{}'".format(status)) else: self.vrb.err("Adxl self test failed!") def do_getSlotActive(self, arg): """ Get the Adpd slots active state. 
#>getSlotActive : show all the slots #>getSlotActive slot_num : show only this slot #> 0 = inactive, 1 = active """ args = self._parse_args(arg, 1) if args == None: i_start = 1 i_stop = 13 else: i_start = int(args[0]) i_stop = int(args[0]) + 1 msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADPD4000, m2m2_sensor_adpd4000_slot_active_resp_t()) msg.payload.command = M2M2_SENSOR_ADPD_COMMAND_ENUM_t.M2M2_SENSOR_ADPD_COMMAND_GET_SLOT_ACTIVE_REQ for i in range(i_start, i_stop): msg.payload.slot_num = i self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADPD4000, m2m2_sensor_adpd4000_slot_active_resp_t(), 20) if reply_msg != None: slotNum = reply_msg.payload.slot_num slotActive = reply_msg.payload.slot_active self.vrb.write(" Slot Num:='{}' Active='{}' ".format(slotNum, slotActive), 2) if reply_msg != None: status = self._get_enum_name(M2M2_APP_COMMON_STATUS_ENUM_t, reply_msg.payload.status) self.vrb.write(" Status: '{}'".format(status)) def do_setSlotActive(self, arg): """ Set the Adpd slot active. 
#>setSlotActive SLOT_NUM ENABLE ACTIVE #> 0 = inactive, 1 = active """ args = self._parse_args(arg, 2) if args == None: self.vrb.err("No slot settings supplied") return msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADPD4000, m2m2_sensor_adpd4000_slot_active_resp_t()) msg.payload.command = M2M2_SENSOR_ADPD_COMMAND_ENUM_t.M2M2_SENSOR_ADPD_COMMAND_SET_SLOT_ACTIVE_REQ msg.payload.slot_num = int(args[0]) msg.payload.slot_active = int(args[1]) self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADPD4000, m2m2_sensor_adpd4000_slot_active_resp_t(), 30) if reply_msg != None: status = self._get_enum_name(M2M2_APP_COMMON_STATUS_ENUM_t, reply_msg.payload.status) slotNum = reply_msg.payload.slot_num slotActive = reply_msg.payload.slot_active self.vrb.write(" Slot Num='{}', Active='{}'".format(slotNum, slotActive), 2) self.vrb.write(" Status: '{}'".format(status)) else: self.vrb.err("Slot setting failed!") def do_setDecSample(self, arg): """ Set the decimation factor for stream samples. The argument is the stream and decimation factor: Note that decimation of samples is limited to radpd, radxl, recg, rsyncppg streams Eg: = setDecSample radpd 2 """ address = None args = self._parse_args(arg, 2) if args == None: return for a in args: if a in stream_name_map: address = stream_name_map[a]["application"] stream = stream_name_map[a]["stream"] if address == None: self.vrb.err("Incorrect usage! 
You did not provide a valid stream.") return if address in [M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADPD4000 , M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADXL,\ M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_EDA, M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_ECG ]: msg = m2m2_packet(address, m2m2_sensor_common_decimate_stream_t()) msg.payload.command = M2M2_SENSOR_COMMON_CMD_ENUM_t.M2M2_SENSOR_COMMON_CMD_SET_STREAM_DEC_FACTOR_REQ msg.payload.stream = stream if args[1] != None: msg.payload.dec_factor = int(args[1]) else: self.vrb.write("No decimation factor specified, setting the default to 1", 2) msg.payload.dec_factor = 1 self._send_packet(msg) reply_msg = self._get_packet(address, m2m2_sensor_common_decimate_stream_t(), 10) if reply_msg != None: status = self._get_enum_name(M2M2_APP_COMMON_STATUS_ENUM_t, reply_msg.payload.status) self.vrb.write("Decimation factor set: '{}'".format(reply_msg.payload.dec_factor)) self.vrb.write(" Status: '{}'".format(status)) else: self.vrb.err("Failed setting the decimation factor!") else: self.vrb.err("Unsupported for this application!") return def do_getDecSample(self, arg): """ Get the decimation factor for stream samples. The argument is the stream Note that decimation of samples is limited to radpd, radxl, recg, rsyncppg streams Eg: = getDecSample radpd """ address = None args = self._parse_args(arg, 1) if args == None: return for a in args: if a in stream_name_map: address = stream_name_map[a]["application"] stream = stream_name_map[a]["stream"] if address == None: self.vrb.err("Incorrect usage! 
You did not provide a valid stream.") return if address in [M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADPD4000 , M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADXL,\ M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_EDA, M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_ECG ]: msg = m2m2_packet(address, m2m2_sensor_common_decimate_stream_t()) msg.payload.command = M2M2_SENSOR_COMMON_CMD_ENUM_t.M2M2_SENSOR_COMMON_CMD_GET_STREAM_DEC_FACTOR_REQ msg.payload.stream = stream self._send_packet(msg) reply_msg = self._get_packet(address, m2m2_sensor_common_decimate_stream_t(), 10) if reply_msg != None: status = self._get_enum_name(M2M2_APP_COMMON_STATUS_ENUM_t, reply_msg.payload.status) dec_factor = reply_msg.payload.dec_factor self.vrb.write("Got the Decimation factor:") self.vrb.write(" Decimation Factor: '{}'".format(dec_factor)) self.vrb.write(" Status: '{}'".format(status)) else: self.vrb.err("Failed getting the decimation factor!") else: self.vrb.err("Unsupported for this application!") return def do_setPause(self, arg): """ Set device to pause or unpause #>setPause 1|0 """ args = self._parse_args(arg, 1) if args != None: pauseEnable = int(args[0]) else: pauseEnable = 1 msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADPD4000, m2m2_sensor_adpd4000_resp_t()) msg.payload.command = M2M2_SENSOR_ADPD_COMMAND_ENUM_t.M2M2_SENSOR_ADPD_COMMAND_SET_PAUSE_REQ msg.payload.retdata[0] = pauseEnable self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADPD4000, m2m2_sensor_adpd4000_resp_t(), 10) if reply_msg != None: status = self._get_enum_name(M2M2_APP_COMMON_STATUS_ENUM_t, reply_msg.payload.status) self.vrb.write("Set pause device: {}".format(status)) else: self.vrb.err("Set pause device failed!") def do_fs_DebugInfo(self, arg): """ File system debug information, Command to get the packet loss and count information. 
#>fs_DebugInfo radpd """ args = self._parse_args(arg, 1) if args == None: return for a in args: if a in stream_name_map: stream = stream_name_map[a]["stream"] if stream == None: self.vrb.err("Incorrect usage! You did not provide a valid stream.") return msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_FS, m2m2_file_sys_debug_info_req_t()) msg.payload.command = M2M2_FILE_SYS_CMD_ENUM_t.M2M2_FILE_SYS_CMD_GET_FS_DEBUG_INFO_REQ msg.payload.stream = stream self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_FS, m2m2_file_sys_debug_info_resp_t(), 10) if reply_msg != None: self._print_file_system_status(reply_msg) self.vrb.write(" Total Packets received : '{}'".format(int(reply_msg.payload.packets_received))) self.vrb.write(" Total Packets missed : '{}'".format(int(reply_msg.payload.packets_missed))) self.vrb.write(" last page read : '{}'".format(int(reply_msg.payload.last_page_read))) self.vrb.write(" last page read offset : '{}'".format(int(reply_msg.payload.last_page_read_offset))) self.vrb.write(" number of bytes transferred : '{}'".format(int(reply_msg.payload.num_bytes_transferred))) self.vrb.write(" usb cdc write failed status : '{}'".format(int(reply_msg.payload.usb_cdc_write_failed))) self.vrb.write(" bytes processed from fs task : '{}'".format(int(reply_msg.payload.bytes_read))) else: self.vrb.err("No response from device.Getting volume info failed.") def do_fs_format_DebugInfo(self, arg): """ File system debug information, Command to get the packet loss and count information. 
#>fs_format_DebugInfo """ args = self._parse_args(arg, 0) if args == None: return msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_FS, m2m2_file_sys_format_debug_info_req_t()) msg.payload.command = M2M2_FILE_SYS_CMD_ENUM_t.M2M2_FILE_SYS_CMD_GET_FS_FORMAT_INFO_REQ self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_FS, m2m2_file_sys_format_debug_info_resp_t(), 10) if reply_msg != None: self._print_file_system_status(reply_msg) self.vrb.write(" Start block for fs format : '{}'".format(int(reply_msg.payload.format_src_blk_ind))) self.vrb.write(" End block for fs format without wrap around : '{}'".format(int(reply_msg.payload.format_dest_blk_ind_1))) self.vrb.write(" End block for fs format with wrap around : '{}'".format(int(reply_msg.payload.format_dest_blk_ind_2))) self.vrb.write(" wrap around condition flag status : '{}'".format(int(reply_msg.payload.wrap_around_cond))) self.vrb.write(" Nothing is written erase flag status : '{}'".format(int(reply_msg.payload.nothing_is_written_to_erase_error))) self.vrb.write(" Memory full flag in partial erase : '{}'".format(int(reply_msg.payload.mem_full_in_partial_erase))) self.vrb.write(" Number of blocks to be erased when memory full : '{}'".format(int(reply_msg.payload.mem_full_in_partial_erase))) self.vrb.write(" Number of blocks to be erased partial erase 1 : '{}'".format(int(reply_msg.payload.num_blocks_erased_in_partial_erase_1))) self.vrb.write(" Number of blocks to be erased partial erase 2 : '{}'".format(int(reply_msg.payload.num_blocks_erased_in_partial_erase_2))) self.vrb.write(" Number of times format failed due to bad blocks 1: '{}'".format(int(reply_msg.payload.num_times_format_failed_due_bad_blocks_1))) self.vrb.write(" Number of times format failed due to bad blocks 2 : '{}'".format(int(reply_msg.payload.num_times_format_failed_due_bad_blocks_2))) self.vrb.write(" TOC memory erased flag : '{}'".format(int(reply_msg.payload.toc_mem_erased_flag))) self.vrb.write(" Erase failed due 
to bad block : '{}'".format(int(reply_msg.payload.erase_failed_due_bad_block_check))) self.vrb.write(" Successfull erase flag : '{}'".format(int(reply_msg.payload.succesfull_erase_flag))) else: self.vrb.err("fs format debug info failed!!.") def do_getSystemInfo(self, arg): """ Get the PM device system Information #>getSystemInfo """ sys_info_dict = {} msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM, m2m2_pm_sys_cmd_t()) msg.payload.command = M2M2_PM_SYS_COMMAND_ENUM_t.M2M2_PM_SYS_COMMAND_GET_INFO_REQ self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM, m2m2_pm_sys_info_t(), 10) if reply_msg != None: mac_string = ''.join('{:02X}'.format(x) for x in reply_msg.payload.mac_addr) self.vrb.write("System Information") self.vrb.write(" Version: '{}'".format(reply_msg.payload.version)) self.vrb.write(" MAC Address: '{}'".format(mac_string)) self.vrb.write(" Device ID: '{}'".format(reply_msg.payload.device_id)) self.vrb.write(" Model Number:'{}'".format(reply_msg.payload.model_number)) self.vrb.write(" PS Hardware: '{}'".format(reply_msg.payload.hw_id)) self.vrb.write(" BOM: '{}'".format(reply_msg.payload.bom_id)) self.vrb.write(" Batch: '{}'".format(reply_msg.payload.batch_id)) self.vrb.write(" Date: '{}'".format(reply_msg.payload.date)) board_type = self._get_enum_name(ADI_PM_BOARD_TYPE_t, reply_msg.payload.board_type) self.vrb.write(" Board_Type: '{}'".format(board_type)) status = self._get_enum_name(M2M2_PM_SYS_STATUS_ENUM_t, reply_msg.payload.status) self.vrb.write(" Status: '{}'".format(status)) sys_info_dict['version'] = reply_msg.payload.version sys_info_dict['mac_addr'] = mac_string sys_info_dict['device_id'] = reply_msg.payload.device_id sys_info_dict['model_number'] = reply_msg.payload.model_number sys_info_dict['hw_id'] = reply_msg.payload.hw_id sys_info_dict['bom_id'] = reply_msg.payload.bom_id sys_info_dict['batch_id'] = reply_msg.payload.batch_id sys_info_dict['date'] = reply_msg.payload.date sys_info_dict['board_type'] = 
reply_msg.payload.board_type error_stat = 0 else: self.vrb.err("response timeout from device.") error_stat = 1 return error_stat, sys_info_dict def do_msg_debug(self, arg): args = self._parse_args(arg.lower(), 1) if args == None: self.vrb.err("No argument was supplied!") return addr = M2M2_ADDR_ENUM_t.M2M2_ADDR_POST_OFFICE pkt = m2m2_packet(addr, post_office_config_t()) if "on" in args: pkt.payload.cmd = POST_OFFICE_CFG_CMD_ENUM_t.POST_OFFICE_CFG_CMD_MAILBOX_SUBSCRIBE else: pkt.payload.cmd = POST_OFFICE_CFG_CMD_ENUM_t.POST_OFFICE_CFG_CMD_MAILBOX_UNSUBSCRIBE pkt.payload.sub = M2M2_ADDR_ENUM_t.M2M2_ADDR_APP_CLI pkt.payload.box = M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_DBG_STREAM self._send_packet(pkt) def help_msg_debug(self): print "Enable/Disable debug messages output from the firmware" print "Note that you must use Verbosity levels 2 and above." print print "-----------------------------------------------" print "Usage:" print " #>msg_debug [on/off]" print def do_clockCalibration(self, arg): """ Calibrate the 32M and 1M or 32K clock to reduce the deviation to a minimum #>clockCalibration '0' for no calibration '1' for 32K clock calibration '2' for 1M clock calibration '4' for 32M clock calibration '5' for 32K and 32M clock calibration '6' for 1M and 32M clock calibration ----------------------------------------------- Usage: #>clockCalibration [clockcalid] #>clockCalibration 6 """ args = arg.split() if len(args) == 0: self.vrb.write("No clockcalid specified", 2) msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADPD4000, m2m2_sensor_clockcal_resp_t()) msg.payload.command = M2M2_SENSOR_ADPD_COMMAND_ENUM_t.M2M2_SENSOR_ADPD_COMMAND_CLOCK_CAL_REQ if len(args) == 0: msg.payload.clockcalid = self.clk_calib_val else: msg.payload.clockcalid = int(args[0]) self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADPD4000, m2m2_sensor_clockcal_resp_t(), 10) if reply_msg != None: status = self._get_enum_name(M2M2_APP_COMMON_STATUS_ENUM_t, 
reply_msg.payload.status) self.vrb.write("Clock Calibration:", 2) self.vrb.write(" Status: '{}'".format(status)) else: self.vrb.err("Clock Calibration failed!") def do_getAdpdComModeCl(self, arg): """ Get ctr value of a reflective object #>getAdpdComModeCl """ msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADPD4000, m2m2_sensor_com_mode_resp_t()) msg.payload.command = M2M2_SENSOR_ADPD_COMMAND_ENUM_t.M2M2_SENSOR_ADPD_COMMUNICATION_MODE_REQ self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADPD4000, m2m2_sensor_com_mode_resp_t(), 20) if reply_msg != None: status = self._get_enum_name(M2M2_APP_COMMON_STATUS_ENUM_t, reply_msg.payload.status) self.vrb.write("Get ADPD4000 Communication Mode:", 2) self.vrb.write(" Status: '{}'".format(status)) self.vrb.write(" Com Mode =: '{}'".format(int(reply_msg.payload.com_mode)), 2) else: self.vrb.err("Get ADPD Communication mode failed!") def do_getAdpdComMode(self, arg): """ Get ctr value of a reflective object #>getAdpdComMode """ msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADPD, m2m2_sensor_com_mode_resp_t()) msg.payload.command = M2M2_SENSOR_ADPD_COMMAND_ENUM_t.M2M2_SENSOR_ADPD_COMMUNICATION_MODE_REQ self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADPD, m2m2_sensor_com_mode_resp_t(), 20) if reply_msg != None: status = self._get_enum_name(M2M2_APP_COMMON_STATUS_ENUM_t, reply_msg.payload.status) self.vrb.write("Get ADPD Communication Mode:", 2) self.vrb.write(" Status: '{}'".format(status)) self.vrb.write(" Com Mode =: '{}'".format(int(reply_msg.payload.com_mode)), 2) else: self.vrb.err("Get ADPD Communication mode failed!") def do_sensor(self, arg): address = None args = self._parse_args(arg, 2) max_retry_cnt=1 if args == None: return for a in args: if a in application_name_map: address = application_name_map[a]["address"] if address == None: self.vrb.err("Incorrect usage! 
You did not provide a valid device.") return msg = m2m2_packet(address, m2m2_app_common_sub_op_t()) if "start" in args: msg.payload.command = M2M2_APP_COMMON_CMD_ENUM_t.M2M2_APP_COMMON_CMD_STREAM_START_REQ elif "stop" in args: msg.payload.command = M2M2_APP_COMMON_CMD_ENUM_t.M2M2_APP_COMMON_CMD_STREAM_STOP_REQ while max_retry_cnt > 0: self._send_packet(msg) reply_msg = self._get_packet(address, m2m2_app_common_sub_op_t(), 20) if reply_msg != None: self._print_packet_status(reply_msg) break else: self.vrb.err("The device did not respond!") max_retry_cnt = max_retry_cnt-1 def help_sensor(self): device_str_list = "" for device in application_name_map: device_str_list += "\n'{}': {}".format(device, application_name_map[device]["help"]) print "Start or stop a sensor." print print "Available devices:{}".format(device_str_list) print "-----------------------------------------------" print "Usage:" print " #>sensor [device] [start/stop]" print print " #>sensor adxl start" print " #>sensor adpd stop" print def do_sub(self, arg): address = None args = self._parse_args(arg, 2) max_retry_cnt=1 if args == None: return for a in args: if a in stream_name_map: address = stream_name_map[a]["application"] stream = stream_name_map[a]["stream"] if address == None: self.vrb.err("Incorrect usage! You did not provide a valid stream.") return msg = m2m2_packet(address, m2m2_app_common_sub_op_t()) if "r" in args or "remove" in args: msg.payload.command = M2M2_APP_COMMON_CMD_ENUM_t.M2M2_APP_COMMON_CMD_STREAM_UNSUBSCRIBE_REQ elif "a" in args or "add" in args: msg.payload.command = M2M2_APP_COMMON_CMD_ENUM_t.M2M2_APP_COMMON_CMD_STREAM_SUBSCRIBE_REQ else: self.vrb.err("Incorrect usage! 
You did not provide a valid subscription operation.") return msg.payload.stream = stream while max_retry_cnt > 0: self._send_packet(msg) reply_msg = self._get_packet(address, m2m2_app_common_sub_op_t(), 20) if reply_msg != None: self._print_subscription_status(reply_msg) break else: self.vrb.err("No response from device. Subscription operation failed.") max_retry_cnt=max_retry_cnt-1 def help_sub(self): device_str_list = "" for device in stream_name_map: application_name = self._get_enum_name(M2M2_ADDR_ENUM_t, int(stream_name_map[device]["application"])) if application_name == None: application_name = stream_name_map[device]["application"] stream_name = self._get_enum_name(M2M2_ADDR_ENUM_t, int(stream_name_map[device]["stream"])) if stream_name == None: stream_name = stream_name_map[device]["stream"] device_str_list += "\n'{}':{}:{}\n\t{}\n".format(device, application_name, stream_name, stream_name_map[device]["help"]) print "Subscribe to or unsubscribe from a data stream." print "Note that you must start the stream separately." print print "Available streams: ('name':application:stream){}".format(device_str_list) print "-----------------------------------------------" print "Usage:" print " #>sub [stream] [a(dd)/r(emove)]" print print " #>sub radxl add" print " #>sub radpd remove" print def do_reg(self, arg): ''' Perform a register operation on a device. ''' args = self._parse_args(arg, None) address = None ops = [] if len(args) == 0: self.vrb.err("No arguments supplied!") return 1, [] if (("r" or "read") in args and ("w" or "write") in args): self.vrb.err("Incorrect usage! You can read OR write, not both.") return 1, [] for l in args: if l in application_name_map: address = application_name_map[l]["address"] if address == None: self.vrb.err("Incorrect usage! You did not provide a valid device.") return 1, [] if ("r" or "read") in args: do_write = False elif ("w" or "write") in args: do_write = True else: self.vrb.err("Incorrect usage! 
You did not specify if you want to read or write!") return 1, [] for arg in args: # Filter out the device argument if ("0x" or ":") in arg: # See if it's a address:value pair if ":" in arg: reg_addr = int(arg.split(':')[0], 16) reg_val = int(arg.split(':')[1], 16) # See if it's just an address elif "0x" in arg: reg_addr = int(arg, 16) reg_val = 0 ops.append({"address":reg_addr, "value":reg_val}) resp = self._reg_op(address, ops, do_write) reg_result_list = [] if resp != None: self._print_reg_result(resp) for i in range(resp.payload.num_ops): reg_result_list.append((hex(resp.payload.ops[i].address), hex(resp.payload.ops[i].value))) err_stat = 0 else: self.vrb.err("The device did not respond!") err_stat = 1 return err_stat, reg_result_list def help_reg(self): device_str_list = "" for device in application_name_map: device_str_list += "\n'{}': {}".format(device, application_name_map[device]["help"]) print "Operate on a register. You can read OR write, and pass up to 10 register operations per command." print print "For adxl, only a range of registers are writable. Refer AdxlDcfg.h for this register map" print "In the register map, bits CANNOT be written in fields which are mentioned as UNUSED in" print "data sheet. Any write to the UNUSED bits are silently ignored during this command." 
print "List of UNUSED bits for various ADXL362 registers:" print "---------------------------------" print "| Register | UNUSED bits |" print "---------------------------------" print "| 0x21 | [7:3] |" print "---------------------------------" print "| 0x24 | [7:3] |" print "---------------------------------" print "| 0x28 | [7:4] |" print "---------------------------------" print "| 0x2E | [7:1] |" print "---------------------------------" print print "Available devices:{}".format(device_str_list) print "-----------------------------------------------" print "Usage:" print " #>reg [read/write] [device] [address](:[value])" print print " #>reg r adpd 0x11 0x25" print " #>reg r ad5940 0x000021E0" print " #>reg w adxl 0x01:0x34 0x20:0x69 0x69:0xAB" print " #>reg w adpd 0x01:0x1234 0x20:0x6959 0x69:0xBEEF" print " #>reg w ad5940 0x000021E0:0x00000002" print print " Note that if you perform a write operation and give only an address, a value of 0 will be written." print " These two commands do the same thing:" print " #>reg w adxl 0x01:0x00" print " #>reg w adxl 0x01" print def do_getDcfg(self, arg): """ Get the DCFG of the device. The actual contents of dcfg registers is returned in RegisterValue format as a 32-bit value """ args = self._parse_args(arg, None) address = None if len(args) == 0: self.vrb.err("No arguments supplied!") return for device in args: if device in application_name_map: address = application_name_map[device]["address"] if address == None: self.vrb.err("Incorrect usage! You did not provide a valid device.") return resp = [] msg = m2m2_packet(address, m2m2_pm_sys_cmd_t()) msg.payload.command = M2M2_SENSOR_COMMON_CMD_ENUM_t.M2M2_SENSOR_COMMON_CMD_GET_DCFG_REQ self._send_packet(msg) if address == M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADPD4000 or address == M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_TEMPERATURE: _resp = self._get_packet(address, m2m2_sensor_dcfg_data_t(), 20) if _resp == None: self.vrb.err("Error! 
Timed out waiting for the device!") return 1 status = self._get_enum_name(M2M2_APP_COMMON_STATUS_ENUM_t, _resp.payload.status) resp.append(_resp) pkt_count = _resp.payload.num_tx_pkts; for i in range(pkt_count - 1): _resp = self._get_packet(address, m2m2_sensor_dcfg_data_t(), 20) if _resp == None: self.vrb.err("Error! Timed out waiting for the device!") return 1 status = self._get_enum_name(M2M2_APP_COMMON_STATUS_ENUM_t, _resp.payload.status) resp.append(_resp) print "Obtained the ADPD Device Configuration:" self.vrb.write(" Num of dcfg registers: '{}'".format(int(sum(obj.payload.size for obj in resp)))) self._print_dcfg_result(resp, device) print "Status: '{}'".format(status) elif address == M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADXL: _resp = self._get_packet(address, m2m2_sensor_dcfg_data_t(), 20) if _resp == None: self.vrb.err("Error! Timed out waiting for the device!") return 1 else: status = self._get_enum_name(M2M2_APP_COMMON_STATUS_ENUM_t, _resp.payload.status) print "Obtained the ADXL Device Configuration:" resp.append(_resp) self._print_dcfg_result(resp, device) print "Status: '{}'".format(status) else: self.vrb.err("Incorrect usage! 
You did not provide a valid device.") def do_dumpAdpdCfg(self, arg): """ Dump all the register values to a file #>dumpAdpdCfg [filename] """ args = self._parse_args(arg, None) if len(args) == 0: self.vrb.err("No arguments supplied!") filename_dump = ".\\cfgs\\dcfg_dump.dcfg" else: filename_dump = ".\\cfgs\\" + args[0] try: fileHandler = open(filename_dump, "w") except IOError: print filename_dump + " not found" else: print "dump data to " + filename_dump # range [0, 2e], [a0-b8], [100, 117] [120, 137], [140, 157] start_a = 0 end_a = 0x2E length = end_a - start_a + 1 msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADPD4000, m2m2_sensor_common_reg_op_16_hdr_t(length)) cmd = M2M2_SENSOR_COMMON_CMD_ENUM_t.M2M2_SENSOR_COMMON_CMD_READ_REG_16_REQ msg.payload.command = cmd msg.payload.num_ops = length for i in range(0, length): msg.payload.ops[i].address = start_a + i self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADPD4000, m2m2_sensor_common_reg_op_16_hdr_t(length), 1) for i in range (0, length): address = '{:04x}'.format(reply_msg.payload.ops[i].address) value = '{:04x}'.format(reply_msg.payload.ops[i].value) message = address + " " + value + "\n" fileHandler.write(message) # range [0, 2e], [a0-b8], [100, 117] [120, 137], [140, 157] start_a = 0xA0 end_a = 0xB8 length = end_a - start_a + 1 msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADPD4000, m2m2_sensor_common_reg_op_16_hdr_t(length)) cmd = M2M2_SENSOR_COMMON_CMD_ENUM_t.M2M2_SENSOR_COMMON_CMD_READ_REG_16_REQ msg.payload.command = cmd msg.payload.num_ops = length for i in range(0, length): msg.payload.ops[i].address = start_a + i self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADPD4000, m2m2_sensor_common_reg_op_16_hdr_t(length), 1) for i in range (0, length): address = '{:04x}'.format(reply_msg.payload.ops[i].address) value = '{:04x}'.format(reply_msg.payload.ops[i].value) message = address + " " + value + "\n" 
fileHandler.write(message) # range [0, 2e], [a0-b8], [100, 117] [120, 137], [140, 157] start_a = 0x100 end_a = 0x117 length = end_a - start_a + 1 msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADPD4000, m2m2_sensor_common_reg_op_16_hdr_t(length)) cmd = M2M2_SENSOR_COMMON_CMD_ENUM_t.M2M2_SENSOR_COMMON_CMD_READ_REG_16_REQ msg.payload.command = cmd msg.payload.num_ops = length for i in range(0, length): msg.payload.ops[i].address = start_a + i self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADPD4000, m2m2_sensor_common_reg_op_16_hdr_t(length), 1) for i in range (0, length): address = '{:04x}'.format(reply_msg.payload.ops[i].address) value = '{:04x}'.format(reply_msg.payload.ops[i].value) message = address + " " + value + "\n" fileHandler.write(message) fileHandler.close() def help_getDcfg(self): device_str_list = "" for device in application_name_map: device_str_list += "\n'{}': {}".format(device, application_name_map[device]["help"]) print "Get the dcfg" print print "Available devices:{}".format(device_str_list) print "-----------------------------------------------" print "Usage:" print " #>getDcfg [device]" print print " #>getDcfg adpd" print " #>getDcfg adxl" print def do_status(self, arg): """ Query status of the sensor #>status [sensor_name] [slot_no] Arguments: sensor_name -> is the name of the sensor slot -> radpd1 to radpd12 for ADPD slot A to slot L, mandatory only when sensor_name is adpd4000 Eg: #>status adxl #>status adpd4000 radpd6 """ address = None stream = None args = self._parse_args(arg, None) if args == None: return for a in args: if a in application_name_map: address = application_name_map[a]["address"] if a in stream_name_map: if address == M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADPD4000: stream = stream_name_map[a]["stream"] if address == M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_TEMPERATURE: stream = stream_name_map[a]["stream"] if address == None: self.vrb.err("Incorrect usage! 
You did not provide a valid device.") return if address == M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADPD4000 and stream == None: self.vrb.err("Incorrect usage! You did not provide a valid slot for ADPD.") return if address == M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_TEMPERATURE and stream == None: self.vrb.err("Incorrect usage! You did not provide a valid slot for Temperature.") return msg = m2m2_packet(address, m2m2_app_common_status_t()) msg.payload.command = M2M2_APP_COMMON_CMD_ENUM_t.M2M2_APP_COMMON_CMD_SENSOR_STATUS_QUERY_REQ if address != M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADPD4000 and address != M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_TEMPERATURE: msg.payload.stream = address else: msg.payload.stream = stream self._send_packet(msg) reply_msg = self._get_packet(address, m2m2_app_common_status_t(), 10) if reply_msg != None: self._print_sensor_app_status(reply_msg) else: self.vrb.err("The device did not respond!") def help_status(self): device_str_list = "" for device in application_name_map: device_str_list += "\n'{}': {}".format(device, application_name_map[device]["help"]) print "Get status of a sensor or application." print print "Available devices:{}".format(device_str_list) print "-----------------------------------------------" print "Usage:" print " #>status [device]" print print " #>status adpd4000" print " #>status adxl" print " #>status ppg" print def do_get_sensor_apps_status(self, arg): address = M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PS msg = m2m2_packet(address, m2m2_app_common_status_t()) msg.payload.command = M2M2_PS_SYS_COMMAND_ENUM_t.M2M2_PS_SYS_COMMAND_GET_PS_APPS_INFO_REQ msg.payload.stream = address self._send_packet(msg) reply_msg = self._get_packet(address, m2m2_ps_sys_sensor_apps_info_req_t(), 100) if reply_msg != None: self._print_sensor_apps_info_status(reply_msg) else: self.vrb.err("The device did not respond!") @cli_logger def do_quickstart(self, arg): ''' Execute pre-set startup sequences for different sensors. 
''' args = self._parse_args(arg, None) if args == None: self.vrb.err("Incorrect usage! You did not provide any arguments.") if args[0] in self.quickstarts: for command in self.quickstarts[args[0]]["commands"]: if(lowtouch.Enable_lowtouch == True) and lowtouch.Stopcmd == False: lowtouch.Startcmd = True self.onecmd(command) def help_quickstart(self): sequence_str_list = "" for sequence in self.quickstarts: sequence_str_list += "\n===============================" sequence_str_list += "\n'{}': {}".format(sequence, self.quickstarts[sequence]["help"]) for cmd in self.quickstarts[sequence]["commands"]: sequence_str_list += "\n\t->{}".format(cmd) print "Execute pre-defined startup sequences for different sensors." print "Implemented sequences are:{}".format(sequence_str_list) @cli_logger def do_quickstop(self, arg): ''' Execute pre-set stop sequences for different applications/sensors. ''' args = self._parse_args(arg, None) if args == None: self.vrb.err("Incorrect usage! You did not provide any arguments.") if args[0] in self.quickstops: for command in self.quickstops[args[0]]["commands"]: if(lowtouch.Enable_lowtouch == True) and lowtouch.Startcmd == True: lowtouch.Stopcmd = True lowtouch.Startcmd = False self.onecmd(command) def help_quickstop(self): sequence_str_list = "" for sequence in self.quickstops: sequence_str_list += "\n===============================" sequence_str_list += "\n'{}': {}".format(sequence, self.quickstops[sequence]["help"]) for cmd in self.quickstops[sequence]["commands"]: sequence_str_list += "\n\t->{}".format(cmd) print "Execute pre-defined stop sequences for different applications/sensors." print "Implemented sequences are:{}".format(sequence_str_list) def do_setPpgLcfg(self, arg): """ Set the PPG LCFG. The argument is the LCFG ID to choose the ppg lcfg file. 
Supported values are either: '105' for ADPD105 '107' for ADPD107 Eg: = setPpgLcfg 107 """ args = self._parse_args(arg, 1) if args == None: self.vrb.write("No configuration specified, setting the default ADPD107 library config", 2) msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_PPG, ppg_app_set_lcfg_req_t()) msg.payload.command = M2M2_APP_COMMON_CMD_ENUM_t.M2M2_APP_COMMON_CMD_SET_LCFG_REQ if args == None: msg.payload.lcfgid = M2M2_SENSOR_PPG_LCFG_ID_ENUM_t.M2M2_SENSOR_PPG_LCFG_ID_ADPD4000 else: msg.payload.lcfgid = int(args[0]) if msg.payload.lcfgid not in (M2M2_SENSOR_PPG_LCFG_ID_ENUM_t.M2M2_SENSOR_PPG_LCFG_ID_ADPD107, M2M2_SENSOR_PPG_LCFG_ID_ENUM_t.M2M2_SENSOR_PPG_LCFG_ID_ADPD185, M2M2_SENSOR_PPG_LCFG_ID_ENUM_t.M2M2_SENSOR_PPG_LCFG_ID_ADPD188, M2M2_SENSOR_PPG_LCFG_ID_ENUM_t.M2M2_SENSOR_PPG_LCFG_ID_ADPD108, M2M2_SENSOR_PPG_LCFG_ID_ENUM_t.M2M2_SENSOR_PPG_LCFG_ID_ADPD4000): self.vrb.write("Invalid lcfg id") return self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_PPG, ppg_app_set_lcfg_resp_t(), 40) if reply_msg != None: status = self._get_enum_name(M2M2_APP_COMMON_STATUS_ENUM_t, reply_msg.payload.status) if reply_msg != None: self.vrb.write("Set the PPG library configuration:", 2) self.vrb.write(" Device: '{}'".format(int(msg.payload.lcfgid)), 1) self.vrb.write(" Status: '{}'".format(status), 1) else: self.vrb.err("Loading Ppg library configuration failed!") def do_lcfgPpgRead(self, arg): """ Read the PPG LCFG used in the Watch. The argument are the PPG LCFG addresses or the LCFG ID to choose from the ppg lcfg file: Note that the the range of addr varies from 0x0 to 0x2E Eg: = lcfgPpgRead addr1 addr2 ... 
""" args = self._parse_args(arg, None) if len(args) == 0: self._p_err("No arguments supplied!") return num_ops = len(args) msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_PPG, ppg_app_lcfg_op_hdr_t(num_ops)) msg.payload.command = M2M2_APP_COMMON_CMD_ENUM_t.M2M2_APP_COMMON_CMD_READ_LCFG_REQ msg.payload.num_ops = num_ops for i in range(num_ops): tempVal = args[i] if ("0x") in tempVal: reg_addr = int(tempVal, 16) elif ("0X") in tempVal: reg_addr = int(tempVal, 16) else: reg_addr = int(tempVal) msg.payload.ops[i].field = reg_addr self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_PPG, ppg_app_lcfg_op_hdr_t(num_ops), 60) if reply_msg != None: status = self._get_enum_name(M2M2_APP_COMMON_STATUS_ENUM_t, reply_msg.payload.status) if reply_msg == None: print "Reading PPG LCFG failed!" return self._print_ppg_lcfg_result(reply_msg) def do_lcfgPpgWrite(self, arg): """ Set the PPG LCFG. The argument is the PPG LCFG addresses or the LCFG ID, then VALUE to modify the ppg lcfg value: Note that the the range of addr varies from 0x0 to 0x2E Eg: = lcfgPpgWrite addr1 value1 addr2 value2 ... 
""" args = self._parse_args(arg, None) if len(args) == 0: self._p_err("No arguments supplied!") return num_ops = len(args) num_ops >>= 1 msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_PPG, ppg_app_lcfg_op_hdr_t(num_ops)) msg.payload.command = M2M2_APP_COMMON_CMD_ENUM_t.M2M2_APP_COMMON_CMD_WRITE_LCFG_REQ msg.payload.num_ops = num_ops for i in range(num_ops): tempVal = args[i*2] if ("0x") in tempVal: reg_addr = int(tempVal, 16) elif ("0X") in tempVal: reg_addr = int(tempVal, 16) else: reg_addr = int(tempVal) tempVal = args[i*2+1] if ("0x") in tempVal: reg_val = int(tempVal, 16) elif ("0X") in tempVal: reg_val = int(tempVal, 16) else: reg_val = int(tempVal) msg.payload.ops[i].field = reg_addr msg.payload.ops[i].value = reg_val self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_PPG, ppg_app_lcfg_op_hdr_t(num_ops), 60) if reply_msg == None: #status = self._get_enum_name(M2M2_APP_COMMON_STATUS_ENUM_t, reply_msg.payload.status) print "Writing PPG LCFG failed!" return self._print_ppg_lcfg_result(reply_msg) def do_getPpgStates(self, arg): """ Get the states of PPG. 
Eg: = getPpgStates """ msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_PPG, ppg_app_lib_state_t()) msg.payload.command = M2M2_PPG_APP_CMD_ENUM_t.M2M2_PPG_APP_CMD_GET_LAST_STATES_REQ self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_PPG, ppg_app_lib_state_t(), 10) if reply_msg != None: status = self._get_enum_name(M2M2_APP_COMMON_STATUS_ENUM_t, reply_msg.payload.status) self.vrb.write("Got the PPG States:\n") self.vrb.write("{} -> {} -> {} -> {} -> {} -> {} -> {} -> {} -> {} -> {}\n".format(reply_msg.payload.states[0], reply_msg.payload.states[1], reply_msg.payload.states[2], reply_msg.payload.states[3], reply_msg.payload.states[4], reply_msg.payload.states[5], reply_msg.payload.states[6], reply_msg.payload.states[7], reply_msg.payload.states[8], reply_msg.payload.states[9])) self.vrb.write(" Status: '{}'".format(status)) else: self.vrb.err("Failed getting the ppg states!") def do_SQISetSlot(self, arg): """ Set the ADPD Slot on which SQI algo needs to run Argument -> slot_no slot_no -> ranges from 1 to 12 6 --> slotF 7 --> slotG 8 --> slotH 9 --> slotI ----------------------------------------------- Usage: #>SQISetSlot [slot_no] To run SQI Algo on slot F: #>SQISetSlot 6 To run SQI Algo on slot G: #>SQISetSlot 7 """ args = self._parse_args(arg, 1) if args == None: self.vrb.err("Invalid argument! please type help <command>(help SQISetSlot) to know the usage.") return if int(args[0]) < 1 or int(args[0]) > 12: self.vrb.err("Invalid Argument, out of range. 
please type help <command>(help SQISetSlot) to know the valid arguments") return slot_no = int(args[0]) msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_SQI, sqi_app_set_slot_t()) msg.payload.command = M2M2_SQI_APP_CMD_ENUM_t.M2M2_SQI_APP_CMD_SET_SLOT_REQ msg.payload.nSQISlot = pow(2, (slot_no-1)) self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_SQI, sqi_app_set_slot_t(), 10) if reply_msg != None: status = self._get_enum_name(M2M2_APP_COMMON_STATUS_ENUM_t, reply_msg.payload.status) self.vrb.write("Done with SQI Slot Set:") self.vrb.write(" Status: '{}'".format(status)) else: self.vrb.err("Failed doing SQI Slot Set!") def do_setUCHREnab(self, arg): """ Command to enable/disable HR calculation from UC1,2,3,5 from a particular slot UC HR is to be started when raw adxl, adpd applications are started Usage: #>setUCHREnab [enab] [slotNum] Argument -> enab 1 --> to enable 0 --> to disable Argument -> slotNum slot_no -> ranges from 1 to 12 6 --> slotF 7 --> slotG 8 --> slotH 9 --> slotI To enable UC HR calculation from slot F: #>setUCHREnab 1 6 To disable UC HR calculation from slot F: #>setUCHREnab 0 6 """ args = self._parse_args(arg, 2) if args == None: self.vrb.err("Invalid argument! 
please type help <command>(help setUCHREnab) to know the usage.") return if int(args[0]) < 0 or int(args[0]) > 1: self.vrb.err("Invalid Argument, please type help <command>(help setUCHREnab) to know the valid arguments") return enab = int(args[0]) slotNum = int(args[1]) msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADPD4000, m2m2_adpd_set_uc_hr_enab_t()) msg.payload.command = M2M2_SENSOR_ADPD_COMMAND_ENUM_t.M2M2_SENSOR_ADPD_COMMAND_UC_HR_ENAB_REQ msg.payload.control = enab msg.payload.slotNum = slotNum self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADPD4000, m2m2_adpd_set_uc_hr_enab_t(), 10) if reply_msg != None: status = self._get_enum_name(M2M2_APP_COMMON_STATUS_ENUM_t, reply_msg.payload.status) self.vrb.write("Done with setUCHREnab:") self.vrb.write(" Status: '{}'".format(status)) else: self.vrb.err("Failed doing setUCHREnab!") def do_adpdAGCControl(self, arg): """ Control the Static AGC ON/OFF for ADPD app. Arguments to be specified in following format->adpdAGCControl agc_type:agc_cntrl agc_type:agc_cntrl ... agc_type-> 1. To Select between g/b/r/ir/mwl mwl = 0 g = 1 r = 2 ir = 3 b = 4 agc_cntrl-> 2. To Enable AGC, arg. = 1 To Disable AGC, arg. = 0 ----------------------------------------------- Usage: #>adpdAGCControl [agc_cntrl] To Turn OFF STATIC AGC for MWL_VIEW #>adpdAGCControl 0:0 To Turn ON STATIC AGC for MWL VIEW #>adpdAGCControl 0:1 To Turn ON STATIC AGC for Green LED and Turn OFF STATIC AGC for Red LED #>adpdAGCControl 1:1 2:0 To Turn ON STATIC AGC for IR LED and Blue LED #>adpdAGCControl 3:1 4:1 """ args = self._parse_args(arg, None) if args == None: print("Invalid arguments! 
please type help <command>(help ppgAGCControl) to know the usage.") return num_ops = len(args) msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADPD4000, m2m2_adpd_agc_cntrl_t(num_ops)) msg.payload.command = M2M2_SENSOR_ADPD_COMMAND_ENUM_t.M2M2_SENSOR_ADPD_COMMAND_AGC_ON_OFF_REQ msg.payload.num_ops = num_ops for i in range(num_ops): tempVal = args[i] if (':' not in tempVal): self.vrb.err("Invalid Argument Format, check help!") return elif ("0x") in tempVal: agc_type = int(tempVal.split(':')[0], 16) agc_cntrl = int(tempVal.split(':')[1], 16) elif ("0X") in tempVal: agc_type = int(tempVal.split(':')[0], 16) agc_cntrl = int(tempVal.split(':')[1], 16) else: agc_type = int(tempVal.split(':')[0]) agc_cntrl = int(tempVal.split(':')[1]) if(agc_cntrl < 0 or agc_cntrl > 1 or agc_type < 0 or agc_type > 4): self.vrb.err("Invalid arguments! please type help <command>(help ppgAGCControl) to know the usage.") return if(agc_type == 0 and num_ops > 1): self.vrb.err("Invalid, When AGC Control is done for MWL View, AGC Control for individual LEDs should be avoided") return msg.payload.ops[i].agc_cntrl = agc_cntrl msg.payload.ops[i].agc_type = agc_type self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADPD4000,m2m2_adpd_agc_cntrl_t(num_ops), 10) if reply_msg != None: status = self._get_enum_name(M2M2_APP_COMMON_STATUS_ENUM_t, reply_msg.payload.status) self.vrb.write("Done with Static AGC Control:") self.vrb.write(" Status: '{}'".format(status)) else: self.vrb.err("Failed doing Static AGC COntrol!") def do_adpdGetAGCInfo(self, arg): """ Get AGC Algo Info for 4 LEDs after AGC is done and save it as file. 
LED 1 -> Green LED 2 -> Red LED 3 -> IR LED 4 -> Blue No argument ----------------------------------------------- Usage: #>adpdGetAGCInfo """ msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADPD4000, m2m2_adpd_agc_info_t()) msg.payload.command = M2M2_SENSOR_ADPD_COMMAND_ENUM_t.M2M2_SENSOR_ADPD_COMMAND_AGC_INFO_REQ led_index = 0 led=4 f = open('agc_info.csv','w') while led_index < led: msg.payload.led_index = led_index+1 self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADPD4000, m2m2_adpd_agc_info_t(), 10) if reply_msg != None: status = self._get_enum_name(M2M2_APP_COMMON_STATUS_ENUM_t, reply_msg.payload.status) self.vrb.write("Received AGC Info") self.vrb.write(" Status: '{}'".format(status)) #f = open('agc_info.csv','w') sample_cnt = 0 sample=10 f.write('LED {}\n'.format(led_index+1)); f.write('CH1, CH2\n'); while sample_cnt < sample: f.write('{:02d}, {:02d}\n'.format((reply_msg.payload.green_ch1[sample_cnt]),(reply_msg.payload.green_ch2[sample_cnt]))) sample_cnt+=1 f.write('DC0_LEDcurrent, {:02d}\n'.format((reply_msg.payload.DC0_LEDcurrent))) f.write('TIA_ch1_i, {:02d}\n'.format((reply_msg.payload.TIA_ch1_i))) f.write('TIA_ch2_i, {:02d}\n\n'.format((reply_msg.payload.TIA_ch2_i))) #f.close() else: self.vrb.err("Failed doing Static AGC COntrol!") f.close() return led_index += 1 f.close() def do_set_adpd_ext_datastream_odr(self, arg): """ specify/set odr of externally fed adpd data for sqi stream for eg: data logged @50Hz, cmd will be - #>set_adpd_ext_datastream_odr 50 """ args = self._parse_args(arg, 1) sampling_freq = int(args[0]) if(sampling_freq < 25 or sampling_freq > 100): self.vrb.err("sampling frequency not in range") msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADPD4000,adpd_ext_data_stream_odr_t()) msg.payload.command = M2M2_SENSOR_ADPD_COMMAND_ENUM_t.M2M2_SENSOR_ADPD_COMMAND_SET_EXT_DATA_STREAM_ODR_REQ msg.payload.sampling_freq = sampling_freq self._send_packet(msg) reply_msg = 
self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADPD4000,adpd_ext_data_stream_odr_t(), 10) if reply_msg != None: status = self._get_enum_name(M2M2_APP_COMMON_STATUS_ENUM_t, reply_msg.payload.status) self.vrb.write(" Status: '{}'".format(status)) else: self.vrb.err("Failed in setting odr for externally fed adpd data!") def do_send_ext_adpd_datastream(self, arg): """ Send external adpd data to device (csv file should contain adpd data logged in AWT) #>send_ext_adpd_datastream csvfilename start_row column_index """ args = self._parse_args(arg, 3) num_ops = len(args) src_addr = get_cli_addr() start_row = int(args[1]) column_index = int(args[2]) sequence_counter=0 delay = 0.01 max_rtc_ticks_24_hr = 2764800000 column_data_list = read_csv_column(str(args[0]),column_index,start_row) with open(str(args[0]), 'r') as f_ref: line_list = f_ref.readline() time_hrmmss=line_list.split(',')[2].strip() tz_seconds=int(line_list.split(',')[4].strip()) hours=int(time_hrmmss[0:2]) minutes=int(time_hrmmss[3:5]) seconds=int(time_hrmmss[6:8]) absolute_time_ms = ((hours*3600)+(minutes*60)+seconds+tz_seconds)*1000 column_time_list = read_csv_column(str(args[0]),0,start_row) for row in range(len(column_data_list)): msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADPD4000,adpd_ext_data_stream_t()) msg.header.src = src_addr msg.payload.command = M2M2_SENSOR_ADPD_COMMAND_ENUM_t.M2M2_SENSOR_ADPD_COMMAND_EXT_ADPD_DATA_STREAM msg.payload.data = int(column_data_list[row]) msg.payload.timestamp = int(((column_time_list[row]+absolute_time_ms) * 32.768) % max_rtc_ticks_24_hr) # converting ms to ticks sequence_counter = sequence_counter + 1 msg.payload.sequence_num = sequence_counter self._send_packet(msg) time.sleep(delay) def do_LTAppReadCh2Cap(self, arg): """ Read the AD7156 CH2 Capacitace in fF. 
This returns SUCCESS, if LT app is ruuning, else ERROR This is used to note the Cap value in different surfaces #>LTAppReadCh2Cap [ntimes] where 'ntimes' is the no:of times Cap value needs to be read ----------------------------------------------- Usage: #>LTAppReadCh2Cap [ntimes] #>LTAppReadCh2Cap 6 """ args = self._parse_args(arg, None) if len(args) == 0: self.vrb.write("No arguments supplied!") return ntimes = int(args[0]) msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_APP_LT_APP, lt_app_rd_ch2_cap()) msg.payload.command = M2M2_LT_COMMAND_ENUM_t.M2M2_LT_COMMAND_RD_CH2_CAP_REQ avgCap = 0 minCap = 1490 maxCap =0 for i in range(ntimes): self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_APP_LT_APP, lt_app_rd_ch2_cap(), 60) if reply_msg != None: avgCap += reply_msg.payload.capVal if reply_msg.payload.capVal > maxCap: maxCap = reply_msg.payload.capVal if reply_msg.payload.capVal < minCap: minCap = reply_msg.payload.capVal self.vrb.write("Ch2 Cap Value:{}".format(reply_msg.payload.capVal)) status = self._get_enum_name(M2M2_APP_COMMON_STATUS_ENUM_t, reply_msg.payload.status) self.vrb.write(" Status: '{}'".format(status)) else: print "Read Ch2 Cap failed!" return avgCap = avgCap/ntimes self.vrb.write("Average Ch2 Cap: '{}'".format(avgCap)) self.vrb.write("Minimum Ch2 Cap: '{}'".format(minCap)) self.vrb.write("Maximum Ch2 Cap: '{}'".format(maxCap)) return avgCap, minCap, maxCap def do_lcfgLTAppRead(self, arg): """ Read the LT application LCFG used in the Watch. The argument are the LT LCFG addresses: Note that the the range of addr varies from 0x0 to 0x3, as given below: LT_APP_LCFG_ONWR_TIME = 0x0 LT_APP_LCFG_OFFWR_TIME = 0x1 LT_APP_LCFG_AIR_CAP_VAL = 0x2 LT_APP_LCFG_SKIN_CAP_VAL = 0x3 LT_APP_LCFG_TRIGGER_METHOD = 0x4 Eg: = lcfgLTAppRead addr1 addr2 ... 
""" args = self._parse_args(arg, None) if len(args) == 0: self._p_err("No arguments supplied!") return num_ops = len(args) msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_APP_LT_APP, lt_app_lcfg_op_hdr_t(num_ops)) msg.payload.command = M2M2_APP_COMMON_CMD_ENUM_t.M2M2_APP_COMMON_CMD_READ_LCFG_REQ msg.payload.num_ops = num_ops for i in range(num_ops): tempVal = args[i] if ("0x") in tempVal: reg_addr = int(tempVal, 16) elif ("0X") in tempVal: reg_addr = int(tempVal, 16) else: reg_addr = int(tempVal) msg.payload.ops[i].field = reg_addr self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_APP_LT_APP, lt_app_lcfg_op_hdr_t(num_ops), 20) if reply_msg != None: status = self._get_enum_name(M2M2_APP_COMMON_STATUS_ENUM_t, reply_msg.payload.status) if reply_msg == None: print "Reading LT App LCFG failed!" return self._print_lt_app_lcfg_result(reply_msg) def do_lcfgLTAppWrite(self, arg): """ Set the LT application LCFG. The argument is the LT app LCFG addresses or the LCFG ID, then VALUE to modify the lt app lcfg value: Note that the the range of addr varies from 0x0 to 0x3 as given below: LT_APP_LCFG_ONWR_TIME = 0x0 LT_APP_LCFG_OFFWR_TIME = 0x1 LT_APP_LCFG_AIR_CAP_VAL = 0x2 LT_APP_LCFG_SKIN_CAP_VAL = 0x3 LT_APP_LCFG_TRIGGER_METHOD = 0x4 Eg: = lcfgLTAppWrite addr1 value1 addr2 value2 ... 
""" args = self._parse_args(arg, None) if len(args) == 0: self._p_err("No arguments supplied!") return num_ops = len(args) num_ops >>= 1 msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_APP_LT_APP, lt_app_lcfg_op_hdr_t(num_ops)) msg.payload.command = M2M2_APP_COMMON_CMD_ENUM_t.M2M2_APP_COMMON_CMD_WRITE_LCFG_REQ msg.payload.num_ops = num_ops for i in range(num_ops): tempVal = args[i*2] if ("0x") in tempVal: reg_addr = int(tempVal, 16) elif ("0X") in tempVal: reg_addr = int(tempVal, 16) else: reg_addr = int(tempVal) tempVal = args[i*2+1] if ("0x") in tempVal: reg_val = int(tempVal, 16) elif ("0X") in tempVal: reg_val = int(tempVal, 16) else: reg_val = int(tempVal) msg.payload.ops[i].field = reg_addr msg.payload.ops[i].value = reg_val self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_APP_LT_APP, lt_app_lcfg_op_hdr_t(num_ops), 60) if reply_msg == None: status = self._get_enum_name(M2M2_APP_COMMON_STATUS_ENUM_t, reply_msg.payload.status) print "Writing LT App LCFG failed!" return self._print_lt_app_lcfg_result(reply_msg) def _print_lt_app_lcfg_result(self, packet): self._print_packet_status(packet) self.vrb.write(" Num of registers: '{}'".format(int(packet.payload.num_ops))) t = table(["Field", "Value"]) for i in range(packet.payload.num_ops): t.add_row([hex(packet.payload.ops[i].field), packet.payload.ops[i].value]) t.display() def do_LTAppTuning(self, arg): """ Steps for LT App Tuning and how to use LT App: 1. Do the tuning of LT App lcfg paramters to change either fw lcfg or add DCB lcfg 2. Choose the Trigger method: 0000 -> LT_APP_CAPSENSE_TUNED_TRIGGER, 0001 -> LT_APP_CAPSENSE_DISPLAY_TRIGGER, 0002 -> LT_APP_BUTTON_TRIGGER, 0003->LT_APP_INTERMITTENT_TRIGGER 3. Write the LT Configuration NAND/DCB after doing the LTAppTuning #>quickstart nand_config_file_create_mv_uc1 (or any other UC..) OR #>quickstart gen_blk_dcb_file_create_mv_uc1 (or any other UC..) #>write_dcb_config lt_dcb_config gen_blk_dcb.lcfg 4. 
        To test the LT App:
        For [trig_method] 0 -> LT_APP_CAPSENSE_TUNED_TRIGGER:
            Wrist On/Off should turn On/Off LT logging
        For [trig_method] 1 -> LT_APP_CAPSENSE_DISPLAY_TRIGGER:
            Wear the watch on the wrist.
            From Watch display, LOW_TOUCH_LOGGING page, navigate to LT_APP sub-page, press select button to enable LT App and turn On LT logging
            Remove the Watch to turn off LT logging. From LT_APP sub-page, press select button to disable the LT App.
            OR
            Wear the watch on the wrist. Give pm_activate_touch_sensor command to turn On LT logging.
            Remove the Watch to turn off LT logging. Give pm_deactivate_touch_sensor command to turn Off LT App
        For [trig_method] 2 -> LT_APP_BUTTON_TRIGGER:
            Wear the watch on the wrist.
            From Watch display, Home page, press Select button for 3 secs, to enter into "LOG EN" sub-page, press select button to enable and turn On LT logging
            Remove the Watch and connect it to USB cable to turn off LT logging.
        For [trig_method] 3 -> LT_APP_INTERMITTENT_TRIGGER:
            Wear the watch, by intermittent operation logs are started and stopped
        ------------------------------------------------------------------------------------------------
        For the LT application LCFG,
        Note that the range of addr varies from 0x0 to 0x4 as given below:
        LT_APP_LCFG_ONWR_TIME = 0x0
        LT_APP_LCFG_OFFWR_TIME = 0x1
        LT_APP_LCFG_AIR_CAP_VAL = 0x2
        LT_APP_LCFG_SKIN_CAP_VAL = 0x3
        LT_APP_LCFG_TRIGGER_METHOD = 0x4
        ------------------------------------------------------------------------------------------------
        Usage:
            LTAppTuning [option] [trig_method] [airCap] [skinCap]
            [option]
                dcb --> To Tune and write to lt_app_lcfg DCB blk
                fw  --> To Tune and replace default fw lcfg
            [trig_method]
                0 -> LT_APP_CAPSENSE_TUNED_TRIGGER
                1 -> LT_APP_CAPSENSE_DISPLAY_TRIGGER
                2 -> LT_APP_BUTTON_TRIGGER
                3 -> LT_APP_INTERMITTENT_TRIGGER
            [airCap] Value in fF as observed from Display page which reads Ch2 Cap, when Watch is placed in Air, without touching bottom touch electrodes
                Try to keep a min value of the lot, observed in about 10
        trials
            [skinCap] Value in fF as observed from Display page which reads Ch2 Cap, when Watch is placed in skin, touching the bottom touch electrodes
                Try to keep a max value of the lot, observed in about 10 trials
            [airCap] & [skinCap] is not required for [trig_method] = 1 or 2
        Eg: = LTAppTuning fw 0 1400 1380
        Eg: = LTAppTuning dcb 0 1285 1250
        Eg: = LTAppTuning fw 1
        Eg: = LTAppTuning dcb 1
        Eg: = LTAppTuning fw 2
        Eg: = LTAppTuning dcb 2
        Eg: = LTAppTuning fw 3
        Eg: = LTAppTuning dcb 3
        """
        args = self._parse_args(arg, None)
        if len(args) == 0 or len(args) < 2:
            self.vrb.write("Wrong arguments supplied!")
            return
        option = args[0]
        trig_method = args[1]
        if trig_method == "0":
            # Tuned-trigger mode needs explicit air/skin capacitance thresholds.
            if len(args) != 4:
                self.vrb.write("Wrong arguments supplied!")
                return
            minCap_air = args[2]
            maxCap_skin = args[3]
        elif trig_method == "1" or trig_method == "2" or trig_method == "3":
            # Thresholds are unused for these trigger methods; use defaults.
            minCap_air = "1380"
            maxCap_skin = "1340"
        else:
            self.vrb.write("trig_method: Wrong arguments supplied!")
            return
        self.vrb.write("Deactivating LT app")
        self.do_pm_deactivate_touch_sensor("0")
        if option == "fw" and trig_method == "0" :
            # Patch the firmware lcfg in place via the lcfgLTAppWrite command.
            cmd = "lcfgLTAppWrite 0x2 " + minCap_air
            self.onecmd(cmd)
            cmd = "lcfgLTAppWrite 0x3 " + maxCap_skin
            self.onecmd(cmd)
        elif option == "fw" and trig_method == "1" :
            cmd = "lcfgLTAppWrite 0x4 " + trig_method
            self.onecmd(cmd)
        elif option == "fw" and trig_method == "2" :
            cmd = "lcfgLTAppWrite 0x4 " + trig_method
            self.onecmd(cmd)
        elif option == "fw" and trig_method == "3" :
            cmd = "lcfgLTAppWrite 0x4 " + trig_method
            self.onecmd(cmd)
        elif option == "dcb" :
            #Write to lt_app_lcfg_dcb.lcfg file
            str1 = "#lt_app_lcfg_dcb.lcfg"
            str2 = "00 1B58 #ONWR_TIME hex value in ms; 7000ms = 0x1B58"
            str3 = "01 1388 #OFFWR_TIME hex value in ms; 5000ms = 0x1388"
            # The DCB file format wants 4-digit hex without the 0x prefix,
            # hence the '0x' -> '0' replacement.
            airCap = str(hex(int(minCap_air,10)))
            airCap = airCap.replace('0x','0')
            str4 = "02 " + airCap + " #AIR_CAP_VAL hex value in fF;"
            skinCap = str(hex(int(maxCap_skin,10)))
            skinCap = skinCap.replace('0x','0')
            str5 = "03 " + skinCap + " #SKIN_CAP_VAL hex value in fF;"
trig_method = "000"+trig_method str6 = "04 " + trig_method + " #LT_APP_LCFG_TRIGGER_METHOD hex value; 0000 -> LT_APP_CAPSENSE_TUNED_TRIGGER, 0001 -> LT_APP_CAPSENSE_DISPLAY_TRIGGER, 0002 -> LT_APP_BUTTON_TRIGGER, 0003->LT_APP_INTERMITTENT_TRIGGER" f = open('dcb_cfg/lt_app_dcb.lcfg','w') f.write(str1) f.write('\n') f.write(str2) f.write('\n') f.write(str3) f.write('\n') f.write(str4) f.write('\n') f.write(str5) f.write('\n') f.write(str6) f.write('\n') f.close() #Write to lt_app_lcfg_dcb block cmd = "write_dcb_config lt_app_lcfg lt_app_dcb.lcfg" self.onecmd(cmd) else: self.vrb.write("Wrong option given, check help & retry") #self.vrb.write("Activating LT app") #self.do_pm_activate_touch_sensor("0") def do_get_user0_config_app_state(self, arg): """ This is a command, to get the current user0 config app state in the Watch Fw. It is based on this value that current operation happens in user0 config app. Its either one of the enum value: STATE_ADMIT_STANDBY = 0, STATE_START_MONITORING = 1, STATE_SLEEP = 2, STATE_INTERMITTENT_MONITORING = 3, STATE_INTERMITTENT_MONITORING_START_LOG = 4, STATE_INTERMITTENT_MONITORING_STOP_LOG = 5, STATE_END_MONITORING = 6, STATE_CHARGING_BATTERY = 7, STATE_OUT_OF_BATTERY_STATE_BEFORE_START_MONITORING = 8, STATE_OUT_OF_BATTERY_STATE_DURING_INTERMITTENT_MONITORING = 9, #>get_user0_config_app_state """ address = M2M2_ADDR_ENUM_t.M2M2_ADDR_USER0_CONFIG_APP msg = m2m2_packet(address, user0_config_app_set_state_t()) msg.payload.command = M2M2_USER0_CONFIG_APP_COMMAND_ENUM_t.M2M2_USER0_CONFIG_APP_COMMAND_GET_STATE_REQ msg.payload.status = M2M2_APP_COMMON_STATUS_ENUM_t.M2M2_APP_COMMON_STATUS_OK self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_USER0_CONFIG_APP, user0_config_app_set_state_t(), 20) if reply_msg != None: if (reply_msg.payload.status == M2M2_APP_COMMON_STATUS_ENUM_t.M2M2_APP_COMMON_STATUS_OK): print("user0 config app state in the Watch Fw : {} ").format(reply_msg.payload.state) 
                self._print_user0_app_state(reply_msg.payload.state)
            else:
                self.vrb.err("Error occured while reading state")
        else:
            self.vrb.err("The device did not respond!")

    def do_set_user0_config_app_state(self, arg):
        """
        This is a command, to set the current user0 config app state in the Watch Fw.
        It is based on this value that current operation happens in user0 config app.
        Its either one of the enum value:
        STATE_ADMIT_STANDBY = 0,
        STATE_START_MONITORING = 1,
        STATE_SLEEP = 2,
        STATE_INTERMITTENT_MONITORING = 3,
        STATE_INTERMITTENT_MONITORING_START_LOG = 4,
        STATE_INTERMITTENT_MONITORING_STOP_LOG = 5,
        STATE_END_MONITORING = 6,
        STATE_CHARGING_BATTERY = 7,
        STATE_OUT_OF_BATTERY_STATE_BEFORE_START_MONITORING = 8,
        STATE_OUT_OF_BATTERY_STATE_DURING_INTERMITTENT_MONITORING = 9,
            #>set_user0_config_app_state 0
        """
        args = self._parse_args(arg, 1)
        if args == None:
            return
        address = M2M2_ADDR_ENUM_t.M2M2_ADDR_USER0_CONFIG_APP
        msg = m2m2_packet(address, user0_config_app_set_state_t())
        msg.payload.command = M2M2_USER0_CONFIG_APP_COMMAND_ENUM_t.M2M2_USER0_CONFIG_APP_COMMAND_SET_STATE_REQ
        msg.payload.status = M2M2_APP_COMMON_STATUS_ENUM_t.M2M2_APP_COMMON_STATUS_OK
        msg.payload.state = int(args[0])
        self._send_packet(msg)
        reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_USER0_CONFIG_APP, user0_config_app_set_state_t(), 20)
        if reply_msg != None:
            if (reply_msg.payload.status == M2M2_APP_COMMON_STATUS_ENUM_t.M2M2_APP_COMMON_STATUS_OK):
                print("state changed in Watch Fw to : {} ").format(reply_msg.payload.state)
            else:
                self.vrb.err("Error occured while writing state")
        else:
            self.vrb.err("The device did not respond!")

    def do_hw_id(self, arg):
        """
        This is a command, to read/write/delete hw_id in the Watch Fw.
        Usage:
            hw_id [option] [hw_id]
            [option]
                read   --> To read the hw_id
                write  --> To write the hw_id
                delete --> To delete the hw_id
            [hw_id] value from 1- 99 range to be set the hw_id as. Pass this only for the write option.
Eg: #>hw_id read #>hw_id write 99 #>hw_id delete """ args = self._parse_args(arg, None) if len(args) == 0: self._p_err("No arguments supplied!") return address = M2M2_ADDR_ENUM_t.M2M2_ADDR_USER0_CONFIG_APP msg = m2m2_packet(address, user0_config_app_id_t()) msg.payload.command = M2M2_USER0_CONFIG_APP_COMMAND_ENUM_t.M2M2_USER0_CONFIG_APP_COMMAND_ID_OP_REQ if args[0] == "read": msg.payload.id_sel = ID_SELECTION_ENUM_t.ID_HW_ID msg.payload.id_op = ID_OPERATION_MODE_ENUM_t.ID_OPERATION_MODE_READ elif args[0] == "write" and args[1]: msg.payload.id_sel = ID_SELECTION_ENUM_t.ID_HW_ID msg.payload.id_op = ID_OPERATION_MODE_ENUM_t.ID_OPERATION_MODE_WRITE msg.payload.id_num = int(args[1]) elif args[0] == "delete": msg.payload.id_sel = ID_SELECTION_ENUM_t.ID_HW_ID msg.payload.id_op = ID_OPERATION_MODE_ENUM_t.ID_OPERATION_MODE_DELETE else: print("Wrong arguments chosen, please check help and retry") return msg.payload.status = M2M2_USER0_CONFIG_APP_STATUS_ENUM_t.M2M2_USER0_CONFIG_APP_STATUS_OK self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_USER0_CONFIG_APP, user0_config_app_id_t(), 20) if reply_msg != None: if (reply_msg.payload.status == M2M2_USER0_CONFIG_APP_STATUS_ENUM_t.M2M2_USER0_CONFIG_APP_STATUS_OK): if args[0] == "read": print("HW ID Read back: {} ").format(reply_msg.payload.id_num) elif args[0] == "write": print("HW ID write successful") elif args[0] == "delete": print("HW ID delete successful") else: self.vrb.err("Error occured while doing hw_id request") else: self.vrb.err("The device did not respond!") def do_exp_id(self, arg): """ This is a command, to read/write/delete exp_id in the Watch Fw. Usage: exp_id [option] [exp_id] [option] read --> To read the exp_id write --> To write the exp_id delete --> To delete the exp_id [exp_id] value from 1-9999 to be set the exp_id as. Pass this only for the write option. 
Eg: #>exp_id read #>exp_id write 1122 #>exp_id delete """ args = self._parse_args(arg, None) if len(args) == 0: self._p_err("No arguments supplied!") return address = M2M2_ADDR_ENUM_t.M2M2_ADDR_USER0_CONFIG_APP msg = m2m2_packet(address, user0_config_app_id_t()) msg.payload.command = M2M2_USER0_CONFIG_APP_COMMAND_ENUM_t.M2M2_USER0_CONFIG_APP_COMMAND_ID_OP_REQ if args[0] == "read": msg.payload.id_sel = ID_SELECTION_ENUM_t.ID_EXP_ID msg.payload.id_op = ID_OPERATION_MODE_ENUM_t.ID_OPERATION_MODE_READ elif args[0] == "write" and args[1]: msg.payload.id_sel = ID_SELECTION_ENUM_t.ID_EXP_ID msg.payload.id_op = ID_OPERATION_MODE_ENUM_t.ID_OPERATION_MODE_WRITE msg.payload.id_num = int(args[1]) elif args[0] == "delete": msg.payload.id_sel = ID_SELECTION_ENUM_t.ID_EXP_ID msg.payload.id_op = ID_OPERATION_MODE_ENUM_t.ID_OPERATION_MODE_DELETE else: print("Wrong arguments chosen, please check help and retry") return msg.payload.status = M2M2_USER0_CONFIG_APP_STATUS_ENUM_t.M2M2_USER0_CONFIG_APP_STATUS_OK self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_USER0_CONFIG_APP, user0_config_app_id_t(), 20) if reply_msg != None: if (reply_msg.payload.status == M2M2_USER0_CONFIG_APP_STATUS_ENUM_t.M2M2_USER0_CONFIG_APP_STATUS_OK): if args[0] == "read": print("Exp ID Read back: {} ").format(reply_msg.payload.id_num) elif args[0] == "write": print("Exp ID write successful") elif args[0] == "delete": print("Exp ID delete successful") else: self.vrb.err("Error occured while doing exp_id request") else: self.vrb.err("The device did not respond!") def do_clear_user0_prev_state_event(self, arg): """ This is a command, to clear the user0 config app's previous state, event received and the corresponding timestamp structure maintained registered in the Watch Fw. 
        Usage:
            clear_user0_prev_state_event
        Eg:
            #>clear_user0_prev_state_event
        """
        args = self._parse_args(arg, 0)
        address = M2M2_ADDR_ENUM_t.M2M2_ADDR_USER0_CONFIG_APP
        msg = m2m2_packet(address, m2m2_user0_config_app_cmd_t())
        msg.payload.command = M2M2_USER0_CONFIG_APP_COMMAND_ENUM_t.M2M2_USER0_CONFIG_APP_CLEAR_PREV_ST_EVT_REQ
        msg.payload.status = M2M2_USER0_CONFIG_APP_STATUS_ENUM_t.M2M2_USER0_CONFIG_APP_STATUS_OK
        self._send_packet(msg)
        reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_USER0_CONFIG_APP, m2m2_user0_config_app_cmd_t(), 10)
        if reply_msg != None:
            if (reply_msg.payload.status == M2M2_USER0_CONFIG_APP_STATUS_ENUM_t.M2M2_USER0_CONFIG_APP_STATUS_OK):
                print("The structure is cleared")
            else:
                self.vrb.err("Error occured while doing clear_user0_prev_state_event request")
        else:
            self.vrb.err("The device did not respond!")

    def do_get_user0_prev_state_event(self, arg):
        """
        This is a command, to get the user0 config app's previous state, event
        received and the corresponding timestamp registered in the Watch Fw.
        Usage:
            get_user0_prev_state_event
        Eg:
            #>get_user0_prev_state_event
        """
        args = self._parse_args(arg, 0)
        address = M2M2_ADDR_ENUM_t.M2M2_ADDR_USER0_CONFIG_APP
        msg = m2m2_packet(address, user0_app_prev_state_event_pkt_t())
        msg.payload.command = M2M2_USER0_CONFIG_APP_COMMAND_ENUM_t.M2M2_USER0_CONFIG_APP_GET_PREV_ST_EVT_REQ
        msg.payload.status = M2M2_USER0_CONFIG_APP_STATUS_ENUM_t.M2M2_USER0_CONFIG_APP_STATUS_OK
        self._send_packet(msg)
        reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_USER0_CONFIG_APP, user0_app_prev_state_event_pkt_t(), 10)
        if reply_msg != None:
            if (reply_msg.payload.status == M2M2_USER0_CONFIG_APP_STATUS_ENUM_t.M2M2_USER0_CONFIG_APP_STATUS_OK):
                # The reply carries a 4-deep history of (state, event, timestamp).
                print("Prev state0 Read back-")
                self._print_user0_app_state(reply_msg.payload.prev_st_evt[0].prev_state)
                print("Prev event0 Read back-")
                self._print_user0_app_event(reply_msg.payload.prev_st_evt[0].prev_event)
                print("Prev timestamp0 Read back: {} ").format(reply_msg.payload.prev_st_evt[0].prev_timestamp)
                print("Prev state1 Read back-")
                self._print_user0_app_state(reply_msg.payload.prev_st_evt[1].prev_state)
                print("Prev event1 Read back-")
                self._print_user0_app_event(reply_msg.payload.prev_st_evt[1].prev_event)
                print("Prev timestamp1 Read back: {} ").format(reply_msg.payload.prev_st_evt[1].prev_timestamp)
                print("Prev state2 Read back-")
                self._print_user0_app_state(reply_msg.payload.prev_st_evt[2].prev_state)
                print("Prev event2 Read back-")
                self._print_user0_app_event(reply_msg.payload.prev_st_evt[2].prev_event)
                print("Prev timestamp2 Read back: {} ").format(reply_msg.payload.prev_st_evt[2].prev_timestamp)
                print("Prev state3 Read back-")
                self._print_user0_app_state(reply_msg.payload.prev_st_evt[3].prev_state)
                print("Prev event3 Read back-")
                self._print_user0_app_event(reply_msg.payload.prev_st_evt[3].prev_event)
                print("Prev timestamp3 Read back: {} ").format(reply_msg.payload.prev_st_evt[3].prev_timestamp)
                print("Intermittent Operation Count: {} ").format(reply_msg.payload.intermittent_op_cnt)
            else:
                self.vrb.err("Error occured while doing get_user0_prev_state_event request")
        else:
            self.vrb.err("The device did not respond!")

    def do_bypass_user0_timings(self, arg):
        """
        This is a command, to enable/disable bypass of user0 config app timings,
        to force continuous mode of operation from the sensors.
        Will be needed when continuous operation is required, when there is user
        timings meant for intermittent operation and it is LT Mode3
        Usage:
            #>bypass_user0_timings 0
            #>bypass_user0_timings 1
        """
        args = self._parse_args(arg, 1)
        if args == None:
            return
        address = M2M2_ADDR_ENUM_t.M2M2_ADDR_USER0_CONFIG_APP
        msg = m2m2_packet(address, user0_app_bypass_user0_timings_pkt_t())
        msg.payload.command = M2M2_USER0_CONFIG_APP_COMMAND_ENUM_t.M2M2_USER0_CONFIG_APP_BYPASS_USER0_TIMINGS_REQ
        msg.payload.status = M2M2_USER0_CONFIG_APP_STATUS_ENUM_t.M2M2_USER0_CONFIG_APP_STATUS_OK
        msg.payload.enable = int(args[0])
        self._send_packet(msg)
        reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_USER0_CONFIG_APP, user0_app_bypass_user0_timings_pkt_t(), 10)
        if reply_msg != None:
            if (reply_msg.payload.status == M2M2_USER0_CONFIG_APP_STATUS_ENUM_t.M2M2_USER0_CONFIG_APP_STATUS_OK):
                print("Bypass user0 timings set to : {} ").format(reply_msg.payload.enable)
            else:
                self.vrb.err("Error occured while doing bypass_user0_timings request")
        else:
            self.vrb.err("The device did not respond!")

    def _print_user0_app_event(self, event):
        # Decode a USER0_CONFIG_APP_EVENT_t raw value into its enum name.
        status = self._get_enum_name(USER0_CONFIG_APP_EVENT_t, event)
        if status == None:
            self.vrb.err("Error decoding a packet's return event value!")
            return
        self.vrb.write(" Event Read back: '{}'".format(status))

    def _print_user0_app_state(self, state):
        # Decode a USER0_CONFIG_APP_STATE_t raw value into its enum name.
        status = self._get_enum_name(USER0_CONFIG_APP_STATE_t, state)
        if status == None:
            self.vrb.err("Error decoding a packet's return state value!")
            return
        self.vrb.write(" State Read back: '{}'".format(status))

    def do_lcfgUser0ConfigAppRead(self, arg):
        """ Read the User0 Config application LCFG used in the Watch.
        The argument are the user0_config_app LCFG addresses:
        Note that the range of addr varies from 0x0 to 0x8, as given below:
        USER0_CONFIG_LCFG_AGC_UP_TH = 0x0 //!< Upper limit of LED light intensity for Static AGC - 1byte, Setting range: 1-127, Increment: 1, Unit: LSB
        USER0_CONFIG_LCFG_AGC_LOW_TH = 0x1 //!< Lower limit of LED light intensity for Static AGC - 1byte, Setting range: 1-127, Increment: 1, Unit: LSB
        USER0_CONFIG_LCFG_ADV_TIMEOUT_MONITOR = 0x2 /* Elapsed time from the start of advertising after finishing PPG,
            * EDA, body temperature, and acceleration measurement during
            * intermittent operation to the time out - 2byte, Setting range: 10-300, Increment: 10, Unit: second */
        USER0_CONFIG_LCFG_HW_ID = 0x3 //!< Unique identification number assigned to each wristband - 2byte, Setting range: 00-99, Increment: 1, Unit: NA
        USER0_CONFIG_LCFG_EXP_ID = 0x4 //!< Patient-specific identification number assigned to each case - 2byte, Setting range: 0000-9999, Increment: 1, Unit: NA
        USER0_CONFIG_LCFG_TEMP_MEAS_SEC = 0x5 //!< Time to measure body temperature - 2byte, Setting range: 1-60, Increment: 1, Unit: second
        USER0_CONFIG_LCFG_PPG_MEAS_SEC = 0x6 //! Time to measure PPG, EDA, acceleration - 2byte, Setting range: 1-180, Increment: 1, Unit: second
        USER0_CONFIG_LCFG_SLEEP_MIN = 0x7 //!< Sleep time of intermittent operation - 2byte, Setting range: 1-180, Increment: 1, Unit: minute
        USER0_CONFIG_LCFG_SIGNAL_THRESHOLD = 0x8 //!< AD threshold for determining light intensity and TIA gain with static AGC - 4byte, Setting range: 1-(2^(14-1)*127), Increment: 1, Unit: LSB
        Eg: = lcfgUser0ConfigAppRead addr1 addr2 ...
""" args = self._parse_args(arg, None) if len(args) == 0: self._p_err("No arguments supplied!") return num_ops = len(args) msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_USER0_CONFIG_APP, user0_config_app_lcfg_op_hdr_t(num_ops)) msg.payload.command = M2M2_APP_COMMON_CMD_ENUM_t.M2M2_APP_COMMON_CMD_READ_LCFG_REQ msg.payload.num_ops = num_ops for i in range(num_ops): tempVal = args[i] if ("0x") in tempVal: reg_addr = int(tempVal, 16) elif ("0X") in tempVal: reg_addr = int(tempVal, 16) else: reg_addr = int(tempVal) msg.payload.ops[i].field = reg_addr self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_USER0_CONFIG_APP, user0_config_app_lcfg_op_hdr_t(num_ops), 20) if reply_msg != None: status = self._get_enum_name(M2M2_APP_COMMON_STATUS_ENUM_t, reply_msg.payload.status) if reply_msg == None: print "Reading user0 config App LCFG failed!" return self._print_user0_config_app_lcfg_result(reply_msg) def do_lcfgUser0ConfigAppWrite(self, arg): """ Set the User0 Config App application LCFG. 
        The argument is the User0 Config App LCFG addresses or the LCFG ID, then VALUE to modify the User0 Config App lcfg value:
        Note that the range of addr varies from 0x0 to 0x8 as given below:
        USER0_CONFIG_LCFG_AGC_UP_TH = 0x0 //!< Upper limit of LED light intensity for Static AGC - 1byte, Setting range: 1-127, Increment: 1, Unit: LSB
        USER0_CONFIG_LCFG_AGC_LOW_TH = 0x1 //!< Lower limit of LED light intensity for Static AGC - 1byte, Setting range: 1-127, Increment: 1, Unit: LSB
        USER0_CONFIG_LCFG_ADV_TIMEOUT_MONITOR = 0x2 /* Elapsed time from the start of advertising after finishing PPG,
            * EDA, body temperature, and acceleration measurement during
            * intermittent operation to the time out - 2byte, Setting range: 10-300, Increment: 10, Unit: second */
        USER0_CONFIG_LCFG_HW_ID = 0x3 //!< Unique identification number assigned to each wristband - 2byte, Setting range: 00-99, Increment: 1, Unit: NA
        USER0_CONFIG_LCFG_EXP_ID = 0x4 //!< Patient-specific identification number assigned to each case - 2byte, Setting range: 0000-9999, Increment: 1, Unit: NA
        USER0_CONFIG_LCFG_TEMP_MEAS_SEC = 0x5 //!< Time to measure body temperature - 2byte, Setting range: 1-60, Increment: 1, Unit: second
        USER0_CONFIG_LCFG_PPG_MEAS_SEC = 0x6 //! Time to measure PPG, EDA, acceleration - 2byte, Setting range: 1-180, Increment: 1, Unit: second
        USER0_CONFIG_LCFG_SLEEP_MIN = 0x7 //!< Sleep time of intermittent operation - 2byte, Setting range: 1-180, Increment: 1, Unit: minute
        USER0_CONFIG_LCFG_SIGNAL_THRESHOLD = 0x8 //!< AD threshold for determining light intensity and TIA gain with static AGC - 4byte, Setting range: 1-(2^(14-1)*127), Increment: 1, Unit: LSB
        Choose value to the addr according to range mentioned.
        Eg: = lcfgUser0ConfigAppWrite addr1 value1 addr2 value2 ...
""" args = self._parse_args(arg, None) if len(args) == 0: self._p_err("No arguments supplied!") return num_ops = len(args) num_ops >>= 1 msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_USER0_CONFIG_APP, user0_config_app_lcfg_op_hdr_t(num_ops)) msg.payload.command = M2M2_APP_COMMON_CMD_ENUM_t.M2M2_APP_COMMON_CMD_WRITE_LCFG_REQ msg.payload.num_ops = num_ops for i in range(num_ops): tempVal = args[i*2] if ("0x") in tempVal: reg_addr = int(tempVal, 16) elif ("0X") in tempVal: reg_addr = int(tempVal, 16) else: reg_addr = int(tempVal) tempVal = args[i*2+1] if ("0x") in tempVal: reg_val = int(tempVal, 16) elif ("0X") in tempVal: reg_val = int(tempVal, 16) else: reg_val = int(tempVal) msg.payload.ops[i].field = reg_addr msg.payload.ops[i].value = reg_val self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_USER0_CONFIG_APP, user0_config_app_lcfg_op_hdr_t(num_ops), 60) if reply_msg == None: status = self._get_enum_name(M2M2_APP_COMMON_STATUS_ENUM_t, reply_msg.payload.status) print "Writing user0 config App LCFG failed!" 
            return
        self._print_user0_config_app_lcfg_result(reply_msg)

    def _print_user0_config_app_lcfg_result(self, packet):
        # Print the reply status followed by a field/value table (values decimal).
        self._print_packet_status(packet)
        self.vrb.write(" Num of registers: '{}'".format(int(packet.payload.num_ops)))
        t = table(["Field", "Value"])
        for i in range(packet.payload.num_ops):
            t.add_row([hex(packet.payload.ops[i].field), packet.payload.ops[i].value])
        t.display()

    def _print_ppg_lcfg_result(self, packet):
        # Same as above but PPG lcfg values are printed in hex.
        self._print_packet_status(packet)
        self.vrb.write(" Num of registers: '{}'".format(int(packet.payload.num_ops)))
        t = table(["Field", "Value"])
        for i in range(packet.payload.num_ops):
            t.add_row([hex(packet.payload.ops[i].field), hex(packet.payload.ops[i].value)])
        t.display()

    def _print_file_system_status(self, packet):
        # Status may be a common app status or a file-system specific one;
        # try both enums before giving up.
        status = self._get_enum_name(M2M2_APP_COMMON_STATUS_ENUM_t, packet.payload.status)
        if status == None:
            status = self._get_enum_name(M2M2_FILE_SYS_STATUS_ENUM_t, packet.payload.status)
        if status == None:
            self.vrb.err("Error decoding a packet's return status value!")
            return
        self.vrb.write(" Status: '{}'".format(status))

    def _print_file_count_status(self, packet):
        # Status decode as in _print_file_system_status, plus the file count.
        status = self._get_enum_name(M2M2_APP_COMMON_STATUS_ENUM_t, packet.payload.status)
        if status == None:
            status = self._get_enum_name(M2M2_FILE_SYS_STATUS_ENUM_t, packet.payload.status)
        if status == None:
            self.vrb.err("Error decoding a packet's return status value!")
            return
        self.vrb.write(" Status: '{}'".format(status))
        self.vrb.write(" No. of Files Present: '{}'".format(packet.payload.file_count))

    def _print_file_info_status(self, packet):
        # Decode status (common or file-system enum) then dump file metadata.
        status = self._get_enum_name(M2M2_APP_COMMON_STATUS_ENUM_t, packet.payload.status)
        if status == None:
            status = self._get_enum_name(M2M2_FILE_SYS_STATUS_ENUM_t, packet.payload.status)
        if status == None:
            self.vrb.err("Error decoding a packet's return status value!")
            return
        self.vrb.write(" Status: '{}'".format(status))
        self.vrb.write(" File Name: {}".format(cast(packet.payload.file_name, c_char_p).value))
        self.vrb.write(" Start Page used: '{}'".format(packet.payload.start_page))
        self.vrb.write(" End Page used: '{}'".format(packet.payload.end_page))
        self.vrb.write(" File Size: '{}'".format(packet.payload.file_size))

    def _print_page_read_test_status(self, packet,print_en):
        # Dump NAND page-read diagnostics; extra fields only when print_en is truthy.
        status = self._get_enum_name(M2M2_APP_COMMON_STATUS_ENUM_t, packet.payload.status)
        if status == None:
            status = self._get_enum_name(M2M2_FILE_SYS_STATUS_ENUM_t, packet.payload.status)
        if status == None:
            self.vrb.err("Error decoding a packet's return status value!")
            return
        self.vrb.write(" Page Number Read: '{}'".format(packet.payload.page_num))
        self.vrb.write(" Data Region status: '{}'".format(packet.payload.data_region_status))
        self.vrb.write(" ECC Zone Status: '{}'".format(packet.payload.ecc_zone_status))
        if(print_en):
            self.vrb.write(" Next Writeable Page: {}".format(packet.payload.next_page))
            self.vrb.write(" Is Page Occupied: '{}'".format(packet.payload.occupied))
            self.vrb.write(" Number of bytes written in current page: '{}'".format(packet.payload.num_bytes_written))
        self.vrb.write(" Number of bytes read: '{}'".format(packet.payload.num_bytes))
        print(" Samples of Data read : ")
        cnt = 0
        for item in packet.payload.sample_data[:packet.payload.num_bytes]:
            cnt +=1
            print( "{}: '{}'".format(cnt,item))

    def _print_packet_status(self, packet):
        # Decode and print a common app status value.
        status = self._get_enum_name(M2M2_APP_COMMON_STATUS_ENUM_t, packet.payload.status)
        if status == None:
            self.vrb.err("Error decoding a packet's return status value!")
            return
        self.vrb.write(" Status: '{}'".format(status))

    def _print_low_touch_status(self, packet):
        #status = self._get_enum_name(M2M2_APP_COMMON_STATUS_ENUM_t, packet.payload.status)
        status = self._get_enum_name(M2M2_PM_SYS_STATUS_ENUM_t, packet.payload.status)
        #if status == None:
        #    status = self._get_enum_name(M2M2_PM_SYS_STATUS_ENUM_t, packet.payload.status)
        if status == None:
            self.vrb.err("Error decoding a packet's return status value!")
            return
        self.vrb.write(" Status: '{}'".format(status))

    def _print_reg_result(self, packet):
        # Print the reply status followed by an address/value register table.
        self._print_packet_status(packet)
        self.vrb.write(" Num of registers: '{}'".format(int(packet.payload.num_ops)))
        t = table(["Address", "Value"])
        for i in range(packet.payload.num_ops):
            t.add_row([hex(packet.payload.ops[i].address), hex(packet.payload.ops[i].value)])
        t.display()

    def _print_dcfg_result(self, resp_packets, sensor):
        # Unpack the 32-bit dcfg words into address/value rows. Packing differs
        # per sensor: adpd4000/temperature use addr:hi16 value:lo16; adxl packs
        # two 8-bit addr/value registers per 16-bit half-word.
        t = table(["Address", "Value"])
        for j in range(len(resp_packets)):
            packet = resp_packets[j]
            for i in range(packet.payload.size):
                if sensor == 'adpd4000' or sensor == 'temperature':
                    value = packet.payload.dcfgdata[i] & 0X0000FFFF;
                    address = (packet.payload.dcfgdata[i] & 0XFFFF0000)>> 16;
                    t.add_row([hex(address), hex(value)])
                elif sensor == 'adxl':
                    Reg1 = (packet.payload.dcfgdata[i] & 0xFFFF0000)>> 16;
                    Reg2 = packet.payload.dcfgdata[i] & 0X0000FFFF;
                    if Reg1 != 0:
                        address = (Reg1 & (0xFF00)) >> 8;
                        value = (Reg1 & (0x00FF));
                        t.add_row([hex(address), hex(value)])
                    if Reg2 != 0:
                        address = (Reg2 & (0xFF00)) >> 8;
                        value = (Reg2 & (0x00FF));
                        t.add_row([hex(address), hex(value)])
        t.display()

    def _print_sensor_app_status(self, packet):
        # Fall back to the raw integer when the status is not a known enum value.
        status = self._get_enum_name(M2M2_APP_COMMON_STATUS_ENUM_t, packet.payload.status)
        if status == None:
            status = int(packet.payload.status)
        app = self._get_enum_name(M2M2_ADDR_ENUM_t, packet.header.src)
        stream = self._get_enum_name(M2M2_ADDR_ENUM_t, packet.payload.stream)
        if stream == None:
            stream = hex(packet.payload.stream)
        num_subscribers = int(packet.payload.num_subscribers)
        num_start_reqs = int(packet.payload.num_start_reqs)
        self.vrb.write("Application: {}:".format(app))
        self.vrb.write(" Status: '{}'".format(status))
        self.vrb.write(" Stream ID: '{}'".format(stream))
        self.vrb.write(" Number of Subscribers: '{}'".format(num_subscribers))
        self.vrb.write(" Number of Start Requests: '{}'".format(num_start_reqs))

    def _print_sensor_apps_info_status(self, packet):
        # Summarize subscriber/start-request counts for every sensor app reported.
        status = self._get_enum_name(M2M2_APP_COMMON_STATUS_ENUM_t, packet.payload.status)
        if status == None:
            status = int(packet.payload.status)
        stream = self._get_enum_name(M2M2_ADDR_ENUM_t, packet.header.src)
        if stream == None:
            stream = hex(packet.payload.stream)
        #self.vrb.write("Resp Pkt: {}:".format((packet)))
        num_sensor_apps = int(packet.payload.num_sensor_apps)
        self.vrb.write("Number of sensor apps on PS board: {} \n".format(num_sensor_apps))
        for i in range(num_sensor_apps):
            sensor_application = self._get_enum_name(M2M2_ADDR_ENUM_t,packet.payload.app_info[i].sensor_app_id)
            num_subscribers = int(packet.payload.app_info[i].num_subscribers)
            num_start_reqs = int(packet.payload.app_info[i].num_start_reqs)
            self.vrb.write("Sensor Application: {}:".format(sensor_application))
            self.vrb.write(" Number of Subscribers: '{}'".format(num_subscribers))
            self.vrb.write(" Number of Start Requests: '{}'\n\n".format(num_start_reqs))

    def _reg_op(self, address, ops, do_write):
        ''' Operate on some registers. The address is 'address', 'ops' is a list of
        address:value dictionaries, and do_write is True to write, False to read.
        '''
        # The EDA app uses 32-bit register operations; every other destination
        # uses the 16-bit register-op packet format.
        num_ops = len(ops)
        if address == M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_EDA:
            msg = m2m2_packet(address, m2m2_sensor_common_reg_op_32_hdr_t(num_ops))
            if do_write == True:
                cmd = M2M2_SENSOR_COMMON_CMD_ENUM_t.M2M2_SENSOR_COMMON_CMD_WRITE_REG_32_REQ
            elif do_write == False:
                cmd = M2M2_SENSOR_COMMON_CMD_ENUM_t.M2M2_SENSOR_COMMON_CMD_READ_REG_32_REQ
        else:
            msg = m2m2_packet(address, m2m2_sensor_common_reg_op_16_hdr_t(num_ops))
            if do_write == True:
                cmd = M2M2_SENSOR_COMMON_CMD_ENUM_t.M2M2_SENSOR_COMMON_CMD_WRITE_REG_16_REQ
            elif do_write == False:
                cmd = M2M2_SENSOR_COMMON_CMD_ENUM_t.M2M2_SENSOR_COMMON_CMD_READ_REG_16_REQ
        msg.payload.command = cmd
        msg.payload.num_ops = num_ops
        # Copy every requested address/value pair into the outgoing payload.
        for i in range(0, num_ops):
            msg.payload.ops[i].address = ops[i]['address']
            msg.payload.ops[i].value = ops[i]['value']
        self._send_packet(msg)
        # Wait for (and return) the matching reply, using the same 32/16-bit
        # payload shape that was sent.
        if address == M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_EDA:
            return self._get_packet(address, m2m2_sensor_common_reg_op_32_hdr_t(num_ops))
        else:
            return self._get_packet(address, m2m2_sensor_common_reg_op_16_hdr_t(num_ops))

    def _dispatcher(self, q_map, s_map):
        """
        The 'dispatcher' thread function. This takes messages from the server, and
        inserts them into a queue based on their source address.
        """
        self.vrb.write("Dispatcher up!", 4)
        while True:
            # Blocking read of the next raw packet received from the server.
            packet = self.rx_q.get()
            pkt_header = pack_header(packet)
            # Firmware debug-stream packets are printed immediately (and still
            # fall through to the normal routing below).
            if int(pkt_header.src) == M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_DBG_STREAM:
                pkt = m2m2_packet(0, m2m2_debug_data_t())
                pkt.unpack(packet)
                self.vrb.write("##DBG:{}".format((cast(pkt.payload.str, c_char_p).value)), 2)
            if int(pkt_header.src) in self.sock_map:
                # A TCP client has registered for this source: forward the raw
                # bytes; drop the socket from the map if the send fails.
                self.vrb.write("RX::{}".format(format_hex(packet)), 4)
                try:
                    self.sock_map[pkt_header.src].send(packet)
                except:
                    self.sock_map.pop(pkt_header.src, None)
                    addr = self._get_enum_name(M2M2_ADDR_ENUM_t, int(pkt_header.src))
                    if addr == None:
                        addr = ""
                    self.vrb.err("The socket {}({}) was broken!".format(addr, int(pkt_header.src)))
            else:
                # Otherwise route into the per-source queue consumed by
                # _get_packet(); unknown sources are reported and dropped.
                self.vrb.write("RX::{}".format(format_hex(packet)), 3)
                try:
                    destination_q = q_map[pkt_header.src].put(packet)
                except:
                    self.vrb.err("Error routing a packet: {}".format(format_hex(packet)))

    def _get_enum_name(self, enum, value):
        """
        Search an enum class "enum" to see if it contains an attribute "value".
        Use this to get enum name strings from their raw integer values.
        Returns None when the value is not found.
        """
        try:
            # Python 2 only: dict.keys()/values() return indexable lists here.
            name = vars(enum).keys()[vars(enum).values().index(value)]
        except Exception as e:
            self.vrb.write("Couldn't find value {} in enum {}".format(value, type(enum).__name__), 4)
            name = None
        return name

    def _send_packet(self, msg):
        """
        Send a packet to the server.
        """
        if self.m2m2_server == None or not self.m2m2_server.is_connected():
            self.vrb.err("Not connected to a serial device!")
            return
        # Accept either a packet object (pack() it) or pre-packed raw bytes.
        try:
            pkt = msg.pack()
        except:
            pkt = msg
        # In low-touch capture mode, packets are appended to a config file image
        # instead of being transmitted; start/stop command counters and byte
        # lengths are accumulated for the file header.
        if lowtouch.Enable_lowtouch == True:
            if lowtouch.Startcmd == True:
                lowtouch.Startcmdcount += 1
                lowtouch.Startcmdlen += len(pkt)
            elif lowtouch.Stopcmd == True:
                lowtouch.Stopcmdcount += 1
                lowtouch.Stopcmdlen += len(pkt)
            lowtouch.User_File += pkt
            self.vrb.write("TX::{}".format(format_hex(pkt)))
            return None
        self.vrb.write("TX::{}".format(format_hex(pkt)), 3)
        self.tx_q.put(pkt)

    def _get_packet(self, source, payload, timeout = 1):
        """
        Get block for 'timeout' seconds waiting for a message from the 'source' address.
        Returns None if it times out.
        """
        # No replies arrive in low-touch capture mode; callers get None.
        if lowtouch.Enable_lowtouch == True:
            return None
        packet = m2m2_packet(0, payload)
        try:
            raw_pkt = self.dispatcher_map[source].get(timeout=timeout)
        except Queue.Empty:
            return None
        try:
            packet.unpack(raw_pkt)
            return packet
        except Exception as e:
            self.vrb.err("Error unpacking a packet: {}".format(e))
            return None

    def _parse_args(self, arg, num_args = None):
        """
        Check that an 'arg' string contains the correct number of arguments, and
        returns a list of those arguments. Set num_args = None to prevent checking.
        Returns None (and prints an error) on an argument-count mismatch.
        """
        args = arg.split()
        if num_args == None:
            return args
        if len(args) != num_args:
            self.vrb.err("Incorrect number of arguments! You passed: '{}'".format(arg))
            return None
        else:
            return args

    def _get_version(self, address):
        # Request the version info of the application at 'address' and return
        # the reply packet (None on timeout).
        msg = m2m2_packet(address, m2m2_app_common_version_t())
        msg.payload.command = M2M2_APP_COMMON_CMD_ENUM_t.M2M2_APP_COMMON_CMD_GET_VERSION_REQ
        self._send_packet(msg)
        return self._get_packet(address, m2m2_app_common_version_t(),1000)

    def _print_version_pkt(self, packet):
        # Pretty-print a version reply: major/minor/patch plus the two embedded
        # C strings (version string and git-version string).
        app_name = self._get_enum_name(M2M2_ADDR_ENUM_t, packet.header.src)
        if app_name == None:
            app_name = "{}".format(hex(packet.header.src))
        try:
            self.vrb.write("Version info from '{}':".format(app_name))
            self.vrb.write(" Major: '{}'".format(int(packet.payload.major)))
            self.vrb.write(" Minor: '{}'".format(int(packet.payload.minor)))
            self.vrb.write(" Patch: '{}'".format(int(packet.payload.patch)))
            # NOTE(review): both lines below print with the same " String:"
            # label; the second is the git_version field — consider distinct labels.
            self.vrb.write(" String: '{}'".format(cast(packet.payload.verstr, c_char_p).value))
            # Cast the git_version string to a c_char_p pointer, and access its value as a Python string
            self.vrb.write(" String: '{}'".format(cast(packet.payload.str, c_char_p).value))
        except Exception as e:
            self.vrb.err("There was an error printing the version information: {}".format(e))

    def _print_file_list(self, packet):
        # Print one filesystem directory entry (name/type/size) and return the
        # same information as a dict; empty dict on a decode error.
        file_info_dict = {}
        try:
            filetype = self._get_enum_name(FILE_TYPE_ENUM_t, packet.payload.filetype)
            filesize = int(packet.payload.filesize)
            self.vrb.write("FILE: {}".format(cast(packet.payload.full_file_name, c_char_p).value))
            self.vrb.write("FILE_TYPE: '{}'".format(filetype))
            self.vrb.write("FILE_SIZE: '{}'\n".format(filesize))
            file_info_dict = {'file': cast(packet.payload.full_file_name, c_char_p).value,
                              'file_type': filetype,
                              'file_size': filesize}
        except Exception as e:
            self.vrb.err("There was an error printing the file list: {}".format(e))
        return file_info_dict

    def _subscribe(self, address, stream, sub_unsub):
        # Subscribe (sub_unsub=True) or unsubscribe (False) this tool to/from
        # 'stream' on the app at 'address'; returns the reply packet.
        msg = m2m2_packet(address, m2m2_app_common_sub_op_t())
        if sub_unsub == True:
            command = M2M2_APP_COMMON_CMD_ENUM_t.M2M2_APP_COMMON_CMD_STREAM_SUBSCRIBE_REQ
        elif sub_unsub == False:
            command = M2M2_APP_COMMON_CMD_ENUM_t.M2M2_APP_COMMON_CMD_STREAM_UNSUBSCRIBE_REQ
        else:
            return None
        msg.payload.command = command
        msg.payload.stream = stream
        self._send_packet(msg)
        return self._get_packet(address, m2m2_app_common_sub_op_t())

    def _print_subscription_status(self, packet):
        # Print the status/stream/source of a (un)subscribe reply, falling back
        # to raw numeric forms for unknown enum values.
        status = self._get_enum_name(M2M2_APP_COMMON_STATUS_ENUM_t, packet.payload.status)
        if status == None:
            status = int(packet.payload.status)
        stream = self._get_enum_name(M2M2_ADDR_ENUM_t, packet.payload.stream)
        if stream == None:
            stream = hex(packet.payload.stream)
        source = self._get_enum_name(M2M2_ADDR_ENUM_t, packet.header.src)
        if source == None:
            source = hex(packet.header.src)
        self.vrb.write("Application: {}:".format(source), 2)
        self.vrb.write(" Status: '{}'".format(status))
        self.vrb.write(" Stream ID: '{}'".format(stream), 2)

    def _LoadCfg(self, filename):
        # Load an ADPD4000 register configuration file and write it to the
        # device one register at a time. Each non-blank line is
        # "<hex addr> <hex value> [comment]"; unparsable lines are echoed as-is.
        print filename
        try:
            f = open(filename)
        except IOError:
            print filename + "not found"
        else:
            lines = f.readlines()
            f.close()
            msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADPD4000, m2m2_sensor_common_reg_op_16_hdr_t(1))
            cmd = M2M2_SENSOR_COMMON_CMD_ENUM_t.M2M2_SENSOR_COMMON_CMD_WRITE_REG_16_REQ
            msg.payload.command = cmd
            msg.payload.num_ops = 1
            timeStart = time.time()
            for line in lines:
                if not line.strip():
                    continue
                splitline = line.rstrip('\n').split(None,2)
                try:
                    address = int(splitline[0], 16)
                    value = int(splitline[1], 16)
                    if (len(splitline)>2):
                        comment = splitline[2]
                    else:
                        comment = ''
                    #Bridge.writeReg(address, value)
                    print ("Set [0x%04x] = 0x%04x " % (address, value) + comment)
                    msg.payload.ops[0].address = address
                    msg.payload.ops[0].value = value
                    self._send_packet(msg)
                    # Wait for each write's ack before sending the next one.
                    reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADPD4000, m2m2_sensor_common_reg_op_16_hdr_t(1), 1)
                except ValueError:
                    print " " + line
            elaspTime = time.time()
            elaspTime -= timeStart
            print "Loading configure file done. Elasp time=%f" %(elaspTime)
        return

    def _get_free_port(self):
        # Ask the OS for a free TCP port by binding to port 0, then release it.
        # NOTE(review): the port is free only at the moment of the check; a
        # race exists before the caller re-binds it.
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        try:
            s.bind(('localhost', 0))
        except socket.error as msg:
            print 'Bind failed. Error Code : ' + str(msg[0]) + ' Message ' + msg[1]
            return None
        s.listen(1)
        port = s.getsockname()[1]
        s.close()
        return port

    def do_setDateTime(self, arg):
        """
        Set date and time. Current PC time will be sent to the device.
            #>setDateTime
        """
        args = self._parse_args(arg, 0)
        if args == None:
            return
        msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM, m2m2_pm_sys_date_time_req_t())
        msg.payload.command = M2M2_PM_SYS_COMMAND_ENUM_t.M2M2_PM_SYS_COMMAND_SET_DATE_TIME_REQ
        now = datetime.datetime.now()
        # Compute the local UTC offset in seconds, honouring DST if active.
        is_dst = time.daylight and time.localtime().tm_isdst > 0
        utc_offset = - (time.altzone if is_dst else time.timezone)
        msg.payload.year = now.year
        msg.payload.month = now.month
        msg.payload.day = now.day
        msg.payload.hour = now.hour
        msg.payload.minute = now.minute
        msg.payload.second = now.second
        msg.payload.TZ_sec = utc_offset
        self._send_packet(msg)
        self.vrb.write("date and time: {}".format(now), 2)
        self.vrb.write("timezone: {}".format(utc_offset), 2)
        reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM, m2m2_pm_sys_cmd_t(), 10)
        if reply_msg == None:
            self.vrb.err("Error! Timed out waiting for the device!")
            return
        status = self._get_enum_name(M2M2_PM_SYS_STATUS_ENUM_t, reply_msg.payload.status)
        if status == None:
            status = format(reply_msg.payload.status, '#04x')
        self.vrb.write("Command return status: {}".format(status))

    def do_getDateTime(self, arg):
        """
        Get date and time.Getting PM current date and time.
            #>getDateTime
        """
        args = self._parse_args(arg, 0)
        if args == None:
            return
        msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM, m2m2_pm_sys_cmd_t())
        msg.payload.command = M2M2_PM_SYS_COMMAND_ENUM_t.M2M2_PM_SYS_COMMAND_GET_DATE_TIME_REQ
        self._send_packet(msg)
        reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM, m2m2_pm_sys_date_time_req_t(), 10)
        if reply_msg == None:
            self.vrb.err("Error! Timed out waiting for the device!")
            return
        status = self._get_enum_name(M2M2_PM_SYS_STATUS_ENUM_t, reply_msg.payload.status)
        if status == None:
            status = format(reply_msg.payload.status, '#04x')
        self.vrb.write("date and time: {}-{:02}-{:02} {:02}:{:02}:{:02}".format((int(reply_msg.payload.year)),(int(reply_msg.payload.month)),(int(reply_msg.payload.day)),(int(reply_msg.payload.hour)),(int(reply_msg.payload.minute)),(int(reply_msg.payload.second))))
        self.vrb.write("timezone: {}".format(int(reply_msg.payload.TZ_sec)))
        self.vrb.write("Command return status: {}".format(status))
        # Also show the PC clock so drift against the device clock is visible.
        now = datetime.datetime.now()
        self.vrb.write("PC Time: {}".format(now))
        date_time_dict = {'year': int(reply_msg.payload.year), 'month': int(reply_msg.payload.month), 'day': int(reply_msg.payload.day), 'hour': int(reply_msg.payload.hour), 'min': int(reply_msg.payload.minute), 'sec': int(reply_msg.payload.second), 'timezone': int(reply_msg.payload.TZ_sec)}
        # NOTE(review): int(status, 16) assumes 'status' is the '#04x' fallback
        # string; when the enum name was resolved this raises ValueError.
        err_stat = int(status, 16)
        return err_stat, date_time_dict

    def do_getPoMemUtil(self, arg):
        """
        Get PO memory utilization statistics
            #>getPoMemUtil
        """
        args = self._parse_args(arg, 0)
        if args == None:
            return
        msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM, m2m2_pm_sys_cmd_t())
        msg.payload.command = M2M2_PM_SYS_COMMAND_ENUM_t.M2M2_PM_SYS_GET_PO_MEMORY_UTILIZATION_REQ
        self._send_packet(msg)
        reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM, m2m2_get_po_memory_utilization_cmd_t(), 10)
        if reply_msg == None:
            self.vrb.err("Error! Timed out waiting for the device!")
            return
        status = self._get_enum_name(M2M2_PM_SYS_STATUS_ENUM_t, reply_msg.payload.status)
        if status == None:
            status = format(reply_msg.payload.status, '#04x')
        # Per-block-type watermarks and alloc/free counters reported by the PM.
        self.vrb.write("Min_num_free_blks_type2: {}".format(int(reply_msg.payload.min_num_free_blks_type2)))
        self.vrb.write("Min_num_free_blks_type4: {}".format(int(reply_msg.payload.min_num_free_blks_type4)))
        self.vrb.write("Min_num_free_blks_type5: {}".format(int(reply_msg.payload.min_num_free_blks_type5)))
        self.vrb.write("Block_2_allocated: {}".format(int(reply_msg.payload.block_2_allocated)))
        self.vrb.write("Block_4_allocated: {}".format(int(reply_msg.payload.block_4_allocated)))
        self.vrb.write("Block_5_allocated: {}".format(int(reply_msg.payload.block_5_allocated)))
        self.vrb.write("Block_2_freed: {}".format(int(reply_msg.payload.block_2_freed)))
        self.vrb.write("Block_4_freed: {}".format(int(reply_msg.payload.block_4_freed)))
        self.vrb.write("Block_5_freed: {}".format(int(reply_msg.payload.block_5_freed)))
        self.vrb.write("Command return status: {}".format(status))

    def do_clearPoMemUtil(self, arg):
        """
        Clear memory utilization statistics
            #>clearPoMemUtil
        """
        args = self._parse_args(arg, 0)
        if args == None:
            return
        msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM, m2m2_pm_sys_cmd_t())
        msg.payload.command = M2M2_PM_SYS_COMMAND_ENUM_t.M2M2_PM_SYS_CLEAR_PO_MEMORY_UTILIZATION_REQ
        self._send_packet(msg)
        reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM, m2m2_clear_po_memory_utilization_cmd_t(), 10)
        if reply_msg == None:
            self.vrb.err("Error! Timed out waiting for the device!")
            return
        status = self._get_enum_name(M2M2_PM_SYS_STATUS_ENUM_t, reply_msg.payload.status)
        if status == None:
            status = format(reply_msg.payload.status, '#04x')
        self.vrb.write("Command return status: {}".format(status))

    def do_setManufactureDate(self, arg):
        """
        Set the Manufacture Date of Watch, from the current PC time.
#>setManufactureDate """ args = self._parse_args(arg, 0) if args == None: return msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM, m2m2_manufacture_date_t()) msg.payload.command = M2M2_PM_SYS_COMMAND_ENUM_t.M2M2_PM_SYS_COMMAND_SET_MANUFACTURE_DATE_REQ now = datetime.datetime.now() msg.payload.year = now.year msg.payload.month = now.month msg.payload.day = now.day self._send_packet(msg) self.vrb.write("Manufacture date being set to : {} {} {}".format(now.year,now.month,now.day), 1) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM, m2m2_manufacture_date_t(), 10) if reply_msg == None: self.vrb.err("Error! Timed out waiting for the device!") return status = self._get_enum_name(M2M2_PM_SYS_STATUS_ENUM_t, reply_msg.payload.status) if status == None: status = format(reply_msg.payload.status, '#04x') self.vrb.write("Command return status: {}".format(status)) def do_getManufactureDate(self, arg): """ Get the Manufacture Date of Watch. #>getDateTime """ args = self._parse_args(arg, 0) if args == None: return msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM, m2m2_manufacture_date_t()) msg.payload.command = M2M2_PM_SYS_COMMAND_ENUM_t.M2M2_PM_SYS_COMMAND_GET_MANUFACTURE_DATE_REQ self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM, m2m2_manufacture_date_t(), 10) if reply_msg == None: self.vrb.err("Error! Timed out waiting for the device!") return status = self._get_enum_name(M2M2_PM_SYS_STATUS_ENUM_t, reply_msg.payload.status) if status == None: status = format(reply_msg.payload.status, '#04x') self.vrb.write("Manufacture date: {}-{:02}-{:02}".format((int(reply_msg.payload.year)),(int(reply_msg.payload.month)),(int(reply_msg.payload.day)))) self.vrb.write("Command return status: {}".format(status)) def do_system_reset(self, arg): """ Send a command to the Device to Restart - software reset. 
            #>system_reset
        """
        args = self._parse_args(arg, 0)
        if args == None:
            return
        msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM, m2m2_pm_sys_pwr_state_t())
        msg.payload.command = M2M2_PM_SYS_COMMAND_ENUM_t.M2M2_PM_SYS_COMMAND_SYSTEM_RESET_REQ
        self._send_packet(msg)
        # Short timeout: the device acks just before it resets.
        reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM, m2m2_pm_sys_pwr_state_t(), 3)
        if reply_msg == None:
            self.vrb.err("Error! Timed out waiting for the device!")
            return
        status = self._get_enum_name(M2M2_PM_SYS_STATUS_ENUM_t, reply_msg.payload.status)
        if status == None:
            status = format(reply_msg.payload.status, '#04x')
        self.vrb.write("Command return status: {}".format(status))
        #self.m2m2_server.quit()

    def do_system_hw_reset(self, arg):
        """
        Send a command to the Device to Restart - hardware reset.
            #>system_hw_reset
        """
        args = self._parse_args(arg, 0)
        if args == None:
            return
        msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM, m2m2_pm_sys_pwr_state_t())
        msg.payload.command = M2M2_PM_SYS_COMMAND_ENUM_t.M2M2_PM_SYS_COMMAND_SYSTEM_HW_RESET_REQ
        self._send_packet(msg)
        reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM, m2m2_pm_sys_pwr_state_t(), 3)
        if reply_msg == None:
            self.vrb.err("Error! Timed out waiting for the device!")
            return
        status = self._get_enum_name(M2M2_PM_SYS_STATUS_ENUM_t, reply_msg.payload.status)
        if status == None:
            status = format(reply_msg.payload.status, '#04x')
        self.vrb.write("Command return status: {}".format(status))
        self.vrb.write("Wait for 25.6 secs for reset to happen")
        #self.m2m2_server.quit()

    def get_dcfg_data_list(self, f_path):
        """
        To get address & variable list from a file that contains dcfg
        Lines are "<addr> <value>"; blank lines and '#' comment lines are skipped.
        :param f_path: path of the dcfg/lcfg file to parse
        :return: (addr_list, val_list) parallel lists of hex strings
        """
        with open(f_path, 'r') as f_ref:
            line_list = f_ref.readlines()
        addr_list, val_list = [], []
        for line in line_list:
            if line.strip() and line.strip()[0] != '#':
                addr_val_list = line.split(' ')
                addr_list.append(addr_val_list[0].strip())
                val_list.append(addr_val_list[1].strip())
        return addr_list, val_list

    def do_loadAdpdUCDcfg(self, arg):
        """
        Load ADPD device with UC dcfg using "reg w add:val" command.
        Argument to be passed is: [uc] as defined below:
        1 --> UC1
        2 --> UC2
        ...
        5 --> UC5
        The Watch dvt version specific register set is loaded
        -----------------------------------------------
        Usage:
            #>loadAdpdUCDcfg [uc]
            #>loadAdpdUCDcfg 2
            #>loadAdpdUCDcfg 5
        """
        args = self._parse_args(arg, None)
        if len(args) == 0 or len(args) != 1:
            self._p_err("Wrong arguments supplied!")
            return
        uc = int(args[0])
        # Pick the dcfg file matching both the use case and the watch DVT rev.
        dvt_revision = self.dvt_ver
        self.vrb.write("Loading Cfg for Watch version: {}".format(dvt_revision))
        curr_dir = os.getcwd()
        dcb_cfg_dir = os.path.join(curr_dir, 'mv_uc_dcfg')
        if uc == 1 and dvt_revision == "dvt1":
            dcb_file = os.path.join(dcb_cfg_dir, 'DVT1_UseCase1.dcfg')
        elif uc == 1 and dvt_revision == "dvt2":
            dcb_file = os.path.join(dcb_cfg_dir, 'DVT2_UseCase1.dcfg')
        elif uc == 2 and dvt_revision == "dvt1":
            dcb_file = os.path.join(dcb_cfg_dir, 'DVT1_UseCase2.dcfg')
        elif uc == 2 and dvt_revision == "dvt2":
            dcb_file = os.path.join(dcb_cfg_dir, 'DVT2_UseCase2.dcfg')
        elif uc == 3 and dvt_revision == "dvt1":
            dcb_file = os.path.join(dcb_cfg_dir, 'DVT1_UseCase3.dcfg')
        elif uc == 3 and dvt_revision == "dvt2":
            dcb_file = os.path.join(dcb_cfg_dir, 'DVT2_UseCase3.dcfg')
        elif uc == 4 and dvt_revision == "dvt1":
            dcb_file = os.path.join(dcb_cfg_dir, 'DVT1_UseCase4.dcfg')
        elif uc == 4 and dvt_revision == "dvt2":
            dcb_file = os.path.join(dcb_cfg_dir, 'DVT2_UseCase4.dcfg')
        elif uc == 5 and dvt_revision == "dvt1":
            dcb_file = os.path.join(dcb_cfg_dir, 'DVT1_UseCase5.dcfg')
        elif uc == 5 and dvt_revision == "dvt2":
            dcb_file = os.path.join(dcb_cfg_dir, 'DVT2_UseCase5.dcfg')
        elif uc == 6 and dvt_revision == "dvt1":
            dcb_file = os.path.join(dcb_cfg_dir, 'DVT1_UseCase1_multi_thermistor.dcfg')
        elif uc == 6 and dvt_revision == "dvt2":
            dcb_file = os.path.join(dcb_cfg_dir, 'DVT2_UseCase1_multi_thermistor.dcfg')
        else:
            self.vrb.write("Wrong UC/DVT revision selected. Check help & retry")
            return
        # Batch register writes 30 at a time through the generic "reg" command.
        addr_list, val_list = self.get_dcfg_data_list(dcb_file)
        j=0
        add_val_list = "adpd4000 w"
        for i, addr in enumerate(addr_list):
            add_val = " 0x" + addr + ":0x" + val_list[i]
            add_val_list = add_val_list + add_val
            j+=1
            if j == 30:
                self.do_reg(add_val_list)
                j=0
                add_val_list = "adpd4000 w "
            #self.do_reg('{} w 0x{}:0x{}'.format(dev, addr, val_list[i]))
            #self.do_reg('adpd4000 w 0x{}:0x{}'.format(addr, val_list[i]))
        # Flush the final (possibly partial) batch.
        self.do_reg(add_val_list)

    def do_loadPpgUCLcfg(self, arg):
        """
        Load PPG Application with UC lcfg using "lcfgPpgWrite add:val" command.
        Argument to be passed is: [uc] as defined below:
        1 --> UC1
        2 --> UC2
        ...
5 --> UC5 ----------------------------------------------- Usage: #>loadPpgUCLcfg [uc] #>loadPpgUCLcfg 2 #>loadPpgUCLcfg 5 #>loadPpgUCLcfg 6 """ args = self._parse_args(arg, None) if len(args) == 0 or len(args) != 1: self._p_err("Wrong arguments supplied!") return uc = int(args[0]) curr_dir = os.getcwd() dcb_cfg_dir = os.path.join(curr_dir, 'mv_uc_dcfg') if uc == 1: lcfg_file = os.path.join(dcb_cfg_dir, 'ppg_case1.lcfg') elif uc == 2: lcfg_file = os.path.join(dcb_cfg_dir, 'ppg_case2.lcfg') elif uc == 3: lcfg_file = os.path.join(dcb_cfg_dir, 'ppg_case3.lcfg') elif uc == 4: lcfg_file = os.path.join(dcb_cfg_dir, 'ppg_case4.lcfg') elif uc == 5: lcfg_file = os.path.join(dcb_cfg_dir, 'ppg_case5.lcfg') elif uc == 6: lcfg_file = os.path.join(dcb_cfg_dir, 'ppg_case6.lcfg') else: self.vrb.write("Wrong UC selected. Check help & retry") return addr_list, val_list = self.get_dcfg_data_list(lcfg_file) #print addr_list #print val_list j=0 add_val = "" for i, addr in enumerate(addr_list): add_val += " 0x" + addr + " 0x" + val_list[i] j+=1 if j == 30: self.do_lcfgPpgWrite(add_val) #print add_val j=0 add_val = "" self.do_lcfgPpgWrite(add_val) def do_delay(self, arg): """ Give a fixed delay to be used in between running the test ----------------------------------------------- Usage: #>delay 2 """ args = self._parse_args(arg) try: delay = int(args[0]) except: delay = 5 time.sleep(delay) def do_msg_verbose(self, arg): args = self._parse_args(arg, 1) if args == None: return try: lvl = int(args[0]) self.vrb.set_level(lvl) except: self.vrb.err("Invalid argument!", 1) def do_read_dcb_config(self,arg): """ Send a command to Read the DCB Configurations of the specific sensor of the Board, which is then saved into a file with name [sensor_name]_dcb_get.dcfg (ex. - adxl_dcb_get.dcfg), which will be present in 'tools/dcb_dcfg/' directory. Currently dcb configurations can be read for adpd4000, adxl, ppg, ecg, eda, lt_dcb_config, ad7156, lt_app_lcfg, user0_config. ex. 
        read_dcb_config [sensor_name]
            #>read_dcb_config adpd4000
        """
        # Resolve the sensor name argument to an m2m2 destination address.
        Sensor_Address = None
        args = self._parse_args(arg,1)
        if args == None:
            self.vrb.err("Incorrect usage! Please check help.")
            return 1
        for a in args:
            if a in application_name_map:
                Sensor_Address = application_name_map[a]["address"]
        if Sensor_Address == None:
            self.vrb.err("Incorrect usage! You did not provide a valid device.")
            return 1
        # All supported destinations use the same request payload shape.
        if(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADPD4000):
            msg = m2m2_packet(Sensor_Address, m2m2_pm_sys_cmd_t())
        elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_TEMPERATURE):
            msg = m2m2_packet(Sensor_Address, m2m2_pm_sys_cmd_t())
        elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADXL):
            msg = m2m2_packet(Sensor_Address, m2m2_pm_sys_cmd_t())
        elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_PPG):
            msg = m2m2_packet(Sensor_Address, m2m2_pm_sys_cmd_t())
        elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_ECG):
            msg = m2m2_packet(Sensor_Address, m2m2_pm_sys_cmd_t())
        elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_EDA):
            msg = m2m2_packet(Sensor_Address, m2m2_pm_sys_cmd_t())
        elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_BIA):
            msg = m2m2_packet(Sensor_Address, m2m2_pm_sys_cmd_t())
        elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_APP_LT_APP):
            msg = m2m2_packet(Sensor_Address, m2m2_pm_sys_cmd_t())
        elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_AD7156):
            msg = m2m2_packet(Sensor_Address, m2m2_pm_sys_cmd_t())
        elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM):
            msg = m2m2_packet(Sensor_Address, m2m2_pm_sys_cmd_t())
        elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_USER0_CONFIG_APP):
            msg = m2m2_packet(Sensor_Address, m2m2_pm_sys_cmd_t())
        else:
            print "The requested config dcb is not supported"
            return
        Array_Element = None
        msg.payload.command = M2M2_DCB_COMMAND_ENUM_t.M2M2_DCB_COMMAND_READ_CONFIG_REQ
        #msg.payload.size = rdSize #len(Adpd_org_4000_g) + 1
        self._send_packet(msg)
        # The reply payload type is sensor-specific.
        if(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADPD4000):
            reply_msg = self._get_packet(Sensor_Address, m2m2_dcb_adpd4000_data_t(), 20)
        elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_TEMPERATURE):
            reply_msg = self._get_packet(Sensor_Address, m2m2_dcb_temperature_data_t(), 20)
        elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADXL):
            reply_msg = self._get_packet(Sensor_Address, m2m2_dcb_adxl_data_t(), 20)
        elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_PPG):
            reply_msg = self._get_packet(Sensor_Address, m2m2_dcb_ppg_data_t(), 20)
        elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_ECG):
            reply_msg = self._get_packet(Sensor_Address, m2m2_dcb_ecg_data_t(), 20)
        elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_EDA):
            reply_msg = self._get_packet(Sensor_Address, m2m2_dcb_eda_data_t(), 20)
        elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_BIA):
            reply_msg = self._get_packet(Sensor_Address, m2m2_dcb_bia_data_t(), 20)
        elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_APP_LT_APP):
            reply_msg = self._get_packet(Sensor_Address, m2m2_dcb_gen_blk_data_t(), 20)
        elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_AD7156):
            reply_msg = self._get_packet(Sensor_Address, m2m2_dcb_ad7156_data_t(), 20)
        elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM):
            reply_msg = self._get_packet(Sensor_Address, m2m2_dcb_lt_app_lcfg_data_t(), 20)
        elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_USER0_CONFIG_APP):
            reply_msg = self._get_packet(Sensor_Address, m2m2_dcb_user0_blk_data_t(), 20)
        else:
            pass
        if reply_msg == None:
            self.vrb.err("Error! Timed out waiting for the device!")
            return 1
        status = self._get_enum_name(M2M2_DCB_STATUS_ENUM_t, reply_msg.payload.status)
        if status == None:
            status = format(reply_msg.payload.status, '#04x')
        # Dump the returned dcbdata words to a per-sensor file; the word layout
        # (address/value packing, field widths) differs per sensor branch below.
        ECnt = 0
        Array_Element_Count_r = int(reply_msg.payload.size)
        if(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADXL):
            # 16-bit words: high byte = address, low byte = value.
            f = open('dcb_cfg/adxl_dcb_get.dcfg','w')
            while ECnt < Array_Element_Count_r:
                self.vrb.write("Read Settings : 0x{:04X} {}".format(int(reply_msg.payload.dcbdata[ECnt]),ECnt))
                f.write('{:02X} {:02X}'.format((reply_msg.payload.dcbdata[ECnt]>>8),(reply_msg.payload.dcbdata[ECnt] & 0xff)))
                f.write('\n')
                ECnt+=1
        elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADPD4000):
            # The ADPD4000 DCB may span multiple packets; collect the rest first.
            num_of_pkts = int(reply_msg.payload.num_of_pkts)
            reply_msg_list = []
            status_list = []
            p = 1 #Start with second packet
            while p < num_of_pkts:
                _reply_msg = self._get_packet(Sensor_Address, m2m2_dcb_adpd4000_data_t(), 20)
                if _reply_msg == None:
                    self.vrb.err("Error! Timed out waiting for the device! at pkt 2")
                    return 1
                status = self._get_enum_name(M2M2_DCB_STATUS_ENUM_t, _reply_msg.payload.status)
                if status == None:
                    status = format(_reply_msg.payload.status, '#04x')
                reply_msg_list.append(_reply_msg)
                status_list.append(status)
                p = p+1
            # 32-bit words: high 16 bits = address, low 16 bits = value.
            f = open('dcb_cfg/adpd4000_dcb_get.dcfg','w')
            while ECnt < Array_Element_Count_r:
                self.vrb.write("Read Settings : 0x{:08X} {}".format(int(reply_msg.payload.dcbdata[ECnt]),ECnt))
                f.write('{:04X} {:04X}'.format((reply_msg.payload.dcbdata[ECnt]>>16),(reply_msg.payload.dcbdata[ECnt] & 0xffff)))
                f.write('\n')
                ECnt+=1
            Array_Element_Count_r = int(reply_msg.payload.size)
            if(len(reply_msg_list) > 0):
                p = 0 #Start with second packet, index=0 from reply_msg_list
                while p < (num_of_pkts-1):
                    reply_msg = reply_msg_list[p]
                    Array_Element_Count_r_pkt = int(reply_msg.payload.size)
                    ECnt = 0
                    while ECnt < Array_Element_Count_r_pkt:
                        self.vrb.write("Read Settings : 0x{:08X} {}".format(int(reply_msg.payload.dcbdata[ECnt]),ECnt))
                        f.write('{:04X} {:04X}'.format((reply_msg.payload.dcbdata[ECnt]>>16),(reply_msg.payload.dcbdata[ECnt] & 0xffff)))
                        f.write('\n')
                        ECnt+=1
                    Array_Element_Count_r = Array_Element_Count_r + int(reply_msg.payload.size) #Update total read size
                    p = p+1
                status = status_list[p-2]
        elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_TEMPERATURE):
            # Multi-packet reply; unlike the ADPD branch the first reply is also
            # appended to the list, and all packets are dumped from the list.
            num_of_pkts = int(reply_msg.payload.num_of_pkts)
            reply_msg_list = []
            status_list = []
            reply_msg_list.append(reply_msg)
            status_list.append(status)
            p = 1 #Start with second packet
            while p < num_of_pkts:
                _reply_msg = self._get_packet(Sensor_Address, m2m2_dcb_temperature_data_t(), 20)
                if _reply_msg == None:
                    self.vrb.err("Error! Timed out waiting for the device! at pkt 2")
                    return 1
                status = self._get_enum_name(M2M2_DCB_STATUS_ENUM_t, _reply_msg.payload.status)
                if status == None:
                    status = format(_reply_msg.payload.status, '#04x')
                reply_msg_list.append(_reply_msg)
                status_list.append(status)
                p = p+1
            f = open('dcb_cfg/temperature_lcfg_dcb_get.lcfg','w')
            Array_Element_Count_r = 0
            if(len(reply_msg_list) > 0):
                p = 0 #Start with first packet, index=0 from reply_msg_list
                while p < (num_of_pkts):
                    reply_msg = reply_msg_list[p]
                    Array_Element_Count_r_pkt = int(reply_msg.payload.size)
                    ECnt = 0
                    while ECnt < Array_Element_Count_r_pkt:
                        self.vrb.write("Read Settings : 0x{:08X} {}".format(int(reply_msg.payload.dcbdata[ECnt]),ECnt))
                        f.write('{:08X}'.format((reply_msg.payload.dcbdata[ECnt])))
                        f.write('\n')
                        ECnt+=1
                    Array_Element_Count_r = Array_Element_Count_r + int(reply_msg.payload.size) #Update total read size
                    p = p+1
                status = status_list[p-1]
        elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_PPG):
            # lcfg dump: "<index> <32-bit value>" per line.
            f = open('dcb_cfg/ppg_dcb_get.lcfg','w')
            while ECnt < Array_Element_Count_r:
                self.vrb.write("Read Settings : 0x{:08X} {}".format(int(reply_msg.payload.dcbdata[ECnt]),ECnt))
                f.write('{:02X} {:08X}'.format(ECnt,(reply_msg.payload.dcbdata[ECnt])))
                f.write('\n')
                ECnt+=1
        elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_ECG):
            # 32-bit words: bits 23..16 = field address, low 16 bits = value.
            f = open('dcb_cfg/ecg_dcb_get.lcfg','w')
            while ECnt < Array_Element_Count_r:
                self.vrb.write("Read Settings : 0x{:08X} {}".format(int(reply_msg.payload.dcbdata[ECnt]),ECnt))
                f.write('{:02X} {:04X}'.format(((reply_msg.payload.dcbdata[ECnt]>>16) & 0xff),(reply_msg.payload.dcbdata[ECnt] & 0xffff)))
                f.write('\n')
                ECnt+=1
        elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_EDA):
            f = open('dcb_cfg/eda_dcb_get.lcfg','w')
            while ECnt < Array_Element_Count_r:
                self.vrb.write("Read Settings : 0x{:08X} {}".format(int(reply_msg.payload.dcbdata[ECnt]),ECnt))
                f.write('{:02X} {:04X}'.format(((reply_msg.payload.dcbdata[ECnt]>>16) & 0xff),(reply_msg.payload.dcbdata[ECnt] & 0xffff)))
                f.write('\n')
                ECnt+=1
        elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_BIA):
            f = open('dcb_cfg/bia_dcb_get.lcfg','w')
            while ECnt < Array_Element_Count_r:
                self.vrb.write("Read Settings : 0x{:010X} {}".format(int(reply_msg.payload.dcbdata[ECnt]),ECnt))
                f.write('{:02X} {:08X}'.format(ECnt,(reply_msg.payload.dcbdata[ECnt] & 0xffffffff)))
                f.write('\n')
                ECnt+=1
        elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_APP_LT_APP):
            # Generic-block DCB (low-touch config); may span multiple packets.
            num_of_pkts = int(reply_msg.payload.num_of_pkts)
            reply_msg_list = []
            status_list = []
            p = 1 #Start with second packet
            while p < num_of_pkts:
                _reply_msg = self._get_packet(Sensor_Address, m2m2_dcb_gen_blk_data_t(), 20)
                if _reply_msg == None:
                    self.vrb.err("Error! Timed out waiting for the device! at pkt 2")
                    return 1
                status = self._get_enum_name(M2M2_DCB_STATUS_ENUM_t, _reply_msg.payload.status)
                if status == None:
                    status = format(_reply_msg.payload.status, '#04x')
                reply_msg_list.append(_reply_msg)
                status_list.append(status)
                p = p+1
            f = open('dcb_cfg/gen_blk_dcb_get.lcfg','w')
            self.vrb.write("**********Packet {} **********".format(1))
            while ECnt < Array_Element_Count_r:
                self.vrb.write("Read Settings : 0x{:08X} {}".format(int(reply_msg.payload.dcbdata[ECnt]),ECnt))
                f.write('0x{:08X}'.format(((reply_msg.payload.dcbdata[ECnt]))))
                f.write('\n')
                ECnt+=1
            #print(len(reply_msg_list))
            Array_Element_Count_r = int(reply_msg.payload.size)
            p = 0 #Start with second packet, index=0 from reply_msg_list
            while p < (num_of_pkts-1):
                self.vrb.write("**********Packet {} **********".format(p+2))
                reply_msg = reply_msg_list[p]
                Array_Element_Count_r_pkt = int(reply_msg.payload.size)
                ECnt = 0
                while ECnt < Array_Element_Count_r_pkt:
                    self.vrb.write("Read Settings : 0x{:08X} {}".format(int(reply_msg.payload.dcbdata[ECnt]),ECnt))
                    f.write('0x{:08X}'.format(((reply_msg.payload.dcbdata[ECnt]))))
                    f.write('\n')
                    ECnt+=1
                Array_Element_Count_r = Array_Element_Count_r + int(reply_msg.payload.size) #Update total read size
                p = p+1
            if num_of_pkts > 1:
                status = status_list[p-2]
        elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_AD7156):
            # 16-bit words: high byte = address, low byte = value.
            f = open('dcb_cfg/ad7156_dcb_get.dcfg','w')
            while ECnt < Array_Element_Count_r:
                self.vrb.write("Read Settings : 0x{:04X} {}".format(int(reply_msg.payload.dcbdata[ECnt]),ECnt))
                f.write('{:02X} {:02X}'.format((reply_msg.payload.dcbdata[ECnt]>>8),(reply_msg.payload.dcbdata[ECnt] & 0xff)))
                f.write('\n')
                ECnt+=1
        elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM):
            # lt_app_lcfg DCB is addressed via the SYS_PM application.
            f = open('dcb_cfg/lt_app_lcfg_dcb_get.lcfg','w')
            while ECnt < Array_Element_Count_r:
                self.vrb.write("Read Settings : 0x{:04X} {}".format(int(reply_msg.payload.dcbdata[ECnt]),ECnt))
                f.write('{:02X} {:02X}'.format((reply_msg.payload.dcbdata[ECnt]>>8),(reply_msg.payload.dcbdata[ECnt] & 0xff)))
f.write('\n') ECnt+=1 elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_USER0_CONFIG_APP): f = open('dcb_cfg/user0_blk_dcb_get.lcfg','w') while ECnt < Array_Element_Count_r: self.vrb.write("Read Settings : 0x{:08X} {}".format(int(reply_msg.payload.dcbdata[ECnt]),ECnt)) f.write('{:08X}'.format((reply_msg.payload.dcbdata[ECnt]))) f.write('\n') ECnt+=1 else: pass f.close self.vrb.write("DCB Entries: {:02} Size : {:02} bytes".format(int(Array_Element_Count_r),int(Array_Element_Count_r)*4)) self.vrb.write("Command return status: {}".format(status)) if status.upper() == 'M2M2_DCB_STATUS_OK': return 0 else: return 1 def do_write_dcb_config(self,arg): """ Send a command to Write the DCB Configurations of the specific sensor of the Board from its dcfg file. Currently the dcb configuration to be written is read from a dcfg file, stored in 'tools/dcb_dcfg/' directory. The dcb configurations can be written for adpd4000, adxl, ppg, ecg eda, lt_dcb_config, ad7156, lt_app_lcfg, user0_config. ex. write_dcb_config [sensor_name] [file_name] #>write_dcb_config adxl adxl_dcb.dcfg """ Sensor_Address = None pkt_cnt = 1 args = self._parse_args(arg,2) if args == None: self.vrb.err("Incorrect usage! Please check help.") return 1 filename = 'dcb_cfg/' + args[1] for a in args: if a in application_name_map: Sensor_Address = application_name_map[a]["address"] if Sensor_Address == None: self.vrb.err("Incorrect usage! 
You did not provide a valid device.") return 1 if(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADPD4000): adpd4000_dcb_cfg = arr.array('I',[]) try: f = open(filename) except: self.vrb.err("Invalid File Name") return 1 for line in f.readlines(): if(line[0] == '#' or line[0]=='\n' or line[0]==' ' or line[0]=='\t'): continue else: str = line.split('#') dcb =str[0].replace(' ','').replace('\t','').replace('\n','') adpd4000_dcb_cfg.append(int(dcb,16)) f.close() msg = m2m2_packet(Sensor_Address, m2m2_dcb_adpd4000_data_t()) pkt_cnt = len(adpd4000_dcb_cfg)/MAXADPD4000DCBSIZE if len(adpd4000_dcb_cfg)%MAXADPD4000DCBSIZE: pkt_cnt = pkt_cnt + 1 if pkt_cnt > 4 : print "ADPD4000 DCB File Size exceed. Retry with smaller files" return if pkt_cnt == 1 : msg.payload.size = 0 msg.payload.num_of_pkts = 1 elif pkt_cnt == 2 : msg.payload.size = 0 msg.payload.num_of_pkts = 2 #2nd pkt# msg2 = m2m2_packet(Sensor_Address, m2m2_dcb_adpd4000_data_t()) msg2.payload.size = 0 msg2.payload.num_of_pkts = msg.payload.num_of_pkts msg2.payload.command = M2M2_DCB_COMMAND_ENUM_t.M2M2_DCB_COMMAND_WRITE_CONFIG_REQ elif pkt_cnt == 3 : msg.payload.size = 0 msg.payload.num_of_pkts = 3 #2nd pkt# msg2 = m2m2_packet(Sensor_Address, m2m2_dcb_adpd4000_data_t()) msg2.payload.size = 0 msg2.payload.num_of_pkts = msg.payload.num_of_pkts msg2.payload.command = M2M2_DCB_COMMAND_ENUM_t.M2M2_DCB_COMMAND_WRITE_CONFIG_REQ #3rd pkt# msg3 = m2m2_packet(Sensor_Address, m2m2_dcb_adpd4000_data_t()) msg3.payload.size = 0 msg3.payload.num_of_pkts = msg.payload.num_of_pkts msg3.payload.command = M2M2_DCB_COMMAND_ENUM_t.M2M2_DCB_COMMAND_WRITE_CONFIG_REQ elif pkt_cnt == 4 : msg.payload.size = 0 msg.payload.num_of_pkts = 4 #2nd pkt# msg2 = m2m2_packet(Sensor_Address, m2m2_dcb_adpd4000_data_t()) msg2.payload.size = 0 msg2.payload.num_of_pkts = msg.payload.num_of_pkts msg2.payload.command = M2M2_DCB_COMMAND_ENUM_t.M2M2_DCB_COMMAND_WRITE_CONFIG_REQ #3rd pkt# msg3 = m2m2_packet(Sensor_Address, m2m2_dcb_adpd4000_data_t()) 
msg3.payload.size = 0 msg3.payload.num_of_pkts = msg.payload.num_of_pkts msg3.payload.command = M2M2_DCB_COMMAND_ENUM_t.M2M2_DCB_COMMAND_WRITE_CONFIG_REQ #4th pkt# msg4 = m2m2_packet(Sensor_Address, m2m2_dcb_adpd4000_data_t()) msg4.payload.size = 0 msg4.payload.num_of_pkts = msg.payload.num_of_pkts msg4.payload.command = M2M2_DCB_COMMAND_ENUM_t.M2M2_DCB_COMMAND_WRITE_CONFIG_REQ print "Number of pkts:{}".format(msg.payload.num_of_pkts) Array_Element = adpd4000_dcb_cfg elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_TEMPERATURE): adpd4000_dcb_temp_cfg = arr.array('I',[]) try: f = open(filename) except: self.vrb.err("Invalid File Name") return 1 slot_tag_list = { '<TEMP_SLOTA_DCB>': 0x01, '<TEMP_SLOTB_DCB>': 0x02, '<TEMP_SLOTC_DCB>': 0x04, '<TEMP_SLOTD_DCB>': 0x08, '<TEMP_SLOTE_DCB>': 0x10, '<TEMP_SLOTF_DCB>': 0x20, '<TEMP_SLOTG_DCB>': 0x40, '<TEMP_SLOTH_DCB>': 0x80, '<TEMP_SLOTI_DCB>': 0x100, '<TEMP_SLOTJ_DCB>': 0x200, '<TEMP_SLOTK_DCB>': 0x400, '<TEMP_SLOTL_DCB>': 0x800, } hdr_start_list = ['<TEMP_DCB>','<TEMP_SLOTC_DCB>','<TEMP_SLOTD_DCB>','<TEMP_SLOTJ_DCB>', '<TEMP_SLOTK_DCB>','<TEMP_SLOTL_DCB>'] hdr_stop_list = ['</TEMP_DCB>', '</TEMP_SLOTC_DCB>', '</TEMP_SLOTD_DCB>', '</TEMP_SLOTJ_DCB>', '</TEMP_SLOTK_DCB>', '</TEMP_SLOTL_DCB>'] hdr_start = 0 slots_selected = 0 hdr_start_str = "" loop_cnt = 0 hdr_stop = 0 for line in f.readlines(): if(line[0] == '#' or line[0]=='\n' or line[0]==' ' or line[0]=='\t'): continue else: str = line.split('#') str = str[0].replace('\t', '').replace('\n', '') str = str.split(' ') # print("lines: {}".format(str[0])) if str[0] in hdr_start_list: hdr_start = 1 hdr_stop = 0 hdr_start_str = str[0] # print("hdr start str = {}".format(hdr_start_str)) loop_cnt = 0; elif str[0] in hdr_stop_list: hdr_start = 0 hdr_stop = 1 hdr_stop_str = str[0] # print("hdr stop str = {}".format(hdr_stop_str)) elif hdr_start == 1: if hdr_start_str == '<TEMP_DCB>': adpd4000_dcb_temp_cfg.append(int(str[1], 16)) loop_cnt += 1 if loop_cnt == 2: slots_selected = 
int(str[1],16) # print("slots selected = {:X}".format(slots_selected)) else: loop_cnt += 1 slot_en_mask = slot_tag_list[hdr_start_str] # print("slot_en_mask :{}".format(slot_en_mask)) if slots_selected & slot_en_mask: adpd4000_dcb_temp_cfg.append(int(str[1], 16)) # print("slot tag:{}, val ={}".format(hdr_start_str,int(str[1],16))) # dcb =str[0].replace(' ','').replace('\t','').replace('\n','') #adpd4000_dcb_temp_cfg.append(int(dcb,16)) f.close() msg = m2m2_packet(Sensor_Address, m2m2_dcb_temperature_data_t()) pkt_cnt = len(adpd4000_dcb_temp_cfg)/MAXADPD4000DCBSIZE if len(adpd4000_dcb_temp_cfg)%MAXADPD4000DCBSIZE: pkt_cnt = pkt_cnt + 1 if pkt_cnt > 2 : print "Temperature LCFG DCB File Size exceed. Retry with smaller files" return if pkt_cnt == 1 : msg.payload.size = 0 msg.payload.num_of_pkts = 1 elif pkt_cnt == 2 : msg.payload.size = 0 msg.payload.num_of_pkts = 2 #2nd pkt# msg2 = m2m2_packet(Sensor_Address, m2m2_dcb_temperature_data_t()) msg2.payload.size = 0 msg2.payload.num_of_pkts = msg.payload.num_of_pkts msg2.payload.command = M2M2_DCB_COMMAND_ENUM_t.M2M2_DCB_COMMAND_WRITE_CONFIG_REQ print "Number of pkts:{}".format(msg.payload.num_of_pkts) Array_Element = adpd4000_dcb_temp_cfg elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADXL): adxl_dcb_cfg = arr.array('I',[]) try: f = open(filename) except: self.vrb.err("Invalid File Name") return 1 for line in f.readlines(): if(line[0] == '#' or line[0]=='\n' or line[0]==' ' or line[0]=='\t'): continue else: str = line.split('#') dcb = str[0].replace(' ','').replace('\t','').replace('\n','') adxl_dcb_cfg.append(int(dcb,16)) f.close() msg = m2m2_packet(Sensor_Address, m2m2_dcb_adxl_data_t()) Array_Element = adxl_dcb_cfg elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_PPG): ppg_dcb_lcfg = arr.array('i',[]) try: f = open(filename) except: self.vrb.err("Invalid File Name") return 1 for line in f.readlines(): if(line[0] == '#' or line[0]=='\n' or line[0]==' ' or line[0]=='\t' or line[0]=='/'): continue else: str 
= line.split('#') str = str[0].split('/') str = str[0].split(' ') str = str[1].replace(' ','').replace('\t','').replace('\n','') ppg_dcb_lcfg.append(int(str,16)) f.close() msg = m2m2_packet(Sensor_Address, m2m2_dcb_ppg_data_t()) Array_Element = ppg_dcb_lcfg elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_ECG): ecg_dcb_cfg = arr.array('I',[]) try: f = open(filename) except: self.vrb.err("Invalid File Name") return 1 for line in f.readlines(): if(line[0] == '#' or line[0]=='\n' or line[0]==' ' or line[0]=='\t'): continue else: str = line.split('#') str = str[0].split(' ') val1 = str[0] val2 = str[1].replace(' ','').replace('\t','').replace('\n','') val = (int(val1,16) << 16) | (int(val2,16)) ecg_dcb_cfg.append(val) f.close() msg = m2m2_packet(Sensor_Address, m2m2_dcb_ecg_data_t()) Array_Element = ecg_dcb_cfg elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_EDA): eda_dcb_cfg = arr.array('I',[]) try: f = open(filename) except: self.vrb.err("Invalid File Name") return 1 for line in f.readlines(): if(line[0] == '#' or line[0]=='\n' or line[0]==' ' or line[0]=='\t'): continue else: str = line.split('#') str = str[0].split(' ') val1 = str[0] val2 = str[1].replace(' ','').replace('\t','').replace('\n','') val = (int(val1,16) << 16) | (int(val2,16)) eda_dcb_cfg.append(val) f.close() msg = m2m2_packet(Sensor_Address, m2m2_dcb_eda_data_t()) Array_Element = eda_dcb_cfg elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_BIA): bia_dcb_cfg = [] try: f = open(filename) except: self.vrb.err("Invalid File Name") return 1 for line in f.readlines(): if(line[0] == '#' or line[0]=='\n' or line[0]==' ' or line[0]=='\t'): continue else: str = line.split('#') str = str[0].split('/') str = str[0].split(' ') str = str[1].replace(' ','').replace('\t','').replace('\n','') bia_dcb_cfg.append(int(str,16)) f.close() msg = m2m2_packet(Sensor_Address, m2m2_dcb_bia_data_t()) Array_Element = bia_dcb_cfg elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_APP_LT_APP): gen_blk_dcb_cfg = 
arr.array('I',[]) try: f = open(filename,"rb") except: self.vrb.err("Invalid File Name") return 1 chunk_len = 256 bytes_read = 0 if f != None: while True: config_buffer = f.read(chunk_len) if not config_buffer: break temp_array = list(bytearray(config_buffer)) index = 0 while index < len(temp_array): data = [ temp_array[index], temp_array[index+1] , temp_array[index+2], temp_array[index+3] ] index += 4 # Show hex values of data. #print(list('%02x' % b for b in data)) #Convert to 4 byte unsigned integer data interpreting data as being in little-endian byte order. value = struct.unpack("<I", bytearray(data))[0] #print(hex(value)) gen_blk_dcb_cfg.append(value) bytes_read = bytes_read + len(config_buffer) f.close() pkt_cnt = len(gen_blk_dcb_cfg)/MAXGENBLKDCBSIZE if len(gen_blk_dcb_cfg)%MAXGENBLKDCBSIZE : pkt_cnt =pkt_cnt + 1 if pkt_cnt > 18 : #Max no: of pkt count = 18 , to support 57*4*18 = 4104 bytes in gen block DCB print "GEN_BLK DCB File Size exceed.Retry with smaller files" return print "Number of pkts:{}".format(pkt_cnt) msg_list = [] #list to hold msg of type m2m2_packet() p = 0 #Start with 1st msg pkt; index from 0 to pkt_cnt-1 while p < pkt_cnt: msg = m2m2_packet(Sensor_Address, m2m2_dcb_gen_blk_data_t()) msg.payload.size = 0 msg.payload.num_of_pkts = pkt_cnt msg.payload.command = M2M2_DCB_COMMAND_ENUM_t.M2M2_DCB_COMMAND_WRITE_CONFIG_REQ msg_list.append(msg) p+=1 Array_Element = gen_blk_dcb_cfg elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_AD7156): ad7156_dcb_cfg = arr.array('I',[]) try: f = open(filename) except: self.vrb.err("Invalid File Name") return 1 for line in f.readlines(): if(line[0] == '#' or line[0]=='\n' or line[0]==' ' or line[0]=='\t'): continue else: str = line.split('#') dcb = str[0].replace(' ','').replace('\t','').replace('\n','') ad7156_dcb_cfg.append(int(dcb,16)) f.close() msg = m2m2_packet(Sensor_Address, m2m2_dcb_ad7156_data_t()) Array_Element = ad7156_dcb_cfg elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM): 
lt_app_lcfg_dcb_cfg = arr.array('I',[]) try: f = open(filename) except: self.vrb.err("Invalid File Name") return 1 for line in f.readlines(): if(line[0] == '#' or line[0]=='\n' or line[0]==' ' or line[0]=='\t'): continue else: str = line.split('#') dcb = str[0].replace(' ','').replace('\t','').replace('\n','') lt_app_lcfg_dcb_cfg.append(int(dcb,16)) f.close() msg = m2m2_packet(Sensor_Address, m2m2_dcb_lt_app_lcfg_data_t()) Array_Element = lt_app_lcfg_dcb_cfg elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_USER0_CONFIG_APP): user0_config_app_dcb_cfg = arr.array('I',[]) try: f = open(filename) except: self.vrb.err("Invalid File Name") return 1 for line in f.readlines(): if(line[0] == '#' or line[0]=='\n' or line[0]==' ' or line[0]=='\t'): continue else: str = line.split('#') dcb = str[0].replace(' ','').replace('\t','').replace('\n','') user0_config_app_dcb_cfg.append(int(dcb,16)) f.close() msg = m2m2_packet(Sensor_Address, m2m2_dcb_user0_blk_data_t()) Array_Element = user0_config_app_dcb_cfg else: Array_Element = NULL print "Wrong blk name selected" return 1 msg.payload.command = M2M2_DCB_COMMAND_ENUM_t.M2M2_DCB_COMMAND_WRITE_CONFIG_REQ ECnt = 0 Array_Element_Count_w = len(Array_Element) while ECnt < Array_Element_Count_w: #print "{} {}".format(Array_Element_Count_w, ECnt) if(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADPD4000): if ECnt >= (3*MAXADPD4000DCBSIZE) and ECnt <= (4*MAXADPD4000DCBSIZE): msg4.payload.size = msg4.payload.size + 1 msg4.payload.dcbdata[ECnt%MAXADPD4000DCBSIZE] = int(Array_Element[ECnt]) self.vrb.write("Write Settings : 0x{:08X} {}".format(int(msg4.payload.dcbdata[ECnt%MAXADPD4000DCBSIZE]), int(ECnt%(MAXADPD4000DCBSIZE)))) elif ECnt >= (2*MAXADPD4000DCBSIZE) and ECnt <= (3*MAXADPD4000DCBSIZE): msg3.payload.size = msg3.payload.size + 1 msg3.payload.dcbdata[ECnt%MAXADPD4000DCBSIZE] = int(Array_Element[ECnt]) self.vrb.write("Write Settings : 0x{:08X} {}".format(int(msg3.payload.dcbdata[ECnt%MAXADPD4000DCBSIZE]), 
int(ECnt%(MAXADPD4000DCBSIZE)))) elif ECnt >= (1*MAXADPD4000DCBSIZE) and ECnt <= (2*MAXADPD4000DCBSIZE): msg2.payload.size = msg2.payload.size + 1 msg2.payload.dcbdata[ECnt%MAXADPD4000DCBSIZE] = int(Array_Element[ECnt]) self.vrb.write("Write Settings : 0x{:08X} {}".format(int(msg2.payload.dcbdata[ECnt%MAXADPD4000DCBSIZE]), int(ECnt%(MAXADPD4000DCBSIZE)))) else: msg.payload.size = msg.payload.size + 1 msg.payload.dcbdata[ECnt] = int(Array_Element[ECnt]) self.vrb.write("Write Settings : 0x{:08X} {}".format(int(msg.payload.dcbdata[ECnt]), int(ECnt))) elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_TEMPERATURE): if ECnt >= (1*MAXTEMPRLCFGDCBSIZE) and ECnt <= (2*MAXTEMPRLCFGDCBSIZE): msg2.payload.size = msg2.payload.size + 1 msg2.payload.dcbdata[ECnt%MAXTEMPRLCFGDCBSIZE] = int(Array_Element[ECnt]) self.vrb.write("Write Settings : 0x{:08X} {}".format(int(msg2.payload.dcbdata[ECnt%MAXTEMPRLCFGDCBSIZE]), int(ECnt%(MAXTEMPRLCFGDCBSIZE)))) else: msg.payload.size = msg.payload.size + 1 msg.payload.dcbdata[ECnt] = int(Array_Element[ECnt]) self.vrb.write("Write Settings : 0x{:08X} {}".format(int(msg.payload.dcbdata[ECnt]), int(ECnt))) elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADXL): msg.payload.dcbdata[ECnt] = int(Array_Element[ECnt]) self.vrb.write("Write Settings : 0x{:04X} {}".format(int(msg.payload.dcbdata[ECnt]),int(ECnt))) elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_PPG): msg.payload.dcbdata[ECnt] = int(Array_Element[ECnt]) self.vrb.write("Write Settings : 0x{:08X} {}".format(int(msg.payload.dcbdata[ECnt]),int(ECnt))) elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_ECG): msg.payload.dcbdata[ECnt] = int(Array_Element[ECnt]) self.vrb.write("Write Settings : 0x{:08X} {}".format(int(msg.payload.dcbdata[ECnt]), int(ECnt))) elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_EDA): msg.payload.dcbdata[ECnt] = int(Array_Element[ECnt]) self.vrb.write("Write Settings : 0x{:08X} {}".format(int(msg.payload.dcbdata[ECnt]), int(ECnt))) elif 
(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_BIA): msg.payload.dcbdata[ECnt] = int(Array_Element[ECnt]) self.vrb.write("Write Settings : 0x{:08X} {}".format(int(msg.payload.dcbdata[ECnt]), int(ECnt))) elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_APP_LT_APP): p = 0 #For M2M2_ADDR_APP_LT_APP, Start with 1st msg pkt; index from 0 to pkt_cnt-1 while p < pkt_cnt: if ECnt >= (p*MAXGENBLKDCBSIZE) and ECnt < ((p+1)*MAXGENBLKDCBSIZE): msg_list[p].payload.size = msg_list[p].payload.size + 1 msg_list[p].payload.dcbdata[ECnt%MAXGENBLKDCBSIZE] = int(Array_Element[ECnt]) self.vrb.write("Write Settings : 0x{:08X} {}".format(int(msg_list[p].payload.dcbdata[ECnt%MAXGENBLKDCBSIZE]), int(ECnt%(MAXGENBLKDCBSIZE)))) break p+=1 elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_AD7156): msg.payload.dcbdata[ECnt] = int(Array_Element[ECnt]) self.vrb.write("Write Settings : 0x{:08X} {}".format(int(msg.payload.dcbdata[ECnt]), int(ECnt))) elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM): msg.payload.dcbdata[ECnt] = int(Array_Element[ECnt]) self.vrb.write("Write Settings : 0x{:08X} {}".format(int(msg.payload.dcbdata[ECnt]), int(ECnt))) elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_USER0_CONFIG_APP): msg.payload.dcbdata[ECnt] = int(Array_Element[ECnt]) self.vrb.write("Write Settings : 0x{:08X} {}".format(int(msg.payload.dcbdata[ECnt]), int(ECnt))) else: pass ECnt += 1 if(Sensor_Address != M2M2_ADDR_ENUM_t.M2M2_ADDR_APP_LT_APP): if pkt_cnt == 1: msg.payload.size = Array_Element_Count_w else: if(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADPD4000): msg.payload.size = MAXADPD4000DCBSIZE elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_TEMPERATURE): msg.payload.size = MAXTEMPRLCFGDCBSIZE '''else: print"Something went wrong" return''' self._send_packet(msg) else: p = 0 #Only for M2M2_ADDR_APP_LT_APP, Start with 1st msg pkt; index from 0 to pkt_cnt-1 num_bytes = 0 while p < pkt_cnt: self.vrb.write("********** Sending gen_blk_dcb packet ({}) of size:{} 
********** ".format((p+1),msg_list[p].payload.size)) self._send_packet(msg_list[p]) #time.sleep(0.01) reply_msg = self._get_packet(Sensor_Address, m2m2_dcb_gen_blk_data_t(), 20) if reply_msg == None: self.vrb.err("Error! Timed out waiting for the device, with {} pkt receive!".format((p+1))) return 1 status = self._get_enum_name(M2M2_DCB_STATUS_ENUM_t, reply_msg.payload.status) if status == None: status = format(reply_msg.payload.status, '#04x') self.vrb.write("Size : {:02}".format(int(msg_list[p].payload.size))) num_bytes += int(msg_list[p].payload.size) self.vrb.write("Command return status: {}".format(status)) if status.upper() != 'M2M2_DCB_STATUS_OK': return 1 p+=1 self.vrb.write("\nWrite DCB Entries: {:02} Size : {:02} bytes".format(num_bytes, num_bytes*4)) return 0 if(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADPD4000): reply_msg = self._get_packet(Sensor_Address, m2m2_dcb_adpd4000_data_t(), 20) elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_TEMPERATURE): reply_msg = self._get_packet(Sensor_Address, m2m2_dcb_temperature_data_t(), 20) elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADXL): reply_msg = self._get_packet(Sensor_Address, m2m2_dcb_adxl_data_t(), 20) elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_PPG): reply_msg = self._get_packet(Sensor_Address, m2m2_dcb_ppg_data_t(), 20) elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_ECG): reply_msg = self._get_packet(Sensor_Address, m2m2_dcb_ecg_data_t(), 20) elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_EDA): reply_msg = self._get_packet(Sensor_Address, m2m2_dcb_eda_data_t(), 20) elif (Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_BIA): reply_msg = self._get_packet(Sensor_Address, m2m2_dcb_bia_data_t(), 20) #elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_APP_LT_APP): # reply_msg = self._get_packet(Sensor_Address, m2m2_dcb_gen_blk_data_t(), 20) elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_AD7156): reply_msg = self._get_packet(Sensor_Address, 
m2m2_dcb_ad7156_data_t(), 20) elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM): reply_msg = self._get_packet(Sensor_Address, m2m2_dcb_lt_app_lcfg_data_t(), 20) elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_USER0_CONFIG_APP): reply_msg = self._get_packet(Sensor_Address, m2m2_dcb_user0_blk_data_t(), 20) else: pass if reply_msg == None: self.vrb.err("Error! Timed out waiting for the device!") print"Error! Timed out waiting for the device!" return 1 status = self._get_enum_name(M2M2_DCB_STATUS_ENUM_t, reply_msg.payload.status) if status == None: status = format(reply_msg.payload.status, '#04x') self.vrb.write("Size : {:02}".format(int(msg.payload.size))) self.vrb.write("Command return status: {}".format(status)) if status.upper() != 'M2M2_DCB_STATUS_OK': return 1 if pkt_cnt >= 2: if(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADPD4000): self.vrb.write("Sending ADPD4000 DCB packet 2 of size:{}".format(msg2.payload.size)) #time.sleep(0.04) self._send_packet(msg2) reply_msg2 = self._get_packet(Sensor_Address, m2m2_dcb_adpd4000_data_t(), 20) elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_TEMPERATURE): self.vrb.write("Sending Temperature LCFG DCB packet 2 of size:{}".format(msg2.payload.size)) #time.sleep(0.04) self._send_packet(msg2) reply_msg2 = self._get_packet(Sensor_Address, m2m2_dcb_temperature_data_t(), 20) if reply_msg2 == None: self.vrb.err("Error! 
Timed out waiting for the device, with 2nd pkt receive!") return 1 status2 = self._get_enum_name(M2M2_DCB_STATUS_ENUM_t, reply_msg2.payload.status) if status2 == None: status2 = format(reply_msg2.payload.status, '#04x') self.vrb.write("Size : {:02}".format(int(msg2.payload.size))) self.vrb.write("Command return status: {}".format(status2)) if status2.upper() != 'M2M2_DCB_STATUS_OK': return 1 if pkt_cnt >= 3: if(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADPD4000): self.vrb.write("Sending ADPD4000 DCB packet 3 of size:{}".format(msg3.payload.size)) #time.sleep(0.04) self._send_packet(msg3) reply_msg3 = self._get_packet(Sensor_Address, m2m2_dcb_adpd4000_data_t(), 20) if reply_msg3 == None: self.vrb.err("Error! Timed out waiting for the device, with 3rd pkt receive!") return 1 status3 = self._get_enum_name(M2M2_DCB_STATUS_ENUM_t, reply_msg3.payload.status) if status3 == None: status3 = format(reply_msg3.payload.status, '#04x') self.vrb.write("Size : {:02}".format(int(msg3.payload.size))) self.vrb.write("Command return status: {}".format(status3)) if status3.upper() != 'M2M2_DCB_STATUS_OK': return 1 if pkt_cnt >= 4: if(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADPD4000): self.vrb.write("Sending ADPD4000 DCB packet 4 of size:{}".format(msg4.payload.size)) #time.sleep(0.04) self._send_packet(msg4) reply_msg4 = self._get_packet(Sensor_Address, m2m2_dcb_adpd4000_data_t(), 20) if reply_msg4 == None: self.vrb.err("Error! Timed out waiting for the device, with 4th pkt receive!") return 1 status4 = self._get_enum_name(M2M2_DCB_STATUS_ENUM_t, reply_msg4.payload.status) if status4 == None: status4 = format(reply_msg4.payload.status, '#04x') self.vrb.write("Size : {:02}".format(int(msg4.payload.size))) self.vrb.write("Command return status: {}".format(status4)) if status4.upper() == 'M2M2_DCB_STATUS_OK': return 1 return 0 def do_delete_dcb_config(self,arg): """ Send a command to Delete the DCB Configurations of the specific sensor of the Board. 
Currently dcb configurations is supported for adpd4000, adxl, ppg, ecg, eda, lt_dcb_config, ad7156, lt_app_lcfg, user0_config. ex. delete_dcb_config [sensor_name] #>delete_dcb_config adpd4000 """ Sensor_Address = None args = self._parse_args(arg,1) if args == None: self.vrb.err("Incorrect usage! Please check help.") return 1 for a in args: if a in application_name_map: Sensor_Address = application_name_map[a]["address"] if Sensor_Address == None: self.vrb.err("Incorrect usage! You did not provide a valid device.") return 1 if(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADPD4000): msg = m2m2_packet(Sensor_Address, m2m2_pm_sys_cmd_t()) elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_TEMPERATURE): msg = m2m2_packet(Sensor_Address, m2m2_pm_sys_cmd_t()) elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADXL): msg = m2m2_packet(Sensor_Address, m2m2_pm_sys_cmd_t()) elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_PPG): msg = m2m2_packet(Sensor_Address, m2m2_pm_sys_cmd_t()) elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_ECG): msg = m2m2_packet(Sensor_Address, m2m2_pm_sys_cmd_t()) elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_EDA): msg = m2m2_packet(Sensor_Address, m2m2_pm_sys_cmd_t()) elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_BIA): msg = m2m2_packet(Sensor_Address, m2m2_pm_sys_cmd_t()) elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_APP_LT_APP): msg = m2m2_packet(Sensor_Address, m2m2_pm_sys_cmd_t()) elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_AD7156): msg = m2m2_packet(Sensor_Address, m2m2_pm_sys_cmd_t()) elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM): msg = m2m2_packet(Sensor_Address, m2m2_pm_sys_cmd_t()) elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_USER0_CONFIG_APP): msg = m2m2_packet(Sensor_Address, m2m2_pm_sys_cmd_t()) else: Array_Element = NULL msg.payload.command = M2M2_DCB_COMMAND_ENUM_t.M2M2_DCB_COMMAND_ERASE_CONFIG_REQ self._send_packet(msg) if(Sensor_Address == 
M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADPD4000): reply_msg = self._get_packet(Sensor_Address, m2m2_dcb_adpd4000_data_t(), 20) elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_TEMPERATURE): reply_msg = self._get_packet(Sensor_Address, m2m2_dcb_temperature_data_t(), 20) elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADXL): reply_msg = self._get_packet(Sensor_Address, m2m2_dcb_adxl_data_t(), 20) elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_PPG): reply_msg = self._get_packet(Sensor_Address, m2m2_dcb_ppg_data_t(), 20) elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_ECG): reply_msg = self._get_packet(Sensor_Address, m2m2_dcb_ecg_data_t(), 20) elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_EDA): reply_msg = self._get_packet(Sensor_Address, m2m2_dcb_eda_data_t(), 20) elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_BIA): reply_msg = self._get_packet(Sensor_Address, m2m2_dcb_bia_data_t(), 20) elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_APP_LT_APP): reply_msg = self._get_packet(Sensor_Address, m2m2_dcb_gen_blk_data_t(), 20) elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_AD7156): reply_msg = self._get_packet(Sensor_Address, m2m2_dcb_ad7156_data_t(), 20) elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM): reply_msg = self._get_packet(Sensor_Address, m2m2_dcb_lt_app_lcfg_data_t(), 20) elif(Sensor_Address == M2M2_ADDR_ENUM_t.M2M2_ADDR_USER0_CONFIG_APP): reply_msg = self._get_packet(Sensor_Address, m2m2_dcb_user0_blk_data_t(), 20) else: pass if reply_msg == None: self.vrb.err("Error! 
Timed out waiting for the device!") return 1 status = self._get_enum_name(M2M2_DCB_STATUS_ENUM_t, reply_msg.payload.status) if status == None: status = format(reply_msg.payload.status, '#04x') self.vrb.write("Command return status: {}".format(status)) if status.upper() == 'M2M2_DCB_STATUS_OK': return 0 else: return 1 def do_query_dcb_blk_status(self,arg): """ Send a command to Query the Status of DCB Block index within the Watch - whether DCB is present / absent. Currently dcb configurations is supported for adpd4000, adxl, ppg, ecg, eda, lt_dcb_config, ad7156, lt_app_lcfg, user0_config. All supported DCB Blocks are queried and presented here ex. query_dcb_blk_status #>query_dcb_blk_status """ msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM, m2m2_dcb_cmd_t()) msg.payload.command = M2M2_DCB_COMMAND_ENUM_t.M2M2_DCB_COMMAND_QUERY_STATUS_REQ self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM, m2m2_dcb_block_status_t(), 20) if reply_msg == None: self.vrb.err("Error! 
Timed out waiting for the device!") return status = self._get_enum_name(M2M2_PM_SYS_STATUS_ENUM_t, reply_msg.payload.status) if status == None: status = format(reply_msg.payload.status, '#04x') self.vrb.write("Command return status: {}".format(status)) self.vrb.write("-------------------------------------") if reply_msg.payload.dcb_blk_array[M2M2_DCB_CONFIG_BLOCK_INDEX_t.ADI_DCB_GENERAL_BLOCK_IDX]: self.vrb.write("ADI_DCB_GENERAL_BLOCK_IDX Present") else: self.vrb.write("ADI_DCB_GENERAL_BLOCK_IDX Absent") if reply_msg.payload.dcb_blk_array[M2M2_DCB_CONFIG_BLOCK_INDEX_t.ADI_DCB_ADPD4000_BLOCK_IDX]: self.vrb.write("ADI_DCB_ADPD4000_BLOCK_IDX Present") else: self.vrb.write("ADI_DCB_ADPD4000_BLOCK_IDX Absent") if reply_msg.payload.dcb_blk_array[M2M2_DCB_CONFIG_BLOCK_INDEX_t.ADI_DCB_ADXL362_BLOCK_IDX]: self.vrb.write("ADI_DCB_ADXL362_BLOCK_IDX Present") else: self.vrb.write("ADI_DCB_ADXL362_BLOCK_IDX Absent") if reply_msg.payload.dcb_blk_array[M2M2_DCB_CONFIG_BLOCK_INDEX_t.ADI_DCB_PPG_BLOCK_IDX]: self.vrb.write("ADI_DCB_PPG_BLOCK_IDX Present") else: self.vrb.write("ADI_DCB_PPG_BLOCK_IDX Absent") if reply_msg.payload.dcb_blk_array[M2M2_DCB_CONFIG_BLOCK_INDEX_t.ADI_DCB_ECG_BLOCK_IDX]: self.vrb.write("ADI_DCB_ECG_BLOCK_IDX Present") else: self.vrb.write("ADI_DCB_ECG_BLOCK_IDX Absent") if reply_msg.payload.dcb_blk_array[M2M2_DCB_CONFIG_BLOCK_INDEX_t.ADI_DCB_EDA_BLOCK_IDX]: self.vrb.write("ADI_DCB_EDA_BLOCK_IDX Present") else: self.vrb.write("ADI_DCB_EDA_BLOCK_IDX Absent") if reply_msg.payload.dcb_blk_array[M2M2_DCB_CONFIG_BLOCK_INDEX_t.ADI_DCB_AD7156_BLOCK_IDX]: self.vrb.write("ADI_DCB_AD7156_BLOCK_IDX Present") else: self.vrb.write("ADI_DCB_AD7156_BLOCK_IDX Absent") if reply_msg.payload.dcb_blk_array[M2M2_DCB_CONFIG_BLOCK_INDEX_t.ADI_DCB_LT_APP_LCFG_BLOCK_IDX]: self.vrb.write("ADI_DCB_LT_APP_LCFG_BLOCK_IDX Present") else: self.vrb.write("ADI_DCB_LT_APP_LCFG_BLOCK_IDX Absent") if reply_msg.payload.dcb_blk_array[M2M2_DCB_CONFIG_BLOCK_INDEX_t.ADI_DCB_USER0_BLOCK_IDX]: 
self.vrb.write("ADI_DCB_USER0_BLOCK_IDX Present") else: self.vrb.write("ADI_DCB_USER0_BLOCK_IDX Absent") if reply_msg.payload.dcb_blk_array[M2M2_DCB_CONFIG_BLOCK_INDEX_t.ADI_DCB_TEMPERATURE_BLOCK_IDX]: self.vrb.write("ADI_DCB_TEMPERATURE_BLOCK_IDX Present") else: self.vrb.write("ADI_DCB_TEMPERATURE_BLOCK_IDX Absent") self.vrb.write("-------------------------------------") def do_getTimeOffset(self, arg): """ Send a command to get the time offset from the Board #>getTimeOffset """ args = self._parse_args(arg, 0) if args == None: return msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM, m2m2_pm_sys_cmd_t()) msg.payload.command = M2M2_PM_SYS_COMMAND_ENUM_t.M2M2_PM_SYS_COMMAND_GET_DATE_TIME_REQ self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM, m2m2_pm_sys_date_time_req_t(), 10) if reply_msg == None: self.vrb.err("Error! Timed out waiting for the device!") return status = self._get_enum_name(M2M2_PM_SYS_STATUS_ENUM_t, reply_msg.payload.status) if status == None: status = format(reply_msg.payload.status, '#04x') """Convert the Read Time from the board to seconds""" sHr = int(int(reply_msg.payload.hour - int(12)) * int(3600)) sMin = int(reply_msg.payload.minute * int(60)) sSec = int(reply_msg.payload.second) TosSec = int(sHr) + int(sMin) + int(sSec) """Convert the Time of PC to seconds""" Get_Time_now = datetime.datetime.now() nHr = int(int(Get_Time_now.hour - int(12)) * int(3600)) nMin = int(Get_Time_now.minute * int(60)) nSec = int(Get_Time_now.second) TonSec = int(nHr) + int(nMin) + int(nSec) """Calculate the Offset Value""" ToOffValue = int(TonSec) - int(TosSec) if ToOffValue > int(3599): ToOffvalue_Dec = float(float(ToOffValue) / float(60)) / float(60) ToOffvalueHr = int(ToOffvalue_Dec) ToOffvalueMin = int((ToOffvalue_Dec - int(ToOffvalueHr)) * int(60)) ToOffvalueSec = ((float(ToOffvalue_Dec - int(ToOffvalueHr)) * float(60)) - int(ToOffvalueMin)) * int(60) self.vrb.write("OFFSET Raw Value: {}".format((int(ToOffValue)))) 
self.vrb.write("OFFSET Value: {:02}:{:02}:{:02}".format((int(ToOffvalueHr)),(int(ToOffvalueMin)), (int(ToOffvalueSec)))) else: ToOffvalue_Dec = float(float(ToOffValue) / float(60)) ToOffvalueMin = int(ToOffvalue_Dec) ToOffvalueSec = int((ToOffvalue_Dec - int(ToOffvalueMin)) * int(60)) self.vrb.write("OFFSET Raw Value: {}".format((int(ToOffValue)))) self.vrb.write("OFFSET Value: 00:{:02}:{:02}".format((int(ToOffvalueMin)),(int(ToOffvalueSec)))) def do_toggleSaveCSV(self,arg): """ Enable or disable save as CSV file option while plotting ADXL/ADPD4000/PPG streams """ global enable_csv_logs enable_csv_logs ^= 1 print "Save CSV logs option set to '{}'".format(enable_csv_logs) @cli_logger def do_controlECGElectrodeSwitch(self, arg): """ Control(enable/disable) the switches between ECG electrodes and adpd4k/ad5940/ad8233. The argument is 'sw_name': either of '8233_sw'/'5940_sw'/'4k_sw' 'sw_enable': '1' to turn ON Switch '0' to turn OFF Switch. ----------------------------------------------- Usage: #>controlECGElectrodeSwitch [sw_name] [sw_enable] #>controlECGElectrodeSwitch 8233_sw 1 #>controlECGElectrodeSwitch 5940_sw 0 #>controlECGElectrodeSwitch 4k_sw 1 """ args = self._parse_args(arg, 2) if args == None: print("Invalid arguments! please type help <command>(help controlECGElectrodeSwitch) to know the usage.") return sw_name = None sw_enable = None if "8233_sw" in args[0]: sw_name = 0 elif "5940_sw" in args[0]: sw_name = 1 elif "4k_sw" in args[0]: sw_name = 2 else: print("Invalid arguments! please type help <command>(help controlECGElectrodeSwitch) to know the usage.") return if "1" in args[1]: sw_enable = 1 elif "0" in args[1]: sw_enable = 0 else: print("Invalid arguments! 
please type help <command>(help controlECGElectrodeSwitch) to know the usage.") return msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM, m2m2_pm_sys_dg2502_sw_ctrl_cmd_t()) msg.payload.command = M2M2_PM_SYS_COMMAND_ENUM_t.M2M2_PM_SYS_COMMAND_DG2502_SW_CNTRL_REQ msg.payload.sw_enable = sw_enable msg.payload.sw_name = sw_name self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM, m2m2_pm_sys_dg2502_sw_ctrl_cmd_t(), 10) if reply_msg != None: status = self._get_enum_name(M2M2_PM_SYS_STATUS_ENUM_t, reply_msg.payload.status) self.vrb.write(" Status: '{}'".format(status)) else: self.vrb.err("Switch Control Failed!") def do_LDOControl(self, arg): """ Control the enabling / disabling of the LDO. The argument is the ldo_num: '1' for FS LDO '2' for Optical LDO '3' for EPHYZ LDO ldo_ctrl: '0' for disable '1' for enable ----------------------------------------------- Usage: #>LDOControl [ldo_num] [ldo_ctrl] #>LDOControl 3 1 #>LDOControl 3 0 """ args = self._parse_args(arg, 2) if args == None: self.vrb.write("No ldo number, ldo control specified, check help and retry") return msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM, m2m2_pm_sys_ldo_ctrl_cmd_t()) msg.payload.command = M2M2_PM_SYS_COMMAND_ENUM_t.M2M2_PM_SYS_COMMAND_LDO_CNTRL_REQ msg.payload.ldo_num = int(args[0]) msg.payload.ldo_enable = int(args[1]) self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM, m2m2_pm_sys_ldo_ctrl_cmd_t(), 10) if reply_msg != None: status = reply_msg.payload.status if status != M2M2_PM_SYS_STATUS_ENUM_t.M2M2_PM_SYS_STATUS_OK: print "Putting LDO into the desired state failed!" else: print "Putting LDO into the desired state.. Status: '{}'".format(status) else: print "Putting LDO into the desired state failed! Please check the arguments" def do_getChipID(self, arg): """ Get the chip ID from a chip. 
The argument is the chip name: '1' for ADXL362 Part ID '2' for ADPD4K Chip ID '3' for ADP5360 Device ID '4' for AD5940 Chip ID '5' for NAND Flash Device ID '6' for AD7156 Chip ID ----------------------------------------------- Usage: #>getChipID [chip_name] #>getChipID 1 """ err_stat = 0 args = self._parse_args(arg, 1) if args == None: self.vrb.write("No chip name specified, check help and retry") return 1, None msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM, m2m2_pm_sys_chip_id_cmd_t()) msg.payload.command = M2M2_PM_SYS_COMMAND_ENUM_t.M2M2_PM_SYS_COMMAND_CHIP_ID_REQ msg.payload.chip_name = int(args[0]) self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM, m2m2_pm_sys_chip_id_cmd_t(), 10) chip_id = None if reply_msg != None: status = reply_msg.payload.status if status != M2M2_PM_SYS_STATUS_ENUM_t.M2M2_PM_SYS_STATUS_OK: print "Getting chip ID failed!" err_stat = 1 else: if msg.payload.chip_name == 1: chip_name = "ADXL362" elif msg.payload.chip_name == 2: chip_name = "ADPD4K" elif msg.payload.chip_name == 3: chip_name = "ADP5360" elif msg.payload.chip_name == 4: chip_name = "AD5940" elif msg.payload.chip_name == 5: chip_name = "Nand Flash" elif msg.payload.chip_name == 6: chip_name = "AD7156" print "Fetched chip ID:{} for {}".format(hex(reply_msg.payload.chip_id), chip_name) chip_id = reply_msg.payload.chip_id else: err_stat = 1 print "Getting chip ID failed! Please check the arguments" return err_stat, chip_id def do_getBatteryInfo(self, arg): """ Get Battery information. Fetches the device's battery status a given number of times. 
----------------------------------------------- Usage: #>getBatteryInfo [count] #>getBatteryInfo 5 """ args = self._parse_args(arg, 1) try: count = int(args[0]) except: count = 1 msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM, m2m2_pm_sys_cmd_t()) msg.payload.command = M2M2_PM_SYS_COMMAND_ENUM_t.M2M2_PM_SYS_COMMAND_GET_BAT_INFO_REQ for i in range(count): time.sleep(1) self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM, m2m2_pm_sys_bat_info_resp_t(), 10) if reply_msg == None: self.vrb.err("Error! Timed out waiting for the device!") break self.vrb.write("Watch Battery information:") chrg_status = self._get_enum_name(M2M2_PM_SYS_BAT_STATE_ENUM_t, reply_msg.payload.bat_chrg_stat) if chrg_status != None: self.vrb.write("Battery Status: '{}'".format(chrg_status)) self.vrb.write("Battery Level: '{}'%".format(int(reply_msg.payload.bat_lvl))) self.vrb.write("Battery Voltage: '{}'mV".format(int(reply_msg.payload.bat_mv))) #self.vrb.write("Battery Temp: '{}' C".format(int(reply_msg.payload.bat_temp))) self.vrb.write("Date and Time : {}".format(datetime.datetime.now().replace(microsecond=0))) self.vrb.write("---------------------------------------------------------------------") def help_getBatteryInfo(self): print print "Fetches the device's battery status a given number of times." print "-----------------------------------------------" print "Usage:" print " #>getBatteryInfo [count]" print print " #>getBatteryInfo 5" print def do_setBatteryThreshold(self, arg): """ Set Battery Low level and Critical levels in percentage(%). 
----------------------------------------------- Usage: #>setBatteryThreshold [low_level critical_level] #>setBatteryThreshold 15 10 """ args = self._parse_args(arg, 2) if args == None: return try: msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM, m2m2_pm_sys_bat_thr_req_t()) msg.payload.command = M2M2_PM_SYS_COMMAND_ENUM_t.M2M2_PM_SYS_COMMAND_SET_BAT_THR_REQ msg.payload.bat_level_low = int(args[0]) msg.payload.bat_level_critical = int(args[1]) except ValueError: self.vrb.err("Error! Invalid argument!") return self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM, m2m2_pm_sys_cmd_t(), 10) if reply_msg == None: self.vrb.err("Error! Timed out waiting for the device!") return status = self._get_enum_name(M2M2_PM_SYS_STATUS_ENUM_t, reply_msg.payload.status) if status == None: status = format(reply_msg.payload.status, '#04x') self.vrb.write("status: {}".format(status)) '''def do_setBatteryCharging(self, arg): """ Enable or Disable battery charging. The argument is the option: '1' for Enable '0' for Disable ----------------------------------------------- Usage: #>setBatteryCharging [option] #>setBatteryCharging 1 #>setBatteryCharging 0 """ args = self._parse_args(arg, 1) if args == None: return msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM, m2m2_pm_sys_cmd_t()) opt = args[0] if "1" in opt : msg.payload.command = M2M2_PM_SYS_COMMAND_ENUM_t.M2M2_PM_SYS_COMMAND_ENABLE_BAT_CHARGE_REQ elif "0" in opt: msg.payload.command = M2M2_PM_SYS_COMMAND_ENUM_t.M2M2_PM_SYS_COMMAND_DISABLE_BAT_CHARGE_REQ else: self.vrb.err("Invalid arguments") self.help_BatteryCharging() return self._send_packet(msg) resp = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM, m2m2_pm_sys_cmd_t(), 10) if resp == None: self.vrb.err("Timed out waiting for a response from the device!") return self._print_packet_status(resp)''' def do_setPowerMode(self, arg): """ Set the device into a desired power state. 
The argument is the state: '0' for active mode '2' for hibernate mode - same as Power off from Watch display page. '3' for shutdown mode/shipment mode - same as the Shipment Mode from Watch display page. To see it in action, Watch needs to be removed from a USB connection. Only then it will enter shipment mode. To bring the Watch up, you need to plug in the USB cable. ----------------------------------------------- Usage: #>SetPowerMode [state] #>setPowerMode 2 """ args = self._parse_args(arg, 1) if args == None: self.vrb.write("No state specified, putting the device in shutdown mode", 2) msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM, m2m2_pm_sys_pwr_state_t()) msg.payload.command = M2M2_PM_SYS_COMMAND_ENUM_t.M2M2_PM_SYS_COMMAND_SET_PWR_STATE_REQ if args == None: msg.payload.state = 0 else: msg.payload.state = int(args[0]) self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM, m2m2_pm_sys_pwr_state_t(), 10) if reply_msg != None: status = self._get_enum_name(M2M2_PM_SYS_STATUS_ENUM_t, reply_msg.payload.status) self.vrb.write("Put device into the state:", 2) self.vrb.write(" Device: '{}'".format(int(reply_msg.payload.state)), 2) self.vrb.write(" Status: '{}'".format(status), 2) else: self.vrb.err("Putting device into the desired state failed!") def do_setDisplayColour(self, arg): """ Set LCD Display Colour to all white, R, G, B and black, to check whether there is dead pixel. 
The argument is the disp_colour: '0' for White '1' for Black '2' for Red '3' for Green '4' for Blue ----------------------------------------------- Usage: #>setDisplayColour [disp_colour] #>setDisplayColour 0 #>setDisplayColour 4 """ args = self._parse_args(arg, 1) if args == None: self.vrb.write("No disp_colour specified, check help and retry") return msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_DISPLAY, m2m2_display_set_command_t()) msg.payload.command = M2M2_DISPLAY_APP_CMD_ENUM_t.M2M2_DISPLAY_APP_CMD_SET_DISPLAY_REQ msg.payload.colour = int(args[0]) self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_DISPLAY, m2m2_display_set_command_t(), 10) if reply_msg != None: status = reply_msg.payload.status if status != 0: print "Putting Display into the desired colour failed!" else: print "Putting Display into the desired colour.. Status: '{}'".format(status) else: print "Putting Display into the desired colour failed! Please check the arguments" def do_loadEdaDcfg(self, arg): """ Load the EDA application DCFG. #>loadEdaDcfg """ msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_EDA, m2m2_app_common_sub_op_t()) msg.payload.command = M2M2_EDA_APP_CMD_ENUM_t.M2M2_EDA_APP_CMD_LOAD_DCFG_REQ self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_EDA,m2m2_app_common_sub_op_t(), 10) if reply_msg != None: status = self._get_enum_name(M2M2_APP_COMMON_STATUS_ENUM_t, reply_msg.payload.status) self._print_packet_status(reply_msg) else: self.vrb.err("Loading EDA device configuration failed!") def do_EdaDcfgUpdate(self, arg): """ Update EDA DCFG register values. Eg: = EdaDcfgUpdate addr1 value1 addr2 value2 ... 
Usage: EdaDcfgUpdate 0x000021D8 0x00000489 """ args = self._parse_args(arg, None) if len(args) == 0: self._p_err("No arguments supplied!") return num_ops = len(args) num_ops >>= 1 msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_EDA, eda_app_dcfg_op_hdr_t(num_ops)) msg.payload.command = M2M2_EDA_APP_CMD_ENUM_t.M2M2_APP_COMMON_CMD_WRITE_DCFG_REQ msg.payload.num_ops = num_ops for i in range(num_ops): tempVal = args[i*2] reg_index = int(tempVal,16) tempVal = args[i*2+1] if ("0x") in tempVal: reg_val = int(tempVal, 16) elif ("0X") in tempVal: reg_val = int(tempVal, 16) else: reg_val = int(tempVal) msg.payload.ops[i].field = reg_index msg.payload.ops[i].value = reg_val self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_EDA, eda_app_dcfg_op_hdr_t(num_ops), 60) if reply_msg == None: status = self._get_enum_name(M2M2_EDA_APP_CMD_ENUM_t, reply_msg.payload.status) print "Writing EDA App DCFG failed!" return self._print_eda_app_dcfg_result(reply_msg) def do_EdaDcfgRead(self, arg): """ Read the EDA DCFG. The argument is the DCFG ID to choose from the eda configuration structure: --------------------------------------------------------------- |Config Element | Address | --------------------------------------------------------------- | FIFO Config register | 0x000021D8 | --------------------------------------------------------------- Eg: = EdaDcfgRead addr1 addr2 ...... 
""" args = self._parse_args(arg, None) if len(args) == 0: self.vrb.err("No arguments supplied!") return num_ops = len(args) msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_EDA, eda_app_dcfg_op_hdr_t(num_ops)) msg.payload.command = M2M2_EDA_APP_CMD_ENUM_t.M2M2_APP_COMMON_CMD_READ_DCFG_REQ msg.payload.num_ops = num_ops for i in range(num_ops): tempVal = args[i] if ("0x") in tempVal: reg_addr = int(tempVal, 16) elif ("0X") in tempVal: reg_addr = int(tempVal, 16) else: reg_addr = int(tempVal) msg.payload.ops[i].field = reg_addr self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_EDA, eda_app_dcfg_op_hdr_t(num_ops), 10) reg_result_list = [] if reply_msg == None: err_stat = 1 self.vrb.err("Reading EDA DCFG failed!") else: self._print_eda_dcfg_result(reply_msg) err_stat = 0 for i in range(reply_msg.payload.num_ops): reg_result_list.append((reg_addr, hex(int(reply_msg.payload.ops[i].value)))) return err_stat, reg_result_list def _print_eda_app_dcfg_result(self, packet): self._print_packet_status(packet) self.vrb.write(" Num of registers: '{}'".format(int(packet.payload.num_ops))) t = table(["Field", "Value"]) for i in range(packet.payload.num_ops): t.add_row([hex(packet.payload.ops[i].field), hex(packet.payload.ops[i].value)]) t.display() def do_backlightCntrl(self, arg): """ Do backlight control: enable/disable. 
The argument is the bl_cntrl: '0' for BL OFF '1' for BL ON ----------------------------------------------- Usage: #>backlightCntrl [bl_cntrl] #>backlightCntrl 0 #>backlightCntrl 1 """ args = self._parse_args(arg, 1) if args == None: self.vrb.write("No bl_cntrl specified, check help and retry") return msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_DISPLAY, m2m2_backlight_cntrl_command_t()) msg.payload.command = M2M2_DISPLAY_APP_CMD_ENUM_t.M2M2_DISPLAY_APP_CMD_BACKLIGHT_CNTRL_REQ msg.payload.control = int(args[0]) self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_DISPLAY, m2m2_backlight_cntrl_command_t(), 10) if reply_msg != None: status = reply_msg.payload.status if status != 0: print "Backlight control for the desired state failed!" else: print "Backlight control for the desired state.. Status: '{}'".format(status) else: print "Backlight control for the desired state failed! Please check the arguments" def do_keyPressTest(self, arg): """ Do key press test: The argument is the 'duration' of test in secs. After the test key presses of application will be unusable. system_Reset needs to be given after the test ----------------------------------------------- Usage: #>keyPressTest [duration] #>keyPressTest 5 #>keyPressTest 10 """ args = self._parse_args(arg, 1) if args == None: self.vrb.write("No duration specified, check help and retry") return #Start the Test msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_DISPLAY, m2m2_key_test_command_t()) msg.payload.command = M2M2_DISPLAY_APP_CMD_ENUM_t.M2M2_DISPLAY_APP_CMD_KEY_TEST_REQ msg.payload.enable = 1 self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_DISPLAY, m2m2_key_test_command_t(), 10) if reply_msg != None: if reply_msg.payload.status == M2M2_APP_COMMON_STATUS_ENUM_t.M2M2_APP_COMMON_STATUS_OK and reply_msg.payload.command == M2M2_DISPLAY_APP_CMD_ENUM_t.M2M2_DISPLAY_APP_CMD_KEY_TEST_RESP: print "Key press test enabled.. 
Status: '{}'".format(reply_msg.payload.status) else: print "Key Press test start failed!..Status: '{}'".format(reply_msg.payload.status) else: print "Key press start test failed! No response received" return #Get key stream Data duration = int(args[0]) start_time = time.time() while True: # loop body here reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_DISPLAY, m2m2_pm_sys_key_test_data_t(), 1) if reply_msg != None: if reply_msg.payload.command == M2M2_DISPLAY_APP_CMD_ENUM_t.M2M2_DISPLAY_APP_CMD_KEY_STREAM_DATA: print "Received key code: {}".format(reply_msg.payload.key_value) end_time = time.time() if end_time-start_time > duration: break # end of loop #End the Test msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_DISPLAY, m2m2_key_test_command_t()) msg.payload.command = M2M2_DISPLAY_APP_CMD_ENUM_t.M2M2_DISPLAY_APP_CMD_KEY_TEST_REQ msg.payload.enable = 0 self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_DISPLAY, m2m2_key_test_command_t(), 10) if reply_msg != None: if reply_msg.payload.status == M2M2_APP_COMMON_STATUS_ENUM_t.M2M2_APP_COMMON_STATUS_OK and reply_msg.payload.command == M2M2_DISPLAY_APP_CMD_ENUM_t.M2M2_DISPLAY_APP_CMD_KEY_TEST_RESP: print "Key press test disabled.. Status: '{}'".format(reply_msg.payload.status) else: print "Key Press end failed!..Status: '{}'".format(reply_msg.payload.status) else: print "Key press end-test failed! No response received" def do_capSenseTest(self, arg): """ Do Cap sense AD7156 test: The argument is the 'duration' of test in secs. 
----------------------------------------------- Usage: #>capSenseTest [duration] #>capSenseTest 5 #>capSenseTest 10 """ args = self._parse_args(arg, 1) if args == None: self.vrb.write("No duration specified, check help and retry") return #start the test msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM, m2m2_pm_sys_cap_sense_test_cmd_t()) msg.payload.command = M2M2_PM_SYS_COMMAND_ENUM_t.M2M2_PM_SYS_COMMAND_CAP_SENSE_TEST_REQ msg.payload.enable = 1 self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM, m2m2_pm_sys_cap_sense_test_cmd_t(), 20) if reply_msg != None: if reply_msg.payload.status == M2M2_APP_COMMON_STATUS_ENUM_t.M2M2_APP_COMMON_STATUS_OK and reply_msg.payload.command == M2M2_PM_SYS_COMMAND_ENUM_t.M2M2_PM_SYS_COMMAND_CAP_SENSE_TEST_RESP: print "Cap Sense test Enabled.. Status: '{}'".format(reply_msg.payload.status) else: print "Cap sense test start failed!.. Status: '{}'".format(reply_msg.payload.status) else: print "Cap Sense start test failed! 
No response received" return #Get touch stream data duration = int(args[0]) start_time = time.time() while True: # loop body here reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM, m2m2_pm_sys_cap_sense_test_data_t(), 1) if reply_msg != None: if reply_msg.payload.command == M2M2_PM_SYS_COMMAND_ENUM_t.M2M2_PM_SYS_COMMAND_CAP_SENSE_STREAM_DATA: print "Received touch position: {}".format(reply_msg.payload.touch_position) print "Received touch value: {}".format(reply_msg.payload.touch_value) end_time = time.time() if end_time-start_time > duration: break # end of loop #End the test msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM, m2m2_pm_sys_cap_sense_test_cmd_t()) msg.payload.command = M2M2_PM_SYS_COMMAND_ENUM_t.M2M2_PM_SYS_COMMAND_CAP_SENSE_TEST_REQ msg.payload.enable = 0 self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM, m2m2_pm_sys_cap_sense_test_cmd_t(), 20) if reply_msg != None: if reply_msg.payload.status == M2M2_APP_COMMON_STATUS_ENUM_t.M2M2_APP_COMMON_STATUS_OK and reply_msg.payload.command == M2M2_PM_SYS_COMMAND_ENUM_t.M2M2_PM_SYS_COMMAND_CAP_SENSE_TEST_RESP: print "Cap Sense test Disabled.. Status: '{}'".format(reply_msg.payload.status) else: print "Cap sense test end failed!.. Status: '{}'".format(reply_msg.payload.status) else: print "Cap Sense end test failed! 
No response received" return def do_enterBootLoader(self, arg): """ Give the command to enter bootloader ----------------------------------------------- Usage: #>enterBootLoader """ args = self._parse_args(arg, 0) msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM, m2m2_pm_sys_enter_bloader_cmd_t()) msg.payload.command = M2M2_PM_SYS_COMMAND_ENUM_t.M2M2_PM_SYS_COMMAND_ENTER_BOOTLOADER_REQ self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM, m2m2_pm_sys_enter_bloader_cmd_t(), 10) if reply_msg != None: status = reply_msg.payload.status if status != 0: print "Bootloader entry failed" else: print "Bootloader entry passed! Status: '{}'".format(status) else: print "Bootloader entry failed!" def do_usb_DFU(self, arg): """ Give the command to start USB DFU on com port of Watch in bootloader mode. Pre-requisite command to be executed is: #>enterBootLoader The following command is executed: nrfutil dfu usb-serial -pkg ADI_project.zip -p COM7 based on argument passed for Debug or Release mode package The argument proj_config is: Release: for copying Watch app zip package from Release folder Debug: for copying Waych app zip package from Debug folder ----------------------------------------------- Usage: #>usb_DFU [comport_no} [proj_config] """ args = self._parse_args(arg, 2) if args == None: self.vrb.write("No arguments specified, check help and retry") return print "Timestamp of hex package is \n" cmd = "dir /T:W /A:-D ..\\..\\..\\app\\nRF52840_app\\ses\\Output\\"+args[1] cmd = cmd + "\\Exe\\watchv4_nrf52840.hex" os.system(cmd) print "Copying the Watch hex file from {} app directory..".format(args[1]) cmd = "copy ..\\..\\..\\app\\nRF52840_app\\ses\\Output\\"+args[1] cmd = cmd + "\\Exe\\watchv4_nrf52840.hex .\\ADI_project.hex" print "Executing command {}".format(cmd) os.system(cmd) print "\nPreparing the zip package for USB DFU.." 
cmd = "nrfutil pkg generate --hw-version 52 --sd-req 0xAE --application-version 0xff --application ADI_project.hex --key-file ..\\..\\..\\bootloader\\boot_zip\\private.pem ADI_project.zip" os.system(cmd) print "\nStarting USB DFU.." cmd = "nrfutil dfu usb-serial -pkg ADI_project.zip -p "+args[0] os.system(cmd) def do_fs_get_bad_blocks(self, arg): """ Command to get no: of bad blocks from NAND Flash. #>fs_getBadBlock """ args = self._parse_args(arg, 0) if args == None: return msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_FS, m2m2_file_sys_get_bad_blocks_cmd_t()) msg.payload.command = M2M2_FILE_SYS_CMD_ENUM_t.M2M2_FILE_SYS_CMD_GET_BAD_BLOCKS_REQ msg.payload.status = M2M2_APP_COMMON_STATUS_ENUM_t.M2M2_APP_COMMON_STATUS_OK self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_FS, m2m2_file_sys_get_bad_blocks_cmd_t(), 10) if reply_msg != None: if reply_msg.payload.status == M2M2_APP_COMMON_STATUS_ENUM_t.M2M2_APP_COMMON_STATUS_OK: self.vrb.write("No: of bad blocks: {}".format(reply_msg.payload.bad_blocks)) else: self.vrb.write("Error returned in getting Bad blocks from NAND FLash") else: self.vrb.err("No response from device.") def do_delete_config_file(self, arg): address = M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_FS msg = m2m2_packet(address, m2m2_file_sys_cmd_t()) msg.payload.command = M2M2_FILE_SYS_CMD_ENUM_t.M2M2_FILE_SYS_CMD_DELETE_CONFIG_FILE_REQ msg.payload.status = M2M2_APP_COMMON_STATUS_ENUM_t.M2M2_APP_COMMON_STATUS_OK self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_FS, m2m2_file_sys_cmd_t(), 120) if reply_msg != None: self._print_file_system_status(reply_msg) else: self.vrb.err("The device did not respond!") def do_Enable_Config_logs(self, args): address = M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM msg = m2m2_packet(address, m2m2_pm_sys_cmd_t()) if "start" in args: msg.payload.command = M2M2_PM_SYS_COMMAND_ENUM_t.M2M2_PM_SYS_COMMAND_ENABLE_USER_CONFIG_LOG_REQ elif "stop" in args: msg.payload.command = 
M2M2_PM_SYS_COMMAND_ENUM_t.M2M2_PM_SYS_COMMAND_DISABLE_USER_CONFIG_LOG_REQ msg.payload.status = M2M2_APP_COMMON_STATUS_ENUM_t.M2M2_APP_COMMON_STATUS_OK self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM, m2m2_pm_sys_cmd_t(), 120) if reply_msg != None: self._print_low_touch_status(reply_msg) else: self.vrb.err("The device did not respond!") def do_get_low_touch_status(self, args): address = M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM msg = m2m2_packet(address, m2m2_pm_sys_cmd_t()) msg.payload.command = M2M2_PM_SYS_COMMAND_ENUM_t.M2M2_PS_SYS_COMMAND_GET_LOW_TOUCH_LOGGING_STATUS_REQ msg.payload.status = M2M2_APP_COMMON_STATUS_ENUM_t.M2M2_APP_COMMON_STATUS_OK self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM, m2m2_pm_sys_cmd_t(), 120) if reply_msg != None: self._print_low_touch_status(reply_msg) else: self.vrb.err("The device did not respond!") def do_pm_activate_touch_sensor(self, args): address = M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM msg = m2m2_packet(address, m2m2_pm_sys_cmd_t()) msg.payload.command = M2M2_PM_SYS_COMMAND_ENUM_t.M2M2_PM_SYS_COMMAND_ACTIVATE_TOUCH_SENSOR_REQ msg.payload.status = M2M2_APP_COMMON_STATUS_ENUM_t.M2M2_APP_COMMON_STATUS_OK self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM, m2m2_pm_sys_cmd_t(), 120) if reply_msg != None: self._print_low_touch_status(reply_msg) else: self.vrb.err("The device did not respond!") def do_pm_deactivate_touch_sensor(self, args): address = M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM msg = m2m2_packet(address, m2m2_pm_sys_cmd_t()) msg.payload.command = M2M2_PM_SYS_COMMAND_ENUM_t.M2M2_PM_SYS_COMMAND_DEACTIVATE_TOUCH_SENSOR_REQ msg.payload.status = M2M2_APP_COMMON_STATUS_ENUM_t.M2M2_APP_COMMON_STATUS_OK self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM, m2m2_pm_sys_cmd_t(), 120) if reply_msg != None: self._print_low_touch_status(reply_msg) else: self.vrb.err("The device did not respond!") def 
do_create_gen_blk_dcb(self, arg): global lowtouch args = self._parse_args(arg, 1) if args == None: return if "start" in args: lowtouch.Enable_lowtouch = True elif "stop" in args: lowtouch.Enable_lowtouch = False lowtouch = LowTouch() def do_gen_blk_dcb_file_create(self, arg): """ Create the file with m2m2 commands that will be written into the gen block DCB. This command is to be preceded by 'create_gen_blk_dcb start' and succeeded by 'create_gen_blk_dcb stop'. #>gen_blk_dcb_file_create """ args = self._parse_args(arg, 1) if args == None: return configmsg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_FS, m2m2_file_sys_user_cfg_summary_pkt_t()) if "write" in args: if lowtouch.Enable_lowtouch == True and lowtouch.User_File!='': print("\n ############### GEN_BLK_DCB_CONFIG FILE INFO #############\n") self.vrb.write(" Header Info length : '{}'".format(len(configmsg.pack()))) self.vrb.write(" Start command length: '{}'".format(lowtouch.Startcmdlen)) self.vrb.write(" Start command count : '{}'".format( lowtouch.Startcmdcount)) self.vrb.write(" Stop command length : '{}'".format(lowtouch.Stopcmdlen)) self.vrb.write(" Stop command count : '{}'".format(lowtouch.Stopcmdcount)) lowtouch.Enable_lowtouch = False configmsg.payload.command = M2M2_FILE_SYS_CMD_ENUM_t.M2M2_FILE_SYS_CMD_LOG_USER_CONFIG_DATA_RESP configmsg.payload.start_cmd_len = lowtouch.Startcmdlen configmsg.payload.start_cmd_cnt = lowtouch.Startcmdcount configmsg.payload.stop_cmd_len = lowtouch.Stopcmdlen configmsg.payload.stop_cmd_cnt = lowtouch.Stopcmdcount configmsg.payload.crc16 = 0 #T0DO conmsg = configmsg.pack() lowtouch.User_File = conmsg + lowtouch.User_File #check if total bytes in User_File is WORD aligned. If not make it. 
So that DCB contains data which is WORD aligned if (len(lowtouch.User_File)) % 4 != 0: for i in range((4 - (len(lowtouch.User_File) % 4) )): lowtouch.User_File += "\0" #putting it as NULL #check if total no: of bytes in DCB file will be less than MAXGENBLKDCBSIZE*4*18, if its exceeding don't create the file if ( len(lowtouch.User_File) > (MAXGENBLKDCBSIZE*4*18) ): print(" ########## GEN_BLK_DCB_CONFIG.LOG can't be created ##############"); print(" ########## File size exceeding MAXGENBLKDCBSIZE*4*4 bytes ##############"); print(" ########## Recheck the start-stop commands included ##############"); print(" ########## in gen_blk_dcb_file_create_test and Retry ##############"); return try: Fileobjw = open(lowtouch.DCB_File_name, "wb") Fileobjw.write(lowtouch.User_File) Fileobjw.close() print("Copying and replacing file to dcb_cfg folder as gen_blk_dcb.lcfg") print("File size: ") print(len(lowtouch.User_File)) cmd = "copy GEN_BLK_DCB_CONFIG.LOG .\\dcb_cfg\gen_blk_dcb.lcfg" os.system(cmd) except: print("ERROR: 'GEN_BLK_DCB_CONFIG.LOG' File write failed") return else: print("Create gen blk DCB config file with 'create_gen_blk_dcb start' command") elif "read" in args: try: Fileobj = open(lowtouch.DCB_File_name, "rb") except: print("ERROR: 'GEN_BLK_DCB_CONFIG.LOG' File not found") return chunk_len = 4104 #57*4*18 bytes_read = 0 if Fileobj != None: while True: config_buffer = Fileobj.read(chunk_len) if not config_buffer: break temp_array = list(bytearray(config_buffer)) for index in range(len(config_buffer)): print "{}".format(hex(temp_array[index])) if (index+1)%4 == 0: print('\n') bytes_read = bytes_read + len(config_buffer) else: print("Create gen blk DCB config file with 'create_gen_blk_dcb start' command") def do_loadAd7156Cfg(self, arg): """ Load the AD7156 device configuration. 
#>loadAd7156Cfg """ msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_AD7156, m2m2_sensor_ad7156_resp_t()) msg.payload.command = M2M2_SENSOR_AD7156_COMMAND_ENUM_t.M2M2_SENSOR_AD7156_COMMAND_LOAD_CFG_REQ self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_AD7156, m2m2_sensor_ad7156_resp_t(), 10) if reply_msg != None: status = self._get_enum_name(M2M2_APP_COMMON_STATUS_ENUM_t, reply_msg.payload.status) self.vrb.write("Loaded ad7156 device configuration") self.vrb.write(" Status: '{}'".format(status)) else: self.vrb.err("Loading Ad7156 device configuration failed!") def do_fs_config_log(self, arg): global lowtouch address = M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_FS args = self._parse_args(arg, 1) if args == None: return msg = m2m2_packet(address, m2m2_file_sys_cmd_t()) if "start" in args: msg.payload.command = M2M2_FILE_SYS_CMD_ENUM_t.M2M2_FILE_SYS_CMD_DCFG_START_LOG_REQ elif "stop" in args: msg.payload.command = M2M2_FILE_SYS_CMD_ENUM_t.M2M2_FILE_SYS_CMD_DCFG_STOP_LOG_REQ else: self.vrb.err("Incorrect usage!") return msg.payload.status = M2M2_APP_COMMON_STATUS_ENUM_t.M2M2_APP_COMMON_STATUS_OK self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_FS, m2m2_file_sys_cmd_t(), 120) if reply_msg != None: status = self._get_enum_name(M2M2_FILE_SYS_STATUS_ENUM_t, reply_msg.payload.status) if status == 'M2M2_FILE_SYS_STATUS_OK' and "start" in args: lowtouch.Enable_lowtouch = True elif status == 'M2M2_FILE_SYS_STATUS_OK' and "stop" in args: lowtouch.Enable_lowtouch = False lowtouch = LowTouch() self._print_file_system_status(reply_msg) else: self.vrb.err("The device did not respond!") def do_fs_config_log_file(self, arg): """ mount file system. Command help to check if file system is available with proper mount. 
#>fs_config_log_file write
        """
        args = self._parse_args(arg, 1)
        if args == None:
            return
        # Packet for streaming config-file bytes, plus a summary/header packet.
        msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_FS, m2m2_file_sys_user_config_data())
        configmsg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_FS, m2m2_file_sys_user_cfg_summary_pkt_t())
        if "write" in args:
            # Only proceed if a low-touch capture session recorded commands.
            if lowtouch.Enable_lowtouch == True and lowtouch.User_File != '':
                print("\n ############### CONFIG FILE INFO #############\n")
                self.vrb.write(" Header Info length : '{}'".format(len(configmsg.pack())))
                self.vrb.write(" Start command length: '{}'".format(lowtouch.Startcmdlen))
                self.vrb.write(" Start command count : '{}'".format( lowtouch.Startcmdcount))
                self.vrb.write(" Stop command length : '{}'".format(lowtouch.Stopcmdlen))
                self.vrb.write(" Stop command count : '{}'".format(lowtouch.Stopcmdcount))
                lowtouch.Enable_lowtouch = False
                # Build the summary header that is prepended to the captured commands.
                configmsg.payload.command = M2M2_FILE_SYS_CMD_ENUM_t.M2M2_FILE_SYS_CMD_LOG_USER_CONFIG_DATA_RESP
                configmsg.payload.start_cmd_len = lowtouch.Startcmdlen
                configmsg.payload.start_cmd_cnt = lowtouch.Startcmdcount
                configmsg.payload.stop_cmd_len = lowtouch.Stopcmdlen
                configmsg.payload.stop_cmd_cnt = lowtouch.Stopcmdcount
                configmsg.payload.crc16 = 0  #T0DO
                conmsg = configmsg.pack()
                lowtouch.User_File = conmsg + lowtouch.User_File
                # Persist header+commands locally before streaming to the device.
                try:
                    Fileobjw = open(lowtouch.User_File_name, "wb")
                    Fileobjw.write(lowtouch.User_File)
                    Fileobjw.close()
                except:
                    print("ERROR: 'USER_CONFIG.LOG' File write failed")
                    return
                try:
                    Fileobj = open(lowtouch.User_File_name, "rb")
                except:
                    print("ERROR: 'USER_CONFIG.LOG' File not found")
                    return
                chunk_len = 70  # bytes streamed per packet
                bytes_read = 0
                if Fileobj != None:
                    while True:
                        config_buffer = Fileobj.read(chunk_len)
                        if not config_buffer:
                            break
                        else:
                            # A short read marks the final chunk (end-of-file status).
                            if ((len(config_buffer)) < chunk_len):
                                msg.payload.status = M2M2_FILE_SYS_STATUS_ENUM_t.M2M2_FILE_SYS_END_OF_FILE
                            else:
                                msg.payload.status = M2M2_FILE_SYS_STATUS_ENUM_t.M2M2_FILE_SYS_STATUS_OK
                            temp_array = list(bytearray(config_buffer))
                            for index in range(len(config_buffer)):
                                msg.payload.byte_configstream[index] = temp_array[index]
                            msg.payload.len_configstream = len(config_buffer)
                            bytes_read = bytes_read + len(config_buffer)
                            msg.payload.command = M2M2_FILE_SYS_CMD_ENUM_t.M2M2_FILE_SYS_CMD_LOG_USER_CONFIG_DATA_REQ
                            self._send_packet(msg)
                            reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_FS, m2m2_file_sys_cmd_t(), 500)
                            if reply_msg != None:
                                status = self._get_enum_name(M2M2_APP_COMMON_STATUS_ENUM_t, reply_msg.payload.status)
                                if status != None:
                                    if (status != 'M2M2_APP_COMMON_STATUS_OK'):
                                        print("ERROR: Error in writing the config data!")
                                        self.vrb.write(" Status: '{}'".format(status))
                                        return
                                    elif ((status == 'M2M2_APP_COMMON_STATUS_OK') and (len(config_buffer)) < chunk_len):
                                        self.vrb.write("\n Total bytes written: '{}'".format(bytes_read))
                                        self.vrb.write(" Status: '{}'\n".format(status))
                                else:
                                    # Status not in the common enum — decode as a file-system status and stop.
                                    status = self._get_enum_name(M2M2_FILE_SYS_STATUS_ENUM_t, reply_msg.payload.status)
                                    self.vrb.write(" Status: '{}'".format(status))
                                    break
                            else:
                                self.vrb.write(" Device did not respond")
            else:
                # NOTE(review): branch attachment reconstructed from mangled source —
                # message suggests this is the "capture was never started" path; confirm.
                print("Create config file with 'fs_config_log start' command")

    def do_fs_format(self, arg):
        """ format file system.
        Command to format file system.
        #>fs_format
        """
        args = self._parse_args(arg, 0)
        if args == None:
            return
        msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_FS, m2m2_file_sys_cmd_t())
        msg.payload.command = M2M2_FILE_SYS_CMD_ENUM_t.M2M2_FILE_SYS_CMD_FORMAT_REQ
        self._send_packet(msg)
        reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_FS, m2m2_file_sys_cmd_t(), 30)
        if reply_msg != None:
            self._print_file_system_status(reply_msg)
        else:
            self.vrb.err("No response from device.Mount operation failed.")

    def do_fs_block_erase(self, arg):
        """ Command to erase each block.
        #>fs_block_erase 1
        """
        args = self._parse_args(arg, 1)
        if args == None:
            return
        msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_FS, m2m2_file_sys_blk_erase_cmd_t())
        msg.payload.command = M2M2_FILE_SYS_CMD_ENUM_t.M2M2_FILE_SYS_CMD_BLOCK_ERASE_REQ
        msg.payload.block_no = int(args[0])
        self._send_packet(msg)
        reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_FS, m2m2_file_sys_blk_erase_cmd_t(), 30)
        if reply_msg != None:
            self._print_file_system_status(reply_msg)
        else:
            self.vrb.err("No response block erase failed.")

    def do_fs_write_reserved_block_page(self, arg):
        """ Command to write reserved block
        #>fs_write_reserved_block_page
        """
        args = self._parse_args(arg, 0)
        if args == None:
            return
        msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_FS, m2m2_file_sys_write_rsd_blk_cmd_t())
        msg.payload.command = M2M2_FILE_SYS_CMD_ENUM_t.M2M2_FILE_SYS_WRITE_RANDOM_DATA_TO_RSD_BLK_REQ
        # Fill the payload with a known 0..19 test pattern.
        for i in range(20):
            msg.payload.data[i] = i
        self._send_packet(msg)
        reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_FS, m2m2_file_sys_write_rsd_blk_cmd_t(), 30)
        if reply_msg != None:
            self._print_file_system_status(reply_msg)
        else:
            self.vrb.err("No response write failed.")

    def do_fs_req_debug_info(self, arg):
        """ format file system.
        Command to format file system.
        #>fs_req_debug_info
        """
        args = self._parse_args(arg, 0)
        if args == None:
            return
        msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_FS, m2m2_file_sys_debug_impt_info_req_t())
        msg.payload.command = M2M2_FILE_SYS_CMD_ENUM_t.M2M2_FILE_SYS_CMD_GET_IMPT_DEBUG_INFO_REQ
        self._send_packet(msg)
        reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_FS, m2m2_file_sys_debug_impt_info_resp_t(), 30)
        if reply_msg != None:
            # Dump every debug counter/pointer reported by the firmware.
            self.vrb.write(" CIRCULAR BUFFER HEAD POINTER: {} page no".format(int(reply_msg.payload.head_pointer)))
            self.vrb.write(" CIRCULAR BUFFER TAIL POINTER : {} block no".format(int(reply_msg.payload.tail_pointer)))
            self.vrb.write(" USB AVG TRANSMIT TIME : {}us".format(int(reply_msg.payload.usb_avg_tx_time)))
            self.vrb.write(" USB AVG PORT WRITE TIME : {}us".format(int(reply_msg.payload.usb_avg_port_write_time)))
            self.vrb.write(" PAGE READ TIME : {}us".format(int(reply_msg.payload.page_read_time)))
            self.vrb.write(" PAGE WRITE TIME : {}us".format(int(reply_msg.payload.page_write_time)))
            self.vrb.write(" INIT CIRCULAR BUFFER : {}".format(int(reply_msg.payload.init_circular_buffer_flag)))
            self.vrb.write(" MEM FULL FLAG : {}".format(int(reply_msg.payload.mem_full_flag)))
            self.vrb.write(" DATA OFFSET : {}".format(int(reply_msg.payload.data_offset)))
            self.vrb.write(" CONFIG POS OCCUPIED : {}".format(int(reply_msg.payload.config_file_occupied)))
            self.vrb.write(" DISPLAY ONE MINUTE TIMER COUNT: {}".format(int(reply_msg.payload.min_timer_cnt)))
            self.vrb.write(" DISPLAY FIVE MINUTE TIMER COUNT : {}".format(int(reply_msg.payload.fs_display_query_cnt)))
            self._print_file_system_status(reply_msg)
        else:
            self.vrb.err("debug info req error failed !")

    def do_fs_ls(self, arg):
        """ list files.Command help to view files in the current directory.
        #>fs_ls
        """
        fs_ls_list = []
        err_stat = 0
        args = self._parse_args(arg, 0)
        if args == None:
            return err_stat, fs_ls_list
        msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_FS, m2m2_file_sys_ls_req_t(2))
        msg.payload.command = M2M2_FILE_SYS_CMD_ENUM_t.M2M2_FILE_SYS_CMD_LS_REQ
        # Root directory is encoded as two 0x01 bytes in dir_path.
        msg.payload.dir_path[0] = ord('\x01')
        msg.payload.dir_path[1] = ord('\x01')
        self._send_packet(msg)
        while True:
            # loop body here
            reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_FS, m2m2_file_sys_ls_resp_t(), 20)
            if reply_msg != None:
                # Non-OK status terminates the listing (includes end-of-dir).
                if (reply_msg.payload.status != M2M2_FILE_SYS_STATUS_ENUM_t.M2M2_FILE_SYS_STATUS_OK):
                    self._print_file_system_status(reply_msg)
                    break
                else :
                    fs_ls_dict = self._print_file_list(reply_msg)
                    fs_ls_list.append(fs_ls_dict)
            else:
                # NOTE(review): no break on timeout — keeps waiting for further
                # listing packets; confirm this retry-forever behavior is intended.
                err_stat = 1
                self.vrb.err("No response from device")
        # end of loop
        return err_stat, fs_ls_list

    def do_fs_vol_info(self, arg):
        """ File system volume info.
        Command to get file system volume info.
        #>fs_vol_info
        """
        args = self._parse_args(arg, 0)
        if args == None:
            return
        msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_FS, m2m2_file_sys_cmd_t())
        msg.payload.command = M2M2_FILE_SYS_CMD_ENUM_t.M2M2_FILE_SYS_CMD_VOL_INFO_REQ
        self._send_packet(msg)
        reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_FS, m2m2_file_sys_vol_info_resp_t(), 10)
        if reply_msg != None:
            self.vrb.write(" FS_TOTAL_MEMORY: '{}'bytes".format(int(reply_msg.payload.totalmemory)))
            self.vrb.write(" FS_USED_MEMORY : '{}'bytes".format(int(reply_msg.payload.usedmemory)))
            self.vrb.write(" FS_AVAILABLE_MEMORY : '{}%'".format(int(reply_msg.payload.availmemory)))
            self._print_file_system_status(reply_msg)
        else:
            self.vrb.err("No response from device.Getting volume info failed.")

    def do_fs_mount(self, arg):
        """ mount file system.
        Command help to check if file system is available with proper mount.
        #>fs_mount
        """
        args = self._parse_args(arg, 0)
        if args == None:
            return
        msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_FS, m2m2_file_sys_cmd_t())
        msg.payload.command = M2M2_FILE_SYS_CMD_ENUM_t.M2M2_FILE_SYS_CMD_MOUNT_REQ
        self._send_packet(msg)
        reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_FS, m2m2_file_sys_cmd_t(), 10)
        if reply_msg != None:
            self._print_packet_status(reply_msg)
        else:
            self.vrb.err("The device did not respond!")

    def do_fs_log(self, arg):
        """ Start or stop file-system logging on the device.
        #>fs_log start | fs_log stop
        """
        address = M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_FS
        args = self._parse_args(arg, 1)
        if args == None:
            return
        # Start and stop use different payload structs.
        if "start" in args:
            msg = m2m2_packet(address, m2m2_app_common_sub_op_t())
            msg.payload.command = M2M2_FILE_SYS_CMD_ENUM_t.M2M2_FILE_SYS_CMD_START_LOGGING_REQ
        elif "stop" in args:
            msg = m2m2_packet(address, m2m2_file_sys_stop_log_cmd_t())
            msg.payload.stop_type = FILE_STOP_LOGGING_t.M2M2_FILE_SYS_STOP_LOGGING
            msg.payload.command = M2M2_FILE_SYS_CMD_ENUM_t.M2M2_FILE_SYS_CMD_STOP_LOGGING_REQ
        else:
            self.vrb.err("Incorrect usage!")
            return
        msg.payload.status = M2M2_APP_COMMON_STATUS_ENUM_t.M2M2_APP_COMMON_STATUS_OK
        self._send_packet(msg)
        # Wait for the reply matching the request's payload type.
        if "start" in args:
            reply_msg = self._get_packet(address, m2m2_app_common_sub_op_t(), 20)
        elif "stop" in args:
            reply_msg = self._get_packet(address, m2m2_file_sys_stop_log_cmd_t(), 20)
        if reply_msg != None:
            self._print_file_system_status(reply_msg)
        else:
            self.vrb.err("The device did not respond!")

    def do_fs_log_append(self, arg):
        """ Request the device to append to the last log file.
        #>fs_log_append
        """
        address = M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_FS
        args = self._parse_args(arg, 0)
        if args == None:
            return
        msg = m2m2_packet(address, m2m2_file_sys_cmd_t())
        msg.payload.command = M2M2_FILE_SYS_CMD_ENUM_t.M2M2_FILE_SYS_CMD_APPEND_FILE_REQ
        msg.payload.status = M2M2_FILE_SYS_STATUS_ENUM_t.M2M2_FILE_SYS_STATUS_OK
        self._send_packet(msg)
        #pase reply message
        reply_msg = self._get_packet(address, m2m2_file_sys_cmd_t(), 50)
        if reply_msg == None:
            self.vrb.err("Device did not respond!")
            return
        if reply_msg.payload.status == M2M2_FILE_SYS_STATUS_ENUM_t.M2M2_FILE_SYS_NO_FILE_TO_APPEND:
            self.vrb.err("No files to append!")
        self._print_file_system_status(reply_msg)

    def do_fs_sub(self, arg):
        """ Add or remove a stream subscription for file-system logging.
        #>fs_sub <stream> a|add  or  fs_sub <stream> r|remove
        """
        fs_address = M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_FS
        address = None
        args = self._parse_args(arg, 2)
        if args == None:
            return
        # Resolve the stream name argument via the global stream map.
        for a in args:
            if a in stream_name_map:
                address = stream_name_map[a]["application"]
                stream = stream_name_map[a]["stream"]
        if address == None:
            self.vrb.err("Incorrect usage! You did not provide a valid stream.")
            return
        msg = m2m2_packet(fs_address, m2m2_file_sys_log_stream_t())
        if "r" in args or "remove" in args:
            msg.payload.command = M2M2_FILE_SYS_CMD_ENUM_t.M2M2_FILE_SYS_CMD_STOP_STREAM_REQ
            # NOTE(review): this assignment is overwritten by `msg.payload.stream = stream`
            # below — confirm whether the remove path should really use `address`.
            msg.payload.stream = address
        elif "a" in args or "add" in args:
            msg.payload.command = M2M2_FILE_SYS_CMD_ENUM_t.M2M2_FILE_SYS_CMD_LOG_STREAM_REQ
        else:
            self.vrb.err("Incorrect usage! You did not provide a valid subscription operation.")
            return
        msg.payload.stream = stream
        self._send_packet(msg)
        reply_msg = self._get_packet(fs_address, m2m2_file_sys_log_stream_t(), 10)
        if reply_msg != None:
            self._print_file_system_status(reply_msg)
        else:
            self.vrb.err("No response from device. Subscription/unSubscription operation failed.")

    def do_fs_log_test(self, arg):
        """ list files.Command help to view files in the current directory.
        #>fs_log_test
        """
        args = self._parse_args(arg, 0)
        if args == None:
            return
        address = M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_FS
        msg = m2m2_packet(address, m2m2_file_sys_cmd_t())
        msg.payload.command = M2M2_FILE_SYS_CMD_ENUM_t.M2M2_FILE_SYS_CMD_TEST_LOG_REQ
        # msg.payload.status = M2M2_APP_COMMON_STATUS_ENUM_t.M2M2_APP_COMMON_STATUS_OK
        self._send_packet(msg)
        reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_FS, m2m2_file_sys_cmd_t(), 120)
        if reply_msg != None:
            self._print_file_system_status(reply_msg)
        else:
            self.vrb.err("No response received from device!!")

    def do_fs_stream(self, arg):
        """ read contents of file.
        Command is used to read file by getting data from file streamed as byte array.
        #>fs_stream
        """
        global fs_file_pagechunk_list
        fs_file_pagechunk_list = []
        args = self._parse_args(arg, 1)
        if args == None:
            return
        fobj = open(args[0], "wb")
        #Counter for CRC16 mismatch
        nCRCMisMatchCnt = 0
        #Counter for Sequence number mismatch
        nSeqMisMatchCnt = 0
        #Reference sequence number for comparing received sequence number
        nSequenceNumber = 0
        nComputedCRC = int(0x0FFFF)
        nCRCPolynomial = int(0x1021)
        # Read the size of the file
        msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_FS, m2m2_file_sys_ls_req_t(2))
        msg.payload.command = M2M2_FILE_SYS_CMD_ENUM_t.M2M2_FILE_SYS_CMD_LS_REQ
        msg.payload.dir_path[0] = ord('\x01')
        msg.payload.dir_path[1] = ord('\x01')
        self._send_packet(msg)
        # Walk the directory listing to find the requested file's size.
        while True:
            reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_FS, m2m2_file_sys_ls_resp_t(), 100)
            if reply_msg != None:
                if reply_msg.payload.status != M2M2_FILE_SYS_STATUS_ENUM_t.M2M2_FILE_SYS_STATUS_OK:
                    break
                elif reply_msg.payload.filetype == FILE_TYPE_ENUM_t.M2M2_FILE_SYS_IS_DATA_FILE:
                    if (cast(reply_msg.payload.full_file_name, c_char_p).value == args[0]):
                        total_file_size = reply_msg.payload.filesize
                        #print("file size = {}".format(total_file_size))
                elif reply_msg.payload.filetype == FILE_TYPE_ENUM_t.M2M2_FILE_SYS_IS_CONFIG_FILE:
                    if cast(reply_msg.payload.full_file_name, c_char_p).value == args[0]:
                        total_file_size = reply_msg.payload.filesize
            else:
                self.vrb.err("No response from device")
                return
        # NOTE(review): if the file name never matched, total_file_size is unbound here.
        bar = tqdm.tqdm(total=total_file_size, dynamic_ncols=True, ascii=True, unit="B", unit_scale=True)
        msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_FS, m2m2_file_sys_get_req_t(len(args[0])))
        msg.payload.command = M2M2_FILE_SYS_CMD_ENUM_t.M2M2_FILE_SYS_CMD_DOWNLOAD_LOG_REQ
        temp_array = list(bytearray(args[0]))
        for index in range(len(args[0])):
            msg.payload.file_name[index] = temp_array[index]
        start_time = datetime.datetime.now()
        self._send_packet(msg)
        file_size = 0
        while True:
            # loop body here
            reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_FS_STREAM, m2m2_file_sys_download_log_stream_t(), 1000)
            if reply_msg != None:
                if reply_msg.payload.status == M2M2_FILE_SYS_STATUS_ENUM_t.M2M2_FILE_SYS_STATUS_ERROR:
                    self._print_file_system_status(reply_msg)
                    break
                file_size += reply_msg.payload.page_chunk_size
                # Payload bytes covered by the CRC: chunk plus 15 bytes of header/fields.
                data_length = reply_msg.payload.page_chunk_size + 15
                # print"Length received t1 = {}".format(data_length)
                # print "page_number = {}".format(reply_msg.payload.page_number)
                # print "page_chunk_number = {}".format(reply_msg.payload.page_chunk_number)
                # print "page_chunk_size = {}".format(reply_msg.payload.page_chunk_size)
                # Track the highest chunk number seen per page (used by the retransfer test).
                if (reply_msg.payload.page_number < len(fs_file_pagechunk_list)):
                    fs_file_pagechunk_list[reply_msg.payload.page_number] = reply_msg.payload.page_chunk_number
                else:
                    fs_file_pagechunk_list.insert(reply_msg.payload.page_number, reply_msg.payload.page_chunk_number)
                #self._print_file_system_status(reply_msg)
                # CRC-16/CCITT over header+payload, with the first 8 bytes byte-swapped
                # pairwise to match the firmware's 16-bit word order.
                crc16_data_array = buffer(reply_msg.header)[:] + buffer(reply_msg.payload)[:]
                for index in range(0, 8, 2):
                    crc16_data_array = swap(crc16_data_array, index, index + 1)
                nComputedCRC = int(0xFFFF)
                for nByte in range(data_length):
                    nComputedCRC = ((nComputedCRC >> 8) | (nComputedCRC << 8)) & 0xFFFF
                    nComputedCRC = nComputedCRC ^ ord(crc16_data_array[nByte])
                    nComputedCRC ^= (nComputedCRC & 0xFF) >> 4
                    nComputedCRC ^= ((nComputedCRC << 8) << 4) & 0xFFFF
                    nComputedCRC ^= ((nComputedCRC & 0xFF) << 4) << 1
                #'''print"nComputedCRC = {}".format(nComputedCRC)
                #print"Length received = {}".format(data_length)
                #print"nComputedCRC = {}".format(nComputedCRC)
                #print"received crc16 = {}".format(reply_msg.payload.crc16)'''
                if ((nComputedCRC != reply_msg.payload.crc16)):
                    nCRCMisMatchCnt += 1
                    print"received crc16 = {}".format(format_hex(crc16_data_array))
                    #print"received crc16 = {}".format((crc16_data_array))
                    #print"Length received t1 = {}".format(data_length)
                    print"CRC mismatch cnt = {}".format(nCRCMisMatchCnt)
                # The header checksum field carries the running sequence number.
                if (nSequenceNumber != reply_msg.header.checksum):
                    nSeqMisMatchCnt += 1
                # Restraining refernce sequence number to 16bit wide
                if (nSequenceNumber == 65535):
                    nSequenceNumber = 0
                else:
                    nSequenceNumber += 1
                bar.update(reply_msg.payload.page_chunk_size)
                # Final chunk may be shorter than the fixed payload buffer; trim it.
                if (reply_msg.payload.page_chunk_size != len(reply_msg.payload.page_chunk_bytes)):
                    fobj.write(bytearray(reply_msg.payload.page_chunk_bytes[0:int(reply_msg.payload.page_chunk_size)]))
                else :
                    fobj.write(reply_msg.payload.page_chunk_bytes)
            else:
                bar.close()
                self.vrb.err("No response from device.Stream file operation failed.")
                break
            # Any non-OK status (e.g. end-of-file) finishes the download.
            if (reply_msg.payload.status != M2M2_FILE_SYS_STATUS_ENUM_t.M2M2_FILE_SYS_STATUS_OK):
                fobj.close()
                bar.close()
                break
        # end of loop
        end_time = datetime.datetime.now()
        # print(fs_file_pagechunk_list)
        # print(len(fs_file_pagechunk_list))
        '''self.vrb.write("\n No. of CRC mismatches = {}".format(nCRCMisMatchCnt))
        self.vrb.write("\n No. of Seq mismatches = {}".format(nSeqMisMatchCnt))
        self.vrb.write("\nFile read complete!\n Start Time: {}\n End Time: {}\n Elapsed Time: {}\n Total bytes read: {}".format(start_time, end_time, end_time - start_time, file_size))'''

    def do_fs_stream_ble(self, arg):
        """ read contents of file.
        Command is used to read file by getting data from file streamed as byte array.
        #>fs_stream_ble
        """
        # NOTE(review): near-duplicate of do_fs_stream, differing only in the
        # download command (DOWNLOAD_LOG_BLE_REQ) and response struct.
        global fs_file_pagechunk_list
        fs_file_pagechunk_list = []
        args = self._parse_args(arg, 1)
        if args == None:
            return
        fobj = open(args[0], "wb")
        #Counter for CRC16 mismatch
        nCRCMisMatchCnt = 0
        #Counter for Sequence number mismatch
        nSeqMisMatchCnt = 0
        #Reference sequence number for comparing received sequence number
        nSequenceNumber = 0
        nComputedCRC = int(0x0FFFF)
        nCRCPolynomial = int(0x1021)
        # Read the size of the file
        msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_FS, m2m2_file_sys_ls_req_t(2))
        msg.payload.command = M2M2_FILE_SYS_CMD_ENUM_t.M2M2_FILE_SYS_CMD_LS_REQ
        msg.payload.dir_path[0] = ord('\x01')
        msg.payload.dir_path[1] = ord('\x01')
        self._send_packet(msg)
        while True:
            reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_FS, m2m2_file_sys_ls_resp_t(), 100)
            if reply_msg != None:
                if reply_msg.payload.status != M2M2_FILE_SYS_STATUS_ENUM_t.M2M2_FILE_SYS_STATUS_OK:
                    break
                elif reply_msg.payload.filetype == FILE_TYPE_ENUM_t.M2M2_FILE_SYS_IS_DATA_FILE:
                    if (cast(reply_msg.payload.full_file_name, c_char_p).value == args[0]):
                        total_file_size = reply_msg.payload.filesize
                        #print("file size = {}".format(total_file_size))
                elif reply_msg.payload.filetype == FILE_TYPE_ENUM_t.M2M2_FILE_SYS_IS_CONFIG_FILE:
                    if cast(reply_msg.payload.full_file_name, c_char_p).value == args[0]:
                        total_file_size = reply_msg.payload.filesize
            else:
                self.vrb.err("No response from device")
                return
        bar = tqdm.tqdm(total=total_file_size, dynamic_ncols=True, ascii=True, unit="B", unit_scale=True)
        msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_FS, m2m2_file_sys_get_req_t(len(args[0])))
        msg.payload.command = M2M2_FILE_SYS_CMD_ENUM_t.M2M2_FILE_SYS_CMD_DOWNLOAD_LOG_BLE_REQ
        temp_array = list(bytearray(args[0]))
        for index in range(len(args[0])):
            msg.payload.file_name[index] = temp_array[index]
        start_time = datetime.datetime.now()
        self._send_packet(msg)
        file_size = 0
        while True:
            # loop body here
            reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_FS_STREAM, m2m2_file_sys_download_log_ble_stream_t(), 1000)
            if reply_msg != None:
                if reply_msg.payload.status == M2M2_FILE_SYS_STATUS_ENUM_t.M2M2_FILE_SYS_STATUS_ERROR:
                    self._print_file_system_status(reply_msg)
                    break
                file_size += reply_msg.payload.page_chunk_size
                data_length = reply_msg.payload.page_chunk_size + 15
                #print"Length received t1 = {}".format(data_length)
                # print "page_number = {}".format(reply_msg.payload.page_number)
                # print "page_chunk_number = {}".format(reply_msg.payload.page_chunk_number)
                # print "page_chunk_size = {}".format(reply_msg.payload.page_chunk_size)
                if (reply_msg.payload.page_number < len(fs_file_pagechunk_list)):
                    fs_file_pagechunk_list[reply_msg.payload.page_number] = reply_msg.payload.page_chunk_number
                else:
                    fs_file_pagechunk_list.insert(reply_msg.payload.page_number, reply_msg.payload.page_chunk_number)
                #self._print_file_system_status(reply_msg)
                # CRC-16 over header+payload with the first 8 bytes pairwise swapped.
                crc16_data_array = buffer(reply_msg.header)[:] + buffer(reply_msg.payload)[:]
                for index in range(0, 8, 2):
                    crc16_data_array = swap(crc16_data_array, index, index + 1)
                nComputedCRC = int(0xFFFF)
                for nByte in range(data_length):
                    nComputedCRC = ((nComputedCRC >> 8) | (nComputedCRC << 8)) & 0xFFFF
                    nComputedCRC = nComputedCRC ^ ord(crc16_data_array[nByte])
                    nComputedCRC ^= (nComputedCRC & 0xFF) >> 4
                    nComputedCRC ^= ((nComputedCRC << 8) << 4) & 0xFFFF
                    nComputedCRC ^= ((nComputedCRC & 0xFF) << 4) << 1
                #'''print"nComputedCRC = {}".format(nComputedCRC)
                #print"Length received = {}".format(data_length)
                #print"nComputedCRC = {}".format(nComputedCRC)
                #print"received crc16 = {}".format(reply_msg.payload.crc16)'''
                if ((nComputedCRC != reply_msg.payload.crc16)):
                    nCRCMisMatchCnt += 1
                    print"received crc16 = {}".format(format_hex(crc16_data_array))
                    #print"received crc16 = {}".format((crc16_data_array))
                    #print"Length received t1 = {}".format(data_length)
                    print"CRC mismatch cnt = {}".format(nCRCMisMatchCnt)
                if (nSequenceNumber != reply_msg.header.checksum):
                    nSeqMisMatchCnt += 1
                # Restraining refernce sequence number to 16bit wide
                if (nSequenceNumber == 65535):
                    nSequenceNumber = 0
                else:
                    nSequenceNumber += 1
                bar.update(reply_msg.payload.page_chunk_size)
                if (reply_msg.payload.page_chunk_size != len(reply_msg.payload.page_chunk_bytes)):
                    fobj.write(bytearray(reply_msg.payload.page_chunk_bytes[0:int(reply_msg.payload.page_chunk_size)]))
                else :
                    fobj.write(reply_msg.payload.page_chunk_bytes)
            else:
                bar.close()
                self.vrb.err("No response from device.Stream file operation failed.")
                break
            if (reply_msg.payload.status != M2M2_FILE_SYS_STATUS_ENUM_t.M2M2_FILE_SYS_STATUS_OK):
                fobj.close()
                bar.close()
                break
        # end of loop
        end_time = datetime.datetime.now()
        # print(fs_file_pagechunk_list)
        # print(len(fs_file_pagechunk_list))
        '''self.vrb.write("\n No. of CRC mismatches = {}".format(nCRCMisMatchCnt))
        self.vrb.write("\n No. of Seq mismatches = {}".format(nSeqMisMatchCnt))
        self.vrb.write("\nFile read complete!\n Start Time: {}\n End Time: {}\n Elapsed Time: {}\n Total bytes read: {}".format(start_time, end_time, end_time - start_time, file_size))'''

    def do_fs_chunk_retransfer_test(self, arg):
        """ Download a file, then re-request every page chunk individually to
        exercise the chunk-retransmit path; chunks are appended to
        <name>_chunk_retransfer.LOG.
        #>fs_chunk_retransfer_test <file>
        """
        global fs_file_pagechunk_list
        args = self._parse_args(arg, 1)
        if args == None:
            return
        # First do a normal download to populate fs_file_pagechunk_list.
        if (get_cli_addr() == M2M2_ADDR_ENUM_t.M2M2_ADDR_APP_CLI):
            self.onecmd("fs_stream " + args[0]);
        else:
            self.onecmd("fs_stream_ble " + args[0]);
        print(fs_file_pagechunk_list)
        for page_num in range(len(fs_file_pagechunk_list)):
            print "page_number = {}".format(page_num)
            for page_chunk_num in range(fs_file_pagechunk_list[page_num] + 1):
                Retransmit_type = 0
                # Page numbers wrap every 65536; encode roll-over count separately.
                Page_Roll_over = int(page_num/65536)
                Page_number = page_num - (Page_Roll_over * 65536)
                Page_chunk_number = page_chunk_num
                filename, ext = args[0].split('.')
                temp_array = list(bytearray(args[0]))
                msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_FS, m2m2_file_sys_page_chunk_retransmit_req_t(len(args[0])))
                msg.payload.command = M2M2_FILE_SYS_CMD_ENUM_t.M2M2_FILE_SYS_CMD_CHUNK_RETRANSMIT_REQ
                msg.payload.retransmit_type = Retransmit_type
                msg.payload.page_roll_over = Page_Roll_over
                msg.payload.page_number = Page_number
                msg.payload.page_chunk_number = Page_chunk_number
                for index in range(len(args[0])):
                    msg.payload.file_name[index] = temp_array[index]
                self._send_packet(msg)
                nComputedCRC = int(0x0FFFF)
                nCRCPolynomial = int(0x1021)
                nCRCMisMatchCnt = 0
                # loop body here
                # Response struct depends on the transport (USB CLI vs BLE).
                if (get_cli_addr() == M2M2_ADDR_ENUM_t.M2M2_ADDR_APP_CLI):
                    reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_FS, m2m2_file_sys_download_log_stream_t(), 1000)
                else:
                    reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_FS, m2m2_file_sys_download_log_ble_stream_t(), 1000)
                if reply_msg != None:
                    # print"Status:{}".format(reply_msg.payload.status)
                    # print"page_number = {}".format(reply_msg.payload.page_number)
                    # print"page_chunk_number = {}".format(reply_msg.payload.page_chunk_number)
                    # print"page_chunk_size = {}".format(reply_msg.payload.page_chunk_size)
                    if ((reply_msg.payload.status == M2M2_FILE_SYS_STATUS_ENUM_t.M2M2_FILE_SYS_STATUS_OK) or (
                            reply_msg.payload.status == M2M2_FILE_SYS_STATUS_ENUM_t.M2M2_FILE_SYS_END_OF_FILE)):
                        fobj = open(filename + '_chunk_retransfer.LOG', "ab")
                        data_length = reply_msg.payload.page_chunk_size + 15
                        # print"Length received t1 = {}".format(data_length)
                        # Same CRC-16 verification as in do_fs_stream.
                        crc16_data_array = buffer(reply_msg.header)[:] + buffer(reply_msg.payload)[:]
                        for index in range(0, 8, 2):
                            crc16_data_array = swap(crc16_data_array, index, index + 1)
                        nComputedCRC = int(0xFFFF)
                        for nByte in range(data_length):
                            nComputedCRC = ((nComputedCRC >> 8) | (nComputedCRC << 8)) & 0xFFFF
                            nComputedCRC = nComputedCRC ^ ord(crc16_data_array[nByte])
                            nComputedCRC ^= (nComputedCRC & 0xFF) >> 4
                            nComputedCRC ^= ((nComputedCRC << 8) << 4) & 0xFFFF
                            nComputedCRC ^= ((nComputedCRC & 0xFF) << 4) << 1
                        # '''print"nComputedCRC = {}".format(nComputedCRC)
                        # print"Length received = {}".format(data_length)'''
                        # print"nComputedCRC = {}".format(nComputedCRC)
                        # print"received crc16 = {}".format(reply_msg.payload.crc16)
                        if ((nComputedCRC != reply_msg.payload.crc16)):
                            nCRCMisMatchCnt += 1
                            # print"CRC mismatch {}".format(nCRCMisMatchCnt)
                            # print"received crc16 = {}".format(format_hex(crc16_data_array))
                            # print"received crc16 = {}".format((crc16_data_array))
                            # print"Length received t1 = {}".format(data_length)
                            # print"CRC mismatch cnt = {}".format(nCRCMisMatchCnt)
                        # Restraining refernce sequence number to 16bit wide
                        if (reply_msg.payload.page_chunk_size != len(reply_msg.payload.page_chunk_bytes)):
                            fobj.write(bytearray(reply_msg.payload.page_chunk_bytes[0:int(reply_msg.payload.page_chunk_size)]))
                        else:
                            fobj.write(reply_msg.payload.page_chunk_bytes)
                        print"chunk stream obtained successfully"
                        self._print_file_system_status(reply_msg)
                        fobj.close()
                    else:
                        self._print_file_system_status(reply_msg)
                else:
                    self.vrb.err("No response from device.Stream file operation failed.")

    def do_get_file_cnt(self, arg):
        """ list files.Command help to view files in the current directory.
        #>file_cnt
        """
        args = self._parse_args(arg, 0)
        if args == None:
            return
        address = M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_FS
        msg = m2m2_packet(address, m2m2_file_sys_cmd_t())
        msg.payload.command = M2M2_FILE_SYS_CMD_ENUM_t.M2M2_FILE_SYS_CMD_GET_NUMBER_OF_FILE_REQ
        msg.payload.status = M2M2_APP_COMMON_STATUS_ENUM_t.M2M2_APP_COMMON_STATUS_OK
        self._send_packet(msg)
        reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_FS, m2m2_file_sys_get_file_count_pkt_t(), 120)
        if reply_msg != None:
            self._print_file_count_status(reply_msg)
        else:
            self.vrb.err("The device did not respond!")

    def do_get_file_info(self, arg):
        """ This is a test command, used to read file information from TOC.
        It is used for debugging any issues with file information storage page
        #>gets the information about the file present in the given page of TOC
        Usage: get_file_info arg1
        arg1 -> File index whose file Information has to be displayed
        """
        args = self._parse_args(arg, 1)
        if args == None:
            return
        address = M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_FS
        msg = m2m2_packet(address, m2m2_file_sys_get_file_info_req_pkt_t())
        msg.payload.command = M2M2_FILE_SYS_CMD_ENUM_t.M2M2_FILE_SYS_CMD_GET_FILE_INFO_REQ
        msg.payload.status = M2M2_APP_COMMON_STATUS_ENUM_t.M2M2_APP_COMMON_STATUS_OK
        msg.payload.file_index = int(args[0])
        self._send_packet(msg)
        reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_FS, m2m2_file_sys_get_file_info_resp_pkt_t(), 120)
        if reply_msg != None:
            if (reply_msg.payload.status == M2M2_FILE_SYS_STATUS_ENUM_t.M2M2_FILE_SYS_STATUS_OK):
                self._print_file_info_status(reply_msg)
            else:
                self.vrb.err("Error occured while reading the file with given index")
        else:
            self.vrb.err("The device did not respond!")

    # Failure counter shared by do_page_read_test / do_file_read_test.
    # NOTE(review): both methods declare it `global`, but this assignment sits at
    # class-body level in the mangled source — confirm it is module-level in the
    # original file, otherwise the global is only created by do_file_read_test.
    page_read_fail_cnt = 0

    def do_page_read_test(self, arg):
        """ ###!!! Under test !!!###
        This is a test command, used for testing by reading the given page
        It is used for debugging any issues while reading any page from the NAND flash
        #>checks if there are any errors during reading of a given page
        Usage: page_read_test arg1 arg2 arg3
        arg1 -> page number to be read
        arg2 -> 1 ( enables bytes read from page)/0 (disables bytes read from page)
        arg3 -> Number of bytes to read from page, its between 0-4096
        """
        global page_read_fail_cnt
        args = self._parse_args(arg, 3)
        if args == None:
            return
        address = M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_FS
        msg = m2m2_packet(address, m2m2_file_sys_page_read_test_req_pkt_t())
        msg.payload.command = M2M2_FILE_SYS_CMD_ENUM_t.M2M2_FILE_SYS_CMD_PAGE_READ_TEST_REQ
        msg.payload.status = M2M2_APP_COMMON_STATUS_ENUM_t.M2M2_APP_COMMON_STATUS_OK
        msg.payload.page_num = int(args[0])
        msg.payload.num_bytes = int(args[2])
        if (args[1] != None):
            print_en = int(args[1])
        self._send_packet(msg)
        reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_FS, m2m2_file_sys_page_read_test_resp_pkt_t(), 120)
        if reply_msg != None:
            if (reply_msg.payload.status == M2M2_FILE_SYS_STATUS_ENUM_t.M2M2_FILE_SYS_STATUS_OK):
                self._print_page_read_test_status(reply_msg, print_en)
            else:
                page_read_fail_cnt += 1
                self.vrb.err("Error occured while reading the page with given number")
        else:
            self.vrb.err("The device did not respond!")

    def do_file_read_test(self, arg):
        """ This is a test command, to display contents of particular file information
        Usage: file_read_test arg1
        arg1 -> Name of file whose contents has to be displayed
        """
        global page_read_fail_cnt
        args = self._parse_args(arg, 1)
        if args == None:
            return
        file_found = 0
        file_pos = 0
        # List the directory to find the file's TOC index (file_pos).
        msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_FS, m2m2_file_sys_ls_req_t(2))
        msg.payload.command = M2M2_FILE_SYS_CMD_ENUM_t.M2M2_FILE_SYS_CMD_LS_REQ
        msg.payload.dir_path[0] = ord('\x01')
        msg.payload.dir_path[1] = ord('\x01')
        self._send_packet(msg)
        while True:
            # loop body here
            reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_FS, m2m2_file_sys_ls_resp_t(), 120)
            if reply_msg != None:
                if (reply_msg.payload.status != M2M2_FILE_SYS_STATUS_ENUM_t.M2M2_FILE_SYS_STATUS_OK):
                    self._print_file_system_status(reply_msg)
                    if (file_found == 0):
                        self.vrb.write(" File Not Found ")
                        return
                    else:
                        break
                else :
                    if (file_found == 0):
                        file_pos += 1
                        if (cast(reply_msg.payload.full_file_name, c_char_p).value == args[0]):
                            file_found = 1
            else:
                self.vrb.err("No response from device")
        # end of loop
        address = M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_FS
        msg = m2m2_packet(address, m2m2_file_sys_get_file_info_req_pkt_t())
        msg.payload.command = M2M2_FILE_SYS_CMD_ENUM_t.M2M2_FILE_SYS_CMD_GET_FILE_INFO_REQ
        msg.payload.status = M2M2_APP_COMMON_STATUS_ENUM_t.M2M2_APP_COMMON_STATUS_OK
        msg.payload.file_index = file_pos
        self._send_packet(msg)
        reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_FS, m2m2_file_sys_get_file_info_resp_pkt_t(), 120)
        if reply_msg != None:
            if (reply_msg.payload.status == M2M2_FILE_SYS_STATUS_ENUM_t.M2M2_FILE_SYS_STATUS_OK):
                self._print_file_info_status(reply_msg)
                start_page = reply_msg.payload.start_page
                end_page = reply_msg.payload.end_page
                page_read_fail_cnt = 0
                #print("\n Testing by reading the each pages of File \n")
                #for page_index in range(start_page,end_page+1,1):
                #    print("\n reading page: {}".format(page_index))
                #    arg_str = str(page_index) + " 1" + " 5"
                #    print(arg_str)
                #    self.do_page_read_test(arg_str)
                # NOTE(review): the two page numbers are concatenated without a
                # separator here, yet do_read_file_sample_data parses two args —
                # looks like a missing " " between start and end page; confirm.
                arg_str = str(reply_msg.payload.start_page) + str(reply_msg.payload.end_page)
                print(arg_str)
                self.do_read_file_sample_data(arg_str)
                print("completed reading all the pages of given file index")
                print("Total No. of pages read = {}".format((end_page-start_page+1)))
                print("No. of pages reported read failures = {}".format(page_read_fail_cnt))
            else:
                self.vrb.err("Error occured while reading the file with given index")
        else:
            self.vrb.err("The device did not respond!")

    def do_read_file_sample_data(self, arg):
        """ ###!!! Under test !!!###
        read file sample bytes from every page
        #>read_file_sample_data start_page_end end_page_end
        eg 1: read_file_sample_data 256 270 ( linear scale)
        """
        args = self._parse_args(arg, 2)
        if args == None:
            self.vrb.write("please provide valid argument")
            return
        else:
            msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_FS, m2m2_file_sys_sample_data_file_read_req_t() )
            msg.payload.command = M2M2_FILE_SYS_CMD_ENUM_t.M2M2_FILE_SYS_CMD_FILE_READ_TEST_REQ
            msg.payload.status = M2M2_APP_COMMON_STATUS_ENUM_t.M2M2_APP_COMMON_STATUS_OK
            msg.payload.start_page_ind = int(args[0])
            msg.payload.end_page_ind = int(args[1])
            self._send_packet(msg)
            cnt = msg.payload.start_page_ind
            loop_ind = 0
            # Each response packet carries sample data for two pages.
            loop_num = (abs(msg.payload.end_page_ind - msg.payload.start_page_ind)+1)/2
            while loop_ind < loop_num:
                # loop body here
                reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_FS, m2m2_file_sys_sample_data_file_read_resp_t(), 10)
                if reply_msg != None:
                    # Sample data arrives as a comma-separated string; split on commas.
                    pages_sample_data = re.findall(r'[^\,]+', reply_msg.payload.sample_data)
                    for page_value in pages_sample_data:
                        print "Page index = {}".format(cnt)
                        print page_value
                        cnt = cnt + 1
                    loop_ind = loop_ind + 1
                else:
                    self.vrb.err("File information not received, time out !!")
                    return
            print("All pages read !!")

    def do_pattern_write(self, arg):
        """ multiple pattern write with prescribed file size, scale
        #>pattern_write file_size scale_type scale_factor base num_of_files_to_write
        eg 1: pattern_write 16384 0 2 1 2 ( linear scale)
        eg 2: pattern_write 16384 1 2 2 2 ( log scale)
        eg 3: pattern_write 16384 2 2 2 2( exp scale )
        """
        args = self._parse_args(arg, 5)
        if args == None:
            self.vrb.write("please provide valid argument")
            return
        else:
            address = M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_FS
            msg = m2m2_packet(address, m2m2_file_sys_pattern_write_req_pkt_t())
            msg.payload.command = M2M2_FILE_SYS_CMD_ENUM_t.M2M2_FILE_SYS_CMD_PATTERN_WRITE_REQ
            msg.payload.status = M2M2_APP_COMMON_STATUS_ENUM_t.M2M2_APP_COMMON_STATUS_OK
            msg.payload.file_size = int(args[0])
            msg.payload.scale_type = int(args[1])
            msg.payload.scale_factor = int(args[2])
            msg.payload.base = int(args[3])
            msg.payload.num_files_to_write = int(args[4])
            status = M2M2_APP_COMMON_STATUS_ENUM_t.M2M2_APP_COMMON_STATUS_OK
            if msg.payload.file_size < 4096:
                self.vrb.err("provide file size greater than 4096 bytes!")
                return
            # Write one file per iteration, growing file_size per the scale type.
            while msg.payload.num_files_to_write > 0:
                self._send_packet(msg)
                print "File size computed = {}".format(msg.payload.file_size)
                reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_FS, m2m2_file_sys_pattern_write_resp_pkt_t(), 5400)
                if reply_msg != None:
                    if reply_msg.payload.status == M2M2_APP_COMMON_STATUS_ENUM_t.M2M2_APP_COMMON_STATUS_OK:
                        if msg.payload.scale_type == 0:#linear
                            msg.payload.file_size *= msg.payload.scale_factor
                        elif msg.payload.scale_type == 1:#log
                            if msg.payload.scale_factor != 1:
                                msg.payload.file_size *= int(math.log(msg.payload.base, msg.payload.scale_factor))
                        elif msg.payload.scale_type == 2:#exp
                            msg.payload.file_size *= int(math.exp(msg.payload.scale_factor))
                    else:
                        if reply_msg.payload.status == M2M2_FILE_SYS_STATUS_ENUM_t.M2M2_FILE_SYS_ERR_MEMORY_FULL:
                            self.vrb.err("Memory full breaking loop as new files cannot be written!")
                            status = M2M2_FILE_SYS_STATUS_ENUM_t.M2M2_FILE_SYS_ERR_MEMORY_FULL
                            break
                        elif reply_msg.payload.status == M2M2_FILE_SYS_STATUS_ENUM_t.M2M2_FILE_SYS_ERR_MAX_FILE_COUNT:
                            self.vrb.err("Max file count crossed!")
                            status = M2M2_FILE_SYS_STATUS_ENUM_t.M2M2_FILE_SYS_ERR_MAX_FILE_COUNT
                            break
                    status |= reply_msg.payload.status
                else:
                    self.vrb.err("The device did not respond!")
                time.sleep(5)
                msg.payload.num_files_to_write -= 1
            self._print_file_system_status(reply_msg)

    def do_fs_sub_status(self, arg):
        """ Query the file-system subscription state of a stream.
        #>fs_sub_status <stream>
        """
        fs_address = M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_FS
        address = None
        args = self._parse_args(arg, 1)
        if args == None:
            return
        for a in args:
            if a in stream_name_map:
                address = stream_name_map[a]["application"]
                stream = stream_name_map[a]["stream"]
        if address == None:
            self.vrb.err("Incorrect usage! You did not provide a valid stream.")
            return
        msg = m2m2_packet(fs_address, m2m2_app_common_sub_op_t())
        msg.payload.stream = stream
        msg.payload.command = M2M2_FILE_SYS_CMD_ENUM_t.M2M2_FILE_SYS_CMD_GET_FS_STREAM_SUB_STATUS_REQ
        self._send_packet(msg)
        reply_msg = self._get_packet(fs_address, m2m2_file_sys_get_subs_status_resp_t(), 20)
        if reply_msg != None:
            stream_f = self._get_enum_name(M2M2_ADDR_ENUM_t, reply_msg.payload.stream)
            fs_sub_state = self._get_enum_name(FILE_SYS_STREAM_SUBS_STATE_ENUM_t, reply_msg.payload.subs_state)
            if stream == None:
                stream = hex(reply_msg.payload.stream_f)
            self.vrb.write("Application: {}".format(stream_f))
            self.vrb.write("FS_SUBS_STATUS: '{}'".format(fs_sub_state))
        else:
            self.vrb.err("No response from device. FS stream status getting failed.")

    def do_fs_status(self, arg):
        """ File system status info.
        Command to get file system current status.
        #>fs_status
        """
        args = self._parse_args(arg, 0)
        if args == None:
            return
        msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_FS, m2m2_file_sys_cmd_t())
        msg.payload.command = M2M2_FILE_SYS_CMD_ENUM_t.M2M2_FILE_SYS_CMD_GET_FS_STATUS_REQ
        self._send_packet(msg)
        reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_FS, m2m2_file_sys_cmd_t(), 10)
        if reply_msg != None:
            self._print_file_system_status(reply_msg)
        else:
            self.vrb.err("No response from device.Getting file system status info failed.")

    def do_get_max_tx_pkt_comb_cnt(self, arg):
        """ This is a command, to be used from iOS app to get the "max_tx_pkt_comb_cnt" used in the Watch Fw.
        It is based on this value that no: of pkts for BLE Tx is combined and send out.
        #>get_max_tx_pkt_comb_cnt
        """
        address = M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM
        msg = m2m2_packet(address, m2m2_ble_max_tx_pkt_comb_cnt_resp_cmd_t())
        msg.payload.command = M2M2_PM_SYS_COMMAND_ENUM_t.M2M2_PM_SYS_BLE_GET_MAX_TX_PKT_COMB_CNT_REQ
        msg.payload.status = M2M2_PM_SYS_STATUS_ENUM_t.M2M2_PM_SYS_STATUS_OK
        self._send_packet(msg)
        reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM, m2m2_ble_max_tx_pkt_comb_cnt_resp_cmd_t(), 20)
        if reply_msg != None:
            if (reply_msg.payload.status == M2M2_PM_SYS_STATUS_ENUM_t.M2M2_PM_SYS_STATUS_OK):
                print("max_tx_pkt_comb_cnt used in Watch Fw : {} ").format(reply_msg.payload.max_tx_pkt_comb_cnt)
            else:
                self.vrb.err("Error occured while reading max_tx_pkt_comb_cnt")
        else:
            self.vrb.err("The device did not respond!")

    def do_set_max_tx_pkt_comb_cnt(self, arg):
        """ This is a command, to be used from iOS app to set the "max_tx_pkt_comb_cnt" used in the Watch Fw.
        It is based on this value that no: of pkts for BLE Tx is combined and send out.
        #>set_max_tx_pkt_comb_cnt 1
        """
        args = self._parse_args(arg, 1)
        if args == None:
            return
        address = M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM
        msg = m2m2_packet(address, m2m2_ble_max_tx_pkt_comb_cnt_resp_cmd_t())
        msg.payload.command = M2M2_PM_SYS_COMMAND_ENUM_t.M2M2_PM_SYS_BLE_SET_MAX_TX_PKT_COMB_CNT_REQ
        msg.payload.status = M2M2_PM_SYS_STATUS_ENUM_t.M2M2_PM_SYS_STATUS_OK
        msg.payload.max_tx_pkt_comb_cnt = int(args[0])
        self._send_packet(msg)
        reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM, m2m2_ble_max_tx_pkt_comb_cnt_resp_cmd_t(), 20)
        if reply_msg != None:
            if (reply_msg.payload.status == M2M2_PM_SYS_STATUS_ENUM_t.M2M2_PM_SYS_STATUS_OK):
                print("max_tx_pkt_comb_cnt changed in Watch Fw to : {} ").format(reply_msg.payload.max_tx_pkt_comb_cnt)
            else:
                self.vrb.err("Error occured while writing max_tx_pkt_comb_cnt")
        else:
            self.vrb.err("The device did not respond!")

    def do_get_hib_mode_status(self, arg):
        """ This is a command, to be used from iOS app/other tools to get the hibernate mode status-
enabled/disabled in the Watch Fw. It is based on this value that Hibernate Mode happens. #>get_hib_mode_status """ address = M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM msg = m2m2_packet(address, m2m2_hibernate_mode_status_resp_cmd_t()) msg.payload.command = M2M2_PM_SYS_COMMAND_ENUM_t.M2M2_PM_SYS_GET_HIBERNATE_MODE_STATUS_REQ msg.payload.status = M2M2_PM_SYS_STATUS_ENUM_t.M2M2_PM_SYS_STATUS_OK self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM, m2m2_hibernate_mode_status_resp_cmd_t(), 20) if reply_msg != None: if (reply_msg.payload.status == M2M2_PM_SYS_STATUS_ENUM_t.M2M2_PM_SYS_STATUS_OK): print("hib_mode_status used in Watch Fw : {} ").format(reply_msg.payload.hib_mode_status) else: self.vrb.err("Error occured while reading hib_mode_status") else: self.vrb.err("The device did not respond!") def do_set_hib_mode_status(self, arg): """ This is a command, to be used from iOS app/other tools to control the hibernate mode status- enabled/disabled in the Watch Fw. It is based on this value that Hibernate Mode happens. 
#>set_hib_mode_status hib_control hib_control: 1 -> to enable Hib Mode 0 -> to disable Hib Mode Eg: #>set_hib_mode_status 1 """ args = self._parse_args(arg, 1) if args == None: return address = M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM msg = m2m2_packet(address, m2m2_hibernate_mode_status_resp_cmd_t()) msg.payload.command = M2M2_PM_SYS_COMMAND_ENUM_t.M2M2_PM_SYS_SET_HIBERNATE_MODE_STATUS_REQ msg.payload.status = M2M2_PM_SYS_STATUS_ENUM_t.M2M2_PM_SYS_STATUS_OK msg.payload.hib_mode_status = int(args[0]) self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM, m2m2_hibernate_mode_status_resp_cmd_t(), 20) if reply_msg != None: if (reply_msg.payload.status == M2M2_PM_SYS_STATUS_ENUM_t.M2M2_PM_SYS_STATUS_OK): print("hib_mode_status changed in Watch Fw to : {} ").format(reply_msg.payload.hib_mode_status) else: self.vrb.err("Error occured while writing hib_mode_status") else: self.vrb.err("The device did not respond!") def do_test_252(self,arg): args = self._parse_args(arg,1) # for item in args: # print item cnt = int(args[0]) print cnt if cnt == None: return #self.onecmd("quickstart start_log_adxl_252") for i in range(cnt): print i self.onecmd("quickstart start_reg_read_adxl") self.onecmd("quickstart start_log_adxl_252") print "test #252 done" def do_test_629(self,arg): """ Maximum count tried / tested is 10. 
""" args = self._parse_args(arg,1) # for item in args: # print item cnt = int(args[0]) print cnt if cnt == None: return for i in range(cnt): print i self.onecmd("fs_format") self.onecmd("sensor adpd4000 start") self.onecmd("fs_sub add radpd6") self.onecmd("fs_log start") self.onecmd("delay 10") self.onecmd("fs_sub remove radpd6") self.onecmd("sensor adpd4000 stop") self.onecmd("fs_log stop") self.onecmd("fs_ls") print "test #629 done" def do_flash_write_format_test(self,arg): """ arg 1 -> Head = Tail arg 2 -> Head > Tail arg 3 -> Head < Tail """ args = self._parse_args(arg,2) cnt = int(args[1]) self.onecmd("flash_reset") print cnt if cnt == None: return if "1" in args[0]: for i in range(cnt): print i self.onecmd("fs_req_debug_info") self.onecmd("pattern_write 65536000 0 1 1 1") self.onecmd("delay 1") self.onecmd("pattern_write 65536000 0 1 1 1") self.onecmd("delay 1") self.onecmd("pattern_write 65536000 0 1 1 1") self.onecmd("delay 1") self.onecmd("pattern_write 65536000 0 1 1 1") self.onecmd("delay 1") self.onecmd("pattern_write 65536000 0 1 1 1") self.onecmd("delay 1") self.onecmd("pattern_write 65536000 0 1 1 1") self.onecmd("delay 1") self.onecmd("pattern_write 65536000 0 1 1 1") self.onecmd("delay 1") self.onecmd("pattern_write 65536000 0 1 1 1") self.onecmd("delay 1") self.onecmd("pattern_write 11534336 0 1 1 1") self.onecmd("delay 1") self.onecmd("fs_req_debug_info") self.onecmd("fs_format") self.onecmd("fs_req_debug_info") self.onecmd("fs_format_DebugInfo") elif "2" in args[0]: for i in range(cnt): print i self.onecmd("fs_req_debug_info") self.onecmd("pattern_write 32768000 0 2 1 2") self.onecmd("fs_req_debug_info") self.onecmd("fs_format") self.onecmd("fs_req_debug_info") self.onecmd("fs_format_DebugInfo") elif "3" in args[0]: for i in range(cnt): print i self.onecmd("fs_req_debug_info") self.onecmd("pattern_write 32768000 0 2 1 2") self.onecmd("delay 1") self.onecmd("fs_req_debug_info") self.onecmd("fs_format") self.onecmd("fs_format_DebugInfo") 
self.onecmd("fs_req_debug_info") self.onecmd("pattern_write 65536000 0 1 1 1") self.onecmd("delay 1") self.onecmd("pattern_write 65536000 0 1 1 1") self.onecmd("delay 1") self.onecmd("pattern_write 65536000 0 1 1 1") self.onecmd("delay 1") self.onecmd("pattern_write 65536000 0 1 1 1") self.onecmd("delay 1") self.onecmd("pattern_write 65536000 0 1 1 1") self.onecmd("delay 1") self.onecmd("pattern_write 65536000 0 1 1 1") self.onecmd("delay 1") self.onecmd("pattern_write 65536000 0 1 1 1") self.onecmd("delay 1") self.onecmd("pattern_write 65536000 0 1 1 1") self.onecmd("delay 1") self.onecmd("fs_req_debug_info") self.onecmd("fs_format") self.onecmd("fs_format_DebugInfo") self.onecmd("fs_req_debug_info") print "test #flash_write_format_test done" def do_test_341(self,arg): args = self._parse_args(arg,1) # for item in args: # print item cnt = int(args[0]) print cnt if cnt == None: return #self.onecmd("quickstart start_log_adxl_252") for i in range(cnt): print i self.onecmd("quickstart start_log_mv_uc1") self.onecmd("quickstop stop_log_mv_uc1") # self.onecmd("quickstart adxl") # time.sleep(2) # self.onecmd("quickstop adxl") #self.onecmd("quickstop eda") print "test #341 done" def do_test_330(self,arg): args = self._parse_args(arg,1) # for item in args: # print item cnt = int(args[0]) print cnt if cnt == None: return for i in range(cnt): print i #self.onecmd("quickstart tst_issue_330_1") #self.onecmd("quickstart tst_issue_330_2") #self.onecmd("quickstart start_log_bia") #self.onecmd("quickstop stop_log_bia") self.onecmd("quickstart start_mv_uc1") self.onecmd("quickstop stop_mv_uc1") print "test #330 done" def do_test_217(self,arg): args = self._parse_args(arg,1) # for item in args: # print item cnt = int(args[0]) print cnt if cnt == None: return self.onecmd("quickstart eda") for i in range(cnt): print i self.onecmd("quickstart start_stop_adpd4k") self.onecmd("quickstop eda") print "test #217 done" def do_test_230(self,arg): args = self._parse_args(arg,1) # for item in 
args: # print item cnt = int(args[0]) print cnt if cnt == None: return for i in range(cnt): print i self.onecmd("quickstart start_stop_230") self.onecmd("status adpd4000") self.onecmd("status adxl") self.onecmd("status eda") self.onecmd("status temperature") print "test #230 done" def do_test_245(self,arg): args = self._parse_args(arg,1) cnt = int(args[0]) print cnt if cnt == None: return for i in range(cnt): print i self.onecmd("quickstart mv_uc1_streaming_start") self.onecmd("status adpd4000") self.onecmd("status adxl") self.onecmd("status temperature") self.onecmd("delay 1") self.onecmd("quickstop mv_uc1_streaming_stop") print "test #245 done" def do_test_245_1(self,arg): args = self._parse_args(arg,1) cnt = int(args[0]) print cnt if cnt == None: return for i in range(cnt): print i self.onecmd("quickstart mv_uc1_245_issue") self.onecmd("delay 1") print "test #245_1 done" def do_test_245_2(self,arg): args = self._parse_args(arg,1) cnt = int(args[0]) print cnt if cnt == None: return for i in range(cnt): print i self.onecmd("quickstart mv_uc1_245_issue_wo_dcb") print "test #245_2 done" def do_test_245_3(self,arg): args = self._parse_args(arg,1) cnt = int(args[0]) print cnt if cnt == None: return for i in range(cnt): print i self.onecmd("quickstart mv_uc1_245_issue_200Hz") self.onecmd("delay 1") print "test #245_3 done" def do_test_245_4(self,arg): args = self._parse_args(arg,1) cnt = int(args[0]) print cnt if cnt == None: return for i in range(cnt): print i self.onecmd("quickstart mv_uc1_245_issue_300Hz") self.onecmd("delay 1") print "test #245_4 done" def do_test_dcb(self,arg): args = self._parse_args(arg,1) cnt = int(args[0]) print cnt if cnt == None: return for i in range(cnt): print i self.onecmd("write_dcb_config adpd4000 UseCase1.dcfg") #self.onecmd("delay 1") self.onecmd("read_dcb_config adpd4000") #self.onecmd("delay 1") self.onecmd("compare_cfg_files UseCase1.dcfg adpd4000_dcb_get.dcfg") self.onecmd("delete_dcb_config adpd4000") #self.onecmd("delay 1") 
print "test DCB Write/Read/Erase done for count {}".format(cnt) def do_test_dcb_gen_blk(self,arg): args = self._parse_args(arg,1) cnt = int(args[0]) print cnt if cnt == None: return for i in range(cnt): print i self.onecmd("write_dcb_config lt_dcb_config gen_blk_dcb.lcfg") #self.onecmd("delay 1") self.onecmd("read_dcb_config lt_dcb_config") #self.onecmd("delay 1") self.onecmd("delete_dcb_config lt_dcb_config") #self.onecmd("delay 1") print "test gen blk DCB Write/Read/Erase done for count {}".format(cnt) def do_test_dcb_ad7156(self,arg): args = self._parse_args(arg,1) cnt = int(args[0]) print cnt if cnt == None: return for i in range(cnt): print i self.onecmd("write_dcb_config ad7156 ad7156_dcb.dcfg") #self.onecmd("delay 1") self.onecmd("read_dcb_config ad7156") #self.onecmd("delay 1") self.onecmd("compare_cfg_files ad7156_dcb.dcfg ad7156_dcb_get.dcfg") self.onecmd("delete_dcb_config ad7156") #self.onecmd("delay 1") print "test AD7156 DCB Write/Read/Erase done for count {}".format(cnt) def do_test_335(self,arg): args = self._parse_args(arg,1) cnt = int(args[0]) print cnt if cnt == None: return for i in range(cnt): print i self.onecmd("quickstart ppg") self.onecmd("delay 1") self.onecmd("lcfgPpgCheck 40 ppg_dcb.lcfg") self.onecmd("quickstop ppg") print "test ppg start, lcfgPpgCheck, ppg stop done for count {}".format(cnt) def do_fs_list(self, arg): """ read contents of file. Command is used to read file by getting data from file streamed as byte array. #>fs_list """ args = self._parse_args(arg,1) if args == None: return if "start" in args: i=0 for i in range(1,10): self.onecmd("quickstart start_log_adxl") time.sleep(15) self.onecmd("quickstop stop_log_adxl") else: self.vrb.err("running automation multiple files failed.") print "test automate done!" 
def do_test_469(self,arg): args = self._parse_args(arg,1) # for item in args: # print item cnt = int(args[0]) print cnt if cnt == None: return for i in range(cnt): print i self.onecmd("lcfgEcgWrite 0:50") self.onecmd("sensor ecg start") self.onecmd("sub recg add") self.onecmd("delay 10") self.onecmd("quickstop ecg") self.onecmd("lcfgEcgWrite 0:100") self.onecmd("sensor ecg start") self.onecmd("sub recg add") self.onecmd("delay 10") self.onecmd("quickstop ecg") self.onecmd("lcfgEcgWrite 0:200") self.onecmd("sensor ecg start") self.onecmd("sub recg add") self.onecmd("delay 10") self.onecmd("quickstop ecg") self.onecmd("lcfgEcgWrite 0:300") self.onecmd("sensor ecg start") self.onecmd("sub recg add") self.onecmd("delay 10") self.onecmd("quickstop ecg") self.onecmd("lcfgEcgWrite 0:400") self.onecmd("sensor ecg start") self.onecmd("sub recg add") self.onecmd("delay 10") self.onecmd("quickstop ecg") self.onecmd("lcfgEcgWrite 0:500") self.onecmd("sensor ecg start") self.onecmd("sub recg add") self.onecmd("delay 10") self.onecmd("quickstop ecg") print "test #469 done" def do_test_502_UC4(self,arg): args = self._parse_args(arg,1) # for item in args: # print item cnt = int(args[0]) print cnt if cnt == None: return for i in range(cnt): print i self.onecmd("fs_format") for j in range(62): print i,j self.onecmd("quickstart start_stream_mv_uc4_1") self.onecmd("delay 1") self.onecmd("quickstart start_log_mv_uc4_1") self.onecmd("delay 1") self.onecmd("quickstop stop_log_mv_uc4_1") self.onecmd("delay 1") self.onecmd("quickstop stop_stream_mv_uc4_1") print "test #502 done" def do_test_627_log(self,arg) : args = self._parse_args(arg,1) cnt = int(args[0]) self.onecmd("flash_reset") for _ in range(cnt): self.onecmd("quickstart start_log_mv_uc1") self.onecmd("delay 3") self.onecmd("fs_status") self.onecmd("quickstop stop_log_mv_uc1") self.onecmd("fs_vol_info") self.onecmd("fs_req_debug_info") self.onecmd("fs_ls") self.onecmd("get_file_cnt") self.onecmd("fs_get_bad_blocks") 
self.onecmd("get_file_cnt") def do_test_627_download(self,arg) : args = self._parse_args(arg,0) error_msg, log_names_list = self.do_fs_ls("") for file_name in log_names_list: print file_name self.onecmd("fs_stream "+ file_name["file"]) def do_test_627_page(self,arg) : args = self._parse_args(arg,0) self.onecmd("flash_reset") for _ in range(1): self.onecmd("quickstart start_log_mv_uc1") self.onecmd("delay 3") self.onecmd("fs_status") self.onecmd("quickstop stop_log_mv_uc1") self.onecmd("page_read_test 4 1 40") self.onecmd("fs_block_erase 1") self.onecmd("page_read_test 4 1 40") def do_test_627_pattern_write(self,arg) : args = self._parse_args(arg,0) self.onecmd("flash_reset") self.onecmd("pattern_write 16384 0 2 1 4") self.onecmd("test_627_download") def do_edadcfg_write(self,arg) : """ To write default dcfg below command is used #>edadcfg_write 1 other arguments will be used to write user configurable registers to test in future """ args = self._parse_args(arg,1) if(int(args[0]) == 1): self.onecmd("EdaDcfgUpdate 0x00002044 0x00004091 0x00003008 0x02000000 0x0000300C 0xFFFFFFFF 0x000021D8 0x00000480 0x000021D8 0x00000489 0x000021A8 0x00000814 0x000020D0 0x00000021 0x00002054 0x00000048 0x00002114 0x00000103 0x00002008 0x00008000") else: self.onecmd("EdaDcfgUpdate 0x00002044 0x00004092 0x00003008 0x02000001 0x0000300C 0xFFFFFFFE 0x000021D8 0x00000481 0x000021D8 0x00000487 0x000021A8 0x00000815 0x000020D0 0x00000022 0x00002054 0x00000049 0x00002114 0x00000104 0x00002008 0x00008001") def do_edadcfg_read(self,arg) : self.onecmd("EdaDcfgRead 0x00002044 0x00003008 0x0000300C 0x000021D8 0x000021D8 0x000021A8 0x000020D0 0x00002054 0x00002114 0x00002008") def do_get_apps_health_status(self, arg): """ read health status of all applications #>get_apps_health_status """ args = self._parse_args(arg,0) if args == None: self.vrb.err("Incorrect usage! 
Please check help.") return msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM, m2m2_get_apps_running_stat_req_cmd_t()) msg.payload.command = M2M2_PM_SYS_COMMAND_ENUM_t.M2M2_PM_GET_APPS_HEALTH_REQ self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM, m2m2_get_apps_running_stat_resp_cmd_t(), 240) if reply_msg != None: self.vrb.write(" AD5940 ISR Count: {}".format(int(reply_msg.payload.ad5940_isr_cnt))) self.vrb.write(" ADPD4000 ISR Count : {}".format(int(reply_msg.payload.adpd4000_isr_cnt))) self.vrb.write(" ADXL ISR Count : {}".format(int(reply_msg.payload.adxl_isr_cnt))) self._print_packet_status(reply_msg) else: self.vrb.err("No response from device.Health status failed.") def do_get_eda_debug_Info(self, arg): """ read debug info of ad5940 #>get_eda_debug_Info """ args = self._parse_args(arg,0) if args == None: self.vrb.err("Incorrect usage! Please check help.") return msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_EDA, m2m2_get_eda_debug_info_req_cmd_t()) msg.payload.command = M2M2_EDA_APP_CMD_ENUM_t.M2M2_EDA_APP_CMD_REQ_DEBUG_INFO_REQ self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_EDA, m2m2_get_eda_debug_info_resp_cmd_t(), 240) if reply_msg != None: self.vrb.write(" FIFO Overflow status : {}".format(int(reply_msg.payload.ad5940_fifo_overflow_status))) self.vrb.write(" FIFO Level : {}".format(int(reply_msg.payload.ad5940_fifo_level))) self.vrb.write(" Time gap between Interrupts : {}us".format(int(reply_msg.payload.Interrupts_time_gap))) self.vrb.write(" Time gap between packets : {}us".format(int(reply_msg.payload.packets_time_gap))) self.vrb.write(" Time taken for RTIA Calibration : {}s".format(int(reply_msg.payload.rtia_calibration_time))) self.vrb.write(" Time taken for first measurement to start : {}s".format(int(reply_msg.payload.delay_in_first_measurements))) self.vrb.write(" Time taken for first Voltage measurement: {}us".format(int(reply_msg.payload.first_voltage_measure_time))) 
self.vrb.write(" Time taken for first Current measurement : {}us".format(int(reply_msg.payload.first_current_measure_time))) self.vrb.write(" Time gap between successive Voltage Measurements : {}us".format(int(reply_msg.payload.voltage_measure_time_gap))) self.vrb.write(" Time gap between successive Current Measurements : {}us".format(int(reply_msg.payload.current_measure_time_gap))) self.vrb.write(" Time taken EDA Initilization : {}s".format(int(reply_msg.payload.EDA_Init_Time))) self.vrb.write(" Time taken EDA De initialization : {}us".format(int(reply_msg.payload.EDA_DeInit_Time))) self._print_packet_status(reply_msg) else: self.vrb.err("No response from device.Health status failed.") def do_getPpgLcfg(self, arg): """ Get the PPG LCFG, used from Watch. The argument is the LCFG ID, which is 40 for adpd4000 Eg: = getPpgLcfg 40 """ args = self._parse_args(arg,1) if args == None: self.vrb.err("Incorrect usage! Please check help.") return if (args[0] != '40'): self.vrb.err("Invalid LCFG ID !") return msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_PPG, m2m2_ppg_lcfg_data_t()) msg.payload.command = M2M2_APP_COMMON_CMD_ENUM_t.M2M2_APP_COMMON_CMD_GET_LCFG_REQ self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_PPG, m2m2_ppg_lcfg_data_t(), 100) if reply_msg == None: print "Reading PPG LCFG failed!" return Cnt = 0 lcfg_array_count = int(reply_msg.payload.size) while Cnt < lcfg_array_count: self.vrb.write("Reading LCFG: 0x{:08X} {}".format(int(reply_msg.payload.lcfgdata[Cnt]), Cnt)) Cnt+=1 status = self._get_enum_name(M2M2_APP_COMMON_STATUS_ENUM_t, reply_msg.payload.status) if status == None: status = format(reply_msg.payload.status, '#04x') self.vrb.write("Size : {:02}".format(lcfg_array_count)) self.vrb.write("Command return status: {}".format(status)) def do_fs_KeyValuePair(self, arg): """ Inject KeyValuePair into the log. 
Usage: #>fs_KeyValuePair 4562 Note: The entered Value ID (maximum-16 characters) will retain untill you start and close the file. For every new file,you need to inject value ID. """ args = self._parse_args(arg, 1) if args == None: self.vrb.err("Atleast one argument needed") return msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_FS, m2m2_file_sys_set_key_value_pair_req_t()) msg.payload.command = M2M2_FILE_SYS_CMD_ENUM_t.M2M2_FILE_SYS_CMD_SET_KEY_VALUE_PAIR_REQ temp_array = list(bytearray(args[0])) if len(args[0]) > 16: self.vrb.err("Maximum 16 characters allowed for value ID") return for index in range(len(args[0])): msg.payload.valueID[index] = temp_array[index] self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_FS, m2m2_file_sys_cmd_t(), 10) if reply_msg != None: self._print_file_system_status(reply_msg) else: self.vrb.err("No response from device.Injecting keyvaluePair failed") def do_fs_stream_page_chunk(self, arg): """ Get the particular chunk of page from file by specifying the page chunk number. ----------------------------------------------- Usage: #>fs_stream_chunk RETRANSMIT_TYPE page_rollover page_number page_chunk_number filename #>fs_stream_chunk 0 0 268 3 05113CAC.LOG RETRANSMIT_TYPE: 0 - CHUNK CRC ERROR (Get page current chunk) 1 - CHUNK LOST (Get page next chunk) page_rollover: pagenumber maximum is 0-65535. if we request the pagenumber after 65535 ,page_rollover value should be set to 1 page_number: pagenumber maximum is 0-65535. Pagenumber recieved on download log stream page_chunk_number: page_chunk_number maximum is 0-7 in USB mode and 0-18 in BLE mode. 
page_chunk_number recieved on download log stream """ args = self._parse_args(arg, 5) if args == None: return Retransmit_type = int(args[0]) Page_Roll_over = int(args[1]) Page_number = int(args[2]) Page_chunk_number = int(args[3]) filename,ext = args[4].split('.') temp_array = list(bytearray(args[4])) msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_FS, m2m2_file_sys_page_chunk_retransmit_req_t(len(args[4]))) msg.payload.command = M2M2_FILE_SYS_CMD_ENUM_t.M2M2_FILE_SYS_CMD_CHUNK_RETRANSMIT_REQ msg.payload.retransmit_type = Retransmit_type msg.payload.page_roll_over = Page_Roll_over msg.payload.page_number = Page_number msg.payload.page_chunk_number = Page_chunk_number for index in range(len(args[4])): msg.payload.file_name[index] = temp_array[index] self._send_packet(msg) nComputedCRC = int(0x0FFFF) nCRCPolynomial = int(0x1021) nCRCMisMatchCnt = 0 # loop body here if(get_cli_addr() == M2M2_ADDR_ENUM_t.M2M2_ADDR_APP_CLI): reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_FS, m2m2_file_sys_download_log_stream_t(), 1000) else: reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_FS, m2m2_file_sys_download_log_ble_stream_t(), 1000) if reply_msg != None: #print"Status:{}".format(reply_msg.payload.status) print "page_number = {}".format(reply_msg.payload.page_number) print "page_chunk_number = {}".format(reply_msg.payload.page_chunk_number) print "page_chunk_size = {}".format(reply_msg.payload.page_chunk_size) if ((reply_msg.payload.status == M2M2_FILE_SYS_STATUS_ENUM_t.M2M2_FILE_SYS_STATUS_OK) or ( reply_msg.payload.status == M2M2_FILE_SYS_STATUS_ENUM_t.M2M2_FILE_SYS_END_OF_FILE)): fobj = open(filename + '_chunk.LOG', "wb") data_length = reply_msg.payload.page_chunk_size + 15 # print"Length received t1 = {}".format(data_length) crc16_data_array = buffer(reply_msg.header)[:] + buffer(reply_msg.payload)[:] for index in range(0, 8, 2): crc16_data_array = swap(crc16_data_array, index, index + 1) nComputedCRC = int(0xFFFF) for nByte in range(data_length): 
nComputedCRC = ((nComputedCRC >> 8) | (nComputedCRC << 8))&0xFFFF nComputedCRC = nComputedCRC ^ ord(crc16_data_array[nByte]) nComputedCRC ^=(nComputedCRC & 0xFF) >> 4 nComputedCRC ^= ((nComputedCRC << 8) << 4)&0xFFFF nComputedCRC ^= ((nComputedCRC & 0xFF) << 4) << 1 # '''print"nComputedCRC = {}".format(nComputedCRC) # print"Length received = {}".format(data_length)''' # print"nComputedCRC = {}".format(nComputedCRC) # print"received crc16 = {}".format(reply_msg.payload.crc16) if ((nComputedCRC != reply_msg.payload.crc16)): nCRCMisMatchCnt += 1 print"CRC mismatch {}".format(nCRCMisMatchCnt) # print"received crc16 = {}".format(format_hex(crc16_data_array)) # print"received crc16 = {}".format((crc16_data_array)) # print"Length received t1 = {}".format(data_length) # print"CRC mismatch cnt = {}".format(nCRCMisMatchCnt) # Restraining refernce sequence number to 16bit wide if (reply_msg.payload.page_chunk_size != len(reply_msg.payload.page_chunk_bytes)): fobj.write(bytearray(reply_msg.payload.page_chunk_bytes[0:int(reply_msg.payload.page_chunk_size)])) else: fobj.write(reply_msg.payload.page_chunk_bytes) print "chunk stream obtained successfully" self._print_file_system_status(reply_msg) fobj.close() else: self._print_file_system_status(reply_msg) else: self.vrb.err("No response from device.Stream file operation failed.") def do_fs_refhr(self, arg): """ Write referenceHr.Command is used to write reference hr and Current PC time. 
#>do_fs_refhr """ args = self._parse_args(arg, 1) if args == None: return msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_FS, m2m2_file_sys_app_ref_hr_stream_t()) src_addr = get_cli_addr() if src_addr == M2M2_ADDR_ENUM_t.M2M2_ADDR_APP_CLI_BLE: msg.header.src = M2M2_ADDR_ENUM_t.M2M2_ADDR_APP_CLI_BLE_STREAM elif src_addr == M2M2_ADDR_ENUM_t.M2M2_ADDR_APP_CLI: msg.header.src = M2M2_ADDR_ENUM_t.M2M2_ADDR_APP_CLI_STREAM else: self.vrb.err("Improper address !!") msg.payload.command = M2M2_SENSOR_COMMON_CMD_ENUM_t.M2M2_SENSOR_COMMON_CMD_STREAM_DATA now = datetime.datetime.now() is_dst = time.daylight and time.localtime().tm_isdst > 0 utc_offset = - (time.altzone if is_dst else time.timezone) msg.payload.refhr = int(args[0]) msg.payload.year = now.year msg.payload.month = now.month msg.payload.day = now.day msg.payload.hour = now.hour msg.payload.minute = now.minute msg.payload.second = now.second msg.payload.TZ_sec = utc_offset self._send_packet(msg) self.vrb.write("date and time: {}".format(now), 2) self.vrb.write("timezone: {}".format(utc_offset), 2) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_FS, m2m2_file_sys_cmd_t(), 20) if reply_msg != None: self._print_file_system_status(reply_msg) else: self.vrb.err("response timeout from device.Stream file written successful.") def do_create_adpd4k_dcfg(self, arg): """ ->creates the dcfg to be loaded into adpd4k ->Argument is the slotid and appid Ex:create_adpd4k_dcfg 1:0 creates dcfg for ppg app in slot-A /***************APP IDs ***************/ /**************************SLOT IDs **********************/ ---------------------------------------- ------------------------------ ------------------------------ | App-ID | Apps | | Slot- ID | ADPD4k_SLOTS | | Slot- ID | ADPD4k_SLOTS | ---------------------------------------- ------------------------------ ------------------------ | 0 | ECG4K | | 1 | A | | 9 | I | --------------------------------------- ------------------------------ ------------------------ | 1 | PPG | 
| 2 | B | | 10 | J | ---------------------------------------- ------------------------------ ------------------------ | 2 | Temp. (Thermistor) | | 3 | C | | 11 | K | ------------------------- -------------- ------------------------------ ------------------------ | 3 |Temp. (Calibration Resistor) | | 4 | D | | 12 | L | ---------------------------------------- ------------------------------ -------------------------- | 4 | ADPD4K_G | | 5 | E | ---------------------------------------- ------------------------------ | 5 | ADPD4K_R | | 6 | F | ---------------------------------------- ------------------------------ | 6 | ADPD4K_IR | | 7 | G | ---------------------------------------- ------------------------------ | 7 | ADPD4K_B | | 8 | H | ---------------------------------------- ------------------------------- -> Slot Switching feature not enabled in FW, So Mapping of application and slot is fixed as mentioned in below table. if Created and loaded DCFG for any other combination, apps and static AGC will not work as expected ------------------------------------------------------------------- | App-ID | Apps | Slot- ID | ADPD4k_SLOT | -------------------------------------------------------------------- | 0 | ECG4K | 1 | A | ------------------------------------------------------------------- | 1 | PPG | 6 | F | -------------------------------------------------------------------- | 2 | Temp. (Thermistor) | 4 | D | ------------------------- ------------------------------------------ | 3 |Temp. 
(Calibration Resistor) | 5 | E | -------------------------------------------------------------------- | 4 | ADPD4K_G | 6 | F | -------------------------------------------------------------------- | 5 | ADPD4K_R | 7 | G | -------------------------------------------------------------------- | 6 | ADPD4K_IR | 8 | H | -------------------------------------------------------------------- | 7 | ADPD4K_B | 9 | I | -------------------------------------------------------------------- """ args = self._parse_args(arg, None) if args == None: self.vrb.err("No argument passed, Check Help!") return num_ops = len(args) msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADPD4000, m2m2_adpd_dcfg_op_hdr_t(num_ops)) msg.payload.command = M2M2_SENSOR_ADPD_COMMAND_ENUM_t.M2M2_SENSOR_ADPD_COMMAND_CREATE_DCFG_REQ msg.payload.num_ops = num_ops for i in range(num_ops): tempVal = args[i] slotid = int(tempVal.split(':')[0]) appid = int(tempVal.split(':')[1]) if(appid < 0 or appid > 7 or slotid < 1 or slotid >12): self.vrb.err("Invalid argument passed, Check Help!") return msg.payload.ops[i].slotid = slotid msg.payload.ops[i].appid = appid self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADPD4000, m2m2_adpd_dcfg_op_hdr_t(num_ops), 10) if reply_msg != None: status = self._get_enum_name(M2M2_APP_COMMON_STATUS_ENUM_t, reply_msg.payload.status) self.vrb.write(" Status: '{}'".format(status)) else: self.vrb.err("DCFG Creation failed!") def do_lcfgPpgCheck(self, arg): """ Compares the PPG LCFG which is loaded into adpd4000 with dcb_lcfg and f/w_lcfg. There are two arguments to it- 1. LCFG ID, which is 40 for adpd4000 2. PPG DCB LCFG filename - for ex. ppg_dcb.lcfg Eg: = lcfgPpgCheck 40 ppg_dcb.lcfg """ args = self._parse_args(arg,2) if args == None: self.vrb.err("Incorrect usage! 
Please check help.") return if (args[0] != '40'): self.vrb.err("Invalid LCFG ID !") return dcb_lcfg_filename = 'dcb_cfg/' + args[1] fw_lcfg = list() dcb_lcfg = list() get_lcfg = list() msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_PPG, m2m2_ppg_lcfg_data_t()) msg.payload.command = M2M2_APP_COMMON_CMD_ENUM_t.M2M2_APP_COMMON_CMD_GET_LCFG_REQ self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_PPG, m2m2_ppg_lcfg_data_t(), 100) if reply_msg == None: print "Reading PPG LCFG failed!" return Cnt = 0 lcfg_array_count = int(reply_msg.payload.size) while Cnt < lcfg_array_count: get_lcfg.append(int((reply_msg.payload.lcfgdata[Cnt]))) Cnt+=1 status = self._get_enum_name(M2M2_APP_COMMON_STATUS_ENUM_t, reply_msg.payload.status) if status == None: status = format(reply_msg.payload.status, '#04x') #self.vrb.write("LCFG Size : {:02}".format(lcfg_array_count)) self.vrb.write("Command return status: {}".format(status)) try: f = open('dcb_cfg/ppg_fw.lcfg') # ppg_fw.lcfg contain Default f/w PPG lcfg except: self.vrb.err("Invalid File Name") return for line in f.readlines(): if(line[0] == '#' or line[0]=='\n' or line[0]==' ' or line[0]=='\t' or line[0]=='/'): continue else: str = line.split('#') str = str[0].split('/') str = str[0].split(' ') str =str[1].replace(' ','').replace('\t','').replace('\n','') fw_lcfg.append(int(str,16)) f.close() #self.vrb.write("FW LCFG Size : {:02}".format(len(fw_lcfg))) '''self.vrb.write("Fw dcfg") for i in range(len(fw_lcfg)): self.vrb.write("fw_lcfg {}:{}".format(i,fw_lcfg[i]))''' try: f = open(dcb_lcfg_filename) # dcb lcfg file that contains dcb ppg lcfg except: self.vrb.err("Invalid File Name") return for line in f.readlines(): if(line[0] == '#' or line[0]=='\n' or line[0]==' ' or line[0]=='\t' or line[0]=='/'): continue else: str = line.split('#') str = str[0].split('/') str = str[0].split(' ') dcb =str[1].replace(' ','').replace('\t','').replace('\n','') dcb_lcfg.append(int(dcb,16)) f.close() #self.vrb.write("DCB 
LCFG Size : {:02}".format(len(dcb_lcfg))) '''self.vrb.write("DCB dcfg") for i in range(len(dcb_lcfg)): self.vrb.write("dcb_lcfg {}:{}".format(i,dcb_lcfg[i]))''' if(len(get_lcfg)==len(fw_lcfg)): for i in range(len(get_lcfg)): if(get_lcfg[i]!=fw_lcfg[i]): self.vrb.write("FW Mismatch found at index{:02} Got: {:04} Actual: {:04}".format(i,get_lcfg[i],fw_lcfg[i])) break else: if(i==len(get_lcfg)-1): self.vrb.write("Command return status: {}".format('FW LCFG Present')) return else: continue if(len(get_lcfg)==len(dcb_lcfg)): for i in range(len(get_lcfg)): if(get_lcfg[i]!=dcb_lcfg[i]): self.vrb.write("DCB Mismatch found at index{:02} Got: {:04} Actual: {:04}".format(i,get_lcfg[i],dcb_lcfg[i])) break else: if(i==len(get_lcfg)-1): self.vrb.write("Command return status: {}".format('DCB LCFG Present')) return else: continue self.vrb.write("Command return status: {}".format('Invalid LCFG Present')) def do_compare_cfg_files(self, arg): """ Compares two dcfg/lcfg files to check if they contain same configurations or not. 
Two arguments-> filename1, filename2 for ex- #>compare_dcb_files adxl_dcb.dcfg adxl_dcb_get.dcfg """ args = self._parse_args(arg, 2) if len(args) == 0: self.vrb.err("No arguments supplied!") return file1 = 'dcb_cfg/' + args[0] file2 = 'dcb_cfg/' + args[1] file1_cfg = [] file2_cfg = [] try: f1 = open(file1) except: self.vrb.err("Invalid File Name") return for line in f1.readlines(): if(line[0] == '#' or line[0]=='\n' or line[0]==' ' or line[0]=='\t'): continue else: str = line.split('#') str = str[0].replace(' ','').replace('\t','').replace('\n','') file1_cfg.append(int(str,16)) f1.close() try: f2 = open(file2) except: self.vrb.err("Invalid File Name") return for line in f2.readlines(): if(line[0] == '#' or line[0]=='\n' or line[0]==' ' or line[0]=='\t'): continue else: str = line.split('#') str = str[0].replace(' ','').replace('\t','').replace('\n','') file2_cfg.append(int(str,16)) f2.close() if(len(file1_cfg) != len(file2_cfg)): self.vrb.write("Command return status: {}".format('Cfg. in both files not matched')) return else: for i in range(len(file1_cfg)): if(file1_cfg[i] != file2_cfg[i]): self.vrb.write("Command return status: {}".format('Cfg. in both files not matched')) return self.vrb.write("Command return status: {}".format('Cfg. in both files matched')) def do_lcfgEcgRead(self, arg): """ Read the ECG LCFG. The argument is the LCFG ID to choose from the ecg configuration structure: -------------------------------------- |Config Element | Index | ----------------------------- | FS | 0 | | ADC_PGA_GAIN | 1 | | PWR MOD | 2 | | packetization enable | 3 | -------------------------------------- Eg: = lcfgEcgRead addr1 addr2 ...... 
""" args = self._parse_args(arg, None) if len(args) == 0: self.vrb.err("No arguments supplied!") return num_ops = len(args) msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_ECG, ecg_app_lcfg_op_hdr_t(num_ops)) msg.payload.command = M2M2_APP_COMMON_CMD_ENUM_t.M2M2_APP_COMMON_CMD_READ_LCFG_REQ msg.payload.num_ops = num_ops for i in range(num_ops): tempVal = args[i] if ("0x") in tempVal: reg_addr = int(tempVal, 16) elif ("0X") in tempVal: reg_addr = int(tempVal, 16) else: reg_addr = int(tempVal) msg.payload.ops[i].field = reg_addr self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_ECG, ecg_app_lcfg_op_hdr_t(num_ops), 10) reg_result_list = [] if reply_msg == None: err_stat = 1 self.vrb.err("Reading ECG LCFG failed!") else: self._print_ecg_lcfg_result(reply_msg) err_stat = 0 for i in range(reply_msg.payload.num_ops): reg_result_list.append((reg_addr, hex(reply_msg.payload.ops[i].value))) return err_stat, reg_result_list def do_set_ecg_dcb_lcfg(self,arg): """ Writes the ECG LCFG values from DCB if present,otherwise it will write default value. There is no argument. Eg: = set_ecg_dcb_lcfg """ args = self._parse_args(arg, None) if len(args) != 0: self.vrb.err("Invalid No. arguments supplied!") return msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_ECG, ecg_app_dcb_lcfg_t()) msg.payload.command =M2M2_APP_COMMON_CMD_ENUM_t.M2M2_APP_COMMON_CMD_SET_LCFG_REQ self._send_packet(msg) time.sleep(3) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_ECG, ecg_app_dcb_lcfg_t(), 10) if reply_msg != None: status = self._get_enum_name(M2M2_APP_COMMON_STATUS_ENUM_t, reply_msg.payload.status) if reply_msg != None: self.vrb.write(" Status: '{}'".format(status)) else: self.vrb.err("Setting ECG Library configuration failed!") def do_tempr_lcfg_write(self,arg): """ Write the temperature LCFG. 
The first argument is the LCFG ID to choose from the temperature configuration structure, second argument is the value to be updated ------------------------------------------------------------------ | Config Element | Index | Value to be updated | ------------------------------------------------------------------ | Sample period | 0 | integer value | | slots selected | 1 | integer value | | LUT for thermistor 0 | 2 | thermistor0_LUT.lcfg | | LUT for thermistor 1 | 3 | thermistor1_LUT.lcfg | | LUT for thermistor 2 | 4 | thermistor2_LUT.lcfg | | LUT for thermistor 3 | 5 | thermistor3_LUT.lcfg | | LUT for thermistor 4 | 6 | thermistor4_LUT.lcfg | ------------------------------------------------------------------ Eg: = tempr_lcfg_write addr1 value ...... Usage: tempr_lcfg_write 0 5 /* To update the sampling period to 5 seconds*/ tempr_lcfg_write 1 0xFF /* To update the slots selected to 0xFF */ tempr_lcfg_write 2 thermistor_LUT.lcfg /* To update the thermistor 0 LUT with values defined in thermistor_LUT.lcfg, template file is given in the same directory as CLI.py*/ """ args = self._parse_args(arg) if(len(args) <= 1): self.vrb.err("Insufficient arguments supplied!") return msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_TEMPERATURE, temperature_app_lcfg_t()) msg.payload.command = M2M2_APP_COMMON_CMD_ENUM_t.M2M2_APP_COMMON_CMD_WRITE_LCFG_REQ tempVal = args[0] if ("0x") in tempVal: msg.payload.field = int(tempVal, 16) elif ("0X") in tempVal: msg.payload.field = int(tempVal, 16) else: msg.payload.field = int(tempVal) if(msg.payload.field == 0 or msg.payload.field == 1): tempVal = args[1] if ("0x") in tempVal: msg.payload.value[0] = int(tempVal, 16) elif ("0X") in tempVal: msg.payload.value[0] = int(tempVal, 16) else: msg.payload.value[0] = int(tempVal) elif(msg.payload.field >= 2 or msg.payload.field <= 6): filename = args[1] thermistor_lut = [] try: f = open(filename) except: self.vrb.err("Invalid File Name") return 1 for line in f.readlines(): if (line[0] == '#' or 
line[0] == '\n' or line[0] == ' ' or line[0] == '\t'): continue else: str = line.split('#') dcb = str[0].replace(' ', '').replace('\t', '').replace('\n', '') thermistor_lut.append(dcb) # int(dcb, 16)) f.close() if (len(thermistor_lut) != 21): self.vrb.err("Invalid number of elements in the look up table") return for index in range(21): msg.payload.value[index] = int(thermistor_lut[index],16) else: self.vrb.err("Invalid field argument passed") self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_TEMPERATURE, temperature_app_lcfg_t(), 10) if reply_msg == None: self.vrb.err("Writing Temperature LCFG failed!") else: self._print_temperature_lcfg_result(reply_msg) def do_tempr_lcfg_read(self,arg): """ Read the temperature LCFG. The argument is the LCFG ID to choose from the temperature configuration structure: ------------------------------------------ | Config Element | Index | ------------------------------------------ | Sample period | 0 | | slots selected | 1 | | LUT for thermistor 0 | 2 | | LUT for thermistor 1 | 3 | | LUT for thermistor 2 | 4 | | LUT for thermistor 3 | 5 | | LUT for thermistor 4 | 6 | ------------------------------------------ Eg: = tempr_lcfg_read addr1 Usage: tempr_lcfg_read 0 /* To get the sampling period */ """ args = self._parse_args(arg, None) if len(args) == 0: self.vrb.err("No arguments supplied!") return msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_TEMPERATURE, temperature_app_lcfg_t()) msg.payload.command = M2M2_APP_COMMON_CMD_ENUM_t.M2M2_APP_COMMON_CMD_READ_LCFG_REQ tempVal = args[0] if ("0x") in tempVal: msg.payload.field = int(tempVal, 16) elif ("0X") in tempVal: msg.payload.field = int(tempVal, 16) else: msg.payload.field = int(tempVal) self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_TEMPERATURE, temperature_app_lcfg_t(), 10) if reply_msg == None: self.vrb.err("Reading Temperature LCFG failed!") else: self._print_temperature_lcfg_result(reply_msg) def 
do_set_temperature_lcfg_dcb(self, arg): """ Writes the Temperature LCFG values from DCB if present,otherwise it will write default value. There is no argument. Eg: = set_temperature_dcb_lcfg """ args = self._parse_args(arg, None) if len(args) != 0: self.vrb.err("Invalid No. arguments supplied!") return msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_TEMPERATURE, temperature_app_dcb_lcfg_t()) msg.payload.command = M2M2_APP_COMMON_CMD_ENUM_t.M2M2_APP_COMMON_CMD_SET_LCFG_REQ self._send_packet(msg) time.sleep(3) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_TEMPERATURE, temperature_app_dcb_lcfg_t(), 10) if reply_msg != None: status = self._get_enum_name(M2M2_APP_COMMON_STATUS_ENUM_t, reply_msg.payload.status) if reply_msg != None: self.vrb.write(" Status: '{}'".format(status)) else: self.vrb.err("Setting Temperature app Library configuration failed!") def do_set_adpd4k_fs(self,arg): """ Set the sampling frequency value in adpd4k dcfg. The argument is ODR value in Hz. Eg: = set_adpd4k_fs 100 """ args = self._parse_args(arg, 1) if args == None: self.vrb.err("Invalid No. arguments supplied!") return msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADPD4000, m2m2_sensor_adpd4000_set_fs_t()) msg.payload.command = M2M2_SENSOR_ADPD_COMMAND_ENUM_t.M2M2_SENSOR_ADPD_COMMAND_SET_FS_REQ msg.payload.odr = int(args[0]) self._send_packet(msg) #time.sleep(3) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADPD4000, m2m2_sensor_adpd4000_set_fs_t(), 10) if reply_msg != None: self._print_packet_status(reply_msg) else: self.vrb.err("Setting ADPD4K sampling frequency in DCFG failed!") def do_disable_adpd4k_slots(self, arg): """ Disables all the slots except Slot-A which is enabled by default. There is no argument. Eg: = disable_adpd4k_slots """ args = self._parse_args(arg, None) if len(args) != 0: self.vrb.err("Invalid No. 
arguments supplied!") return msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADPD4000, m2m2_adpd4k_slot_info_t()) msg.payload.command = M2M2_SENSOR_ADPD_COMMAND_ENUM_t.M2M2_SENSOR_ADPD_COMMAND_DISABLE_SLOTS_REQ self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADPD4000, m2m2_adpd4k_slot_info_t(), 10) if reply_msg != None: status = self._get_enum_name(M2M2_APP_COMMON_STATUS_ENUM_t, reply_msg.payload.status) if reply_msg != None: self.vrb.write(" Status: '{}'".format(status)) else: self.vrb.err("Disabling adpd4k slots failed!") def do_set_eda_dcb_lcfg(self,arg): """ Writes the EDA LCFG values from DCB if present,otherwise it will write default value. There is no argument. Eg: = set_eda_dcb_lcfg """ args = self._parse_args(arg, None) if len(args) != 0: self.vrb.err("Invalid No. arguments supplied!") return msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_EDA, eda_app_dcb_lcfg_t()) msg.payload.command =M2M2_APP_COMMON_CMD_ENUM_t.M2M2_APP_COMMON_CMD_SET_LCFG_REQ self._send_packet(msg) time.sleep(3) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_EDA, eda_app_dcb_lcfg_t(), 10) if reply_msg != None: status = self._get_enum_name(M2M2_APP_COMMON_STATUS_ENUM_t, reply_msg.payload.status) if reply_msg != None: self.vrb.write(" Status: '{}'".format(status)) else: self.vrb.err("Setting EDA Library configuration failed!") def do_read_dcb_info(self,arg): """ Reads Important DCB Information Eg: = read_dcb_info """ args = self._parse_args(arg, None) if len(args) != 0: self.vrb.err("Invalid No. 
arguments supplied!") return msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_BIA, m2m2_dcb_fds_status_info_req_t()) msg.payload.command = M2M2_BIA_APP_CMD_ENUM_t.M2M2_APP_COMMON_CMD_DCB_TIMING_INFO_REQ self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_BIA, m2m2_dcb_fds_timing_info_resp_t(), 10) if reply_msg != None: status = self._get_enum_name(M2M2_APP_COMMON_STATUS_ENUM_t, reply_msg.payload.status) if reply_msg != None: self.vrb.write(" Check entries time: {} us".format(int(reply_msg.payload.adi_dcb_check_entries_time))) self.vrb.write(" Clear entries time : {} us".format(int(reply_msg.payload.adi_dcb_clear_entries_time))) self.vrb.write(" Delete record time : {} us".format(int(reply_msg.payload.adi_dcb_delete_record_time))) self.vrb.write(" Read Entry time : {} us".format(int(reply_msg.payload.adi_dcb_read_entry_time))) self.vrb.write(" Update Entry time : {} us".format(int(reply_msg.payload.adi_dcb_update_entry_time))) else: self.vrb.err("Setting DCB time info failed!") def do_lcfgEdaRead(self, arg): """ Read the EDA LCFG. The argument is the LCFG ID to choose from the eda configuration structure: ----------------------------- |Config Element | Index | ----------------------------- | FS | 0 | | DFT_NUMBER | 1 | ----------------------------- Eg: = lcfgEdaRead addr1 addr2 ...... 
""" args = self._parse_args(arg, None) if len(args) == 0: self.vrb.err("No arguments supplied!") return num_ops = len(args) msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_EDA, eda_app_lcfg_op_hdr_t(num_ops)) msg.payload.command = M2M2_APP_COMMON_CMD_ENUM_t.M2M2_APP_COMMON_CMD_READ_LCFG_REQ msg.payload.num_ops = num_ops for i in range(num_ops): tempVal = args[i] if ("0x") in tempVal: reg_addr = int(tempVal, 16) elif ("0X") in tempVal: reg_addr = int(tempVal, 16) else: reg_addr = int(tempVal) msg.payload.ops[i].field = reg_addr self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_EDA, eda_app_lcfg_op_hdr_t(num_ops), 10) reg_result_list = [] if reply_msg == None: err_stat = 1 self.vrb.err("Reading EDA LCFG failed!") else: self._print_eda_lcfg_result(reply_msg) err_stat = 0 for i in range(reply_msg.payload.num_ops): reg_result_list.append((reg_addr, hex(int(reply_msg.payload.ops[i].value)))) return err_stat, reg_result_list def do_lcfgBiaRead(self, arg): """ Read the BIA LCFG. The argument is the LCFG ID to choose from the BIA configuration structure: ----------------------------- |Config Element | Index | ----------------------------- | FS | 0 | | ADC_PGA_GAIN | 1 | | POWER MOD | 2 | | SIN FREQ | 3 | | BIA DFT number | 4 | ----------------------------- Eg: = lcfgBiaRead addr1 addr2 ...... 
""" args = self._parse_args(arg, None) if len(args) == 0: self.vrb.err("No arguments supplied!") return num_ops = len(args) msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_BIA, bia_app_lcfg_op_hdr_t(num_ops)) msg.payload.command = M2M2_APP_COMMON_CMD_ENUM_t.M2M2_APP_COMMON_CMD_READ_LCFG_REQ msg.payload.num_ops = num_ops for i in range(num_ops): tempVal = args[i] if ("0x") in tempVal: reg_addr = int(tempVal, 16) elif ("0X") in tempVal: reg_addr = int(tempVal, 16) else: reg_addr = int(tempVal) msg.payload.ops[i].field = reg_addr self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_BIA, bia_app_lcfg_op_hdr_t(num_ops), 10) reg_result_list = [] if reply_msg == None: err_stat = 1 self.vrb.err("Reading BIA LCFG failed!") else: self._print_bia_lcfg_result(reply_msg) err_stat = 0 for i in range(reply_msg.payload.num_ops): reg_result_list.append((reg_addr, hex(int(reply_msg.payload.ops[i].value)))) return err_stat, reg_result_list def do_set_bia_dcb_lcfg(self, arg): """ Writes the BIA LCFG values from DCB if present,otherwise it will write default value. There is no argument. Eg: = set_bia_dcb_lcfg """ args = self._parse_args(arg, None) if len(args) != 0: self.vrb.err("Invalid No. arguments supplied!") return msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_BIA, bia_app_dcb_lcfg_t()) msg.payload.command = M2M2_APP_COMMON_CMD_ENUM_t.M2M2_APP_COMMON_CMD_SET_LCFG_REQ self._send_packet(msg) time.sleep(3) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_BIA, bia_app_dcb_lcfg_t(), 10) if reply_msg != None: status = self._get_enum_name(M2M2_APP_COMMON_STATUS_ENUM_t, reply_msg.payload.status) if reply_msg != None: self.vrb.write(" Status: '{}'".format(status)) else: self.vrb.err("Setting BIA Library configuration failed!") def do_lcfgEcgWrite(self, arg): """ Set the ECG LCFG. 
The argument is the ECG LCFG ID:VALUE pair to modify the ecg lcfg value FS address -->0 Values 100 200 300 400 500 ADC_PGA_GAIN address -->1 Values 0 /**< ADC PGA Gain of 1 */ 1 /**< ADC PGA Gain of 1.5 */ 2 /**< ADC PGA Gain of 2 */ 3 /**< ADC PGA Gain of 4 */ 4 /**< ADC PGA Gain of 9 */ AFE POWER MOD address -->1 Values '0' for Low power drive, '1' for High power drive, Default f/w ECG AFE Power Mod = '0' 0 /**< Low power Drive */ 1 /**< High power Drive */ Eg: = lcfgEcgWrite addr1:value1 addr2:value2 ... """ args = self._parse_args(arg, None) if len(args) == 0: self.vrb.err("No arguments supplied!") return num_ops = len(args) msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_ECG, ecg_app_lcfg_op_hdr_t(num_ops)) msg.payload.command = M2M2_APP_COMMON_CMD_ENUM_t.M2M2_APP_COMMON_CMD_WRITE_LCFG_REQ msg.payload.num_ops = num_ops for i in range(num_ops): tempVal = args[i] if (':' not in tempVal): self.vrb.err("Invalid Argument Format, check help!") return elif ("0x") in tempVal: reg_addr = int(tempVal.split(':')[0], 16) reg_val = int(tempVal.split(':')[1], 16) elif ("0X") in tempVal: reg_addr = int(tempVal.split(':')[0], 16) reg_val = int(tempVal.split(':')[1], 16) else: reg_addr = int(tempVal.split(':')[0]) reg_val = int(tempVal.split(':')[1]) msg.payload.ops[i].field = reg_addr msg.payload.ops[i].value = reg_val self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_ECG, ecg_app_lcfg_op_hdr_t(num_ops), 10) if reply_msg == None: print "Writing ECG LCFG failed!" return self._print_ecg_lcfg_result(reply_msg) def do_SetEcg4kLcfg(self, arg): """ Set the ECG LCFG values for ADPD4000. The argument is the FIELD:VALUE pair to modify the ecg lcfg value For ODR, field value = 0 Currently ecg lcfg for adpd4k has only one entry i.e. ODR Sampling Freq. address -->0 Values 100 200 300 400 500 Eg: = SetEcg4kLcfg addr1:value1 addr2:value2 ... 
""" args = self._parse_args(arg, None) if len(args) == 0: self.vrb.err("No arguments supplied!") return num_ops = len(args) msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADPD4000, ecg_app_lcfg_op_hdr_t(num_ops)) msg.payload.command = M2M2_APP_COMMON_CMD_ENUM_t.M2M2_APP_COMMON_CMD_WRITE_LCFG_REQ msg.payload.num_ops = num_ops for i in range(num_ops): tempVal = args[i] if (':' not in tempVal): self.vrb.err("Invalid Argument Format, check help!") return elif ("0x") in tempVal: reg_addr = int(tempVal.split(':')[0], 16) reg_val = int(tempVal.split(':')[1], 16) elif ("0X") in tempVal: reg_addr = int(tempVal.split(':')[0], 16) reg_val = int(tempVal.split(':')[1], 16) else: reg_addr = int(tempVal.split(':')[0]) reg_val = int(tempVal.split(':')[1]) msg.payload.ops[i].field = reg_addr msg.payload.ops[i].value = reg_val self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADPD4000, ecg_app_lcfg_op_hdr_t(num_ops), 10) if reply_msg == None: print "Setting ECG LCFG values for ADPD4K failed!" return self._print_ecg_lcfg_result(reply_msg) def do_GetEcg4kLcfg(self, arg): """ Get the ECG LCFG values for ADPD4000. The argument is the FIELD value for the entry to be read from the ecg lcfg For ODR, field value = 0 Currently ecg lcfg for adpd4k has only one entry i.e. 
ODR Eg: = GetEcg4kLcfg addr1 addr2 """ args = self._parse_args(arg, None) if len(args) == 0: self.vrb.err("No arguments supplied!") return num_ops = len(args) msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADPD4000, ecg_app_lcfg_op_hdr_t(num_ops)) msg.payload.command = M2M2_APP_COMMON_CMD_ENUM_t.M2M2_APP_COMMON_CMD_READ_LCFG_REQ msg.payload.num_ops = num_ops for i in range(num_ops): tempVal = args[i] if ("0x") in tempVal: reg_addr = int(tempVal, 16) elif ("0X") in tempVal: reg_addr = int(tempVal, 16) else: reg_addr = int(tempVal) msg.payload.ops[i].field = reg_addr self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SENSOR_ADPD4000, ecg_app_lcfg_op_hdr_t(num_ops), 10) if reply_msg == None: print "Reading ECG LCFG values for ADPD4K failed!" return self._print_ecg_lcfg_result(reply_msg) def do_lcfgEdaWrite(self, arg): """ Set the EDA LCFG. The argument is the EDA LCFG ID:VALUE pair to modify the eda lcfg value ODR address -->0 Values 4 , 8 , 16 ,25 , 30........... 
DFT_NUMBER address -->2 Values 1 DFT_NUMBER = 8 .Recommended for odr >16 Hz 2 DFT_NUMBER = 16 Recommended for odr <= 16Hz Eg: = lcfgEdaWrite addr1:value1 addr2:value2 lcfgEdaWrite 0:30 lcfgEdaWrite 2:1 """ args = self._parse_args(arg, None) if len(args) == 0: self.vrb.err("No arguments supplied!") return num_ops = len(args) msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_EDA, eda_app_lcfg_op_hdr_t(num_ops)) msg.payload.command = M2M2_APP_COMMON_CMD_ENUM_t.M2M2_APP_COMMON_CMD_WRITE_LCFG_REQ msg.payload.num_ops = num_ops for i in range(num_ops): tempVal = args[i] if ("0x") in tempVal: reg_addr = int(tempVal.split(':')[0], 16) reg_val = int(tempVal.split(':')[1], 16) elif ("0X") in tempVal: reg_addr = int(tempVal.split(':')[0], 16) reg_val = int(tempVal.split(':')[1], 16) else: reg_addr = int(tempVal.split(':')[0]) reg_val = int(tempVal.split(':')[1]) msg.payload.ops[i].field = reg_addr msg.payload.ops[i].value = reg_val self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_EDA, eda_app_lcfg_op_hdr_t(num_ops), 10) if reply_msg == None: print "Writing EDA LCFG failed!" return self._print_eda_lcfg_result(reply_msg) def do_lcfgBiaWrite(self, arg): """ Set the BIA LCFG. The argument is the ECG LCFG ID:VALUE pair to modify the bia lcfg value FS address -->0 Values 100 200 300 400 500 ADC_PGA_GAIN address -->1 Values 0 /**< ADC PGA Gain of 1 */ 1 /**< ADC PGA Gain of 1.5 */ 2 /**< ADC PGA Gain of 2 */ 3 /**< ADC PGA Gain of 4 */ 4 /**< ADC PGA Gain of 9 */ Eg: = lcfgBiaWrite addr1:value1 addr2:value2 ... 
""" args = self._parse_args(arg, None) if len(args) == 0: self.vrb.err("No arguments supplied!") return num_ops = len(args) msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_BIA, bia_app_lcfg_op_hdr_t(num_ops)) msg.payload.command = M2M2_APP_COMMON_CMD_ENUM_t.M2M2_APP_COMMON_CMD_WRITE_LCFG_REQ msg.payload.num_ops = num_ops for i in range(num_ops): tempVal = args[i] if ("0x") in tempVal: reg_addr = int(tempVal.split(':')[0], 16) reg_val = tempVal.split(':')[1] elif ("0X") in tempVal: reg_addr = int(tempVal.split(':')[0], 16) reg_val = tempVal.split(':')[1] else: reg_addr = int(tempVal.split(':')[0]) reg_val = tempVal.split(':')[1] msg.payload.ops[i].field = reg_addr msg.payload.ops[i].value = float(reg_val) self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_BIA, bia_app_lcfg_op_hdr_t(num_ops), 10) if reply_msg == None: print "Writing BIA LCFG failed!" return self._print_bia_lcfg_result(reply_msg) def do_dcfgEdaWrite(self, arg): """ Set the EDA DCFG. The argument is the EDA DCFG ADDRESS:VALUE pair to modify the eda lcfg value Eg: = dcfgEdaWrite addr1:value1 addr2:value2 ... 
""" args = self._parse_args(arg, None) if len(args) == 0: self.vrb.err("No arguments supplied!") return num_ops = len(args) msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_EDA, eda_app_dcfg_op_hdr_t(num_ops)) msg.payload.command = M2M2_APP_COMMON_CMD_ENUM_t.M2M2_APP_COMMON_CMD_WRITE_DCFG_REQ msg.payload.num_ops = num_ops for i in range(num_ops): tempVal = args[i] if ("0x") in tempVal: reg_addr = int(tempVal.split(':')[0], 16) reg_val = int(tempVal.split(':')[1], 16) elif ("0X") in tempVal: reg_addr = int(tempVal.split(':')[0], 16) reg_val = int(tempVal.split(':')[1], 16) else: reg_addr = int(tempVal.split(':')[0]) reg_val = int(tempVal.split(':')[1]) msg.payload.ops[i].field = reg_addr msg.payload.ops[i].value = reg_val self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_EDA, eda_app_dcfg_op_hdr_t(num_ops), 10) if reply_msg == None: print "Writing EDA DCFG failed!" return self._print_eda_dcfg_result(reply_msg) def _print_rtia_cal_result(self, packet): self._print_packet_status(packet) self.vrb.write(" Num of calibrated values: '{}'".format(int(packet.payload.num_calibrated_values))) t = table(["Actual resistance in Ohms", "Calibrated resistance in Ohms"]) for i in range(packet.payload.num_calibrated_values): t.add_row([int(packet.payload.rtia_cal_table_val[i].actual_res), int(packet.payload.rtia_cal_table_val[i].calibrated_res)]) t.display() def _print_ecg_lcfg_result(self, packet): self._print_packet_status(packet) self.vrb.write(" Num of registers: '{}'".format(int(packet.payload.num_ops))) t = table(["Field", "Value"]) for i in range(packet.payload.num_ops): t.add_row([hex(packet.payload.ops[i].field), hex(packet.payload.ops[i].value)]) t.display() def _print_temperature_lcfg_result(self, packet): self._print_packet_status(packet) if(packet.payload.field == 0): self.vrb.write(" Sample Period : {} sec".format(int(packet.payload.value[0]))) elif(packet.payload.field == 1): self.vrb.write(" Slots Selected : 
{}".format(hex(int(packet.payload.value[0])))) elif(packet.payload.field >= 2 and packet.payload.field <= 6): self.vrb.write(" LUT for Thermistor {} is listed below:".format(hex(int(packet.payload.field) - 2))) for index in range(21): self.vrb.write(" LUT[{}] Impedance (ohm) at {} degrees : {} ".format(int(packet.payload.field) - 2, index*5,hex(int(packet.payload.value[index])))) else: self.vrb.write("Invalid Field Index found") def _print_eda_lcfg_result(self, packet): self._print_packet_status(packet) self.vrb.write(" Num of registers: '{}'".format(int(packet.payload.num_ops))) t = table(["Field", "Value"]) for i in range(packet.payload.num_ops): t.add_row([hex(packet.payload.ops[i].field), hex(packet.payload.ops[i].value)]) t.display() def _get_ecgalgo_version(self, address): msg = m2m2_packet(address, m2m2_app_common_ver_req_t()) msg.payload.command = M2M2_ECG_APP_CMD_ENUM_t.M2M2_ECG_APP_CMD_GET_ALGO_VENDOR_VERSION_REQ self._send_packet(msg) return self._get_packet(address, m2m2_app_common_version_t()) def _print_bia_lcfg_result(self, packet): self._print_packet_status(packet) self.vrb.write(" Num of registers: '{}'".format(int(packet.payload.num_ops))) t = table(["Field", "Value"]) for i in range(packet.payload.num_ops): t.add_row([hex(packet.payload.ops[i].field), float(packet.payload.ops[i].value)]) t.display() def _print_eda_dcfg_result(self, packet): self._print_packet_status(packet) self.vrb.write(" Num of registers: '{}'".format(int(packet.payload.num_ops))) t = table(["Field", "Value"]) for i in range(packet.payload.num_ops): t.add_row([hex(packet.payload.ops[i].field), hex(packet.payload.ops[i].value)]) t.display() def do_flash_reset(self, arg): """ format file system. Command to format file system. 
#>flash_reset """ args = self._parse_args(arg, 0) if args == None: return msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM, m2m2_file_reset_cmd_t()) msg.payload.command = M2M2_PM_SYS_COMMAND_ENUM_t.M2M2_PM_SYS_COMMAND_FLASH_RESET_REQ self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_PM, m2m2_file_reset_cmd_t(), 240) if reply_msg != None: self._print_file_system_status(reply_msg) else: self.vrb.err("No response from device.Flash Reset operation failed.") def do_get_fds_status(self, arg): """ get fs dcb status #>get_fds_status """ args = self._parse_args(arg, 0) if args == None: return msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_BIA, m2m2_file_sys_cmd_t()) msg.payload.command = M2M2_BIA_APP_CMD_ENUM_t.M2M2_DCB_COMMAND_FDS_STATUS_REQ self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_BIA, m2m2_dcb_fds_status_info_resp_t(), 10) if reply_msg != None: self.vrb.write(" Dirty records: {}".format(int(reply_msg.payload.dirty_records))) self.vrb.write(" Open records : {}".format(int(reply_msg.payload.open_records))) self.vrb.write(" Valid records : {}".format(int(reply_msg.payload.valid_records))) self.vrb.write(" Pages available : {}".format(int(reply_msg.payload.pages_available))) self.vrb.write(" Memory Number of blocks : {}".format(int(reply_msg.payload.num_blocks))) self.vrb.write(" Blocks free : {}".format(int(reply_msg.payload.blocks_free))) self._print_packet_status(reply_msg) else: self.vrb.err("No response from device.Getting FDS status info failed.") def do_run_dcb_test(self, arg): """ run dcb tests from robot script #>run_dcb_test """ args = self._parse_args(arg, 1) if args == None: self.vrb.write("please provide valid argument") return else: count = 0 max_iter = int(args[0]) while (count < max_iter): print 'The count is:', count count = count + 1 self.onecmd("sensor ppg start") self.onecmd("status ppg") self.onecmd("sensor ppg stop") #self.onecmd("quickstart combined_dcb_ecg_test") 
#self.onecmd("quickstart combined_dcb_eda_test") #self.onecmd("quickstart combined_dcb_adpd_test") #self.onecmd("quickstart combined_dcb_adxl_test") #self.onecmd("quickstart adxl_dcb_test_2") #self.onecmd("quickstart ppg_status_check_robot_test") #self.onecmd("quickstart temp_dcb_test") def do_loadBiaDcfg(self, arg): """ Load the EDA application DCFG. #>loadBiaDcfg """ msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_BIA, m2m2_app_common_sub_op_t()) msg.payload.command = M2M2_BIA_APP_CMD_ENUM_t.M2M2_BIA_APP_CMD_LOAD_DCFG_REQ self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_BIA,m2m2_app_common_sub_op_t(), 10) if reply_msg != None: status = self._get_enum_name(M2M2_APP_COMMON_STATUS_ENUM_t, reply_msg.payload.status) self._print_packet_status(reply_msg) else: self.vrb.err("Loading BIA device configuration failed!") def do_BiaDcfgUpdate(self, arg): """ Update BIA DCFG register values. Eg: = BiaDcfgUpdate addr1 value1 addr2 value2 ... Usage: BiaDcfgUpdate 0x000021D8 0x00000489 """ args = self._parse_args(arg, None) if len(args) == 0: self._p_err("No arguments supplied!") return num_ops = len(args) num_ops >>= 1 msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_BIA, bia_app_dcfg_op_hdr_t(num_ops)) msg.payload.command = M2M2_BIA_APP_CMD_ENUM_t.M2M2_BIA_APP_COMMON_CMD_WRITE_DCFG_REQ msg.payload.num_ops = num_ops for i in range(num_ops): tempVal = args[i*2] reg_index = int(tempVal,16) tempVal = args[i*2+1] if ("0x") in tempVal: reg_val = int(tempVal, 16) elif ("0X") in tempVal: reg_val = int(tempVal, 16) else: reg_val = int(tempVal) msg.payload.ops[i].field = reg_index msg.payload.ops[i].value = reg_val self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_BIA, bia_app_dcfg_op_hdr_t(num_ops), 60) if reply_msg == None: status = self._get_enum_name(M2M2_EDA_APP_CMD_ENUM_t, reply_msg.payload.status) print "Writing BIA App DCFG failed!" 
return self._print_bia_app_dcfg_result(reply_msg) def do_BiaDcfgRead(self, arg): """ Read the BIA DCFG. The argument is the DCFG ID to choose from the eda configuration structure: --------------------------------------------------------------- |Config Element | Address | --------------------------------------------------------------- | FIFO Config register | 0x000021D8 | --------------------------------------------------------------- Eg: = BiaDcfgRead addr1 addr2 ...... """ args = self._parse_args(arg, None) if len(args) == 0: self.vrb.err("No arguments supplied!") return num_ops = len(args) msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_BIA, bia_app_dcfg_op_hdr_t(num_ops)) msg.payload.command = M2M2_BIA_APP_CMD_ENUM_t.M2M2_BIA_APP_COMMON_CMD_READ_DCFG_REQ msg.payload.num_ops = num_ops for i in range(num_ops): tempVal = args[i] if ("0x") in tempVal: reg_addr = int(tempVal, 16) elif ("0X") in tempVal: reg_addr = int(tempVal, 16) else: reg_addr = int(tempVal) msg.payload.ops[i].field = reg_addr self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_BIA, bia_app_dcfg_op_hdr_t(num_ops), 10) reg_result_list = [] if reply_msg == None: err_stat = 1 self.vrb.err("Reading BIA DCFG failed!") else: self._print_bia_app_dcfg_result(reply_msg) err_stat = 0 for i in range(reply_msg.payload.num_ops): reg_result_list.append((reg_addr, hex(int(reply_msg.payload.ops[i].value)))) return err_stat, reg_result_list def _print_bia_app_dcfg_result(self, packet): self._print_packet_status(packet) self.vrb.write(" Num of registers: '{}'".format(int(packet.payload.num_ops))) t = table(["Field", "Value"]) for i in range(packet.payload.num_ops): t.add_row([hex(packet.payload.ops[i].field), hex(packet.payload.ops[i].value)]) t.display() def do_biadcfg_write(self,arg) : """ To write default dcfg below command is used #>biadcfg_write 1 other arguments will be used to write user configurable registers to test in future """ args = self._parse_args(arg,1) if(int(args[0]) 
== 1): self.onecmd("BiaDcfgUpdate 0x00002004 0x00000000 0x000021d8 0x00000489 0x000021e0 0x00040000 0x00002008 0x00004800 0x00002030 0x00333333 0x0000203c 0x000007ff 0x00002038 0x00000000 0x00002034 0x00000000 0x00002014 0x00000004 0x00002044 0x0000e011 0x000020d0 0x001000b1") else: self.onecmd("BiaDcfgUpdate 0x00002004 0x00000001 0x000021d8 0x00000488 0x000021e0 0x00040001 0x00002008 0x00004801 0x00002030 0x00333332 0x0000203c 0x000007fe 0x00002038 0x00000001 0x00002034 0x00000001 0x00002014 0x00000005 0x00002044 0x0000e012 0x000020d0 0x001000b2") def do_biadcfg_read(self,arg) : self.onecmd("BiaDcfgRead 0x00002004 0x000021d8 0x000021e0 0x00002008 0x00002030 0x0000203c 0x00002038 0x00002034 0x00002014 0x00002044 0x000020d0") def do_EdaDynamicScaling(self, arg): """ Set the device into a desired power state. The argument is the state: 'disable' for disabling dynamic scaling 'enable' for enabling dynamic scaling ----------------------------------------------- Usage: #>EdaDynamicScaling [scale] #>EdaDynamicScaling enable/disable minscale maxscale lprtiasel #>minscale [20-100k,21-120k,22-128k,23-160k,24-196k,25-256k,26-512k] #>maxscale [20-100k,21-120k,22-128k,23-160k,24-196k,25-256k,26-512k] #>lprtiasel[20-100k,21-120k,22-128k,23-160k,24-196k,25-256k,26-512k] #>minscale<maxscale, lprtiasel >= minscale #>EdaDynamicScaling enable 20 26 20 """ arg_len = len(arg.split(' ')) if arg_len == 1: args = self._parse_args(arg, 1) if args == None: self.vrb.write("please provide valid argument") return msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_EDA, eda_app_dynamic_scale_t()) if "d" in arg[0]: msg.payload.dscale = 0 msg.payload.minscale = 0 msg.payload.maxscale = 0 msg.payload.lprtia = 0 elif arg_len == 4: args = self._parse_args(arg, 4) if args == None: self.vrb.write("please provide valid argument") return msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_EDA, eda_app_dynamic_scale_t()) if "e" in args[0]: msg.payload.dscale = 1 msg.payload.minscale = int(args[1]) 
msg.payload.maxscale = int(args[2]) msg.payload.lprtia=int(args[3]) msg.payload.command = M2M2_EDA_APP_CMD_ENUM_t.M2M2_EDA_APP_CMD_DYNAMIC_SCALE_REQ self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_EDA, eda_app_dynamic_scale_t(), 10) if reply_msg != None: status = self._get_enum_name(M2M2_APP_COMMON_STATUS_ENUM_t, reply_msg.payload.status) self.vrb.write(" Status: '{}'".format(status)) else: self.vrb.err("Enabling dynamic scaling failed!") def do_DoRTIACal(self, arg): """ ----------------------------------------------- Usage: #>DoRTIACal minscale maxscale lprtiasel #>minscale [20-100k,21-120k,22-128k,23-160k,24-196k,25-256k,26-512k] #>maxscale [20-100k,21-120k,22-128k,23-160k,24-196k,25-256k,26-512k] #>lprtiasel[20-100k,21-120k,22-128k,23-160k,24-196k,25-256k,26-512k] #>minscale<maxscale, lprtiasel >= minscale #>DoRTIACal 20 26 20 """ args = self._parse_args(arg, 3) if args == None: self.vrb.write("please provide valid argument") return length = int(args[1]) - int(args[0]) + 1 msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_EDA, eda_app_perform_rtia_cal_t(length)) msg.payload.command = M2M2_EDA_APP_CMD_ENUM_t.M2M2_EDA_APP_CMD_RTIA_CAL_REQ msg.payload.minscale = int(args[0]) msg.payload.maxscale = int(args[1]) msg.payload.lowpowerrtia = int(args[2]) msg.payload.num_calibrated_values = msg.payload.maxscale - msg.payload.minscale + 1 self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_EDA, eda_app_perform_rtia_cal_t(msg.payload.num_calibrated_values), 240) if reply_msg == None: self.vrb.err("RTIA Calibration failed!!") return self._print_rtia_cal_result(reply_msg) self.vrb.write("RTIA Calibration success!!") def do_SetBIADFTnum(self, arg): """ Set the BIA app DFT number.. 
'0' for 4 '1' for 8 '2' for 16 '3' for 32 '4' for 64 '5' for 128 '6' for 256 '7' for 512 '8' for 1024 '9' for 2048 '10' for 4096 '11' for 8192 '12' for 16384 ----------------------------------------------- Usage: #>SetBIADFTnum [value] #>SetBIADFTnum 11 """ args = self._parse_args(arg, 1) if args == None: self.vrb.write("please provide valid argument") msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_BIA, bia_app_set_dft_num_t()) msg.payload.command = M2M2_BIA_APP_CMD_ENUM_t.M2M2_BIA_APP_CMD_SET_DFT_NUM_REQ if args == None: msg.payload.dftnum = 11 else: msg.payload.dftnum = int(args[0]) self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_BIA, bia_app_set_dft_num_t(), 10) if reply_msg != None: status = self._get_enum_name(M2M2_APP_COMMON_STATUS_ENUM_t, reply_msg.payload.status) self.vrb.write(" Status: '{}'".format(status)) self.vrb.write(" Value: '{}'".format(int(reply_msg.payload.dftnum))) else: self.vrb.err("Setting BIA DFT number failed!") def do_SetEdaDFTnum(self, arg): """ Set the EDA app DFT number.. 
'0' for 4 '1' for 8 '2' for 16 '3' for 32 ----------------------------------------------- Usage: #>SetEdaDFTnum [value] #>SetEdaDFTnum 4 """ args = self._parse_args(arg, 1) if args == None: self.vrb.write("please provide valid argument") msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_EDA, eda_app_set_dft_num_t()) msg.payload.command = M2M2_EDA_APP_CMD_ENUM_t.M2M2_EDA_APP_CMD_SET_DFT_NUM_REQ if args == None: msg.payload.dftnum = 2 else: msg.payload.dftnum = int(args[0]) self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_EDA, eda_app_set_dft_num_t(), 10) if reply_msg != None: status = self._get_enum_name(M2M2_APP_COMMON_STATUS_ENUM_t, reply_msg.payload.status) self.vrb.write(" Status: '{}'".format(status)) self.vrb.write(" Value: '{}'".format(int(reply_msg.payload.dftnum))) else: self.vrb.err("Setting EDA DFT number failed!") def do_SetEdaBaselineImp(self, arg): """ Set the EDA app Baseline resistor used for measurement and measured impedance. ----------------------------------------------- Usage: #>SetEdaBaselineImp [imp_real_dft16] [imp_img_dft16] [imp_real_dft8] [imp_img_dft8] [resistor_baseline] #>SetEdaBaselineImp 25000.5 25000.5 25000.5 25000.5 19900 """ args = self._parse_args(arg, 5) if args == None: self.vrb.write("please provide valid argument") msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_EDA, eda_app_set_baseline_imp_t()) msg.payload.command = M2M2_EDA_APP_CMD_ENUM_t.M2M2_EDA_APP_CMD_BASELINE_IMP_SET_REQ msg.payload.imp_real_dft16 = float(args[0]) msg.payload.imp_img_dft16 = float(args[1]) msg.payload.imp_real_dft8 = float(args[2]) msg.payload.imp_img_dft8 = float(args[3]) msg.payload.resistor_baseline = int(args[4]) self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_EDA, eda_app_set_baseline_imp_t(), 10) if reply_msg != None: status = self._get_enum_name(M2M2_APP_COMMON_STATUS_ENUM_t, reply_msg.payload.status) self.vrb.write(" Status: '{}'".format(status)) self.vrb.write(" DFT16_REAL: 
'{}'".format(float(reply_msg.payload.imp_real_dft16))) self.vrb.write(" DFT16_IMG: '{}'".format(float(reply_msg.payload.imp_img_dft16))) self.vrb.write(" DFT8_REAL: '{}'".format(float(reply_msg.payload.imp_real_dft8))) self.vrb.write(" DFT8_IMG: '{}'".format(float(reply_msg.payload.imp_img_dft8))) self.vrb.write(" BASELINE REGISTER: '{}'".format(int(reply_msg.payload.resistor_baseline))) else: self.vrb.err("Setting EDA Baseline Impedance failed!") def do_GetEdaBaselineImp(self, arg): """ Set the EDA app Baseline resistor used for measurement and measured impedance. ----------------------------------------------- Usage: #>GetEdaBaselineImp """ msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_EDA, eda_app_get_baseline_imp_req_t()) msg.payload.command = M2M2_EDA_APP_CMD_ENUM_t.M2M2_EDA_APP_CMD_BASELINE_IMP_GET_REQ self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_EDA, eda_app_get_baseline_imp_resp_t(), 10) if reply_msg != None: status = self._get_enum_name(M2M2_APP_COMMON_STATUS_ENUM_t, reply_msg.payload.status) self.vrb.write(" Status: '{}'".format(status)) self.vrb.write(" Eda user baseleine imp_set_flag: '{}'".format(reply_msg.payload.eda_user_baseline_imp_set)) self.vrb.write(" DFT16_REAL: '{}'".format(float(reply_msg.payload.imp_real_dft16))) self.vrb.write(" DFT16_IMG: '{}'".format(float(reply_msg.payload.imp_img_dft16))) self.vrb.write(" DFT8_REAL: '{}'".format(float(reply_msg.payload.imp_real_dft8))) self.vrb.write(" DFT8_IMG: '{}'".format(float(reply_msg.payload.imp_img_dft8))) self.vrb.write(" BASELINE REGISTER: '{}'".format(int(reply_msg.payload.resistor_baseline))) else: self.vrb.err("Getting EDA Baseline Impedance failed!") def do_ResetEdaBaselineImp(self, arg): """ Reset the EDA app Baseline resistor used for measurement and measured impedance. 
----------------------------------------------- Usage: #>ResetEdaBaselineImp """ msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_EDA, eda_app_set_baseline_imp_t()) msg.payload.command = M2M2_EDA_APP_CMD_ENUM_t.M2M2_EDA_APP_CMD_BASELINE_IMP_RESET_REQ msg.payload.imp_real_dft16 = 0 msg.payload.imp_img_dft16 = 0 msg.payload.imp_real_dft8 = 0 msg.payload.imp_img_dft8 = 0 msg.payload.resistor_baseline = 0 self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_EDA, eda_app_set_baseline_imp_t(), 10) if reply_msg != None: status = self._get_enum_name(M2M2_APP_COMMON_STATUS_ENUM_t, reply_msg.payload.status) self.vrb.write(" Status: '{}'".format(status)) self.vrb.write(" DFT16_REAL: '{}'".format(float(reply_msg.payload.imp_real_dft16))) self.vrb.write(" DFT16_IMG: '{}'".format(float(reply_msg.payload.imp_img_dft16))) self.vrb.write(" DFT8_REAL: '{}'".format(float(reply_msg.payload.imp_real_dft8))) self.vrb.write(" DFT8_IMG: '{}'".format(float(reply_msg.payload.imp_img_dft8))) self.vrb.write(" BASELINE REGISTER: '{}'".format(int(reply_msg.payload.resistor_baseline))) else: self.vrb.err("Resetting EDA Baseline Impedance failed!") def do_SetHSRTIACal(self, arg): """ Set the BIA app HSRTIA cal.. 
HSRTIACAL[0-200,1-1k,2-5k] ----------------------------------------------- Usage: #>SetHSRTIACal [value] #>SetHSRTIACal 1 """ args = self._parse_args(arg, 1) if args == None: self.vrb.write("please provide valid argument") msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_BIA, bia_app_hs_rtia_sel_t()) msg.payload.command = M2M2_BIA_APP_CMD_ENUM_t.M2M2_BCM_APP_CMD_SET_HS_RTIA_CAL_REQ if args == None: msg.payload.hsritasel = 1 else: msg.payload.hsritasel = int(args[0]) self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_BIA, bia_app_hs_rtia_sel_t(), 10) if reply_msg != None: status = self._get_enum_name(M2M2_APP_COMMON_STATUS_ENUM_t, reply_msg.payload.status) self.vrb.write(" Status: '{}'".format(status)) self.vrb.write(" Value: '{}'".format(int(reply_msg.payload.hsritasel))) else: self.vrb.err("Setting BIA HSRTIACAL failed!") def do_getEcgVersion(self, arg): """ Get the Ecg application version information. #>getEcgVersion """ version = self._get_version(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_ECG) if version != None: self._print_version_pkt(version) else: self.vrb.err("Timed out waiting for the ECG version response.") #self.onecmd("getEcgAlgoVendorVersion") def do_getEdaVersion(self, arg): """ Get the EDA application version information. 
#>getEdaVersion """ version = self._get_version(M2M2_ADDR_ENUM_t.M2M2_ADDR_MED_EDA) if version != None: self._print_version_pkt(version) else: self.vrb.err("Timed out waiting for the EDA version response.") def do_fs_abort(self, arg): """ stop logging #>fs_abort """ args = self._parse_args(arg, 0) if args == None: return msg = m2m2_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_FS, m2m2_file_sys_cmd_t()) msg.payload.command = M2M2_FILE_SYS_CMD_ENUM_t.M2M2_FILE_SYS_CMD_FORCE_STOP_LOG_REQ self._send_packet(msg) reply_msg = self._get_packet(M2M2_ADDR_ENUM_t.M2M2_ADDR_SYS_FS, m2m2_file_sys_cmd_t(), 5000) if reply_msg != None: self._print_file_system_status(reply_msg) else: self.vrb.err("No response from device.stop log request failed") if __name__ == '__main__': m2m2_shell().cmdloop()
56.341443
4,463
0.614978
70,655
528,032
4.323247
0.034916
0.033949
0.02089
0.019066
0.812603
0.769092
0.73899
0.718224
0.695894
0.673861
0
0.056496
0.266111
528,032
9,371
4,464
56.347455
0.731751
0.0289
0
0.587688
0
0.006253
0.288033
0.017987
0
0
0.013036
0
0
0
null
null
0.002918
0.001946
null
null
0.066704
0
0
0
null
0
0
0
1
1
1
1
0
1
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
7
531aa880f81dbf32b4b000e6d4ac56a1c677594b
16,109
py
Python
app/models.py
LuckyQueen0928/tanna
081df82d09d0bcbd132060822ad0459ac670d0f4
[ "Apache-2.0" ]
3
2018-09-19T09:39:11.000Z
2021-08-18T09:23:19.000Z
app/models.py
LuckyQueen0928/tanna
081df82d09d0bcbd132060822ad0459ac670d0f4
[ "Apache-2.0" ]
null
null
null
app/models.py
LuckyQueen0928/tanna
081df82d09d0bcbd132060822ad0459ac670d0f4
[ "Apache-2.0" ]
null
null
null
# -*- coding: utf-8 -*- from . import db from datetime import datetime from sqlalchemy import DateTime class task_info_t(db.Model): __tablename__ = 'task_info_t' tid = db.Column(db.Integer, primary_key=True) task_name = db.Column(db.String(100)) task_desc = db.Column(db.String(500)) create_date = db.Column(db.DateTime, default=datetime.now) uid = db.Column(db.Integer) task_hash = db.Column(db.String(100)) task_state = db.Column(db.Integer) def __repr__(self): return '<task_name %r>' % self.task_name class application_info_t(db.Model): __tablename__ = 'application_info_t' id = db.Column(db.Integer, primary_key=True) tid = db.Column(db.Integer, db.ForeignKey('application_info_t.tid')) app_name = db.Column(db.String(255)) app_version = db.Column(db.String(255)) app_desc = db.Column(db.String(255)) algorithm_mode = db.Column(db.Integer) begin_time = db.Column(db.DateTime, default=datetime.now) end_time = db.Column(db.DateTime) app_state = db.Column(db.Integer) app_hash =db.Column(db.String(100)) fuzz_addr = db.Column(db.Integer) platform = db.Column(db.Integer) instru_mode = db.Column(db.Integer) app_port = db.Column(db.Integer) iterations = db.Column(db.Integer) time_interval = db.Column(db.Integer) def __repr__(self): return '<app_name %r>' % self.app_name #新增视图查询,避免联表查询速度过慢 by wcx class indextasklist(db.Model): __tablename__ = 'indextasklist' tid = db.Column(db.Integer, primary_key=True) task_name = db.Column(db.String(100)) task_desc = db.Column(db.String(500)) create_date = db.Column(db.DateTime, default=datetime.now) uid = db.Column(db.Integer) task_state = db.Column(db.Integer) app_name = db.Column(db.String(255)) app_state = db.Column(db.Integer) platform = db.Column(db.Integer) instru_mode = db.Column(db.Integer) def __repr__(self): return '<app_name %r>' % self.app_name #新增视图查询,避免联表查询速度过慢 by wcx class sample_info_t(db.Model): __tablename__ = 'sample_info_t' sample_id = db.Column(db.Integer, primary_key=True) sample_name = db.Column(db.String(100)) 
prefix_number = db.Column(db.Integer) suffix_number = db.Column(db.Integer) last_number = db.Column(db.Integer) format = db.Column(db.String(100)) aid = db.Column(db.Integer) father_sample = db.Column(db.String(100)) isexception = db.Column(db.Integer) sample_state = db.Column(db.Integer) taint_start = db.Column(db.String(100)) taint_offset = db.Column(db.String(100)) sample_hash = db.Column(db.String(100)) state_id = db.Column(db.Integer) log_limit = db.Column(db.Integer) ins_limit = db.Column(db.Integer) action_index = db.Column(db.Integer) fuzz_flag=db.Column(db.Integer) def __repr__(self): return '<sample_name %r>' % self.sample_name class Globalnode(db.Model): __tablename__ = 'global_node_t' nid = db.Column(db.Integer, primary_key=True) id = db.Column(db.Integer) tail = db.Column(db.Integer) name = db.Column(db.String(128)) status = db.Column(db.Integer) taskid = db.Column(db.Integer) aid = db.Column(db.Integer) check_flag = db.Column(db.Integer) def __repr__(self): return '<name %r>' % self.name class Globaledge(db.Model): __tablename__ = 'global_edge_t' eid = db.Column(db.Integer, primary_key=True) parent = db.Column(db.Integer) child = db.Column(db.Integer) aid = db.Column(db.Integer) parent_name = db.Column(db.String(256)) child_name = db.Column(db.String(256)) def __repr__(self): return '<parent_name %r>' % self.parent_name class Partialedge(db.Model): __tablename__ = 'partial_edge_t' eid = db.Column(db.Integer, primary_key=True) parent = db.Column(db.Integer) child = db.Column(db.Integer) parentnode = db.Column(db.Integer) aid = db.Column(db.Integer) def __repr__(self): return '<parent %r>' % self.parent class Partialnode(db.Model): __tablename__ = 'partial_node_t' nid = db.Column(db.Integer, primary_key=True) id = db.Column(db.Integer) tail = db.Column(db.Integer) parentnode = db.Column(db.Integer) status = db.Column(db.Integer) aid = db.Column(db.Integer) def __repr__(self): return '<id %r>' % self.id class peach_pit(db.Model): __tablename__ = 
'peach_pit' # is_fuzzing = db.Column(db.Integer) peach_id = db.Column(db.Integer, primary_key=True) pit_hash = db.Column(db.String(100)) aid = db.Column(db.Integer) pit_name = db.Column(db.String(100)) case_count = db.Column(db.Integer) def __repr__(self): return '<pit_name %r>' % self.pit_name class sensitive_addr_info(db.Model): __tablename__ = 'sensitive_addr_info' sensitive_id = db.Column(db.Integer, primary_key=True) aid = db.Column(db.Integer) sensitive_addr = db.Column(db.String(20)) def __repr__(self): return '<sensitive_addr %r>' % self.sensitive_addr class sensitive_post_t(db.Model): __tablename__ = 'sensitive_post_t' id = db.Column(db.Integer, primary_key=True) addr = db.Column(db.Integer) status = db.Column(db.Integer) aid = db.Column(db.Integer) def __repr__(self): return '<addr %r>' % self.addr class source_asm_map(db.Model): __tablename__ = 'source_asm_map' id = db.Column(db.Integer, primary_key=True) addr = db.Column(db.Integer) aid = db.Column(db.Integer) segment = db.Column(db.String(20)) info = db.Column(db.String(20)) def __repr__(self): return '<addr %r>' % self.addr class special_node_t(db.Model): __tablename__ = 'special_node_t' id = db.Column(db.Integer, primary_key=True) addr = db.Column(db.Integer) addrtype = db.Column(db.Integer) taskid = db.Column(db.Integer) aid = db.Column(db.Integer) def __repr__(self): return '<addr %r>' % self.addr class trace_info_t(db.Model): __tablename__ = 'trace_info_t' trace_id = db.Column(db.Integer, primary_key=True) trace_name = db.Column(db.String(100)) trace_file_addr = db.Column(db.String(100)) prefix_number = db.Column(db.Integer) suffix_number = db.Column(db.Integer) trace_state = db.Column(db.Integer) convert_number = db.Column(db.Integer) depth = db.Column(db.Integer, default=-1) count = db.Column(db.Integer) aid = db.Column(db.Integer) sid = db.Column(db.Integer) num = db.Column(db.Integer) def __repr__(self): return '<trace_name %r>' % self.trace_name class constrain_info_t(db.Model): 
constrain_name = db.Column(db.String(100)) constrain_file_addr = db.Column(db.String(500)) prefix_number = db.Column(db.Integer, default=0) suffix_number = db.Column(db.Integer, default=0) last_number = db.Column(db.Integer) constrain_file_state = db.Column(db.Integer) current_sample = db.Column(db.String(100)) aid = db.Column(db.Integer) constrain_id = db.Column(db.Integer, primary_key=True) convert_addr = db.Column(db.Integer) def __repr__(self): return '<constrain_name %r>' % self.constrain_name class coverage_log_t(db.Model): __tablename__ = 'coverage_log_t' id = db.Column(db.Integer, primary_key=True) log_time = db.Column(db.DateTime, default=datetime.now) coverage = db.Column(db.String) aid = db.Column(db.Integer) def __repr__(self): return '<id %r>' % self.id class user_t(db.Model): __tablename__ = 'user_t' user_id = db.Column(db.Integer, primary_key=True) username = db.Column(db.String) userpassword = db.Column(db.String) createtime = db.Column(db.DateTime, default=datetime.now) def __repr__(self): return '<username %r>' % self.username # 文档类模型 class w_task_info_t(db.Model): __tablename__ = 'w_task_info_t' tid = db.Column(db.Integer, primary_key=True) task_name = db.Column(db.String(100)) task_desc = db.Column(db.String(500)) create_date = db.Column(db.DateTime, default=datetime.now) uid = db.Column(db.Integer) task_hash = db.Column(db.String(100)) task_state = db.Column(db.Integer) def __repr__(self): return '<task_name %r>' % self.task_name class w_application_info_t(db.Model): __tablename__ = 'w_application_info_t' id = db.Column(db.Integer, primary_key=True) tid = db.Column(db.Integer, db.ForeignKey('w_application_info_t.tid')) app_name = db.Column(db.String(255)) app_version = db.Column(db.String(255)) app_desc = db.Column(db.String(255)) algorithm_mode = db.Column(db.Integer) begin_time = db.Column(db.DateTime, default=datetime.now) end_time = db.Column(db.DateTime) app_state = db.Column(db.Integer) app_hash =db.Column(db.String(100)) fuzz_addr = 
db.Column(db.Integer) platform = db.Column(db.Integer) instru_mode = db.Column(db.Integer) app_port = db.Column(db.Integer) iterations = db.Column(db.Integer) time_interval = db.Column(db.Integer) def __repr__(self): return '<app_name %r>' % self.app_name class w_indextasklist(db.Model): __tablename__ = 'w_indextasklist' tid = db.Column(db.Integer, primary_key=True) task_name = db.Column(db.String(100)) task_desc = db.Column(db.String(500)) create_date = db.Column(db.DateTime, default=datetime.now) uid = db.Column(db.Integer) task_state = db.Column(db.Integer) app_name = db.Column(db.String(255)) app_state = db.Column(db.Integer) platform = db.Column(db.Integer) instru_mode = db.Column(db.Integer) def __repr__(self): return '<app_name %r>' % self.app_name class w_sample_info_t(db.Model): __tablename__ = 'w_sample_info_t' sample_id = db.Column(db.Integer, primary_key=True) sample_name = db.Column(db.String(100)) prefix_number = db.Column(db.Integer) suffix_number = db.Column(db.Integer) last_number = db.Column(db.Integer) format = db.Column(db.String(100)) aid = db.Column(db.Integer) father_sample = db.Column(db.String(100)) isexception = db.Column(db.Integer) sample_state = db.Column(db.Integer) taint_start = db.Column(db.String(100)) taint_offset = db.Column(db.String(100)) sample_hash = db.Column(db.String(100)) state_id = db.Column(db.Integer) log_limit = db.Column(db.Integer) ins_limit = db.Column(db.Integer) action_index = db.Column(db.Integer) fuzz_flag=db.Column(db.Integer) def __repr__(self): return '<sample_name %r>' % self.sample_name class w_Globalnode(db.Model): __tablename__ = 'w_global_node_t' nid = db.Column(db.Integer, primary_key=True) id = db.Column(db.Integer) tail = db.Column(db.Integer) name = db.Column(db.String(128)) status = db.Column(db.Integer) taskid = db.Column(db.Integer) aid = db.Column(db.Integer) check_flag = db.Column(db.Integer) def __repr__(self): return '<name %r>' % self.name class w_Globaledge(db.Model): __tablename__ = 
'w_global_edge_t' eid = db.Column(db.Integer, primary_key=True) parent = db.Column(db.Integer) child = db.Column(db.Integer) aid = db.Column(db.Integer) parent_name = db.Column(db.String(256)) child_name = db.Column(db.String(256)) def __repr__(self): return '<parent_name %r>' % self.parent_name class w_Partialedge(db.Model): __tablename__ = 'w_partial_edge_t' eid = db.Column(db.Integer, primary_key=True) parent = db.Column(db.Integer) child = db.Column(db.Integer) parentnode = db.Column(db.Integer) aid = db.Column(db.Integer) def __repr__(self): return '<parent %r>' % self.parent class w_Partialnode(db.Model): __tablename__ = 'w_partial_node_t' nid = db.Column(db.Integer, primary_key=True) id = db.Column(db.Integer) tail = db.Column(db.Integer) parentnode = db.Column(db.Integer) status = db.Column(db.Integer) aid = db.Column(db.Integer) def __repr__(self): return '<id %r>' % self.id class w_peach_pit(db.Model): __tablename__ = 'w_peach_pit' # is_fuzzing = db.Column(db.Integer) peach_id = db.Column(db.Integer, primary_key=True) pit_hash = db.Column(db.String(100)) aid = db.Column(db.Integer) pit_name = db.Column(db.String(100)) case_count = db.Column(db.Integer) def __repr__(self): return '<pit_name %r>' % self.pit_name class w_sensitive_addr_info(db.Model): __tablename__ = 'w_sensitive_addr_info' sensitive_id = db.Column(db.Integer, primary_key=True) aid = db.Column(db.Integer) sensitive_addr = db.Column(db.String(20)) def __repr__(self): return '<sensitive_addr %r>' % self.sensitive_addr class w_sensitive_post_t(db.Model): __tablename__ = 'w_sensitive_post_t' id = db.Column(db.Integer, primary_key=True) addr = db.Column(db.Integer) status = db.Column(db.Integer) aid = db.Column(db.Integer) def __repr__(self): return '<addr %r>' % self.addr class w_source_asm_map(db.Model): __tablename__ = 'w_source_asm_map' id = db.Column(db.Integer, primary_key=True) addr = db.Column(db.Integer) aid = db.Column(db.Integer) segment = db.Column(db.String(20)) info = 
db.Column(db.String(20)) def __repr__(self): return '<addr %r>' % self.addr class w_special_node_t(db.Model): __tablename__ = 'w_special_node_t' id = db.Column(db.Integer, primary_key=True) addr = db.Column(db.Integer) addrtype = db.Column(db.Integer) taskid = db.Column(db.Integer) aid = db.Column(db.Integer) def __repr__(self): return '<addr %r>' % self.addr class w_trace_info_t(db.Model): __tablename__ = 'w_trace_info_t' trace_id = db.Column(db.Integer, primary_key=True) trace_name = db.Column(db.String(100)) trace_file_addr = db.Column(db.String(100)) prefix_number = db.Column(db.Integer) suffix_number = db.Column(db.Integer) trace_state = db.Column(db.Integer) convert_number = db.Column(db.Integer) depth = db.Column(db.Integer, default=-1) count = db.Column(db.Integer) aid = db.Column(db.Integer) sid = db.Column(db.Integer) num = db.Column(db.Integer) def __repr__(self): return '<trace_name %r>' % self.trace_name class w_constrain_info_t(db.Model): constrain_name = db.Column(db.String(100)) constrain_file_addr = db.Column(db.String(500)) prefix_number = db.Column(db.Integer, default=0) suffix_number = db.Column(db.Integer, default=0) last_number = db.Column(db.Integer) constrain_file_state = db.Column(db.Integer) current_sample = db.Column(db.String(100)) aid = db.Column(db.Integer) constrain_id = db.Column(db.Integer, primary_key=True) convert_addr = db.Column(db.Integer) def __repr__(self): return '<constrain_name %r>' % self.constrain_name class w_coverage_log_t(db.Model): __tablename__ = 'w_coverage_log_t' id = db.Column(db.Integer, primary_key=True) log_time = db.Column(db.DateTime, default=datetime.now) coverage = db.Column(db.String) aid = db.Column(db.Integer) def __repr__(self): return '<id %r>' % self.id # Auth Model class User(db.Model): __tablename__ = 'WEB_USER_T' id = db.Column(db.Integer, primary_key=True) username = db.Column(db.String(64), unique=True) email = db.Column(db.String(64), unique=True) pass_hash = db.Column(db.String(128)) # role_id 
= db.Column(db.Integer, db.ForeignKey('roles.id')) user_confirm = db.Column(db.Boolean, default=False) def __repr__(self): return '<username %r>' % self.username @property def password(self): raise AttributeError('password is not a readable attribute') @password.setter def password(self, password): self.password_hash = generate_password_hash(password) def verify_password(self, password): return check_password_hash(self.password_hash, password)
32.543434
74
0.678192
2,369
16,109
4.350359
0.06374
0.201824
0.25228
0.301863
0.931011
0.916942
0.863866
0.856491
0.849117
0.84902
0
0.013893
0.18679
16,109
494
75
32.609312
0.772824
0.013471
0
0.783582
0
0
0.06082
0.004218
0
0
0
0
0
1
0.09204
false
0.022388
0.007463
0.087065
0.99005
0
0
0
0
null
1
1
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
11
53304fca4bb87233c48620681f987247ff2bb89c
2,773
py
Python
pyaz/storage/account/private_endpoint_connection/__init__.py
py-az-cli/py-az-cli
9a7dc44e360c096a5a2f15595353e9dad88a9792
[ "MIT" ]
null
null
null
pyaz/storage/account/private_endpoint_connection/__init__.py
py-az-cli/py-az-cli
9a7dc44e360c096a5a2f15595353e9dad88a9792
[ "MIT" ]
null
null
null
pyaz/storage/account/private_endpoint_connection/__init__.py
py-az-cli/py-az-cli
9a7dc44e360c096a5a2f15595353e9dad88a9792
[ "MIT" ]
1
2022-02-03T09:12:01.000Z
2022-02-03T09:12:01.000Z
from .... pyaz_utils import _call_az def delete(account_name=None, id=None, name=None, resource_group=None, yes=None): ''' Delete a private endpoint connection request for storage account. Optional Parameters: - account_name -- The storage account name. - id -- The ID of the private endpoint connection associated with the Storage Account. You can get it using `az storage account show`. - name -- The name of the private endpoint connection associated with the Storage Account. - resource_group -- The resource group name of specified storage account. - yes -- Do not prompt for confirmation. ''' return _call_az("az storage account private-endpoint-connection delete", locals()) def show(account_name=None, id=None, name=None, resource_group=None): ''' Show details of a private endpoint connection request for storage account. Optional Parameters: - account_name -- The storage account name. - id -- The ID of the private endpoint connection associated with the Storage Account. You can get it using `az storage account show`. - name -- The name of the private endpoint connection associated with the Storage Account. - resource_group -- The resource group name of specified storage account. ''' return _call_az("az storage account private-endpoint-connection show", locals()) def approve(account_name=None, description=None, id=None, name=None, resource_group=None): ''' Approve a private endpoint connection request for storage account. Optional Parameters: - account_name -- The storage account name. - description -- Comments for approve operation. - id -- The ID of the private endpoint connection associated with the Storage Account. You can get it using `az storage account show`. - name -- The name of the private endpoint connection associated with the Storage Account. - resource_group -- The resource group name of specified storage account. 
''' return _call_az("az storage account private-endpoint-connection approve", locals()) def reject(account_name=None, description=None, id=None, name=None, resource_group=None): ''' Reject a private endpoint connection request for storage account. Optional Parameters: - account_name -- The storage account name. - description -- Comments for reject operation. - id -- The ID of the private endpoint connection associated with the Storage Account. You can get it using `az storage account show`. - name -- The name of the private endpoint connection associated with the Storage Account. - resource_group -- The resource group name of specified storage account. ''' return _call_az("az storage account private-endpoint-connection reject", locals())
48.649123
138
0.735305
370
2,773
5.437838
0.127027
0.194831
0.198807
0.079523
0.906561
0.906561
0.906561
0.906561
0.906561
0.880219
0
0
0.190408
2,773
56
139
49.517857
0.896214
0.676163
0
0
0
0
0.283602
0.145161
0
0
0
0
0
1
0.444444
false
0
0.111111
0
1
0
0
0
0
null
0
1
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
9
535e06c5e277a9f31ce4aef6d1d1605ade4aa348
134
py
Python
can_tools/scrapers/official/FL/__init__.py
ChrisBremer/can-scrapers
a91123368f8473a2778c4efcc40855b2fd631306
[ "MIT" ]
null
null
null
can_tools/scrapers/official/FL/__init__.py
ChrisBremer/can-scrapers
a91123368f8473a2778c4efcc40855b2fd631306
[ "MIT" ]
null
null
null
can_tools/scrapers/official/FL/__init__.py
ChrisBremer/can-scrapers
a91123368f8473a2778c4efcc40855b2fd631306
[ "MIT" ]
null
null
null
# from can_tools.scrapers.official.FL.fl_hospitals import FloridaHospital from can_tools.scrapers.official.FL.fl_state import Florida
44.666667
73
0.865672
20
134
5.6
0.55
0.125
0.214286
0.357143
0.571429
0.571429
0.571429
0
0
0
0
0
0.067164
134
2
74
67
0.896
0.529851
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
72c70d092b9bc666ed98dea0411dafbeef718c45
12,801
py
Python
socket_com/UDPSocket.py
vineeths96/Federated-Learning
ef0b46385c421edcfca8fcbd24371a2d9b70fe78
[ "MIT" ]
null
null
null
socket_com/UDPSocket.py
vineeths96/Federated-Learning
ef0b46385c421edcfca8fcbd24371a2d9b70fe78
[ "MIT" ]
null
null
null
socket_com/UDPSocket.py
vineeths96/Federated-Learning
ef0b46385c421edcfca8fcbd24371a2d9b70fe78
[ "MIT" ]
null
null
null
import io import time import torch import socket from seed import set_seed UDP_DEBUG = False BUFFER = 1024 * 64 class UDPServer: def __init__( self, SERVER=socket.gethostbyname(socket.gethostname()), PORT=5050, NUM_CLIENTS=1, TIMEOUT=5, GRADIENT_SIZE=14728266, CHUNK=100, DELAY=5e-3, SEED=42, ): self.SERVER = SERVER self.PORT = PORT self.NUM_CLIENTS = NUM_CLIENTS self.GRADIENT_SIZE = GRADIENT_SIZE self.CHUNK = CHUNK self.DELAY = DELAY self.SEED = SEED self.ADDR = (SERVER, PORT) self.END_OF_MESSAGE = torch.tensor(float("inf")) self.DEVICES = [] self.server = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) # self.SEND_BUF_SIZE = 4096 # self.RECV_BUF_SIZE = 4096 # # self.server.setsockopt( # socket.SOL_SOCKET, # socket.SO_SNDBUF, # self.SEND_BUF_SIZE) # self.server.setsockopt( # socket.SOL_SOCKET, # socket.SO_RCVBUF, # self.RECV_BUF_SIZE) self.server.bind(self.ADDR) self.server.settimeout(TIMEOUT) self._indices_queue = [] self.accumulated_gradient = torch.zeros(self.GRADIENT_SIZE) def encode(self, tensor): file = io.BytesIO() torch.save(tensor, file) file.seek(0) encoded = file.read() return encoded def decode(self, buffer): tensor = torch.load(io.BytesIO(buffer)) return tensor def send(self, tensor, addr): messages = tensor.split(self.CHUNK) for message in messages: encoded_message = self.encode(message.clone()) self.server.sendto(encoded_message, addr) time.sleep(self.DELAY) self.send_EOT(addr) def send_EOT(self, addr): encoded_message = self.encode(self.END_OF_MESSAGE) self.server.sendto(encoded_message, addr) def send_TCP_EOT(self): from TCPSocket import TCPClient clientTCP = TCPClient(SERVER=SERVER_COMP, MSG_SIZE=MSG_SIZE, DELAY=DELAY) clientTCP.send(self.END_OF_MESSAGE) def receive(self): buffer = [] readnext = True msg, addr = None, None try: while readnext: msg, addr = self.server.recvfrom(BUFFER) try: decoded_msg = self.decode(msg) except: continue if not len(decoded_msg.shape) and torch.isinf(decoded_msg): if addr not in self.DEVICES: 
self.DEVICES.append(addr) break buffer.append(decoded_msg) except: if addr and addr not in self.DEVICES: self.DEVICES.append(addr) if len(buffer) > 1: msg = torch.cat(buffer) else: msg = buffer[0] print(f"[{addr}] {msg}") indices = msg[:, 0].long() gradient = msg[:, 1] received_coordinates_fraction = gradient.nelement() / self.GRADIENT_SIZE self.accumulated_gradient[indices] += 1 / received_coordinates_fraction * gradient def start(self): print(f"[LISTENING] Server is listening on {self.SERVER}") try: while True: self.receive() if len(self.DEVICES) < self.NUM_CLIENTS: continue accumulated_grad_indices = torch.vstack( [torch.arange(self.GRADIENT_SIZE), self.accumulated_gradient] ).T for client in self.DEVICES: self.send(accumulated_grad_indices, client) self.DEVICES = [] self.accumulated_gradient.zero_() except KeyboardInterrupt: self.stop() def stop(self): self.server.close() class UDPClient: def __init__( self, SERVER=socket.gethostbyname(socket.gethostname()), PORT=5050, TIMEOUT=5, GRADIENT_SIZE=14728266, CHUNK=100, DELAY=5e-3, SEED=42, ): self.SERVER = SERVER self.PORT = PORT self.TIMEOUT = TIMEOUT self.GRADIENT_SIZE = GRADIENT_SIZE self.CHUNK = CHUNK self.DELAY = DELAY self.SEED = SEED self.ADDR = (SERVER, PORT) self.END_OF_MESSAGE = torch.tensor(float("inf")) self.client = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) def encode(self, tensor): file = io.BytesIO() torch.save(tensor, file) file.seek(0) encoded = file.read() return encoded def decode(self, buffer): tensor = torch.load(io.BytesIO(buffer)) return tensor def send(self, tensor): messages = tensor.split(self.CHUNK) for message in messages: encoded_message = self.encode(message.clone()) self.client.sendto(encoded_message, self.ADDR) time.sleep(self.DELAY) self.send_EOT() def send_EOT(self): encoded_message = self.encode(self.END_OF_MESSAGE) self.client.sendto(encoded_message, self.ADDR) def receive(self): buffer = [] readnext = True try: while readnext: msg, addr = self.client.recvfrom(BUFFER) try: 
decoded_msg = self.decode(msg) except: continue if not len(decoded_msg.shape) and torch.isinf(decoded_msg): break buffer.append(decoded_msg) except socket.error: pass if len(buffer) > 1: msg = torch.cat(buffer) else: msg = buffer[0] # print(f"[{addr}] {msg}") return msg def receive_TCP_EOT(self): from TCPSocket import TCPServer server = TCPServer(SERVER=SERVER_COMP, MSG_SIZE=MSG_SIZE, DELAY=DELAY) server.start() class UDPKServer: def __init__( self, SERVER=socket.gethostbyname(socket.gethostname()), PORT=5050, NUM_CLIENTS=1, TIMEOUT=5, GRADIENT_SIZE=14728266, K=10000, CHUNK=100, DELAY=5e-3, SEED=42, ): self.SERVER = SERVER self.PORT = PORT self.NUM_CLIENTS = NUM_CLIENTS self.K = K self.GRADIENT_SIZE = GRADIENT_SIZE self.CHUNK = CHUNK self.DELAY = DELAY self.SEED = SEED self.ADDR = (SERVER, PORT) self.END_OF_MESSAGE = torch.tensor(float("inf")) self.DEVICES = [] self.server = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) # self.SEND_BUF_SIZE = 4096 # self.RECV_BUF_SIZE = 4096 # # self.server.setsockopt( # socket.SOL_SOCKET, # socket.SO_SNDBUF, # self.SEND_BUF_SIZE) # self.server.setsockopt( # socket.SOL_SOCKET, # socket.SO_RCVBUF, # self.RECV_BUF_SIZE) self.server.bind(self.ADDR) self.server.settimeout(TIMEOUT) buffer_size = self.server.getsockopt(socket.SOL_SOCKET, socket.SO_SNDBUF) print("Buffer size [After]:%d" % buffer_size) self._indices_queue = [] self.accumulated_gradient = torch.zeros(self.GRADIENT_SIZE) def encode(self, tensor): file = io.BytesIO() torch.save(tensor, file) file.seek(0) encoded = file.read() return encoded def decode(self, buffer): tensor = torch.load(io.BytesIO(buffer)) return tensor def send(self, tensor, addr): messages = tensor.split(self.CHUNK) for message in messages: encoded_message = self.encode(message.clone()) self.server.sendto(encoded_message, addr) time.sleep(self.DELAY) self.send_EOT(addr) def send_EOT(self, addr): encoded_message = self.encode(self.END_OF_MESSAGE) self.server.sendto(encoded_message, addr) def 
send_TCP_EOT(self): from TCPSocket import TCPClient clientTCP = TCPClient(SERVER=SERVER_COMP, MSG_SIZE=MSG_SIZE, DELAY=DELAY) clientTCP.send(self.END_OF_MESSAGE) def receive(self): buffer = [] readnext = True msg, addr = None, None try: while readnext: msg, addr = self.server.recvfrom(BUFFER) # if addr not in self.DEVICES: # self.DEVICES.append(addr) try: decoded_msg = self.decode(msg) except: continue if not len(decoded_msg.shape) and torch.isinf(decoded_msg): if addr not in self.DEVICES: self.DEVICES.append(addr) break buffer.append(decoded_msg) except socket.error: if addr and addr not in self.DEVICES: self.DEVICES.append(addr) if len(buffer) > 1: msg = torch.cat(buffer) else: msg = buffer[0] print(f"[{addr}] {msg}") indices = msg[:, 0].long() gradient = msg[:, 1] received_coordinates_fraction = gradient.nelement() / self.K self.accumulated_gradient[indices] += 1 / received_coordinates_fraction * gradient # time.sleep(self.DELAY) # self.send(msg, addr) def start(self): print(f"[LISTENING] Server is listening on {self.SERVER}") try: while True: self.receive() if len(self.DEVICES) < self.NUM_CLIENTS: continue if not self._indices_queue: set_seed(self.SEED) self._indices_queue = torch.randperm(self.GRADIENT_SIZE).split(self.K) self._indices_queue = list(self._indices_queue) RandK_indices = self._indices_queue.pop().long() RandK_flat_grad = self.accumulated_gradient[RandK_indices] accumulated_grad_indices = torch.vstack([RandK_indices, RandK_flat_grad]).T for client in self.DEVICES: self.send(accumulated_grad_indices, client) self.DEVICES = [] self.accumulated_gradient.zero_() except KeyboardInterrupt: self.stop() def stop(self): self.server.close() class UDPKClient: def __init__( self, SERVER=socket.gethostbyname(socket.gethostname()), PORT=5050, TIMEOUT=5, GRADIENT_SIZE=14728266, K=10000, CHUNK=100, DELAY=5e-3, SEED=42, ): self.SERVER = SERVER self.PORT = PORT self.TIMEOUT = TIMEOUT self.GRADIENT_SIZE = GRADIENT_SIZE self.K = K self.CHUNK = CHUNK self.DELAY = 
DELAY self.SEED = SEED self.ADDR = (SERVER, PORT) self.END_OF_MESSAGE = torch.tensor(float("inf")) self.client = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) def encode(self, tensor): file = io.BytesIO() torch.save(tensor, file) file.seek(0) encoded = file.read() return encoded def decode(self, buffer): tensor = torch.load(io.BytesIO(buffer)) return tensor def send(self, tensor): messages = tensor.split(self.CHUNK) for message in messages: encoded_message = self.encode(message.clone()) self.client.sendto(encoded_message, self.ADDR) time.sleep(self.DELAY) self.send_EOT() def send_EOT(self): encoded_message = self.encode(self.END_OF_MESSAGE) self.client.sendto(encoded_message, self.ADDR) def receive(self): buffer = [] readnext = True try: while readnext: msg, addr = self.client.recvfrom(BUFFER) try: decoded_msg = self.decode(msg) except: continue if not len(decoded_msg.shape) and torch.isinf(decoded_msg): break buffer.append(decoded_msg) except socket.error: pass if len(buffer) > 1: msg = torch.cat(buffer) else: msg = buffer[0] # print(f"[{addr}] {msg}") return msg def receive_TCP_EOT(self): from TCPSocket import TCPServer server = TCPServer(SERVER=SERVER_COMP, MSG_SIZE=MSG_SIZE, DELAY=DELAY) server.start()
25.653307
91
0.553004
1,403
12,801
4.898076
0.104063
0.0422
0.028376
0.023283
0.933935
0.924331
0.908906
0.908906
0.907305
0.888097
0
0.015913
0.352004
12,801
498
92
25.704819
0.812538
0.049059
0
0.925816
0
0
0.013011
0
0
0
0
0
0
1
0.094955
false
0.005935
0.026706
0
0.163205
0.014837
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
72e1276a03c563e896c49f5855892246c526fe5d
2,283
py
Python
tests/test_pie.py
andrewtavis/stdviz
8dd575d579a62c2afdb83e1d05ce03ac98db6ce1
[ "BSD-3-Clause" ]
null
null
null
tests/test_pie.py
andrewtavis/stdviz
8dd575d579a62c2afdb83e1d05ce03ac98db6ce1
[ "BSD-3-Clause" ]
13
2021-02-11T16:19:03.000Z
2021-10-20T18:59:34.000Z
tests/test_pie.py
andrewtavis/stdviz
8dd575d579a62c2afdb83e1d05ce03ac98db6ce1
[ "BSD-3-Clause" ]
1
2021-03-17T03:35:45.000Z
2021-03-17T03:35:45.000Z
""" Pie Plot Tests -------------- """ import matplotlib.pyplot as plt import pltviz import pytest def test_pie( monkeypatch, allocations, factioned_allocations, parties, faction_labels, party_colors, ): monkeypatch.setattr(plt, "show", lambda: None) pltviz.pie( counts=allocations, labels=None, faction_labels=None, colors=None, radius=1, outer_ring_density=100, donut_ratio=1, display_labels=False, display_counts=False, label_font_size=20, axis=None, ) pltviz.pie( counts=allocations, labels=parties, faction_labels=None, colors=party_colors, radius=1, outer_ring_density=100, donut_ratio=1, display_labels=False, display_counts=False, label_font_size=20, axis=None, ) pltviz.pie( counts=allocations, labels=parties, faction_labels=None, colors=party_colors, radius=1, outer_ring_density=100, donut_ratio=1, display_labels=True, display_counts=True, label_font_size=20, axis=None, ) with pytest.raises(AssertionError): pltviz.pie( counts=allocations, labels=parties, faction_labels=faction_labels, colors=party_colors, radius=1, outer_ring_density=100, donut_ratio=1, display_labels=True, display_counts=True, label_font_size=20, axis=None, ) pltviz.pie( counts=factioned_allocations, labels=parties, faction_labels=faction_labels, colors=party_colors, radius=1, outer_ring_density=100, donut_ratio=1, display_labels=False, display_counts=False, label_font_size=20, axis=None, ) pltviz.pie( counts=factioned_allocations, labels=parties, faction_labels=faction_labels, colors=party_colors, radius=1, outer_ring_density=100, donut_ratio=1, display_labels=True, display_counts=True, label_font_size=20, axis=None, )
21.951923
50
0.576435
236
2,283
5.313559
0.186441
0.103668
0.095694
0.076555
0.810207
0.810207
0.781499
0.781499
0.769537
0.769537
0
0.027851
0.339466
2,283
103
51
22.165049
0.803714
0.012703
0
0.769231
0
0
0.001781
0
0
0
0
0
0.010989
1
0.010989
false
0
0.032967
0
0.043956
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
72f22d7797a382607420cd16b31431e95dd7350b
118
py
Python
backend/models/user.py
hillcrestpaul0719/borz-server
faae966b6138723ee88e80ea6dfe242809ad0a87
[ "Apache-2.0" ]
null
null
null
backend/models/user.py
hillcrestpaul0719/borz-server
faae966b6138723ee88e80ea6dfe242809ad0a87
[ "Apache-2.0" ]
24
2021-01-04T12:16:40.000Z
2021-07-23T12:17:34.000Z
backend/models/user.py
hillcrestpaul0719/borz-server
faae966b6138723ee88e80ea6dfe242809ad0a87
[ "Apache-2.0" ]
null
null
null
from django.db import models from django.contrib.auth.models import AbstractUser class User(AbstractUser): pass
16.857143
51
0.79661
16
118
5.875
0.6875
0.212766
0
0
0
0
0
0
0
0
0
0
0.144068
118
6
52
19.666667
0.930693
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.25
0.5
0
0.75
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
1
0
1
0
0
7
72fa6a85bddb3b6613bfc780111f844999aeb4e0
6,819
py
Python
simulator/simp_py/SmallFont.py
kcfkwok2003/Simp_py
f75e66da01b45dc8688dda602f8b33d4258f0c31
[ "MIT" ]
null
null
null
simulator/simp_py/SmallFont.py
kcfkwok2003/Simp_py
f75e66da01b45dc8688dda602f8b33d4258f0c31
[ "MIT" ]
null
null
null
simulator/simp_py/SmallFont.py
kcfkwok2003/Simp_py
f75e66da01b45dc8688dda602f8b33d4258f0c31
[ "MIT" ]
null
null
null
# SmallFont.c # Font type : Full (95 characters) # Font size : 8x12 pixels # Memory usage : 1144 bytes #if defined(__AVR__) #include <avr/pgmspace.h> #define fontdatatype const uint8_t #elif defined(__PIC32MX__) #define PROGMEM #define fontdatatype const unsigned char #elif defined(__arm__) #define PROGMEM #define fontdatatype const unsigned char #endif tft_SmallFont=[ 0x08,0x0C,0x20,0x5F, 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, # <space> 0x00,0x00,0x20,0x20,0x20,0x20,0x20,0x20,0x00,0x20,0x00,0x00, # ! 0x00,0x28,0x50,0x50,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, # " 0x00,0x00,0x28,0x28,0xFC,0x28,0x50,0xFC,0x50,0x50,0x00,0x00, # # 0x00,0x20,0x78,0xA8,0xA0,0x60,0x30,0x28,0xA8,0xF0,0x20,0x00, # $ 0x00,0x00,0x48,0xA8,0xB0,0x50,0x28,0x34,0x54,0x48,0x00,0x00, # % 0x00,0x00,0x20,0x50,0x50,0x78,0xA8,0xA8,0x90,0x6C,0x00,0x00, # & 0x00,0x40,0x40,0x80,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, # ' 0x00,0x04,0x08,0x10,0x10,0x10,0x10,0x10,0x10,0x08,0x04,0x00, # ( 0x00,0x40,0x20,0x10,0x10,0x10,0x10,0x10,0x10,0x20,0x40,0x00, # ) 0x00,0x00,0x00,0x20,0xA8,0x70,0x70,0xA8,0x20,0x00,0x00,0x00, # * 0x00,0x00,0x20,0x20,0x20,0xF8,0x20,0x20,0x20,0x00,0x00,0x00, # + 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x40,0x40,0x80, # , 0x00,0x00,0x00,0x00,0x00,0xF8,0x00,0x00,0x00,0x00,0x00,0x00, # - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x40,0x00,0x00, # . 
0x00,0x08,0x10,0x10,0x10,0x20,0x20,0x40,0x40,0x40,0x80,0x00, # / 0x00,0x00,0x70,0x88,0x88,0x88,0x88,0x88,0x88,0x70,0x00,0x00, # 0 0x00,0x00,0x20,0x60,0x20,0x20,0x20,0x20,0x20,0x70,0x00,0x00, # 1 0x00,0x00,0x70,0x88,0x88,0x10,0x20,0x40,0x80,0xF8,0x00,0x00, # 2 0x00,0x00,0x70,0x88,0x08,0x30,0x08,0x08,0x88,0x70,0x00,0x00, # 3 0x00,0x00,0x10,0x30,0x50,0x50,0x90,0x78,0x10,0x18,0x00,0x00, # 4 0x00,0x00,0xF8,0x80,0x80,0xF0,0x08,0x08,0x88,0x70,0x00,0x00, # 5 0x00,0x00,0x70,0x90,0x80,0xF0,0x88,0x88,0x88,0x70,0x00,0x00, # 6 0x00,0x00,0xF8,0x90,0x10,0x20,0x20,0x20,0x20,0x20,0x00,0x00, # 7 0x00,0x00,0x70,0x88,0x88,0x70,0x88,0x88,0x88,0x70,0x00,0x00, # 8 0x00,0x00,0x70,0x88,0x88,0x88,0x78,0x08,0x48,0x70,0x00,0x00, # 9 0x00,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x00,0x20,0x00,0x00, # : 0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x20,0x20,0x00, # ; 0x00,0x04,0x08,0x10,0x20,0x40,0x20,0x10,0x08,0x04,0x00,0x00, # < 0x00,0x00,0x00,0x00,0xF8,0x00,0x00,0xF8,0x00,0x00,0x00,0x00, # = 0x00,0x40,0x20,0x10,0x08,0x04,0x08,0x10,0x20,0x40,0x00,0x00, # > 0x00,0x00,0x70,0x88,0x88,0x10,0x20,0x20,0x00,0x20,0x00,0x00, # ? 
0x00,0x00,0x70,0x88,0x98,0xA8,0xA8,0xB8,0x80,0x78,0x00,0x00, # @ 0x00,0x00,0x20,0x20,0x30,0x50,0x50,0x78,0x48,0xCC,0x00,0x00, # A 0x00,0x00,0xF0,0x48,0x48,0x70,0x48,0x48,0x48,0xF0,0x00,0x00, # B 0x00,0x00,0x78,0x88,0x80,0x80,0x80,0x80,0x88,0x70,0x00,0x00, # C 0x00,0x00,0xF0,0x48,0x48,0x48,0x48,0x48,0x48,0xF0,0x00,0x00, # D 0x00,0x00,0xF8,0x48,0x50,0x70,0x50,0x40,0x48,0xF8,0x00,0x00, # E 0x00,0x00,0xF8,0x48,0x50,0x70,0x50,0x40,0x40,0xE0,0x00,0x00, # F 0x00,0x00,0x38,0x48,0x80,0x80,0x9C,0x88,0x48,0x30,0x00,0x00, # G 0x00,0x00,0xCC,0x48,0x48,0x78,0x48,0x48,0x48,0xCC,0x00,0x00, # H 0x00,0x00,0xF8,0x20,0x20,0x20,0x20,0x20,0x20,0xF8,0x00,0x00, # I 0x00,0x00,0x7C,0x10,0x10,0x10,0x10,0x10,0x10,0x90,0xE0,0x00, # J 0x00,0x00,0xEC,0x48,0x50,0x60,0x50,0x50,0x48,0xEC,0x00,0x00, # K 0x00,0x00,0xE0,0x40,0x40,0x40,0x40,0x40,0x44,0xFC,0x00,0x00, # L 0x00,0x00,0xD8,0xD8,0xD8,0xD8,0xA8,0xA8,0xA8,0xA8,0x00,0x00, # M 0x00,0x00,0xDC,0x48,0x68,0x68,0x58,0x58,0x48,0xE8,0x00,0x00, # N 0x00,0x00,0x70,0x88,0x88,0x88,0x88,0x88,0x88,0x70,0x00,0x00, # O 0x00,0x00,0xF0,0x48,0x48,0x70,0x40,0x40,0x40,0xE0,0x00,0x00, # P 0x00,0x00,0x70,0x88,0x88,0x88,0x88,0xE8,0x98,0x70,0x18,0x00, # Q 0x00,0x00,0xF0,0x48,0x48,0x70,0x50,0x48,0x48,0xEC,0x00,0x00, # R 0x00,0x00,0x78,0x88,0x80,0x60,0x10,0x08,0x88,0xF0,0x00,0x00, # S 0x00,0x00,0xF8,0xA8,0x20,0x20,0x20,0x20,0x20,0x70,0x00,0x00, # T 0x00,0x00,0xCC,0x48,0x48,0x48,0x48,0x48,0x48,0x30,0x00,0x00, # U 0x00,0x00,0xCC,0x48,0x48,0x50,0x50,0x30,0x20,0x20,0x00,0x00, # V 0x00,0x00,0xA8,0xA8,0xA8,0x70,0x50,0x50,0x50,0x50,0x00,0x00, # W 0x00,0x00,0xD8,0x50,0x50,0x20,0x20,0x50,0x50,0xD8,0x00,0x00, # X 0x00,0x00,0xD8,0x50,0x50,0x20,0x20,0x20,0x20,0x70,0x00,0x00, # Y 0x00,0x00,0xF8,0x90,0x10,0x20,0x20,0x40,0x48,0xF8,0x00,0x00, # Z 0x00,0x38,0x20,0x20,0x20,0x20,0x20,0x20,0x20,0x20,0x38,0x00, # [ 0x00,0x40,0x40,0x40,0x20,0x20,0x10,0x10,0x10,0x08,0x00,0x00, # <backslash> 0x00,0x70,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x70,0x00, # ] 
0x00,0x20,0x50,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, # ^ 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xFC, # _ 0x00,0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, # ` 0x00,0x00,0x00,0x00,0x00,0x30,0x48,0x38,0x48,0x3C,0x00,0x00, # a 0x00,0x00,0xC0,0x40,0x40,0x70,0x48,0x48,0x48,0x70,0x00,0x00, # b 0x00,0x00,0x00,0x00,0x00,0x38,0x48,0x40,0x40,0x38,0x00,0x00, # c 0x00,0x00,0x18,0x08,0x08,0x38,0x48,0x48,0x48,0x3C,0x00,0x00, # d 0x00,0x00,0x00,0x00,0x00,0x30,0x48,0x78,0x40,0x38,0x00,0x00, # e 0x00,0x00,0x1C,0x20,0x20,0x78,0x20,0x20,0x20,0x78,0x00,0x00, # f 0x00,0x00,0x00,0x00,0x00,0x3C,0x48,0x30,0x40,0x78,0x44,0x38, # g 0x00,0x00,0xC0,0x40,0x40,0x70,0x48,0x48,0x48,0xEC,0x00,0x00, # h 0x00,0x00,0x20,0x00,0x00,0x60,0x20,0x20,0x20,0x70,0x00,0x00, # i 0x00,0x00,0x10,0x00,0x00,0x30,0x10,0x10,0x10,0x10,0x10,0xE0, # j 0x00,0x00,0xC0,0x40,0x40,0x5C,0x50,0x70,0x48,0xEC,0x00,0x00, # k 0x00,0x00,0xE0,0x20,0x20,0x20,0x20,0x20,0x20,0xF8,0x00,0x00, # l 0x00,0x00,0x00,0x00,0x00,0xF0,0xA8,0xA8,0xA8,0xA8,0x00,0x00, # m 0x00,0x00,0x00,0x00,0x00,0xF0,0x48,0x48,0x48,0xEC,0x00,0x00, # n 0x00,0x00,0x00,0x00,0x00,0x30,0x48,0x48,0x48,0x30,0x00,0x00, # o 0x00,0x00,0x00,0x00,0x00,0xF0,0x48,0x48,0x48,0x70,0x40,0xE0, # p 0x00,0x00,0x00,0x00,0x00,0x38,0x48,0x48,0x48,0x38,0x08,0x1C, # q 0x00,0x00,0x00,0x00,0x00,0xD8,0x60,0x40,0x40,0xE0,0x00,0x00, # r 0x00,0x00,0x00,0x00,0x00,0x78,0x40,0x30,0x08,0x78,0x00,0x00, # s 0x00,0x00,0x00,0x20,0x20,0x70,0x20,0x20,0x20,0x18,0x00,0x00, # t 0x00,0x00,0x00,0x00,0x00,0xD8,0x48,0x48,0x48,0x3C,0x00,0x00, # u 0x00,0x00,0x00,0x00,0x00,0xEC,0x48,0x50,0x30,0x20,0x00,0x00, # v 0x00,0x00,0x00,0x00,0x00,0xA8,0xA8,0x70,0x50,0x50,0x00,0x00, # w 0x00,0x00,0x00,0x00,0x00,0xD8,0x50,0x20,0x50,0xD8,0x00,0x00, # x 0x00,0x00,0x00,0x00,0x00,0xEC,0x48,0x50,0x30,0x20,0x20,0xC0, # y 0x00,0x00,0x00,0x00,0x00,0x78,0x10,0x20,0x20,0x78,0x00,0x00, # z 0x00,0x18,0x10,0x10,0x10,0x20,0x10,0x10,0x10,0x10,0x18,0x00, # { 
0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10, # | 0x00,0x60,0x20,0x20,0x20,0x10,0x20,0x20,0x20,0x20,0x60,0x00, # } 0x40,0xA4,0x18,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, # ~ ]
56.825
76
0.725033
1,258
6,819
3.918124
0.091415
0.525867
0.423615
0.421992
0.736052
0.56198
0.403327
0.273483
0.213836
0.13309
0
0.532
0.083443
6,819
119
77
57.302521
0.25664
0.078897
0
0.020408
0
0
0
0
0
1
0.756239
0
0
1
0
false
0
0
0
0
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
1
0
0
0
0
0
1
0
0
0
0
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
7
f411385e97d884b04a33b463333dbc323e8f13d0
18,611
py
Python
opts.py
GibranBenitez/Real-time-GesRec
2c24ea75cb9373c310f3fe903ef9512da5d65385
[ "MIT" ]
null
null
null
opts.py
GibranBenitez/Real-time-GesRec
2c24ea75cb9373c310f3fe903ef9512da5d65385
[ "MIT" ]
null
null
null
opts.py
GibranBenitez/Real-time-GesRec
2c24ea75cb9373c310f3fe903ef9512da5d65385
[ "MIT" ]
null
null
null
import argparse def parse_opts_offline(): # Offline means not real time parser = argparse.ArgumentParser() parser.add_argument( '--root_path', default='/root/data/ActivityNet', type=str, help='Root directory path of data') parser.add_argument( '--video_path', default='video_kinetics_jpg', type=str, help='Directory path of Videos') parser.add_argument( '--annotation_path', default='kinetics.json', type=str, help='Annotation file path') parser.add_argument( '--result_path', default='results', type=str, help='Result directory path') parser.add_argument( '--store_name', default='model', type=str, help='Name to store checkpoints') parser.add_argument( '--modality', default='RGB', type=str, help='Modality of input data. RGB, Depth, or RGB-D') parser.add_argument( '--dataset', default='kinetics', type=str, help='Used dataset (activitynet | kinetics | ucf101 | hmdb51)') parser.add_argument( '--n_classes', default=400, type=int, help= 'Number of classes (activitynet: 200, kinetics: 400, ucf101: 101, hmdb51: 51)') parser.add_argument( '--n_finetune_classes', default=400, type=int, help= 'Number of classes for fine-tuning. n_classes is set to the number when pretraining.') parser.add_argument( '--sample_size', default=112, type=int, help='Height and width of inputs') parser.add_argument( '--sample_duration', default=16, type=int, help='Temporal duration of inputs') parser.add_argument( '--initial_scale', default=1.0, type=float, help='Initial scale for multiscale cropping') parser.add_argument( '--n_scales', default=5, type=int, help='Number of scales for multiscale cropping') parser.add_argument( '--scale_step', default=0.84089641525, type=float, help='Scale step for multiscale cropping') parser.add_argument( '--train_crop', default='corner', type=str, help= 'Spatial cropping method in training. random is uniform. corner is selection from 4 corners and 1 center. 
(random | corner | center)') parser.add_argument( '--learning_rate', default=0.1, type=float, help= 'Initial learning rate (divided by 10 while training by lr scheduler)') parser.add_argument( '--lr_steps', default=[10, 25, 50, 80, 100], type=float, nargs="+", metavar='LRSteps', help='epochs to decay learning rate by 10') parser.add_argument( '--momentum', default=0.9, type=float, help='Momentum') parser.add_argument( '--dampening', default=0.9, type=float, help='dampening of SGD') parser.add_argument( '--weight_decay', default=1e-3, type=float, help='Weight Decay') parser.add_argument( '--mean_dataset', default='activitynet', type=str, help= 'dataset for mean values of mean subtraction (activitynet | kinetics)') parser.add_argument( '--no_mean_norm', action='store_true', help='If true, inputs are not normalized by mean.') parser.set_defaults(no_mean_norm=False) parser.add_argument( '--std_norm', action='store_true', help='If true, inputs are normalized by standard deviation.') parser.set_defaults(std_norm=False) parser.add_argument( '--nesterov', action='store_true', help='Nesterov momentum') parser.set_defaults(nesterov=False) parser.add_argument( '--optimizer', default='sgd', type=str, help='Currently only support SGD') parser.add_argument( '--lr_patience', default=10, type=int, help='Patience of LR scheduler. See documentation of ReduceLROnPlateau.') parser.add_argument( '--batch_size', default=128, type=int, help='Batch Size') parser.add_argument( '--n_epochs', default=200, type=int, help='Number of total epochs to run') parser.add_argument( '--begin_epoch', default=1, type=int, help= 'Training begins at this epoch. 
Previous trained model indicated by resume_path is loaded.') parser.add_argument( '--n_val_samples', default=3, type=int, help='Number of validation samples for each activity') parser.add_argument( '--resume_path', default='', type=str, help='Save data (.pth) of previous training') parser.add_argument( '--pretrain_path', default='', type=str, help='Pretrained model (.pth)') parser.add_argument( '--pretrain_dataset', default='', type=str, help='dataset from pretrained model') parser.add_argument( '--ft_begin_index', default=0, type=int, help='Begin block index of fine-tuning') parser.add_argument( '--no_train', action='store_true', help='If true, training is not performed.') parser.set_defaults(no_train=False) parser.add_argument( '--no_val', action='store_true', help='If true, validation is not performed.') parser.set_defaults(no_val=False) parser.add_argument( '--test', action='store_true', help='If true, test is performed.') parser.set_defaults(test=False) parser.add_argument( '--test_subset', default='val', type=str, help='Used subset in test (val | test)') parser.add_argument( '--train_validate', action='store_true', help='If true, test is performed.') parser.set_defaults(train_validate=False) parser.add_argument( '--scale_in_test', default=1.0, type=float, help='Spatial scale in test') parser.add_argument( '--crop_position_in_test', default='c', type=str, help='Cropping method (c | tl | tr | bl | br) in test') parser.add_argument( '--no_softmax_in_test', action='store_true', help='If true, output for each clip is not normalized using softmax.') parser.set_defaults(no_softmax_in_test=False) parser.add_argument( '--no_cuda', action='store_true', help='If true, cuda is not used.') parser.set_defaults(no_cuda=False) parser.add_argument( '--n_threads', default=4, type=int, help='Number of threads for multi-thread loading') parser.add_argument( '--checkpoint', default=10, type=int, help='Trained model is saved at every this epochs.') parser.add_argument( 
'--no_hflip', action='store_true', help='If true holizontal flipping is not performed.') parser.set_defaults(no_hflip=False) parser.add_argument( '--norm_value', default=1, type=int, help= 'If 1, range of inputs is [0-255]. If 255, range of inputs is [0-1].') parser.add_argument( '--model', default='resnet', type=str, help='(resnet | preresnet | wideresnet | resnext | densenet | ') parser.add_argument( '--model_depth', default=18, type=int, help='Depth of resnet (10 | 18 | 34 | 50 | 101)') parser.add_argument( '--resnet_shortcut', default='B', type=str, help='Shortcut type of resnet (A | B)') parser.add_argument( '--wide_resnet_k', default=2, type=int, help='Wide resnet k') parser.add_argument( '--resnext_cardinality', default=32, type=int, help='ResNeXt cardinality') parser.add_argument( '--manual_seed', default=1, type=int, help='Manually set random seed') parser.add_argument( '--weighted', action='store_true', help='If true, loss is weighted') parser.set_defaults(weighted=False) args = parser.parse_args() return args def parse_opts_online(): # Real-time test arguments with detector and classifier architecture parser = argparse.ArgumentParser() parser.add_argument('--root_path', default='/root/data/ActivityNet', type=str, help='Root directory path of data') parser.add_argument('--video_path', default='video_kinetics_jpg', type=str, help='Directory path of Videos') parser.add_argument('--whole_path', default='video_kinetics_jpg', type=str, help='The whole path of Videos') parser.add_argument('--annotation_path', default='kinetics.json', type=str, help='Annotation file path') parser.add_argument('--result_path', default='results', type=str, help='Result directory path') parser.add_argument('--store_name', default='model', type=str, help='Name to store checkpoints') parser.add_argument('--modality', default='RGB', type=str, help='Modality of input data. 
RGB, Flow or RGBFlow') parser.add_argument('--modality_det', default='RGB', type=str, help='Modality of input data. RGB, Flow or RGBFlow') parser.add_argument('--modality_clf', default='RGB', type=str, help='Modality of input data. RGB, Flow or RGBFlow') parser.add_argument('--dataset', default='kinetics', type=str, help='Used dataset (activitynet | kinetics | ucf101 | hmdb51)') parser.add_argument('--n_classes_det', default=400, type=int, help='Number of classes (activitynet: 200, kinetics: 400, ucf101: 101, hmdb51: 51)') parser.add_argument('--n_finetune_classes_det', default=400, type=int, help='Number of classes for fine-tuning. n_classes is set to the number when pretraining.') parser.add_argument('--n_classes_clf', default=400, type=int, help='Number of classes (activitynet: 200, kinetics: 400, ucf101: 101, hmdb51: 51)') parser.add_argument('--n_finetune_classes_clf', default=400, type=int, help='Number of classes for fine-tuning. n_classes is set to the number when pretraining.') parser.add_argument('--n_classes', default=400, type=int, help='Number of classes (activitynet: 200, kinetics: 400, ucf101: 101, hmdb51: 51)') parser.add_argument('--n_finetune_classes', default=400, type=int, help='Number of classes for fine-tuning. 
n_classes is set to the number when pretraining.') parser.add_argument('--sample_size', default=112, type=int, help='Height and width of inputs') parser.add_argument('--sample_duration_det', default=16, type=int, help='Temporal duration of inputs') parser.add_argument('--sample_duration_clf', default=16, type=int, help='Temporal duration of inputs') parser.add_argument('--sample_duration', default=16, type=int, help='Temporal duration of inputs') parser.add_argument('--initial_scale', default=1.0, type=float, help='Initial scale for multiscale cropping') parser.add_argument('--n_scales', default=5, type=int, help='Number of scales for multiscale cropping') parser.add_argument('--scale_step', default=0.84089641525, type=float, help='Scale step for multiscale cropping') parser.add_argument('--train_crop', default='corner', type=str, help='Spatial cropping method in training. random is uniform. corner is selection from 4 corners and 1 center. (random | corner | center)') parser.add_argument('--learning_rate', default=0.1, type=float, help='Initial learning rate (divided by 10 while training by lr scheduler)') parser.add_argument('--lr_steps', default=[10, 20, 30, 40, 100], type=float, nargs="+", metavar='LRSteps', help='epochs to decay learning rate by 10') parser.add_argument('--momentum', default=0.9, type=float, help='Momentum') parser.add_argument('--dampening', default=0.9, type=float, help='dampening of SGD') parser.add_argument('--weight_decay', default=1e-3, type=float, help='Weight Decay') parser.add_argument('--mean_dataset', default='activitynet', type=str, help='dataset for mean values of mean subtraction (activitynet | kinetics)') parser.add_argument('--no_mean_norm', action='store_true', help='If true, inputs are not normalized by mean.') parser.set_defaults(no_mean_norm=False) parser.add_argument('--std_norm', action='store_true', help='If true, inputs are normalized by standard deviation.') parser.set_defaults(std_norm=False) 
parser.add_argument('--nesterov', action='store_true', help='Nesterov momentum') parser.set_defaults(nesterov=False) parser.add_argument('--optimizer', default='sgd', type=str, help='Currently only support SGD') parser.add_argument('--lr_patience', default=10, type=int, help='Patience of LR scheduler. See documentation of ReduceLROnPlateau.') parser.add_argument('--batch_size', default=128, type=int, help='Batch Size') parser.add_argument('--n_epochs', default=200, type=int, help='Number of total epochs to run') parser.add_argument('--begin_epoch', default=1, type=int, help='Training begins at this epoch. Previous trained model indicated by resume_path is loaded.') parser.add_argument('--n_val_samples', default=3, type=int, help='Number of validation samples for each activity') parser.add_argument('--resume_path_det', default='', type=str, help='Save data (.pth) of previous training') parser.add_argument('--resume_path_clf', default='', type=str, help='Save data (.pth) of previous training') parser.add_argument('--resume_path', default='', type=str, help='Save data (.pth) of previous training') parser.add_argument('--pretrain_path_det', default='', type=str, help='Pretrained model (.pth)') parser.add_argument('--pretrain_path_clf', default='', type=str, help='Pretrained model (.pth)') parser.add_argument('--pretrain_path', default='', type=str, help='Pretrained model (.pth)') parser.add_argument('--ft_begin_index', default=0, type=int, help='Begin block index of fine-tuning') parser.add_argument('--no_train', action='store_true', help='If true, training is not performed.') parser.set_defaults(no_train=False) parser.add_argument('--no_val', action='store_true', help='If true, validation is not performed.') parser.set_defaults(no_val=False) parser.add_argument('--test', action='store_true', help='If true, test is performed.') parser.set_defaults(test=True) parser.add_argument('--test_subset', default='val', type=str, help='Used subset in test (val | test)') 
parser.add_argument('--scale_in_test', default=1.0, type=float, help='Spatial scale in test') parser.add_argument('--crop_position_in_test', default='c', type=str, help='Cropping method (c | tl | tr | bl | br) in test') parser.add_argument('--no_softmax_in_test', action='store_true', help='If true, output for each clip is not normalized using softmax.') parser.set_defaults(no_softmax_in_test=False) parser.add_argument('--no_cuda', action='store_true', help='If true, cuda is not used.') parser.set_defaults(no_cuda=False) parser.add_argument('--n_threads', default=4, type=int, help='Number of threads for multi-thread loading') parser.add_argument('--checkpoint', default=10, type=int, help='Trained model is saved at every this epochs.') parser.add_argument('--no_hflip', action='store_true', help='If true holizontal flipping is not performed.') parser.set_defaults(no_hflip=False) parser.add_argument('--norm_value', default=1, type=int, help='If 1, range of inputs is [0-255]. If 255, range of inputs is [0-1].') parser.add_argument('--model_det', default='resnet', type=str, help='(resnet | preresnet | wideresnet | resnext | densenet | ') parser.add_argument('--model_depth_det', default=18, type=int, help='Depth of resnet (10 | 18 | 34 | 50 | 101)') parser.add_argument('--resnet_shortcut_det', default='B', type=str, help='Shortcut type of resnet (A | B)') parser.add_argument('--wide_resnet_k_det', default=2, type=int, help='Wide resnet k') parser.add_argument('--resnext_cardinality_det', default=32, type=int, help='ResNeXt cardinality') parser.add_argument('--model', default='resnet', type=str, help='(resnet | preresnet | wideresnet | resnext | densenet | ') parser.add_argument('--model_depth', default=18, type=int, help='Depth of resnet (10 | 18 | 34 | 50 | 101)') parser.add_argument('--resnet_shortcut', default='B', type=str, help='Shortcut type of resnet (A | B)') parser.add_argument('--wide_resnet_k', default=2, type=int, help='Wide resnet k') 
parser.add_argument('--resnext_cardinality', default=32, type=int, help='ResNeXt cardinality') parser.add_argument('--model_clf', default='resnet', type=str, help='(resnet | preresnet | wideresnet | resnext | densenet | ') parser.add_argument('--model_depth_clf', default=18, type=int, help='Depth of resnet (10 | 18 | 34 | 50 | 101)') parser.add_argument('--resnet_shortcut_clf', default='B', type=str, help='Shortcut type of resnet (A | B)') parser.add_argument('--wide_resnet_k_clf', default=2, type=int, help='Wide resnet k') parser.add_argument('--resnext_cardinality_clf', default=32, type=int, help='ResNeXt cardinality') parser.add_argument('--manual_seed', default=1, type=int, help='Manually set random seed') parser.add_argument('--det_strategy', default='raw', type=str, help='Detector filter (raw | median | ma | ewma)') parser.add_argument('--det_queue_size', default=1, type=int, help='Detector queue size') parser.add_argument('--det_counter', default=1, type=float, help='Number of consequtive detection') parser.add_argument('--clf_strategy', default='raw', type=str, help='Classifier filter (raw | median | ma | ewma)') parser.add_argument('--clf_queue_size', default=1, type=int, help='Classifier queue size') parser.add_argument('--clf_threshold_pre', default=1, type=float, help='Cumulative sum threshold to prepredict') parser.add_argument('--clf_threshold_final', default=1, type=float, help='Cumulative sum threshold to predict at the end') parser.add_argument('--stride_len', default=1, type=int, help='Stride Lenght of video loader window') args = parser.parse_args() return args
47.356234
208
0.656117
2,424
18,611
4.881601
0.108086
0.10344
0.195386
0.032114
0.95428
0.942618
0.929688
0.924956
0.915322
0.907716
0
0.021529
0.20633
18,611
392
209
47.477041
0.779568
0.005105
0
0.40107
0
0.010695
0.415244
0.018096
0
0
0
0
0
1
0.005348
false
0
0.002674
0
0.013369
0
0
0
0
null
0
1
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
be481928df6b1e48d3dc926e3fe7884c72edc0b1
155
py
Python
loldib/getratings/models/NA/na_xinzhao/__init__.py
koliupy/loldib
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
[ "Apache-2.0" ]
null
null
null
loldib/getratings/models/NA/na_xinzhao/__init__.py
koliupy/loldib
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
[ "Apache-2.0" ]
null
null
null
loldib/getratings/models/NA/na_xinzhao/__init__.py
koliupy/loldib
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
[ "Apache-2.0" ]
null
null
null
from .na_xinzhao_top import * from .na_xinzhao_jng import * from .na_xinzhao_mid import * from .na_xinzhao_bot import * from .na_xinzhao_sup import *
25.833333
30
0.774194
25
155
4.4
0.36
0.272727
0.590909
0.690909
0
0
0
0
0
0
0
0
0.16129
155
5
31
31
0.846154
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
8
be4fca39aa784c0d8b2778c533ae7c57ff827b2f
6,078
py
Python
lib/models/prop_modules/sparse.py
CFM-MSG/Code_CDN
b7d21cd4234ef55443dd60d7c48166085f93253c
[ "MIT" ]
4
2022-01-20T13:52:55.000Z
2022-03-30T08:46:51.000Z
lib/models/prop_modules/sparse.py
CFM-MSG/Code_CDN
b7d21cd4234ef55443dd60d7c48166085f93253c
[ "MIT" ]
1
2022-03-09T07:10:54.000Z
2022-03-09T07:10:54.000Z
lib/models/prop_modules/sparse.py
CFM-MSG/Code_CDN
b7d21cd4234ef55443dd60d7c48166085f93253c
[ "MIT" ]
null
null
null
import torch from torch import nn class SparsePropMaxPool(nn.Module): def __init__(self, cfg): super(SparsePropMaxPool, self).__init__() self.num_scale_layers = cfg.NUM_SCALE_LAYERS self.layers = nn.ModuleList() for scale_idx, num_layer in enumerate(self.num_scale_layers): scale_layers = nn.ModuleList() first_layer = nn.MaxPool1d(1, 1) if scale_idx == 0 else nn.MaxPool1d(3, 2) rest_layers = [nn.MaxPool1d(2, 1) for _ in range(1, num_layer)] scale_layers.extend([first_layer] + rest_layers) self.layers.append(scale_layers) def forward(self, x): # batchsize * 512 * 16 map_h_list = [] map_mask_list = [] for scale_idx, scale_layers in enumerate(self.layers): batch_size, hidden_size, num_scale_clips = x.shape num_scale_clips = num_scale_clips // scale_layers[0].stride # 16 map_h = x.new_zeros(batch_size, hidden_size, num_scale_clips, num_scale_clips) # batchsize * 512 * 16 *16 map_mask = x.new_zeros(batch_size, 1, num_scale_clips, num_scale_clips) # batchsize * 1 * 16 *16 for i, layer in enumerate(scale_layers): try: x = layer(x) except: pass scale_s_idxs = list(range(0, num_scale_clips - i, 1)) scale_e_idxs = [s_idx + i for s_idx in scale_s_idxs] map_h[:, :, scale_s_idxs, scale_e_idxs] = x map_mask[:, :, scale_s_idxs, scale_e_idxs] = 1 map_h_list.append(map_h) map_mask_list.append(map_mask) ori_map_h, ori_map_mask = self.recover_to_original_map(map_h_list, map_mask_list) return ori_map_h, ori_map_mask def recover_to_original_map(self, h_list, mask_list): # resize to original scale batch_size, hidden_size, ori_num_clips, _ = h_list[0].shape # batchsize * 512 * 16 *16 ori_map_h = h_list[0].new_zeros(batch_size, hidden_size, ori_num_clips, ori_num_clips) # batchsize * 512 * 16 *16 ori_map_mask = mask_list[0].new_zeros(batch_size, 1, ori_num_clips, ori_num_clips) # batchsize * 1 * 16 *16 acum_layers = 0 stride = 1 for scale_layers, h, mask in zip(self.layers, h_list, mask_list): num_scale_clips = h.shape[-1] # 16 for i, layer in enumerate(scale_layers): stride = stride * 
layer.stride # 1 scale_s_idxs = list(range(0, num_scale_clips - i, 1)) scale_e_idxs = [s_idx + i for s_idx in scale_s_idxs] ori_s_idxs = list(range(0, ori_num_clips - acum_layers - i * stride, stride)) ori_e_idxs = [s_idx + acum_layers + i * stride for s_idx in ori_s_idxs] ori_map_h[:, :, ori_s_idxs, ori_e_idxs] = h[:, :, scale_s_idxs, scale_e_idxs] ori_map_mask[:, :, ori_s_idxs, ori_e_idxs] = 1 acum_layers += stride * (len(scale_layers) + 1) return ori_map_h, ori_map_mask class SparsePropConv(nn.Module): def __init__(self, cfg): super(SparsePropConv, self).__init__() self.num_scale_layers = cfg.NUM_SCALE_LAYERS self.hidden_size = cfg.HIDDEN_SIZE self.layers = nn.ModuleList() for scale_idx, num_layer in enumerate(self.num_scale_layers): scale_layers = nn.ModuleList() first_layer = nn.Conv1d(self.hidden_size, self.hidden_size, 1, 1) if scale_idx == 0 else nn.Conv1d( self.hidden_size, self.hidden_size, 3, 2) rest_layers = [nn.Conv1d(self.hidden_size, self.hidden_size, 2, 1) for _ in range(1, num_layer)] scale_layers.extend([first_layer] + rest_layers) self.layers.append(scale_layers) def forward(self, x): map_h_list = [] map_mask_list = [] for scale_idx, scale_layers in enumerate(self.layers): batch_size, hidden_size, num_scale_clips = x.shape # batchsize * 512 * 16 num_scale_clips = num_scale_clips // scale_layers[0].stride[0] map_h = x.new_zeros(batch_size, hidden_size, num_scale_clips, num_scale_clips) map_mask = x.new_zeros(batch_size, 1, num_scale_clips, num_scale_clips) for i, layer in enumerate(scale_layers): x = layer(x) scale_s_idxs = list(range(0, num_scale_clips - i, 1)) scale_e_idxs = [s_idx + i for s_idx in scale_s_idxs] map_h[:, :, scale_s_idxs, scale_e_idxs] = x map_mask[:, :, scale_s_idxs, scale_e_idxs] = 1 map_h_list.append(map_h) map_mask_list.append(map_mask) ori_map_h, ori_map_mask = self.recover_to_original_map(map_h_list, map_mask_list) return ori_map_h, ori_map_mask def recover_to_original_map(self, h_list, mask_list): # resize to original 
scale batch_size, hidden_size, ori_num_clips, _ = h_list[0].shape ori_map_h = h_list[0].new_zeros(batch_size, hidden_size, ori_num_clips, ori_num_clips) ori_map_mask = mask_list[0].new_zeros(batch_size, 1, ori_num_clips, ori_num_clips) acum_layers = 0 stride = 1 for scale_layers, h, mask in zip(self.layers, h_list, mask_list): num_scale_clips = h.shape[-1] for i, layer in enumerate(scale_layers): stride = stride * layer.stride[0] scale_s_idxs = list(range(0, num_scale_clips - i, 1)) scale_e_idxs = [s_idx + i for s_idx in scale_s_idxs] ori_s_idxs = list(range(0, ori_num_clips - acum_layers - i * stride, stride)) ori_e_idxs = [s_idx + acum_layers + i * stride for s_idx in ori_s_idxs] ori_map_h[:, :, ori_s_idxs, ori_e_idxs] = h[:, :, scale_s_idxs, scale_e_idxs] ori_map_mask[:, :, ori_s_idxs, ori_e_idxs] = 1 acum_layers += stride * (len(scale_layers) + 1) return ori_map_h, ori_map_mask
47.858268
118
0.61665
908
6,078
3.72467
0.07489
0.061502
0.076878
0.044944
0.937907
0.925784
0.918983
0.883205
0.830869
0.830869
0
0.024063
0.288911
6,078
127
119
47.858268
0.758445
0.036196
0
0.784314
0
0
0
0
0
0
0
0
0
1
0.058824
false
0.009804
0.019608
0
0.137255
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
be84e24a0baef38a6c7b02954d315c851f684c78
210
py
Python
CommitLink.py
everydaydaniel/test_repo
50512a26188628db8d5a64454bd42c1e99b30511
[ "Apache-2.0" ]
null
null
null
CommitLink.py
everydaydaniel/test_repo
50512a26188628db8d5a64454bd42c1e99b30511
[ "Apache-2.0" ]
6
2019-02-14T16:50:59.000Z
2019-02-28T20:20:51.000Z
CommitLink.py
everydaydaniel/test_repo
50512a26188628db8d5a64454bd42c1e99b30511
[ "Apache-2.0" ]
null
null
null
print("commit Link") print("commit Link") print("commit Link") print("commit Link") print("commit Link") print("commit Link") print("commit Link") print("commit Link") print("commit Link") print("commit Link")
19.090909
20
0.714286
30
210
5
0.1
0.733333
1
1.2
1
1
1
1
1
1
0
0
0.095238
210
11
21
19.090909
0.789474
0
0
1
0
0
0.521327
0
0
0
0
0
0
1
0
true
0
0
0
0
1
0
0
0
null
1
1
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
12
be99e1f654e55594e62fbc5be529c47a24c623de
20,639
py
Python
sms/install_fixtures.py
ashish-greycube/sms
a5cb615013c9491acfc23342095fbfd2cd097ba6
[ "MIT" ]
null
null
null
sms/install_fixtures.py
ashish-greycube/sms
a5cb615013c9491acfc23342095fbfd2cd097ba6
[ "MIT" ]
null
null
null
sms/install_fixtures.py
ashish-greycube/sms
a5cb615013c9491acfc23342095fbfd2cd097ba6
[ "MIT" ]
2
2021-12-10T11:33:19.000Z
2022-02-02T20:26:27.000Z
# -*- coding: utf-8 -*- # Copyright (c) 2020, GreyCube Technologies and contributors # For license information, please see license.txt from __future__ import unicode_literals import frappe from frappe.desk.page.setup_wizard.setup_wizard import make_records def install_fixtures(): records = [ { "_liked_by": "", "channel": "SMS", "condition": "", "date_changed": "", "days_in_advance": 0, "docstatus": 0, "doctype": "SMS Notification", "document_type": "Sales Order", "enabled": 1, "event": "Submit", "interval": "", "is_standard": 0, "message": "<p><i>Dear {{doc.customer}},<br><br>\n\nThank you for your reservation at Sunfit.<br><br> \n\nBooking details<br><br>\n\nGuest Name: {{doc.guest_cf}}<br>\nArrival Date: {{doc.check_in_cf}}<br>\nRoom Type: {{doc.room_type_cf}}<br>\nRate/Night:{{doc.room_rate_cf}}<br><br>\n\nPayment Details<br><br>\n\nSunfit International Ltd<br>\nFidelity Bank<br>\nAcct No.: 4010480839</i></p>", "method": "", "modified": "2021-01-20 09:36:50.369249", "module": "", "name": "Reservation Confirmation", "parent": "", "parentfield": "", "parenttype": "", "property_value": "", "recipients": [ { "condition": "", "parent": "Reservation Confirmation", "parentfield": "recipients", "parenttype": "SMS Notification", "receiver_by_document_field": "contact_mobile", "receiver_by_role": "" } ], "send_system_notification": 0, "set_property_after_alert": "", "subject": "", "value_changed": "" }, { "_liked_by": "", "channel": "SMS", "condition": "", "date_changed": "", "days_in_advance": 0, "docstatus": 0, "doctype": "SMS Notification", "document_type": "Client Appointment", "enabled": 1, "event": "New", "interval": "", "is_standard": 0, "message": "<p><i>Dear {{doc.customer_name}},<br><br>\n\nThank you for your spa reservation at Sunfit.<br><br> \n\nBooking details<br><br>\n\nName: {{doc.customer_name}}<br>\nTreatment Type: {{doc.service}}<br>\nBooking Date and Time: {{doc.scheduled_from_date}}<br><br>\n\nPayment Details<br><br>\n\nSunfit International 
Ltd<br>\nFidelity Bank<br>\nAcct No.: 4010480839 </i></p>", "method": "", "modified": "2021-01-20 09:36:50.194059", "module": "", "name": "Spa Booking Confirmation", "parent": "", "parentfield": "", "parenttype": "", "property_value": "", "recipients": [ { "condition": "", "parent": "Spa Booking Confirmation", "parentfield": "recipients", "parenttype": "SMS Notification", "receiver_by_document_field": "customer_phone_number", "receiver_by_role": "" } ], "send_system_notification": 0, "set_property_after_alert": "", "subject": "", "value_changed": "" }, { "_liked_by": "", "channel": "SMS", "condition": "doc.status==\"Cancelled\" and doc.facility in ['Spa-Male','Spa-Female']", "date_changed": "", "days_in_advance": 0, "docstatus": 0, "doctype": "SMS Notification", "document_type": "Client Appointment", "enabled": 1, "event": "Save", "interval": "", "is_standard": 0, "message": "<p><i>Dear {{doc.customer_name}},<br><br>\n\nYour spa booking has been cancelled.<br><br> \n\nIf you would like to make a new booking or have any inquiries, please contact our 24 hours reception desk on 08096320300, 08096320302; or book online at www.sunfitltd.com/book_services</i></p>", "method": "", "modified": "2021-01-20 09:36:49.878867", "module": "", "name": "Your booking has been Cancelled", "parent": "", "parentfield": "", "parenttype": "", "property_value": "", "recipients": [ { "condition": "", "parent": "Your booking has been Cancelled", "parentfield": "recipients", "parenttype": "SMS Notification", "receiver_by_document_field": "customer_phone_number", "receiver_by_role": "" } ], "send_system_notification": 0, "set_property_after_alert": "", "subject": "", "value_changed": "" }, { "_liked_by": "", "channel": "SMS", "condition": "", "date_changed": "", "days_in_advance": 0, "docstatus": 0, "doctype": "SMS Notification", "document_type": "Sales Order", "enabled": 1, "event": "Cancel", "interval": "", "is_standard": 0, "message": "<p><i>Dear {{doc.customer}},<br><br>\n\nYour 
reservation at Sunfit has been cancelled.<br><br> \n\nIf you would like to make a new booking or have any inquiries, please contact our 24 hours reception desk on 08096320300, 08096320302; or book online at www.sunfitltd.com/room_booking</p></i>", "method": "", "modified": "2021-01-20 09:36:50.322663", "module": "", "name": "Your Reservation has been Cancelled", "parent": "", "parentfield": "", "parenttype": "", "property_value": "", "recipients": [ { "condition": "", "parent": "Your Reservation has been Cancelled", "parentfield": "recipients", "parenttype": "SMS Notification", "receiver_by_document_field": "contact_mobile", "receiver_by_role": "" } ], "send_system_notification": 0, "set_property_after_alert": "", "subject": "", "value_changed": "" }, { "_liked_by": "", "channel": "SMS", "condition": "", "date_changed": "", "days_in_advance": 0, "docstatus": 0, "doctype": "SMS Notification", "document_type": "Room Folio HMS", "enabled": 1, "event": "New", "interval": "", "is_standard": 0, "message": "<p><i>Dear {{doc.room_guest_detail[0].first_name}}<br><br>\n\nWe are delighted to have you at SUNFIT.<br><br>\n\nOur service hours are:<br><br>\n- Gym: 5am to 11pm<br>\n- Spa: 8am to 10pm<br>\n- Pool: 8am to 8pm<br>\n- Salon & Events: On request<br><br>\n\nPlease call the reception on 111 for any assistance needed. 
For feedback, please call the Manager on 08096320250.<br><br>\n\nWe wish you a very pleasant stay!!!</i></p>", "method": "", "modified": "2021-01-20 09:36:48.798072", "module": "", "name": "Welcome Letter", "parent": "", "parentfield": "", "parenttype": "", "property_value": "", "recipients": [ { "condition": "", "parent": "Welcome Letter", "parentfield": "recipients", "parenttype": "SMS Notification", "receiver_by_document_field": "customer_mobile", "receiver_by_role": "" } ], "send_system_notification": 0, "set_property_after_alert": "", "subject": "", "value_changed": "" }, { "_liked_by": "", "channel": "SMS", "condition": "", "date_changed": "", "days_in_advance": 0, "docstatus": 0, "doctype": "SMS Notification", "document_type": "Payment Entry", "enabled": 1, "event": "Custom", "interval": "", "is_standard": 0, "message": "<p><i>Dear {{doc.party_name}},<br><br>\n\nYour Club Membership has been activated.<br><br> \n\nStart Date: {{current_invoice_start}}<br> \nEnd Date: {{current_invoice_end}}<br><br>\n\n\nYour login ID is: {{member_web_login_id}}.<br><br> \nClick on the link below to access your membership page.<br><br>\n\n<a href =\"{{ frappe.get_url()}}/membership?membership={{name}}\">My Club Membership - Link</a><br><br>\n\nFor feedback, please call the Manager, Tonia on 08096320250.<br><br>\n\nWelcome to SUNFIT!!!</i></p>", "method": "", "modified": "2021-01-20 09:36:48.708601", "module": "", "name": "membership_active", "parent": "", "parentfield": "", "parenttype": "", "property_value": "", "recipients": [ { "condition": "", "parent": "membership_active", "parentfield": "recipients", "parenttype": "SMS Notification", "receiver_by_document_field": "contact_mobile", "receiver_by_role": "" } ], "send_system_notification": 0, "set_property_after_alert": "", "subject": "", "value_changed": "" }, { "_liked_by": "", "channel": "SMS", "condition": "doc.payment_type == \"Receive\"", "date_changed": "", "days_in_advance": 0, "docstatus": 0, "doctype": "SMS 
Notification", "document_type": "Payment Entry", "enabled": 1, "event": "Submit", "interval": "", "is_standard": 0, "message": "<p><i>Thank you for your patronage.<br><br>\n\nYour payment of {{doc.paid_amount}} has been received.<br><br>\n\nFor feedback and enquiries, please call our 24 hours reception desk on 08096320250. Or contact our marketing unit, Esther on 0809589089</i></p>", "method": "", "modified": "2021-01-20 09:36:50.075254", "module": "", "name": "Payment Receipt", "parent": "", "parentfield": "", "parenttype": "", "property_value": "", "recipients": [ { "condition": "", "parent": "Payment Receipt", "parentfield": "recipients", "parenttype": "SMS Notification", "receiver_by_document_field": "contact_mobile", "receiver_by_role": "" } ], "send_system_notification": 0, "set_property_after_alert": "", "subject": "", "value_changed": "" }, { "_liked_by": "", "channel": "SMS", "condition": "", "date_changed": "", "days_in_advance": 0, "docstatus": 0, "doctype": "SMS Notification", "document_type": "Payment Entry", "enabled": 1, "event": "Cancel", "interval": "", "is_standard": 0, "message": "<p><i>Thank you for your patronage.\n\nYour payment of {{doc.paid_amount}} has been cancelled. \n\nFor feedback and enquiries, please call our 24 hours reception desk on 08096320250. 
Or contact our marketing unit, Esther on 0809589089</i></p>", "method": "", "modified": "2021-01-20 09:36:50.144595", "module": "", "name": "Payment Cancellation", "parent": "", "parentfield": "", "parenttype": "", "property_value": "", "recipients": [ { "condition": "", "parent": "Payment Cancellation", "parentfield": "recipients", "parenttype": "SMS Notification", "receiver_by_document_field": "contact_mobile", "receiver_by_role": "" } ], "send_system_notification": 0, "set_property_after_alert": "", "subject": "", "value_changed": "" }, { "_liked_by": "", "channel": "SMS", "condition": "", "date_changed": "birth_date_cf", "days_in_advance": 0, "docstatus": 0, "doctype": "SMS Notification", "document_type": "Contact", "enabled": 1, "event": "Custom", "interval": "", "is_standard": 0, "message": "<p><i>Dear {{doc.first_name}},<br><br>\n\nWe all at Sunfit are extremely excited to celebrate this special day with you.<br><br>\n\nMay your days be filled with prosperity, great health and above all joy in its truest and purest form.<br><br>\t\n\nHappy Birthday!!!</i></p>", "method": "", "modified": "2021-01-20 09:36:48.864047", "module": "", "name": "birthday_reminder", "parent": "", "parentfield": "", "parenttype": "", "property_value": "", "recipients": [ { "condition": "", "parent": "birthday_reminder", "parentfield": "recipients", "parenttype": "SMS Notification", "receiver_by_document_field": "mobile_no", "receiver_by_role": "" } ], "send_system_notification": 0, "set_property_after_alert": "", "subject": "", "value_changed": "" }, { "_liked_by": "", "channel": "SMS", "condition": "", "date_changed": "", "days_in_advance": 0, "docstatus": 0, "doctype": "SMS Notification", "document_type": "Membership Suspension CT", "enabled": 1, "event": "Submit", "interval": "", "is_standard": 0, "message": "<p><i>Dear {{doc.customer}},<br><br>\n\nYour Club Membership has been suspended from {{doc.from_date}} to {{doc.to_date}}. 
Your membership renewal date will now be {{frappe.format_date(new_membership_end_date(doc.name))}}\n.<br><br>\n\nFor modifications, kindly contact Esther on 0809589089<br><br>\n\nStay Active, Live Healthy & Love Life!!!</i></p>", "method": "", "modified": "2021-01-20 09:36:50.279180", "module": "", "name": "Your Membership has been Suspended", "parent": "", "parentfield": "", "parenttype": "", "property_value": "", "recipients": [ { "condition": "", "parent": "Your Membership has been Suspended", "parentfield": "recipients", "parenttype": "SMS Notification", "receiver_by_document_field": "customer_mobile_no", "receiver_by_role": "" } ], "send_system_notification": 0, "set_property_after_alert": "", "subject": "", "value_changed": "" }, { "_liked_by": "", "channel": "SMS", "condition": "", "date_changed": "current_invoice_end", "days_in_advance": 7, "docstatus": 0, "doctype": "SMS Notification", "document_type": "Membership CT", "enabled": 1, "event": "Days Before", "interval": "", "is_standard": 0, "message": "<p><i>Dear {{doc.customer}},<br><br>\n\nYour Club Membership will expire on {{doc.current_invoice_end}}.<br><br> \n\nFor assistance, kindly contact Esther on 0809589089<br><br>\n\nStay Active, Live Healthy & Love Life!!!</p></i>", "method": "", "modified": "2021-01-20 09:36:48.955285", "module": "", "name": "Your Membership is about to Expire.", "parent": "", "parentfield": "", "parenttype": "", "property_value": "", "recipients": [ { "condition": "", "parent": "Your Membership is about to Expire.", "parentfield": "recipients", "parenttype": "SMS Notification", "receiver_by_document_field": "customer_mobile_no", "receiver_by_role": "" } ], "send_system_notification": 0, "set_property_after_alert": "", "subject": "", "value_changed": "" }, { "_liked_by": "", "channel": "SMS", "condition": "", "date_changed": "", "days_in_advance": 0, "docstatus": 0, "doctype": "SMS Notification", "document_type": "Payment Entry", "enabled": 1, "event": "Custom", "interval": 
"", "is_standard": 0, "message": "<p><i>Dear {{doc.party_name}},<br><br>\n\nYour Club Membership has been renewed from {{current_invoice_start}} to {{current_invoice_end}}.<br><br> \n\nPlease log in below to view your membership profile.<br><br>\n<a href =\"{{ frappe.get_url()}}/membership?membership={{name}}\">Client Portal \u2013 Link</a><br><br>\n\nStay Active, Live Healthy & Love Life!!!</i></p>", "method": "", "modified": "2021-01-20 09:36:48.910014", "module": "", "name": "membership_renewal", "parent": "", "parentfield": "", "parenttype": "", "property_value": "", "recipients": [ { "condition": "", "parent": "membership_renewal", "parentfield": "recipients", "parenttype": "SMS Notification", "receiver_by_document_field": "contact_mobile", "receiver_by_role": "" } ], "send_system_notification": 0, "set_property_after_alert": "", "subject": "", "value_changed": "current_invoice_start" }, { "channel": "SMS", "condition": "doc.status==\"Active Paid\"", "date_changed": "last_accessed_on", "days_in_advance": 14, "docstatus": 0, "doctype": "SMS Notification", "document_type": "Membership CT", "enabled": 1, "event": "Condition Days After", "interval": "Every 15 days", "is_standard": 0, "message": "<p><i>Dear {{doc.customer}},<br><br>\n\nWe haven\u2019t seen you in a while; we\u2019re just checking in.<br><br> \n\nIf you need any assistance, or have any feedback or enquiries, please contact our marketing manager, Esther on 0809589089 or the Manager, Tonia on 0809 632 0250.<br><br>\n\nStay Active, Live Healthy and Love Life!!!</i></p>", "method": "", "modified": "2021-01-20 09:36:50.410161", "module": "", "name": "Just Checking In", "parent": "", "parentfield": "", "parenttype": "", "property_value": "", "recipients": [ { "condition": "", "parent": "Just Checking In", "parentfield": "recipients", "parenttype": "SMS Notification", "receiver_by_document_field": "contact_mobile", "receiver_by_role": "" } ], "send_system_notification": 0, "set_property_after_alert": "", 
"subject": "", "value_changed": "" }, { "channel": "SMS", "condition": "doc.customer_group == \"Club Member\" and doc.disabled == 0", "date_changed": "last_fitness_checked_on", "days_in_advance": 24, "docstatus": 0, "doctype": "SMS Notification", "document_type": "Customer", "enabled": 1, "event": "Condition Days After", "interval": "25th of every month", "is_standard": 0, "message": "<p><i>Dear {{doc.customer_name}},<br><br>\n\t\t\nIn our busy lives, we may forget to take care of \u201cour health\u201d.<br><br> \n\nThis is to remind you to check your Blood Pressure today.<br><br> \n\nStay Active, Live Healthy and Love Life!!!</i></p>", "method": "", "modified": "2021-01-20 09:36:50.458074", "module": "", "name": "Reminder: Check your Blood Pressure", "parent": "", "parentfield": "", "parenttype": "", "property_value": "", "recipients": [ { "condition": "", "parent": "Reminder: Check your Blood Pressure", "parentfield": "recipients", "parenttype": "SMS Notification", "receiver_by_document_field": "customer_mobile_no", "receiver_by_role": "" } ], "send_system_notification": 0, "set_property_after_alert": "", "subject": "", "value_changed": "" }, { "channel": "SMS", "condition": "doc.customer_group == \"Club Member\" and doc.disabled == 0", "date_changed": "last_fitness_checked_on", "days_in_advance": 89, "docstatus": 0, "doctype": "SMS Notification", "document_type": "Customer", "enabled": 1, "event": "Condition Days After", "interval": "Every 3 months", "is_standard": 0, "message": "<p><i>Dear{{doc.customer_name}},<br><br>\n\t\t\nRemember to update your fitness metrics on your next visit to the gym.<br><br>\n\nLog in below to view your membership profile.<br><br>\n<a href =\"{{ frappe.get_url() }}+'/membership?membership='+{{ doc.name }}\">Client Portal - Link</a><br><br>\n\nStay Active, Live Healthy and Love Life!!!</i></p>", "method": "", "modified": "2021-01-20 09:36:50.529947", "module": "", "name": "Update Your Fitness Metrics", "parent": "", "parentfield": 
"", "parenttype": "", "property_value": "", "recipients": [ { "condition": "", "parent": "Update Your Fitness Metrics", "parentfield": "recipients", "parenttype": "SMS Notification", "receiver_by_document_field": "customer_mobile_no", "receiver_by_role": "" } ], "send_system_notification": 0, "set_property_after_alert": "", "subject": "", "value_changed": "" }, { "_liked_by": "", "channel": "SMS", "condition": "doc.amended_from is not None", "date_changed": "", "days_in_advance": 0, "docstatus": 0, "doctype": "SMS Notification", "document_type": "Sales Order", "enabled": 1, "event": "Submit", "interval": "", "is_standard": 0, "message": "<p><i>Dear {{doc.customer}},<br><br>\n\nYour reservation has been modified as follows:<br><br>\n\n<b>Booking details</b><br><br>\n\nGuest Name: {{doc.guest_cf}}<br>\nArrival Date: {{doc.check_in_cf}}<br>\nRoom Type: {{doc.room_type_cf}}<br>\nRate/Night: {{doc.room_rate_cf}}</i></p>", "method": "", "modified": "2021-01-20 09:36:49.930795", "module": "", "name": "Your Reservation has been Modified", "parent": "", "parentfield": "", "parenttype": "", "property_value": "", "recipients": [ { "condition": "", "parent": "Your Reservation has been Modified", "parentfield": "recipients", "parenttype": "SMS Notification", "receiver_by_document_field": "contact_mobile", "receiver_by_role": "" } ], "send_system_notification": 0, "set_property_after_alert": "", "subject": "", "value_changed": "" }, { "_liked_by": "", "channel": "SMS", "condition": "doc.status==\"Rescheduled\" and doc.facility in ['Spa-Male','Spa-Female']", "date_changed": "", "days_in_advance": 0, "docstatus": 0, "doctype": "SMS Notification", "document_type": "Client Appointment", "enabled": 1, "event": "Save", "interval": "", "is_standard": 0, "message": "<p><i>Dear {{doc.customer_name}},<br><br>\n\nYour spa booking has been modified as follows:<br><br>\n\nBooking details<br><br>\n\nName:{{doc.customer_name}}<br>\nTreatment Type: {{doc.service}}<br>\nBooking Date and Time: 
{{doc.scheduled_from_date}}</i></p>", "method": "", "modified": "2021-01-20 09:36:48.992879", "module": "", "name": "Your Spa Booking has been Rescheduled", "parent": "", "parentfield": "", "parenttype": "", "property_value": "", "recipients": [ { "condition": "", "parent": "Your Spa Booking has been Rescheduled", "parentfield": "recipients", "parenttype": "SMS Notification", "receiver_by_document_field": "customer_phone_number", "receiver_by_role": "" } ], "send_system_notification": 0, "set_property_after_alert": "", "subject": "", "value_changed": "scheduled_from_date" } ] make_records(records) frappe.db.commit()
32.248438
529
0.626145
2,490
20,639
5.015663
0.145382
0.013932
0.022019
0.027224
0.81856
0.794379
0.773481
0.770918
0.736648
0.675154
0
0.037514
0.164349
20,639
640
530
32.248438
0.686612
0.006202
0
0.736593
0
0.031546
0.693242
0.165204
0
0
0
0
0
1
0.001577
false
0
0.004732
0
0.006309
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
8
bea432733b9c5effec821a7f24bed4f36d0977a4
3,338
py
Python
Zhangjiashan_dwt/projects/generate_samples.py
zjy8006/MonthlyRunoffForecastByAutoReg
661fcb5dcdfbbb2ec6861e1668a035b50e69f7c2
[ "MIT" ]
2
2020-05-18T06:45:04.000Z
2021-05-18T06:38:23.000Z
Zhangjiashan_dwt/projects/generate_samples.py
zjy8006/MonthlyRunoffForecastByAutoReg
661fcb5dcdfbbb2ec6861e1668a035b50e69f7c2
[ "MIT" ]
null
null
null
Zhangjiashan_dwt/projects/generate_samples.py
zjy8006/MonthlyRunoffForecastByAutoReg
661fcb5dcdfbbb2ec6861e1668a035b50e69f7c2
[ "MIT" ]
1
2020-01-17T02:56:18.000Z
2020-01-17T02:56:18.000Z
import sys
import os

# NOTE(review): os.path.abspath("__file__") resolves the literal string
# "__file__" against the CWD (not this script's directory); kept as-is
# because the repo's scripts rely on being launched from the project root.
root_path = os.path.dirname(os.path.abspath("__file__"))
sys.path.append(root_path)

from tools.samples_generator import gen_multi_forecast_samples
from tools.samples_generator import gen_direct_forecast_samples
from Zhangjiashan_dwt.projects.variables import variables

# Arguments shared by every sample-generation call below.
_COMMON = dict(
    station="Zhangjiashan",
    decomposer="dwt",
    lags_dict=variables['lags_dict'],
    start=533,
    stop=792,
    test_len=120,
)
_INPUT_COLUMNS = ['D1', 'D2', 'A2']
_OUTPUT_COLUMN = ['ORIG']

# Plain direct-forecast samples for each train/dev/test split.
for gen_from in (
    'training and validation sets',
    'training-development and appended sets',
    'training-development and test sets',
):
    gen_direct_forecast_samples(
        input_columns=_INPUT_COLUMNS,
        output_column=_OUTPUT_COLUMN,
        gen_from=gen_from,
        **_COMMON,
    )

# Lead-time variants with PACF-based, then Pearson-based lag selection.
for mode in ('PACF', 'Pearson'):
    for lead_time in (1, 3, 5, 7, 9):
        gen_direct_forecast_samples(
            input_columns=_INPUT_COLUMNS,
            output_column=_OUTPUT_COLUMN,
            mode=mode,
            lead_time=lead_time,
            gen_from='training and appended sets',
            **_COMMON,
        )

# Multi-output forecast samples (no explicit output column).
gen_multi_forecast_samples(
    columns=_INPUT_COLUMNS,
    **_COMMON,
)

# PCA-reduced variants: a fixed component count (28), 'mle' selection,
# then a sweep from num_in_one-16 up to num_in_one inclusive.
num_in_one = sum(variables['lags_dict']['db10-2'].values())
for n_components in [28, 'mle'] + list(range(num_in_one - 16, num_in_one + 1)):
    gen_direct_forecast_samples(
        input_columns=_INPUT_COLUMNS,
        output_column=_OUTPUT_COLUMN,
        mode='PACF',
        lead_time=1,
        n_components=n_components,
        gen_from='training and appended sets',
        **_COMMON,
    )
25.287879
63
0.633313
419
3,338
4.77327
0.167064
0.076
0.085
0.108
0.8475
0.8475
0.7835
0.7745
0.762
0.762
0
0.049558
0.220192
3,338
131
64
25.480916
0.718786
0
0
0.77686
0
0
0.174056
0
0
0
0
0
0
1
0
false
0
0.041322
0
0.041322
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
beb1e5c17df2c8adb6db1e9fa60fc0685a1f2d23
3,399
py
Python
ovl/image_filters/kernels.py
ofekashery/Ovl-Python
117e3f4ae1a8a5624c41792bd36b760afbe86c8e
[ "Apache-2.0" ]
1
2020-10-11T16:14:46.000Z
2020-10-11T16:14:46.000Z
ovl/image_filters/kernels.py
ofekashery/Ovl-Python
117e3f4ae1a8a5624c41792bd36b760afbe86c8e
[ "Apache-2.0" ]
1
2020-10-18T05:00:06.000Z
2020-12-24T20:03:44.000Z
ovl/image_filters/kernels.py
ofekashery/Ovl-Python
117e3f4ae1a8a5624c41792bd36b760afbe86c8e
[ "Apache-2.0" ]
null
null
null
import numpy as np
import cv2


def validate_odd_size(size):
    """
    Check that a kernel shape is a pair (list or tuple) of odd integers.

    :param size: the shape (size) to be checked
    :return: False if size is invalid, True otherwise
    """
    if type(size) not in (list, tuple):
        return False
    if len(size) != 2:
        return False
    if size[0] % 2 != 1 or size[1] % 2 != 1:
        return False
    return True


def is_odd_size(size) -> bool:
    """
    Check that a kernel shape is a pair (list or tuple) of odd integers.

    Kept for backward compatibility: it duplicated validate_odd_size
    line-for-line, so it now simply delegates to it.

    :param size: the shape (size) to be checked
    :return: True if the shape is valid, False otherwise
    """
    return validate_odd_size(size)


def _raise_if_not_odd_size(size):
    """
    Raise ValueError when *size* is not a pair of odd integers.

    Bug fix: the original kernel builders called validate_odd_size() and
    discarded its boolean result, so invalid sizes were silently passed
    through to kernel construction. This helper makes validation effective.
    """
    if not validate_odd_size(size):
        raise ValueError(
            "kernel size must be a list/tuple of 2 odd integers, got {!r}".format(size)
        )


def cross_kernel(size):
    r"""
    Returns a cross (ones in a cross) kernel for morphological functions

    Example of a (5,5) cross:
     | \| 0 0 1 0 0 \| |
     | \| 0 0 1 0 0 \| |
     | \| 1 1 1 1 1 \| |
     | \| 0 0 1 0 0 \| |
     | \| 0 0 1 0 0 \| |

    :param size: a tuple of size 2 of 2 odd integers denoting the size of the kernel f.g. (5, 5)
    :return: the numpy.array of the cross shape
    :raises ValueError: if size is not a pair of odd integers
    """
    _raise_if_not_odd_size(size)
    return cv2.getStructuringElement(cv2.MORPH_CROSS, ksize=size)


def rectangle_kernel(size):
    r"""
    Returns a rectangle (all ones) kernel for morphological functions

    Example of a (5,5) rectangle:
     | \| 1 1 1 1 1 \| |
     | \| 1 1 1 1 1 \| |
     | \| 1 1 1 1 1 \| |
     | \| 1 1 1 1 1 \| |
     | \| 1 1 1 1 1 \| |

    :param size: a tuple of size 2 of 2 odd integers denoting the size of the kernel f.g. (5, 5)
    :return: the numpy.array of the rectangle shape
    """
    # NOTE: the original never validated rectangle sizes (a rectangle kernel
    # does not strictly require odd dimensions), so that behavior is kept.
    return cv2.getStructuringElement(cv2.MORPH_RECT, ksize=size)


def ellipse_kernel(size):
    r"""
    Returns an ellipse (ones in the shape of an ellipse) kernel
    for morphological functions

    Example of a (5,5) ellipse:
     | \| 0 0 1 0 0 \| |
     | \| 1 1 1 1 1 \| |
     | \| 1 1 1 1 1 \| |
     | \| 1 1 1 1 1 \| |
     | \| 0 0 1 0 0 \| |

    :param size: a tuple of size 2 of 2 odd integers denoting the size of the kernel f.g. (5, 5)
    :return: the kernel
    :raises ValueError: if size is not a pair of odd integers
    """
    _raise_if_not_odd_size(size)
    return cv2.getStructuringElement(cv2.MORPH_ELLIPSE, ksize=size)


def horizontal_line_kernel(size):
    r"""
    Returns an horizontal line (a horizontal line of ones) kernel
    for morphological functions

    Example of a (5,5) horizontal line:
     | \| 0 0 0 0 0 \| |
     | \| 0 0 0 0 0 \| |
     | \| 1 1 1 1 1 \| |
     | \| 0 0 0 0 0 \| |
     | \| 0 0 0 0 0 \| |

    :param size: a tuple of size 2 of 2 odd integers denoting the size of the kernel f.g. (5, 5)
    :return: the kernel
    :raises ValueError: if size is not a pair of odd integers
    """
    _raise_if_not_odd_size(size)
    kernel = np.zeros(size, dtype=np.uint8)
    # Set the middle row to ones ((size[0]-1)//2 is exact for odd sizes).
    kernel[(size[0] - 1) // 2, :] = 1
    return kernel


def vertical_line_kernel(size):
    r"""
    Returns a vertical line (a vertical line of ones) kernel
    for morphological functions

    Example of a (5,5) vertical line:
     | \| 0 0 1 0 0 \| |
     | \| 0 0 1 0 0 \| |
     | \| 0 0 1 0 0 \| |
     | \| 0 0 1 0 0 \| |
     | \| 0 0 1 0 0 \| |

    :param size: a tuple of size 2 of 2 odd integers denoting the size of the kernel f.g. (5, 5)
    :return: the kernel
    :raises ValueError: if size is not a pair of odd integers
    """
    _raise_if_not_odd_size(size)
    kernel = np.zeros(size, dtype=np.uint8)
    # Set the middle column to ones.
    kernel[:, (size[1] - 1) // 2] = 1
    return kernel
25.556391
93
0.57664
578
3,399
3.352941
0.128028
0.048504
0.065015
0.078431
0.815273
0.726006
0.726006
0.726006
0.726006
0.630031
0
0.08
0.30862
3,399
132
94
25.75
0.744681
0.563989
0
0.536585
0
0
0
0
0
0
0
0
0
1
0.170732
false
0
0.04878
0
0.536585
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
7
feb5f5b683b02277996c054a22f258809aa1b008
2,047
gyp
Python
gyp/pdfviewer_lib.gyp
quanganh2627/bytm-x64-L-w05-2015_external_chromium_org_third_party_skia
90b3f9b82dbad266f960601d2120082bb841fb97
[ "BSD-3-Clause" ]
2
2019-05-09T17:06:47.000Z
2020-07-06T16:14:13.000Z
gyp/pdfviewer_lib.gyp
quanganh2627/bytm-x64-L-w05-2015_external_chromium_org_third_party_skia
90b3f9b82dbad266f960601d2120082bb841fb97
[ "BSD-3-Clause" ]
null
null
null
gyp/pdfviewer_lib.gyp
quanganh2627/bytm-x64-L-w05-2015_external_chromium_org_third_party_skia
90b3f9b82dbad266f960601d2120082bb841fb97
[ "BSD-3-Clause" ]
3
2015-03-13T14:30:30.000Z
2020-07-06T16:13:36.000Z
# GYP file to build pdfviewer.
#
# To build on Linux:
#  ./gyp_skia pdfviewer.gyp && make pdfviewer
#
{
  'targets': [
    {
      'target_name': 'pdfviewer_lib',
      'type': 'static_library',
      'sources': [
        # FIXME: Include directory is named "inc" (instead of "include") in
        # order to not be considered the public API.
        '../experimental/PdfViewer/inc/SkPdfContext.h',
        '../experimental/PdfViewer/inc/SkPdfDiffEncoder.h',
        '../experimental/PdfViewer/inc/SkPdfRenderer.h',
        '../experimental/PdfViewer/inc/SkPdfTokenLooper.h',
        '../experimental/PdfViewer/src/SkPdfContext.cpp',
        '../experimental/PdfViewer/src/SkPdfRenderer.cpp',
        '../experimental/PdfViewer/src/SkTDStackNester.h',
        '../experimental/PdfViewer/src/SkPdfDiffEncoder.cpp',
        '../experimental/PdfViewer/SkPdfGraphicsState.cpp',
        '../experimental/PdfViewer/SkPdfFont.cpp',
        '../experimental/PdfViewer/SkPdfReporter.cpp',
        '../experimental/PdfViewer/SkPdfUtils.cpp',
        #'../experimental/PdfViewer/SkPdfNYI.cpp',
        '../experimental/PdfViewer/SkTrackDevice.cpp',
        '../experimental/PdfViewer/SkTracker.cpp',
        '../experimental/PdfViewer/pdfparser/native/SkPdfNativeObject.cpp',
        '../experimental/PdfViewer/pdfparser/native/SkPdfNativeTokenizer.cpp',
        '../experimental/PdfViewer/pdfparser/native/SkPdfNativeDoc.cpp',
        '../experimental/PdfViewer/pdfparser/native/pdfapi/SkPdfMapper_autogen.cpp',
        '../experimental/PdfViewer/pdfparser/native/pdfapi/SkPdfHeaders_autogen.cpp',
      ],
      'include_dirs': [
        '../experimental/PdfViewer',
        '../experimental/PdfViewer/inc',
        '../experimental/PdfViewer/src',
        '../experimental/PdfViewer/pdfparser',
        '../experimental/PdfViewer/pdfparser/native',
        '../experimental/PdfViewer/pdfparser/native/pdfapi',
        '../src/core',
      ],
      'dependencies': [
        'skia_lib.gyp:skia_lib',
        'skflate.gyp:skflate',
      ],
    },
  ],
}
38.622642
85
0.643381
177
2,047
7.389831
0.350282
0.417431
0.256881
0.192661
0.190367
0.068807
0
0
0
0
0
0
0.194919
2,047
52
86
39.365385
0.793689
0.117733
0
0.095238
0
0
0.726767
0.665554
0
0
0
0.019231
0
1
0
true
0
0
0
0
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
1
null
0
0
1
0
0
0
1
0
0
0
0
0
0
7
22cbbfb7393af0a516b219a985cdbe092a689d25
2,530
py
Python
classes/state_machine.py
cameronstinson4/diamondback
3481055cb1cb144296ced18df9fe60872853ffd9
[ "MIT" ]
null
null
null
classes/state_machine.py
cameronstinson4/diamondback
3481055cb1cb144296ced18df9fe60872853ffd9
[ "MIT" ]
null
null
null
classes/state_machine.py
cameronstinson4/diamondback
3481055cb1cb144296ced18df9fe60872853ffd9
[ "MIT" ]
null
null
null
class StateMachine:
    """ This class represents the encompassing state machine object """

    def __init__(self, StartAt, States=None, Comment=None, TimeoutSeconds=None, Version=None):
        """ Constructor
        Args:
            StartAt: State, required. A string that must exactly match (is case sensitive)
                the name of one of the state objects.
            States: dict, optional. Mapping of state name -> state object.
            Comment: string optional. A human-readable description of the state machine.
            TimeoutSeconds: integer, optional. The maximum number of seconds an execution of the
                state machine can run. If it runs longer than the specified time, the execution
                fails with a States.Timeout Error Name.
            Version: The version of the Amazon States Language used in the state
                machine (default is "1.0").
        """
        self.StartAt = StartAt
        # Bug fix: the original signature used a mutable default (States={}),
        # which made every StateMachine instance share one dict.
        self.States = {} if States is None else States
        self.Comment = Comment
        self.TimeoutSeconds = TimeoutSeconds
        self.Version = Version

    def addState(self, state, stateName):
        """ Adds a state to the state machine object """
        self.States[stateName] = state


class StateMachineBuilder():
    """ This class represents the encompassing state machine object with no required """

    def __init__(self, StartAt=None, States=None, Comment=None, TimeoutSeconds=None, Version=None):
        """ Constructor
        Args:
            StartAt: State, required. A string that must exactly match (is case sensitive)
                the name of one of the state objects.
            States: dict, optional. Mapping of state name -> state object.
            Comment: string optional. A human-readable description of the state machine.
            TimeoutSeconds: integer, optional. The maximum number of seconds an execution of the
                state machine can run. If it runs longer than the specified time, the execution
                fails with a States.Timeout Error Name.
            Version: The version of the Amazon States Language used in the state
                machine (default is "1.0").
        """
        self.StartAt = StartAt
        self.States = States
        self.Comment = Comment
        self.TimeoutSeconds = TimeoutSeconds
        self.Version = Version

    def addState(self, state, stateName):
        """ Adds a state to the state machine object """
        # Bug fix: with the default States=None this method raised
        # TypeError ('NoneType' object does not support item assignment);
        # lazily create the dict instead.
        if self.States is None:
            self.States = {}
        self.States[stateName] = state

    def setStartAtStart(self, state):
        """ Sets the state to start at """
        # Originally an unimplemented `pass`; now records the start state
        # as documented.
        self.StartAt = state

    def build(self):
        """ Builds the state machine object

        Returns:
            StateMachine: assembled from the builder's current fields.
            (Originally an unimplemented bare `return` yielding None.)
        """
        return StateMachine(
            self.StartAt,
            States={} if self.States is None else self.States,
            Comment=self.Comment,
            TimeoutSeconds=self.TimeoutSeconds,
            Version=self.Version,
        )
37.205882
220
0.63004
297
2,530
5.340067
0.262626
0.06053
0.08512
0.042875
0.868852
0.868852
0.868852
0.868852
0.803279
0.803279
0
0.002262
0.301186
2,530
68
221
37.205882
0.894796
0.545059
0
0.636364
0
0
0
0
0
0
0
0
0
1
0.272727
false
0.045455
0
0
0.409091
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
7
22ee1b6200c9d24bbda334a27bd5d6e9e15681e2
38
py
Python
test/run/t527.py
timmartin/skulpt
2e3a3fbbaccc12baa29094a717ceec491a8a6750
[ "MIT" ]
2,671
2015-01-03T08:23:25.000Z
2022-03-31T06:15:48.000Z
test/run/t527.py
timmartin/skulpt
2e3a3fbbaccc12baa29094a717ceec491a8a6750
[ "MIT" ]
972
2015-01-05T08:11:00.000Z
2022-03-29T13:47:15.000Z
test/run/t527.py
timmartin/skulpt
2e3a3fbbaccc12baa29094a717ceec491a8a6750
[ "MIT" ]
845
2015-01-03T19:53:36.000Z
2022-03-29T18:34:22.000Z
# Exercise the builtin pow(): the three-argument form with a None modulus
# must behave exactly like the two-argument form (Python 2 print statements;
# this is a skulpt interpreter regression-test fixture).
print pow(4, 5)
print pow(4, 5, None)
12.666667
21
0.631579
9
38
2.666667
0.555556
0.666667
0.75
0.833333
0
0
0
0
0
0
0
0.129032
0.184211
38
2
22
19
0.645161
0
0
0
0
0
0
0
0
0
0
0
0
0
null
null
0
0
null
null
1
1
1
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
1
0
8
a3c660c09a78e94a463be51c25529f56b40b1a80
20,975
py
Python
sdk/python/pulumi_alicloud/ecs/ecs_key_pair.py
pulumi/pulumi-alicloud
9c34d84b4588a7c885c6bec1f03b5016e5a41683
[ "ECL-2.0", "Apache-2.0" ]
42
2019-03-18T06:34:37.000Z
2022-03-24T07:08:57.000Z
sdk/python/pulumi_alicloud/ecs/ecs_key_pair.py
pulumi/pulumi-alicloud
9c34d84b4588a7c885c6bec1f03b5016e5a41683
[ "ECL-2.0", "Apache-2.0" ]
152
2019-04-15T21:03:44.000Z
2022-03-29T18:00:57.000Z
sdk/python/pulumi_alicloud/ecs/ecs_key_pair.py
pulumi/pulumi-alicloud
9c34d84b4588a7c885c6bec1f03b5016e5a41683
[ "ECL-2.0", "Apache-2.0" ]
3
2020-08-26T17:30:07.000Z
2021-07-05T01:37:45.000Z
# coding=utf-8 # *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. *** # *** Do not edit by hand unless you're certain you know what you are doing! *** import warnings import pulumi import pulumi.runtime from typing import Any, Mapping, Optional, Sequence, Union, overload from .. import _utilities __all__ = ['EcsKeyPairArgs', 'EcsKeyPair'] @pulumi.input_type class EcsKeyPairArgs: def __init__(__self__, *, key_file: Optional[pulumi.Input[str]] = None, key_name: Optional[pulumi.Input[str]] = None, key_name_prefix: Optional[pulumi.Input[str]] = None, key_pair_name: Optional[pulumi.Input[str]] = None, public_key: Optional[pulumi.Input[str]] = None, resource_group_id: Optional[pulumi.Input[str]] = None, tags: Optional[pulumi.Input[Mapping[str, Any]]] = None): """ The set of arguments for constructing a EcsKeyPair resource. :param pulumi.Input[str] key_file: The key file. :param pulumi.Input[str] key_name: The key pair's name. It is the only in one Alicloud account. :param pulumi.Input[str] public_key: You can import an existing public key and using Alicloud key pair to manage it. If this parameter is specified, `resource_group_id` is the key pair belongs. :param pulumi.Input[str] resource_group_id: The Id of resource group which the key pair belongs. :param pulumi.Input[Mapping[str, Any]] tags: A mapping of tags to assign to the resource. """ if key_file is not None: pulumi.set(__self__, "key_file", key_file) if key_name is not None: warnings.warn("""Field 'key_name' has been deprecated from provider version 1.121.0. New field 'key_pair_name' instead.""", DeprecationWarning) pulumi.log.warn("""key_name is deprecated: Field 'key_name' has been deprecated from provider version 1.121.0. 
New field 'key_pair_name' instead.""") if key_name is not None: pulumi.set(__self__, "key_name", key_name) if key_name_prefix is not None: pulumi.set(__self__, "key_name_prefix", key_name_prefix) if key_pair_name is not None: pulumi.set(__self__, "key_pair_name", key_pair_name) if public_key is not None: pulumi.set(__self__, "public_key", public_key) if resource_group_id is not None: pulumi.set(__self__, "resource_group_id", resource_group_id) if tags is not None: pulumi.set(__self__, "tags", tags) @property @pulumi.getter(name="keyFile") def key_file(self) -> Optional[pulumi.Input[str]]: """ The key file. """ return pulumi.get(self, "key_file") @key_file.setter def key_file(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "key_file", value) @property @pulumi.getter(name="keyName") def key_name(self) -> Optional[pulumi.Input[str]]: """ The key pair's name. It is the only in one Alicloud account. """ return pulumi.get(self, "key_name") @key_name.setter def key_name(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "key_name", value) @property @pulumi.getter(name="keyNamePrefix") def key_name_prefix(self) -> Optional[pulumi.Input[str]]: return pulumi.get(self, "key_name_prefix") @key_name_prefix.setter def key_name_prefix(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "key_name_prefix", value) @property @pulumi.getter(name="keyPairName") def key_pair_name(self) -> Optional[pulumi.Input[str]]: return pulumi.get(self, "key_pair_name") @key_pair_name.setter def key_pair_name(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "key_pair_name", value) @property @pulumi.getter(name="publicKey") def public_key(self) -> Optional[pulumi.Input[str]]: """ You can import an existing public key and using Alicloud key pair to manage it. If this parameter is specified, `resource_group_id` is the key pair belongs. 
""" return pulumi.get(self, "public_key") @public_key.setter def public_key(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "public_key", value) @property @pulumi.getter(name="resourceGroupId") def resource_group_id(self) -> Optional[pulumi.Input[str]]: """ The Id of resource group which the key pair belongs. """ return pulumi.get(self, "resource_group_id") @resource_group_id.setter def resource_group_id(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "resource_group_id", value) @property @pulumi.getter def tags(self) -> Optional[pulumi.Input[Mapping[str, Any]]]: """ A mapping of tags to assign to the resource. """ return pulumi.get(self, "tags") @tags.setter def tags(self, value: Optional[pulumi.Input[Mapping[str, Any]]]): pulumi.set(self, "tags", value) @pulumi.input_type class _EcsKeyPairState: def __init__(__self__, *, finger_print: Optional[pulumi.Input[str]] = None, key_file: Optional[pulumi.Input[str]] = None, key_name: Optional[pulumi.Input[str]] = None, key_name_prefix: Optional[pulumi.Input[str]] = None, key_pair_name: Optional[pulumi.Input[str]] = None, public_key: Optional[pulumi.Input[str]] = None, resource_group_id: Optional[pulumi.Input[str]] = None, tags: Optional[pulumi.Input[Mapping[str, Any]]] = None): """ Input properties used for looking up and filtering EcsKeyPair resources. :param pulumi.Input[str] key_file: The key file. :param pulumi.Input[str] key_name: The key pair's name. It is the only in one Alicloud account. :param pulumi.Input[str] public_key: You can import an existing public key and using Alicloud key pair to manage it. If this parameter is specified, `resource_group_id` is the key pair belongs. :param pulumi.Input[str] resource_group_id: The Id of resource group which the key pair belongs. :param pulumi.Input[Mapping[str, Any]] tags: A mapping of tags to assign to the resource. 
""" if finger_print is not None: pulumi.set(__self__, "finger_print", finger_print) if key_file is not None: pulumi.set(__self__, "key_file", key_file) if key_name is not None: warnings.warn("""Field 'key_name' has been deprecated from provider version 1.121.0. New field 'key_pair_name' instead.""", DeprecationWarning) pulumi.log.warn("""key_name is deprecated: Field 'key_name' has been deprecated from provider version 1.121.0. New field 'key_pair_name' instead.""") if key_name is not None: pulumi.set(__self__, "key_name", key_name) if key_name_prefix is not None: pulumi.set(__self__, "key_name_prefix", key_name_prefix) if key_pair_name is not None: pulumi.set(__self__, "key_pair_name", key_pair_name) if public_key is not None: pulumi.set(__self__, "public_key", public_key) if resource_group_id is not None: pulumi.set(__self__, "resource_group_id", resource_group_id) if tags is not None: pulumi.set(__self__, "tags", tags) @property @pulumi.getter(name="fingerPrint") def finger_print(self) -> Optional[pulumi.Input[str]]: return pulumi.get(self, "finger_print") @finger_print.setter def finger_print(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "finger_print", value) @property @pulumi.getter(name="keyFile") def key_file(self) -> Optional[pulumi.Input[str]]: """ The key file. """ return pulumi.get(self, "key_file") @key_file.setter def key_file(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "key_file", value) @property @pulumi.getter(name="keyName") def key_name(self) -> Optional[pulumi.Input[str]]: """ The key pair's name. It is the only in one Alicloud account. 
""" return pulumi.get(self, "key_name") @key_name.setter def key_name(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "key_name", value) @property @pulumi.getter(name="keyNamePrefix") def key_name_prefix(self) -> Optional[pulumi.Input[str]]: return pulumi.get(self, "key_name_prefix") @key_name_prefix.setter def key_name_prefix(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "key_name_prefix", value) @property @pulumi.getter(name="keyPairName") def key_pair_name(self) -> Optional[pulumi.Input[str]]: return pulumi.get(self, "key_pair_name") @key_pair_name.setter def key_pair_name(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "key_pair_name", value) @property @pulumi.getter(name="publicKey") def public_key(self) -> Optional[pulumi.Input[str]]: """ You can import an existing public key and using Alicloud key pair to manage it. If this parameter is specified, `resource_group_id` is the key pair belongs. """ return pulumi.get(self, "public_key") @public_key.setter def public_key(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "public_key", value) @property @pulumi.getter(name="resourceGroupId") def resource_group_id(self) -> Optional[pulumi.Input[str]]: """ The Id of resource group which the key pair belongs. """ return pulumi.get(self, "resource_group_id") @resource_group_id.setter def resource_group_id(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "resource_group_id", value) @property @pulumi.getter def tags(self) -> Optional[pulumi.Input[Mapping[str, Any]]]: """ A mapping of tags to assign to the resource. 
""" return pulumi.get(self, "tags") @tags.setter def tags(self, value: Optional[pulumi.Input[Mapping[str, Any]]]): pulumi.set(self, "tags", value) class EcsKeyPair(pulumi.CustomResource): @overload def __init__(__self__, resource_name: str, opts: Optional[pulumi.ResourceOptions] = None, key_file: Optional[pulumi.Input[str]] = None, key_name: Optional[pulumi.Input[str]] = None, key_name_prefix: Optional[pulumi.Input[str]] = None, key_pair_name: Optional[pulumi.Input[str]] = None, public_key: Optional[pulumi.Input[str]] = None, resource_group_id: Optional[pulumi.Input[str]] = None, tags: Optional[pulumi.Input[Mapping[str, Any]]] = None, __props__=None): """ Provides a ECS Key Pair resource. For information about ECS Key Pair and how to use it, see [What is Key Pair](https://www.alibabacloud.com/help/en/doc-detail/51771.htm). > **NOTE:** Available in v1.121.0+. ## Example Usage Basic Usage ```python import pulumi import pulumi_alicloud as alicloud example = alicloud.ecs.EcsKeyPair("example", key_pair_name="key_pair_name") # Using name prefix to build key pair prefix = alicloud.ecs.EcsKeyPair("prefix", key_name_prefix="terraform-test-key-pair-prefix") # Import an existing public key to build a alicloud key pair publickey = alicloud.ecs.EcsKeyPair("publickey", key_pair_name="my_public_key", public_key="ssh-rsa AAAAB3Nza12345678qwertyuudsfsg") ``` ## Import ECS Key Pair can be imported using the id, e.g. ```sh $ pulumi import alicloud:ecs/ecsKeyPair:EcsKeyPair example <key_name> ``` :param str resource_name: The name of the resource. :param pulumi.ResourceOptions opts: Options for the resource. :param pulumi.Input[str] key_file: The key file. :param pulumi.Input[str] key_name: The key pair's name. It is the only in one Alicloud account. :param pulumi.Input[str] public_key: You can import an existing public key and using Alicloud key pair to manage it. If this parameter is specified, `resource_group_id` is the key pair belongs. 
:param pulumi.Input[str] resource_group_id: The Id of resource group which the key pair belongs. :param pulumi.Input[Mapping[str, Any]] tags: A mapping of tags to assign to the resource. """ ... @overload def __init__(__self__, resource_name: str, args: Optional[EcsKeyPairArgs] = None, opts: Optional[pulumi.ResourceOptions] = None): """ Provides a ECS Key Pair resource. For information about ECS Key Pair and how to use it, see [What is Key Pair](https://www.alibabacloud.com/help/en/doc-detail/51771.htm). > **NOTE:** Available in v1.121.0+. ## Example Usage Basic Usage ```python import pulumi import pulumi_alicloud as alicloud example = alicloud.ecs.EcsKeyPair("example", key_pair_name="key_pair_name") # Using name prefix to build key pair prefix = alicloud.ecs.EcsKeyPair("prefix", key_name_prefix="terraform-test-key-pair-prefix") # Import an existing public key to build a alicloud key pair publickey = alicloud.ecs.EcsKeyPair("publickey", key_pair_name="my_public_key", public_key="ssh-rsa AAAAB3Nza12345678qwertyuudsfsg") ``` ## Import ECS Key Pair can be imported using the id, e.g. ```sh $ pulumi import alicloud:ecs/ecsKeyPair:EcsKeyPair example <key_name> ``` :param str resource_name: The name of the resource. :param EcsKeyPairArgs args: The arguments to use to populate this resource's properties. :param pulumi.ResourceOptions opts: Options for the resource. """ ... 
def __init__(__self__, resource_name: str, *args, **kwargs): resource_args, opts = _utilities.get_resource_args_opts(EcsKeyPairArgs, pulumi.ResourceOptions, *args, **kwargs) if resource_args is not None: __self__._internal_init(resource_name, opts, **resource_args.__dict__) else: __self__._internal_init(resource_name, *args, **kwargs) def _internal_init(__self__, resource_name: str, opts: Optional[pulumi.ResourceOptions] = None, key_file: Optional[pulumi.Input[str]] = None, key_name: Optional[pulumi.Input[str]] = None, key_name_prefix: Optional[pulumi.Input[str]] = None, key_pair_name: Optional[pulumi.Input[str]] = None, public_key: Optional[pulumi.Input[str]] = None, resource_group_id: Optional[pulumi.Input[str]] = None, tags: Optional[pulumi.Input[Mapping[str, Any]]] = None, __props__=None): if opts is None: opts = pulumi.ResourceOptions() if not isinstance(opts, pulumi.ResourceOptions): raise TypeError('Expected resource options to be a ResourceOptions instance') if opts.version is None: opts.version = _utilities.get_version() if opts.id is None: if __props__ is not None: raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource') __props__ = EcsKeyPairArgs.__new__(EcsKeyPairArgs) __props__.__dict__["key_file"] = key_file if key_name is not None and not opts.urn: warnings.warn("""Field 'key_name' has been deprecated from provider version 1.121.0. New field 'key_pair_name' instead.""", DeprecationWarning) pulumi.log.warn("""key_name is deprecated: Field 'key_name' has been deprecated from provider version 1.121.0. 
New field 'key_pair_name' instead.""") __props__.__dict__["key_name"] = key_name __props__.__dict__["key_name_prefix"] = key_name_prefix __props__.__dict__["key_pair_name"] = key_pair_name __props__.__dict__["public_key"] = public_key __props__.__dict__["resource_group_id"] = resource_group_id __props__.__dict__["tags"] = tags __props__.__dict__["finger_print"] = None super(EcsKeyPair, __self__).__init__( 'alicloud:ecs/ecsKeyPair:EcsKeyPair', resource_name, __props__, opts) @staticmethod def get(resource_name: str, id: pulumi.Input[str], opts: Optional[pulumi.ResourceOptions] = None, finger_print: Optional[pulumi.Input[str]] = None, key_file: Optional[pulumi.Input[str]] = None, key_name: Optional[pulumi.Input[str]] = None, key_name_prefix: Optional[pulumi.Input[str]] = None, key_pair_name: Optional[pulumi.Input[str]] = None, public_key: Optional[pulumi.Input[str]] = None, resource_group_id: Optional[pulumi.Input[str]] = None, tags: Optional[pulumi.Input[Mapping[str, Any]]] = None) -> 'EcsKeyPair': """ Get an existing EcsKeyPair resource's state with the given name, id, and optional extra properties used to qualify the lookup. :param str resource_name: The unique name of the resulting resource. :param pulumi.Input[str] id: The unique provider ID of the resource to lookup. :param pulumi.ResourceOptions opts: Options for the resource. :param pulumi.Input[str] key_file: The key file. :param pulumi.Input[str] key_name: The key pair's name. It is the only in one Alicloud account. :param pulumi.Input[str] public_key: You can import an existing public key and using Alicloud key pair to manage it. If this parameter is specified, `resource_group_id` is the key pair belongs. :param pulumi.Input[str] resource_group_id: The Id of resource group which the key pair belongs. :param pulumi.Input[Mapping[str, Any]] tags: A mapping of tags to assign to the resource. 
""" opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id)) __props__ = _EcsKeyPairState.__new__(_EcsKeyPairState) __props__.__dict__["finger_print"] = finger_print __props__.__dict__["key_file"] = key_file __props__.__dict__["key_name"] = key_name __props__.__dict__["key_name_prefix"] = key_name_prefix __props__.__dict__["key_pair_name"] = key_pair_name __props__.__dict__["public_key"] = public_key __props__.__dict__["resource_group_id"] = resource_group_id __props__.__dict__["tags"] = tags return EcsKeyPair(resource_name, opts=opts, __props__=__props__) @property @pulumi.getter(name="fingerPrint") def finger_print(self) -> pulumi.Output[str]: return pulumi.get(self, "finger_print") @property @pulumi.getter(name="keyFile") def key_file(self) -> pulumi.Output[Optional[str]]: """ The key file. """ return pulumi.get(self, "key_file") @property @pulumi.getter(name="keyName") def key_name(self) -> pulumi.Output[str]: """ The key pair's name. It is the only in one Alicloud account. """ return pulumi.get(self, "key_name") @property @pulumi.getter(name="keyNamePrefix") def key_name_prefix(self) -> pulumi.Output[Optional[str]]: return pulumi.get(self, "key_name_prefix") @property @pulumi.getter(name="keyPairName") def key_pair_name(self) -> pulumi.Output[str]: return pulumi.get(self, "key_pair_name") @property @pulumi.getter(name="publicKey") def public_key(self) -> pulumi.Output[Optional[str]]: """ You can import an existing public key and using Alicloud key pair to manage it. If this parameter is specified, `resource_group_id` is the key pair belongs. """ return pulumi.get(self, "public_key") @property @pulumi.getter(name="resourceGroupId") def resource_group_id(self) -> pulumi.Output[Optional[str]]: """ The Id of resource group which the key pair belongs. """ return pulumi.get(self, "resource_group_id") @property @pulumi.getter def tags(self) -> pulumi.Output[Optional[Mapping[str, Any]]]: """ A mapping of tags to assign to the resource. 
""" return pulumi.get(self, "tags")
42.718941
201
0.642622
2,706
20,975
4.731707
0.070953
0.078179
0.083099
0.099656
0.869416
0.850672
0.837785
0.829897
0.81701
0.793112
0
0.004387
0.250155
20,975
490
202
42.806122
0.809702
0.275328
0
0.800699
1
0.020979
0.131416
0.002397
0
0
0
0
0
1
0.157343
false
0.003497
0.017483
0.027972
0.269231
0.045455
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
8
432b91a84921ad16f0f6263d2ef94d42b9699cb9
6,405
py
Python
src/plot_functions/3d_tensor.py
kaylani2/machineLearning
692623abf6fe02bde6c7da6c2f8c0ec526a3e8f8
[ "MIT" ]
7
2019-11-06T14:35:37.000Z
2022-03-06T03:55:06.000Z
src/plot_functions/3d_tensor.py
kaylani2/machineLearning
692623abf6fe02bde6c7da6c2f8c0ec526a3e8f8
[ "MIT" ]
10
2020-05-16T02:38:35.000Z
2021-04-11T23:55:35.000Z
src/plot_functions/3d_tensor.py
kaylani2/machineLearning
692623abf6fe02bde6c7da6c2f8c0ec526a3e8f8
[ "MIT" ]
2
2020-06-26T21:39:41.000Z
2020-09-15T03:38:32.000Z
#import tkinter import matplotlib.pyplot as plt import numpy as np from mpl_toolkits.mplot3d import Axes3D #plt.use('TkAgg') xyz = np.array (np.random.random ( (100,3))) x = xyz[:,0] y = xyz[:,1] z = xyz[:,2]*100 fig = plt.figure (figsize = (10, 10)) ax = fig.add_subplot (111, projection = '3d') ax.set_ylabel ('$F1,\ F2,\ F3\ |\ Label$', fontsize = 20, rotation = 180) ax.set_zlabel ('$Samples$', fontsize = 20, rotation = 0) ax.plot ([1.], [1.], [1.], markerfacecolor = 'r', markeredgecolor = 'r', marker = '$7$', markersize = 9, alpha = 0.6) ax.plot ([1.], [2.], [1.], markerfacecolor = 'r', markeredgecolor = 'r', marker = '$8$', markersize = 9, alpha = 0.6) ax.plot ([1.], [3.], [1.], markerfacecolor = 'r', markeredgecolor = 'r', marker = '$9$', markersize = 9, alpha = 0.6) ax.plot ([1.], [1.], [2.], markerfacecolor = 'g', markeredgecolor = 'g', marker = '$4$', markersize = 9, alpha = 0.6) ax.plot ([1.], [2.], [2.], markerfacecolor = 'g', markeredgecolor = 'g', marker = '$5$', markersize = 9, alpha = 0.6) ax.plot ([1.], [3.], [2.], markerfacecolor = 'g', markeredgecolor = 'g', marker = '$6$', markersize = 9, alpha = 0.6) ax.plot ([1.], [1.], [3.], markerfacecolor = 'b', markeredgecolor = 'b', marker = '$1$', markersize = 9, alpha = 0.6) ax.plot ([1.], [2.], [3.], markerfacecolor = 'b', markeredgecolor = 'b', marker = '$2$', markersize = 9, alpha = 0.6) ax.plot ([1.], [3.], [3.], markerfacecolor = 'b', markeredgecolor = 'b', marker = '$3$', markersize = 9, alpha = 0.6) ax.plot ([1.], [4.], [1.], markerfacecolor = 'r', markeredgecolor = 'r', marker = 'X', markersize = 9, alpha = 0.6) ax.plot ([1.], [4.], [2.], markerfacecolor = 'g', markeredgecolor = 'g', marker = 'X', markersize = 9, alpha = 0.6) ax.plot ([1.], [4.], [3.], markerfacecolor = 'b', markeredgecolor = 'b', marker = 'o', markersize = 9, alpha = 0.6) ax.plot ([1.], [1.], [4.], markerfacecolor = 'r', markeredgecolor = 'r', marker = '$70$', markersize = 9, alpha = 0.6) ax.plot ([1.], [2.], [4.], markerfacecolor 
= 'r', markeredgecolor = 'r', marker = '$80$', markersize = 9, alpha = 0.6) ax.plot ([1.], [3.], [4.], markerfacecolor = 'r', markeredgecolor = 'r', marker = '$90$', markersize = 9, alpha = 0.6) ax.plot ([1.], [1.], [5.], markerfacecolor = 'g', markeredgecolor = 'g', marker = '$40$', markersize = 9, alpha = 0.6) ax.plot ([1.], [2.], [5.], markerfacecolor = 'g', markeredgecolor = 'g', marker = '$50$', markersize = 9, alpha = 0.6) ax.plot ([1.], [3.], [5.], markerfacecolor = 'g', markeredgecolor = 'g', marker = '$60$', markersize = 9, alpha = 0.6) ax.plot ([1.], [1.], [6.], markerfacecolor = 'b', markeredgecolor = 'b', marker = '$10$', markersize = 9, alpha = 0.6) ax.plot ([1.], [2.], [6.], markerfacecolor = 'b', markeredgecolor = 'b', marker = '$20$', markersize = 9, alpha = 0.6) ax.plot ([1.], [3.], [6.], markerfacecolor = 'b', markeredgecolor = 'b', marker = '$30$', markersize = 9, alpha = 0.6) ax.plot ([1.], [4.], [4.], markerfacecolor = 'r', markeredgecolor = 'r', marker = 'X', markersize = 9, alpha = 0.6) ax.plot ([1.], [4.], [5.], markerfacecolor = 'g', markeredgecolor = 'g', marker = 'X', markersize = 9, alpha = 0.6) ax.plot ([1.], [4.], [6.], markerfacecolor = 'b', markeredgecolor = 'b', marker = 'o', markersize = 9, alpha = 0.6) plt.show () fig = plt.figure (figsize = (10, 10)) ax = fig.add_subplot (111, projection = '3d') ax.set_ylabel ('$F1,\ F2,\ F3\ |\ Label$', fontsize = 20, rotation = 180) ax.set_zlabel ('$Samples$', fontsize = 20, rotation = 0) ax.set_xlabel ('$Steps$', fontsize = 20, rotation = 0) ax.plot ([3.], [1.], [5.], markerfacecolor = 'r', markeredgecolor = 'r', marker = '$9$', markersize = 9, alpha = 0.6) ax.plot ([3.], [2.], [5.], markerfacecolor = 'r', markeredgecolor = 'r', marker = '$8$', markersize = 9, alpha = 0.6) ax.plot ([3.], [3.], [5.], markerfacecolor = 'r', markeredgecolor = 'r', marker = '$7$', markersize = 9, alpha = 0.6) ax.plot ([2.], [1.], [5.], markerfacecolor = 'g', markeredgecolor = 'g', marker = '$6$', markersize = 9, 
alpha = 0.6) ax.plot ([2.], [2.], [5.], markerfacecolor = 'g', markeredgecolor = 'g', marker = '$5$', markersize = 9, alpha = 0.6) ax.plot ([2.], [3.], [5.], markerfacecolor = 'g', markeredgecolor = 'g', marker = '$4$', markersize = 9, alpha = 0.6) ax.plot ([1.], [1.], [5.], markerfacecolor = 'b', markeredgecolor = 'b', marker = '$3$', markersize = 9, alpha = 0.6) ax.plot ([1.], [2.], [5.], markerfacecolor = 'b', markeredgecolor = 'b', marker = '$2$', markersize = 9, alpha = 0.6) ax.plot ([1.], [3.], [5.], markerfacecolor = 'b', markeredgecolor = 'b', marker = '$1$', markersize = 9, alpha = 0.6) ax.plot ([1.], [0.], [1.], markerfacecolor = 'r', markeredgecolor = 'r', marker = 'X', markersize = 9, alpha = 0.6) ax.plot ([1.], [0.], [2.], markerfacecolor = 'g', markeredgecolor = 'g', marker = 'X', markersize = 9, alpha = 0.6) ax.plot ([1.], [0.], [3.], markerfacecolor = 'b', markeredgecolor = 'b', marker = 'o', markersize = 9, alpha = 0.6) ax.plot ([3.], [1.], [3.], markerfacecolor = 'r', markeredgecolor = 'r', marker = '$90$', markersize = 9, alpha = 0.6) ax.plot ([3.], [2.], [3.], markerfacecolor = 'r', markeredgecolor = 'r', marker = '$80$', markersize = 9, alpha = 0.6) ax.plot ([3.], [3.], [3.], markerfacecolor = 'r', markeredgecolor = 'r', marker = '$70$', markersize = 9, alpha = 0.6) ax.plot ([2.], [1.], [3.], markerfacecolor = 'g', markeredgecolor = 'g', marker = '$60$', markersize = 9, alpha = 0.6) ax.plot ([2.], [2.], [3.], markerfacecolor = 'g', markeredgecolor = 'g', marker = '$50$', markersize = 9, alpha = 0.6) ax.plot ([2.], [3.], [3.], markerfacecolor = 'g', markeredgecolor = 'g', marker = '$30$', markersize = 9, alpha = 0.6) ax.plot ([1.], [1.], [3.], markerfacecolor = 'b', markeredgecolor = 'b', marker = '$30$', markersize = 9, alpha = 0.6) ax.plot ([1.], [2.], [3.], markerfacecolor = 'b', markeredgecolor = 'b', marker = '$20$', markersize = 9, alpha = 0.6) ax.plot ([1.], [3.], [3.], markerfacecolor = 'b', markeredgecolor = 'b', marker = '$10$', 
markersize = 9, alpha = 0.6) ax.plot ([1.], [0.], [4.], markerfacecolor = 'r', markeredgecolor = 'r', marker = 'X', markersize = 9, alpha = 0.6) ax.plot ([1.], [0.], [5.], markerfacecolor = 'g', markeredgecolor = 'g', marker = 'X', markersize = 9, alpha = 0.6) ax.plot ([1.], [0.], [6.], markerfacecolor = 'b', markeredgecolor = 'b', marker = 'o', markersize = 9, alpha = 0.6) plt.show ()
68.138298
118
0.570648
882
6,405
4.134921
0.0839
0.026871
0.210584
0.223746
0.954209
0.954209
0.931725
0.924596
0.924321
0.912805
0
0.073722
0.163466
6,405
93
119
68.870968
0.606943
0.004684
0
0.151515
0
0
0.0488
0
0
0
0
0
0
1
0
false
0
0.045455
0
0.045455
0
0
0
0
null
0
1
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
8
4a54ee2bf37c7096bfddf6a4cc790a7220f1fe4b
390
py
Python
prediction/values.py
FelixAugenstein/digital-tech-tutorial-watson-studio-part-v
de90b4c8b37e69631f3d2aee4b08b3e846043455
[ "Apache-2.0" ]
null
null
null
prediction/values.py
FelixAugenstein/digital-tech-tutorial-watson-studio-part-v
de90b4c8b37e69631f3d2aee4b08b3e846043455
[ "Apache-2.0" ]
null
null
null
prediction/values.py
FelixAugenstein/digital-tech-tutorial-watson-studio-part-v
de90b4c8b37e69631f3d2aee4b08b3e846043455
[ "Apache-2.0" ]
null
null
null
# prediction values def predictionValues(predictions): predictionValues.prediction_churn_true_or_false = predictions['predictions'][0]['values'][0][0] predictionValues.prediction_churn_true_or_false_percentage_one = predictions['predictions'][0]['values'][0][1][0] predictionValues.prediction_churn_true_or_false_percentage_two = predictions['predictions'][0]['values'][0][1][1]
78
117
0.792308
47
390
6.234043
0.297872
0.266212
0.317406
0.358362
0.819113
0.716724
0.361775
0.361775
0
0
0
0.030137
0.064103
390
5
118
78
0.772603
0.04359
0
0
0
0
0.137097
0
0
0
0
0
0
1
0.25
false
0
0
0
0.25
0
0
0
0
null
1
1
1
1
1
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
7
4a6128b6cb9217c1cbd97839277157478c37c9b6
17,375
py
Python
schevo/test/test_equivalent.py
Schevo/schevo
d57a41f8b7b514ed48dc0164dcd3412a89e9873b
[ "MIT" ]
1
2020-09-05T00:47:50.000Z
2020-09-05T00:47:50.000Z
schevo/test/test_equivalent.py
Schevo/schevo
d57a41f8b7b514ed48dc0164dcd3412a89e9873b
[ "MIT" ]
null
null
null
schevo/test/test_equivalent.py
Schevo/schevo
d57a41f8b7b514ed48dc0164dcd3412a89e9873b
[ "MIT" ]
null
null
null
"""Database equivalence tests. NOTE: These do not test against the format 1 database engine, as schevo.database.equivalent is engine-agnostic and some of these tests depend on field types that may only be used in format 2 databases.""" # Copyright (c) 2001-2009 ElevenCraft Inc. # See LICENSE for details. from textwrap import dedent from schevo.backend import backends from schevo.database import equivalent from schevo.test import CreatesSchema, ComparesDatabases # Make sure we can import the testschema_equivalent_* packages. import os import sys tests_path = os.path.dirname(os.path.abspath(__file__)) if tests_path not in sys.path: sys.path.insert(0, tests_path) class TestAllEquivalenceGood(ComparesDatabases): schemata = 'testschema_equivalent_good' class TestAllEquivalenceBad(ComparesDatabases): schemata = 'testschema_equivalent_bad' expected_failure = True class TestIsEquivalent(CreatesSchema): body = ''' class Foo(E.Entity): """Unicode field, with key.""" name = f.string() _key(name) _initial_priority = 100 _initial = [ (u'one', ), (u'two', ), (u'three', ), ] class Far(E.Entity): """Integer field, no keys.""" number = f.integer() _initial_priority = 100 _initial = [ (1, ), (2, ), (3, ), (3, ), (3, ), ] class Faz(E.Entity): """Entity field, no keys.""" foo = f.entity('Foo') _key(foo) _initial_priority = 90 _initial = [ ((u'one', ), ), ((u'two', ), ), ((u'three', ), ), ] class Fiz(E.Entity): """Entity list field, no key.""" fazs = f.entity_list('Faz') _initial_priority = 80 @extentmethod def _initial(extent, db): db.execute(extent.t.create(fazs=[ db.Faz.findone(foo=db.Foo.findone(name=u'three')), db.Faz.findone(foo=db.Foo.findone(name=u'two')), db.Faz.findone(foo=db.Foo.findone(name=u'one')), ])) db.execute(extent.t.create(fazs=[ db.Faz.findone(foo=db.Foo.findone(name=u'one')), db.Faz.findone(foo=db.Foo.findone(name=u'two')), ])) class FobOne(E.Entity): """Entity, recursing with FobTwo.""" fob_two = f.entity('FobTwo', required=False) @extentmethod def 
_initial(extent, db): fob_one = db.execute(extent.t.create()) fob_two = db.execute(db.FobTwo.t.create(fob_one=fob_one)) db.execute(fob_one.t.update(fob_two=fob_two)) class FobTwo(E.Entity): """See FobOne.""" fob_one = f.entity('FobOne') class EveryField(E.Entity): """Every built-in non-may_store_entities field.""" string = f.string(required=False) bytes = f.bytes(required=False) integer = f.integer(required=False) float = f.float(required=False) money = f.money(required=False) date = f.date(required=False) datetime = f.datetime(required=False) boolean = f.boolean(required=False) _initial = [ (UNASSIGNED, UNASSIGNED, UNASSIGNED, UNASSIGNED, UNASSIGNED, UNASSIGNED, UNASSIGNED, UNASSIGNED, ), (u'string', UNASSIGNED, UNASSIGNED, UNASSIGNED, UNASSIGNED, UNASSIGNED, UNASSIGNED, UNASSIGNED, ), (UNASSIGNED, 'bytes', UNASSIGNED, UNASSIGNED, UNASSIGNED, UNASSIGNED, UNASSIGNED, UNASSIGNED, ), (UNASSIGNED, UNASSIGNED, 42, UNASSIGNED, UNASSIGNED, UNASSIGNED, UNASSIGNED, UNASSIGNED, ), (UNASSIGNED, UNASSIGNED, UNASSIGNED, 42.424242, UNASSIGNED, UNASSIGNED, UNASSIGNED, UNASSIGNED, ), (UNASSIGNED, UNASSIGNED, UNASSIGNED, UNASSIGNED, 42.42, UNASSIGNED, UNASSIGNED, UNASSIGNED, ), (UNASSIGNED, UNASSIGNED, UNASSIGNED, UNASSIGNED, UNASSIGNED, '2005-04-03', UNASSIGNED, UNASSIGNED, ), (UNASSIGNED, UNASSIGNED, UNASSIGNED, UNASSIGNED, UNASSIGNED, UNASSIGNED, '2005-04-03 02:01:00', UNASSIGNED, ), (UNASSIGNED, UNASSIGNED, UNASSIGNED, UNASSIGNED, UNASSIGNED, UNASSIGNED, UNASSIGNED, True, ), ] ''' body2 = ''' class Foo(E.Entity): """Unicode field, with key.""" name = f.string() _key(name) _initial_priority = 100 _initial = [ (u'three', ), (u'two', ), (u'one', ), ] class Far(E.Entity): """Integer field, no keys.""" number = f.integer() _initial_priority = 100 _initial = [ (3, ), (3, ), (3, ), (1, ), (2, ), ] class Faz(E.Entity): """Entity field, with key.""" foo = f.entity('Foo') _key(foo) _initial_priority = 90 _initial = [ ((u'two', ), ), ((u'one', ), ), ((u'three', ), ), ] class 
Fiz(E.Entity): """Entity list field, no key.""" fazs = f.entity_list('Faz') _initial_priority = 80 @extentmethod def _initial(extent, db): db.execute(extent.t.create(fazs=[ db.Faz.findone(foo=db.Foo.findone(name=u'one')), db.Faz.findone(foo=db.Foo.findone(name=u'two')), ])) db.execute(extent.t.create(fazs=[ db.Faz.findone(foo=db.Foo.findone(name=u'three')), db.Faz.findone(foo=db.Foo.findone(name=u'two')), db.Faz.findone(foo=db.Foo.findone(name=u'one')), ])) class FobOne(E.Entity): """Entity, recursing with FobTwo.""" fob_two = f.entity('FobTwo', required=False) @extentmethod def _initial(extent, db): fob_one = db.execute(extent.t.create()) fob_two = db.execute(db.FobTwo.t.create(fob_one=fob_one)) db.execute(fob_one.t.update(fob_two=fob_two)) class FobTwo(E.Entity): """See FobOne.""" fob_one = f.entity('FobOne') class EveryField(E.Entity): """Every built-in non-may_store_entities field.""" string = f.string(required=False) bytes = f.bytes(required=False) integer = f.integer(required=False) float = f.float(required=False) money = f.money(required=False) date = f.date(required=False) datetime = f.datetime(required=False) boolean = f.boolean(required=False) _initial = [ (u'string', UNASSIGNED, UNASSIGNED, UNASSIGNED, UNASSIGNED, UNASSIGNED, UNASSIGNED, UNASSIGNED, ), (UNASSIGNED, UNASSIGNED, UNASSIGNED, UNASSIGNED, UNASSIGNED, UNASSIGNED, UNASSIGNED, UNASSIGNED, ), (UNASSIGNED, UNASSIGNED, 42, UNASSIGNED, UNASSIGNED, UNASSIGNED, UNASSIGNED, UNASSIGNED, ), (UNASSIGNED, UNASSIGNED, UNASSIGNED, 42.424242, UNASSIGNED, UNASSIGNED, UNASSIGNED, UNASSIGNED, ), (UNASSIGNED, 'bytes', UNASSIGNED, UNASSIGNED, UNASSIGNED, UNASSIGNED, UNASSIGNED, UNASSIGNED, ), (UNASSIGNED, UNASSIGNED, UNASSIGNED, UNASSIGNED, UNASSIGNED, UNASSIGNED, '2005-04-03 02:01:00', UNASSIGNED, ), (UNASSIGNED, UNASSIGNED, UNASSIGNED, UNASSIGNED, UNASSIGNED, UNASSIGNED, UNASSIGNED, True, ), (UNASSIGNED, UNASSIGNED, UNASSIGNED, UNASSIGNED, 42.42, UNASSIGNED, UNASSIGNED, UNASSIGNED, ), (UNASSIGNED, UNASSIGNED, 
UNASSIGNED, UNASSIGNED, UNASSIGNED, '2005-04-03', UNASSIGNED, UNASSIGNED, ), ] ''' def test_properly_opened(self): self.open('2') assert db.schema_source != db2.schema_source def test_identical_schema_source_not_required(self): self.open('2') assert equivalent(db, db2, require_identical_schema_source=False) def test_identical_schema_source_required(self): self.open('2') assert not equivalent(db, db2) class BaseDataNotEquivalent(CreatesSchema): def test(self): self.open('2') assert not equivalent(db, db2, require_identical_schema_source=False) class TestDataNotEquivalentString(BaseDataNotEquivalent): body = ''' class Foo(E.Entity): name = f.string() _key(name) _initial = [ ('one', ), ('two', ), ('three', ), ] ''' body2 = ''' class Foo(E.Entity): name = f.string() _key(name) _initial = [ ('four', ), ('two', ), ('one', ), ] ''' class TestDataNotEquivalentBytes(BaseDataNotEquivalent): body = ''' class Foo(E.Entity): thing = f.bytes() _key(thing) _initial = [ ('one', ), ('two', ), ('three', ), ] ''' body2 = ''' class Foo(E.Entity): thing = f.bytes() _key(thing) _initial = [ ('four', ), ('two', ), ('one', ), ] ''' class TestDataNotEquivalentInteger(BaseDataNotEquivalent): body = ''' class Far(E.Entity): """Integer field, no keys.""" number = f.integer() _initial = [ (1, ), (2, ), (3, ), (3, ), (3, ), ] ''' body2 = ''' class Far(E.Entity): """Integer field, no keys.""" number = f.integer() _initial = [ (1, ), (2, ), (3, ), (3, ), (4, ), ] ''' class TestDataNotEquivalentFloat(BaseDataNotEquivalent): body = ''' class Far(E.Entity): number = f.float() _initial = [ (1.1, ), (2.2, ), (3.3, ), (3.3, ), (3.3, ), ] ''' body2 = ''' class Far(E.Entity): number = f.float() _initial = [ (1.1, ), (2.2, ), (3.3, ), (3.3, ), (4.4, ), ] ''' class TestDataNotEquivalentMoney(BaseDataNotEquivalent): body = ''' class Far(E.Entity): amount = f.money() _initial = [ (1.11, ), (2.22, ), (3.33, ), (3.33, ), (3.33, ), ] ''' body2 = ''' class Far(E.Entity): amount = f.money() _initial = [ (1.11, ), 
(2.22, ), (3.33, ), (3.33, ), (4.44, ), ] ''' class TestDataNotEquivalentDate(BaseDataNotEquivalent): body = ''' class Far(E.Entity): """Integer field, no keys.""" date = f.date() _initial = [ ('2001-01-01', ), ('2002-02-02', ), ('2003-03-03', ), ('2003-03-03', ), ('2003-03-03', ), ] ''' body2 = ''' class Far(E.Entity): date = f.date() _initial = [ ('2001-01-01', ), ('2002-02-02', ), ('2003-03-03', ), ('2003-03-03', ), ('2004-04-04', ), ] ''' class TestDataNotEquivalentDatetime(BaseDataNotEquivalent): body = ''' class Far(E.Entity): datetime = f.datetime() _initial = [ ('2001-01-01 01:11:11', ), ('2002-02-02 02:22:22', ), ('2003-03-03 03:33:33', ), ('2003-03-03 03:33:33', ), ('2003-03-03 03:33:33', ), ] ''' body2 = ''' class Far(E.Entity): datetime = f.datetime() _initial = [ ('2001-01-01 01:11:11', ), ('2002-02-02 02:22:22', ), ('2003-03-03 03:33:33', ), ('2003-03-03 03:33:33', ), ('2003-04-04 04:44:44', ), ] ''' class TestDataNotEquivalentBoolean(BaseDataNotEquivalent): body = ''' class Far(E.Entity): flag = f.boolean() _initial = [ (True, ), (False, ), (True, ), (False, ), (True, ), ] ''' body2 = ''' class Far(E.Entity): flag = f.boolean() _initial = [ (True, ), (False, ), (True, ), (False, ), (False, ), ] ''' class TestDataNotEquivalentEntity(BaseDataNotEquivalent): body = ''' class Far(E.Entity): faz = f.entity('Faz') _initial = [ ((u"Faz1", ), ), ((u"Faz2", ), ), ((u"Faz3", ), ), ((u"Faz3", ), ), ((u"Faz3", ), ), ] class Faz(E.Entity): name = f.string() _key(name) _initial = [ (u"Faz1", ), (u"Faz2", ), (u"Faz3", ), ] ''' body2 = ''' class Far(E.Entity): faz = f.entity('Faz') _initial = [ ((u"Faz1", ), ), ((u"Faz2", ), ), ((u"Faz3", ), ), ((u"Faz3", ), ), ((u"Faz2", ), ), ] class Faz(E.Entity): name = f.string() _key(name) _initial = [ (u"Faz2", ), (u"Faz3", ), (u"Faz1", ), ] '''
23.736339
77
0.380777
1,312
17,375
4.95503
0.134909
0.344562
0.443009
0.498385
0.81649
0.792801
0.748654
0.746501
0.716659
0.700354
0
0.04925
0.501007
17,375
731
78
23.76881
0.700577
0.020662
0
0.877256
0
0
0.882585
0.080962
0
0
0
0
0.00722
1
0.00722
false
0
0.01083
0
0.083032
0
0
0
0
null
1
1
1
1
1
1
1
1
1
0
0
1
0
0
0
0
0
0
0
1
0
0
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
12
4a66b41664f08e7d8a1eab7b858ecff619a86839
161
py
Python
django/contrib/gis/db/models/sql/__init__.py
Yuanoung/djg-master
40413f268dd7dc3ca2d48f51c9327928b0b922de
[ "BSD-3-Clause" ]
91
2015-01-05T01:10:51.000Z
2021-09-26T18:01:53.000Z
django/contrib/gis/db/models/sql/__init__.py
eduncan911/django104
710acf976367ffd2b18a307ed61def75f6a460f0
[ "BSD-3-Clause" ]
4
2015-07-05T21:09:37.000Z
2019-09-06T14:34:59.000Z
django/contrib/gis/db/models/sql/__init__.py
eduncan911/django104
710acf976367ffd2b18a307ed61def75f6a460f0
[ "BSD-3-Clause" ]
32
2015-04-03T04:29:45.000Z
2021-09-14T21:36:02.000Z
from django.contrib.gis.db.models.sql.query import AreaField, DistanceField, GeomField, GeoQuery from django.contrib.gis.db.models.sql.where import GeoWhereNode
53.666667
96
0.838509
23
161
5.869565
0.652174
0.148148
0.251852
0.296296
0.459259
0.459259
0.459259
0
0
0
0
0
0.068323
161
2
97
80.5
0.9
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
4ab55fd6665568825b9e6a39ee182a60b7ed648b
44
py
Python
examples/phobos/tests/test_std_demangle.py
kinke/autowrap
2f042df3f292aa39b1da0b9607fbe3424f56ff4a
[ "BSD-3-Clause" ]
47
2019-07-16T10:38:07.000Z
2022-03-30T16:34:24.000Z
examples/phobos/tests/test_std_demangle.py
kinke/autowrap
2f042df3f292aa39b1da0b9607fbe3424f56ff4a
[ "BSD-3-Clause" ]
199
2019-06-17T23:24:40.000Z
2021-06-16T16:41:36.000Z
examples/phobos/tests/test_std_demangle.py
kinke/autowrap
2f042df3f292aa39b1da0b9607fbe3424f56ff4a
[ "BSD-3-Clause" ]
7
2019-09-13T18:03:49.000Z
2022-01-17T03:53:00.000Z
def test_import(): import std_demangle
11
23
0.727273
6
44
5
0.833333
0
0
0
0
0
0
0
0
0
0
0
0.204545
44
3
24
14.666667
0.857143
0
0
0
0
0
0
0
0
0
0
0
0
1
0.5
true
0
1
0
1.5
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
1
0
1
0
0
7
4362e1d51aaa59cab6a9064eae21d22758d43fac
30,530
py
Python
src/containerapp/azext_containerapp/_clients.py
haroonf/azure-cli-extensions
61c044d34c224372f186934fa7c9313f1cd3a525
[ "MIT" ]
null
null
null
src/containerapp/azext_containerapp/_clients.py
haroonf/azure-cli-extensions
61c044d34c224372f186934fa7c9313f1cd3a525
[ "MIT" ]
null
null
null
src/containerapp/azext_containerapp/_clients.py
haroonf/azure-cli-extensions
61c044d34c224372f186934fa7c9313f1cd3a525
[ "MIT" ]
null
null
null
# -------------------------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. # -------------------------------------------------------------------------------------------- # pylint: disable=line-too-long, super-with-arguments, too-many-instance-attributes, consider-using-f-string, no-else-return, no-self-use import json import time import sys from azure.cli.core.util import send_raw_request from azure.cli.core.commands.client_factory import get_subscription_id from knack.log import get_logger logger = get_logger(__name__) API_VERSION = "2021-03-01" PREVIEW_API_VERSION = "2022-01-01-preview" STABLE_API_VERSION = "2022-03-01" POLLING_TIMEOUT = 60 # how many seconds before exiting POLLING_SECONDS = 2 # how many seconds between requests class PollingAnimation(): def __init__(self): self.tickers = ["/", "|", "\\", "-", "/", "|", "\\", "-"] self.currTicker = 0 def tick(self): sys.stdout.write('\r') sys.stdout.write(self.tickers[self.currTicker] + " Running ..") sys.stdout.flush() self.currTicker += 1 self.currTicker = self.currTicker % len(self.tickers) def flush(self): sys.stdout.flush() sys.stdout.write('\r') sys.stdout.write("\033[K") def poll(cmd, request_url, poll_if_status): # pylint: disable=inconsistent-return-statements try: start = time.time() end = time.time() + POLLING_TIMEOUT animation = PollingAnimation() animation.tick() r = send_raw_request(cmd.cli_ctx, "GET", request_url) while r.status_code in [200, 201] and start < end: time.sleep(POLLING_SECONDS) animation.tick() r = send_raw_request(cmd.cli_ctx, "GET", request_url) r2 = r.json() if "properties" not in r2 or "provisioningState" not in r2["properties"] or not r2["properties"]["provisioningState"].lower() == poll_if_status: break start = time.time() animation.flush() return r.json() except Exception as e: # pylint: 
disable=broad-except animation.flush() delete_statuses = ["scheduledfordelete", "cancelled"] if poll_if_status not in delete_statuses: # Catch "not found" errors if polling for delete raise e class ContainerAppClient(): @classmethod def create_or_update(cls, cmd, resource_group_name, name, container_app_envelope, no_wait=False): management_hostname = cmd.cli_ctx.cloud.endpoints.resource_manager api_version = PREVIEW_API_VERSION sub_id = get_subscription_id(cmd.cli_ctx) url_fmt = "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.App/containerApps/{}?api-version={}" request_url = url_fmt.format( management_hostname.strip('/'), sub_id, resource_group_name, name, api_version) r = send_raw_request(cmd.cli_ctx, "PUT", request_url, body=json.dumps(container_app_envelope)) if no_wait: return r.json() elif r.status_code == 201: url_fmt = "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.App/containerApps/{}?api-version={}" request_url = url_fmt.format( management_hostname.strip('/'), sub_id, resource_group_name, name, api_version) return poll(cmd, request_url, "inprogress") return r.json() @classmethod def update(cls, cmd, resource_group_name, name, container_app_envelope, no_wait=False): management_hostname = cmd.cli_ctx.cloud.endpoints.resource_manager api_version = STABLE_API_VERSION sub_id = get_subscription_id(cmd.cli_ctx) url_fmt = "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.App/containerApps/{}?api-version={}" request_url = url_fmt.format( management_hostname.strip('/'), sub_id, resource_group_name, name, api_version) r = send_raw_request(cmd.cli_ctx, "PATCH", request_url, body=json.dumps(container_app_envelope)) if no_wait: return r.json() elif r.status_code == 202: url_fmt = "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.App/containerApps/{}?api-version={}" request_url = url_fmt.format( management_hostname.strip('/'), sub_id, resource_group_name, name, api_version) return poll(cmd, request_url, "inprogress") return 
r.json() @classmethod def delete(cls, cmd, resource_group_name, name, no_wait=False): management_hostname = cmd.cli_ctx.cloud.endpoints.resource_manager api_version = PREVIEW_API_VERSION sub_id = get_subscription_id(cmd.cli_ctx) url_fmt = "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.App/containerApps/{}?api-version={}" request_url = url_fmt.format( management_hostname.strip('/'), sub_id, resource_group_name, name, api_version) r = send_raw_request(cmd.cli_ctx, "DELETE", request_url) if no_wait: return # API doesn't return JSON (it returns no content) elif r.status_code in [200, 201, 202, 204]: url_fmt = "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.App/containerApps/{}?api-version={}" request_url = url_fmt.format( management_hostname.strip('/'), sub_id, resource_group_name, name, api_version) if r.status_code == 202: from azure.cli.core.azclierror import ResourceNotFoundError try: poll(cmd, request_url, "cancelled") except ResourceNotFoundError: pass logger.warning('Containerapp successfully deleted') @classmethod def show(cls, cmd, resource_group_name, name): management_hostname = cmd.cli_ctx.cloud.endpoints.resource_manager api_version = PREVIEW_API_VERSION sub_id = get_subscription_id(cmd.cli_ctx) url_fmt = "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.App/containerApps/{}?api-version={}" request_url = url_fmt.format( management_hostname.strip('/'), sub_id, resource_group_name, name, api_version) r = send_raw_request(cmd.cli_ctx, "GET", request_url) return r.json() @classmethod def list_by_subscription(cls, cmd, formatter=lambda x: x): app_list = [] management_hostname = cmd.cli_ctx.cloud.endpoints.resource_manager api_version = PREVIEW_API_VERSION sub_id = get_subscription_id(cmd.cli_ctx) request_url = "{}/subscriptions/{}/providers/Microsoft.App/containerApps?api-version={}".format( management_hostname.strip('/'), sub_id, api_version) r = send_raw_request(cmd.cli_ctx, "GET", request_url) j = r.json() for app in 
j["value"]: formatted = formatter(app) app_list.append(formatted) while j.get("nextLink") is not None: request_url = j["nextLink"] r = send_raw_request(cmd.cli_ctx, "GET", request_url) j = r.json() for app in j["value"]: formatted = formatter(app) app_list.append(formatted) return app_list @classmethod def list_by_resource_group(cls, cmd, resource_group_name, formatter=lambda x: x): app_list = [] management_hostname = cmd.cli_ctx.cloud.endpoints.resource_manager api_version = PREVIEW_API_VERSION sub_id = get_subscription_id(cmd.cli_ctx) url_fmt = "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.App/containerApps?api-version={}" request_url = url_fmt.format( management_hostname.strip('/'), sub_id, resource_group_name, api_version) r = send_raw_request(cmd.cli_ctx, "GET", request_url) j = r.json() for app in j["value"]: formatted = formatter(app) app_list.append(formatted) while j.get("nextLink") is not None: request_url = j["nextLink"] r = send_raw_request(cmd.cli_ctx, "GET", request_url) j = r.json() for app in j["value"]: formatted = formatter(app) app_list.append(formatted) return app_list @classmethod def list_secrets(cls, cmd, resource_group_name, name): management_hostname = cmd.cli_ctx.cloud.endpoints.resource_manager api_version = PREVIEW_API_VERSION sub_id = get_subscription_id(cmd.cli_ctx) url_fmt = "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.App/containerApps/{}/listSecrets?api-version={}" request_url = url_fmt.format( management_hostname.strip('/'), sub_id, resource_group_name, name, api_version) r = send_raw_request(cmd.cli_ctx, "POST", request_url, body=None) return r.json() @classmethod def list_revisions(cls, cmd, resource_group_name, name, formatter=lambda x: x): revisions_list = [] management_hostname = cmd.cli_ctx.cloud.endpoints.resource_manager api_version = PREVIEW_API_VERSION sub_id = get_subscription_id(cmd.cli_ctx) url_fmt = 
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.App/containerApps/{}/revisions?api-version={}" request_url = url_fmt.format( management_hostname.strip('/'), sub_id, resource_group_name, name, api_version) r = send_raw_request(cmd.cli_ctx, "GET", request_url) j = r.json() for app in j["value"]: formatted = formatter(app) revisions_list.append(formatted) while j.get("nextLink") is not None: request_url = j["nextLink"] r = send_raw_request(cmd.cli_ctx, "GET", request_url) j = r.json() for app in j["value"]: formatted = formatter(app) revisions_list.append(formatted) return revisions_list @classmethod def show_revision(cls, cmd, resource_group_name, container_app_name, name): management_hostname = cmd.cli_ctx.cloud.endpoints.resource_manager api_version = PREVIEW_API_VERSION sub_id = get_subscription_id(cmd.cli_ctx) url_fmt = "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.App/containerApps/{}/revisions/{}?api-version={}" request_url = url_fmt.format( management_hostname.strip('/'), sub_id, resource_group_name, container_app_name, name, api_version) r = send_raw_request(cmd.cli_ctx, "GET", request_url) return r.json() @classmethod def restart_revision(cls, cmd, resource_group_name, container_app_name, name): management_hostname = cmd.cli_ctx.cloud.endpoints.resource_manager api_version = PREVIEW_API_VERSION sub_id = get_subscription_id(cmd.cli_ctx) url_fmt = "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.App/containerApps/{}/revisions/{}/restart?api-version={}" request_url = url_fmt.format( management_hostname.strip('/'), sub_id, resource_group_name, container_app_name, name, api_version) r = send_raw_request(cmd.cli_ctx, "POST", request_url) return r.json() @classmethod def activate_revision(cls, cmd, resource_group_name, container_app_name, name): management_hostname = cmd.cli_ctx.cloud.endpoints.resource_manager api_version = PREVIEW_API_VERSION sub_id = get_subscription_id(cmd.cli_ctx) url_fmt = 
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.App/containerApps/{}/revisions/{}/activate?api-version={}" request_url = url_fmt.format( management_hostname.strip('/'), sub_id, resource_group_name, container_app_name, name, api_version) r = send_raw_request(cmd.cli_ctx, "POST", request_url) return r.json() @classmethod def deactivate_revision(cls, cmd, resource_group_name, container_app_name, name): management_hostname = cmd.cli_ctx.cloud.endpoints.resource_manager api_version = PREVIEW_API_VERSION sub_id = get_subscription_id(cmd.cli_ctx) url_fmt = "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.App/containerApps/{}/revisions/{}/deactivate?api-version={}" request_url = url_fmt.format( management_hostname.strip('/'), sub_id, resource_group_name, container_app_name, name, api_version) r = send_raw_request(cmd.cli_ctx, "POST", request_url) return r.json() @classmethod def list_replicas(cls, cmd, resource_group_name, container_app_name, revision_name): replica_list = [] management_hostname = cmd.cli_ctx.cloud.endpoints.resource_manager sub_id = get_subscription_id(cmd.cli_ctx) url_fmt = "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.App/containerApps/{}/revisions/{}/replicas?api-version={}" request_url = url_fmt.format( management_hostname.strip('/'), sub_id, resource_group_name, container_app_name, revision_name, STABLE_API_VERSION) r = send_raw_request(cmd.cli_ctx, "GET", request_url) j = r.json() for replica in j["value"]: replica_list.append(replica) while j.get("nextLink") is not None: request_url = j["nextLink"] r = send_raw_request(cmd.cli_ctx, "GET", request_url) j = r.json() for replica in j["value"]: replica_list.append(replica) return replica_list @classmethod def get_replica(cls, cmd, resource_group_name, container_app_name, revision_name, replica_name): management_hostname = cmd.cli_ctx.cloud.endpoints.resource_manager sub_id = get_subscription_id(cmd.cli_ctx) url_fmt = 
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.App/containerApps/{}/revisions/{}/replicas/{}/?api-version={}" request_url = url_fmt.format( management_hostname.strip('/'), sub_id, resource_group_name, container_app_name, revision_name, replica_name, STABLE_API_VERSION) r = send_raw_request(cmd.cli_ctx, "GET", request_url) return r.json() @classmethod def get_auth_token(cls, cmd, resource_group_name, name): management_hostname = cmd.cli_ctx.cloud.endpoints.resource_manager sub_id = get_subscription_id(cmd.cli_ctx) url_fmt = "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.App/containerApps/{}/authtoken?api-version={}" request_url = url_fmt.format( management_hostname.strip('/'), sub_id, resource_group_name, name, STABLE_API_VERSION) r = send_raw_request(cmd.cli_ctx, "POST", request_url) return r.json() class ManagedEnvironmentClient(): @classmethod def create(cls, cmd, resource_group_name, name, managed_environment_envelope, no_wait=False): management_hostname = cmd.cli_ctx.cloud.endpoints.resource_manager api_version = PREVIEW_API_VERSION sub_id = get_subscription_id(cmd.cli_ctx) url_fmt = "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.App/managedEnvironments/{}?api-version={}" request_url = url_fmt.format( management_hostname.strip('/'), sub_id, resource_group_name, name, api_version) r = send_raw_request(cmd.cli_ctx, "PUT", request_url, body=json.dumps(managed_environment_envelope)) if no_wait: return r.json() elif r.status_code == 201: url_fmt = "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.App/managedEnvironments/{}?api-version={}" request_url = url_fmt.format( management_hostname.strip('/'), sub_id, resource_group_name, name, api_version) return poll(cmd, request_url, "waiting") return r.json() @classmethod def update(cls, cmd, resource_group_name, name, managed_environment_envelope, no_wait=False): management_hostname = cmd.cli_ctx.cloud.endpoints.resource_manager api_version = PREVIEW_API_VERSION sub_id = 
get_subscription_id(cmd.cli_ctx) url_fmt = "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.App/managedEnvironments/{}?api-version={}" request_url = url_fmt.format( management_hostname.strip('/'), sub_id, resource_group_name, name, api_version) r = send_raw_request(cmd.cli_ctx, "PATCH", request_url, body=json.dumps(managed_environment_envelope)) if no_wait: return r.json() elif r.status_code == 201: url_fmt = "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.App/managedEnvironments/{}?api-version={}" request_url = url_fmt.format( management_hostname.strip('/'), sub_id, resource_group_name, name, api_version) return poll(cmd, request_url, "waiting") return r.json() @classmethod def delete(cls, cmd, resource_group_name, name, no_wait=False): management_hostname = cmd.cli_ctx.cloud.endpoints.resource_manager api_version = PREVIEW_API_VERSION sub_id = get_subscription_id(cmd.cli_ctx) url_fmt = "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.App/managedEnvironments/{}?api-version={}" request_url = url_fmt.format( management_hostname.strip('/'), sub_id, resource_group_name, name, api_version) r = send_raw_request(cmd.cli_ctx, "DELETE", request_url) if no_wait: return # API doesn't return JSON (it returns no content) elif r.status_code in [200, 201, 202, 204]: url_fmt = "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.App/managedEnvironments/{}?api-version={}" request_url = url_fmt.format( management_hostname.strip('/'), sub_id, resource_group_name, name, api_version) if r.status_code == 202: from azure.cli.core.azclierror import ResourceNotFoundError try: poll(cmd, request_url, "scheduledfordelete") except ResourceNotFoundError: pass logger.warning('Containerapp environment successfully deleted') return @classmethod def show(cls, cmd, resource_group_name, name): management_hostname = cmd.cli_ctx.cloud.endpoints.resource_manager api_version = PREVIEW_API_VERSION sub_id = get_subscription_id(cmd.cli_ctx) url_fmt = 
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.App/managedEnvironments/{}?api-version={}" request_url = url_fmt.format( management_hostname.strip('/'), sub_id, resource_group_name, name, api_version) r = send_raw_request(cmd.cli_ctx, "GET", request_url) return r.json() @classmethod def list_by_subscription(cls, cmd, formatter=lambda x: x): env_list = [] management_hostname = cmd.cli_ctx.cloud.endpoints.resource_manager api_version = PREVIEW_API_VERSION sub_id = get_subscription_id(cmd.cli_ctx) request_url = "{}/subscriptions/{}/providers/Microsoft.App/managedEnvironments?api-version={}".format( management_hostname.strip('/'), sub_id, api_version) r = send_raw_request(cmd.cli_ctx, "GET", request_url) j = r.json() for env in j["value"]: formatted = formatter(env) env_list.append(formatted) while j.get("nextLink") is not None: request_url = j["nextLink"] r = send_raw_request(cmd.cli_ctx, "GET", request_url) j = r.json() for env in j["value"]: formatted = formatter(env) env_list.append(formatted) return env_list @classmethod def list_by_resource_group(cls, cmd, resource_group_name, formatter=lambda x: x): env_list = [] management_hostname = cmd.cli_ctx.cloud.endpoints.resource_manager api_version = PREVIEW_API_VERSION sub_id = get_subscription_id(cmd.cli_ctx) url_fmt = "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.App/managedEnvironments?api-version={}" request_url = url_fmt.format( management_hostname.strip('/'), sub_id, resource_group_name, api_version) r = send_raw_request(cmd.cli_ctx, "GET", request_url) j = r.json() for env in j["value"]: formatted = formatter(env) env_list.append(formatted) while j.get("nextLink") is not None: request_url = j["nextLink"] r = send_raw_request(cmd.cli_ctx, "GET", request_url) j = r.json() for env in j["value"]: formatted = formatter(env) env_list.append(formatted) return env_list class GitHubActionClient(): @classmethod def create_or_update(cls, cmd, resource_group_name, name, github_action_envelope, 
headers, no_wait=False): management_hostname = cmd.cli_ctx.cloud.endpoints.resource_manager api_version = STABLE_API_VERSION sub_id = get_subscription_id(cmd.cli_ctx) url_fmt = "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.App/containerApps/{}/sourcecontrols/current?api-version={}" request_url = url_fmt.format( management_hostname.strip('/'), sub_id, resource_group_name, name, api_version) r = send_raw_request(cmd.cli_ctx, "PUT", request_url, body=json.dumps(github_action_envelope), headers=headers) if no_wait: return r.json() elif r.status_code == 201: url_fmt = "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.App/containerApps/{}/sourcecontrols/current?api-version={}" request_url = url_fmt.format( management_hostname.strip('/'), sub_id, resource_group_name, name, api_version) return poll(cmd, request_url, "inprogress") return r.json() @classmethod def show(cls, cmd, resource_group_name, name): management_hostname = cmd.cli_ctx.cloud.endpoints.resource_manager api_version = STABLE_API_VERSION sub_id = get_subscription_id(cmd.cli_ctx) url_fmt = "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.App/containerApps/{}/sourcecontrols/current?api-version={}" request_url = url_fmt.format( management_hostname.strip('/'), sub_id, resource_group_name, name, api_version) r = send_raw_request(cmd.cli_ctx, "GET", request_url) return r.json() @classmethod def delete(cls, cmd, resource_group_name, name, headers, no_wait=False): management_hostname = cmd.cli_ctx.cloud.endpoints.resource_manager api_version = PREVIEW_API_VERSION sub_id = get_subscription_id(cmd.cli_ctx) url_fmt = "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.App/containerApps/{}/sourcecontrols/current?api-version={}" request_url = url_fmt.format( management_hostname.strip('/'), sub_id, resource_group_name, name, api_version) r = send_raw_request(cmd.cli_ctx, "DELETE", request_url, headers=headers) if no_wait: return # API doesn't return JSON (it returns no content) elif 
r.status_code in [200, 201, 202, 204]: url_fmt = "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.App/containerApps/{}/sourcecontrols/current?api-version={}" request_url = url_fmt.format( management_hostname.strip('/'), sub_id, resource_group_name, name, api_version) if r.status_code == 202: from azure.cli.core.azclierror import ResourceNotFoundError try: poll(cmd, request_url, "cancelled") except ResourceNotFoundError: pass logger.warning('Containerapp github action successfully deleted') return class DaprComponentClient(): @classmethod def create_or_update(cls, cmd, resource_group_name, environment_name, name, dapr_component_envelope, no_wait=False): management_hostname = cmd.cli_ctx.cloud.endpoints.resource_manager api_version = PREVIEW_API_VERSION sub_id = get_subscription_id(cmd.cli_ctx) url_fmt = "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.App/managedEnvironments/{}/daprComponents/{}?api-version={}" request_url = url_fmt.format( management_hostname.strip('/'), sub_id, resource_group_name, environment_name, name, api_version) r = send_raw_request(cmd.cli_ctx, "PUT", request_url, body=json.dumps(dapr_component_envelope)) if no_wait: return r.json() elif r.status_code == 201: url_fmt = "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.App/managedEnvironments/{}/daprComponents/{}?api-version={}" request_url = url_fmt.format( management_hostname.strip('/'), sub_id, resource_group_name, environment_name, name, api_version) return poll(cmd, request_url, "inprogress") return r.json() @classmethod def delete(cls, cmd, resource_group_name, environment_name, name, no_wait=False): management_hostname = cmd.cli_ctx.cloud.endpoints.resource_manager api_version = PREVIEW_API_VERSION sub_id = get_subscription_id(cmd.cli_ctx) url_fmt = "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.App/managedEnvironments/{}/daprComponents/{}?api-version={}" request_url = url_fmt.format( management_hostname.strip('/'), sub_id, resource_group_name, 
environment_name, name, api_version) r = send_raw_request(cmd.cli_ctx, "DELETE", request_url) if no_wait: return # API doesn't return JSON (it returns no content) elif r.status_code in [200, 201, 202, 204]: url_fmt = "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.App/managedEnvironments/{}/daprComponents/{}?api-version={}" request_url = url_fmt.format( management_hostname.strip('/'), sub_id, resource_group_name, environment_name, name, api_version) if r.status_code == 202: from azure.cli.core.azclierror import ResourceNotFoundError try: poll(cmd, request_url, "cancelled") except ResourceNotFoundError: pass logger.warning('Dapr component successfully deleted') return @classmethod def show(cls, cmd, resource_group_name, environment_name, name): management_hostname = cmd.cli_ctx.cloud.endpoints.resource_manager api_version = PREVIEW_API_VERSION sub_id = get_subscription_id(cmd.cli_ctx) url_fmt = "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.App/managedEnvironments/{}/daprComponents/{}?api-version={}" request_url = url_fmt.format( management_hostname.strip('/'), sub_id, resource_group_name, environment_name, name, api_version) r = send_raw_request(cmd.cli_ctx, "GET", request_url) return r.json() @classmethod def list(cls, cmd, resource_group_name, environment_name, formatter=lambda x: x): app_list = [] management_hostname = cmd.cli_ctx.cloud.endpoints.resource_manager api_version = PREVIEW_API_VERSION sub_id = get_subscription_id(cmd.cli_ctx) request_url = "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.App/managedEnvironments/{}/daprComponents?api-version={}".format( management_hostname.strip('/'), sub_id, resource_group_name, environment_name, api_version) r = send_raw_request(cmd.cli_ctx, "GET", request_url) j = r.json() for app in j["value"]: formatted = formatter(app) app_list.append(formatted) while j.get("nextLink") is not None: request_url = j["nextLink"] r = send_raw_request(cmd.cli_ctx, "GET", request_url) j = r.json() for 
app in j["value"]: formatted = formatter(app) app_list.append(formatted) return app_list
38.743655
156
0.610875
3,363
30,530
5.256913
0.065715
0.072968
0.047344
0.062334
0.900277
0.899316
0.888512
0.883308
0.880762
0.87635
0
0.00544
0.277432
30,530
787
157
38.792884
0.795966
0.02794
0
0.863429
0
0.015175
0.159362
0.133369
0
0
0
0
0
1
0.048558
false
0.00607
0.015175
0
0.138088
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
4386ca05bc59a6799fa4e0b3c4c86da501c39da3
2,503
py
Python
experiment_scripts/paper_runs/big_batch/make_mnist.py
jacqueschen1/adam_sgd_heavy_tails
d4ecab6d460fb44ac3fd2b865641b8e47f3848ee
[ "Apache-2.0" ]
1
2021-12-02T21:47:46.000Z
2021-12-02T21:47:46.000Z
experiment_scripts/paper_runs/big_batch/make_mnist.py
jacqueschen1/adam_sgd_heavy_tails
d4ecab6d460fb44ac3fd2b865641b8e47f3848ee
[ "Apache-2.0" ]
null
null
null
experiment_scripts/paper_runs/big_batch/make_mnist.py
jacqueschen1/adam_sgd_heavy_tails
d4ecab6d460fb44ac3fd2b865641b8e47f3848ee
[ "Apache-2.0" ]
null
null
null
# 2 hours normal gpu import numpy as np import explib def merge_grids(*grids): return sorted(list(set.union(*[set(grid) for grid in grids]))) EXPERIMENTS = [] EXPERIMENTS_SGD = [ { "loss_func": "logloss", "metrics": ["accuracy"], "dataset": "mnist", "model": "lenet5", "batch_size": b_size, "max_epoch": 200, "seed": seed, "opt": { "name": "SGD", "alpha": alpha, }, "drop_last": True, "final_reruns": True, } for alpha in np.logspace(-6, 2, num=9, base=10) for seed in range(5) for b_size in [4096, 8192, 16384] ] EXPERIMENTS_ADAM = [ { "loss_func": "logloss", "metrics": ["accuracy"], "dataset": "mnist", "model": "lenet5", "batch_size": b_size, "max_epoch": 200, "seed": seed, "opt": { "name": "Adam", "alpha": alpha, "b1": 0.9, "b2": 0.999, }, "drop_last": True, "final_reruns": True, } for alpha in np.logspace(-6, 2, num=9, base=10) for seed in range(5) for b_size in [4096, 8192, 16384] ] EXPERIMENTS.extend(EXPERIMENTS_SGD) EXPERIMENTS.extend(EXPERIMENTS_ADAM) EXPERIMENTS_SGD = [ { "loss_func": "logloss", "metrics": ["accuracy"], "dataset": "mnist", "model": "lenet5", "batch_size": b_size, "max_epoch": 200, "seed": seed, "opt": { "name": "SGD", "alpha": alpha, "momentum": 0.9, }, "drop_last": True, "final_reruns": True, } for alpha in np.logspace(-6, 2, num=9, base=10) for seed in range(5) for b_size in [60000] ] EXPERIMENTS_ADAM = [ { "loss_func": "logloss", "metrics": ["accuracy"], "dataset": "mnist", "model": "lenet5", "batch_size": b_size, "max_epoch": 200, "seed": seed, "opt": { "name": "Adam", "alpha": alpha, "b1": 0, "b2": 0.999, }, "drop_last": True, "final_reruns": True, } for alpha in np.logspace(-6, 2, num=9, base=10) for seed in range(5) for b_size in [60000] ] EXPERIMENTS.extend(EXPERIMENTS_SGD) EXPERIMENTS.extend(EXPERIMENTS_ADAM) if __name__ == "__main__": explib.expmaker.experiment_maker_cli( descr="all experiments", experiments=EXPERIMENTS )
22.54955
66
0.502197
281
2,503
4.30605
0.259786
0.033058
0.049587
0.072727
0.826446
0.826446
0.826446
0.826446
0.740496
0.740496
0
0.056935
0.340392
2,503
110
67
22.754545
0.675954
0.007191
0
0.714286
0
0
0.19855
0
0
0
0
0
0
1
0.010204
false
0
0.020408
0.010204
0.040816
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
60bd8254f2c37008fa33171db286787ced8c7f61
2,828
py
Python
project_euler/problem_08/sol2.py
JB1959/Python
b6ca263983933c3ecc06ed0083dd11b6faf870c8
[ "MIT" ]
14
2020-10-03T05:43:48.000Z
2021-11-01T21:02:26.000Z
project_euler/problem_08/sol2.py
JB1959/Python
b6ca263983933c3ecc06ed0083dd11b6faf870c8
[ "MIT" ]
3
2020-06-08T07:03:15.000Z
2020-06-08T08:41:22.000Z
project_euler/problem_08/sol2.py
JB1959/Python
b6ca263983933c3ecc06ed0083dd11b6faf870c8
[ "MIT" ]
12
2020-10-03T05:44:19.000Z
2022-01-16T05:37:54.000Z
""" The four adjacent digits in the 1000-digit number that have the greatest product are 9 × 9 × 8 × 9 = 5832. 73167176531330624919225119674426574742355349194934 96983520312774506326239578318016984801869478851843 85861560789112949495459501737958331952853208805511 12540698747158523863050715693290963295227443043557 66896648950445244523161731856403098711121722383113 62229893423380308135336276614282806444486645238749 30358907296290491560440772390713810515859307960866 70172427121883998797908792274921901699720888093776 65727333001053367881220235421809751254540594752243 52584907711670556013604839586446706324415722155397 53697817977846174064955149290862569321978468622482 83972241375657056057490261407972968652414535100474 82166370484403199890008895243450658541227588666881 16427171479924442928230863465674813919123162824586 17866458359124566529476545682848912883142607690042 24219022671055626321111109370544217506941658960408 07198403850962455444362981230987879927244284909188 84580156166097919133875499200524063689912560717606 05886116467109405077541002256983155200055935729725 71636269561882670428252483600823257530420752963450 Find the thirteen adjacent digits in the 1000-digit number that have the greatest product. What is the value of this product? 
""" from functools import reduce N = ( "73167176531330624919225119674426574742355349194934" "96983520312774506326239578318016984801869478851843" "85861560789112949495459501737958331952853208805511" "12540698747158523863050715693290963295227443043557" "66896648950445244523161731856403098711121722383113" "62229893423380308135336276614282806444486645238749" "30358907296290491560440772390713810515859307960866" "70172427121883998797908792274921901699720888093776" "65727333001053367881220235421809751254540594752243" "52584907711670556013604839586446706324415722155397" "53697817977846174064955149290862569321978468622482" "83972241375657056057490261407972968652414535100474" "82166370484403199890008895243450658541227588666881" "16427171479924442928230863465674813919123162824586" "17866458359124566529476545682848912883142607690042" "24219022671055626321111109370544217506941658960408" "07198403850962455444362981230987879927244284909188" "84580156166097919133875499200524063689912560717606" "05886116467109405077541002256983155200055935729725" "71636269561882670428252483600823257530420752963450" ) def solution(n): """Find the thirteen adjacent digits in the 1000-digit number n that have the greatest product and returns it. >>> solution(N) 23514624000 """ return max( [ reduce(lambda x, y: int(x) * int(y), n[i : i + 13]) for i in range(len(n) - 12) ] ) if __name__ == "__main__": print(solution(str(N)))
38.739726
77
0.853607
143
2,828
16.846154
0.503497
0.017435
0.019925
0.023661
0.917393
0.9066
0.9066
0.9066
0.9066
0.9066
0
0.806899
0.108204
2,828
72
78
39.277778
0.147105
0.491867
0
0
0
0
0.716418
0.710732
0
1
0
0
0
1
0.03125
false
0
0.03125
0
0.09375
0.03125
0
0
1
null
0
0
0
1
1
1
1
1
1
0
1
0
0
0
0
0
1
0
0
0
0
0
1
1
null
1
0
0
0
0
0
0
0
0
0
0
0
0
12
71c7bc51be733dcc6488b8e9360c3e9ba9852f05
765
py
Python
quadboost/weak_learner/__init__.py
jsleb333/quadboost
b4b980ff4af727d5cec0348484a34f34e82168cd
[ "MIT" ]
1
2018-08-27T22:56:30.000Z
2018-08-27T22:56:30.000Z
quadboost/weak_learner/__init__.py
jsleb333/quadboost
b4b980ff4af727d5cec0348484a34f34e82168cd
[ "MIT" ]
null
null
null
quadboost/weak_learner/__init__.py
jsleb333/quadboost
b4b980ff4af727d5cec0348484a34f34e82168cd
[ "MIT" ]
null
null
null
try: from weak_learner_base import _WeakLearnerBase, _Cloner from ridge import * from svr import * from decision_stump import MulticlassDecisionStump from decision_tree import MulticlassDecisionTree from random_convolution import RandomConvolution, Filters, LocalFilters, WeightFromBankGenerator, center_weight, normalize_weight, reduce_weight except ModuleNotFoundError: from .weak_learner_base import _WeakLearnerBase, _Cloner from .ridge import * from .svr import * from .decision_stump import MulticlassDecisionStump from .decision_tree import MulticlassDecisionTree from .random_convolution import RandomConvolution, Filters, LocalFilters, WeightFromBankGenerator, center_weight, normalize_weight, reduce_weight
51
149
0.818301
77
765
7.87013
0.350649
0.066007
0.049505
0.062706
0.953795
0.953795
0.953795
0.953795
0.953795
0.953795
0
0
0.147712
765
14
150
54.642857
0.929448
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.857143
0
0.857143
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
9
e0aaa3341f8ad49e202cd3e75ad3d34b3e922bc2
25,530
py
Python
StartRace/StartRace.py
tejasurya/LASER-BASED-AUTOMATIC-RUN-TIMING-DEVICE
ed8c3309e189b52981cd70709a1162996367ea34
[ "MIT" ]
null
null
null
StartRace/StartRace.py
tejasurya/LASER-BASED-AUTOMATIC-RUN-TIMING-DEVICE
ed8c3309e189b52981cd70709a1162996367ea34
[ "MIT" ]
null
null
null
StartRace/StartRace.py
tejasurya/LASER-BASED-AUTOMATIC-RUN-TIMING-DEVICE
ed8c3309e189b52981cd70709a1162996367ea34
[ "MIT" ]
null
null
null
import time import serial import socket import sys import serial.tools.list_ports import MySQLdb as sql from selenium import webdriver track=10 c=0 x=0 z=0 t = False f = 5 g = 5 h = 5 i = 5 j = 5 k = 5 l = 5 m = 5 n = 5 o = 5 ab = 4 ba = 4 ac = 4 ca = 4 ad = 4 da = 4 ae = 4 ea = 4 af = 4 fa = 4 port = "" port1 = "" a = webdriver.Firefox() a.get("http://localhost/TestStopwatch/index.html") rank = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]; db=sql.connect("localhost","root","","automation") cursor=db.cursor() query = "select * from tracks;" cursor.execute(query) result=cursor.fetchone() track=int(result[0]) ports=list(serial.tools.list_ports.comports()) if ports[0][1].find('Silicon Labs') != -1: port = ports[0][0] elif ports[1][1].find('Silicon Labs') != -1: port = ports[1][0] if ports[0][1].find('USB Serial Port') != -1: port1 = ports[0][0] elif ports[1][1].find('USB Serial Port') != -1: port1 = ports[1][0] #connect Starting Arduino to right side and Finish arduino to the Left side ser = serial.Serial(port1, 9600) cer = serial.Serial(port, 9600) print "Started" if track==1: while x < 5000: x+=1 if ser.read() == 'a' and ab == 4: print "first track false start" t=True ab = 1 elif track==2: while x < 5000: x+=1 if ser.read() == 'a' and ab == 4: print "first track false start" t=True ab = 1 if ser.read() == 'c' and ba == 4: print "second track false start" t=True ba = 2 elif track==3: while x < 5000: x+=1 if ser.read() == 'a' and ab == 4: print "first track false start" t=True ab = 1 if ser.read() == 'c' and ba == 4: print "second track false start" t=True ba = 2 if ser.read() == 'e' and ac == 4: print "third track false start" t=True ac = 1 elif track==4: while x < 5000: x+=1 if ser.read() == 'a' and ab == 4: print "first track false start" t=True ab = 1 if ser.read() == 'c' and ba == 4: print "second track false start" t=True ba = 2 if ser.read() == 'e' and ac == 4: print "third track false start" t=True ac = 1 if ser.read() == 'g' and ca == 4: print "fourth track false start" 
t=True ca = 2 elif track==5: while x < 5000: x+=1 if ser.read() == 'a' and ab == 4: print "first track false start" t=True ab = 1 if ser.read() == 'c' and ba == 4: print "second track false start" t=True ba = 2 if ser.read() == 'e' and ac == 4: print "third track false start" t=True ac = 1 if ser.read() == 'g' and ca == 4: print "fourth track false start" t=True ca = 2 if ser.read() == 'i' and ad == 4: print "fifth track false start" t=True ad = 1 elif track==6: while x < 5000: x+=1 if ser.read() == 'a' and ab == 4: print "first track false start" t=True ab = 1 if ser.read() == 'c' and ba == 4: print "second track false start" t=True ba = 2 if ser.read() == 'e' and ac == 4: print "third track false start" t=True ac = 1 if ser.read() == 'g' and ca == 4: print "fourth track false start" t=True ca = 2 if ser.read() == 'i' and ad == 4: print "fifth track false start" t=True ad = 1 if ser.read() == 'k' and da == 4: print "sixth track false start" t=True da = 2 elif track==7: while x < 5000: x+=1 if ser.read() == 'a' and ab == 4: print "first track false start" t=True ab = 1 if ser.read() == 'c' and ba == 4: print "second track false start" t=True ba = 2 if ser.read() == 'e' and ac == 4: print "third track false start" t=True ac = 1 if ser.read() == 'g' and ca == 4: print "fourth track false start" t=True ca = 2 if ser.read() == 'i' and ad == 4: print "fifth track false start" t=True ad = 1 if ser.read() == 'k' and da == 4: print "sixth track false start" t=True da = 2 if ser.read() == 'm' and ae == 4: print "seventh track false start" t=True ae = 1 elif track==8: while x < 5000: x+=1 if ser.read() == 'a' and ab == 4: print "first track false start" t=True ab = 1 if ser.read() == 'c' and ba == 4: print "second track false start" t=True ba = 2 if ser.read() == 'e' and ac == 4: print "third track false start" t=True ac = 1 if ser.read() == 'g' and ca == 4: print "fourth track false start" t=True ca = 2 if ser.read() == 'i' and ad == 4: print "fifth track false start" 
t=True ad = 1 if ser.read() == 'k' and da == 4: print "sixth track false start" t=True da = 2 if ser.read() == 'm' and ae == 4: print "seventh track false start" t=True ae = 1 if ser.read() == 'o' and ea == 4: print "eigth track false start" t=True ea = 2 elif track==9: while x < 5000: x+=1 if ser.read() == 'a' and ab == 4: print "first track false start" t=True ab = 1 if ser.read() == 'c' and ba == 4: print "second track false start" t=True ba = 2 if ser.read() == 'e' and ac == 4: print "third track false start" t=True ac = 1 if ser.read() == 'g' and ca == 4: print "fourth track false start" t=True ca = 2 if ser.read() == 'i' and ad == 4: print "fifth track false start" t=True ad = 1 if ser.read() == 'k' and da == 4: print "sixth track false start" t=True da = 2 if ser.read() == 'm' and ae == 4: print "seventh track false start" t=True ae = 1 if ser.read() == 'o' and ea == 4: print "eigth track false start" t=True ea = 2 if ser.read() == 'q' and af == 4: print "nineth track false start" t=True af = 1 elif track==10: while x < 5000: x=x+1 if ser.read() == 'a' and ab == 4: print "first track false start" t=True ab = 1 if ser.read() == 'c' and ba == 4: print "second track false start" t=True ba = 2 if ser.read() == 'e' and ac == 4: print "third track false start" t=True ac = 1 if ser.read() == 'g' and ca == 4: print "fourth track false start" t=True ca = 2 if ser.read() == 'i' and ad == 4: print "fifth track false start" t=True ad = 1 if ser.read() == 'k' and da == 4: print "sixth track false start" t=True da = 2 if ser.read() == 'm' and ae == 4: print "seventh track false start" t=True ae = 1 if ser.read() == 'o' and ea == 4: print "eigth track false start" t=True ea = 2 if ser.read() == 'q' and af == 4: print "nineth track false start" t=True af = 1 if ser.read() == 's' and fa == 4: print "Tenth track false start" t=True fa = 1 if t == True: a.close() sys.exit() else : a.find_element_by_xpath("//button[@title='Start']").click() ser.close() xoxo="0"+cer.readline() 
if track==1: while z < 1: abdc=0 ahf = cer.readline() bhf = cer.readline() chf = cer.readline() dhf = cer.readline() ahf=ahf+bhf+chf+dhf if ahf.find("b") !=-1 and f == 5: f = 50 if ahf.find("d") !=-1 and f == 50: rank[z] = 1 z+=1 print "first" f=51 a.find_element_by_xpath("//button[@title='Split']").click() elif track==2: while z < 2: abdc=0 ahf = cer.readline() bhf = cer.readline() chf = cer.readline() dhf = cer.readline() ahf=ahf+bhf+chf+dhf if ahf.find("b") !=-1 and f == 5: f = 50 if ahf.find("d") !=-1 and f == 50: rank[z] = 1 z+=1 print "first" f=51 a.find_element_by_xpath("//button[@title='Split']").click() if ahf.find("f") !=-1 and g == 5: g = 50 if ahf.find("h") !=-1 and g == 50: rank[z] = 2 z+=1 print "second" g = 51 a.find_element_by_xpath("//button[@title='Split']").click() elif track==3: while z < 3: abdc=0 ahf = cer.readline() bhf = cer.readline() chf = cer.readline() dhf = cer.readline() ahf=ahf+bhf+chf+dhf if ahf.find("b") !=-1 and f == 5: f = 50 if ahf.find("d") !=-1 and f == 50: rank[z] = 1 z+=1 print "first" f=51 a.find_element_by_xpath("//button[@title='Split']").click() if ahf.find("f") !=-1 and g == 5: g = 50 if ahf.find("h") !=-1 and g == 50: rank[z] = 2 z+=1 print "second" g = 51 a.find_element_by_xpath("//button[@title='Split']").click() if ahf.find('j') !=-1 and h == 5: h = 50 if ahf.find('l') !=-1 and h == 50: rank[z] = 3 z+=1 print "third" h=51 a.find_element_by_xpath("//button[@title='Split']").click() elif track==4: while z < 4: abdc=0 ahf = cer.readline() bhf = cer.readline() chf = cer.readline() dhf = cer.readline() ahf=ahf+bhf+chf+dhf if ahf.find("b") !=-1 and f == 5: f = 50 if ahf.find("d") !=-1 and f == 50: rank[z] = 1 z+=1 print "first" f=51 a.find_element_by_xpath("//button[@title='Split']").click() if ahf.find("f") !=-1 and g == 5: g = 50 if ahf.find("h") !=-1 and g == 50: rank[z] = 2 z+=1 print "second" g = 51 a.find_element_by_xpath("//button[@title='Split']").click() if ahf.find('j') !=-1 and h == 5: h = 50 if ahf.find('l') 
!=-1 and h == 50: rank[z] = 3 z+=1 print "third" h=51 a.find_element_by_xpath("//button[@title='Split']").click() if ahf.find('n') !=-1 and i == 5: i = 50 if ahf.find('p') !=-1 and i == 50: rank[z] = 4 z+=1 print "fourth" i=51 a.find_element_by_xpath("//button[@title='Split']").click() elif track==5: while z < 5: abdc=0 ahf = cer.readline() bhf = cer.readline() chf = cer.readline() dhf = cer.readline() ahf=ahf+bhf+chf+dhf if ahf.find("b") !=-1 and f == 5: f = 50 if ahf.find("d") !=-1 and f == 50: rank[z] = 1 z+=1 print "first" f=51 a.find_element_by_xpath("//button[@title='Split']").click() if ahf.find("f") !=-1 and g == 5: g = 50 if ahf.find("h") !=-1 and g == 50: rank[z] = 2 z+=1 print "second" g = 51 a.find_element_by_xpath("//button[@title='Split']").click() if ahf.find('j') !=-1 and h == 5: h = 50 if ahf.find('l') !=-1 and h == 50: rank[z] = 3 z+=1 print "third" h=51 a.find_element_by_xpath("//button[@title='Split']").click() if ahf.find('n') !=-1 and i == 5: i = 50 if ahf.find('p') !=-1 and i == 50: rank[z] = 4 z+=1 print "fourth" i=51 a.find_element_by_xpath("//button[@title='Split']").click() if ahf.find('r') !=-1 and j == 5: j=50 if ahf.find('t') !=-1 and j == 50: rank[z] = 5 z+=1 print "fifth" j=51 a.find_element_by_xpath("//button[@title='Split']").click() elif track==6: while z < 6: abdc=0 ahf = cer.readline() bhf = cer.readline() chf = cer.readline() dhf = cer.readline() ahf=ahf+bhf+chf+dhf if ahf.find("b") !=-1 and f == 5: f = 50 if ahf.find("d") !=-1 and f == 50: rank[z] = 1 z+=1 print "first" f=51 a.find_element_by_xpath("//button[@title='Split']").click() if ahf.find("f") !=-1 and g == 5: g = 50 if ahf.find("h") !=-1 and g == 50: rank[z] = 2 z+=1 print "second" g = 51 a.find_element_by_xpath("//button[@title='Split']").click() if ahf.find('j') !=-1 and h == 5: h = 50 if ahf.find('l') !=-1 and h == 50: rank[z] = 3 z+=1 print "third" h=51 a.find_element_by_xpath("//button[@title='Split']").click() if ahf.find('n') !=-1 and i == 5: i = 50 if 
ahf.find('p') !=-1 and i == 50: rank[z] = 4 z+=1 print "fourth" i=51 a.find_element_by_xpath("//button[@title='Split']").click() if ahf.find('r') !=-1 and j == 5: j=50 if ahf.find('t') !=-1 and j == 50: rank[z] = 5 z+=1 print "fifth" j=51 a.find_element_by_xpath("//button[@title='Split']").click() if ahf.find('v') !=-1 and k == 5: k=50 if ahf.find('x') !=-1 and k == 50: rank[z] = 6 z+=1 print "six" k = 51 a.find_element_by_xpath("//button[@title='Split']").click() elif track==7: while z < 7: abdc=0 ahf = cer.readline() bhf = cer.readline() chf = cer.readline() dhf = cer.readline() ahf=ahf+bhf+chf+dhf if ahf.find("b") !=-1 and f == 5: f = 50 if ahf.find("d") !=-1 and f == 50: rank[z] = 1 z+=1 print "first" f=51 a.find_element_by_xpath("//button[@title='Split']").click() if ahf.find("f") !=-1 and g == 5: g = 50 if ahf.find("h") !=-1 and g == 50: rank[z] = 2 z+=1 print "second" g = 51 a.find_element_by_xpath("//button[@title='Split']").click() if ahf.find('j') !=-1 and h == 5: h = 50 if ahf.find('l') !=-1 and h == 50: rank[z] = 3 z+=1 print "third" h=51 a.find_element_by_xpath("//button[@title='Split']").click() if ahf.find('n') !=-1 and i == 5: i = 50 if ahf.find('p') !=-1 and i == 50: rank[z] = 4 z+=1 print "fourth" i=51 a.find_element_by_xpath("//button[@title='Split']").click() if ahf.find('r') !=-1 and j == 5: j=50 if ahf.find('t') !=-1 and j == 50: rank[z] = 5 z+=1 print "fifth" j=51 a.find_element_by_xpath("//button[@title='Split']").click() if ahf.find('v') !=-1 and k == 5: k=50 if ahf.find('x') !=-1 and k == 50: rank[z] = 6 z+=1 print "six" k = 51 a.find_element_by_xpath("//button[@title='Split']").click() if ahf.find('z') !=-1 and l == 5: l = 50 if ahf.find('2') !=-1 and l == 50: rank[z] = 7 z+=1 print "seven" l = 51 a.find_element_by_xpath("//button[@title='Split']").click() elif track==8: while z < 8: abdc=0 ahf = cer.readline() bhf = cer.readline() chf = cer.readline() dhf = cer.readline() ahf=ahf+bhf+chf+dhf if ahf.find("b") !=-1 and f == 5: f = 50 if 
ahf.find("d") !=-1 and f == 50: rank[z] = 1 z+=1 print "first" f=51 a.find_element_by_xpath("//button[@title='Split']").click() if ahf.find("f") !=-1 and g == 5: g = 50 if ahf.find("h") !=-1 and g == 50: rank[z] = 2 z+=1 print "second" g = 51 a.find_element_by_xpath("//button[@title='Split']").click() if ahf.find('j') !=-1 and h == 5: h = 50 if ahf.find('l') !=-1 and h == 50: rank[z] = 3 z+=1 print "third" h=51 a.find_element_by_xpath("//button[@title='Split']").click() if ahf.find('n') !=-1 and i == 5: i = 50 if ahf.find('p') !=-1 and i == 50: rank[z] = 4 z+=1 print "fourth" i=51 a.find_element_by_xpath("//button[@title='Split']").click() if ahf.find('r') !=-1 and j == 5: j=50 if ahf.find('t') !=-1 and j == 50: rank[z] = 5 z+=1 print "fifth" j=51 a.find_element_by_xpath("//button[@title='Split']").click() if ahf.find('v') !=-1 and k == 5: k=50 if ahf.find('x') !=-1 and k == 50: rank[z] = 6 z+=1 print "six" k = 51 a.find_element_by_xpath("//button[@title='Split']").click() if ahf.find('z') !=-1 and l == 5: l = 50 if ahf.find('2') !=-1 and l == 50: rank[z] = 7 z+=1 print "seven" l = 51 a.find_element_by_xpath("//button[@title='Split']").click() if ahf.find('4') !=-1 and m == 5: m = 50 if ahf.find('6') !=-1 and m == 50: rank[z] = 8 z+=1 print "eight" m=51 a.find_element_by_xpath("//button[@title='Split']").click() elif track==9: while z < 9: abdc=0 ahf = cer.readline() bhf = cer.readline() chf = cer.readline() dhf = cer.readline() ahf=ahf+bhf+chf+dhf if ahf.find("b") !=-1 and f == 5: f = 50 if ahf.find("d") !=-1 and f == 50: rank[z] = 1 z+=1 print "first" f=51 a.find_element_by_xpath("//button[@title='Split']").click() if ahf.find("f") !=-1 and g == 5: g = 50 if ahf.find("h") !=-1 and g == 50: rank[z] = 2 z+=1 print "second" g = 51 a.find_element_by_xpath("//button[@title='Split']").click() if ahf.find('j') !=-1 and h == 5: h = 50 if ahf.find('l') !=-1 and h == 50: rank[z] = 3 z+=1 print "third" h=51 a.find_element_by_xpath("//button[@title='Split']").click() if 
ahf.find('n') !=-1 and i == 5: i = 50 if ahf.find('p') !=-1 and i == 50: rank[z] = 4 z+=1 print "fourth" i=51 a.find_element_by_xpath("//button[@title='Split']").click() if ahf.find('r') !=-1 and j == 5: j=50 if ahf.find('t') !=-1 and j == 50: rank[z] = 5 z+=1 print "fifth" j=51 a.find_element_by_xpath("//button[@title='Split']").click() if ahf.find('v') !=-1 and k == 5: k=50 if ahf.find('x') !=-1 and k == 50: rank[z] = 6 z+=1 print "six" k = 51 a.find_element_by_xpath("//button[@title='Split']").click() if ahf.find('z') !=-1 and l == 5: l = 50 if ahf.find('2') !=-1 and l == 50: rank[z] = 7 z+=1 print "seven" l = 51 a.find_element_by_xpath("//button[@title='Split']").click() if ahf.find('4') !=-1 and m == 5: m = 50 if ahf.find('6') !=-1 and m == 50: rank[z] = 8 z+=1 print "eight" m=51 a.find_element_by_xpath("//button[@title='Split']").click() if ahf.find('8') !=-1 and n == 5: n = 50 if ahf.find('!') !=-1 and n == 50: rank[z] = 9 z+=1 print "nine" n = 51 a.find_element_by_xpath("//button[@title='Split']").click() elif track==10: while z < 10: abdc=0 ahf = cer.readline() bhf = cer.readline() chf = cer.readline() dhf = cer.readline() ahf=ahf+bhf+chf+dhf if ahf.find("b") !=-1 and f == 5: f = 50 if ahf.find("d") !=-1 and f == 50: rank[z] = 1 z+=1 print "first" f=51 a.find_element_by_xpath("//button[@title='Split']").click() if ahf.find("f") !=-1 and g == 5: g = 50 if ahf.find("h") !=-1 and g == 50: rank[z] = 2 z+=1 print "second" g = 51 a.find_element_by_xpath("//button[@title='Split']").click() if ahf.find('j') !=-1 and h == 5: h = 50 if ahf.find('l') !=-1 and h == 50: rank[z] = 3 z+=1 print "third" h=51 a.find_element_by_xpath("//button[@title='Split']").click() if ahf.find('n') !=-1 and i == 5: i = 50 if ahf.find('p') !=-1 and i == 50: rank[z] = 4 z+=1 print "fourth" i=51 a.find_element_by_xpath("//button[@title='Split']").click() if ahf.find('r') !=-1 and j == 5: j=50 if ahf.find('t') !=-1 and j == 50: rank[z] = 5 z+=1 print "fifth" j=51 
a.find_element_by_xpath("//button[@title='Split']").click() if ahf.find('v') !=-1 and k == 5: k=50 if ahf.find('x') !=-1 and k == 50: rank[z] = 6 z+=1 print "six" k = 51 a.find_element_by_xpath("//button[@title='Split']").click() if ahf.find('z') !=-1 and l == 5: l = 50 if ahf.find('2') !=-1 and l == 50: rank[z] = 7 z+=1 print "seven" l = 51 a.find_element_by_xpath("//button[@title='Split']").click() if ahf.find('4') !=-1 and m == 5: m = 50 if ahf.find('6') !=-1 and m == 50: rank[z] = 8 z+=1 print "eight" m=51 a.find_element_by_xpath("//button[@title='Split']").click() if ahf.find('8') !=-1 and n == 5: n = 50 if ahf.find('!') !=-1 and n == 50: rank[z] = 9 z+=1 print "nine" n = 51 a.find_element_by_xpath("//button[@title='Split']").click() if ahf.find('#') !=-1 and o == 5: o = 50 if ahf.find('%') !=-1 and o == 50: rank[z] = 10 z+=1 print "ten" o=51 a.find_element_by_xpath("//button[@title='Split']").click() print "stop" a.find_element_by_xpath("//button[@title='Stop']").click() cer.close() cur=db.cursor() time.sleep(10) query1 = "select * from details;" cur.execute(query1) result1 = cur.fetchall() count = cur.rowcount row=result1[count-track][0] strin="" z=1 while z<=track: i=track d = 'cm' d += `z` time=str(a.find_element_by_id("time"+str(z)).text) cur.execute("""UPDATE details SET elapsedtime = %s WHERE sno >= %s AND trackno = %s""" , (time, str(row), str(rank[z-1]))) while i>0: if int(result1[count-i][2])==rank[z-1]: strin=str(rank[z-1])+" - "+result1[count-i][1] a.find_element_by_id(d).send_keys(strin) i=i-1 z=z+1 db.commit() db.close()
29.720605
126
0.414963
3,529
25,530
2.950978
0.047039
0.052814
0.095064
0.079316
0.884098
0.877569
0.875552
0.865758
0.856923
0.855963
0
0.061941
0.425813
25,530
859
127
29.720605
0.648475
0.002899
0
0.874126
0
0
0.131678
0.053701
0
0
0
0
0
0
null
null
0
0.008159
null
null
0.130536
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
8
e0c4efd653d6e7e04d31311653f44f6b9f6fa896
8,522
py
Python
leasing/tests/api/test_lease_basis_of_rent_lock.py
suutari-ai/mvj
c39dbc692afcb3b26366783414c2d5a88a57b25a
[ "MIT" ]
1
2021-01-12T08:14:10.000Z
2021-01-12T08:14:10.000Z
leasing/tests/api/test_lease_basis_of_rent_lock.py
suutari-ai/mvj
c39dbc692afcb3b26366783414c2d5a88a57b25a
[ "MIT" ]
249
2017-04-18T14:00:13.000Z
2022-03-30T12:18:03.000Z
leasing/tests/api/test_lease_basis_of_rent_lock.py
suutari-ai/mvj
c39dbc692afcb3b26366783414c2d5a88a57b25a
[ "MIT" ]
7
2017-04-18T08:43:54.000Z
2021-07-28T07:29:30.000Z
import datetime
import json

import pytest
import pytz
from django.contrib.auth.models import Permission
from django.core.serializers.json import DjangoJSONEncoder
from django.urls import reverse

from leasing.models import LeaseBasisOfRent

# Lock timestamp used by every test below.
LOCK_TIME = datetime.datetime(
    year=2010,
    month=1,
    day=1,
    hour=1,
    minute=1,
    tzinfo=pytz.timezone("Europe/Helsinki"),
)

# Permissions common to every test; tests add extras on top.
BASE_PERMISSION_CODENAMES = [
    "view_lease",
    "change_lease",
    "add_leasebasisofrent",
    "change_lease_basis_of_rents",
    "change_leasebasisofrent",
]


def _login_user_with_permissions(user_factory, client, extra_codenames=()):
    """Create a user holding the base permissions plus extras and log it in."""
    user = user_factory(username="test_user")
    user.set_password("test_password")
    user.save()
    for codename in BASE_PERMISSION_CODENAMES + list(extra_codenames):
        user.user_permissions.add(Permission.objects.get(codename=codename))
    client.login(username="test_user", password="test_password")
    return user


def _patch_basis_of_rents(client, lease, basis_of_rents):
    """PATCH the lease detail endpoint with the given basis_of_rents payload."""
    data = {"id": lease.id, "basis_of_rents": basis_of_rents}
    url = reverse("lease-detail", kwargs={"pk": lease.id})
    return client.patch(
        url,
        data=json.dumps(data, cls=DjangoJSONEncoder),
        content_type="application/json",
    )


@pytest.mark.django_db
def test_lock_lease_basis_of_rent(
    django_db_setup, client, lease_test_data, user_factory, lease_basis_of_rent_factory
):
    """A user with the locked_at permission can lock a basis of rent."""
    _login_user_with_permissions(
        user_factory, client, ["change_leasebasisofrent_locked_at"]
    )
    lease = lease_test_data["lease"]
    lease_basis_of_rent = lease_basis_of_rent_factory(
        lease=lease, intended_use_id=1, area=12345, area_unit="m2", index_id=1
    )

    response = _patch_basis_of_rents(
        client, lease, [{"id": lease_basis_of_rent.id, "locked_at": LOCK_TIME}]
    )

    assert response.status_code == 200, "%s %s" % (response.status_code, response.data)
    lease_basis_of_rent = LeaseBasisOfRent.objects.get(pk=lease_basis_of_rent.id)
    assert lease_basis_of_rent.locked_at == LOCK_TIME


@pytest.mark.django_db
def test_cannot_change_locked_lease_basis_of_rent(
    django_db_setup, client, lease_test_data, user_factory, lease_basis_of_rent_factory
):
    """Editing a locked basis of rent is rejected with a 400."""
    _login_user_with_permissions(
        user_factory, client, ["change_leasebasisofrent_locked_at"]
    )
    lease = lease_test_data["lease"]
    lease_basis_of_rent = lease_basis_of_rent_factory(
        lease=lease,
        intended_use_id=1,
        area=12345,
        area_unit="m2",
        index_id=1,
        locked_at=LOCK_TIME,
    )

    response = _patch_basis_of_rents(
        client, lease, [{"id": lease_basis_of_rent.id, "intended_use_id": 2}]
    )

    assert response.status_code == 400, "%s %s" % (response.status_code, response.data)
    lease_basis_of_rent = LeaseBasisOfRent.objects.get(pk=lease_basis_of_rent.id)
    assert lease_basis_of_rent.intended_use_id == 1


@pytest.mark.django_db
def test_cannot_unclock_locked_lease_basis_of_rent(
    django_db_setup, client, lease_test_data, user_factory, lease_basis_of_rent_factory
):
    """Without the locked_at permission an unlock attempt is silently ignored.

    NOTE(review): the request succeeds (200) but locked_at stays set.
    """
    # No "change_leasebasisofrent_locked_at" permission here.
    _login_user_with_permissions(user_factory, client)
    lease = lease_test_data["lease"]
    lease_basis_of_rent = lease_basis_of_rent_factory(
        lease=lease,
        intended_use_id=1,
        area=12345,
        area_unit="m2",
        index_id=1,
        locked_at=LOCK_TIME,
    )

    response = _patch_basis_of_rents(
        client, lease, [{"id": lease_basis_of_rent.id, "locked_at": None}]
    )

    assert response.status_code == 200, "%s %s" % (response.status_code, response.data)
    lease_basis_of_rent = LeaseBasisOfRent.objects.get(pk=lease_basis_of_rent.id)
    assert lease_basis_of_rent.locked_at == LOCK_TIME


@pytest.mark.django_db
def test_can_unclock_locked_lease_basis_of_rent(
    django_db_setup, client, lease_test_data, user_factory, lease_basis_of_rent_factory
):
    """With the locked_at permission the basis of rent can be unlocked."""
    _login_user_with_permissions(
        user_factory, client, ["change_leasebasisofrent_locked_at"]
    )
    lease = lease_test_data["lease"]
    lease_basis_of_rent = lease_basis_of_rent_factory(
        lease=lease,
        intended_use_id=1,
        area=12345,
        area_unit="m2",
        index_id=1,
        locked_at=LOCK_TIME,
    )

    response = _patch_basis_of_rents(
        client, lease, [{"id": lease_basis_of_rent.id, "locked_at": None}]
    )

    assert response.status_code == 200, "%s %s" % (response.status_code, response.data)
    lease_basis_of_rent = LeaseBasisOfRent.objects.get(pk=lease_basis_of_rent.id)
    assert lease_basis_of_rent.locked_at is None


@pytest.mark.django_db
def test_cannot_remove_locked_lease_basis_of_rent(
    django_db_setup, client, lease_test_data, user_factory, lease_basis_of_rent_factory
):
    """Omitting a locked basis of rent from the payload does not delete it."""
    _login_user_with_permissions(
        user_factory,
        client,
        ["delete_leasebasisofrent", "change_leasebasisofrent_locked_at"],
    )
    lease = lease_test_data["lease"]
    lease_basis_of_rent_factory(
        lease=lease,
        intended_use_id=1,
        area=12345,
        area_unit="m2",
        index_id=1,
        locked_at=LOCK_TIME,
    )

    response = _patch_basis_of_rents(client, lease, [])

    assert response.status_code == 200, "%s %s" % (response.status_code, response.data)
    assert lease.basis_of_rents.count() == 1
27.401929
87
0.665337
1,038
8,522
5.121387
0.092486
0.060572
0.092551
0.105342
0.942438
0.942438
0.937735
0.924944
0.924944
0.924944
0
0.014844
0.225299
8,522
310
88
27.490323
0.790367
0
0
0.807377
0
0
0.146679
0.047524
0
0
0
0
0.040984
1
0.020492
false
0.040984
0.032787
0
0.053279
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
e0f4bb6f4c00530197463e2a03a135eb2f58c00e
3,513
py
Python
tests/test_health_component.py
Leonardo767/Abmarl
9fada5447b09174c6a70b6032b4a8d08b66c4589
[ "Apache-2.0" ]
null
null
null
tests/test_health_component.py
Leonardo767/Abmarl
9fada5447b09174c6a70b6032b4a8d08b66c4589
[ "Apache-2.0" ]
null
null
null
tests/test_health_component.py
Leonardo767/Abmarl
9fada5447b09174c6a70b6032b4a8d08b66c4589
[ "Apache-2.0" ]
null
null
null
from abmarl.sim.components.agent import ComponentAgent as Agent
from abmarl.sim.components.state import LifeState


def _make_agents():
    """Four agents on a 0.0-5.0 health scale.

    agent0/agent1 have explicit initial health; agent2/agent3 get theirs
    assigned randomly at reset.
    """
    return {
        'agent0': Agent(id='agent0', min_health=0.0, max_health=5.0, initial_health=3.4),
        'agent1': Agent(id='agent1', min_health=0.0, max_health=5.0, initial_health=2.4),
        'agent2': Agent(id='agent2', min_health=0.0, max_health=5.0),
        'agent3': Agent(id='agent3', min_health=0.0, max_health=5.0),
    }


def _assert_configured(agents):
    """Shared checks on the construction-time attributes of all four agents."""
    for agent_id in ('agent0', 'agent1', 'agent2', 'agent3'):
        agent = agents[agent_id]
        assert agent.min_health == 0.0
        assert agent.max_health == 5.0
        assert agent.is_alive
    assert agents['agent0'].initial_health == 3.4
    assert agents['agent1'].initial_health == 2.4


def test_health_agents():
    """Agents report the health bounds and aliveness they were built with."""
    _assert_configured(_make_agents())


def test_life_state():
    """LifeState resets health, applies entropy, and kills drained agents."""
    agents = _make_agents()
    _assert_configured(agents)

    state = LifeState(agents=agents, entropy=0.5)
    state.reset()
    # Explicit initial health is restored exactly; the others land in range.
    assert agents['agent0'].health == 3.4
    assert agents['agent1'].health == 2.4
    assert 0.0 <= agents['agent2'].health <= 5.0
    assert 0.0 <= agents['agent3'].health <= 5.0

    state.apply_entropy(agents['agent0'])
    state.apply_entropy(agents['agent1'])
    assert agents['agent0'].health == 2.9
    assert agents['agent1'].health == 1.9

    # Ten more entropy applications drain everyone (10 * 0.5 >= max_health).
    for _ in range(10):
        state.apply_entropy(agents['agent0'])
        state.apply_entropy(agents['agent1'])
        state.apply_entropy(agents['agent2'])
        state.apply_entropy(agents['agent3'])
    assert not agents['agent0'].is_alive
    assert not agents['agent1'].is_alive
    assert not agents['agent2'].is_alive
    assert not agents['agent3'].is_alive

    # Reset revives everyone and restores the configured attributes.
    state.reset()
    _assert_configured(agents)
38.604396
89
0.65955
517
3,513
4.332689
0.079304
0.246429
0.150893
0.098214
0.8375
0.763393
0.763393
0.763393
0.763393
0.763393
0
0.06692
0.174779
3,513
90
90
39.033333
0.705761
0
0
0.74359
0
0
0.126388
0
0
0
0
0
0.666667
1
0.025641
false
0
0.025641
0
0.051282
0
0
0
0
null
1
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
9
4605703e434681f20d5aa8aee59648db8c956557
3,170
py
Python
trello/webhooks.py
cmuozdiaz/trello-py
fa3e05972ad22796e515a6bb1a31e1ea5b365ed6
[ "BSD-2-Clause" ]
null
null
null
trello/webhooks.py
cmuozdiaz/trello-py
fa3e05972ad22796e515a6bb1a31e1ea5b365ed6
[ "BSD-2-Clause" ]
null
null
null
trello/webhooks.py
cmuozdiaz/trello-py
fa3e05972ad22796e515a6bb1a31e1ea5b365ed6
[ "BSD-2-Clause" ]
null
null
null
import json import requests class Webhooks(object): __module__ = 'trello' def __init__(self, apikey, token=None): self._apikey = apikey self._token = token def get_(self, idWebhook): resp = requests.get("https://trello.com/1/webhooks/{}/".format(idWebhook), params={"key": self._apikey, "token": self._token}, data=None) resp.raise_for_status() return json.loads(resp.text) def get_field(self, field, idWebhook): resp = requests.get("https://trello.com/1/webhooks/{}/{}".format(idWebhook, field), params={"key": self._apikey, "token": self._token}, data=None) resp.raise_for_status() return json.loads(resp.text) def update(self, idWebhook, description=None, callbackURL=None, idModel=None, active=None): resp = requests.put("https://trello.com/1/webhooks/{}".format(idWebhook), params={"key": self._apikey, "token": self._token}, data={"description": description, "callbackURL": callbackURL, "idModel": idModel, "active": active}) resp.raise_for_status() return json.loads(resp.text) def update_(self, callbackURL, idModel, description=None): resp = requests.put("https://trello.com/1/webhooks/".format(), params={"key": self._apikey, "token": self._token}, data={"callbackURL": callbackURL, "idModel": idModel, "description": description}) resp.raise_for_status() return json.loads(resp.text) def update_active(self, idWebhook, value): resp = requests.put("https://trello.com/1/webhooks/{}/active".format(idWebhook), params={"key": self._apikey, "token": self._token}, data={"value": value}) resp.raise_for_status() return json.loads(resp.text) def update_callbackURL(self, idWebhook, value): resp = requests.put("https://trello.com/1/webhooks/{}/callbackURL".format(idWebhook), params={"key": self._apikey, "token": self._token}, data={"value": value}) resp.raise_for_status() return json.loads(resp.text) def update_description(self, idWebhook, value): resp = requests.put("https://trello.com/1/webhooks/{}/description".format(idWebhook), params={"key": self._apikey, "token": self._token}, 
data={"value": value}) resp.raise_for_status() return json.loads(resp.text) def update_idModel(self, idWebhook, value): resp = requests.put("https://trello.com/1/webhooks/{}/idModel".format(idWebhook), params={"key": self._apikey, "token": self._token}, data={"value": value}) resp.raise_for_status() return json.loads(resp.text) def new(self, callbackURL, idModel, description=None): resp = requests.post("https://trello.com/1/webhooks".format(), params={"key": self._apikey, "token": self._token}, data={"callbackURL": callbackURL, "idModel": idModel, "description": description}) resp.raise_for_status() return json.loads(resp.text) def delete(self, idWebhook): resp = requests.delete("https://trello.com/1/webhooks/{}".format(idWebhook), params={"key": self._apikey, "token": self._token}, data=None) resp.raise_for_status() return json.loads(resp.text)
51.967213
234
0.667823
389
3,170
5.29563
0.105398
0.058252
0.080097
0.072816
0.814078
0.814078
0.814078
0.776214
0.776214
0.776214
0
0.003761
0.161199
3,170
60
235
52.833333
0.770967
0
0
0.425532
0
0
0.175765
0
0
0
0
0
0
1
0.234043
false
0
0.042553
0
0.531915
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
8
1cb3b05d93b2492dbca9ea86e652395b73f095af
624
py
Python
src/models/coinbase.py
Dragonfly-Capital/oracles.club.server
092dc1e6d205ceb475cd65f9b1c3e4aa6ef588dd
[ "MIT" ]
7
2020-04-28T02:17:51.000Z
2020-09-23T17:39:38.000Z
src/models/coinbase.py
Dragonfly-Capital/oracles.club.server
092dc1e6d205ceb475cd65f9b1c3e4aa6ef588dd
[ "MIT" ]
1
2020-08-10T19:39:12.000Z
2020-08-10T19:39:12.000Z
src/models/coinbase.py
Dragonfly-Capital/oracles.club.server
092dc1e6d205ceb475cd65f9b1c3e4aa6ef588dd
[ "MIT" ]
2
2020-05-10T09:39:47.000Z
2020-07-27T18:12:23.000Z
from .create_db import db class CoinbaseETH(db.Model): __tablename__ = 'coinbase' id = db.Column('id', db.Integer, primary_key=True) timestamp = db.Column('timestamp', db.Integer) price = db.Column('price', db.Float) def __repr__(self): return '{}, {}, {}'.format(self.timestamp, self.price) class CoinbaseBTC(db.Model): __tablename__ = 'coinbasebtc' id = db.Column('id', db.Integer, primary_key=True) timestamp = db.Column('timestamp', db.Integer) price = db.Column('price', db.Float) def __repr__(self): return '{}, {}, {}'.format(self.timestamp, self.price)
27.130435
62
0.644231
77
624
4.974026
0.311688
0.125326
0.083551
0.062663
0.731071
0.731071
0.731071
0.731071
0.731071
0.731071
0
0
0.1875
624
22
63
28.363636
0.755424
0
0
0.666667
0
0
0.113782
0
0
0
0
0
0
1
0.133333
false
0
0.066667
0.133333
1
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
1
1
0
0
7
1cc5557a422043c5f999847cbe7050c5edc278f4
209
py
Python
stores/utils.py
thgazis/django-oscar-stores
373f45a9f7d18f5ffaec06580fbe7ecf8edc27df
[ "BSD-3-Clause" ]
83
2015-01-21T14:33:38.000Z
2021-11-08T11:30:18.000Z
stores/utils.py
thgazis/django-oscar-stores
373f45a9f7d18f5ffaec06580fbe7ecf8edc27df
[ "BSD-3-Clause" ]
113
2015-01-04T22:56:23.000Z
2022-02-01T11:05:09.000Z
stores/utils.py
thgazis/django-oscar-stores
373f45a9f7d18f5ffaec06580fbe7ecf8edc27df
[ "BSD-3-Clause" ]
50
2015-06-09T18:13:48.000Z
2021-09-08T14:36:37.000Z
from django.conf import settings def get_geographic_srid(): return getattr(settings, 'STORES_GEOGRAPHIC_SRID', 3577) def get_geodetic_srid(): return getattr(settings, 'STORES_GEODETIC_SRID', 4326)
20.9
60
0.77512
27
209
5.703704
0.555556
0.077922
0.220779
0.324675
0.402597
0
0
0
0
0
0
0.044199
0.133971
209
9
61
23.222222
0.80663
0
0
0
0
0
0.200957
0.105263
0
0
0
0
0
1
0.4
true
0
0.2
0.4
1
0
1
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
0
1
1
0
0
8
1cd07a016b555b32a982e5422b0a361663e88225
197
py
Python
DemPipe/executor/__init__.py
hmiladhia/DemPipe
48d48150969fa047e2f20b35ee1c61991e7b44ad
[ "MIT" ]
null
null
null
DemPipe/executor/__init__.py
hmiladhia/DemPipe
48d48150969fa047e2f20b35ee1c61991e7b44ad
[ "MIT" ]
null
null
null
DemPipe/executor/__init__.py
hmiladhia/DemPipe
48d48150969fa047e2f20b35ee1c61991e7b44ad
[ "MIT" ]
null
null
null
from DemPipe.executor.pipe_exec_base import SimplePipeExecutor from DemPipe.executor.pipe_exec import PipeExecutor from DemPipe.executor.mixin import PushNotificationMixin, EmailMixin, ConfigMixin
49.25
81
0.888325
23
197
7.478261
0.565217
0.19186
0.331395
0.267442
0.313953
0
0
0
0
0
0
0
0.071066
197
3
82
65.666667
0.939891
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
e8026b47cca1e0048f0a7b5bd5719bb144d3df59
331
py
Python
iceworm/engine/rules/__init__.py
wrmsr0/iceworm
09431bb3cdc4f6796aafca41e37d42ebe0ddfeef
[ "BSD-3-Clause" ]
null
null
null
iceworm/engine/rules/__init__.py
wrmsr0/iceworm
09431bb3cdc4f6796aafca41e37d42ebe0ddfeef
[ "BSD-3-Clause" ]
1
2021-01-19T14:29:19.000Z
2021-01-19T14:34:27.000Z
iceworm/engine/rules/__init__.py
wrmsr0/iceworm
09431bb3cdc4f6796aafca41e37d42ebe0ddfeef
[ "BSD-3-Clause" ]
1
2020-12-31T22:29:52.000Z
2020-12-31T22:29:52.000Z
from . import inject # noqa from .base import Rule # noqa from .base import RuleElementProcessor # noqa from .base import RuleProcessor # noqa from .tables import InsertedRows # noqa from .tables import InsertedRowsProcessor # noqa from .tables import TableAsSelect # noqa from .tables import TableAsSelectProcessor # noqa
36.777778
50
0.782477
39
331
6.641026
0.333333
0.216216
0.216216
0.30888
0
0
0
0
0
0
0
0
0.169184
331
8
51
41.375
0.941818
0.117825
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
e81aa878b5e937860143947a65e9673ffc5e21e1
101,238
py
Python
MedTAG_Dockerized/MedTAG_sket_dock_App/utils_download.py
MedTAG/medtag-core
f2dae7b38230179d71babede7e4910631d91053f
[ "MIT" ]
6
2021-12-20T12:15:17.000Z
2022-02-02T15:28:42.000Z
MedTAG_Dockerized/MedTAG_sket_dock_App/utils_download.py
MedTAG/medtag-core
f2dae7b38230179d71babede7e4910631d91053f
[ "MIT" ]
1
2022-03-07T14:57:44.000Z
2022-03-11T18:11:55.000Z
MedTAG_Dockerized/MedTAG_sket_dock_App/utils_download.py
MedTAG/medtag-core
f2dae7b38230179d71babede7e4910631d91053f
[ "MIT" ]
2
2021-05-29T09:44:38.000Z
2021-12-28T03:53:40.000Z
import psycopg2 import csv from MedTAG_sket_dock_App.utils import * from bioc import * from datetime import date """This file manages the creation of the files to be downloaded by the users""" def generate_bioc(json_keys,json_keys_to_ann,username,action,language,usecase,institute,form,annotation_mode,report_type,batch): """This method creates the BioC files both XML and JSON depending on the language, usecase, institute chosen""" try: languages = ['English','english'] usec = UseCase.objects.get(name=usecase) batch_num = [] if batch is None: b = Report.objects.filter(name=usec).values('batch') for el in b: if el['batch'] not in batch_num: batch_num.append(el['batch']) else: batch_num.append(batch) writer = BioCXMLWriter() json_writer = BioCJSONWriter() writer.collection = BioCCollection() json_writer.collection = BioCCollection() collection = writer.collection collection1 = json_writer.collection today = str(date.today()) collection.date = today if report_type == 'reports': collection.source = 'MEDTAG Collection' collection1.source = 'MEDTAG Collection' else: collection.source = 'PUBMED Collection' collection1.source = 'PUBMED Collection' collection.put_infon('username', username) collection1.date = today collection1.put_infon('username', username) if action == 'mentions': collection.put_infon('annotation_type', 'mentions') collection.key = 'mentions.key' collection1.put_infon('annotation_type', 'mentions') collection1.key = 'mentions.key' with connection.cursor() as cursor: if report_type == 'reports': if annotation_mode == 'Human': cursor.execute( "SELECT DISTINCT r.name,r.id_report,r.language,r.institute FROM report AS r INNER JOIN annotate AS a ON r.id_report = a.id_report AND r.language = a.language WHERE a.username = %s AND r.name = %s AND r.language = COALESCE(%s,r.language) AND r.institute = COALESCE(%s,r.institute) AND a.ns_id = %s AND r.batch = COALESCE(%s,r.batch) AND r.institute!=%s", [str(username), str(usecase),(language),institute, 
str(annotation_mode), batch, 'PUBMED']) elif annotation_mode == 'Robot': # cambio # cursor.execute( # "SELECT DISTINCT r.name,r.id_report,r.language,r.institute FROM report AS r INNER JOIN annotate AS a ON r.id_report = a.id_report AND r.language = a.language INNER JOIN mention as m on m.id_report = a.id_report and m.language = a.language and m.start = a.start and m.stop = a.stop WHERE r.name = %s and a.ns_id = %s and a.username = %s and (a.id_report,a.language) IN (select g.id_report,g.language FROM ground_truth_log_file as g inner join ground_truth_log_file as gg on g.id_report = gg.id_report and g.language = gg.language and g.gt_type = gg.gt_type and g.ns_id = gg.ns_id where g.gt_type = %s and g.ns_id = %s and gg.insertion_time != g.insertion_time and gg.username = %s and g.username =%s AND r.batch = COALESCE (%s,r.batch) AND r.institute = COALESCE(%s,r.institute) AND r.language = COALESCE(%s,r.language) AND r.institute != %s)", # [str(usecase), 'Robot', str(username), 'mentions', 'Robot','Robot_user',str(username), batch,institute,language, 'PUBMED']) cursor.execute( "SELECT DISTINCT r.name,r.id_report,r.language,r.institute FROM report AS r INNER JOIN annotate AS a ON r.id_report = a.id_report AND r.language = a.language INNER JOIN mention as m on m.id_report = a.id_report and m.language = a.language and m.start = a.start and m.stop = a.stop WHERE r.name = %s and a.ns_id = %s and a.username = %s and (a.id_report,a.language) IN (select g.id_report,g.language FROM ground_truth_log_file as g where g.gt_type = %s and g.ns_id = %s and g.username =%s AND r.batch = COALESCE (%s,r.batch) AND r.institute = COALESCE(%s,r.institute) AND r.language = COALESCE(%s,r.language) AND r.institute != %s)", [str(usecase), 'Robot', str(username), 'mentions', 'Robot', str(username), batch, institute, language, 'PUBMED']) reports = cursor.fetchall() elif report_type == 'pubmed': if annotation_mode == 'Human': cursor.execute( "SELECT DISTINCT r.name,r.id_report,r.language,r.institute 
FROM report AS r INNER JOIN annotate AS a ON r.id_report = a.id_report AND r.language = a.language WHERE a.username = %s AND r.language in %s AND r.name = %s AND r.institute = %s AND a.ns_id = %s AND r.batch = COALESCE(%s,r.batch) AND r.language in %s", [str(username), tuple(languages), str(usecase), str('PUBMED'), str(annotation_mode), batch,tuple(languages)]) elif annotation_mode == 'Robot': # CAMBIO # cursor.execute( # "SELECT DISTINCT r.name,r.id_report,r.language,r.institute FROM report AS r INNER JOIN annotate AS a ON r.id_report = a.id_report AND r.language = a.language INNER JOIN mention as m on m.id_report = a.id_report and m.language = a.language and m.start = a.start and m.stop = a.stop WHERE r.name = %s and a.ns_id = %s and a.username = %s and (a.id_report,a.language) IN (select g.id_report,g.language FROM ground_truth_log_file as g inner join ground_truth_log_file as gg on g.id_report = gg.id_report and g.language = gg.language and g.gt_type = gg.gt_type and g.ns_id = gg.ns_id where g.gt_type = %s and g.ns_id = %s and gg.insertion_time != g.insertion_time and gg.username = %s and g.username =%s AND r.batch = COALESCE (%s,r.batch) AND r.institute = COALESCE(%s,r.institute) AND r.language = COALESCE(%s,r.language) AND r.institute = %s)", # [str(usecase), 'Robot', str(username), 'mentions', 'Robot', 'Robot_user', str(username), # batch, institute, tuple(languages), 'PUBMED']) cursor.execute( "SELECT DISTINCT r.name,r.id_report,r.language,r.institute FROM report AS r INNER JOIN annotate AS a ON r.id_report = a.id_report AND r.language = a.language INNER JOIN mention as m on m.id_report = a.id_report and m.language = a.language and m.start = a.start and m.stop = a.stop WHERE r.name = %s and a.ns_id = %s and a.username = %s and (a.id_report,a.language) IN (select g.id_report,g.language FROM ground_truth_log_file as g where g.gt_type=%s and g.ns_id = %s and g.username =%s AND r.batch = COALESCE (%s,r.batch) AND r.institute = COALESCE(%s,r.institute) AND 
r.language in %s AND r.institute = %s)", [str(usecase), 'Robot', str(username), 'mentions', 'Robot', str(username), batch, institute, tuple(languages), 'PUBMED']) reports = cursor.fetchall() documents = [] # reports = Annotate.objects.filter(username=username).values('id_report','language').distinct() for couple in reports: document = '' report = Report.objects.get(name=couple[0], id_report=couple[1], institute=couple[3], language=couple[2]) json_dict = report_get_start_end(json_keys, json_keys_to_ann, report.id_report, report.language) ns_cur = NameSpace.objects.get(ns_id=annotation_mode) anno = Annotate.objects.filter(username=username, id_report=report,ns_id = ns_cur, language=report.language) document = BioCDocument() document.id = str(report.id_report) document.put_infon('usecase', report.name_id) document.put_infon('language', report.language) document.put_infon('institute', report.institute) annotations = [] count = 0 for el in anno: mention = Mention.objects.get(start=el.start_id, stop=el.stop, id_report=report, language=report.language) json_dict = report_get_start_end(json_keys, json_keys_to_ann, report.id_report, report.language) annotation = BioCAnnotation() annotation.id = str(count) count = count + 1 loc_ann = BioCLocation() loc_ann.offset = str(mention.start) loc_ann.length = str(mention.stop - mention.start + 1) annotation.add_location(loc_ann) mention_text = mention.mention_text mtext = re.sub('[^a-zA-Z0-9n\-_/\' ]+', '', mention_text) annotation.text = mtext couple = (annotation, mention.start, mention.stop) annotations.append(couple) seen = [] for key in json_keys_to_ann: passage = BioCPassage() passage.put_infon('section', key) check = False keys = json_dict['rep_string'].keys() if key in keys: if json_dict['rep_string'].get(key) != '': # if json_dict['rep_string'].get(key) is not None and json_dict['rep_string'].get(key) != '': passage.text = json_dict['rep_string'][key]['text'] start = str(json_dict['rep_string'][key]['start']) passage.offset 
= start for el in annotations: if el not in seen: if int(el[1]) >= int(json_dict['rep_string'][key]['start']) and int(el[2]) <= int( json_dict['rep_string'][key]['end']): check = True passage.add_annotation(el[0]) seen.append(el) if check: document.add_passage(passage) collection.add_document(document) collection1.add_document(document) # print(writer) # print(json_writer) elif action == 'concept-mention': collection.put_infon('annotation_type', 'linking') collection.key = 'linking.key' collection1.put_infon('annotation_type', 'linking') collection1.key = 'linking.key' with connection.cursor() as cursor: if report_type == 'reports': if annotation_mode == 'Human': cursor.execute( "SELECT DISTINCT r.name,r.id_report,r.language,r.institute FROM report AS r INNER JOIN linked AS a ON r.id_report = a.id_report AND r.language = a.language WHERE a.username = %s AND r.name = %s AND a.ns_id = %s AND r.batch =COALESCE(%s,r.batch) AND r.institute=COALESCE(%s,r.institute) AND r.language = COALESCE(%s, r.language) AND r.institute != %s", [str(username), str(usecase), str(annotation_mode), batch,institute,language,'PUBMED']) elif annotation_mode == 'Robot': # CAMBIA # cursor.execute( # "SELECT DISTINCT r.name,r.id_report,r.language,r.institute FROM report AS r INNER JOIN linked AS a ON r.id_report = a.id_report AND r.language = a.language INNER JOIN mention as m on m.id_report = a.id_report and m.language = a.language and a.start = m.start and a.stop = m.stop INNER JOIN concept as c on c.concept_url = a.concept_url inner join semantic_area as s on s.name = a.name WHERE r.name = %s AND a.ns_id = %s and a.username = %s and (a.id_report,a.language) IN (select g.id_report,g.language FROM ground_truth_log_file as g inner join ground_truth_log_file as gg on g.id_report = gg.id_report and g.language = gg.language and g.gt_type = gg.gt_type and g.ns_id = gg.ns_id where g.gt_type = %s and g.ns_id = %s and gg.insertion_time != g.insertion_time and gg.username = %s and g.username =%s AND 
r.batch = COALESCE(%s,r.batch) AND r.institute=COALESCE(%s,r.institute) AND r.language = COALESCE(%s,r.language) AND r.institute != %s) ", # [str(usecase), 'Robot', str(username), 'concept-mention', 'Robot', # 'Robot_user', # str(username), batch,institute,language, 'PUBMED']) cursor.execute( "SELECT DISTINCT r.name,r.id_report,r.language,r.institute FROM report AS r INNER JOIN linked AS a ON r.id_report = a.id_report AND r.language = a.language INNER JOIN mention as m on m.id_report = a.id_report and m.language = a.language and a.start = m.start and a.stop = m.stop INNER JOIN concept as c on c.concept_url = a.concept_url inner join semantic_area as s on s.name = a.name WHERE r.name = %s AND a.ns_id = %s and a.username = %s and (a.id_report,a.language) IN (select g.id_report,g.language FROM ground_truth_log_file AS g where g.gt_type = %s and g.ns_id = %s and g.username =%s AND r.batch = COALESCE(%s,r.batch) AND r.institute=COALESCE(%s,r.institute) AND r.language = COALESCE(%s,r.language) AND r.institute != %s) ", [str(usecase), 'Robot', str(username), 'concept-mention', 'Robot', str(username), batch,institute,language, 'PUBMED']) reports = cursor.fetchall() elif report_type == 'pubmed': if language is not None and institute is not None and usecase is not None: if annotation_mode == 'Human': cursor.execute( "SELECT DISTINCT r.name,r.id_report,r.language,r.institute FROM report AS r INNER JOIN linked AS a ON r.id_report = a.id_report AND r.language = a.language WHERE a.username = %s AND r.language in %s AND r.name = %s AND r.institute = %s AND a.ns_id = %s AND r.batch IN %s", [str(username), tuple(languages), str(usecase), str('PUBMED'),str(annotation_mode),tuple(batch_num)]) elif annotation_mode == 'Robot': # CAMBIO # cursor.execute( # "SELECT DISTINCT r.name,r.id_report,r.language,r.institute FROM report AS r INNER JOIN linked AS a ON r.id_report = a.id_report AND r.language = a.language INNER JOIN mention as m on m.id_report = a.id_report and m.language = 
a.language and a.start = m.start and a.stop = m.stop INNER JOIN concept as c on c.concept_url = a.concept_url inner join semantic_area as s on s.name = a.name WHERE r.name = %s AND a.ns_id = %s and a.username = %s AND r.institute = %s and r.language = %s and (a.id_report,a.language) IN (select g.id_report,g.language FROM ground_truth_log_file as g inner join ground_truth_log_file as gg on g.id_report = gg.id_report and g.language = gg.language and g.gt_type = gg.gt_type and g.ns_id = gg.ns_id where g.gt_type = %s and g.ns_id = %s and gg.insertion_time != g.insertion_time and gg.username = %s and g.username =%s AND r.batch IN %s) ", # [str(usecase), 'Robot', str(username), str('PUBMED'), str(language), 'concept-mention', 'Robot', # 'Robot_user', # str(username),tuple(batch_num)]) cursor.execute( "SELECT DISTINCT r.name,r.id_report,r.language,r.institute FROM report AS r INNER JOIN linked AS a ON r.id_report = a.id_report AND r.language = a.language INNER JOIN mention as m on m.id_report = a.id_report and m.language = a.language and a.start = m.start and a.stop = m.stop INNER JOIN concept as c on c.concept_url = a.concept_url inner join semantic_area as s on s.name = a.name WHERE r.name = %s AND a.ns_id = %s and a.username = %s AND r.institute = %s and r.language = %s and (a.id_report,a.language) IN (select g.id_report,g.language FROM ground_truth_log_file as g where g.gt_type = %s and g.ns_id = %s and g.username =%s AND r.batch IN %s) ", [str(usecase), 'Robot', str(username), str('PUBMED'), str(language), 'concept-mention', 'Robot', str(username),tuple(batch_num)]) reports = cursor.fetchall() documents = [] # reports = Annotate.objects.filter(username=username).values('id_report','language').distinct() for couple in reports: document = '' report = Report.objects.get(name = couple[0],id_report=couple[1],institute = couple[3], language=couple[2]) json_dict = report_get_start_end(json_keys, json_keys_to_ann, report.id_report, report.language) ns = 
NameSpace.objects.get(ns_id = annotation_mode) anno = Linked.objects.filter(username=username,ns_id = ns,id_report = report,language = report.language) document = BioCDocument() document.id = str(report.id_report) document.put_infon('usecase', report.name_id) document.put_infon('language', report.language) document.put_infon('institute', report.institute) annotations = [] count = 0 for el in anno: mention = Mention.objects.get(start = el.start_id,stop = el.stop,id_report=report,language = report.language) concept = Concept.objects.get(concept_url = el.concept_url_id) json_dict = report_get_start_end(json_keys,json_keys_to_ann,report.id_report,report.language) annotation = BioCAnnotation() annotation.id = str(count) annotation.put_infon('concept_name', concept.name) annotation.put_infon('concept_url', concept.concept_url) count = count+1 loc_ann = BioCLocation() loc_ann.offset = str(mention.start) loc_ann.length = str(mention.stop - mention.start + 1) annotation.add_location(loc_ann) mention_text = mention.mention_text mtext = re.sub('[^a-zA-Z0-9n\-_/\' ]+', '', mention_text) annotation.text = mtext couple = (annotation,mention.start,mention.stop) annotations.append(couple) seen = [] for key in json_keys_to_ann: passage = BioCPassage() passage.put_infon('section', key) check = False keys = json_dict['rep_string'].keys() if key in keys: if json_dict['rep_string'].get(key) != '': # if json_dict['rep_string'].get(key) is not None and json_dict['rep_string'].get(key) != '': passage.text = json_dict['rep_string'][key]['text'] start = str(json_dict['rep_string'][key]['start']) passage.offset = start for el in annotations: if el not in seen: # start1 = int(el[1]) # start2 = int(json_dict['rep_string'][key]['start']) # stop1 = int(el[2]) # stop2 = int(json_dict['rep_string'][key]['end']) if int(el[1]) >= int(json_dict['rep_string'][key]['start']) and int(el[2]) <= int(json_dict['rep_string'][key]['end']): check = True passage.add_annotation(el[0]) seen.append(el) # 
passage.add_annotation(el[0]) if check: document.add_passage(passage) collection.add_document(document) collection1.add_document(document) print(writer) except Exception as e: print(e) return False else: #os.remove(path1) if form == 'json': # os.remove(path1) return json_writer # return True return writer def create_csv_to_download(report_type,annotation_mode,username,use,inst,lang,action,response,batch): """This method creates a csv to download depending on the language, the usecase, the institute chosen.""" usecase = UseCase.objects.get(name=use) batch_num = [] if batch is None: b = Report.objects.filter(name=usecase).values('batch') for el in b: if el['batch'] not in batch_num: batch_num.append(el['batch']) else: batch_num.append(batch) languages = ['English','english'] row_list = [] if action == 'labels': row_list.append(['username', 'annotation_mode','id_report', 'language','batch', 'institute', 'usecase', 'label']) elif action == 'mentions': row_list.append(['username','annotation_mode', 'id_report', 'language','batch', 'institute', 'usecase', 'start','stop','mention_text']) elif action == 'concept-mention': row_list.append(['username','annotation_mode', 'id_report', 'language','batch', 'institute', 'usecase', 'start', 'stop', 'mention_text','concept_name','concept_url','area']) elif action == 'concepts': row_list.append(['username','annotation_mode', 'id_report', 'language','batch', 'institute', 'usecase', 'concept_url','concept_name','area']) try: if report_type == 'reports': with connection.cursor() as cursor: # if use is not None and lang is not None and inst is not None: if annotation_mode == 'Human': if action == 'labels': cursor.execute( "SELECT DISTINCT a.username,a.ns_id,r.id_report,r.language,r.batch,r.institute,r.name, a.label FROM report AS r INNER JOIN associate AS a ON r.id_report = a.id_report AND r.language = a.language INNER JOIN associate as aa on aa.id_report = a.id_report and aa.language = a.language and a.label = aa.label and aa.ns_id = 
a.ns_id and aa.seq_number = a.seq_number WHERE a.username = %s AND r.language = COALESCE(%s,r.language) AND r.name = %s AND r.institute = COALESCE(%s,r.institute) AND a.ns_id = %s AND r.batch = COALESCE(%s,r.batch) AND r.institute != %s", [str(username),(lang), str(use), (inst),str(annotation_mode),(batch),'PUBMED']) if action == 'mentions': cursor.execute( "SELECT DISTINCT a.username,a.ns_id,r.id_report,r.language,r.batch,r.institute,r.name,a.start,a.stop,m.mention_text FROM report AS r INNER JOIN annotate AS a ON r.id_report = a.id_report AND r.language = a.language INNER JOIN mention AS m ON m.id_report = a.id_report AND m.language = a.language AND a.start = m.start AND a.stop = m.stop WHERE a.username = %s AND r.language = COALESCE(%s,r.language) AND r.name = %s AND r.institute = COALESCE(%s,r.institute) AND a.ns_id = %s AND r.batch = COALESCE(%s,r.batch) AND r.institute != %s", [str(username),(lang), str(use), (inst),str(annotation_mode),(batch),'PUBMED']) if action == 'concepts': cursor.execute( "SELECT DISTINCT a.username,a.ns_id,r.id_report,r.language,r.batch,r.institute,r.name,c.concept_url, c.name, a.name FROM report AS r INNER JOIN contains AS a ON r.id_report = a.id_report AND r.language = a.language INNER JOIN concept AS c ON c.concept_url = a.concept_url WHERE a.username = %s AND r.language = COALESCE(%s,r.language) AND r.name = %s AND r.institute = COALESCE(%s,r.institute) AND a.ns_id = %s AND r.batch = COALESCE(%s,r.batch) AND r.institute != %s", [str(username),(lang), str(use), (inst),str(annotation_mode),(batch),'PUBMED']) if action == 'concept-mention': cursor.execute( "SELECT DISTINCT a.username,a.ns_id,r.id_report,r.language,r.batch,r.institute,r.name,a.start,a.stop,m.mention_text,c.name,c.concept_url,a.name FROM report AS r INNER JOIN linked AS a ON r.id_report = a.id_report AND r.language = a.language INNER JOIN concept as c ON a.concept_url = c.concept_url INNER JOIN mention AS m ON m.id_report = a.id_report AND m.language = a.language AND 
a.start = m.start AND a.stop = m.stop WHERE a.username = %s AND r.language = COALESCE(%s,r.language) AND r.name = %s AND r.institute = COALESCE(%s,r.institute) AND a.ns_id = %s AND r.batch = COALESCE(%s,r.batch) AND r.institute != %s", [str(username),(lang), str(use), (inst),str(annotation_mode),(batch),'PUBMED']) elif annotation_mode == 'Robot': if action == 'labels': cursor.execute( "SELECT DISTINCT a.username,a.ns_id,r.id_report,r.language,r.batch,r.institute,r.name,a.label FROM report AS r INNER JOIN associate AS a ON r.id_report = a.id_report AND r.language = a.language WHERE r.name = %s AND a.ns_id = %s and a.username = %s AND r.institute = COALESCE(%s,r.institute) and r.language = COALESCE(%s,r.language) and (a.id_report,a.language) IN (select g.id_report,g.language FROM ground_truth_log_file as g where g.gt_type = %s and g.ns_id = %s and g.username =%s AND r.batch = COALESCE(%s,r.batch) AND r.institute != %s) ", [str(use), 'Robot', str(username), (inst), (lang), 'labels', 'Robot', str(username), batch, 'PUBMED']) # CAMBIO # cursor.execute( # "SELECT DISTINCT a.username,a.ns_id,r.id_report,r.language,r.batch,r.institute,r.name,a.label FROM report AS r INNER JOIN associate AS a ON r.id_report = a.id_report AND r.language = a.language WHERE r.name = %s AND a.ns_id = %s and a.username = %s AND r.institute = COALESCE(%s,r.institute) and r.language = COALESCE(%s,r.language) and (a.id_report,a.language) IN (select g.id_report,g.language FROM ground_truth_log_file as g inner join ground_truth_log_file as gg on g.id_report = gg.id_report and g.language = gg.language and g.gt_type = gg.gt_type and g.ns_id = gg.ns_id where g.gt_type = %s and g.ns_id = %s and gg.insertion_time != g.insertion_time and gg.username = %s and g.username =%s AND r.batch = COALESCE(%s,r.batch) AND r.institute != %s) ", # [str(use), 'Robot', str(username), str(inst), str(lang), 'labels', 'Robot', 'Robot_user', # str(username), batch, 'PUBMED']) if action == 'mentions': # cursor.execute( # 
"SELECT DISTINCT a.username,a.ns_id,r.id_report,r.language,r.batch,r.institute,r.name,a.start,a.stop,m.mention_text FROM report AS r INNER JOIN annotate AS a ON r.id_report = a.id_report AND r.language = a.language INNER JOIN mention as m on m.id_report = a.id_report and m.language = a.language and m.start = a.start and m.stop = a.stop WHERE r.name = %s and a.ns_id = %s and a.username = %s AND r.institute = COALESCE(%s,r.institute) and r.language = COALESCE(%s,r.language) and (a.id_report,a.language) IN (select g.id_report,g.language FROM ground_truth_log_file as g inner join ground_truth_log_file as gg on g.id_report = gg.id_report and g.language = gg.language and g.gt_type = gg.gt_type and g.ns_id = gg.ns_id where g.gt_type = %s and g.ns_id = %s and gg.insertion_time != g.insertion_time and gg.username = %s and g.username =%s AND r.batch = COALESCE(%s,r.batch) AND r.institute != %s) ", # [str(use),'Robot', str(username), (inst), (lang), 'mentions', 'Robot', 'Robot_user', # str(username),batch,'PUBMED']) cursor.execute( "SELECT DISTINCT a.username,a.ns_id,r.id_report,r.language,r.batch,r.institute,r.name,a.start,a.stop,m.mention_text FROM report AS r INNER JOIN annotate AS a ON r.id_report = a.id_report AND r.language = a.language INNER JOIN mention as m on m.id_report = a.id_report and m.language = a.language and m.start = a.start and m.stop = a.stop WHERE r.name = %s and a.ns_id = %s and a.username = %s AND r.institute = COALESCE(%s,r.institute) and r.language = COALESCE(%s,r.language) and (a.id_report,a.language) IN (select g.id_report,g.language FROM ground_truth_log_file as g where g.gt_type = %s and g.ns_id = %s and g.username =%s AND r.batch = COALESCE(%s,r.batch) AND r.institute != %s) ", [str(use), 'Robot', str(username), (inst), (lang), 'mentions', 'Robot', str(username), batch, 'PUBMED']) if action == 'concepts': # cursor.execute( # "SELECT DISTINCT a.username,a.ns_id,r.id_report,r.language,r.batch,r.institute,r.name,a.concept_url,c.name,a.name FROM 
report AS r INNER JOIN contains AS a ON r.id_report = a.id_report AND r.language = a.language INNER JOIN concept as c on c.concept_url = a.concept_url INNER JOIN semantic_area as s on a.name = s.name WHERE r.name = %s AND a.ns_id = %s and a.username = %s AND r.institute = COALESCE(%s,r.institute) and r.language = COALESCE(%s,r.language) and (a.id_report,a.language) IN (select g.id_report,g.language FROM ground_truth_log_file as g inner join ground_truth_log_file as gg on g.id_report = gg.id_report and g.language = gg.language and g.gt_type = gg.gt_type and g.ns_id = gg.ns_id where g.gt_type = %s and g.ns_id = %s and gg.insertion_time != g.insertion_time and gg.username = %s and g.username =%s AND r.batch = COALESCE(%s,r.batch) AND r.institute != %s) ", # [str(use),'Robot', str(username), (inst), (lang), 'concepts', 'Robot', 'Robot_user', # str(username),batch,'PUBMED']) cursor.execute( "SELECT DISTINCT a.username,a.ns_id,r.id_report,r.language,r.batch,r.institute,r.name,a.concept_url,c.name,a.name FROM report AS r INNER JOIN contains AS a ON r.id_report = a.id_report AND r.language = a.language INNER JOIN concept as c on c.concept_url = a.concept_url INNER JOIN semantic_area as s on a.name = s.name WHERE r.name = %s AND a.ns_id = %s and a.username = %s AND r.institute = COALESCE(%s,r.institute) and r.language = COALESCE(%s,r.language) and (a.id_report,a.language) IN (select g.id_report,g.language FROM ground_truth_log_file as g where g.gt_type = %s and g.ns_id = %s and g.username =%s AND r.batch = COALESCE(%s,r.batch) AND r.institute != %s) ", [str(use), 'Robot', str(username), (inst), (lang), 'concepts', 'Robot', str(username), batch, 'PUBMED']) if action == 'concept-mention': # cursor.execute( # "SELECT DISTINCT a.username,a.ns_id,r.id_report,r.language,r.batch,r.institute,r.name,a.start,a.stop,m.mention_text,c.name,c.concept_url,a.name FROM report AS r INNER JOIN linked AS a ON r.id_report = a.id_report AND r.language = a.language INNER JOIN mention as m on 
m.id_report = a.id_report and m.language = a.language and a.start = m.start and a.stop = m.stop INNER JOIN concept as c on c.concept_url = a.concept_url inner join semantic_area as s on s.name = a.name WHERE r.name = %s AND a.ns_id = %s and a.username = %s AND r.institute = COALESCE(%s,r.institute) and r.language = COALESCE(%s,r.language) and (a.id_report,a.language) IN (select g.id_report,g.language FROM ground_truth_log_file as g inner join ground_truth_log_file as gg on g.id_report = gg.id_report and g.language = gg.language and g.gt_type = gg.gt_type and g.ns_id = gg.ns_id where g.gt_type = %s and g.ns_id = %s and gg.insertion_time != g.insertion_time and gg.username = %s and g.username =%s AND r.batch = COALESCE(%s,r.batch) AND r.institute != %s) ", # [str(use),'Robot', str(username), (inst), (lang), 'concept-mention', 'Robot', 'Robot_user', # str(username),batch,'PUBMED']) cursor.execute( "SELECT DISTINCT a.username,a.ns_id,r.id_report,r.language,r.batch,r.institute,r.name,a.start,a.stop,m.mention_text,c.name,c.concept_url,a.name FROM report AS r INNER JOIN linked AS a ON r.id_report = a.id_report AND r.language = a.language INNER JOIN mention as m on m.id_report = a.id_report and m.language = a.language and a.start = m.start and a.stop = m.stop INNER JOIN concept as c on c.concept_url = a.concept_url inner join semantic_area as s on s.name = a.name WHERE r.name = %s AND a.ns_id = %s and a.username = %s AND r.institute = COALESCE(%s,r.institute) and r.language = COALESCE(%s,r.language) and (a.id_report,a.language) IN (select g.id_report,g.language FROM ground_truth_log_file as g where g.gt_type = %s and g.ns_id = %s and g.username =%s AND r.batch = COALESCE(%s,r.batch) AND r.institute != %s) ", [str(use), 'Robot', str(username), (inst), (lang), 'concept-mention', 'Robot', str(username), batch, 'PUBMED']) reports = cursor.fetchall() reports = sorted(reports, key=lambda x: x[1]) for el in reports: row = list(el) if row[1] == 'Human': row[1] = 'Manual' else: 
row[1] = 'Automatic' if action == 'mentions' or action == 'concept-mention': row[9] = re.sub('[^a-zA-Z0-9n\-_/\' ]+', '', row[9]) row_list.append(row) elif report_type == 'pubmed': with connection.cursor() as cursor: # if use is not None and lang is not None and inst is not None: if annotation_mode == 'Human': if action == 'labels': cursor.execute( "SELECT DISTINCT a.username,a.ns_id,r.id_report,r.language,r.batch,r.institute,r.name, a.label FROM report AS r INNER JOIN associate AS a ON r.id_report = a.id_report AND r.language = a.language WHERE a.username = %s AND r.language in %s AND r.name = %s AND r.institute = %s AND a.ns_id = %s AND r.batch =COALESCE(%s,r.batch)", [str(username), tuple(languages), str(use), 'PUBMED','Human',batch]) if action == 'mentions': cursor.execute( "SELECT DISTINCT a.username,a.ns_id,r.id_report,r.language,r.batch,r.institute,r.name,a.start,a.stop,m.mention_text FROM report AS r INNER JOIN annotate AS a ON r.id_report = a.id_report AND r.language = a.language INNER JOIN mention AS m ON m.id_report = a.id_report AND m.language = a.language AND a.start = m.start AND a.stop = m.stop WHERE a.username = %s AND r.language in %s AND r.name = %s AND r.institute = %s AND a.ns_id = %s AND r.batch =COALESCE(%s,r.batch)", [str(username), tuple(languages), str(use), 'PUBMED','Human',batch]) if action == 'concepts': cursor.execute( "SELECT DISTINCT a.username,a.ns_id,r.id_report,r.language,r.batch,r.institute,r.name,c.concept_url, c.name, a.name FROM report AS r INNER JOIN contains AS a ON r.id_report = a.id_report AND r.language = a.language INNER JOIN concept AS c ON c.concept_url = a.concept_url WHERE a.username = %s AND r.language in %s AND r.name = %s AND r.institute = %s AND a.ns_id = %s AND r.batch =COALESCE(%s,r.batch)", [str(username), tuple(languages), str(use), 'PUBMED','Human',batch]) if action == 'concept-mention': cursor.execute( "SELECT DISTINCT 
a.username,a.ns_id,r.id_report,r.language,r.batch,r.institute,r.name,a.start,a.stop,m.mention_text,c.name,c.concept_url,a.name FROM report AS r INNER JOIN linked AS a ON r.id_report = a.id_report AND r.language = a.language INNER JOIN concept as c ON a.concept_url = c.concept_url INNER JOIN mention AS m ON m.id_report = a.id_report AND m.language = a.language AND a.start = m.start AND a.stop = m.stop WHERE a.username = %s AND r.language in %s AND r.name = %s AND r.institute = %s AND a.ns_id = %s AND r.batch =COALESCE(%s,r.batch)", [str(username), tuple(languages), str(use), 'PUBMED','Human',batch]) elif annotation_mode == 'Robot': if action == 'labels': cursor.execute( "SELECT a.username,a.ns_id,r.id_report,r.language,r.batch,r.institute,r.name,a.label FROM report AS r INNER JOIN associate AS a ON r.id_report = a.id_report AND r.language = a.language WHERE r.name = %s AND a.ns_id = %s and a.username = %s AND r.institute = %s and r.language in %s and (a.id_report,a.language) IN (select g.id_report,g.language FROM ground_truth_log_file as g where g.gt_type = %s and g.ns_id = %s and g.username =%s AND r.batch =COALESCE(%s,r.batch)) ", [str(use), 'Robot', str(username), 'PUBMED', tuple(languages), 'labels', 'Robot', str(username), batch]) if action == 'mentions': cursor.execute( "SELECT DISTINCT a.username,a.ns_id,r.id_report,r.language,r.batch,r.institute,r.name,a.start,a.stop,m.mention_text FROM report AS r INNER JOIN annotate AS a ON r.id_report = a.id_report AND r.language = a.language INNER JOIN mention as m on m.id_report = a.id_report and m.language = a.language and m.start = a.start and m.stop = a.stop WHERE r.name = %s and a.ns_id = %s and a.username = %s AND r.institute = %s and r.language = %s and (a.id_report,a.language) IN (select g.id_report,g.language FROM ground_truth_log_file as g where g.gt_type = %s and g.ns_id = %s and g.username =%s AND r.batch =COALESCE(%s,r.batch)) ", [str(use), 'Robot', str(username), str(inst), str(lang), 'mentions', 'Robot', 
str(username), batch]) if action == 'concepts': cursor.execute( "SELECT DISTINCT a.username,a.ns_id,r.id_report,r.language,r.batch,r.institute,r.name,a.concept_url,c.name,a.name FROM report AS r INNER JOIN contains AS a ON r.id_report = a.id_report AND r.language = a.language INNER JOIN concept as c on c.concept_url = a.concept_url INNER JOIN semantic_area as s on a.name = s.name WHERE r.name = %s AND a.ns_id = %s and a.username = %s AND r.institute = %s and r.language = %s and (a.id_report,a.language) IN (select g.id_report,g.language FROM ground_truth_log_file as g where g.gt_type = %s and g.ns_id = %s and g.username =%s AND r.batch =COALESCE(%s,r.batch)) ", [str(use), 'Robot', str(username), str(inst), str(lang), 'concepts', 'Robot', str(username), batch]) if action == 'concept-mention': cursor.execute( "SELECT DISTINCT a.username,a.ns_id,r.id_report,r.language,r.batch,r.institute,r.name,a.start,a.stop,m.mention_text,c.name,c.concept_url,a.name FROM report AS r INNER JOIN linked AS a ON r.id_report = a.id_report AND r.language = a.language INNER JOIN mention as m on m.id_report = a.id_report and m.language = a.language and a.start = m.start and a.stop = m.stop INNER JOIN concept as c on c.concept_url = a.concept_url inner join semantic_area as s on s.name = a.name WHERE r.name = %s AND a.ns_id = %s and a.username = %s AND r.institute = %s and r.language = %s and (a.id_report,a.language) IN (select g.id_report,g.language FROM ground_truth_log_file as g where g.gt_type = %s and g.ns_id = %s and g.username =%s AND r.batch =COALESCE(%s,r.batch)) ", [str(use), 'Robot', str(username), str(inst), str(lang), 'concept-mention', 'Robot', str(username), batch]) reports = cursor.fetchall() reports = sorted(reports, key=lambda x: x[1]) for el in reports: row = list(el) if row[1] == 'Human': row[1] = 'Manual' else: row[1] = 'Automatic' if action == 'mentions' or action == 'concept-mention': row[9] = re.sub('[^a-zA-Z0-9n\-_/\' ]+', '', row[9]) row_list.append(row) except 
def create_json_to_download(report_type, action, username, use, annotation_mode=None, inst=None, lang=None, all=None, batch=None):
    """Build the JSON payload of a user's ground truths for download.

    Parameters
    ----------
    report_type : 'reports' (clinical reports) or 'pubmed' (PubMed abstracts).
    action : ground-truth type: 'labels', 'mentions', 'concepts' or
        'concept-mention' (exposed as 'linking' in the output).
    username, use : annotator and use-case name the ground truths belong to.
    annotation_mode : 'Human', 'Robot' or 'both'.
    inst, lang, batch : optional institute / language / batch filters
        (None means "any", via COALESCE in the queries).
    all : when the string 'all', every stored gt_json blob of the user is
        dumped grouped by ground-truth type and the other filters are ignored.

    Returns a dict ready to be serialised as the downloaded JSON file.
    """
    json_resp = {}
    # added 22/10/2021: full dump of the stored gt_json blobs, one list per
    # ground-truth type, taken straight from GroundTruthLogFile.
    if all == 'all':
        json_resp['groundtruths'] = {}
        types = ['labels', 'mentions', 'concepts', 'concept-mention']
        json_resp_add = {}
        for typ in types:
            a = typ
            if typ == 'concept-mention':
                a = 'linking'
            b = a + '_ground_truths'
            json_resp_add[b] = []
            gt = GroundTruthLogFile.objects.filter(username=username, gt_type=typ)
            for el in gt:
                gt_json = el.gt_json
                # strip bookkeeping keys; expose both hashed and (when stored)
                # plain report identifiers.
                del gt_json['gt_type']
                del gt_json['username']
                gt_json['id_report_hashed'] = gt_json['id_report']
                del gt_json['id_report']
                if 'id_report_not_hashed' in gt_json.keys():
                    gt_json['id_report'] = gt_json['id_report_not_hashed']
                json_resp_add[b].append(gt_json)
        json_resp['groundtruths'] = json_resp_add
        return json_resp

    cursor = connection.cursor()
    usecase = UseCase.objects.get(name=use)
    batch_num = []
    if batch is None:
        b = Report.objects.filter(name=usecase).values('batch')
        for el in b:
            if el['batch'] not in batch_num:
                batch_num.append(el['batch'])
    else:
        batch_num.append(batch)
    json_resp['groundtruths'] = []
    json_resp['username'] = username
    json_resp['usecase'] = use
    if annotation_mode == 'Human':
        json_resp['annotation_mode'] = 'Manual'
    else:
        json_resp['annotation_mode'] = 'Automatic'
    if all != 'all' and action != None and action != '':
        json_resp['action'] = action
        if action == 'concept-mention':
            json_resp['action'] = 'linking'

    if report_type == 'reports':
        # NOTE(review): when annotation_mode == 'both' neither SELECT below
        # runs before fetchall() — presumably 'both' is never requested for
        # this report_type; confirm against the callers.
        if annotation_mode != 'both':
            cursor.execute(
                "SELECT DISTINCT r.id_report,r.language,r.institute FROM report AS r INNER JOIN ground_truth_log_file AS g ON r.id_report = g.id_report AND g.language = r.language WHERE gt_type = %s AND r.name = %s AND g.ns_id = %s AND r.language = COALESCE(%s, r.language) AND r.institute = COALESCE(%s,r.institute) AND institute != %s AND r.batch = COALESCE(%s,r.batch) AND username = %s",
                [str(action), str(use), str(annotation_mode), lang, inst, 'PUBMED', batch, username])
        if annotation_mode == 'Robot':
            # exclude the rows inserted by the Robot_user agent itself
            cursor.execute(
                "SELECT DISTINCT r.id_report,r.language,r.institute FROM report AS r INNER JOIN ground_truth_log_file AS g ON r.id_report = g.id_report AND g.language = r.language WHERE g.gt_type = %s AND r.name = %s AND g.ns_id = %s AND r.language = COALESCE(%s, r.language) AND r.institute = COALESCE(%s,r.institute) AND institute != %s and g.username != %s AND r.batch = COALESCE(%s,r.batch) AND username = %s",
                [str(action), str(use), str(annotation_mode), lang, inst, 'PUBMED', 'Robot_user', batch, username])
        ids = cursor.fetchall()
        id_rep = []
        for el in ids:
            if el[0] not in id_rep:
                id_rep.append(el[0])
        for el in ids:
            json_val = {}
            json_val['id_report'] = el[0]
            json_val['language'] = el[1]
            json_val['institute'] = el[2]
            # The inner result loops below used to re-bind `el`, shadowing the
            # report tuple of this outer loop; they now use `row`.
            if action == 'labels':
                cursor.execute(
                    "SELECT r.id_report,r.language,r.institute,g.ns_id,g.label FROM associate AS g INNER JOIN report AS r ON r.id_report = g.id_report AND r.language = g.language WHERE r.name = %s AND g.username = %s AND g.ns_id = %s AND r.language = COALESCE(%s, r.language) AND r.institute = COALESCE(%s,r.institute) AND institute != %s AND r.id_report = %s AND r.language = %s AND r.institute = %s AND r.batch = COALESCE(%s,r.batch)",
                    [str(use), str(username), str(annotation_mode), lang, inst, 'PUBMED', str(el[0]), str(el[1]), str(el[2]), batch])
                ans = cursor.fetchall()
                json_val['labels'] = []
                for row in ans:
                    json_lab = {}
                    json_lab['label'] = row[4]
                    json_val['labels'].append(json_lab)
                json_resp['groundtruths'].append(json_val)
            if action == 'mentions':
                cursor.execute(
                    "SELECT r.id_report,r.language,r.institute,g.ns_id,g.start,g.stop,m.mention_text FROM annotate AS g INNER JOIN report AS r ON r.id_report = g.id_report AND r.language = g.language INNER JOIN mention AS m ON m.id_report = g.id_report AND g.language = m.language AND m.start = g.start AND m.stop = g.stop WHERE r.name = %s AND g.username = %s AND g.ns_id = %s AND r.language = COALESCE(%s, r.language) AND r.institute = COALESCE(%s,r.institute) AND institute != %s AND r.id_report = %s AND r.language = %s AND r.institute = %s AND r.batch = COALESCE(%s,r.batch)",
                    [str(use), str(username), str(annotation_mode), lang, inst, 'PUBMED', str(el[0]), str(el[1]), str(el[2]), batch])
                ans = cursor.fetchall()
                json_val['mentions'] = []
                for row in ans:
                    json_ment = {}
                    json_ment['start'] = row[4]
                    json_ment['stop'] = row[5]
                    json_ment['mention_text'] = row[6]
                    json_val['mentions'].append(json_ment)
                json_resp['groundtruths'].append(json_val)
            if action == 'concept-mention':
                cursor.execute(
                    "SELECT r.id_report,r.language,r.institute,g.ns_id,g.start,g.stop,m.mention_text,g.name,g.concept_url,c.name FROM linked AS g INNER JOIN report AS r ON r.id_report = g.id_report AND r.language = g.language INNER JOIN concept AS c ON c.concept_url = g.concept_url INNER JOIN mention AS m ON m.id_report = r.id_report AND m.language = r.language AND m.start = g.start AND g.stop = m.stop WHERE r.name = %s AND g.username = %s AND g.ns_id = %s AND r.language = COALESCE(%s, r.language) AND r.institute = COALESCE(%s,r.institute) AND institute != %s AND r.id_report = %s AND r.language = %s AND r.institute = %s AND r.batch = COALESCE(%s,r.batch)",
                    [str(use), str(username), str(annotation_mode), lang, inst, 'PUBMED', str(el[0]), str(el[1]), str(el[2]), batch])
                ans = cursor.fetchall()
                json_val['linking'] = []
                for row in ans:
                    json_link = {}
                    json_link['start'] = row[4]
                    json_link['stop'] = row[5]
                    json_link['mention_text'] = row[6]
                    json_link['area'] = row[7]
                    json_link['concept_url'] = row[8]
                    json_link['concept_name'] = row[9]
                    json_val['linking'].append(json_link)
                json_resp['groundtruths'].append(json_val)
            if action == 'concepts':
                cursor.execute(
                    "SELECT r.id_report,r.language,r.institute,g.ns_id,g.name,g.concept_url,c.name FROM contains AS g INNER JOIN report AS r ON r.id_report = g.id_report AND r.language = g.language INNER JOIN concept AS c ON c.concept_url = g.concept_url WHERE r.name = %s AND g.username = %s AND g.ns_id = %s AND r.language = COALESCE(%s, r.language) AND r.institute = COALESCE(%s,r.institute) AND institute != %s AND r.id_report = %s AND r.language = %s AND r.institute = %s AND r.batch = COALESCE(%s,r.batch)",
                    [str(use), str(username), str(annotation_mode), lang, inst, 'PUBMED', str(el[0]), str(el[1]), str(el[2]), batch])
                ans = cursor.fetchall()
                json_val['concepts'] = []
                for row in ans:
                    json_conc = {}
                    json_conc['area'] = row[4]
                    json_conc['concept_url'] = row[5]
                    json_conc['concept_name'] = row[6]
                    json_val['concepts'].append(json_conc)
                json_resp['groundtruths'].append(json_val)

    elif report_type == 'pubmed':
        cursor.execute(
            "SELECT DISTINCT r.id_report,r.language,r.institute FROM report AS r INNER JOIN ground_truth_log_file AS g ON r.id_report = g.id_report AND g.language = r.language WHERE gt_type = %s AND r.name = %s AND g.ns_id = %s AND institute = %s and r.language = %s AND r.batch = COALESCE(%s,r.batch) AND username = %s",
            [str(action), str(use), str(annotation_mode), str(inst), str(lang), batch, username])
        if annotation_mode == 'Robot':
            # exclude the rows inserted by the Robot_user agent itself
            cursor.execute(
                "SELECT DISTINCT r.id_report,r.language,r.institute FROM report AS r INNER JOIN ground_truth_log_file AS g ON r.id_report = g.id_report AND g.language = r.language WHERE g.gt_type = %s AND r.name = %s AND g.ns_id = %s AND institute = %s and r.language = %s and g.username != %s AND r.batch = COALESCE(%s,r.batch) AND username = %s",
                [str(action), str(use), str(annotation_mode), str(inst), str(lang), 'Robot_user', batch, username])
        ids = cursor.fetchall()
        id_rep = []
        for el in ids:
            if el[0] not in id_rep:
                id_rep.append(el[0])
        json_resp['institute'] = inst
        json_resp['language'] = lang
        for el in ids:
            json_val = {}
            json_val['id_report'] = el[0]
            if action == 'labels':
                cursor.execute(
                    "SELECT r.id_report,r.language,r.institute,g.ns_id,g.label FROM associate AS g INNER JOIN report AS r ON r.id_report = g.id_report AND r.language = g.language WHERE r.name = %s AND g.username = %s AND g.ns_id = %s AND r.language = %s AND r.institute = %s AND r.id_report = %s AND r.batch = COALESCE(%s,r.batch)",
                    [str(use), str(username), str(annotation_mode), str(lang), str(inst), str(el[0]), batch])
                ans = cursor.fetchall()
                json_val['labels'] = []
                for row in ans:
                    json_lab = {}
                    json_lab['label'] = row[4]
                    json_val['labels'].append(json_lab)
                json_resp['groundtruths'].append(json_val)
            if action == 'mentions':
                cursor.execute(
                    "SELECT r.id_report,r.language,r.institute,g.ns_id,g.start,g.stop,m.mention_text FROM annotate AS g INNER JOIN report AS r ON r.id_report = g.id_report AND r.language = g.language INNER JOIN mention AS m ON m.id_report = g.id_report AND g.language = m.language AND m.start = g.start AND m.stop = g.stop WHERE r.name = %s AND g.username = %s AND g.ns_id = %s AND r.language = %s AND r.institute = %s AND r.id_report = %s AND r.batch = COALESCE(%s,r.batch)",
                    [str(use), str(username), str(annotation_mode), str(lang), str(inst), str(el[0]), batch])
                ans = cursor.fetchall()
                json_val['mentions'] = []
                for row in ans:
                    json_ment = {}
                    json_ment['start'] = row[4]
                    json_ment['stop'] = row[5]
                    json_ment['mention_text'] = row[6]
                    json_val['mentions'].append(json_ment)
                json_resp['groundtruths'].append(json_val)
            if action == 'concept-mention':
                cursor.execute(
                    "SELECT r.id_report,r.language,r.institute,g.ns_id,g.start,g.stop,m.mention_text,g.name,g.concept_url,c.name FROM linked AS g INNER JOIN report AS r ON r.id_report = g.id_report AND r.language = g.language INNER JOIN concept AS c ON c.concept_url = g.concept_url INNER JOIN mention AS m ON m.id_report = r.id_report AND m.language = r.language AND m.start = g.start AND g.stop = m.stop WHERE r.name = %s AND g.username = %s AND g.ns_id = %s AND r.language = %s AND r.institute = %s AND r.id_report = %s AND r.batch = COALESCE(%s,r.batch)",
                    [str(use), str(username), str(annotation_mode), str(lang), str(inst), str(el[0]), batch])
                ans = cursor.fetchall()
                json_val['linking'] = []
                for row in ans:
                    json_ass = {}
                    json_ass['start'] = row[4]
                    json_ass['stop'] = row[5]
                    json_ass['mention_text'] = row[6]
                    json_ass['area'] = row[7]
                    json_ass['concept_url'] = row[8]
                    json_ass['concept_name'] = row[9]
                    json_val['linking'].append(json_ass)
                json_resp['groundtruths'].append(json_val)
            if action == 'concepts':
                cursor.execute(
                    "SELECT r.id_report,r.language,r.institute,g.ns_id,g.name,g.concept_url,c.name FROM contains AS g INNER JOIN report AS r ON r.id_report = g.id_report AND r.language = g.language INNER JOIN concept AS c ON c.concept_url = g.concept_url WHERE r.name = %s AND g.username = %s AND g.ns_id = %s AND r.language = %s AND r.institute = %s AND r.id_report = %s AND r.batch = COALESCE(%s,r.batch)",
                    [str(use), str(username), str(annotation_mode), str(lang), str(inst), str(el[0]), batch])
                ans = cursor.fetchall()
                json_val['concepts'] = []
                for row in ans:
                    json_conc = {}
                    json_conc['area'] = row[4]
                    json_conc['concept_url'] = row[5]
                    json_conc['concept_name'] = row[6]
                    json_val['concepts'].append(json_conc)
                json_resp['groundtruths'].append(json_val)
    return json_resp


from MedTAG_sket_dock_App.utils_majority_vote import *


def _majority_bioc_keys(report, mode):
    """Return (json_keys, json_keys_to_ann) for *report* under *mode*.

    Merges the display/annotation fields configured for human annotators
    with, when mode == 'Robot', the per-usecase fields listed in
    auto_fields.json.
    """
    data = get_fields_from_json()
    json_keys_to_display_human = data['fields']
    json_keys_to_display_human.extend(['authors', 'volume', 'journal', 'year'])
    json_keys_to_ann_human = data['fields_to_ann']
    json_keys_to_ann_human.extend(['abstract', 'title'])
    json_keys_to_display_human = list(set(json_keys_to_display_human))
    json_keys_to_ann_human = list(set(json_keys_to_ann_human))
    json_keys_to_ann_robot = []
    if mode == 'Robot':
        workpath = os.path.dirname(os.path.abspath(__file__))  # Returns the Path your .py file is in
        with open(os.path.join(workpath, './automatic_annotation/auto_fields/auto_fields.json')) as out:
            data = json.load(out)
        json_keys_to_ann_robot = data['extract_fields'][report.name.name]
        json_keys_to_ann_robot.append('abstract')
        json_keys_to_ann_robot.append('title')
        json_keys_to_ann_robot = list(set(json_keys_to_ann_robot))
    json_keys = []
    json_keys_to_ann = []
    if mode == 'Human':
        json_keys = list(set(json_keys_to_display_human + json_keys_to_ann_human))
        json_keys_to_ann = json_keys_to_ann_human
    elif mode == 'Robot':
        json_keys = list(set(json_keys_to_display_human + json_keys_to_ann_robot))
        json_keys_to_ann = json_keys_to_ann_robot
    elif mode == 'both':
        # NOTE: json_keys_to_ann_robot is empty here by construction (the
        # auto_fields file is only read when mode == 'Robot'), matching the
        # original behaviour.
        json_keys = list(set(json_keys_to_display_human + json_keys_to_ann_robot + json_keys_to_ann_human))
        json_keys_to_ann = list(set(json_keys_to_ann_robot + json_keys_to_ann_human))
    return json_keys, json_keys_to_ann


def _majority_bioc_passages(document, json_dict, json_keys_to_ann, annotations):
    """Attach one BioCPassage per annotated section of the report.

    Each annotation (a (BioCAnnotation, start, stop) tuple) is placed in the
    first section whose [start, end] range contains it; passages with no
    text or no matching annotation are not added to *document*.
    """
    seen = []
    for key in json_keys_to_ann:
        passage = BioCPassage()
        passage.put_infon('section', key)
        check = False
        keys = json_dict['rep_string'].keys()
        if key in list(keys):
            if json_dict['rep_string'].get(key) != '':
                passage.text = json_dict['rep_string'][key]['text']
                start = str(json_dict['rep_string'][key]['start'])
                passage.offset = (start)
                for el in annotations:
                    if el not in seen:
                        if int(el[1]) >= int(json_dict['rep_string'][key]['start']) and int(
                                el[2]) <= int(json_dict['rep_string'][key]['end']):
                            check = True
                            passage.add_annotation(el[0])
                            seen.append(el)
        if check:
            document.add_passage(passage)


def download_majority_gt(chosen_users, report_list, action, mode, format, response=None):
    """Export the majority-vote ground truth of a set of reports.

    Parameters
    ----------
    chosen_users : users whose annotations are merged by majority vote.
    report_list : dicts with at least 'id_report' and 'language'.
    action : 'labels', 'mentions', 'concepts' or 'concept-mention'.
    mode : 'Human', 'Robot' or 'both'.
    format : 'json', 'csv', or a string starting with 'bioc'
        ('bioc' alone -> XML writer, ending in 'json' -> JSON writer).
    response : HttpResponse used as the csv output stream.

    Returns the JSON dict, the csv response, a BioC writer, or False on error.
    """
    if format == 'json':
        json_to_ret = create_majority_json(chosen_users, report_list, action, mode)
        return json_to_ret
    elif format == 'csv':
        row_list = []
        # header row depends on the ground-truth type being exported
        if action == 'labels':
            row_list.append(
                ['annotation_mode', 'action', 'id_report', 'language', 'institute', 'usecase', 'label',
                 'total_human_annotations', 'total_robot_annotations'])
        elif action == 'mentions':
            row_list.append(
                ['annotation_mode', 'action', 'id_report', 'language', 'institute', 'usecase', 'start', 'stop',
                 'mention_text', 'mention_annotators', 'total_human_annotations', 'total_robot_annotations'])
        elif action == 'concept-mention':
            row_list.append(
                ['annotation_mode', 'action', 'id_report', 'language', 'institute', 'usecase', 'start', 'stop',
                 'mention_text', 'concept_name', 'concept_url', 'area', 'total_human_annotations',
                 'total_robot_annotations'])
        elif action == 'concepts':
            row_list.append(
                ['annotation_mode', 'action', 'id_report', 'language', 'institute', 'usecase', 'concept_url',
                 'concept_name', 'area', 'total_human_annotations', 'total_robot_annotations'])
        try:
            row_list.extend(create_majority_csv(chosen_users, report_list, action, mode))
        except Exception as e:
            print(e)
            return False
        else:
            writer = csv.writer(response)
            writer.writerows(row_list)
            return response
    elif format.startswith('bioc'):
        try:
            # Build an XML and a JSON collection in parallel; the same
            # documents are added to both and the format suffix picks the
            # writer to return.
            writer = BioCXMLWriter()
            json_writer = BioCJSONWriter()
            writer.collection = BioCCollection()
            json_writer.collection = BioCCollection()
            collection = writer.collection
            collection1 = json_writer.collection
            today = str(date.today())
            collection.date = today
            collection.source = 'MEDTAG Collection'
            collection.put_infon('gt_type', 'MAJORITY VOTE')
            collection1.date = today
            collection1.source = 'MEDTAG Collection'
            collection1.put_infon('gt_type', 'MAJORITY VOTE')
            if action == 'mentions':
                collection.put_infon('annotation_type', 'mentions')
                collection.key = 'mentions.key'
                collection1.put_infon('annotation_type', 'mentions')
                collection1.key = 'mentions.key'
                for rep in report_list:
                    report = Report.objects.get(id_report=rep['id_report'], language=rep['language'])
                    json_keys, json_keys_to_ann = _majority_bioc_keys(report, mode)
                    json_dict = report_get_start_end(json_keys, json_keys_to_ann, report.id_report, report.language)
                    document = BioCDocument()
                    document.id = str(report.id_report)
                    document.put_infon('usecase', report.name_id)
                    document.put_infon('language', report.language)
                    document.put_infon('institute', report.institute)
                    gt_dict = create_majority_vote_gt(action, chosen_users, mode, report)
                    annotations = []
                    count = 0
                    for val in gt_dict[mode]:
                        annotation = BioCAnnotation()
                        annotation.id = str(count)
                        count = count + 1
                        loc_ann = BioCLocation()
                        loc_ann.offset = str(val['start'])
                        loc_ann.length = str(val['stop'] - val['start'] + 1)
                        annotation.add_location(loc_ann)
                        mention_text = val['mention']
                        # keep only characters BioC downstream tooling accepts
                        mtext = re.sub(r"[^a-zA-Z0-9n\-_/' ]+", '', mention_text)
                        annotation.text = mtext
                        annotations.append((annotation, val['start'], val['stop']))
                    _majority_bioc_passages(document, json_dict, json_keys_to_ann, annotations)
                    collection.add_document(document)
                    collection1.add_document(document)
            elif action == 'concept-mention':
                collection.put_infon('annotation_type', 'linking')
                collection.key = 'linking.key'
                collection1.put_infon('annotation_type', 'linking')
                collection1.key = 'linking.key'
                for rep in report_list:
                    report = Report.objects.get(id_report=rep['id_report'], language=rep['language'])
                    json_keys, json_keys_to_ann = _majority_bioc_keys(report, mode)
                    # loop-invariant: computed once per report (previously it
                    # was redundantly recomputed for every annotation)
                    json_dict = report_get_start_end(json_keys, json_keys_to_ann, report.id_report, report.language)
                    document = BioCDocument()
                    document.id = str(report.id_report)
                    document.put_infon('usecase', report.name_id)
                    document.put_infon('language', report.language)
                    document.put_infon('institute', report.institute)
                    gt_dict = create_majority_vote_gt(action, chosen_users, mode, report)
                    annotations = []
                    count = 0
                    for val in gt_dict[mode]:
                        annotation = BioCAnnotation()
                        annotation.id = str(count)
                        annotation.put_infon('concept_name', val['concept_name'])
                        annotation.put_infon('concept_url', val['concept_url'])
                        count = count + 1
                        loc_ann = BioCLocation()
                        loc_ann.offset = str(val['start'])
                        loc_ann.length = str(val['stop'] - val['start'] + 1)
                        annotation.add_location(loc_ann)
                        mention_text = val['mention']
                        mtext = re.sub(r"[^a-zA-Z0-9n\-_/' ]+", '', mention_text)
                        annotation.text = mtext
                        annotations.append((annotation, val['start'], val['stop']))
                    _majority_bioc_passages(document, json_dict, json_keys_to_ann, annotations)
                    collection.add_document(document)
                    collection1.add_document(document)
        except Exception as e:
            print(e)
            return False
        else:
            if format.endswith('json'):
                return json_writer
            return writer


from itertools import chain
table accessible only by the admin.""" cursor = connection.cursor() if format == 'json': json_resp = {} json_resp['action'] = action if mode == 'Robot': json_resp['annotation_mode'] = 'Automatic' elif mode =='Human': json_resp['annotation_mode'] = 'Manual' elif mode == 'both': json_resp['annotation_mode'] = 'Manual_and_Automatic' json_resp['ground_truth_list'] = [] for report in report_list: rep = Report.objects.get(id_report = report['id_report'],language = report['language']) ns_id_human = NameSpace.objects.get(ns_id = 'Human') gt_human = GroundTruthLogFile.objects.filter(id_report = rep,language = rep.language,gt_type=action,ns_id = ns_id_human) ns_id_robot = NameSpace.objects.get(ns_id = 'Robot') agent = User.objects.get(username = 'Robot_user',ns_id = ns_id_robot) gt_agent = GroundTruthLogFile.objects.filter(username = agent,id_report = rep,language = rep.language,gt_type=action,ns_id = ns_id_robot) ins_arr = [] for el in gt_agent: ins_arr.append(el.insertion_time) gt_list = GroundTruthLogFile.objects.filter(id_report = rep,language = rep.language,gt_type=action,ns_id = ns_id_robot).exclude(insertion_time__in=ins_arr) if mode == 'both': gt = chain(gt_human,gt_list) elif mode == 'Human': gt = gt_human elif mode == 'Robot': gt = gt_list for el in gt: if el.username_id != 'Robot_user': gt_j = el.gt_json if mode != 'both': del gt_j['mode'] json_resp['ground_truth_list'].append(el.gt_json) return json_resp elif format == 'csv': row_list = [] if action == 'labels': row_list.append(['username', 'user_type', 'id_report', 'language', 'institute', 'usecase', 'label']) elif action == 'mentions': row_list.append( ['username', 'user_type', 'id_report', 'language', 'institute', 'usecase', 'start', 'stop', 'mention_text']) elif action == 'concept-mention': row_list.append( ['username', 'user_type', 'id_report', 'language', 'institute', 'usecase', 'start', 'stop', 'mention_text', 'concept_name', 'concept_url', 'area']) elif action == 'concepts': row_list.append( ['username', 
'user_type', 'id_report', 'language', 'institute', 'usecase', 'concept_url', 'concept_name', 'area']) try: for report in report_list: rep = Report.objects.get(id_report=report['id_report'], language=report['language']) cursor = connection.cursor() if action == 'labels': cursor.execute( "SELECT a.username,a.ns_id,r.id_report,r.language,r.batch,r.institute,r.name, a.label FROM report AS r INNER JOIN associate AS a ON r.id_report = a.id_report AND r.language = a.language WHERE a.id_report = %s AND r.language = %s AND ns_id = %s ", [str(report['id_report']), str(report['language']), 'Human']) reports_human_labels = cursor.fetchall() agent_ns = NameSpace.objects.get(ns_id = 'Robot') agent = User.objects.get(username = 'Robot_user',ns_id = agent_ns) ass = Associate.objects.filter(username = agent,ns_id = agent_ns,id_report = rep,language = rep.language) ins_arr = [] reports_robot_labels = [] for el in ass: ins_arr.append(el.insertion_time) if len(ins_arr) > 0: cursor.execute( "SELECT a.username,a.ns_id,r.id_report,r.language,r.batch,r.institute,r.name, a.label FROM report AS r INNER JOIN associate AS a ON r.id_report = a.id_report AND r.language = a.language WHERE a.id_report = %s AND r.language = %s AND ns_id = %s AND a.insertion_time not in %s", [str(report['id_report']), str(report['language']), 'Robot',tuple(ins_arr)]) reports_robot_labels = cursor.fetchall() if mode == 'both': reports = chain(reports_human_labels, reports_robot_labels) elif mode == 'Human': reports = reports_human_labels elif mode == 'Robot': reports = reports_robot_labels if action == 'mentions': cursor.execute( "SELECT a.username,a.ns_id,r.id_report,r.language,r.batch,r.institute,r.name,a.start,a.stop,m.mention_text FROM report AS r INNER JOIN annotate AS a ON r.id_report = a.id_report AND r.language = a.language INNER JOIN mention AS m ON m.id_report = a.id_report AND m.language = a.language AND a.start = m.start AND a.stop = m.stop WHERE r.id_report = %s AND r.language = %s AND ns_id = %s", 
[str(report['id_report']), str(report['language']), 'Human']) reports_human_mentions = cursor.fetchall() agent_ns = NameSpace.objects.get(ns_id='Robot') agent = User.objects.get(username='Robot_user', ns_id=agent_ns) ass = Annotate.objects.filter(username=agent, ns_id=agent_ns, id_report=rep, language=rep.language) ins_arr = [] reports_robot_mentions = [] for el in ass: ins_arr.append(el.insertion_time) if len(ins_arr) > 0: cursor.execute( "SELECT a.username,a.ns_id,r.id_report,r.language,r.batch,r.institute,r.name,a.start,a.stop,m.mention_text FROM report AS r INNER JOIN annotate AS a ON r.id_report = a.id_report AND r.language = a.language INNER JOIN mention AS m ON m.id_report = a.id_report AND m.language = a.language AND a.start = m.start AND a.stop = m.stop WHERE r.id_report = %s AND r.language = %s AND ns_id = %s and a.insertion_time not in %s", [str(report['id_report']), str(report['language']), 'Robot',tuple(ins_arr)]) reports_robot_mentions = cursor.fetchall() if mode == 'both': reports = chain(reports_human_mentions, reports_robot_mentions) elif mode == 'Human': reports = reports_human_mentions elif mode == 'Robot': reports = reports_robot_mentions elif action == 'concepts': cursor.execute( "SELECT a.username,a.ns_id,r.id_report,r.language,r.batch,r.institute,r.name,c.concept_url, c.name, a.name FROM report AS r INNER JOIN contains AS a ON r.id_report = a.id_report AND r.language = a.language INNER JOIN concept AS c ON c.concept_url = a.concept_url WHERE r.id_report = %s AND r.language = %s AND ns_id = %s", [str(report['id_report']), str(report['language']), 'Human']) reports_human_concepts = cursor.fetchall() agent_ns = NameSpace.objects.get(ns_id='Robot') agent = User.objects.get(username='Robot_user', ns_id=agent_ns) ass = Contains.objects.filter(username=agent, ns_id=agent_ns, id_report=rep, language=rep.language) ins_arr = [] reports_robot_concepts =[] for el in ass: ins_arr.append(el.insertion_time) if len(ins_arr) > 0: cursor.execute( "SELECT 
a.username,a.ns_id,r.id_report,r.language,r.batch,r.institute,r.name,c.concept_url, c.name, a.name FROM report AS r INNER JOIN contains AS a ON r.id_report = a.id_report AND r.language = a.language INNER JOIN concept AS c ON c.concept_url = a.concept_url WHERE r.id_report = %s AND r.language = %s AND ns_id = %s and a.insertion_time not in %s", [str(report['id_report']), str(report['language']), 'Robot',tuple(ins_arr)]) reports_robot_concepts = cursor.fetchall() if mode == 'both': reports = chain(reports_human_concepts, reports_robot_concepts) elif mode == 'Human': reports = reports_human_concepts elif mode == 'Robot': reports = reports_robot_concepts elif action == 'concept-mention': cursor.execute( "SELECT a.username,a.ns_id,r.id_report,r.language,r.batch,r.institute,r.name,a.start,a.stop,m.mention_text,c.name,c.concept_url,a.name FROM report AS r INNER JOIN linked AS a ON r.id_report = a.id_report AND r.language = a.language INNER JOIN concept as c ON a.concept_url = c.concept_url INNER JOIN mention AS m ON m.id_report = a.id_report AND m.language = a.language AND a.start = m.start AND a.stop = m.stop WHERE r.id_report = %s AND r.language = %s AND ns_id = %s", [str(report['id_report']), str(report['language']), 'Human']) reports_human_linking = cursor.fetchall() agent_ns = NameSpace.objects.get(ns_id='Robot') agent = User.objects.get(username='Robot_user', ns_id=agent_ns) ass = Linked.objects.filter(username=agent, ns_id=agent_ns, id_report=rep, language=rep.language) ins_arr = [] reports_robot_linking = [] for el in ass: ins_arr.append(el.insertion_time) if len(ins_arr) >0: cursor.execute( "SELECT a.username,a.ns_id,r.id_report,r.language,r.batch,r.institute,r.name,a.start,a.stop,m.mention_text,c.name,c.concept_url,a.name FROM report AS r INNER JOIN linked AS a ON r.id_report = a.id_report AND r.language = a.language INNER JOIN concept as c ON a.concept_url = c.concept_url INNER JOIN mention AS m ON m.id_report = a.id_report AND m.language = a.language AND 
a.start = m.start AND a.stop = m.stop WHERE r.id_report = %s AND r.language = %s AND ns_id = %s and a.insertion_time not in %s", [str(report['id_report']), str(report['language']), 'Robot',tuple(ins_arr)]) reports_robot_linking = cursor.fetchall() if mode == 'both': reports = chain(reports_human_linking,reports_robot_linking) elif mode == 'Human': reports = reports_human_linking elif mode == 'Robot': reports = reports_robot_linking for el in reports: row = list(el) if row[0] != 'Robot_user': if row[1] == 'Human': row[1] = 'Manual' else: row[1] = 'Automatic' if action == 'mentions' or action == 'concept-mention': row[9] = re.sub('[^a-zA-Z0-9n\-_/\' ]+', '', row[9]) row_list.append(row) except Exception as e: print(e) return False else: writer = csv.writer(response) writer.writerows(row_list) return response elif format.startswith('bioc'): if mode is not None and mode != 'both': ns = NameSpace.objects.get(ns_id=mode) try: writer = BioCXMLWriter() json_writer = BioCJSONWriter() writer.collection = BioCCollection() json_writer.collection = BioCCollection() collection = writer.collection collection1 = json_writer.collection today = str(date.today()) collection.date = today collection.source = 'MEDTAG Collection' collection1.date = today collection1.source = 'MEDTAG Collection' if action == 'mentions': collection.put_infon('annotation_type', 'mentions') collection.key = 'mentions.key' collection1.put_infon('annotation_type', 'mentions') collection1.key = 'mentions.key' for rep in report_list: print(rep['id_report']) report = Report.objects.get(id_report=rep['id_report'],language=rep['language']) cursor.execute( "SELECT a.username,a.ns_id,r.id_report,r.language,r.institute,r.name,a.start,a.stop,m.mention_text FROM report AS r INNER JOIN annotate AS a ON r.id_report = a.id_report AND r.language = a.language INNER JOIN mention AS m ON m.id_report = a.id_report AND m.language = a.language AND a.start = m.start AND a.stop = m.stop WHERE r.id_report = %s AND r.language = %s 
AND ns_id = %s", [str(rep['id_report']), str(rep['language']), 'Human']) reports_human_mentions = cursor.fetchall() agent_ns = NameSpace.objects.get(ns_id='Robot') agent = User.objects.get(username='Robot_user', ns_id=agent_ns) ass = Annotate.objects.filter(username=agent, ns_id=agent_ns, id_report=report, language=report.language) ins_arr = [] reports_robot_mentions = [] for el in ass: ins_arr.append(el.insertion_time) if len(ins_arr) > 0: cursor.execute( "SELECT a.username,a.ns_id,r.id_report,r.language,r.institute,r.name,a.start,a.stop,m.mention_text FROM report AS r INNER JOIN annotate AS a ON r.id_report = a.id_report AND r.language = a.language INNER JOIN mention AS m ON m.id_report = a.id_report AND m.language = a.language AND a.start = m.start AND a.stop = m.stop WHERE r.id_report = %s AND r.language = %s AND ns_id = %s and a.insertion_time not in %s", [str(rep['id_report']), str(rep['language']), 'Robot', tuple(ins_arr)]) reports_robot_mentions = cursor.fetchall() if mode == 'both': reports = list(chain(reports_human_mentions, reports_robot_mentions)) elif mode == 'Human': reports = reports_human_mentions elif mode == 'Robot': reports = reports_robot_mentions data = get_fields_from_json() json_keys_to_display_human = data['fields'] json_keys_to_display_human.extend(['authors','volume','journal','year']) json_keys_to_ann_human = data['fields_to_ann'] json_keys_to_ann_human.extend(['abstract','title']) json_keys_to_display_human = list(set(json_keys_to_display_human)) json_keys_to_ann_human = list(set(json_keys_to_ann_human)) json_keys_to_ann_robot = [] workpath = os.path.dirname( os.path.abspath(__file__)) # Returns the Path your .py file is in with open(os.path.join(workpath, './automatic_annotation/auto_fields/auto_fields.json')) as out: data = json.load(out) json_keys_to_ann_robot = data['extract_fields'][report.name.name] json_keys_to_ann_robot.append('abstract') json_keys_to_ann_robot.append('title') json_keys_to_ann_robot = 
list(set(json_keys_to_ann_robot)) json_keys = [] json_keys_to_ann = [] if mode == 'Human': json_keys = list(set(json_keys_to_display_human + json_keys_to_ann_human)) json_keys_to_ann = json_keys_to_ann_human elif mode == 'Robot': json_keys = list(set(json_keys_to_display_human + json_keys_to_ann_robot)) json_keys_to_ann = json_keys_to_ann_robot elif mode == 'both': json_keys = list( set(json_keys_to_display_human + json_keys_to_ann_robot + json_keys_to_ann_human)) json_keys_to_ann = list(set(json_keys_to_ann_robot + json_keys_to_ann_human)) document = BioCDocument() document.id = str(report.id_report) document.put_infon('usecase', report.name_id) document.put_infon('language', report.language) document.put_infon('institute', report.institute) document1 = BioCDocument() document1.id = str(report.id_report) document1.put_infon('usecase', report.name_id) document1.put_infon('language', report.language) document1.put_infon('institute', report.institute) annotations = [] count = 0 maj_annotations = [] for el in reports: # print(el) mention = Mention.objects.get(start=el[6], stop=el[7], id_report=report,language=report.language) json_dict = report_get_start_end(json_keys,json_keys_to_ann,report.id_report,report.language) annotation = BioCAnnotation() annotation.id = str(count) count = count + 1 loc_ann = BioCLocation() loc_ann.offset = str(mention.start) loc_ann.length = str(mention.stop - mention.start + 1) annotation.add_location(loc_ann) mention_text = mention.mention_text mtext = re.sub('[^a-zA-Z0-9n\-_/\' ]+', '', mention_text) annotation.text = mtext couple = (annotation, mention.start, mention.stop) annotations.append(couple) seen = [] for key in json_keys_to_ann: passage = BioCPassage() passage.put_infon('section', key) check = False keys = json_dict['rep_string'].keys() if key in keys: if json_dict['rep_string'].get(key) != '': passage.text = json_dict['rep_string'][key]['text'] start = str(json_dict['rep_string'][key]['start']) passage.offset = (start) for el 
in annotations: if el not in seen: if int(el[1]) >= int(json_dict['rep_string'][key]['start']) and int( el[2]) <= int(json_dict['rep_string'][key]['end']): check = True passage.add_annotation(el[0]) seen.append(el) # passage.add_annotation(el[0]) if check: document.add_passage(passage) document1.add_passage(passage) collection.add_document(document) for doc in collection: print(doc.id) collection1.add_document(document) elif action == 'concept-mention': collection.put_infon('annotation_type', 'linking') collection.key = 'linking.key' collection1.put_infon('annotation_type', 'linking') collection1.key = 'linking.key' documents = [] # reports = Annotate.objects.filter(username=username).values('id_report','language').distinct() for rep in report_list: document = '' report = Report.objects.get(id_report=rep['id_report'], language=rep['language']) cursor.execute( "SELECT a.username,a.ns_id,r.id_report,r.language,r.institute,r.name,a.start,a.stop,m.mention_text,c.name,c.concept_url,a.name FROM report AS r INNER JOIN linked AS a ON r.id_report = a.id_report AND r.language = a.language INNER JOIN concept as c ON a.concept_url = c.concept_url INNER JOIN mention AS m ON m.id_report = a.id_report AND m.language = a.language AND a.start = m.start AND a.stop = m.stop WHERE r.id_report = %s AND r.language = %s AND ns_id = %s", [str(rep['id_report']), str(rep['language']), 'Human']) reports_human_linking = cursor.fetchall() agent_ns = NameSpace.objects.get(ns_id='Robot') agent = User.objects.get(username='Robot_user', ns_id=agent_ns) ass = Linked.objects.filter(username=agent, ns_id=agent_ns, id_report=report, language=report.language) ins_arr = [] reports_robot_linking = [] for el in ass: ins_arr.append(el.insertion_time) if len(ins_arr) > 0: cursor.execute( "SELECT a.username,a.ns_id,r.id_report,r.language,r.institute,r.name,a.start,a.stop,m.mention_text,c.name,c.concept_url,a.name FROM report AS r INNER JOIN linked AS a ON r.id_report = a.id_report AND r.language = a.language 
INNER JOIN concept as c ON a.concept_url = c.concept_url INNER JOIN mention AS m ON m.id_report = a.id_report AND m.language = a.language AND a.start = m.start AND a.stop = m.stop WHERE r.id_report = %s AND r.language = %s AND ns_id = %s and a.insertion_time not in %s", [str(rep['id_report']), str(rep['language']), 'Robot', tuple(ins_arr)]) reports_robot_linking = cursor.fetchall() if mode == 'both': reports = list(chain(reports_human_linking, reports_robot_linking)) elif mode == 'Human': reports = reports_human_linking elif mode == 'Robot': reports = reports_robot_linking data = get_fields_from_json() json_keys_to_display_human = data['fields'] json_keys_to_display_human.extend(['authors', 'volume', 'journal', 'year']) json_keys_to_ann_human = data['fields_to_ann'] json_keys_to_ann_human.extend(['abstract', 'title']) json_keys_to_display_human = list(set(json_keys_to_display_human)) json_keys_to_ann_human = list(set(json_keys_to_ann_human)) json_keys_to_ann_robot = [] workpath = os.path.dirname( os.path.abspath(__file__)) # Returns the Path your .py file is in with open(os.path.join(workpath, './automatic_annotation/auto_fields/auto_fields.json')) as out: data = json.load(out) json_keys_to_ann_robot = data['extract_fields'][report.name.name] json_keys_to_ann_robot.append('abstract') json_keys_to_ann_robot.append('title') json_keys_to_ann_robot = list(set(json_keys_to_ann_robot)) json_keys = [] json_keys_to_ann = [] if mode == 'Human': json_keys = list(set(json_keys_to_display_human + json_keys_to_ann_human)) json_keys_to_ann = json_keys_to_ann_human elif mode == 'Robot': json_keys = list(set(json_keys_to_display_human + json_keys_to_ann_robot)) json_keys_to_ann = json_keys_to_ann_robot elif mode == 'both': json_keys = list( set(json_keys_to_display_human + json_keys_to_ann_robot + json_keys_to_ann_human)) json_keys_to_ann = list(set(json_keys_to_ann_robot + json_keys_to_ann_human)) document = BioCDocument() document.id = str(report.id_report) 
document.put_infon('usecase', report.name_id) document.put_infon('language', report.language) document.put_infon('institute', report.institute) annotations = [] count = 0 maj_annotations = [] for el in reports: mention = Mention.objects.get(start=el[6], stop=el[7], id_report=report, language=report.language) concept = Concept.objects.get(concept_url=el[10]) json_dict = report_get_start_end(json_keys, json_keys_to_ann, report.id_report, report.language) annotation = BioCAnnotation() annotation.id = str(count) annotation.put_infon('concept_name', concept.name) annotation.put_infon('concept_url', concept.concept_url) count = count + 1 loc_ann = BioCLocation() loc_ann.offset = str(mention.start) loc_ann.length = str(mention.stop - mention.start + 1) annotation.add_location(loc_ann) mention_text = mention.mention_text mtext = re.sub('[^a-zA-Z0-9n\-_/\' ]+', '', mention_text) annotation.text = mtext couple = (annotation, mention.start, mention.stop) annotations.append(couple) seen = [] for key in json_keys_to_ann: passage = BioCPassage() passage.put_infon('section', key) check = False keys = json_dict['rep_string'].keys() if key in keys: if json_dict['rep_string'].get(key) != '': # if json_dict['rep_string'].get(key) is not None and json_dict['rep_string'].get(key) != '': passage.text = json_dict['rep_string'][key]['text'] start = str(json_dict['rep_string'][key]['start']) passage.offset = (start) for el in annotations: if el not in seen: # start1 = int(el[1]) # start2 = int(json_dict['rep_string'][key]['start']) # stop1 = int(el[2]) # stop2 = int(json_dict['rep_string'][key]['end']) if int(el[1]) >= int(json_dict['rep_string'][key]['start']) and int( el[2]) <= int(json_dict['rep_string'][key]['end']): check = True passage.add_annotation(el[0]) seen.append(el) # passage.add_annotation(el[0]) if check: document.add_passage(passage) collection.add_document(document) for doc in collection: print(doc.id) collection1.add_document(document) # print(writer) # 
documents.append(document) except Exception as e: print(e) return False else: # os.remove(path1) if format.endswith('json'): # os.remove(path1) return json_writer # return True return writer
70.158004
1,063
0.555128
13,289
101,238
4.055384
0.021221
0.058933
0.013731
0.025328
0.937208
0.923978
0.91071
0.901581
0.89607
0.889761
0
0.003124
0.332859
101,238
1,442
1,064
70.206657
0.794803
0.120844
0
0.802913
0
0.044559
0.327073
0.067135
0
0
0
0
0
1
0.004284
false
0.031705
0.005998
0
0.026564
0.010283
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
1
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
8
1c0ebc7cfeca229af4ecb40d82b0236d449c2eb4
984
py
Python
src/binary.py
dsanno/chainer-etl9
e6968442b9aa159302a1b5c02fced686f72ef682
[ "MIT" ]
7
2016-09-10T07:34:13.000Z
2018-05-13T06:00:51.000Z
src/binary.py
dsanno/chainer-etl9
e6968442b9aa159302a1b5c02fced686f72ef682
[ "MIT" ]
null
null
null
src/binary.py
dsanno/chainer-etl9
e6968442b9aa159302a1b5c02fced686f72ef682
[ "MIT" ]
3
2016-05-17T03:22:18.000Z
2018-05-13T06:00:55.000Z
from struct import unpack def read8(str, offset=0): return unpack('B', str[offset:offset + 1])[0] def read16be(str, offset=0): return unpack('>H', str[offset:offset + 2])[0] def read16le(str, offset=0): return unpack('<H', str[offset:offset + 2])[0] def read32be(str, offset=0): return unpack('>I', str[offset:offset + 4])[0] def read32le(str, offset=0): return unpack('<I', str[offset:offset + 4])[0] def read8s(str, length, offset=0): return unpack('{}B'.format(length), str[offset:offset + length]) def read16bes(str, length, offset=0): return unpack('>{}H'.format(length), str[offset:offset + 2 * length]) def read16les(str, length, offset=0): return unpack('<{}H'.format(length), str[offset:offset + 2 * length]) def read32bes(str, length, offset=0): return unpack('>{}I'.format(length), str[offset:offset + 4 * length]) def read32les(str, length, offset=0): return unpack('<{}I'.format(length), str[offset:offset + 4 * length])
30.75
73
0.655488
148
984
4.358108
0.182432
0.209302
0.20155
0.294574
0.789147
0.710078
0.666667
0.666667
0.666667
0.666667
0
0.050179
0.14939
984
31
74
31.741935
0.72043
0
0
0
0
0
0.028455
0
0
0
0
0
0
1
0.47619
false
0
0.047619
0.47619
1
0
0
0
0
null
1
1
1
0
1
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
8
1c17a288ee42144ff967cd9cce7cf0978bec27fd
154
py
Python
Lesson4/ActivityA/sum.py
cedrickadi/Beginning-Jenkins
f4c4d23031ac82d729e4773e51c83dd69b0dd0d4
[ "MIT" ]
9
2019-04-30T18:08:55.000Z
2022-01-24T22:43:03.000Z
Lesson4/ActivityA/sum.py
cedrickadi/Beginning-Jenkins
f4c4d23031ac82d729e4773e51c83dd69b0dd0d4
[ "MIT" ]
1
2021-03-20T05:48:31.000Z
2021-03-20T05:48:31.000Z
Lesson4/ActivityA/sum.py
cedrickadi/Beginning-Jenkins
f4c4d23031ac82d729e4773e51c83dd69b0dd0d4
[ "MIT" ]
148
2018-10-07T20:00:42.000Z
2022-03-14T08:09:45.000Z
def add_numbers(number1, number2): return number1 + number2 def add_three_numbers(number1, number2, number3): return number1 + number2 + number3
25.666667
49
0.753247
19
154
5.947368
0.421053
0.495575
0.371681
0
0
0
0
0
0
0
0
0.078125
0.168831
154
5
50
30.8
0.804688
0
0
0
0
0
0
0
0
0
0
0
0
1
0.5
false
0
0
0.5
1
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
7
1c431e9494b623ae6f2fdbb6c1b0e694576fc004
2,641
py
Python
module4-acid-and-database-scalability-tradeoffs/Assignment/assignment_mongo.py
singparvi/DS-Unit-3-Sprint-2-SQL-and-Databases
7d61f09a410ea91731caddb4fcc96b84cb9b0221
[ "MIT" ]
null
null
null
module4-acid-and-database-scalability-tradeoffs/Assignment/assignment_mongo.py
singparvi/DS-Unit-3-Sprint-2-SQL-and-Databases
7d61f09a410ea91731caddb4fcc96b84cb9b0221
[ "MIT" ]
null
null
null
module4-acid-and-database-scalability-tradeoffs/Assignment/assignment_mongo.py
singparvi/DS-Unit-3-Sprint-2-SQL-and-Databases
7d61f09a410ea91731caddb4fcc96b84cb9b0221
[ "MIT" ]
null
null
null
import pymongo # now make a connection with mongo db and test connection mongo_client = pymongo.MongoClient( 'mongodb+srv://singparvi:qwerty12345@cluster0.l0ldo.mongodb.net/myFirstDatabase?retryWrites=true&w=majority') rpg_collections = mongo_client.myFirstDatabase.rpg_collections # How many total Characters are there? print('How many total Characters are there?: ', rpg_collections.count()) print("\n") # How many total Items? ok items_list = [] for document in rpg_collections.find(): # pprint.pprint(document) items_list.append(document['items']) # flatten list flat_list = [item for sublist in items_list for item in sublist] # get the number of unique in the items print('How many total Items?: ', len(set(flat_list))) print("\n") # How many of the Items are weapons? How many are not? ok items_list = [] for document in rpg_collections.find(): # pprint.pprint(document) items_list.append(document['weapons']) # flatten list flat_list = [item for sublist in items_list for item in sublist] # get the number of unique in the items print('How many of the Items are weapons? ', len(set(flat_list))) print("\n") # How many Items does each character have? (Return first 20 rows) print('How many Items does each character have? (Return first 20 rows)') items_list = [] i = 0 for document in rpg_collections.find(): # pprint.pprint(document) if i < 20: i = i + 1 print('Item ', i, document['name'], 'has ', len(document['items']), 'items') print() # How many Weapons does each character have? (Return first 20 rows) print('How many Weapons does each character have? (Return first 20 rows)') items_list = [] i = 0 for document in rpg_collections.find(): # pprint.pprint(document) if i < 20: i = i + 1 print('Item ', i, document['name'], 'has ', len(document['weapons']), 'items') print() # On average, how many Items does each Character have? 
items_list = [] i = 0 for document in rpg_collections.find(): # pprint.pprint(document) items_list.append(document['items']) # flatten the list flat_list = [item for sublist in items_list for item in sublist] print('On average, how many Items does each Character have? ', len(flat_list) / rpg_collections.count()) # On average, how many Weapons does each character have? items_list = [] i = 0 for document in rpg_collections.find(): # pprint.pprint(document) items_list.append(document['weapons']) # flatten the list flat_list = [item for sublist in items_list for item in sublist] print('On average, how many Items does each Character have? ', len(flat_list) / rpg_collections.count())
33.43038
113
0.711094
394
2,641
4.677665
0.187817
0.056972
0.073793
0.091156
0.818231
0.818231
0.785676
0.756375
0.730874
0.730874
0
0.011452
0.173419
2,641
78
114
33.858974
0.832799
0.261643
0
0.76087
0
0.021739
0.266459
0.054951
0
0
0
0
0
1
0
false
0
0.021739
0
0.021739
0.304348
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
1c55a25dfd2b153fe629b3de26d074dcd4ff2b36
1,964
py
Python
make_juypter_toc/test.py
rawrgulmuffins/make_juypter_toc
3247d002ae7a812c671fc17a98a6a31528308368
[ "Apache-2.0" ]
null
null
null
make_juypter_toc/test.py
rawrgulmuffins/make_juypter_toc
3247d002ae7a812c671fc17a98a6a31528308368
[ "Apache-2.0" ]
null
null
null
make_juypter_toc/test.py
rawrgulmuffins/make_juypter_toc
3247d002ae7a812c671fc17a98a6a31528308368
[ "Apache-2.0" ]
null
null
null
""" """ from make_juypter_toc import headers_to_table def test_headers_to_tables_no_headers(): table = headers_to_table(None, "test") assert table is None def test_headers_to_tables_one_level_nesting(): example_data = [ u'#Introduction', u'#Why Flask?\n', u'#Topics Not Covered\n'] table = headers_to_table(example_data, "test_name") expected_table = [ u'* [Introduction](http://localhost:8888/notebooks/test_name#Introduction)', u'* [Why Flask?](http://localhost:8888/notebooks/test_name#Why-Flask?)', u'* [Topics Not Covered](http://localhost:8888/notebooks/test_name#Topics-Not-Covered)',] assert table == expected_table def test_headers_to_tables_two_level_nesting(): example_data = [ u'#Introduction', u'##Why Flask?\n', u'#Topics Not Covered\n'] table = headers_to_table(example_data, "test_name") expected_table = [ u'* [Introduction](http://localhost:8888/notebooks/test_name#Introduction)', u' * [Why Flask?](http://localhost:8888/notebooks/test_name#Why-Flask?)', u'* [Topics Not Covered](http://localhost:8888/notebooks/test_name#Topics-Not-Covered)',] assert table == expected_table def test_headers_to_tables_three_level_nesting(): example_data = [ u'#Introduction', u'##Why Flask?\n', u'###Topics Not Covered\n', u'#Routes\n',] table = headers_to_table(example_data, "test_name") expected_table = [ u'* [Introduction](http://localhost:8888/notebooks/test_name#Introduction)', u' * [Why Flask?](http://localhost:8888/notebooks/test_name#Why-Flask?)', u' * [Topics Not Covered](http://localhost:8888/notebooks/test_name#Topics-Not-Covered)', u'* [Routes](http://localhost:8888/notebooks/test_name#Routes)',] assert table == expected_table
42.695652
108
0.637984
241
1,964
4.950207
0.153527
0.087175
0.142498
0.217938
0.881811
0.86337
0.812238
0.812238
0.812238
0.812238
0
0.026042
0.217923
1,964
45
109
43.644444
0.750651
0
0
0.692308
0
0.076923
0.47675
0
0
0
0
0
0.102564
1
0.102564
false
0
0.025641
0
0.128205
0
0
0
0
null
0
0
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
1c8c9fe4b11dff3e3575416803702baa4022065b
14,423
py
Python
engine/trainer.py
ZHUXUHAN/reid-baseline
43e8734be52a90d8131af8c4b43536ba6911bdaa
[ "MIT" ]
2
2019-11-30T08:11:22.000Z
2019-12-11T14:35:01.000Z
engine/trainer.py
ZHUXUHAN/reid-baseline
43e8734be52a90d8131af8c4b43536ba6911bdaa
[ "MIT" ]
1
2020-01-09T03:48:26.000Z
2020-03-07T01:22:37.000Z
engine/trainer.py
ZHUXUHAN/reid-baseline
43e8734be52a90d8131af8c4b43536ba6911bdaa
[ "MIT" ]
1
2019-11-30T09:24:17.000Z
2019-11-30T09:24:17.000Z
# encoding: utf-8 """ @author: sherlock @contact: sherlockliao01@gmail.com """ import logging import torch import torch.nn as nn from ignite.engine import Engine, Events from ignite.handlers import ModelCheckpoint, Timer from ignite.metrics import RunningAverage from utils.reid_metric import R1_mAP from utils.reid_metric import R1_mAP_reranking_training from apex.parallel import DistributedDataParallel as DDP from apex.fp16_utils import * from apex import amp, optimizers from apex.multi_tensor_apply import multi_tensor_applier global ITER ITER = 0 def create_supervised_trainer(model, optimizer, loss_fn, aligned_train, pcb_train, mgn_train, new_pcb_train, device=None): """ Factory function for creating a trainer for supervised models Args: model (`torch.nn.Module`): the model to train optimizer (`torch.optim.Optimizer`): the optimizer to use loss_fn (torch.nn loss function): the loss function to use device (str, optional): device type specification (default: None). Applies to both model and batches. 
Returns: Engine: a trainer engine with supervised update function """ if device: if torch.cuda.device_count() > 1: model = nn.DataParallel(model) model.to(device) def _update(engine, batch): model.train() optimizer.zero_grad() img, target = batch img = img.to(device) if torch.cuda.device_count() >= 1 else img target = target.to(device) if torch.cuda.device_count() >= 1 else target if aligned_train: score, feat, local_feat = model(img) loss = loss_fn(score, feat, target, None, local_feat) elif pcb_train: score, feat, local_score, local_feat, res3_feat, res3_score = model(img, None) loss = loss_fn(score, feat, target, local_score, local_feat, res3_feat, res3_score) elif new_pcb_train: score, feat, local_score, local_feat = model(img, None) loss = loss_fn(score, feat, target, local_score, local_feat) elif mgn_train: score, feat, local_feat = model(img) loss = loss_fn(score, feat, target, None, local_feat) else: score, feat = model(img) loss = loss_fn(score, feat, target, None, None) loss.backward() optimizer.step() if type(score) == tuple: sum_score = 0 for s in score: sum_score += (s.max(1)[1] == target).float().mean() acc = sum_score / len(score) else: acc = (score.max(1)[1] == target).float().mean() return loss.item(), acc.item() return Engine(_update) def create_supervised_trainer_with_center(model, center_criterion, optimizer, optimizer_center, loss_fn, cetner_loss_weight, aligned_train, pcb_train, mgn_train, arc_train, new_pcb_train, device=None): """ Factory function for creating a trainer for supervised models Args: model (`torch.nn.Module`): the model to train optimizer (`torch.optim.Optimizer`): the optimizer to use loss_fn (torch.nn loss function): the loss function to use device (str, optional): device type specification (default: None). Applies to both model and batches. 
Returns: Engine: a trainer engine with supervised update function """ if device: if torch.cuda.device_count() > 1: model = nn.DataParallel(model) model.to(device) model, optimizer = amp.initialize(model, optimizer,opt_level='O1') def _update(engine, batch): model.train() optimizer.zero_grad() optimizer_center.zero_grad() img, target = batch img = img.to(device) if torch.cuda.device_count() >= 1 else img target = target.to(device) if torch.cuda.device_count() >= 1 else target if aligned_train: score, feat, local_feat = model(img) loss = loss_fn(score, feat, target, None, local_feat) elif pcb_train: if arc_train: score, feat, local_score, local_feat = model(img, target) else: score, feat, local_score, local_feat = model(img, None) loss = loss_fn(score, feat, target, local_score, local_feat, None, None) elif new_pcb_train: if arc_train: score, feat, local_score, local_feat = model(img, target) else: score, feat, local_score, local_feat, local_score_2, local_feat_2 = model(img, None) loss = loss_fn(score, feat, target, local_score, local_feat, local_score_2, local_feat_2) elif mgn_train: score, feat, local_feat = model(img) loss = loss_fn(score, feat, target, None, local_feat, None, None) else: score, feat = model(img) loss = loss_fn(score, feat, target, None, None) # print("Total loss is {}, center loss is {}".format(loss, center_criterion(feat, target))) loss.backward() #if you use fp16 please use follwing codes # with amp.scale_loss(loss, optimizer) as scaled_loss: # scaled_loss.backward() optimizer.step() for param in center_criterion.parameters(): param.grad.data *= (1. 
/ cetner_loss_weight) optimizer_center.step() # compute acc if type(score) == tuple: sum_score = 0 for s in score: sum_score += (s.max(1)[1] == target).float().mean() acc = sum_score / len(score) else: acc = (score.max(1)[1] == target).float().mean() return loss.item(), acc.item() return Engine(_update) def create_supervised_evaluator(model, metrics, device=None): """ Factory function for creating an evaluator for supervised models Args: model (`torch.nn.Module`): the model to train metrics (dict of str - :class:`ignite.metrics.Metric`): a map of metric names to Metrics device (str, optional): device type specification (default: None). Applies to both model and batches. Returns: Engine: an evaluator engine with supervised inference function """ if device: if torch.cuda.device_count() > 1: model = nn.DataParallel(model) model.to(device) def _inference(engine, batch): model.eval() with torch.no_grad(): data, pids, camids, data_flip = batch data = data.to(device) if torch.cuda.device_count() >= 1 else data data_flip = data_flip.to(device) if torch.cuda.device_count() >= 1 else data_flip feat, local_feat = model(data, None) feat_flip, local_feat_flip = model(data_flip, None) return feat, local_feat, pids, camids, feat_flip, local_feat_flip engine = Engine(_inference) for name, metric in metrics.items(): metric.attach(engine, name) return engine def do_train( cfg, model, train_loader, val_loader, optimizer, scheduler, loss_fn, num_query, start_epoch ): log_period = cfg.SOLVER.LOG_PERIOD checkpoint_period = cfg.SOLVER.CHECKPOINT_PERIOD eval_period = cfg.SOLVER.EVAL_PERIOD output_dir = cfg.OUTPUT_DIR device = cfg.MODEL.DEVICE epochs = cfg.SOLVER.MAX_EPOCHS aligned_train = cfg.MODEL.ALIGNED pcb_train = cfg.MODEL.PCB mgn_train = cfg.MODEL.MGN new_pcb_train = cfg.MODEL.NEW_PCB logger = logging.getLogger("reid_baseline.train") logger.info("Start training") trainer = create_supervised_trainer(model, optimizer, loss_fn, aligned_train, pcb_train, mgn_train, new_pcb_train, 
device=device) # evaluator = create_supervised_evaluator(model, metrics={'r1_mAP': R1_mAP(num_query, max_rank=50, feat_norm=cfg.TEST.FEAT_NORM)}, device=device) checkpointer = ModelCheckpoint(output_dir, cfg.MODEL.NAME, checkpoint_period, n_saved=10, require_empty=False) timer = Timer(average=True) trainer.add_event_handler(Events.EPOCH_COMPLETED, checkpointer, {'model': model, 'optimizer': optimizer}) timer.attach(trainer, start=Events.EPOCH_STARTED, resume=Events.ITERATION_STARTED, pause=Events.ITERATION_COMPLETED, step=Events.ITERATION_COMPLETED) # average metric to attach on trainer RunningAverage(output_transform=lambda x: x[0]).attach(trainer, 'avg_loss') RunningAverage(output_transform=lambda x: x[1]).attach(trainer, 'avg_acc') @trainer.on(Events.STARTED) def start_training(engine): engine.state.epoch = start_epoch @trainer.on(Events.EPOCH_STARTED) def adjust_learning_rate(engine): scheduler.step() @trainer.on(Events.ITERATION_COMPLETED) def log_training_loss(engine): global ITER ITER += 1 if ITER % log_period == 0: logger.info("Epoch[{}] Iteration[{}/{}] Loss: {:.3f}, Acc: {:.3f}, Base Lr: {:.2e}" .format(engine.state.epoch, ITER, len(train_loader), engine.state.metrics['avg_loss'], engine.state.metrics['avg_acc'], scheduler.get_lr()[0])) if len(train_loader) == ITER: ITER = 0 # adding handlers using `trainer.on` decorator API @trainer.on(Events.EPOCH_COMPLETED) def print_times(engine): logger.info('Epoch {} done. 
Time per batch: {:.3f}[s] Speed: {:.1f}[samples/s]' .format(engine.state.epoch, timer.value() * timer.step_count, train_loader.batch_size / timer.value())) logger.info('-' * 10) timer.reset() @trainer.on(Events.EPOCH_COMPLETED) def log_validation_results(engine): pass if engine.state.epoch % eval_period == 0: evaluator.run(val_loader) cmc, mAP = evaluator.state.metrics['r1_mAP'] logger.info("Validation Results - Epoch: {}".format(engine.state.epoch)) logger.info("mAP: {:.1%}".format(mAP)) for r in [1, 5, 10]: logger.info("CMC curve, Rank-{:<3}:{:.1%}".format(r, cmc[r - 1])) trainer.run(train_loader, max_epochs=epochs) def do_train_with_center( cfg, model, center_criterion, train_loader, val_loader, optimizer, optimizer_center, scheduler, loss_fn, num_query, start_epoch, datasets ): log_period = cfg.SOLVER.LOG_PERIOD checkpoint_period = cfg.SOLVER.CHECKPOINT_PERIOD eval_period = cfg.SOLVER.EVAL_PERIOD output_dir = cfg.OUTPUT_DIR device = cfg.MODEL.DEVICE epochs = cfg.SOLVER.MAX_EPOCHS mgn_train = cfg.MODEL.MGN aligned_train = cfg.MODEL.ALIGNED pcb_train = cfg.MODEL.PCB arc_train = cfg.MODEL.ARC new_pcb_train = cfg.MODEL.NEW_PCB logger = logging.getLogger("reid_baseline.train") logger.info("Start training") trainer = create_supervised_trainer_with_center(model, center_criterion, optimizer, optimizer_center, loss_fn, cfg.SOLVER.CENTER_LOSS_WEIGHT, aligned_train, pcb_train, mgn_train, arc_train, new_pcb_train, device=device) evaluator = create_supervised_evaluator(model, metrics={ 'r1_mAP': R1_mAP_reranking_training(num_query, max_rank=200, feat_norm=cfg.TEST.FEAT_NORM)}, device=device) checkpointer = ModelCheckpoint(output_dir, cfg.MODEL.NAME, checkpoint_period, n_saved=10, require_empty=False) timer = Timer(average=True) trainer.add_event_handler(Events.EPOCH_COMPLETED, checkpointer, {'model': model, 'optimizer': optimizer, 'center_param': center_criterion, 'optimizer_center': optimizer_center}) timer.attach(trainer, start=Events.EPOCH_STARTED, 
resume=Events.ITERATION_STARTED, pause=Events.ITERATION_COMPLETED, step=Events.ITERATION_COMPLETED) # average metric to attach on trainer RunningAverage(output_transform=lambda x: x[0]).attach(trainer, 'avg_loss') RunningAverage(output_transform=lambda x: x[1]).attach(trainer, 'avg_acc') @trainer.on(Events.STARTED) def start_training(engine): engine.state.epoch = start_epoch @trainer.on(Events.EPOCH_STARTED) def adjust_learning_rate(engine): scheduler.step() @trainer.on(Events.ITERATION_COMPLETED) def log_training_loss(engine): global ITER ITER += 1 if ITER % log_period == 0: logger.info("Epoch[{}] Iteration[{}/{}] Loss: {:.3f}, Acc: {:.3f}, Base Lr: {:.2e}" .format(engine.state.epoch, ITER, len(train_loader), engine.state.metrics['avg_loss'], engine.state.metrics['avg_acc'], scheduler.get_lr()[0])) if len(train_loader) == ITER: ITER = 0 # adding handlers using `trainer.on` decorator API @trainer.on(Events.EPOCH_COMPLETED) def print_times(engine): logger.info('Epoch {} done. Time per batch: {:.3f}[s] Speed: {:.1f}[samples/s]' .format(engine.state.epoch, timer.value() * timer.step_count, train_loader.batch_size / timer.value())) logger.info('-' * 10) timer.reset() @trainer.on(Events.EPOCH_COMPLETED) def log_validation_results(engine): pass if engine.state.epoch % eval_period == 0: evaluator.run(val_loader) cmc, mAP = evaluator.state.metrics['r1_mAP'] logger.info("Validation Results - Epoch: {}".format(engine.state.epoch)) logger.info("mAP: {:.1%}".format(mAP)) for r in [1, 5, 10]: logger.info("CMC curve, Rank-{:<3}:{:.1%}".format(r, cmc[r - 1])) trainer.run(train_loader, max_epochs=epochs)
38.876011
149
0.611107
1,750
14,423
4.850286
0.132
0.025448
0.016494
0.017672
0.836004
0.817271
0.81303
0.798186
0.790528
0.78016
0
0.009491
0.28406
14,423
370
150
38.981081
0.812512
0.130763
0
0.770677
0
0.007519
0.049265
0
0
0
0
0
0
1
0.067669
false
0.007519
0.045113
0
0.135338
0.007519
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
c7870094a711d2fa299ac0f77cc731fbd5c1b694
197
py
Python
baekjoon/Python/10869.py
Lumia1108/TIL
fe2e233d6d05c7d04f50f688f6c168e4d6d4ce46
[ "MIT" ]
null
null
null
baekjoon/Python/10869.py
Lumia1108/TIL
fe2e233d6d05c7d04f50f688f6c168e4d6d4ce46
[ "MIT" ]
null
null
null
baekjoon/Python/10869.py
Lumia1108/TIL
fe2e233d6d05c7d04f50f688f6c168e4d6d4ce46
[ "MIT" ]
null
null
null
import math a = input() b = a.split(' ') print(int(b[0]) + int(b[1])) print(int(b[0]) - int(b[1])) print(int(b[0]) * int(b[1])) print(math.floor(int(b[0]) / int(b[1]))) print(int(b[0]) % int(b[1]))
24.625
40
0.543147
44
197
2.431818
0.25
0.373832
0.233645
0.373832
0.700935
0.700935
0.700935
0.700935
0.700935
0.700935
0
0.057471
0.116751
197
8
41
24.625
0.557471
0
0
0
0
0
0.005051
0
0
0
0
0
0
1
0
false
0
0.125
0
0.125
0.625
0
0
0
null
1
1
1
0
1
1
1
1
1
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
10
1bf9777a57561ddd12b3cde763c8d9ae0dde47d5
610
py
Python
notebooks/solutions/12B_vectorizer_params.py
chofchof/scipy-2018-sklearn
c72bf8eafc8e8300b76fe203ae93f482b44043e2
[ "CC0-1.0" ]
1
2019-04-25T04:40:55.000Z
2019-04-25T04:40:55.000Z
notebooks/solutions/12B_vectorizer_params.py
chofchof/scipy-2018-sklearn
c72bf8eafc8e8300b76fe203ae93f482b44043e2
[ "CC0-1.0" ]
null
null
null
notebooks/solutions/12B_vectorizer_params.py
chofchof/scipy-2018-sklearn
c72bf8eafc8e8300b76fe203ae93f482b44043e2
[ "CC0-1.0" ]
null
null
null
# CountVectorizer vectorizer = CountVectorizer(min_df=10, ngram_range=(1, 3)) vectorizer.fit(text_train) X_train = vectorizer.transform(text_train) X_test = vectorizer.transform(text_test) clf = LogisticRegression() clf.fit(X_train, y_train) visualize_coefficients(clf, vectorizer.get_feature_names()) # TfidfVectorizer vectorizer = TfidfVectorizer(min_df=10, ngram_range=(1, 3)) vectorizer.fit(text_train) X_train = vectorizer.transform(text_train) X_test = vectorizer.transform(text_test) clf = LogisticRegression() clf.fit(X_train, y_train) visualize_coefficients(clf, vectorizer.get_feature_names())
26.521739
59
0.809836
82
610
5.731707
0.292683
0.076596
0.085106
0.051064
0.829787
0.829787
0.829787
0.829787
0.829787
0.829787
0
0.014235
0.078689
610
23
60
26.521739
0.822064
0.05082
0
0.857143
0
0
0
0
0
0
0
0
0
1
0
false
0
0
0
0
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
4029fe91cd66923e59c0dc1e2af40e1832a1449e
7,404
py
Python
pic4rl/pic4rl/agents/trainer.py
PIC4SeRCentre/pic4rl
1a1a511042bf332c96750de084d9ac3a302efa12
[ "MIT" ]
1
2021-01-08T10:40:47.000Z
2021-01-08T10:40:47.000Z
pic4rl/pic4rl/agents/trainer.py
PIC4SeRCentre/pic4rl
1a1a511042bf332c96750de084d9ac3a302efa12
[ "MIT" ]
null
null
null
pic4rl/pic4rl/agents/trainer.py
PIC4SeRCentre/pic4rl
1a1a511042bf332c96750de084d9ac3a302efa12
[ "MIT" ]
null
null
null
#!/usr/bin/env python3
import os
from gazebo_msgs.srv import DeleteEntity
from gazebo_msgs.srv import SpawnEntity
from geometry_msgs.msg import Pose
import rclpy
from rclpy.node import Node
from rclpy.qos import QoSProfile
from std_srvs.srv import Empty
from geometry_msgs.msg import Twist

import json
import numpy as np
import random
import sys
import time
import math

#from pic4rl.pic4rl_environment import Pic4rlEnvironment
from pic4rl.agents.ddpg_visual_agent import DDPGVisualAgent


class Pic4Trainer():
    """Episode-loop driver for a state-vector (e.g. lidar) RL agent.

    Runs `episode_size` episodes against `env`, feeding transitions to
    `agent`, periodically evaluating, decaying epsilon and checkpointing
    scores/models to `results_path`.
    """

    def __init__(self, agent, load_episode, episode_size, train_start, env):
        super().__init__()
        #self.env = Pic4rlEnvironment()
        self.env = env()                      # env is a factory/class, instantiated here
        self.Agent = agent
        self.load_episode = load_episode      # episode number to resume from
        self.episode_size = episode_size      # total number of episodes to run
        self.eval_episode = 20                # run an evaluation episode every N episodes
        self.train_start = train_start        # global step at which training begins
        self.train_score_list = []
        self.eval_score_list = []
        #self.results_path = '/home/mauromartini/mauro_ws/scores/lidar/last'
        self.results_path = '/home/results'

    def process(self):
        """Main training loop over episodes; returns nothing (side effects only)."""
        print("[Trainer.py] process")
        global_step = 0
        for episode in range(self.load_episode + 1, self.episode_size):
            global_step += 1
            if global_step == self.train_start + 1:
                print('Start training models, global step:', global_step)
            score = self.make_episode(episode, global_step)
            print(
                "Episode:", episode,
                "score:", score,
                "memory length:", self.Agent.memory.mem_len,
                "epsilon:", self.Agent.epsilon)
            #"avg Hz:", 1/self.avg_cmd_vel[0])
            param_keys = ['epsilon']
            param_values = [self.Agent.epsilon]
            param_dictionary = dict(zip(param_keys, param_values))
            self.train_score_list.append(score)
            # Update result and save model every 20 episodes (after warm-up)
            if episode > 600 and episode % 20 == 0:
                self.save_score(episode)
                self.Agent.save_model(episode, param_dictionary)
            # Epsilon (exploration policy) decay
            if self.Agent.epsilon > self.Agent.epsilon_min:
                self.Agent.epsilon *= self.Agent.epsilon_decay
            if episode % self.eval_episode == 0:
                score = self.make_episode(episode, training=False)
                print("Evaluation episode | Reward ", score)
                self.eval_score_list.append(score)

    def make_episode(self, episode, global_step=None, training=True):
        """Run one episode; returns the accumulated reward.

        With training=True, transitions are stored and (past train_start)
        the agent trains and soft-updates its target networks each step.
        global_step may be None for pure evaluation episodes — it is only
        read when training is True.
        """
        local_step = 0
        done = False
        score = 0
        # Reset environment
        state = self.env.reset(episode)
        while not done:
            local_step += 1
            # Action based on the current state; first step is a no-op action
            if local_step == 1:
                action = np.array([0.0, 0.0], dtype=np.float32)
            else:
                state = next_state
                action = self.Agent.get_action(state)
            # Guard against NaN actions from the policy network
            if np.any(np.isnan(action)):
                print("Action:", action)
                action = np.array([0.0, 0.0], dtype=np.float32)
            next_state, reward, done, info = self.env.step(action)
            # Save <s, a, r, s'> samples
            if local_step > 1 and training:
                self.Agent.remember(state, action, next_state, reward, done)
                if global_step > self.train_start:
                    time_check = time.time()
                    self.Agent.train()
                    print('Total time for training:', time.time() - time_check)
                    # UPDATE TARGET NETWORKS
                    time_check = time.time()
                    self.Agent.update_target_model_soft()
                    print('time for target model update:', time.time() - time_check)
            score += reward
        return score

    def save_score(self, episode):
        """Dump the running train/eval score lists as JSON under results_path."""
        with open(os.path.join(self.results_path, 'train_score_episode' + str(episode) + '.json'), 'w') as outfile:
            json.dump(self.train_score_list, outfile)
        with open(os.path.join(self.results_path, 'eval_score_episode' + str(episode) + '.json'), 'w') as outfile:
            json.dump(self.eval_score_list, outfile)

    def evalutate_Hz(self, init=False):
        """Track the running mean command period in self.avg_cmd_vel.

        avg_cmd_vel[0] is the running mean delta between calls, avg_cmd_vel[1]
        the sample count.  NOTE(review): avg_cmd_vel is not initialised in
        __init__ — callers must set it before the first non-init call; verify
        against the (commented-out) usage in process().
        """
        if init:
            # FIX: was `elf.start = time.time()` (NameError on `elf`).
            self.start = time.time()
        else:
            end = time.time()
            delta = end - self.start
            # Ignore pauses longer than 3 s so they don't skew the mean.
            if delta <= 3:
                self.avg_cmd_vel[1] += 1
                self.avg_cmd_vel[0] = (self.avg_cmd_vel[0] * (self.avg_cmd_vel[1] - 1)
                                       + delta) \
                                      / self.avg_cmd_vel[1]
            self.start = end


class Pic4VisualTrainer():
    """Episode-loop driver for a visual (goal + depth image) RL agent.

    Same structure as Pic4Trainer, but states are (goal, depth_image) pairs
    and the agent's get_action/remember take both components.
    """

    def __init__(self, agent, load_episode, episode_size, train_start, env):
        self.env = env()
        self.Agent = agent
        self.load_episode = load_episode
        self.episode_size = episode_size
        self.train_start = train_start
        self.eval_episode = 20
        self.train_score_list = []
        self.eval_score_list = []
        self.results_path = '/home/mauromartini/mauro_ws/scores/camera/rosbot/last'

    def process(self):
        """Main training loop over episodes; returns nothing (side effects only)."""
        global_step = 0
        for episode in range(self.load_episode + 1, self.episode_size):
            global_step += 1
            if global_step == self.train_start + 1:
                print('Start training models, global step:', global_step)
            score = self.make_episode(episode, global_step)
            print(
                "Episode:", episode,
                "score:", score,
                "memory length:", self.Agent.memory.mem_len,
                "epsilon:", self.Agent.epsilon)
            #"avg Hz:", 1/self.avg_cmd_vel[0])
            param_keys = ['epsilon']
            param_values = [self.Agent.epsilon]
            param_dictionary = dict(zip(param_keys, param_values))
            self.train_score_list.append(score)
            if episode % self.eval_episode == 0:
                score = self.make_episode(episode, training=False)
                print("Evaluation episode | Reward ", score)
                self.eval_score_list.append(score)
            # Update result and save model every 20 episodes (after warm-up)
            if episode > 600 and episode % 20 == 0:
                self.save_score(episode)
                self.Agent.save_model(episode, param_dictionary)
            # Epsilon (exploration policy) decay
            if self.Agent.epsilon > self.Agent.epsilon_min:
                self.Agent.epsilon *= self.Agent.epsilon_decay

    def make_episode(self, episode, global_step=None, training=True):
        """Run one episode; returns the accumulated reward.

        The environment state is a pair: state[0] is the goal descriptor,
        state[1] the depth image (assumed by indexing below — the exact
        shapes come from the environment, defined elsewhere).
        """
        local_step = 0
        done = False
        score = 0
        # Reset environment
        state = self.env.reset(episode)
        goal = state[0]
        depth_image = state[1]
        #print('goal info', goal)
        #print('depth image', depth_image)
        while not done:
            local_step += 1
            # Action based on the current state; first step is a no-op action
            if local_step == 1:
                action = np.array([0.0, 0.0], dtype=np.float32)
            else:
                state = next_state
                goal = state[0]
                depth_image = state[1]
                action = self.Agent.get_action(goal, depth_image)
            # Guard against NaN actions from the policy network
            if np.any(np.isnan(action)):
                print("Action:", action)
                action = np.array([0.0, 0.0], dtype=np.float32)
            next_state, reward, done, info = self.env.step(action)
            next_goal = next_state[0]
            next_image = next_state[1]
            # Save <s, a, r, s'> samples
            if local_step > 1 and training:
                self.Agent.remember(goal, depth_image, action, next_goal,
                                    next_image, reward, done)
                if global_step > self.train_start:
                    #time_check = time.time()
                    self.Agent.train()
                    #print('Total time for training:', time.time() - time_check)
                    # UPDATE TARGET NETWORKS
                    #time_check= time.time()
                    self.Agent.update_target_model_soft()
                    #print('time for target model update:', time.time()-time_check)
            score += reward
        return score

    def save_score(self, episode):
        """Dump the running train/eval score lists as JSON under results_path."""
        with open(os.path.join(self.results_path, 'train_score_episode' + str(episode) + '.json'), 'w') as outfile:
            json.dump(self.train_score_list, outfile)
        with open(os.path.join(self.results_path, 'eval_score_episode' + str(episode) + '.json'), 'w') as outfile:
            json.dump(self.eval_score_list, outfile)
29.73494
104
0.692193
1,071
7,404
4.605976
0.153128
0.051085
0.038922
0.018447
0.83884
0.806811
0.788364
0.766876
0.766876
0.758565
0
0.014856
0.181794
7,404
248
105
29.854839
0.799439
0.148568
0
0.755952
0
0
0.073808
0.008449
0
0
0
0
0
1
0.053571
false
0
0.095238
0
0.172619
0.065476
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
40381ad4933f4af180f966e2de58bd6ad269f6d9
5,523
py
Python
pants/targets/migrations/0001_initial.py
osagga/PriceAndNutritionTrackingSystem
6b654dcb6ad5902554471d98000a719f06ec03d9
[ "Apache-2.0" ]
72
2019-01-29T13:22:34.000Z
2022-01-06T09:42:33.000Z
pants/targets/migrations/0001_initial.py
osagga/PriceAndNutritionTrackingSystem
6b654dcb6ad5902554471d98000a719f06ec03d9
[ "Apache-2.0" ]
14
2019-05-10T19:59:26.000Z
2021-01-11T00:37:17.000Z
pants/targets/migrations/0001_initial.py
osagga/PriceAndNutritionTrackingSystem
6b654dcb6ad5902554471d98000a719f06ec03d9
[ "Apache-2.0" ]
17
2020-03-03T21:24:09.000Z
2022-01-06T09:42:37.000Z
# Generated by Django 2.0.1 on 2018-08-16 12:24
from django.conf import settings
import django.core.validators
from django.db import migrations, models
import django.db.models.deletion


def _nutrient_field(decimal_places, max_digits=6):
    """Optional, non-negative DecimalField used for every nutrient/cost column."""
    return models.DecimalField(
        blank=True,
        decimal_places=decimal_places,
        max_digits=max_digits,
        null=True,
        validators=[django.core.validators.MinValueValidator(0)],
    )


def _limit_fields():
    """Field list shared verbatim by the Maximums and Minimums models.

    Returns fresh field instances on every call: Django field objects must
    not be shared between two model definitions.
    """
    return [
        ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
        ('kilojoules', _nutrient_field(1)),
        ('protein', _nutrient_field(3)),
        ('fibre', _nutrient_field(3)),
        ('carbohydrate', _nutrient_field(3)),
        ('fat', _nutrient_field(3)),
        ('sugar', _nutrient_field(3)),
        ('saturatedfat', _nutrient_field(3)),
        ('sodium', _nutrient_field(0)),
        ('created_at', models.DateTimeField(auto_now_add=True)),
        ('updated_at', models.DateTimeField(auto_now=True)),
        ('cost', _nutrient_field(2, max_digits=4)),
    ]


class Migration(migrations.Migration):
    """Initial schema for the targets app: Target plus its Minimums/Maximums limits."""

    initial = True

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Maximums',
            fields=_limit_fields(),
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='Minimums',
            fields=_limit_fields(),
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='Target',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(blank=True, max_length=100)),
                ('slug', models.CharField(blank=True, max_length=50, unique=True)),
                ('description', models.CharField(blank=True, max_length=250)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
                ('daily_target', models.BooleanField(default=False, help_text='If set, will be used for daily target on diary/home page')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='+', to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.AddField(
            model_name='minimums',
            name='of_target',
            field=models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='minimum', to='targets.Target'),
        ),
        migrations.AddField(
            model_name='maximums',
            name='of_target',
            field=models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='maximum', to='targets.Target'),
        ),
    ]
67.353659
167
0.65526
608
5,523
5.820724
0.179276
0.053405
0.107375
0.137327
0.830461
0.830461
0.802487
0.802487
0.802487
0.788923
0
0.017488
0.202788
5,523
81
168
68.185185
0.786282
0.008148
0
0.662162
1
0
0.074142
0
0
0
0
0
0
1
0
false
0
0.054054
0
0.108108
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
40408c76ea8a6c4657e27728b57b1f4d3963be68
6,545
py
Python
loldib/getratings/models/NA/na_thresh/na_thresh_bot.py
koliupy/loldib
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
[ "Apache-2.0" ]
null
null
null
loldib/getratings/models/NA/na_thresh/na_thresh_bot.py
koliupy/loldib
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
[ "Apache-2.0" ]
null
null
null
loldib/getratings/models/NA/na_thresh/na_thresh_bot.py
koliupy/loldib
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
[ "Apache-2.0" ]
null
null
null
from getratings.models.ratings import Ratings

# One empty Ratings subclass per champion for the NA / Thresh / Bot-lane
# bucket.  The original generated file spelled out ~138 identical
# `class NA_Thresh_Bot_X(Ratings): pass` definitions by hand; the classes
# are created programmatically here instead, which keeps the module's
# public names byte-identical while removing the boilerplate.
_CHAMPIONS = (
    "Aatrox", "Ahri", "Akali", "Alistar", "Amumu", "Anivia", "Annie",
    "Ashe", "AurelionSol", "Azir", "Bard", "Blitzcrank", "Brand", "Braum",
    "Caitlyn", "Camille", "Cassiopeia", "Chogath", "Corki", "Darius",
    "Diana", "Draven", "DrMundo", "Ekko", "Elise", "Evelynn", "Ezreal",
    "Fiddlesticks", "Fiora", "Fizz", "Galio", "Gangplank", "Garen", "Gnar",
    "Gragas", "Graves", "Hecarim", "Heimerdinger", "Illaoi", "Irelia",
    "Ivern", "Janna", "JarvanIV", "Jax", "Jayce", "Jhin", "Jinx",
    "Kalista", "Karma", "Karthus", "Kassadin", "Katarina", "Kayle", "Kayn",
    "Kennen", "Khazix", "Kindred", "Kled", "KogMaw", "Leblanc", "LeeSin",
    "Leona", "Lissandra", "Lucian", "Lulu", "Lux", "Malphite", "Malzahar",
    "Maokai", "MasterYi", "MissFortune", "MonkeyKing", "Mordekaiser",
    "Morgana", "Nami", "Nasus", "Nautilus", "Nidalee", "Nocturne", "Nunu",
    "Olaf", "Orianna", "Ornn", "Pantheon", "Poppy", "Quinn", "Rakan",
    "Rammus", "RekSai", "Renekton", "Rengar", "Riven", "Rumble", "Ryze",
    "Sejuani", "Shaco", "Shen", "Shyvana", "Singed", "Sion", "Sivir",
    "Skarner", "Sona", "Soraka", "Swain", "Syndra", "TahmKench", "Taliyah",
    "Talon", "Taric", "Teemo", "Thresh", "Tristana", "Trundle",
    "Tryndamere", "TwistedFate", "Twitch", "Udyr", "Urgot", "Varus",
    "Vayne", "Veigar", "Velkoz", "Vi", "Viktor", "Vladimir", "Volibear",
    "Warwick", "Xayah", "Xerath", "XinZhao", "Yasuo", "Yorick", "Zac",
    "Zed", "Ziggs", "Zilean", "Zyra",
)

for _champion in _CHAMPIONS:
    _cls_name = "NA_Thresh_Bot_" + _champion
    # type() picks up __module__ from this module's globals, so the
    # generated classes import and pickle exactly like hand-written ones.
    globals()[_cls_name] = type(_cls_name, (Ratings,), {})
15.695444
46
0.766692
972
6,545
4.736626
0.151235
0.209818
0.389661
0.479583
0.803432
0.803432
0
0
0
0
0
0
0.169748
6,545
416
47
15.733173
0.847258
0
0
0.498195
0
0
0
0
0
0
0
0
0
1
0
true
0.498195
0.00361
0
0.501805
0
0
0
0
null
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
1
0
0
8
40b5944b20e2212d6acc93c65bd502eae4fc630d
9,644
py
Python
complex_auto/plotting.py
entn-at/cae-invar
9cdb09c6e62ad08e0c873b10c28565b50268a50f
[ "MIT" ]
31
2019-07-16T13:29:11.000Z
2021-07-29T07:41:13.000Z
complex_auto/plotting.py
entn-at/cae-invar
9cdb09c6e62ad08e0c873b10c28565b50268a50f
[ "MIT" ]
3
2019-11-11T15:59:32.000Z
2020-11-10T11:58:54.000Z
complex_auto/plotting.py
entn-at/cae-invar
9cdb09c6e62ad08e0c873b10c28565b50268a50f
[ "MIT" ]
8
2019-07-19T09:29:53.000Z
2021-01-12T21:37:22.000Z
""" Created on July 05, 2019 @author: Stefan Lattner Sony CSL Paris, France """ import os import math import torch import numpy as np import matplotlib.pyplot as plt from complex_auto.plot import plot_hist, make_tiles from complex_auto.util import cuda_variable, to_numpy def plot_train_state_2d(loss_curve_eval, loss_curve_train, model, x, y, epoch, out_dir, length_ngram): model.eval() x = cuda_variable(x) y = cuda_variable(y) # calculate mapping of untransposed data amp_x, phase_x = model(x) amp_y, phase_y = model(y) recon_y = model.backward(amp_x, phase_y) recon_x = model.backward(amp_y, phase_x) make_tiles(to_numpy(recon_y).reshape(recon_y.shape[0], 1, -1, length_ngram), os.path.join(out_dir, f"recon_y{epoch}.png")) make_tiles(to_numpy(recon_x).reshape(recon_x.shape[0], 1, -1, length_ngram), os.path.join(out_dir, f"recon_x{epoch}.png")) plot_hist(to_numpy(recon_x), f"recon_x_hist_ep{epoch}", os.path.join(out_dir, f"recon_x_hist_ep{epoch}.png")) plot_hist(to_numpy(recon_y), f"recon_y_hist_ep{epoch}", os.path.join(out_dir, f"recon_y_hist_ep{epoch}.png")) half_weight = model.layer.weight.shape[0] // 2 make_tiles(to_numpy(model.layer.weight[:half_weight]).reshape(len( model.layer.weight[:half_weight]), 1, -1, length_ngram), os.path.join(out_dir, f"filters_x{epoch}.png")) make_tiles(to_numpy(model.layer.weight[half_weight:]).reshape(len( model.layer.weight[half_weight:]), 1, -1, length_ngram), os.path.join(out_dir, f"filters_y{epoch}.png")) plot_hist(to_numpy(model.layer.weight[:half_weight]).reshape(len( model.layer.weight[:half_weight]), 1, -1, length_ngram), f"filters_x_hist_ep{epoch}", os.path.join(out_dir, f"filters_x_hist_ep{epoch}.png")) plot_hist(to_numpy(model.layer.weight[half_weight:]).reshape(len( model.layer.weight[half_weight:]), 1, -1, length_ngram), f"filters_y_hist_ep{epoch}", os.path.join(out_dir, f"filters_y_hist_ep{epoch}.png")) # make_tiles(to_numpy(model.layer.weight).reshape(len( # model.layer.weight), # 1, -1, # length_ngram), # 
os.path.join(out_dir, f"filters_x{epoch}.png")) make_tiles(to_numpy(amp_x)[:, None, None, :], os.path.join(out_dir, f"ampx_{epoch}.png")) plot_hist(to_numpy(amp_x), f"ampx_hist_ep{epoch}", os.path.join(out_dir, f"ampx_hist_ep{epoch}.png")) make_tiles(to_numpy(amp_y)[:, None, None, :], os.path.join(out_dir, f"ampy_{epoch}.png")) plot_hist(to_numpy(amp_y), f"ampy_hist_ep{epoch}", os.path.join(out_dir, f"ampy_hist_ep{epoch}.png")) make_tiles(to_numpy(phase_x)[:, None, None, :], os.path.join(out_dir, f"phasex_{epoch}.png")) plot_hist(to_numpy(phase_x), f"phasex_hist_ep{epoch}", os.path.join(out_dir, f"phasex_hist_ep{epoch}.png")) make_tiles(to_numpy(phase_y)[:, None, None, :], os.path.join(out_dir, f"phasey_{epoch}.png")) plot_hist(to_numpy(phase_y), f"phasey_hist_ep{epoch}", os.path.join(out_dir, f"phasey_hist_ep{epoch}.png")) plot_hist(to_numpy(x.data), f"input_hist_ep{epoch}", os.path.join(out_dir, f"input_hist_ep{epoch}.png")) plot_hist(to_numpy(y.data), f"target_hist_ep{epoch}", os.path.join(out_dir, f"target_hist_ep{epoch}.png")) input_np = to_numpy(x.data) make_tiles(input_np.reshape(x.shape[0], 1, -1, length_ngram), os.path.join(out_dir, f"input_{epoch}.png")) target_np = to_numpy(y.data) make_tiles(target_np.reshape(y.shape[0], 1, -1, length_ngram), os.path.join(out_dir, f"target_{epoch}.png")) plt.clf() plt.plot(loss_curve_train) plt.plot(loss_curve_eval) plt.savefig( os.path.join(out_dir, f"loss_curve_{epoch}.png")) def plot_train_state_1d(loss_curve_eval, loss_curve_train, model, x, y, epoch, out_dir, length_ngram): model.eval() x = cuda_variable(x) y = cuda_variable(y) # calculate mapping of untransposed data amp_x, phase_x = model(x) amp_y, phase_y = model(y) recon_y = model.backward(amp_x, phase_y) recon_x = model.backward(amp_y, phase_x) plot_audiobatch(recon_y[:20, None, :].detach().cpu(), os.path.join(out_dir, f"recon_y{epoch}.png")) plot_audiobatch(recon_x[:20, None, :].detach().cpu(), os.path.join(out_dir, f"recon_x{epoch}.png")) 
plot_audiobatch(y[:20, None, :].detach().cpu(), os.path.join(out_dir, f"input_y_sig{epoch}.png")) plot_audiobatch(x[:20, None, :].detach().cpu(), os.path.join(out_dir, f"input_x_sig{epoch}.png")) plot_hist(to_numpy(recon_x), f"recon_x_hist_ep{epoch}", os.path.join(out_dir, f"recon_x_hist_ep{epoch}.png")) plot_hist(to_numpy(recon_y), f"recon_y_hist_ep{epoch}", os.path.join(out_dir, f"recon_y_hist_ep{epoch}.png")) make_tiles(to_numpy(model.layer.weight).reshape(len( model.layer.weight), 1, -1, length_ngram), os.path.join(out_dir, f"filters_x{epoch}.png")) make_tiles(to_numpy(model.layer.weight).reshape(len( model.layer.weight), 1, -1, length_ngram), os.path.join(out_dir, f"filters_y{epoch}.png")) plot_audiobatch(model.layer.weight[:20, None, :].detach().cpu(), os.path.join(out_dir, f"filters_sig_real{epoch}.png")) half = model.layer.weight.shape[0] // 2 plot_audiobatch(model.layer.weight[half:half+20, None, :].detach().cpu(), os.path.join(out_dir, f"filters_sig_compl{epoch}.png")) make_tiles(to_numpy(amp_x)[:, None, None, :], os.path.join(out_dir, f"ampx_{epoch}.png")) plot_hist(to_numpy(amp_x), f"ampx_hist_ep{epoch}", os.path.join(out_dir, f"ampx_hist_ep{epoch}.png")) make_tiles(to_numpy(amp_y)[:, None, None, :], os.path.join(out_dir, f"ampy_{epoch}.png")) plot_hist(to_numpy(amp_y), f"ampy_hist_ep{epoch}", os.path.join(out_dir, f"ampy_hist_ep{epoch}.png")) make_tiles(to_numpy(phase_x)[:, None, None, :], os.path.join(out_dir, f"phasex_{epoch}.png")) plot_hist(to_numpy(phase_x), f"phasex_hist_ep{epoch}", os.path.join(out_dir, f"phasex_hist_ep{epoch}.png")) make_tiles(to_numpy(phase_y)[:, None, None, :], os.path.join(out_dir, f"phasey_{epoch}.png")) plot_hist(to_numpy(phase_y), f"phasey_hist_ep{epoch}", os.path.join(out_dir, f"phasey_hist_ep{epoch}.png")) plot_hist(to_numpy(x.data), f"input_hist_ep{epoch}", os.path.join(out_dir, f"input_hist_ep{epoch}.png")) plot_hist(to_numpy(y.data), f"target_hist_ep{epoch}", os.path.join(out_dir, f"target_hist_ep{epoch}.png")) 
input_np = to_numpy(x.data) make_tiles(input_np.reshape(x.shape[0], 1, -1, length_ngram), os.path.join(out_dir, f"input_{epoch}.png")) target_np = to_numpy(y.data) make_tiles(target_np.reshape(y.shape[0], 1, -1, length_ngram), os.path.join(out_dir, f"target_{epoch}.png")) plt.clf() plt.plot(loss_curve_train) plt.plot(loss_curve_eval) plt.savefig( os.path.join(out_dir, f"loss_curve_{epoch}.png")) def plot_audiobatch(batch: torch.Tensor, fn, num_example: int=30, verbose: bool=False): # by Stephane Rivaud plt.clf() audio_list = [(audio.squeeze(), 22050) for audio in batch] # determine the number of rows and columns ncols = int(math.sqrt(len(audio_list))) nrows = math.ceil(len(audio_list) / ncols) # plotting files fig, ax = plt.subplots(ncols=ncols, nrows=nrows, sharex=True, sharey=True) for k in range(len(audio_list)): audio, sr = audio_list[k] i, j = k // ncols, k % ncols x = torch.arange(audio.size(0), dtype=torch.float32) / sr ax[i, j].plot(x.numpy(), audio.numpy(), linewidth=1) #ax[i, j].set_xlabel('Time (s)') #ax[i, j].set_ylabel('Amplitude') #ax[i, j].set_title(f'Sample {k}') #plt.show() plt.savefig(fn) #plt.savefig(fn+".pdf")
45.706161
86
0.549461
1,345
9,644
3.669145
0.10855
0.057143
0.091185
0.118541
0.821885
0.802432
0.789868
0.789868
0.789868
0.78845
0
0.010092
0.311593
9,644
210
87
45.92381
0.733243
0.053401
0
0.73125
0
0
0.145964
0.093904
0
0
0
0
0
1
0.01875
false
0
0.04375
0
0.0625
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
40dceb7f2c4df839073f969fa66a6d2bb13aafed
5,222
py
Python
user/vistas/templates/organizaciones.py
ZerpaTechnology/occoa
a8c0bd2657bc058801a883109c0ec0d608d04ccc
[ "Apache-2.0" ]
null
null
null
user/vistas/templates/organizaciones.py
ZerpaTechnology/occoa
a8c0bd2657bc058801a883109c0ec0d608d04ccc
[ "Apache-2.0" ]
null
null
null
user/vistas/templates/organizaciones.py
ZerpaTechnology/occoa
a8c0bd2657bc058801a883109c0ec0d608d04ccc
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Page template for the student-organisations view of the Unexpo Charallave
# site.  It appends HTML fragments to the template framework's `doc` buffer
# and pulls in shared partials via `incluir(data, ...)`.  `doc`, `data` and
# `incluir` are provided by the surrounding framework (defined elsewhere).
doc+='''<!DOCTYPE html> <html> '''
data["titulo"]="Unexpo núcleo Charallave"
doc+=''' '''
incluir(data,"head")
doc+=''' <body class="sin-pad sin-marg"> <div class="container sin-pad"> <div class="row bg-ubuntu_jet marg-t5"> <div class="col-md-12 height-10 ohidden"> <img src="'''
# Best-effort banner URL: if it cannot be built (e.g. missing 'base_url'),
# the error text is rendered into the page instead of aborting.
try:
    doc+=str(data['base_url']+'static/imgs/portada.jpg')
# FIX: was the Python-2-only form `except Exception, e:` (a SyntaxError on
# Python 3); `except ... as ...` is valid on Python 2.6+ and 3.x alike.
except Exception as e:
    doc+=str(e)
doc+='''" class="width-100p top-100"> </div> </div> '''
incluir(data,"barra-buscador")
doc+=''' <div class="row bg-porcelain height-50"> <div class="col-md-4 height-5 pad-2"> <h1>CENUC</h1> </div> <div class="col-md-2 height-5"> <a href="" class="d-block marg-05 decoration-none"><div class="pad-1 bg-ubuntu_blue white decoration-none">MISIÓN</div></a> <a href="" class="d-block marg-05 decoration-none"><div class="pad-1 bg-ubuntu_blue white decoration-none">VISIÓN</div></a> <a href="" class="d-block marg-05 decoration-none"><div class="pad-1 bg-ubuntu_blue white decoration-none">SERVICIOS</div></a> </div> <div class="col-md-6 height-5 pad-1 bg-ubuntu_porcelain font-ubuntu"> <p> CENUC es el centro estudiantil de nuestro núcleo </p> </div> </div> <hr> <div class="row bg-porcelain height-50"> <div class="col-md-4 height-5 pad-2"> <h1>CIDGUN</h1> </div> <div class="col-md-2 height-5"> <a href="" class="d-block marg-05 decoration-none"><div class="pad-1 bg-ubuntu_blue white decoration-none">MISIÓN</div></a> <a href="" class="d-block marg-05 decoration-none"><div class="pad-1 bg-ubuntu_blue white decoration-none">VISIÓN</div></a> <a href="" class="d-block marg-05 decoration-none"><div class="pad-1 bg-ubuntu_blue white decoration-none">SERVICIOS</div></a> </div> <div class="col-md-6 height-5 pad-1 bg-ubuntu_porcelain font-ubuntu"> <p> El Centro de Innovación y Desarrollo Gran Unexpo (C.I.D.G.UN por sus siglas) es una organización estudiantil conformada por un grupo de estudiantes de nuestro núcleo de challarave con el fin de garantizar la capacidad creación y ejecución de proyecto generados por nuestros estudiantes, de modo que la UNEXPO como casa de estudios de CIDGUN pueda mantener sus capacidades Cientifico-Tegnologicas y competir ante otras instituciones que desarrollen tecnologias en igualdad de condiciones. </p> <p> ¡Haciendo nuestro aporte para construir un país potencia! </p> </div> </div> <hr> <div class="row bg-porcelain height-50"> <div class="col-md-4 height-5 pad-2"> <h1>MEGUN</h1> </div> <div class="col-md-2 height-5"> <a href="" class="d-block marg-05 decoration-none"><div class="pad-1 bg-ubuntu_blue white decoration-none">MISIÓN</div></a> <a href="" class="d-block marg-05 decoration-none"><div class="pad-1 bg-ubuntu_blue white decoration-none">VISIÓN</div></a> <a href="" class="d-block marg-05 decoration-none"><div class="pad-1 bg-ubuntu_blue white decoration-none">SERVICIOS</div></a> </div> <div class="col-md-6 height-5 pad-1 bg-ubuntu_porcelain font-ubuntu"> <p> Organización politica del núcleo Charallave </p> </div> </div> <hr> <div class="row bg-porcelain height-50"> <div class="col-md-4 height-5 pad-2"> <h1>CECHAR</h1> </div> <div class="col-md-2 height-5"> <a href="" class="d-block marg-05 decoration-none"><div class="pad-1 bg-ubuntu_blue white decoration-none">MISIÓN</div></a> <a href="" class="d-block marg-05 decoration-none"><div class="pad-1 bg-ubuntu_blue white decoration-none">VISIÓN</div></a> <a href="" class="d-block marg-05 decoration-none"><div class="pad-1 bg-ubuntu_blue white decoration-none">SERVICIOS</div></a> </div> <div class="col-md-6 height-5 pad-1 bg-ubuntu_porcelain font-ubuntu"> <p> Organización Estudiantil encargada de organizar y entrenar a los estudiantes del nucleo para escurciones. </p> </div> </div> <hr> <div class="row bg-porcelain height-50"> <div class="col-md-4 height-5 pad-2"> <h1>FUNEC</h1> </div> <div class="col-md-2 height-5"> <a href="" class="d-block marg-05 decoration-none"><div class="pad-1 bg-ubuntu_blue white decoration-none">MISIÓN</div></a> <a href="" class="d-block marg-05 decoration-none"><div class="pad-1 bg-ubuntu_blue white decoration-none">VISIÓN</div></a> <a href="" class="d-block marg-05 decoration-none"><div class="pad-1 bg-ubuntu_blue white decoration-none">SERVICIOS</div></a> </div> <div class="col-md-6 height-5 pad-1 bg-ubuntu_porcelain font-ubuntu"> <p> Organización politica del núcleo Charallave </p> </div> </div> <hr> <div class="row bg-porcelain height-50"> <div class="col-md-4 height-5 pad-2"> <h1>UNEXPO SOMOS TODOS</h1> </div> <div class="col-md-2 height-5"> <a href="" class="d-block marg-05 decoration-none"><div class="pad-1 bg-ubuntu_blue white decoration-none">MISIÓN</div></a> <a href="" class="d-block marg-05 decoration-none"><div class="pad-1 bg-ubuntu_blue white decoration-none">VISIÓN</div></a> <a href="" class="d-block marg-05 decoration-none"><div class="pad-1 bg-ubuntu_blue white decoration-none">SERVICIOS</div></a> </div> <div class="col-md-6 height-5 pad-1 bg-ubuntu_porcelain font-ubuntu"> <p> Organización politica del núcleo Charallave </p> </div> </div> '''
incluir(data,"footer")
doc+=''' <div class="row"> </div> </div> '''
incluir(data,"header")
doc+=''' </body> </html>'''
38.970149
489
0.702413
896
5,222
4.065848
0.169643
0.101016
0.039528
0.079056
0.723305
0.723305
0.723305
0.723305
0.723305
0.723305
0
0.029538
0.105324
5,222
134
490
38.970149
0.75
0.007277
0
0.784615
0
0.192308
0.956396
0.199884
0
0
0
0
0
0
null
null
0
0
null
null
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
8
40f8522c89aa246b3bfc27673608dd91c0d9d588
2,368
py
Python
examples/logic.py
imfd/nlglib
6479aef47f7a5bfecc1c581f0a67ba1714bdda68
[ "MIT" ]
44
2016-02-09T23:44:42.000Z
2022-01-31T03:04:16.000Z
examples/logic.py
imfd/nlglib
6479aef47f7a5bfecc1c581f0a67ba1714bdda68
[ "MIT" ]
8
2018-07-06T08:28:35.000Z
2019-12-10T18:52:01.000Z
examples/logic.py
imfd/nlglib
6479aef47f7a5bfecc1c581f0a67ba1714bdda68
[ "MIT" ]
19
2018-05-18T14:48:43.000Z
2020-08-06T16:07:56.000Z
import logging from nlglib.realisation.simplenlg.realisation import Realiser from nlglib.lexicalisation import Lexicaliser from nlglib.macroplanning import * from nlglib.microplanning import * from nlglib.features import TENSE def run(): realise = Realiser(host='nlg.kutlak.info') lex = Lexicaliser(templates={ 'x': String('X'), 'arthur': Male('Arthur'), 'shrubbery': Clause(Var(0), VP('find', NP('a', 'shrubbery'), features=[TENSE.future])), 'knight': Clause(Var(0), VP('is', NP('a', 'knight'))), 'say_ni': Clause(Var(0), VP('say', Interjection('"Ni!"'))), }) print(realise(lex(formula_to_rst(expr(r'x'))))) print(realise(lex(formula_to_rst(expr(r'-x'))))) print(realise(lex(formula_to_rst(expr(r'x = 5'))))) print(realise(lex(formula_to_rst(expr(r'x != 5'))))) print(realise(lex(formula_to_rst(expr(r'knight(arthur)'))))) print(realise(lex(formula_to_rst(expr(r'-knight(arthur)'))))) print(realise(lex(formula_to_rst(expr(r'say_ni(arthur)'))))) print(realise(lex(formula_to_rst(expr(r'-say_ni(arthur)'))))) print(realise(lex(formula_to_rst(expr(r'shrubbery(arthur)'))))) print(realise(lex(formula_to_rst(expr(r'-shrubbery(arthur)'))))) print(realise(lex(formula_to_rst(expr(r'knight(arthur) & say_ni(arthur)'))))) print(realise(lex(formula_to_rst(expr(r'say_ni(arthur) | knight(arthur)'))))) print(realise(lex(formula_to_rst(expr(r'say_ni(arthur) -> knight(arthur)'))))) print(realise(lex(formula_to_rst(expr(r'knight(arthur) <-> say_ni(arthur)'))))) print(realise(lex(formula_to_rst(expr(r'say_ni(arthur) & -knight(arthur)'))))) print(realise(lex(formula_to_rst(expr(r'say_ni(arthur) | -knight(arthur)'))))) print(realise(lex(formula_to_rst(expr(r'say_ni(arthur) -> -knight(arthur)'))))) print(realise(lex(formula_to_rst(expr(r'-knight(arthur) <-> say_ni(arthur)'))))) print(realise(lex(formula_to_rst(expr(r'-knight(arthur) <-> -say_ni(arthur)'))))) print(realise(lex(formula_to_rst(expr(r'-(knight(arthur) <-> say_ni(arthur))'))))) print(realise(lex(formula_to_rst(expr(r'say_ni(arthur) & 
knight(arthur) & shrubbery(arthur)'))))) print(realise(lex(formula_to_rst(expr(r'say_ni(arthur) | knight(arthur) | shrubbery(arthur)'))))) if __name__ == '__main__': logging.basicConfig(level=logging.WARNING) run()
45.538462
101
0.679476
343
2,368
4.495627
0.154519
0.171206
0.214008
0.313878
0.708171
0.708171
0.708171
0.708171
0.708171
0.708171
0
0.002382
0.113598
2,368
51
102
46.431373
0.732253
0
0
0
0
0
0.26478
0
0
0
0
0
0
1
0.025
false
0
0.15
0
0.175
0.55
0
0
0
null
0
1
1
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
8
9087b486168a97f178a78dcfd276ce84c59eba4c
66
py
Python
todo_list/managers/__init__.py
Diorgeles/Sthima
d83b545d9a65a8f792fc8fe39a12ee1a4e5b85c4
[ "CC0-1.0" ]
null
null
null
todo_list/managers/__init__.py
Diorgeles/Sthima
d83b545d9a65a8f792fc8fe39a12ee1a4e5b85c4
[ "CC0-1.0" ]
null
null
null
todo_list/managers/__init__.py
Diorgeles/Sthima
d83b545d9a65a8f792fc8fe39a12ee1a4e5b85c4
[ "CC0-1.0" ]
null
null
null
from todo_list.managers.item_list_manager import Item_list_manager
66
66
0.924242
11
66
5.090909
0.636364
0.285714
0.535714
0
0
0
0
0
0
0
0
0
0.045455
66
1
66
66
0.888889
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
90a7fb6cf80c0cd4a5c9f2dde9ab74704505e202
3,919
py
Python
dlkit/records/repository/basic/base_records.py
UOC/dlkit
a9d265db67e81b9e0f405457464e762e2c03f769
[ "MIT" ]
2
2018-02-23T12:16:11.000Z
2020-10-08T17:54:24.000Z
dlkit/records/repository/basic/base_records.py
UOC/dlkit
a9d265db67e81b9e0f405457464e762e2c03f769
[ "MIT" ]
87
2017-04-21T18:57:15.000Z
2021-12-13T19:43:57.000Z
dlkit/records/repository/basic/base_records.py
UOC/dlkit
a9d265db67e81b9e0f405457464e762e2c03f769
[ "MIT" ]
1
2018-03-01T16:44:25.000Z
2018-03-01T16:44:25.000Z
from dlkit.json_.utilities import JSONClientValidated from dlkit.json_.repository.objects import Asset, AssetList from dlkit.primordium.id.primitives import Id from dlkit.abstract_osid.osid.errors import IllegalState from bson.objectid import ObjectId from ...osid.base_records import ProvenanceRecord class ProvenanceAssetRecord(ProvenanceRecord): def get_provenance_parent(self): if self.has_provenance(): collection = JSONClientValidated('repository', collection='Asset', runtime=self.my_osid_object._runtime) result = collection.find_one({'_id': ObjectId(Id(self.get_provenance_id()).get_identifier())}) return Asset(osid_object_map=result, runtime=self.my_osid_object._runtime, proxy=self.my_osid_object._proxy) raise IllegalState("Asset has no provenance parent.") def has_provenance_children(self): collection = JSONClientValidated('repository', collection='Asset', runtime=self.my_osid_object._runtime) if collection.find({'provenanceId': self.my_osid_object.object_map['id']}).count() > 0: return True else: return False def get_provenance_children(self): if self.has_provenance_children(): collection = JSONClientValidated('repository', collection='Asset', runtime=self.my_osid_object._runtime) result = collection.find({'provenanceId': self.my_osid_object.object_map['id']}) return AssetList(result, runtime=self.my_osid_object._runtime, proxy=self.my_osid_object._proxy) raise IllegalState('No provenance children.') provenance_children = property(fget=get_provenance_children) provenance_parent = property(fget=get_provenance_parent) class ProvenanceCompositionRecord(ProvenanceRecord): def get_provenance_parent(self): if self.has_provenance(): collection = JSONClientValidated('repository', collection='Composition', runtime=self.my_osid_object._runtime) result = collection.find_one({'_id': ObjectId(Id(self.get_provenance_id()).get_identifier())}) return Asset(osid_object_map=result, runtime=self.my_osid_object._runtime, proxy=self.my_osid_object._proxy) raise 
IllegalState("Composition has no provenance parent.") def has_provenance_children(self): collection = JSONClientValidated('repository', collection='Composition', runtime=self.my_osid_object._runtime) if collection.find({'provenanceId': self.my_osid_object.object_map['id']}).count() > 0: return True else: return False def get_provenance_children(self): if self.has_provenance_children(): collection = JSONClientValidated('repository', collection='Composition', runtime=self.my_osid_object._runtime) result = collection.find({'provenanceId': self.my_osid_object.object_map['id']}) return AssetList(result, runtime=self.my_osid_object._runtime, proxy=self.my_osid_object._proxy) raise IllegalState('No provenance children.') provenance_children = property(fget=get_provenance_children) provenance_parent = property(fget=get_provenance_parent)
47.792683
106
0.596581
362
3,919
6.176796
0.151934
0.089445
0.080501
0.128801
0.852415
0.852415
0.852415
0.852415
0.852415
0.852415
0
0.000752
0.321511
3,919
81
107
48.382716
0.840165
0
0
0.852941
0
0
0.072467
0
0
0
0
0
0
1
0.088235
false
0
0.088235
0
0.382353
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
90ca5b2f284d7eed62f400b2479bad260a036533
81,203
py
Python
tests/bugs/core_4115_test.py
reevespaul/firebird-qa
98f16f425aa9ab8ee63b86172f959d63a2d76f21
[ "MIT" ]
null
null
null
tests/bugs/core_4115_test.py
reevespaul/firebird-qa
98f16f425aa9ab8ee63b86172f959d63a2d76f21
[ "MIT" ]
null
null
null
tests/bugs/core_4115_test.py
reevespaul/firebird-qa
98f16f425aa9ab8ee63b86172f959d63a2d76f21
[ "MIT" ]
null
null
null
#coding:utf-8 # # id: bugs.core_4115 # title: EXECUTE BLOCK execution cause server crash # decription: # Confirmed lost of connection (but *not* crash) on 2.5.2.26540. # Last lines in trace: # INSERT INTO PU_BTET(ID,PBIZ_ID,... )VALUES(1711941,1559865, ...); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,... # (i.e. it occurs when performing EXECUTE BLOCK) # STDERR contains: # Statement failed, SQLSTATE = 08006 # Unable to complete network request to host "localhost". # -Error reading data from the connection. # No such problem on 2.5.7.27050 # # 02-mar-2021. Re-implemented in order to have ability to run this test on Linux. # Ttest creates table and fills it with non-ascii characters in init_script, using charset = UTF8. # Then it generates .sql script for running it in separae ISQL process. # This script makes connection to test DB using charset = WIN1250 and perform needed DML. # Result will be redirected to .log which will be opened via codecs.open(...encoding='cp1250'). # Its content will be converted to UTF8 for showing in expected_stdout. # # Checked on: # * Windows: 4.0.0.2377, 3.0.8.33420, 2.5.9.27152 # * Linux: 4.0.0.2377, 3.0.8.33415 # # tracker_id: CORE-4115 # min_versions: ['2.5.7'] # versions: 2.5.7 # qmid: None import pytest from firebird.qa import db_factory, isql_act, Action # version: 2.5.7 # resources: None substitutions_1 = [] init_script_1 = """""" db_1 = db_factory(charset='WIN1250', sql_dialect=1, init=init_script_1) # test_script_1 #--- # # import os # import codecs # import subprocess # import time # # db_conn.close() # # #-------------------------------------------- # # def flush_and_close( file_handle ): # # https://docs.python.org/2/library/os.html#os.fsync # # If you're starting with a Python file object f, # # first do f.flush(), and # # then do os.fsync(f.fileno()), to ensure that all internal buffers associated with f are written to disk. 
# global os # # file_handle.flush() # if file_handle.mode not in ('r', 'rb') and file_handle.name != os.devnull: # # otherwise: "OSError: [Errno 9] Bad file descriptor"! # os.fsync(file_handle.fileno()) # file_handle.close() # # #-------------------------------------------- # # def cleanup( f_names_list ): # global os # for i in range(len( f_names_list )): # if type(f_names_list[i]) == file: # del_name = f_names_list[i].name # elif type(f_names_list[i]) == str: # del_name = f_names_list[i] # else: # print('Unrecognized type of element:', f_names_list[i], ' - can not be treated as file.') # del_name = None # # if del_name and os.path.isfile( del_name ): # os.remove( del_name ) # # #-------------------------------------------- # # # Code to be executed further in separate ISQL process: # ############################# # sql_txt=''' set bail on; # set names WIN1250; # connect '%(dsn)s' user '%(user_name)s' password '%(user_password)s'; # # create domain xadoszam as varchar(20); # create domain xarf10 as numeric(15, 10); # create domain xarf10n as numeric(15, 10) not null; # create domain xarfoly as numeric(15, 4); # create domain xbinary as blob sub_type 0 segment size 80; # create domain xblnev as varchar(15); # create domain xcrcn as varchar(30) not null; # create domain xcrdn as timestamp not null; # create domain xcrsn as varchar(30) not null; # create domain xcrun as varchar(30) not null; # create domain xdat as timestamp; # create domain xdatum as timestamp; # create domain xdatumn as timestamp not null; # create domain xdnem as varchar(3); # create domain xegysn as numeric(15, 4) not null; # create domain xert as numeric(15, 2); # create domain xert10 as numeric(15, 10); # create domain xert4 as numeric(15, 4); # create domain xert4n as numeric(15, 4) not null; # create domain xert6 as numeric(15, 6); # create domain xertdev as numeric(15, 4); # create domain xertdevn as numeric(15, 4) not null; # create domain xertgy as numeric(15, 2); # create domain xertn as 
numeric(15, 2) not null; # create domain xfloat52 as numeric(5, 2); # create domain xid as integer; # create domain xidn as integer not null; # create domain xiktn as integer not null; # create domain ximage as blob sub_type 0 segment size 80; # create domain xinfo as varchar(1000); # create domain xinfo2 as varchar(2000); # create domain xint as integer; # create domain xintn as integer not null; # create domain xkarfoly as numeric(15, 6); # create domain xkod as varchar(12); # create domain xkodn as varchar(12) not null; # create domain xlmc as varchar(30); # create domain xlmd as timestamp; # create domain xlms as varchar(30); # create domain xlmu as varchar(30); # create domain xmegj1 as blob sub_type text segment size 80; # create domain xmegyseg as varchar(6); # create domain xmenny as numeric(15, 3); # create domain xmennyn as numeric(15, 3) not null; # create domain xmeny as numeric(9, 3); # create domain xnyelv as varchar(2); # create domain xpfjsz as varchar(34); # create domain xpidn as integer not null; # create domain xpoz as integer; # create domain xreport as blob sub_type text segment size 80; # create domain xszamlamaszk as varchar(50); # create domain xszazalek as numeric(6, 2); # create domain xszazalekn as numeric(6, 2) not null; # create domain xszla as varchar(9); # create domain xszlan as varchar(9) not null; # create domain xszoveg as blob sub_type text segment size 80; # create domain xszovegn as blob sub_type text segment size 80 not null; # create domain xtblnev as varchar(15); # create domain xthnev as varchar(100); # create domain xthnevn as varchar(100) not null; # create domain xtimestamp as timestamp; # create domain xtimestampn as timestamp not null; # create domain xtort as numeric(15, 4); # create domain xtortn as numeric(15, 4) not null; # create domain xtrnevn as varchar(20) not null; # create domain xvar1 as varchar(1); # create domain xvar10 as varchar(10); # create domain xvar100 as varchar(100); # create domain xvar1000 as 
varchar(1000); # create domain xvar10000 as varchar(10000); # create domain xvar1000n as varchar(1000) not null; # create domain xvar100n as varchar(100) not null; # create domain xvar10n as varchar(10) not null; # create domain xvar11 as varchar(11); # create domain xvar12 as varchar(12); # create domain xvar12n as varchar(12) not null; # create domain xvar13 as varchar(13); # create domain xvar13n as varchar(13) not null; # create domain xvar14 as varchar(14); # create domain xvar140 as varchar(140); # create domain xvar14n as varchar(14) not null; # create domain xvar16 as varchar(16); # create domain xvar1n as varchar(1) not null; # create domain xvar2 as varchar(2); # create domain xvar20 as varchar(20); # create domain xvar200 as varchar(200); # create domain xvar200n as varchar(200) not null; # create domain xvar20n as varchar(20) not null; # create domain xvar24 as varchar(24); # create domain xvar25 as varchar(25); # create domain xvar25n as varchar(25) not null; # create domain xvar2n as varchar(2) not null; # create domain xvar3 as varchar(3); # create domain xvar30 as varchar(30); # create domain xvar300 as varchar(300); # create domain xvar300n as varchar(300) not null; # create domain xvar30n as varchar(30) not null; # create domain xvar32 as varchar(32); # create domain xvar32000 as varchar(32000); # create domain xvar34 as varchar(34); # create domain xvar3n as varchar(3) not null; # create domain xvar4 as varchar(4); # create domain xvar40 as varchar(40); # create domain xvar40n as varchar(40) not null; # create domain xvar4n as varchar(4) not null; # create domain xvar5 as varchar(5); # create domain xvar50 as varchar(50); # create domain xvar500 as varchar(500); # create domain xvar500n as varchar(500) not null; # create domain xvar50n as varchar(50) not null; # create domain xvar5n as varchar(5) not null; # create domain xvar6 as varchar(6); # create domain xvar60n as varchar(60) not null; # create domain xvar63 as varchar(63); # create domain 
xvar6n as varchar(6) not null; # create domain xvar70 as varchar(70); # create domain xvar70n as varchar(70) not null; # create domain xvar7n as varchar(7) not null; # create domain xvar8 as varchar(8); # create domain xvar80 as varchar(80); # create domain xvar80n as varchar(80) not null; # create domain xvar8n as varchar(8) not null; # create domain xvar9 as varchar(9); # create domain xvert as varchar(8); # commit work; # # create table pu_btet (id xidn, # pbiz_id xidn, # gysor xintn, # tipus xvar1n, # afa_kulcs xvar5, # alap xertn, # ado xertn, # egys_ar xert4, # dev_alap xert4, # dev_ado xert4, # devegys_ar xert4, # db xmenny, # szoveg xvar50, # megjegyzes xmegj1, # cru xcrun, # crd xcrdn, # lmu xlmu, # lmd xlmd, # termek_id xid, # tjegyz_id xid, # bto_id xid, # mert_id xid, # artip_kod xvar14, # enged_id xid, # gysor_tol xint, # gysor_ig xint, # afa_ossze xvar1, # pbtetkapcs xvar5, # kamozgnem_id xid, # melleklet_db xint, # sarzs xvar12, # lejar xdatum, # felszab xdatum, # pu_afatipus_id xid, # afa_alap xert, # afa_ado xert, # afa_akulcs xvar5, # db2 xmenny, # mert_id2 xid, # mert_seged xvar1, # jutalek xert, # hull xvar1, # kod1 xvar40, # kod2 xvar40, # kod3 xvar40, # kod4 xvar40, # kod5 xvar40, # eloleg_biz_id xid, # lista_ar xert4, # egys_ar_diff xert4, # pu_ar_id xid, # akcio_szazalek xszazalek, # akciozott_ar xert4, # pu_mn_id xid, # afa_szla xszla, # afa_eszla xszla, # afa_minosit1 xkod, # afa_minosit2 xkod, # bado xert, # balap xert, # bdev xvar3, # barf xarf10, # ef_alap xert, # ef_ado xert, # ef_egys_ar xert4, # ef_dev_alap xert, # ef_dev_ado xert, # ef_dev_egys_ar xert4, # ef_szazalek xert, # malap xert, # mado xert, # mdev xdnem, # marfdatum xdatum, # idoszakszla_datum xvar50, # tovabbszamla_ugyf_id xid, # telj xdatum, # telj_arfolyam xarf10, # szarmhely xvar2, # ktrk_kod xvar20, # richtextmegj xszoveg, # db_keszlet xmenny, # mert_id_keszlet xid, # auto_arfkul_eloleg_btet_id xid, # szallitojegy_szam xvar20); # set term ^ ; # # create trigger 
insertpu_btet for pu_btet inactive before insert position 0 # as begin # new.cru = user; # new.crd = 'now'; # end^ # # create trigger updatepu_btet for pu_btet active before update position 0 # as begin # new.lmu = user; # new.lmd = 'now'; # end^ # set term ;^ # commit work; # # set term ^; # execute block as # begin # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712188,1560100,1,'N','15',59424,8914,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712187,1560099,1,'N','15',5467,820,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712186,1560098,1,'N','15',12991,1949,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712185,1560097,1,'N','15',30145,4522,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712184,1560096,1,'N','15',6455,968,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712183,1560095,1,'N','15',28020,4203,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712182,1560094,1,'N','15',4630,694,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 
07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712181,1560093,1,'N','15',36930,5540,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712180,1560092,1,'N','15',10734,1610,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712179,1560091,1,'N','15',3292,494,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712178,1560090,1,'N','15',27993,4199,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712177,1560089,1,'N','15',3195,479,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712176,1560088,1,'N','15',6520,978,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712175,1560087,1,'N','15',12399,1860,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712174,1560086,1,'N','15',17525,2629,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 
07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712173,1560085,1,'N','15',27000,4050,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712172,1560084,1,'N','15',28982,4347,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712171,1560083,1,'N','15',62384,9358,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712170,1560082,1,'N','15',29794,4469,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712169,1560081,1,'N','15',38982,5847,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712168,1560080,1,'N','15',32526,4879,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712167,1560079,1,'N','15',37630,5645,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712166,1560078,1,'N','15',3390,509,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 
07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712165,1560077,1,'N','15',75737,11361,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712164,1560076,1,'N','15',24257,3639,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712163,1560075,1,'N','15',44000,6600,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712162,1560074,1,'N','15',65663,9849,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712161,1560073,1,'N','15',33673,5051,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712160,1560072,1,'N','15',14943,2242,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712159,1560071,1,'N','15',4127,619,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712158,1560070,1,'N','15',35404,5311,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 
07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712157,1560069,1,'N','15',33337,5000,0,0,0,'2507-2511','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712156,1560068,1,'N','15',65810,9872,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712155,1560067,1,'N','25',84931,21233,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712154,1560066,1,'N','25',35486,8872,0,0,0,'656121,122','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712153,1560065,1,'N','25',46162,11541,0,0,0,'53479','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712152,1560064,1,'N','25',17183,4296,0,0,0,'700301,304','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712151,1560063,1,'N','25',38130,9532,0,0,0,'700117,700296','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712150,1560062,1,'N','25',48936,12234,0,0,0,'428','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 
08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712149,1560061,1,'N','25',38138,9534,0,0,0,'54431','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712148,1560060,1,'N','25',39791,9948,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712147,1560059,1,'N','25',92128,23032,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712146,1560058,1,'N','25',19069,4767,0,0,0,'429','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712145,1560057,1,'N','15',41340,6201,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712144,1560056,1,'N','15',76320,11448,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712143,1560055,1,'N','15',41460,6219,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712142,1560054,1,'N','25',20750,5188,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO 
PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712141,1560053,1,'N','25',4152,1038,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712140,1560052,1,'N','25',240,60,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712139,1560051,1,'N','25',43223,10806,0,0,0,'53435','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712138,1560050,1,'N','25',9818,2455,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712137,1560049,1,'N','25',65693,16423,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712136,1560048,1,'N','25',36700,9175,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712135,1560047,1,'N','15',5534,830,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712134,1560046,1,'N','25',1550,388,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO 
PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712133,1560045,1,'N','15',9036,1355,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712132,1560044,1,'N','25',12039,3010,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712131,1560043,1,'N','25',20280,5070,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712130,1560042,1,'N','15',124416,18662,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712129,1560041,1,'N','25',8420,2105,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712128,1560040,1,'N','25',78348,19587,0,0,0,'997915.602576','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712127,1560039,1,'N','25',7300,1825,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712126,1560038,1,'N','25',6393,1598,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO 
PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712125,1560037,1,'N','25',8200,2050,0,0,0,'997916.602575','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712124,1560036,1,'N','25',46660,11665,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712123,1560035,1,'N','25',3228,807,0,0,0,'997917.602574','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712122,1560034,1,'N','15',303913,45587,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712121,1560033,1,'N','25',2032100,508025,0,0,0,'807851','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712120,1560032,1,'N','15',33571,5036,0,0,0,'61124-126','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712119,1560031,1,'N','25',40825,10206,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712118,1560030,1,'N','15',38106,5716,0,0,0,'128-130','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO 
PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712117,1560029,1,'N','15',21545,3232,0,0,0,'2512','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712116,1560028,1,'N','25',30798,7700,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712115,1560027,2,'N','25',8588,2147,0,0,0,'28758','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712114,1560027,1,'N','15',20753,3113,0,0,0,'28758','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712113,1560026,2,'N','25',42716,10679,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712112,1560026,1,'N','15',31593,4739,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712111,1560025,1,'N','25',58080,14520,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712110,1560024,2,'N','25',11536,2884,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO 
PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712109,1560024,1,'N','15',65790,9869,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712108,1560023,1,'N','25',42028,10507,0,0,0,'707335.708232','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712107,1560022,1,'N','15',216186,32428,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712106,1560021,1,'N','15',14112,2117,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712105,1560020,1,'N','15',26591,3989,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712104,1560019,1,'N','25',13980,3495,0,0,0,'708234','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712103,1560018,1,'N','25',172058,43015,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712102,1560017,1,'N','25',40842,10211,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO 
PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712101,1560016,1,'N','25',248637,62159,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712100,1560015,2,'N','25',133760,33440,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712099,1560015,1,'N','15',527385,79108,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712098,1560014,1,'N','25',511560,127890,0,0,0,'3522','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712097,1560013,1,'N','15',7507,1126,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712096,1560012,1,'N','15',22440,3366,0,0,0,'28634','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712095,1560011,1,'N','25',29237,7309,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712094,1560010,1,'N','15',12647,1897,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO 
PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712093,1560009,1,'N','15',52600,7890,0,0,0,'75098-99','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712092,1560008,1,'N','25',357092,89273,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712091,1560007,1,'N','15',44649,6697,0,0,0,'75094-5.75092','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712090,1560006,1,'N','15',22651,3398,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712089,1560005,1,'N','15',46998,7050,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712088,1560004,2,'N','25',98323,24581,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712087,1560004,1,'N','15',302600,45390,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712086,1560003,1,'N','15',95588,14338,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO 
PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712085,1560002,1,'N','15',106649,15997,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712084,1560001,1,'N','15',290751,43613,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:13.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712083,1560000,1,'N','15',96480,14472,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712082,1559999,1,'N','15',54363,8155,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712081,1559998,1,'N','15',109602,16440,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712080,1559997,1,'N','25',986468,246617,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712079,1559996,1,'N','15',51440,7716,0,0,0,'15077,079','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712078,1559995,1,'N','15',61132,9170,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO 
PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712077,1559994,1,'N','15',71938,10791,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712076,1559993,2,'N','25',357860,89465,0,0,0,'11916','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712075,1559993,1,'N','15',194888,29233,0,0,0,'11916','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712074,1559992,1,'N','15',70543,10581,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712073,1559991,1,'N','15',232673,34901,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712072,1559990,1,'N','15',67939,10191,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712071,1559989,1,'N','15',67430,10114,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712070,1559988,1,'N','15',257317,38597,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO 
PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712069,1559987,1,'N','15',63028,9454,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712068,1559986,1,'N',' 5',52895,2645,0,0,0,'17559-560','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712067,1559985,1,'N','25',186876,46719,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712066,1559984,1,'N','15',49979,7497,0,0,0,'23025.28','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712065,1559983,1,'N','15',940004,141001,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712064,1559982,1,'N','15',36301,5445,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712063,1559981,1,'N','15',59507,8926,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712062,1559980,1,'N','15',107136,16070,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO 
PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712061,1559979,1,'N','25',357696,89424,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712060,1559978,1,'N','15',15251,2288,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712059,1559977,2,'N','25',137352,34338,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712058,1559977,1,'N','15',29920,4488,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712057,1559976,1,'N','15',57186,8578,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712056,1559975,1,'N','25',176048,44012,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712055,1559974,1,'N','15',18968,2845,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712054,1559973,1,'N','15',53789,8068,0,0,0,'61318-320','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO 
PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712053,1559972,1,'N','15',22820,3423,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712052,1559971,1,'N','15',35130,5270,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712051,1559970,1,'N','15',55930,8390,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712050,1559969,1,'N','15',338290,50743,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712049,1559968,1,'N','15',13678,2052,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712048,1559967,1,'N','15',159118,23868,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712047,1559966,1,'N','15',32627,4894,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712046,1559965,1,'N','15',12000,1800,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO 
PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712045,1559964,1,'N','15',42680,6402,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712044,1559963,1,'N','15',22775,3416,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712043,1559962,1,'N','15',53105,7966,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712042,1559961,1,'N','25',32469,8117,0,0,0,'45618,620','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712041,1559960,1,'N','15',130094,19514,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712040,1559959,1,'N','15',14701,2205,0,0,0,'52361','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712039,1559958,1,'N','15',12912,1937,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD,PU_AFATIPUS_ID)VALUES(1712038,1559957,1,'N','NK',10662,0,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000',1726469); # INSERT INTO 
PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712037,1559956,1,'N','15',27643,4146,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712036,1559955,2,'N','NK',14216,0,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712035,1559955,1,'N','25',799,200,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712034,1559954,2,'N','NK',30517,0,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712033,1559954,1,'N','25',400,100,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712032,1559953,2,'N','NK',56586,0,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712031,1559953,1,'N','25',1199,300,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD,PU_AFATIPUS_ID)VALUES(1712030,1559952,1,'N','NK',10662,0,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000',1726469); # INSERT INTO 
PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD,PU_AFATIPUS_ID)VALUES(1712029,1559951,1,'N','NK',24427,0,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000',1726469); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD,PU_AFATIPUS_ID)VALUES(1712028,1559950,1,'N','NK',45391,0,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000',1726469); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712027,1559949,1,'N','15',81129,12169,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712026,1559948,1,'N','15',79560,11934,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712025,1559947,1,'N','15',14364,2155,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712024,1559946,1,'N','15',11522,1728,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712023,1559945,1,'N','15',28106,4216,0,0,0,'20546,549','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712022,1559944,1,'N','15',622689,93403,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 
08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712021,1559943,1,'N','15',58940,8841,0,0,0,'20540-41','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712020,1559942,1,'N','15',34094,5114,0,0,0,'20574-575','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712019,1559941,1,'N','15',31861,4779,0,0,0,'2518-22','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712018,1559940,1,'N','15',23012,3452,0,0,0,'2513-16','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712017,1559939,1,'N','25',40570,10142,0,0,0,'401922-926','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712016,1559938,1,'N','25',33346,8336,0,0,0,'401918-920','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712015,1559937,1,'N','15',91396,13709,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712014,1559936,1,'N','25',10790,2698,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO 
PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712013,1559935,1,'N','25',108257,27064,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712012,1559934,1,'N','15',2270,341,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712011,1559933,2,'N','25',11076,2769,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712010,1559933,1,'N','15',21260,3189,0,0,0,'NINCS SZÖVEG','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712009,1559932,2,'N','25',7673,1918,0,0,0,'28755,784','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD)VALUES(1712008,1559932,1,'N','15',16161,2424,0,0,0,'28755,784','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000'); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD,PU_AFATIPUS_ID)VALUES(1712007,1559931,1,'N','NK',143734,0,0,0,0,'Bónuszba beszámitva D 193','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000',1726469); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD,PU_AFATIPUS_ID)VALUES(1712006,1559930,1,'N','NK',663448,0,0,0,0,'Bónuszba beszámitva D 193','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 
08:47:42.000',1726469); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD,PU_AFATIPUS_ID)VALUES(1712005,1559929,1,'N','NK',401818,0,0,0,0,'Bónuszba beszámitva D 193','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000',1726469); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD,PU_AFATIPUS_ID)VALUES(1712004,1559928,1,'N','NK',87800,0,0,0,0,'Bónuszba beszámitva D 193','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000',1726469); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD,PU_AFATIPUS_ID)VALUES(1712003,1559927,1,'N','NK',56250,0,0,0,0,'Bónuszba beszámitva D 193','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000',1726469); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD,PU_AFATIPUS_ID)VALUES(1712002,1559926,1,'N','NK',307720,0,0,0,0,'Bónuszba beszámitva D 193','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000',1726469); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD,PU_AFATIPUS_ID)VALUES(1712001,1559925,1,'N','NK',64338,0,0,0,0,'Bónuszba beszámitva D 193','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000',1726469); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD,PU_AFATIPUS_ID)VALUES(1712000,1559924,1,'N','NK',70564,0,0,0,0,'Bónuszba beszámitva D 193','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000',1726469); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD,PU_AFATIPUS_ID)VALUES(1711999,1559923,1,'N','NK',459,0,0,0,0,'Visszáru szla D 193','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000',1726469); # INSERT INTO 
PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD,PU_AFATIPUS_ID)VALUES(1711998,1559922,1,'N','NK',1080,0,0,0,0,'Visszáru szla D 193','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000',1726469); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD,PU_AFATIPUS_ID)VALUES(1711997,1559921,1,'N','NK',1150,0,0,0,0,'Visszáru szla D 193','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000',1726469); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD,PU_AFATIPUS_ID)VALUES(1711996,1559920,1,'N','NK',2800,0,0,0,0,'Visszáru szla D 193','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000',1726469); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD,PU_AFATIPUS_ID)VALUES(1711995,1559919,1,'N','NK',6480,0,0,0,0,'Visszáru szla D 193','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000',1726469); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD,PU_AFATIPUS_ID)VALUES(1711994,1559918,1,'N','NK',4670,0,0,0,0,'Visszáru szla D 193','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000',1726469); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD,PU_AFATIPUS_ID)VALUES(1711993,1559917,1,'N','NK',800,0,0,0,0,'Visszáru szla D 193','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000',1726469); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD,PU_AFATIPUS_ID)VALUES(1711992,1559916,1,'N','NK',15300,0,0,0,0,'Visszáru szla D 193','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000',1726469); # INSERT INTO 
PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD,PU_AFATIPUS_ID)VALUES(1711991,1559915,1,'N','NK',523202,0,0,0,0,'Számla kiegyenlitése D 193','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000',1726469); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD,PU_AFATIPUS_ID)VALUES(1711990,1559914,1,'N','NK',844886,0,0,0,0,'Számla kiegyenlitése D 193','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000',1726469); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD,PU_AFATIPUS_ID)VALUES(1711989,1559913,1,'N','NK',646060,0,0,0,0,'Számla kiegyenlitése D 193','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000',1726469); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD,PU_AFATIPUS_ID)VALUES(1711988,1559912,1,'N','NK',2197306,0,0,0,0,'Számla kiegyenlitése D 193','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000',1726469); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD,PU_AFATIPUS_ID)VALUES(1711987,1559911,1,'N','NK',111715,0,0,0,0,'Számla kiegyenlitése D 193','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000',1726469); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD,PU_AFATIPUS_ID)VALUES(1711986,1559910,1,'N','NK',6836,0,0,0,0,' D 193','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000',1726469); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD,PU_AFATIPUS_ID)VALUES(1711985,1559909,1,'N','NK',49479,0,0,0,0,'Számla kiegyenlitése D 193','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000',1726469); # INSERT INTO 
PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD,PU_AFATIPUS_ID)VALUES(1711984,1559908,1,'N','NK',337500,0,0,0,0,'Milek D 193','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000',1726469); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD,PU_AFATIPUS_ID)VALUES(1711983,1559907,1,'N','NK',110850,0,0,0,0,'Milek D 193','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000',1726469); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD,PU_AFATIPUS_ID)VALUES(1711982,1559906,1,'N','NK',130800,0,0,0,0,'Ergomat D 193','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000',1726469); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD,PU_AFATIPUS_ID)VALUES(1711981,1559905,1,'N','NK',82992,0,0,0,0,'Számla kiegyenlitése D 193','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000',1726469); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD,PU_AFATIPUS_ID)VALUES(1711980,1559904,1,'N','NK',128144,0,0,0,0,'Számla kiegyenlitése D 193','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000',1726469); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD,PU_AFATIPUS_ID)VALUES(1711979,1559903,1,'N','NK',37245,0,0,0,0,'Számla kiegyenlitése D 193','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000',1726469); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD,PU_AFATIPUS_ID)VALUES(1711978,1559902,1,'N','NK',25374,0,0,0,0,'Számla kiegyenlitése D 193','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000',1726469); # INSERT INTO 
PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD,PU_AFATIPUS_ID)VALUES(1711977,1559901,1,'N','NK',132515,0,0,0,0,'Számla kiegyenlitése D 193','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000',1726469); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD,PU_AFATIPUS_ID)VALUES(1711976,1559900,1,'N','NK',20250,0,0,0,0,'Elektro-top D 193','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000',1726469); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD,PU_AFATIPUS_ID)VALUES(1711975,1559899,1,'N','NK',37098,0,0,0,0,'Számla kiegyenlitése D 193','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000',1726469); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD,PU_AFATIPUS_ID)VALUES(1711974,1559898,1,'N','NK',95067,0,0,0,0,'Számla kiegyenlitése D 193','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000',1726469); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD,PU_AFATIPUS_ID)VALUES(1711973,1559897,1,'N','NK',99276,0,0,0,0,'Számla kiegyenlitése D 193','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000',1726469); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD,PU_AFATIPUS_ID)VALUES(1711972,1559896,1,'N','NK',119538,0,0,0,0,'Számla kiegyenlitése D 193','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000',1726469); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD,PU_AFATIPUS_ID)VALUES(1711971,1559895,1,'N','NK',124008,0,0,0,0,'Számla kiegyenlitése D 193','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000',1726469); # INSERT INTO 
PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD,PU_AFATIPUS_ID)VALUES(1711970,1559894,1,'N','NK',124356,0,0,0,0,'Számla kiegyenlitése D 193','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000',1726469); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD,PU_AFATIPUS_ID)VALUES(1711969,1559893,1,'N','NK',221511,0,0,0,0,'Számla kiegyenlitése D 193','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000',1726469); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD,PU_AFATIPUS_ID)VALUES(1711968,1559892,1,'N','NK',58665,0,0,0,0,'Számla kiegyenlitése D 193','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000',1726469); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD,PU_AFATIPUS_ID)VALUES(1711967,1559891,1,'N','NK',37446,0,0,0,0,'Számla kiegyenlitése D 193','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000',1726469); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD,PU_AFATIPUS_ID)VALUES(1711966,1559890,1,'N','NK',70422,0,0,0,0,'Számla kiegyenlitése D 193','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000',1726469); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD,PU_AFATIPUS_ID)VALUES(1711965,1559889,1,'N','NK',157506,0,0,0,0,'Számla kiegyenlitése D 193','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000',1726469); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD,PU_AFATIPUS_ID)VALUES(1711964,1559888,1,'N','NK',68310,0,0,0,0,'Számla kiegyenlitése D 193','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000',1726469); # INSERT INTO 
PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD,PU_AFATIPUS_ID)VALUES(1711963,1559887,1,'N','NK',87085,0,0,0,0,'Számla kiegyenlitése D 193','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000',1726469); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD,PU_AFATIPUS_ID)VALUES(1711962,1559886,1,'N','NK',6698,0,0,0,0,'Számla kiegyenlitése D 193','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000',1726469); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD,PU_AFATIPUS_ID)VALUES(1711961,1559885,1,'N','NK',6698,0,0,0,0,'Számla kiegyenlitése D 193','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000',1726469); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD,PU_AFATIPUS_ID)VALUES(1711960,1559884,1,'N','NK',6698,0,0,0,0,'Számla kiegyenlitése D 193','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000',1726469); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD,PU_AFATIPUS_ID)VALUES(1711959,1559883,1,'N','NK',6698,0,0,0,0,'Számla kiegyenlitése D 193','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000',1726469); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD,PU_AFATIPUS_ID)VALUES(1711958,1559882,1,'N','NK',6698,0,0,0,0,'Számla kiegyenlitése D 193','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000',1726469); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD,PU_AFATIPUS_ID)VALUES(1711957,1559881,1,'N','NK',6698,0,0,0,0,'Számla kiegyenlitése D 193','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000',1726469); # INSERT INTO 
PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD,PU_AFATIPUS_ID)VALUES(1711956,1559880,1,'N','NK',6120,0,0,0,0,'Számla kiegyenlitése D 193','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000',1726469); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD,PU_AFATIPUS_ID)VALUES(1711955,1559879,1,'N','NK',78091,0,0,0,0,'Számla kiegyenlitése D 193','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000',1726469); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD,PU_AFATIPUS_ID)VALUES(1711954,1559878,1,'N','NK',67735,0,0,0,0,'Számla kiegyenlitése D 193','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000',1726469); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD,PU_AFATIPUS_ID)VALUES(1711953,1559877,1,'N','NK',67335,0,0,0,0,'Számla kiegyenlitése D 193','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000',1726469); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD,PU_AFATIPUS_ID)VALUES(1711952,1559876,1,'N','NK',80344,0,0,0,0,'Számla kiegyenlitése D 193','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000',1726469); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD,PU_AFATIPUS_ID)VALUES(1711951,1559875,1,'N','NK',1000484,0,0,0,0,'Számla kiegyenlitése D 193','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000',1726469); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD,PU_AFATIPUS_ID)VALUES(1711950,1559874,1,'N','NK',87634,0,0,0,0,'Számla kiegyenlitése D 193','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000',1726469); # INSERT INTO 
PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD,PU_AFATIPUS_ID)VALUES(1711949,1559873,1,'N','NK',73417,0,0,0,0,'Számla kiegyenlitése D 193','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000',1726469); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD,PU_AFATIPUS_ID)VALUES(1711948,1559872,1,'N','NK',110644,0,0,0,0,'Számla kiegyenlitése D 193','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000',1726469); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD,PU_AFATIPUS_ID)VALUES(1711947,1559871,1,'N','NK',38715,0,0,0,0,'Számla kiegyenlitése D 193','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000',1726469); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD,PU_AFATIPUS_ID)VALUES(1711946,1559870,1,'N','NK',185,0,0,0,0,'Számla kiegyenlitése D 193','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000',1726469); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD,PU_AFATIPUS_ID)VALUES(1711945,1559869,1,'N','NK',498140,0,0,0,0,'Számla kiegyenlitése D 193','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000',1726469); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD,PU_AFATIPUS_ID)VALUES(1711944,1559868,1,'N','NK',354749,0,0,0,0,'Számla kiegyenlitése D 193','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000',1726469); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD,PU_AFATIPUS_ID)VALUES(1711943,1559867,1,'N','NK',123206,0,0,0,0,'Számla kiegyenlitése D 193','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000',1726469); # INSERT INTO 
PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD,PU_AFATIPUS_ID)VALUES(1711942,1559866,1,'N','NK',47458,0,0,0,0,'Számla kiegyenlitése D 193','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000',1726469); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD,PU_AFATIPUS_ID)VALUES(1711941,1559865,1,'N','NK',52165,0,0,0,0,'Számla kiegyenlitése D 193','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000',1726469); # INSERT INTO PU_BTET(ID,PBIZ_ID,GYSOR,TIPUS,AFA_KULCS,ALAP,ADO,EGYS_AR,DEVEGYS_AR,DB,SZOVEG,CRU,CRD,LMU,LMD,PU_AFATIPUS_ID)VALUES(1711940,1559864,1,'N','NK',103244,0,0,0,0,'Számla kiegyenlitése D 193','SYSDBA','2005.10.07 07:38:12.000','SYSDBA','2007.06.15 08:47:42.000',1726469); # end # ^ # set term ;^ # commit; # # set list on; # select count(*) from pu_btet; # ''' % dict(globals(), **locals()) # # f_run_sql = open( os.path.join(context['temp_directory'], 'tmp_4115_win1250.sql'), 'w' ) # # # REMOVE INDENTATION IN .SQL to prevent limitation on length of single command (execute block). 
# # Write into .sql file each line without leading and trailing spaces, # # with decoding from utf8 to win1250: # ##################################### # for i in sql_txt.split('\\n'): # f_run_sql.write( ''.join( (i.strip().decode('utf8').encode('cp1250'), '\\n' ) ) ) # # flush_and_close( f_run_sql ) # # # result: file tmp_3489_win1250.sql is encoded in win1250 # # f_run_log = open( os.path.splitext(f_run_sql.name)[0]+'.log', 'w') # subprocess.call( [ context['isql_path'], '-q', '-i', f_run_sql.name ], # stdout = f_run_log, # stderr = subprocess.STDOUT # ) # flush_and_close( f_run_log ) # result: output will be encoded in win1250 # # with codecs.open(f_run_log.name, 'r', encoding='cp1250' ) as f: # result_in_cp1250 = f.readlines() # # for i in result_in_cp1250: # print( i.encode('utf8') ) # # # cleanup: # ########### # cleanup( (f_run_sql, f_run_log) ) # # #--- #act_1 = python_act('db_1', test_script_1, substitutions=substitutions_1) expected_stdout_1 = """ COUNT 249 """ @pytest.mark.version('>=2.5.7') @pytest.mark.xfail def test_1(db_1): pytest.fail("Test not IMPLEMENTED")
129.9248
305
0.682709
15,180
81,203
3.548946
0.099605
0.021347
0.018042
0.074546
0.763128
0.746422
0.737512
0.73109
0.728231
0.7276
0
0.237322
0.11853
81,203
624
306
130.133013
0.515326
0.979447
0
0
0
0
0.079218
0
0
0
0
0
0
1
0.083333
false
0
0.166667
0
0.25
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
1
0
0
0
0
1
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
8
90d78271299d3c4296a7c75f67ea38b7da5d63dd
3,972
py
Python
tests/test_OHLCV.py
nardew/talipp
9300aa8b632bcfe7b48ed63c06fb06793a4a5674
[ "MIT" ]
54
2020-11-19T02:27:04.000Z
2022-02-22T06:31:05.000Z
tests/test_OHLCV.py
nardew/talipp
9300aa8b632bcfe7b48ed63c06fb06793a4a5674
[ "MIT" ]
24
2020-11-01T17:56:28.000Z
2021-09-15T18:40:04.000Z
tests/test_OHLCV.py
nardew/talipp
9300aa8b632bcfe7b48ed63c06fb06793a4a5674
[ "MIT" ]
14
2020-12-10T22:43:37.000Z
2022-01-15T22:23:42.000Z
import unittest from datetime import datetime from talipp.ohlcv import OHLCV, OHLCVFactory class Test(unittest.TestCase): def test_from_matrix_w_volume(self): values = [ [1, 2, 3, 4, 5], [6, 7, 8, 9, 0] ] self.assertListEqual(OHLCVFactory.from_matrix(values), [ OHLCV(1, 2, 3, 4, 5), OHLCV(6, 7, 8, 9, 0), ]) def test_from_matrix_wo_volume(self): values = [ [1, 2, 3, 4], [6, 7, 8, 9] ] self.assertListEqual(OHLCVFactory.from_matrix(values), [ OHLCV(1, 2, 3, 4, None), OHLCV(6, 7, 8, 9, None), ]) def test_from_matrix_w_time(self): now = datetime.now() values = [ [1, 2, 3, 4, 5, now], [6, 7, 8, 9, 0, now] ] self.assertListEqual(OHLCVFactory.from_matrix(values), [ OHLCV(1, 2, 3, 4, 5, now), OHLCV(6, 7, 8, 9, 0, now), ]) def test_from_matrix2_w_volume(self): values = [ [1, 6], [2, 7], [3, 8], [4, 9], [5, 0] ] self.assertListEqual(OHLCVFactory.from_matrix2(values), [ OHLCV(1, 2, 3, 4, 5), OHLCV(6, 7, 8, 9, 0), ]) def test_from_matrix2_wo_volume(self): values = [ [1, 6], [2, 7], [3, 8], [4, 9] ] self.assertListEqual(OHLCVFactory.from_matrix2(values), [ OHLCV(1, 2, 3, 4, None), OHLCV(6, 7, 8, 9, None), ]) def test_from_matrix2_w_time(self): now = datetime.now() values = [ [1, 6], [2, 7], [3, 8], [4, 9], [5, 0], [now, now] ] self.assertListEqual(OHLCVFactory.from_matrix2(values), [ OHLCV(1, 2, 3, 4, 5, now), OHLCV(6, 7, 8, 9, 0, now), ]) def test_from_dict_w_volume(self): values = { 'open': [1, 6], 'high': [2, 7], 'low': [3, 8], 'close': [4, 9], 'volume': [5, 0] } self.assertListEqual(OHLCVFactory.from_dict(values), [ OHLCV(1, 2, 3, 4, 5), OHLCV(6, 7, 8, 9, 0), ]) def test_from_dict_wo_volume(self): values = { 'open': [1, 6], 'high': [2, 7], 'low': [3, 8], 'close': [4, 9] } self.assertListEqual(OHLCVFactory.from_dict(values), [ OHLCV(1, 2, 3, 4, None), OHLCV(6, 7, 8, 9, None), ]) def test_from_dict_w_time(self): now = datetime.now() values = { 'open': [1, 6], 'high': [2, 7], 'low': [3, 8], 'close': [4, 9], 'volume': [5, 0], 'time': [now, now] } 
self.assertListEqual(OHLCVFactory.from_dict(values), [ OHLCV(1, 2, 3, 4, 5, now), OHLCV(6, 7, 8, 9, 0, now), ]) if __name__ == '__main__': unittest.main()
28.371429
63
0.33283
375
3,972
3.384
0.106667
0.018913
0.028369
0.037825
0.881009
0.835303
0.813239
0.763593
0.721828
0.721828
0
0.095974
0.543555
3,972
139
64
28.57554
0.603971
0
0
0.598291
0
0
0.018127
0
0
0
0
0
0.076923
1
0.076923
false
0
0.025641
0
0.111111
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
291eca323cc7f38afc96e15377d0ab136cc79398
2,959
py
Python
url_marker.py
AlexEgiazarov/ChanGuard
2a98ff8e8b06d891d31fd5fea31cdb5520cf9e54
[ "MIT" ]
1
2020-01-20T07:24:37.000Z
2020-01-20T07:24:37.000Z
url_marker.py
AlexEgiazarov/ChanGuard
2a98ff8e8b06d891d31fd5fea31cdb5520cf9e54
[ "MIT" ]
null
null
null
url_marker.py
AlexEgiazarov/ChanGuard
2a98ff8e8b06d891d31fd5fea31cdb5520cf9e54
[ "MIT" ]
null
null
null
#!/usr/bin/python # -*- coding: utf-8 -*- """ url matching regex http://daringfireball.net/2010/07/improved_regex_for_matching_urls """ """ The regex patterns in this gist are intended to match any URLs, including "mailto:foo@example.com", "x-whatever://foo", etc. For a pattern that attempts only to match web URLs (http, https), see: https://gist.github.com/gruber/8891611 """ ANY_URL_REGEX = r"""(?i)\b((?:[a-z][\w-]+:(?:/{1,3}|[a-z0-9%])|www\d{0,3}[.]|[a-z0-9.\-]+[.][a-z]{2,4}/)(?:[^\s()<>]+|\(([^\s()<>]+|(\([^\s()<>]+\)))*\))+(?:\(([^\s()<>]+|(\([^\s()<>]+\)))*\)|[^\s`!()\[\]{};:'".,<>?«»“”‘’]))""" """ The regex patterns in this gist are intended only to match web URLs -- http, https, and naked domains like "example.com". For a pattern that attempts to match all URLs, regardless of protocol, see: https://gist.github.com/gruber/249502 """ WEB_URL_REGEX = r"""(?i)\b((?:https?:(?:/{1,3}|[a-z0-9%])|[a-z0-9.\-]+[.](?:com|net|org|edu|gov|mil|aero|asia|biz|cat|coop|info|int|jobs|mobi|museum|name|post|pro|tel|travel|xxx|ac|ad|ae|af|ag|ai|al|am|an|ao|aq|ar|as|at|au|aw|ax|az|ba|bb|bd|be|bf|bg|bh|bi|bj|bm|bn|bo|br|bs|bt|bv|bw|by|bz|ca|cc|cd|cf|cg|ch|ci|ck|cl|cm|cn|co|cr|cs|cu|cv|cx|cy|cz|dd|de|dj|dk|dm|do|dz|ec|ee|eg|eh|er|es|et|eu|fi|fj|fk|fm|fo|fr|ga|gb|gd|ge|gf|gg|gh|gi|gl|gm|gn|gp|gq|gr|gs|gt|gu|gw|gy|hk|hm|hn|hr|ht|hu|id|ie|il|im|in|io|iq|ir|is|it|je|jm|jo|jp|ke|kg|kh|ki|km|kn|kp|kr|kw|ky|kz|la|lb|lc|li|lk|lr|ls|lt|lu|lv|ly|ma|mc|md|me|mg|mh|mk|ml|mm|mn|mo|mp|mq|mr|ms|mt|mu|mv|mw|mx|my|mz|na|nc|ne|nf|ng|ni|nl|no|np|nr|nu|nz|om|pa|pe|pf|pg|ph|pk|pl|pm|pn|pr|ps|pt|pw|py|qa|re|ro|rs|ru|rw|sa|sb|sc|sd|se|sg|sh|si|sj|Ja|sk|sl|sm|sn|so|sr|ss|st|su|sv|sx|sy|sz|tc|td|tf|tg|th|tj|tk|tl|tm|tn|to|tp|tr|tt|tv|tw|tz|ua|ug|uk|us|uy|uz|va|vc|ve|vg|vi|vn|vu|wf|ws|ye|yt|yu|za|zm|zw)/)(?:[^\s()<>{}\[\]]+|\([^\s()]*?\([^\s()]+\)[^\s()]*?\)|\([^\s]+?\))+(?:\([^\s()]*?\([^\s()]+\)[^\s()]*?\)|\([^\s]+?\)|[^\s`!()\[\]{};:'".,<>?«»“”‘’])|(?:(?<!@)[a-z0-9]+(?:[.\-][a-z0-9]+)*[.](?:com|net|org|ed
u|gov|mil|aero|asia|biz|cat|coop|info|int|jobs|mobi|museum|name|post|pro|tel|travel|xxx|ac|ad|ae|af|ag|ai|al|am|an|ao|aq|ar|as|at|au|aw|ax|az|ba|bb|bd|be|bf|bg|bh|bi|bj|bm|bn|bo|br|bs|bt|bv|bw|by|bz|ca|cc|cd|cf|cg|ch|ci|ck|cl|cm|cn|co|cr|cs|cu|cv|cx|cy|cz|dd|de|dj|dk|dm|do|dz|ec|ee|eg|eh|er|es|et|eu|fi|fj|fk|fm|fo|fr|ga|gb|gd|ge|gf|gg|gh|gi|gl|gm|gn|gp|gq|gr|gs|gt|gu|gw|gy|hk|hm|hn|hr|ht|hu|id|ie|il|im|in|io|iq|ir|is|it|je|jm|jo|jp|ke|kg|kh|ki|km|kn|kp|kr|kw|ky|kz|la|lb|lc|li|lk|lr|ls|lt|lu|lv|ly|ma|mc|md|me|mg|mh|mk|ml|mm|mn|mo|mp|mq|mr|ms|mt|mu|mv|mw|mx|my|mz|na|nc|ne|nf|ng|ni|nl|no|np|nr|nu|nz|om|pa|pe|pf|pg|ph|pk|pl|pm|pn|pr|ps|pt|pw|py|qa|re|ro|rs|ru|rw|sa|sb|sc|sd|se|sg|sh|si|sj|Ja|sk|sl|sm|sn|so|sr|ss|st|su|sv|sx|sy|sz|tc|td|tf|tg|th|tj|tk|tl|tm|tn|to|tp|tr|tt|tv|tw|tz|ua|ug|uk|us|uy|uz|va|vc|ve|vg|vi|vn|vu|wf|ws|ye|yt|yu|za|zm|zw)\b/?(?!@)))""" """ This regex pattern matches IP:PORT """ IP_REGEX = r"""[0-9]+(?:\.[0-9]+){3}:[0-9]+"""
105.678571
2,013
0.599527
732
2,959
2.416667
0.472678
0.015828
0.02035
0.022612
0.83437
0.788016
0.75749
0.721311
0.67948
0.673827
0
0.016445
0.034133
2,959
28
2,014
105.678571
0.60112
0.041906
0
0
0
0.666667
0.966087
0.966087
0
0
0
0
0
1
0
false
0
0
0
0
0
0
0
0
null
0
0
0
1
1
1
1
0
1
0
0
0
0
0
1
1
0
0
0
0
0
1
1
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
10
2924b36922b552cf61f39c4d0fddf489724e727f
3,084
py
Python
Classes/Extractors/LBP.py
lukkascost/MachineLearn
0c6228216d3c1e8b8f85b3e88b1306a7ff042e81
[ "MIT" ]
5
2018-02-01T22:15:46.000Z
2021-05-12T13:34:22.000Z
Classes/Extractors/LBP.py
lukkascost/MachineLearn
0c6228216d3c1e8b8f85b3e88b1306a7ff042e81
[ "MIT" ]
null
null
null
Classes/Extractors/LBP.py
lukkascost/MachineLearn
0c6228216d3c1e8b8f85b3e88b1306a7ff042e81
[ "MIT" ]
null
null
null
import numpy as np class Lbp8Bits: """""" def __init__(self, input_array): self.input_array = input_array self.att58 = [1, 2, 3, 4, 5, 7, 8, 9, 13, 15, 16, 17, 25, 29, 31, 32, 33, 49, 57, 61, 63, 64, 65, 97, 113, 121, 125, 127, 128, 129, 130, 132, 136, 144, 160, 192, 193, 194, 196, 200, 208, 224, 225, 226, 228, 232, 240, 241, 242, 244, 248, 249, 250, 252, 253, 254, 255, 256] self.number_of_attributes = 257 self.histogram = np.zeros(self.number_of_attributes) def calculate_attributes(self): """""" for i in range(1, self.input_array.shape[0] - 1): for j in range(1, self.input_array.shape[1] - 1): central = self.input_array[i, j] p11 = int(self.input_array[i - 1, j - 1] < central) p12 = int(self.input_array[i - 1, j] < central) p13 = int(self.input_array[i - 1, j + 1] < central) p21 = int(self.input_array[i, j - 1] < central) p23 = int(self.input_array[i, j + 1] < central) p31 = int(self.input_array[i + 1, j - 1] < central) p32 = int(self.input_array[i + 1, j] < central) p33 = int(self.input_array[i + 1, j + 1] < central) output = p11 + p21 * 2 + p31 * 4 + p32 * 8 + p33 * 16 + p23 * 32 + p13 * 64 + p12 * 128 self.histogram[output] += 1 if not (output in self.att58): self.histogram[-1] += 1 def set_attributes_values(self, lbp_att): """""" self.histogram = lbp_att self.number_of_attributes = len(lbp_att) def export_to_classifier(self, label): """""" returnable = np.full((self.number_of_attributes + 1), "", dtype=object) returnable[:-1] = self.histogram returnable[-1] = label return returnable class Lbp4Bits: """""" def __init__(self, input_array): self.input_array = input_array self.number_of_attributes = 16 self.histogram = np.zeros(self.number_of_attributes) def calculate_attributes(self): """""" for i in range(1, self.input_array.shape[0] - 1): for j in range(1, self.input_array.shape[1] - 1): central = self.input_array[i, j] p12 = int(self.input_array[i - 1, j] < central) p21 = int(self.input_array[i, j - 1] < central) p23 = int(self.input_array[i, j + 1] < central) 
p32 = int(self.input_array[i + 1, j] < central) output = p21 + p32 * 2 + p23 * 4 + p12 * 8 self.histogram[output] += 1 def set_attributes_values(self, lbp_att): """""" self.histogram = lbp_att self.number_of_attributes = len(lbp_att) def export_to_classifier(self, label): """""" returnable = np.full((self.number_of_attributes + 1), "", dtype=object) returnable[:-1] = self.histogram returnable[-1] = label return returnable
41.12
111
0.534695
418
3,084
3.791866
0.267943
0.15142
0.194322
0.132492
0.75836
0.75836
0.75836
0.75836
0.75836
0.664353
0
0.122824
0.329442
3,084
74
112
41.675676
0.643617
0
0
0.714286
0
0
0
0
0
0
0
0
0
1
0.142857
false
0
0.017857
0
0.232143
0
0
0
0
null
0
1
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
292d5832c61a34bab385257377311a705f9b3d5b
3,267
py
Python
AdventOfCode2015/Day15/Day15.py
MattTitmas/AdventOfCode
36be4f6bf973f77ff93b08dc69c977bb11951f27
[ "MIT" ]
null
null
null
AdventOfCode2015/Day15/Day15.py
MattTitmas/AdventOfCode
36be4f6bf973f77ff93b08dc69c977bb11951f27
[ "MIT" ]
null
null
null
AdventOfCode2015/Day15/Day15.py
MattTitmas/AdventOfCode
36be4f6bf973f77ff93b08dc69c977bb11951f27
[ "MIT" ]
null
null
null
def part1(): values = {i.split(":")[0] : {j.split(" ")[0] : int(j.split(" ")[1]) for j in i.split(": ")[1].split(", ")} for i in open("input.txt","r").read().split("\n")} maxScore = 0 for sprinkles in range(0, 101): for peanut in range(0, 100-sprinkles): for frosting in range(0, 100-sprinkles-peanut): if sprinkles + peanut + frosting <= 100: sugar = 100 - (sprinkles + peanut + frosting) totalCapacity = max(0, values['Sprinkles']['capacity'] * sprinkles + values['PeanutButter']['capacity'] * peanut + values['Frosting']['capacity'] * frosting + values['Sugar']['capacity'] * sugar) totalDurability = max(0, values['Sprinkles']['durability'] * sprinkles + values['PeanutButter']['durability'] * peanut + values['Frosting']['durability'] * frosting + values['Sugar']['durability'] * sugar) totalFlavour = max(0, values['Sprinkles']['flavor'] * sprinkles + values['PeanutButter']['flavor'] * peanut + values['Frosting']['flavor'] * frosting + values['Sugar']['flavor'] * sugar) totalTexture = max(0, values['Sprinkles']['texture'] * sprinkles + values['PeanutButter']['texture'] * peanut + values['Frosting']['texture'] * frosting + values['Sugar']['texture'] * sugar) score = totalCapacity * totalDurability * totalFlavour * totalTexture maxScore = max(maxScore, score) return maxScore def part2(): values = {i.split(":")[0] : {j.split(" ")[0] : int(j.split(" ")[1]) for j in i.split(": ")[1].split(", ")} for i in open("input.txt","r").read().split("\n")} maxScore = 0 for sprinkles in range(0, 101): for peanut in range(0, 101): for frosting in range(0, 101): if sprinkles + peanut + frosting <= 100: sugar = 100 - (sprinkles + peanut + frosting) totalCapacity = max(0, values['Sprinkles']['capacity'] * sprinkles + values['PeanutButter']['capacity'] * peanut + values['Frosting']['capacity'] * frosting + values['Sugar']['capacity'] * sugar) totalDurability = max(0, values['Sprinkles']['durability'] * sprinkles + values['PeanutButter']['durability'] * peanut + 
values['Frosting']['durability'] * frosting + values['Sugar']['durability'] * sugar) totalFlavour = max(0, values['Sprinkles']['flavor'] * sprinkles + values['PeanutButter']['flavor'] * peanut + values['Frosting']['flavor'] * frosting + values['Sugar']['flavor'] * sugar) totalTexture = max(0, values['Sprinkles']['texture'] * sprinkles + values['PeanutButter']['texture'] * peanut + values['Frosting']['texture'] * frosting + values['Sugar']['texture'] * sugar) totalCalories = values['Sprinkles']['calories'] * sprinkles + values['PeanutButter']['calories'] * peanut + values['Frosting']['calories'] * frosting + values['Sugar']['calories'] * sugar score = totalCapacity * totalDurability * totalFlavour * totalTexture if totalCalories == 500: maxScore = max(maxScore, score) return maxScore print(f"answer to part1: {part1()}") print(f"answer to part2: {part2()}")
85.973684
225
0.587389
329
3,267
5.832827
0.139818
0.070349
0.126628
0.079208
0.88692
0.853048
0.751433
0.751433
0.751433
0.751433
0
0.02499
0.228344
3,267
37
226
88.297297
0.736216
0
0
0.705882
0
0
0.206981
0
0
0
0
0
0
1
0.058824
false
0
0
0
0.117647
0.058824
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
292e01843351db171535c44a3ff250022f7980c3
71,151
py
Python
sdk/python/pulumi_azure/compute/windows_virtual_machine_scale_set.py
adnang/pulumi-azure
32360d2f1e41e27d7fdd6522cb26d65e531f279f
[ "ECL-2.0", "Apache-2.0" ]
null
null
null
sdk/python/pulumi_azure/compute/windows_virtual_machine_scale_set.py
adnang/pulumi-azure
32360d2f1e41e27d7fdd6522cb26d65e531f279f
[ "ECL-2.0", "Apache-2.0" ]
null
null
null
sdk/python/pulumi_azure/compute/windows_virtual_machine_scale_set.py
adnang/pulumi-azure
32360d2f1e41e27d7fdd6522cb26d65e531f279f
[ "ECL-2.0", "Apache-2.0" ]
null
null
null
# coding=utf-8 # *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. *** # *** Do not edit by hand unless you're certain you know what you are doing! *** import json import warnings import pulumi import pulumi.runtime from typing import Union from .. import utilities, tables class WindowsVirtualMachineScaleSet(pulumi.CustomResource): additional_capabilities: pulumi.Output[dict] """ A `additional_capabilities` block as defined below. * `ultraSsdEnabled` (`bool`) - Should the capacity to enable Data Disks of the `UltraSSD_LRS` storage account type be supported on this Virtual Machine Scale Set? Defaults to `false`. Changing this forces a new resource to be created. """ additional_unattend_contents: pulumi.Output[list] """ One or more `additional_unattend_content` blocks as defined below. * `content` (`str`) - The XML formatted content that is added to the unattend.xml file for the specified path and component. Changing this forces a new resource to be created. * `setting` (`str`) - The name of the setting to which the content applies. Possible values are `AutoLogon` and `FirstLogonCommands`. Changing this forces a new resource to be created. """ admin_password: pulumi.Output[str] """ The Password which should be used for the local-administrator on this Virtual Machine. Changing this forces a new resource to be created. """ admin_username: pulumi.Output[str] """ The username of the local administrator on each Virtual Machine Scale Set instance. Changing this forces a new resource to be created. """ automatic_instance_repair: pulumi.Output[dict] """ A `automatic_instance_repair` block as defined below. To enable the automatic instance repair, this Virtual Machine Scale Set must have a valid `health_probe_id` or an [Application Health Extension](https://docs.microsoft.com/en-us/azure/virtual-machine-scale-sets/virtual-machine-scale-sets-health-extension). 
* `enabled` (`bool`) - Should the automatic instance repair be enabled on this Virtual Machine Scale Set? * `gracePeriod` (`str`) - Amount of time (in minutes, between 30 and 90, defaults to 30 minutes) for which automatic repairs will be delayed. The grace period starts right after the VM is found unhealthy. The time duration should be specified in ISO 8601 format. """ automatic_os_upgrade_policy: pulumi.Output[dict] """ A `automatic_os_upgrade_policy` block as defined below. This is Required and can only be specified when `upgrade_mode` is set to `Automatic`. * `disableAutomaticRollback` (`bool`) - Should automatic rollbacks be disabled? Changing this forces a new resource to be created. * `enableAutomaticOsUpgrade` (`bool`) - Should OS Upgrades automatically be applied to Scale Set instances in a rolling fashion when a newer version of the OS Image becomes available? Changing this forces a new resource to be created. """ boot_diagnostics: pulumi.Output[dict] """ A `boot_diagnostics` block as defined below. * `storageAccountUri` (`str`) - The Primary/Secondary Endpoint for the Azure Storage Account which should be used to store Boot Diagnostics, including Console Output and Screenshots from the Hypervisor. """ computer_name_prefix: pulumi.Output[str] """ The prefix which should be used for the name of the Virtual Machines in this Scale Set. If unspecified this defaults to the value for the `name` field. If the value of the `name` field is not a valid `computer_name_prefix`, then you must specify `computer_name_prefix`. """ custom_data: pulumi.Output[str] """ The Base64-Encoded Custom Data which should be used for this Virtual Machine Scale Set. """ data_disks: pulumi.Output[list] """ One or more `data_disk` blocks as defined below. * `caching` (`str`) - The type of Caching which should be used for this Data Disk. Possible values are `None`, `ReadOnly` and `ReadWrite`. * `create_option` (`str`) - The create option which should be used for this Data Disk. 
Possible values are `Empty` and `FromImage`. Defaults to `Empty`. (`FromImage` should only be used if the source image includes data disks). * `disk_encryption_set_id` (`str`) - The ID of the Disk Encryption Set which should be used to encrypt this Data Disk. * `disk_size_gb` (`float`) - The size of the Data Disk which should be created. * `lun` (`float`) - The Logical Unit Number of the Data Disk, which must be unique within the Virtual Machine. * `storage_account_type` (`str`) - The Type of Storage Account which should back this Data Disk. Possible values include `Standard_LRS`, `StandardSSD_LRS`, `Premium_LRS` and `UltraSSD_LRS`. * `write_accelerator_enabled` (`bool`) - Should Write Accelerator be enabled for this Data Disk? Defaults to `false`. """ do_not_run_extensions_on_overprovisioned_machines: pulumi.Output[bool] """ Should Virtual Machine Extensions be run on Overprovisioned Virtual Machines in the Scale Set? Defaults to `false`. """ enable_automatic_updates: pulumi.Output[bool] """ Are automatic updates enabled for this Virtual Machine? Defaults to `true`. """ eviction_policy: pulumi.Output[str] """ The Policy which should be used Virtual Machines are Evicted from the Scale Set. Changing this forces a new resource to be created. """ health_probe_id: pulumi.Output[str] """ The ID of a Load Balancer Probe which should be used to determine the health of an instance. Changing this forces a new resource to be created. This is Required and can only be specified when `upgrade_mode` is set to `Automatic` or `Rolling`. """ identity: pulumi.Output[dict] """ A `identity` block as defined below. * `identityIds` (`list`) - A list of User Managed Identity ID's which should be assigned to the Windows Virtual Machine Scale Set. * `principal_id` (`str`) - The ID of the System Managed Service Principal. * `type` (`str`) - The type of Managed Identity which should be assigned to the Windows Virtual Machine Scale Set. 
Possible values are `SystemAssigned`, `UserAssigned` and `SystemAssigned, UserAssigned`. """ instances: pulumi.Output[float] """ The number of Virtual Machines in the Scale Set. """ license_type: pulumi.Output[str] """ Specifies the type of on-premise license (also known as [Azure Hybrid Use Benefit](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-windows-hybrid-use-benefit-licensing)) which should be used for this Virtual Machine Scale Set. Possible values are `None`, `Windows_Client` and `Windows_Server`. Changing this forces a new resource to be created. """ location: pulumi.Output[str] """ The Azure location where the Windows Virtual Machine Scale Set should exist. Changing this forces a new resource to be created. """ max_bid_price: pulumi.Output[float] """ The maximum price you're willing to pay for each Virtual Machine in this Scale Set, in US Dollars; which must be greater than the current spot price. If this bid price falls below the current spot price the Virtual Machines in the Scale Set will be evicted using the `eviction_policy`. Defaults to `-1`, which means that each Virtual Machine in the Scale Set should not be evicted for price reasons. """ name: pulumi.Output[str] """ The name of the Windows Virtual Machine Scale Set. Changing this forces a new resource to be created. """ network_interfaces: pulumi.Output[list] """ One or more `network_interface` blocks as defined below. * `dns_servers` (`list`) - A list of IP Addresses of DNS Servers which should be assigned to the Network Interface. * `enable_accelerated_networking` (`bool`) - Does this Network Interface support Accelerated Networking? Defaults to `false`. * `enable_ip_forwarding` (`bool`) - Does this Network Interface support IP Forwarding? Defaults to `false`. * `ip_configurations` (`list`) - One or more `ip_configuration` blocks as defined above. 
* `applicationGatewayBackendAddressPoolIds` (`list`) - A list of Backend Address Pools ID's from a Application Gateway which this Virtual Machine Scale Set should be connected to. * `applicationSecurityGroupIds` (`list`) - A list of Application Security Group ID's which this Virtual Machine Scale Set should be connected to. * `loadBalancerBackendAddressPoolIds` (`list`) - A list of Backend Address Pools ID's from a Load Balancer which this Virtual Machine Scale Set should be connected to. * `loadBalancerInboundNatRulesIds` (`list`) - A list of NAT Rule ID's from a Load Balancer which this Virtual Machine Scale Set should be connected to. * `name` (`str`) - The Name which should be used for this IP Configuration. * `primary` (`bool`) - Is this the Primary IP Configuration for this Network Interface? Defaults to `false`. * `public_ip_addresses` (`list`) - A `public_ip_address` block as defined below. * `domain_name_label` (`str`) - The Prefix which should be used for the Domain Name Label for each Virtual Machine Instance. Azure concatenates the Domain Name Label and Virtual Machine Index to create a unique Domain Name Label for each Virtual Machine. * `idle_timeout_in_minutes` (`float`) - The Idle Timeout in Minutes for the Public IP Address. Possible values are in the range `4` to `32`. * `ipTags` (`list`) - One or more `ip_tag` blocks as defined above. * `tag` (`str`) - The IP Tag associated with the Public IP, such as `SQL` or `Storage`. * `type` (`str`) - The Type of IP Tag, such as `FirstPartyUsage`. * `name` (`str`) - The Name of the Public IP Address Configuration. * `public_ip_prefix_id` (`str`) - The ID of the Public IP Address Prefix from where Public IP Addresses should be allocated. Changing this forces a new resource to be created. * `subnet_id` (`str`) - The ID of the Subnet which this IP Configuration should be connected to. * `version` (`str`) - The Internet Protocol Version which should be used for this IP Configuration. 
Possible values are `IPv4` and `IPv6`. Defaults to `IPv4`. * `name` (`str`) - The Name which should be used for this Network Interface. Changing this forces a new resource to be created. * `network_security_group_id` (`str`) - The ID of a Network Security Group which should be assigned to this Network Interface. * `primary` (`bool`) - Is this the Primary IP Configuration? """ os_disk: pulumi.Output[dict] """ An `os_disk` block as defined below. * `caching` (`str`) - The Type of Caching which should be used for the Internal OS Disk. Possible values are `None`, `ReadOnly` and `ReadWrite`. * `diffDiskSettings` (`dict`) - A `diff_disk_settings` block as defined above. Changing this forces a new resource to be created. * `option` (`str`) * `disk_encryption_set_id` (`str`) - The ID of the Disk Encryption Set which should be used to encrypt this OS Disk. * `disk_size_gb` (`float`) - The Size of the Internal OS Disk in GB, if you wish to vary from the size used in the image this Virtual Machine Scale Set is sourced from. * `storage_account_type` (`str`) - The Type of Storage Account which should back this the Internal OS Disk. Possible values include `Standard_LRS`, `StandardSSD_LRS` and `Premium_LRS`. * `write_accelerator_enabled` (`bool`) - Should Write Accelerator be Enabled for this OS Disk? Defaults to `false`. """ overprovision: pulumi.Output[bool] """ Should Azure over-provision Virtual Machines in this Scale Set? This means that multiple Virtual Machines will be provisioned and Azure will keep the instances which become available first - which improves provisioning success rates and improves deployment time. You're not billed for these over-provisioned VM's and they don't count towards the Subscription Quota. Defaults to `false`. """ plan: pulumi.Output[dict] priority: pulumi.Output[str] """ The Priority of this Virtual Machine Scale Set. Possible values are `Regular` and `Spot`. Defaults to `Regular`. Changing this value forces a new resource. 
""" provision_vm_agent: pulumi.Output[bool] """ Should the Azure VM Agent be provisioned on each Virtual Machine in the Scale Set? Defaults to `true`. Changing this value forces a new resource to be created. """ proximity_placement_group_id: pulumi.Output[str] """ The ID of the Proximity Placement Group in which the Virtual Machine Scale Set should be assigned to. Changing this forces a new resource to be created. """ resource_group_name: pulumi.Output[str] """ The name of the Resource Group in which the Windows Virtual Machine Scale Set should be exist. Changing this forces a new resource to be created. """ rolling_upgrade_policy: pulumi.Output[dict] """ A `rolling_upgrade_policy` block as defined below. This is Required and can only be specified when `upgrade_mode` is set to `Automatic` or `Rolling`. * `maxBatchInstancePercent` (`float`) - The maximum percent of total virtual machine instances that will be upgraded simultaneously by the rolling upgrade in one batch. As this is a maximum, unhealthy instances in previous or future batches can cause the percentage of instances in a batch to decrease to ensure higher reliability. Changing this forces a new resource to be created. * `maxUnhealthyInstancePercent` (`float`) - The maximum percentage of the total virtual machine instances in the scale set that can be simultaneously unhealthy, either as a result of being upgraded, or by being found in an unhealthy state by the virtual machine health checks before the rolling upgrade aborts. This constraint will be checked prior to starting any batch. Changing this forces a new resource to be created. * `maxUnhealthyUpgradedInstancePercent` (`float`) - The maximum percentage of upgraded virtual machine instances that can be found to be in an unhealthy state. This check will happen after each batch is upgraded. If this percentage is ever exceeded, the rolling update aborts. Changing this forces a new resource to be created. 
* `pauseTimeBetweenBatches` (`str`) - The wait time between completing the update for all virtual machines in one batch and starting the next batch. The time duration should be specified in ISO 8601 format. Changing this forces a new resource to be created. """ scale_in_policy: pulumi.Output[str] """ The scale-in policy rule that decides which virtual machines are chosen for removal when a Virtual Machine Scale Set is scaled in. Possible values for the scale-in policy rules are `Default`, `NewestVM` and `OldestVM`, defaults to `Default`. For more information about scale in policy, please [refer to this doc](https://docs.microsoft.com/en-us/azure/virtual-machine-scale-sets/virtual-machine-scale-sets-scale-in-policy). """ secrets: pulumi.Output[list] """ One or more `secret` blocks as defined below. * `certificates` (`list`) - One or more `certificate` blocks as defined above. * `store` (`str`) - The certificate store on the Virtual Machine where the certificate should be added. * `url` (`str`) - The Secret URL of a Key Vault Certificate. * `key_vault_id` (`str`) - The ID of the Key Vault from which all Secrets should be sourced. """ single_placement_group: pulumi.Output[bool] """ Should this Virtual Machine Scale Set be limited to a Single Placement Group, which means the number of instances will be capped at 100 Virtual Machines. Defaults to `true`. """ sku: pulumi.Output[str] """ The Virtual Machine SKU for the Scale Set, such as `Standard_F2`. """ source_image_id: pulumi.Output[str] """ The ID of an Image which each Virtual Machine in this Scale Set should be based on. """ source_image_reference: pulumi.Output[dict] """ A `source_image_reference` block as defined below. * `offer` (`str`) - Specifies the offer of the image used to create the virtual machines. * `publisher` (`str`) - Specifies the publisher of the image used to create the virtual machines. * `sku` (`str`) - Specifies the SKU of the image used to create the virtual machines. 
* `version` (`str`) - Specifies the version of the image used to create the virtual machines. """ tags: pulumi.Output[dict] """ A mapping of tags which should be assigned to this Virtual Machine Scale Set. """ terminate_notification: pulumi.Output[dict] """ A `terminate_notification` block as defined below. * `enabled` (`bool`) - Should the terminate notification be enabled on this Virtual Machine Scale Set? Defaults to `false`. * `timeout` (`str`) - Length of time (in minutes, between 5 and 15) a notification to be sent to the VM on the instance metadata server till the VM gets deleted. The time duration should be specified in ISO 8601 format. """ timezone: pulumi.Output[str] """ Specifies the time zone of the virtual machine, [the possible values are defined here](https://jackstromberg.com/2017/01/list-of-time-zones-consumed-by-azure/). """ unique_id: pulumi.Output[str] """ The Unique ID for this Windows Virtual Machine Scale Set. """ upgrade_mode: pulumi.Output[str] """ Specifies how Upgrades (e.g. changing the Image/SKU) should be performed to Virtual Machine Instances. Possible values are `Automatic`, `Manual` and `Rolling`. Defaults to `Manual`. """ winrm_listeners: pulumi.Output[list] """ One or more `winrm_listener` blocks as defined below. * `certificateUrl` (`str`) - The Secret URL of a Key Vault Certificate, which must be specified when `protocol` is set to `Https`. * `protocol` (`str`) - The Protocol of the WinRM Listener. Possible values are `Http` and `Https`. """ zone_balance: pulumi.Output[bool] """ Should the Virtual Machines in this Scale Set be strictly evenly distributed across Availability Zones? Defaults to `false`. Changing this forces a new resource to be created. """ zones: pulumi.Output[list] """ A list of Availability Zones in which the Virtual Machines in this Scale Set should be created in. Changing this forces a new resource to be created. 
""" def __init__(__self__, resource_name, opts=None, additional_capabilities=None, additional_unattend_contents=None, admin_password=None, admin_username=None, automatic_instance_repair=None, automatic_os_upgrade_policy=None, boot_diagnostics=None, computer_name_prefix=None, custom_data=None, data_disks=None, do_not_run_extensions_on_overprovisioned_machines=None, enable_automatic_updates=None, eviction_policy=None, health_probe_id=None, identity=None, instances=None, license_type=None, location=None, max_bid_price=None, name=None, network_interfaces=None, os_disk=None, overprovision=None, plan=None, priority=None, provision_vm_agent=None, proximity_placement_group_id=None, resource_group_name=None, rolling_upgrade_policy=None, scale_in_policy=None, secrets=None, single_placement_group=None, sku=None, source_image_id=None, source_image_reference=None, tags=None, terminate_notification=None, timezone=None, upgrade_mode=None, winrm_listeners=None, zone_balance=None, zones=None, __props__=None, __name__=None, __opts__=None): """ Manages a Windows Virtual Machine Scale Set. ## Disclaimers > **Note** This provider will automatically update & reimage the nodes in the Scale Set (if Required) during an Update - this behaviour can be configured using the `features` configuration within the Provider configuration block. > **Note:** This resource does not support Unmanaged Disks. 
If you need to use Unmanaged Disks you can continue to use the `compute.ScaleSet` resource instead ## Example Usage ```python import pulumi import pulumi_azure as azure example_resource_group = azure.core.ResourceGroup("exampleResourceGroup", location="West Europe") example_virtual_network = azure.network.VirtualNetwork("exampleVirtualNetwork", resource_group_name=example_resource_group.name, location=example_resource_group.location, address_spaces=["10.0.0.0/16"]) internal = azure.network.Subnet("internal", resource_group_name=example_resource_group.name, virtual_network_name=example_virtual_network.name, address_prefix="10.0.2.0/24") example_windows_virtual_machine_scale_set = azure.compute.WindowsVirtualMachineScaleSet("exampleWindowsVirtualMachineScaleSet", resource_group_name=example_resource_group.name, location=example_resource_group.location, sku="Standard_F2", instances=1, admin_password="P@55w0rd1234!", admin_username="adminuser", source_image_reference={ "publisher": "MicrosoftWindowsServer", "offer": "WindowsServer", "sku": "2016-Datacenter-Server-Core", "version": "latest", }, os_disk={ "storageAccountType": "Standard_LRS", "caching": "ReadWrite", }, network_interface=[{ "name": "example", "primary": True, "ip_configuration": [{ "name": "internal", "primary": True, "subnetId": internal.id, }], }]) ``` :param str resource_name: The name of the resource. :param pulumi.ResourceOptions opts: Options for the resource. :param pulumi.Input[dict] additional_capabilities: A `additional_capabilities` block as defined below. :param pulumi.Input[list] additional_unattend_contents: One or more `additional_unattend_content` blocks as defined below. :param pulumi.Input[str] admin_password: The Password which should be used for the local-administrator on this Virtual Machine. Changing this forces a new resource to be created. :param pulumi.Input[str] admin_username: The username of the local administrator on each Virtual Machine Scale Set instance. 
Changing this forces a new resource to be created. :param pulumi.Input[dict] automatic_instance_repair: A `automatic_instance_repair` block as defined below. To enable the automatic instance repair, this Virtual Machine Scale Set must have a valid `health_probe_id` or an [Application Health Extension](https://docs.microsoft.com/en-us/azure/virtual-machine-scale-sets/virtual-machine-scale-sets-health-extension). :param pulumi.Input[dict] automatic_os_upgrade_policy: A `automatic_os_upgrade_policy` block as defined below. This is Required and can only be specified when `upgrade_mode` is set to `Automatic`. :param pulumi.Input[dict] boot_diagnostics: A `boot_diagnostics` block as defined below. :param pulumi.Input[str] computer_name_prefix: The prefix which should be used for the name of the Virtual Machines in this Scale Set. If unspecified this defaults to the value for the `name` field. If the value of the `name` field is not a valid `computer_name_prefix`, then you must specify `computer_name_prefix`. :param pulumi.Input[str] custom_data: The Base64-Encoded Custom Data which should be used for this Virtual Machine Scale Set. :param pulumi.Input[list] data_disks: One or more `data_disk` blocks as defined below. :param pulumi.Input[bool] do_not_run_extensions_on_overprovisioned_machines: Should Virtual Machine Extensions be run on Overprovisioned Virtual Machines in the Scale Set? Defaults to `false`. :param pulumi.Input[bool] enable_automatic_updates: Are automatic updates enabled for this Virtual Machine? Defaults to `true`. :param pulumi.Input[str] eviction_policy: The Policy which should be used Virtual Machines are Evicted from the Scale Set. Changing this forces a new resource to be created. :param pulumi.Input[str] health_probe_id: The ID of a Load Balancer Probe which should be used to determine the health of an instance. Changing this forces a new resource to be created. 
This is Required and can only be specified when `upgrade_mode` is set to `Automatic` or `Rolling`. :param pulumi.Input[dict] identity: A `identity` block as defined below. :param pulumi.Input[float] instances: The number of Virtual Machines in the Scale Set. :param pulumi.Input[str] license_type: Specifies the type of on-premise license (also known as [Azure Hybrid Use Benefit](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-windows-hybrid-use-benefit-licensing)) which should be used for this Virtual Machine Scale Set. Possible values are `None`, `Windows_Client` and `Windows_Server`. Changing this forces a new resource to be created. :param pulumi.Input[str] location: The Azure location where the Windows Virtual Machine Scale Set should exist. Changing this forces a new resource to be created. :param pulumi.Input[float] max_bid_price: The maximum price you're willing to pay for each Virtual Machine in this Scale Set, in US Dollars; which must be greater than the current spot price. If this bid price falls below the current spot price the Virtual Machines in the Scale Set will be evicted using the `eviction_policy`. Defaults to `-1`, which means that each Virtual Machine in the Scale Set should not be evicted for price reasons. :param pulumi.Input[str] name: The name of the Windows Virtual Machine Scale Set. Changing this forces a new resource to be created. :param pulumi.Input[list] network_interfaces: One or more `network_interface` blocks as defined below. :param pulumi.Input[dict] os_disk: An `os_disk` block as defined below. :param pulumi.Input[bool] overprovision: Should Azure over-provision Virtual Machines in this Scale Set? This means that multiple Virtual Machines will be provisioned and Azure will keep the instances which become available first - which improves provisioning success rates and improves deployment time. You're not billed for these over-provisioned VM's and they don't count towards the Subscription Quota. 
Defaults to `false`. :param pulumi.Input[str] priority: The Priority of this Virtual Machine Scale Set. Possible values are `Regular` and `Spot`. Defaults to `Regular`. Changing this value forces a new resource. :param pulumi.Input[bool] provision_vm_agent: Should the Azure VM Agent be provisioned on each Virtual Machine in the Scale Set? Defaults to `true`. Changing this value forces a new resource to be created. :param pulumi.Input[str] proximity_placement_group_id: The ID of the Proximity Placement Group in which the Virtual Machine Scale Set should be assigned to. Changing this forces a new resource to be created. :param pulumi.Input[str] resource_group_name: The name of the Resource Group in which the Windows Virtual Machine Scale Set should be exist. Changing this forces a new resource to be created. :param pulumi.Input[dict] rolling_upgrade_policy: A `rolling_upgrade_policy` block as defined below. This is Required and can only be specified when `upgrade_mode` is set to `Automatic` or `Rolling`. :param pulumi.Input[str] scale_in_policy: The scale-in policy rule that decides which virtual machines are chosen for removal when a Virtual Machine Scale Set is scaled in. Possible values for the scale-in policy rules are `Default`, `NewestVM` and `OldestVM`, defaults to `Default`. For more information about scale in policy, please [refer to this doc](https://docs.microsoft.com/en-us/azure/virtual-machine-scale-sets/virtual-machine-scale-sets-scale-in-policy). :param pulumi.Input[list] secrets: One or more `secret` blocks as defined below. :param pulumi.Input[bool] single_placement_group: Should this Virtual Machine Scale Set be limited to a Single Placement Group, which means the number of instances will be capped at 100 Virtual Machines. Defaults to `true`. :param pulumi.Input[str] sku: The Virtual Machine SKU for the Scale Set, such as `Standard_F2`. 
:param pulumi.Input[str] source_image_id: The ID of an Image which each Virtual Machine in this Scale Set should be based on. :param pulumi.Input[dict] source_image_reference: A `source_image_reference` block as defined below. :param pulumi.Input[dict] tags: A mapping of tags which should be assigned to this Virtual Machine Scale Set. :param pulumi.Input[dict] terminate_notification: A `terminate_notification` block as defined below. :param pulumi.Input[str] timezone: Specifies the time zone of the virtual machine, [the possible values are defined here](https://jackstromberg.com/2017/01/list-of-time-zones-consumed-by-azure/). :param pulumi.Input[str] upgrade_mode: Specifies how Upgrades (e.g. changing the Image/SKU) should be performed to Virtual Machine Instances. Possible values are `Automatic`, `Manual` and `Rolling`. Defaults to `Manual`. :param pulumi.Input[list] winrm_listeners: One or more `winrm_listener` blocks as defined below. :param pulumi.Input[bool] zone_balance: Should the Virtual Machines in this Scale Set be strictly evenly distributed across Availability Zones? Defaults to `false`. Changing this forces a new resource to be created. :param pulumi.Input[list] zones: A list of Availability Zones in which the Virtual Machines in this Scale Set should be created in. Changing this forces a new resource to be created. The **additional_capabilities** object supports the following: * `ultraSsdEnabled` (`pulumi.Input[bool]`) - Should the capacity to enable Data Disks of the `UltraSSD_LRS` storage account type be supported on this Virtual Machine Scale Set? Defaults to `false`. Changing this forces a new resource to be created. The **additional_unattend_contents** object supports the following: * `content` (`pulumi.Input[str]`) - The XML formatted content that is added to the unattend.xml file for the specified path and component. Changing this forces a new resource to be created. 
* `setting` (`pulumi.Input[str]`) - The name of the setting to which the content applies. Possible values are `AutoLogon` and `FirstLogonCommands`. Changing this forces a new resource to be created. The **automatic_instance_repair** object supports the following: * `enabled` (`pulumi.Input[bool]`) - Should the automatic instance repair be enabled on this Virtual Machine Scale Set? * `gracePeriod` (`pulumi.Input[str]`) - Amount of time (in minutes, between 30 and 90, defaults to 30 minutes) for which automatic repairs will be delayed. The grace period starts right after the VM is found unhealthy. The time duration should be specified in ISO 8601 format. The **automatic_os_upgrade_policy** object supports the following: * `disableAutomaticRollback` (`pulumi.Input[bool]`) - Should automatic rollbacks be disabled? Changing this forces a new resource to be created. * `enableAutomaticOsUpgrade` (`pulumi.Input[bool]`) - Should OS Upgrades automatically be applied to Scale Set instances in a rolling fashion when a newer version of the OS Image becomes available? Changing this forces a new resource to be created. The **boot_diagnostics** object supports the following: * `storageAccountUri` (`pulumi.Input[str]`) - The Primary/Secondary Endpoint for the Azure Storage Account which should be used to store Boot Diagnostics, including Console Output and Screenshots from the Hypervisor. The **data_disks** object supports the following: * `caching` (`pulumi.Input[str]`) - The type of Caching which should be used for this Data Disk. Possible values are `None`, `ReadOnly` and `ReadWrite`. * `create_option` (`pulumi.Input[str]`) - The create option which should be used for this Data Disk. Possible values are `Empty` and `FromImage`. Defaults to `Empty`. (`FromImage` should only be used if the source image includes data disks). * `disk_encryption_set_id` (`pulumi.Input[str]`) - The ID of the Disk Encryption Set which should be used to encrypt this Data Disk. 
* `disk_size_gb` (`pulumi.Input[float]`) - The size of the Data Disk which should be created. * `lun` (`pulumi.Input[float]`) - The Logical Unit Number of the Data Disk, which must be unique within the Virtual Machine. * `storage_account_type` (`pulumi.Input[str]`) - The Type of Storage Account which should back this Data Disk. Possible values include `Standard_LRS`, `StandardSSD_LRS`, `Premium_LRS` and `UltraSSD_LRS`. * `write_accelerator_enabled` (`pulumi.Input[bool]`) - Should Write Accelerator be enabled for this Data Disk? Defaults to `false`. The **identity** object supports the following: * `identityIds` (`pulumi.Input[list]`) - A list of User Managed Identity ID's which should be assigned to the Windows Virtual Machine Scale Set. * `principal_id` (`pulumi.Input[str]`) - The ID of the System Managed Service Principal. * `type` (`pulumi.Input[str]`) - The type of Managed Identity which should be assigned to the Windows Virtual Machine Scale Set. Possible values are `SystemAssigned`, `UserAssigned` and `SystemAssigned, UserAssigned`. The **network_interfaces** object supports the following: * `dns_servers` (`pulumi.Input[list]`) - A list of IP Addresses of DNS Servers which should be assigned to the Network Interface. * `enable_accelerated_networking` (`pulumi.Input[bool]`) - Does this Network Interface support Accelerated Networking? Defaults to `false`. * `enable_ip_forwarding` (`pulumi.Input[bool]`) - Does this Network Interface support IP Forwarding? Defaults to `false`. * `ip_configurations` (`pulumi.Input[list]`) - One or more `ip_configuration` blocks as defined above. * `applicationGatewayBackendAddressPoolIds` (`pulumi.Input[list]`) - A list of Backend Address Pools ID's from a Application Gateway which this Virtual Machine Scale Set should be connected to. * `applicationSecurityGroupIds` (`pulumi.Input[list]`) - A list of Application Security Group ID's which this Virtual Machine Scale Set should be connected to. 
* `loadBalancerBackendAddressPoolIds` (`pulumi.Input[list]`) - A list of Backend Address Pools ID's from a Load Balancer which this Virtual Machine Scale Set should be connected to. * `loadBalancerInboundNatRulesIds` (`pulumi.Input[list]`) - A list of NAT Rule ID's from a Load Balancer which this Virtual Machine Scale Set should be connected to. * `name` (`pulumi.Input[str]`) - The Name which should be used for this IP Configuration. * `primary` (`pulumi.Input[bool]`) - Is this the Primary IP Configuration for this Network Interface? Defaults to `false`. * `public_ip_addresses` (`pulumi.Input[list]`) - A `public_ip_address` block as defined below. * `domain_name_label` (`pulumi.Input[str]`) - The Prefix which should be used for the Domain Name Label for each Virtual Machine Instance. Azure concatenates the Domain Name Label and Virtual Machine Index to create a unique Domain Name Label for each Virtual Machine. * `idle_timeout_in_minutes` (`pulumi.Input[float]`) - The Idle Timeout in Minutes for the Public IP Address. Possible values are in the range `4` to `32`. * `ipTags` (`pulumi.Input[list]`) - One or more `ip_tag` blocks as defined above. * `tag` (`pulumi.Input[str]`) - The IP Tag associated with the Public IP, such as `SQL` or `Storage`. * `type` (`pulumi.Input[str]`) - The Type of IP Tag, such as `FirstPartyUsage`. * `name` (`pulumi.Input[str]`) - The Name of the Public IP Address Configuration. * `public_ip_prefix_id` (`pulumi.Input[str]`) - The ID of the Public IP Address Prefix from where Public IP Addresses should be allocated. Changing this forces a new resource to be created. * `subnet_id` (`pulumi.Input[str]`) - The ID of the Subnet which this IP Configuration should be connected to. * `version` (`pulumi.Input[str]`) - The Internet Protocol Version which should be used for this IP Configuration. Possible values are `IPv4` and `IPv6`. Defaults to `IPv4`. * `name` (`pulumi.Input[str]`) - The Name which should be used for this Network Interface. 
Changing this forces a new resource to be created. * `network_security_group_id` (`pulumi.Input[str]`) - The ID of a Network Security Group which should be assigned to this Network Interface. * `primary` (`pulumi.Input[bool]`) - Is this the Primary IP Configuration? The **os_disk** object supports the following: * `caching` (`pulumi.Input[str]`) - The Type of Caching which should be used for the Internal OS Disk. Possible values are `None`, `ReadOnly` and `ReadWrite`. * `diffDiskSettings` (`pulumi.Input[dict]`) - A `diff_disk_settings` block as defined above. Changing this forces a new resource to be created. * `option` (`pulumi.Input[str]`) * `disk_encryption_set_id` (`pulumi.Input[str]`) - The ID of the Disk Encryption Set which should be used to encrypt this OS Disk. * `disk_size_gb` (`pulumi.Input[float]`) - The Size of the Internal OS Disk in GB, if you wish to vary from the size used in the image this Virtual Machine Scale Set is sourced from. * `storage_account_type` (`pulumi.Input[str]`) - The Type of Storage Account which should back this the Internal OS Disk. Possible values include `Standard_LRS`, `StandardSSD_LRS` and `Premium_LRS`. * `write_accelerator_enabled` (`pulumi.Input[bool]`) - Should Write Accelerator be Enabled for this OS Disk? Defaults to `false`. The **plan** object supports the following: * `name` (`pulumi.Input[str]`) - The name of the Windows Virtual Machine Scale Set. Changing this forces a new resource to be created. * `product` (`pulumi.Input[str]`) * `publisher` (`pulumi.Input[str]`) - Specifies the publisher of the image used to create the virtual machines. The **rolling_upgrade_policy** object supports the following: * `maxBatchInstancePercent` (`pulumi.Input[float]`) - The maximum percent of total virtual machine instances that will be upgraded simultaneously by the rolling upgrade in one batch. 
As this is a maximum, unhealthy instances in previous or future batches can cause the percentage of instances in a batch to decrease to ensure higher reliability. Changing this forces a new resource to be created. * `maxUnhealthyInstancePercent` (`pulumi.Input[float]`) - The maximum percentage of the total virtual machine instances in the scale set that can be simultaneously unhealthy, either as a result of being upgraded, or by being found in an unhealthy state by the virtual machine health checks before the rolling upgrade aborts. This constraint will be checked prior to starting any batch. Changing this forces a new resource to be created. * `maxUnhealthyUpgradedInstancePercent` (`pulumi.Input[float]`) - The maximum percentage of upgraded virtual machine instances that can be found to be in an unhealthy state. This check will happen after each batch is upgraded. If this percentage is ever exceeded, the rolling update aborts. Changing this forces a new resource to be created. * `pauseTimeBetweenBatches` (`pulumi.Input[str]`) - The wait time between completing the update for all virtual machines in one batch and starting the next batch. The time duration should be specified in ISO 8601 format. Changing this forces a new resource to be created. The **secrets** object supports the following: * `certificates` (`pulumi.Input[list]`) - One or more `certificate` blocks as defined above. * `store` (`pulumi.Input[str]`) - The certificate store on the Virtual Machine where the certificate should be added. * `url` (`pulumi.Input[str]`) - The Secret URL of a Key Vault Certificate. * `key_vault_id` (`pulumi.Input[str]`) - The ID of the Key Vault from which all Secrets should be sourced. The **source_image_reference** object supports the following: * `offer` (`pulumi.Input[str]`) - Specifies the offer of the image used to create the virtual machines. * `publisher` (`pulumi.Input[str]`) - Specifies the publisher of the image used to create the virtual machines. 
* `sku` (`pulumi.Input[str]`) - Specifies the SKU of the image used to create the virtual machines. * `version` (`pulumi.Input[str]`) - Specifies the version of the image used to create the virtual machines. The **terminate_notification** object supports the following: * `enabled` (`pulumi.Input[bool]`) - Should the terminate notification be enabled on this Virtual Machine Scale Set? Defaults to `false`. * `timeout` (`pulumi.Input[str]`) - Length of time (in minutes, between 5 and 15) a notification to be sent to the VM on the instance metadata server till the VM gets deleted. The time duration should be specified in ISO 8601 format. The **winrm_listeners** object supports the following: * `certificateUrl` (`pulumi.Input[str]`) - The Secret URL of a Key Vault Certificate, which must be specified when `protocol` is set to `Https`. * `protocol` (`pulumi.Input[str]`) - The Protocol of the WinRM Listener. Possible values are `Http` and `Https`. """ if __name__ is not None: warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning) resource_name = __name__ if __opts__ is not None: warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning) opts = __opts__ if opts is None: opts = pulumi.ResourceOptions() if not isinstance(opts, pulumi.ResourceOptions): raise TypeError('Expected resource options to be a ResourceOptions instance') if opts.version is None: opts.version = utilities.get_version() if opts.id is None: if __props__ is not None: raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource') __props__ = dict() __props__['additional_capabilities'] = additional_capabilities __props__['additional_unattend_contents'] = additional_unattend_contents if admin_password is None: raise TypeError("Missing required property 'admin_password'") __props__['admin_password'] = admin_password if admin_username is None: raise TypeError("Missing required property 
'admin_username'") __props__['admin_username'] = admin_username __props__['automatic_instance_repair'] = automatic_instance_repair __props__['automatic_os_upgrade_policy'] = automatic_os_upgrade_policy __props__['boot_diagnostics'] = boot_diagnostics __props__['computer_name_prefix'] = computer_name_prefix __props__['custom_data'] = custom_data __props__['data_disks'] = data_disks __props__['do_not_run_extensions_on_overprovisioned_machines'] = do_not_run_extensions_on_overprovisioned_machines __props__['enable_automatic_updates'] = enable_automatic_updates __props__['eviction_policy'] = eviction_policy __props__['health_probe_id'] = health_probe_id __props__['identity'] = identity if instances is None: raise TypeError("Missing required property 'instances'") __props__['instances'] = instances __props__['license_type'] = license_type __props__['location'] = location __props__['max_bid_price'] = max_bid_price __props__['name'] = name if network_interfaces is None: raise TypeError("Missing required property 'network_interfaces'") __props__['network_interfaces'] = network_interfaces if os_disk is None: raise TypeError("Missing required property 'os_disk'") __props__['os_disk'] = os_disk __props__['overprovision'] = overprovision __props__['plan'] = plan __props__['priority'] = priority __props__['provision_vm_agent'] = provision_vm_agent __props__['proximity_placement_group_id'] = proximity_placement_group_id if resource_group_name is None: raise TypeError("Missing required property 'resource_group_name'") __props__['resource_group_name'] = resource_group_name __props__['rolling_upgrade_policy'] = rolling_upgrade_policy __props__['scale_in_policy'] = scale_in_policy __props__['secrets'] = secrets __props__['single_placement_group'] = single_placement_group if sku is None: raise TypeError("Missing required property 'sku'") __props__['sku'] = sku __props__['source_image_id'] = source_image_id __props__['source_image_reference'] = source_image_reference __props__['tags'] 
= tags __props__['terminate_notification'] = terminate_notification __props__['timezone'] = timezone __props__['upgrade_mode'] = upgrade_mode __props__['winrm_listeners'] = winrm_listeners __props__['zone_balance'] = zone_balance __props__['zones'] = zones __props__['unique_id'] = None super(WindowsVirtualMachineScaleSet, __self__).__init__( 'azure:compute/windowsVirtualMachineScaleSet:WindowsVirtualMachineScaleSet', resource_name, __props__, opts) @staticmethod def get(resource_name, id, opts=None, additional_capabilities=None, additional_unattend_contents=None, admin_password=None, admin_username=None, automatic_instance_repair=None, automatic_os_upgrade_policy=None, boot_diagnostics=None, computer_name_prefix=None, custom_data=None, data_disks=None, do_not_run_extensions_on_overprovisioned_machines=None, enable_automatic_updates=None, eviction_policy=None, health_probe_id=None, identity=None, instances=None, license_type=None, location=None, max_bid_price=None, name=None, network_interfaces=None, os_disk=None, overprovision=None, plan=None, priority=None, provision_vm_agent=None, proximity_placement_group_id=None, resource_group_name=None, rolling_upgrade_policy=None, scale_in_policy=None, secrets=None, single_placement_group=None, sku=None, source_image_id=None, source_image_reference=None, tags=None, terminate_notification=None, timezone=None, unique_id=None, upgrade_mode=None, winrm_listeners=None, zone_balance=None, zones=None): """ Get an existing WindowsVirtualMachineScaleSet resource's state with the given name, id, and optional extra properties used to qualify the lookup. :param str resource_name: The unique name of the resulting resource. :param str id: The unique provider ID of the resource to lookup. :param pulumi.ResourceOptions opts: Options for the resource. :param pulumi.Input[dict] additional_capabilities: A `additional_capabilities` block as defined below. 
:param pulumi.Input[list] additional_unattend_contents: One or more `additional_unattend_content` blocks as defined below. :param pulumi.Input[str] admin_password: The Password which should be used for the local-administrator on this Virtual Machine. Changing this forces a new resource to be created. :param pulumi.Input[str] admin_username: The username of the local administrator on each Virtual Machine Scale Set instance. Changing this forces a new resource to be created. :param pulumi.Input[dict] automatic_instance_repair: A `automatic_instance_repair` block as defined below. To enable the automatic instance repair, this Virtual Machine Scale Set must have a valid `health_probe_id` or an [Application Health Extension](https://docs.microsoft.com/en-us/azure/virtual-machine-scale-sets/virtual-machine-scale-sets-health-extension). :param pulumi.Input[dict] automatic_os_upgrade_policy: A `automatic_os_upgrade_policy` block as defined below. This is Required and can only be specified when `upgrade_mode` is set to `Automatic`. :param pulumi.Input[dict] boot_diagnostics: A `boot_diagnostics` block as defined below. :param pulumi.Input[str] computer_name_prefix: The prefix which should be used for the name of the Virtual Machines in this Scale Set. If unspecified this defaults to the value for the `name` field. If the value of the `name` field is not a valid `computer_name_prefix`, then you must specify `computer_name_prefix`. :param pulumi.Input[str] custom_data: The Base64-Encoded Custom Data which should be used for this Virtual Machine Scale Set. :param pulumi.Input[list] data_disks: One or more `data_disk` blocks as defined below. :param pulumi.Input[bool] do_not_run_extensions_on_overprovisioned_machines: Should Virtual Machine Extensions be run on Overprovisioned Virtual Machines in the Scale Set? Defaults to `false`. :param pulumi.Input[bool] enable_automatic_updates: Are automatic updates enabled for this Virtual Machine? Defaults to `true`. 
:param pulumi.Input[str] eviction_policy: The Policy which should be used Virtual Machines are Evicted from the Scale Set. Changing this forces a new resource to be created. :param pulumi.Input[str] health_probe_id: The ID of a Load Balancer Probe which should be used to determine the health of an instance. Changing this forces a new resource to be created. This is Required and can only be specified when `upgrade_mode` is set to `Automatic` or `Rolling`. :param pulumi.Input[dict] identity: A `identity` block as defined below. :param pulumi.Input[float] instances: The number of Virtual Machines in the Scale Set. :param pulumi.Input[str] license_type: Specifies the type of on-premise license (also known as [Azure Hybrid Use Benefit](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-windows-hybrid-use-benefit-licensing)) which should be used for this Virtual Machine Scale Set. Possible values are `None`, `Windows_Client` and `Windows_Server`. Changing this forces a new resource to be created. :param pulumi.Input[str] location: The Azure location where the Windows Virtual Machine Scale Set should exist. Changing this forces a new resource to be created. :param pulumi.Input[float] max_bid_price: The maximum price you're willing to pay for each Virtual Machine in this Scale Set, in US Dollars; which must be greater than the current spot price. If this bid price falls below the current spot price the Virtual Machines in the Scale Set will be evicted using the `eviction_policy`. Defaults to `-1`, which means that each Virtual Machine in the Scale Set should not be evicted for price reasons. :param pulumi.Input[str] name: The name of the Windows Virtual Machine Scale Set. Changing this forces a new resource to be created. :param pulumi.Input[list] network_interfaces: One or more `network_interface` blocks as defined below. :param pulumi.Input[dict] os_disk: An `os_disk` block as defined below. 
:param pulumi.Input[bool] overprovision: Should Azure over-provision Virtual Machines in this Scale Set? This means that multiple Virtual Machines will be provisioned and Azure will keep the instances which become available first - which improves provisioning success rates and improves deployment time. You're not billed for these over-provisioned VM's and they don't count towards the Subscription Quota. Defaults to `false`. :param pulumi.Input[str] priority: The Priority of this Virtual Machine Scale Set. Possible values are `Regular` and `Spot`. Defaults to `Regular`. Changing this value forces a new resource. :param pulumi.Input[bool] provision_vm_agent: Should the Azure VM Agent be provisioned on each Virtual Machine in the Scale Set? Defaults to `true`. Changing this value forces a new resource to be created. :param pulumi.Input[str] proximity_placement_group_id: The ID of the Proximity Placement Group in which the Virtual Machine Scale Set should be assigned to. Changing this forces a new resource to be created. :param pulumi.Input[str] resource_group_name: The name of the Resource Group in which the Windows Virtual Machine Scale Set should be exist. Changing this forces a new resource to be created. :param pulumi.Input[dict] rolling_upgrade_policy: A `rolling_upgrade_policy` block as defined below. This is Required and can only be specified when `upgrade_mode` is set to `Automatic` or `Rolling`. :param pulumi.Input[str] scale_in_policy: The scale-in policy rule that decides which virtual machines are chosen for removal when a Virtual Machine Scale Set is scaled in. Possible values for the scale-in policy rules are `Default`, `NewestVM` and `OldestVM`, defaults to `Default`. For more information about scale in policy, please [refer to this doc](https://docs.microsoft.com/en-us/azure/virtual-machine-scale-sets/virtual-machine-scale-sets-scale-in-policy). :param pulumi.Input[list] secrets: One or more `secret` blocks as defined below. 
:param pulumi.Input[bool] single_placement_group: Should this Virtual Machine Scale Set be limited to a Single Placement Group, which means the number of instances will be capped at 100 Virtual Machines. Defaults to `true`. :param pulumi.Input[str] sku: The Virtual Machine SKU for the Scale Set, such as `Standard_F2`. :param pulumi.Input[str] source_image_id: The ID of an Image which each Virtual Machine in this Scale Set should be based on. :param pulumi.Input[dict] source_image_reference: A `source_image_reference` block as defined below. :param pulumi.Input[dict] tags: A mapping of tags which should be assigned to this Virtual Machine Scale Set. :param pulumi.Input[dict] terminate_notification: A `terminate_notification` block as defined below. :param pulumi.Input[str] timezone: Specifies the time zone of the virtual machine, [the possible values are defined here](https://jackstromberg.com/2017/01/list-of-time-zones-consumed-by-azure/). :param pulumi.Input[str] unique_id: The Unique ID for this Windows Virtual Machine Scale Set. :param pulumi.Input[str] upgrade_mode: Specifies how Upgrades (e.g. changing the Image/SKU) should be performed to Virtual Machine Instances. Possible values are `Automatic`, `Manual` and `Rolling`. Defaults to `Manual`. :param pulumi.Input[list] winrm_listeners: One or more `winrm_listener` blocks as defined below. :param pulumi.Input[bool] zone_balance: Should the Virtual Machines in this Scale Set be strictly evenly distributed across Availability Zones? Defaults to `false`. Changing this forces a new resource to be created. :param pulumi.Input[list] zones: A list of Availability Zones in which the Virtual Machines in this Scale Set should be created in. Changing this forces a new resource to be created. 
The **additional_capabilities** object supports the following: * `ultraSsdEnabled` (`pulumi.Input[bool]`) - Should the capacity to enable Data Disks of the `UltraSSD_LRS` storage account type be supported on this Virtual Machine Scale Set? Defaults to `false`. Changing this forces a new resource to be created. The **additional_unattend_contents** object supports the following: * `content` (`pulumi.Input[str]`) - The XML formatted content that is added to the unattend.xml file for the specified path and component. Changing this forces a new resource to be created. * `setting` (`pulumi.Input[str]`) - The name of the setting to which the content applies. Possible values are `AutoLogon` and `FirstLogonCommands`. Changing this forces a new resource to be created. The **automatic_instance_repair** object supports the following: * `enabled` (`pulumi.Input[bool]`) - Should the automatic instance repair be enabled on this Virtual Machine Scale Set? * `gracePeriod` (`pulumi.Input[str]`) - Amount of time (in minutes, between 30 and 90, defaults to 30 minutes) for which automatic repairs will be delayed. The grace period starts right after the VM is found unhealthy. The time duration should be specified in ISO 8601 format. The **automatic_os_upgrade_policy** object supports the following: * `disableAutomaticRollback` (`pulumi.Input[bool]`) - Should automatic rollbacks be disabled? Changing this forces a new resource to be created. * `enableAutomaticOsUpgrade` (`pulumi.Input[bool]`) - Should OS Upgrades automatically be applied to Scale Set instances in a rolling fashion when a newer version of the OS Image becomes available? Changing this forces a new resource to be created. The **boot_diagnostics** object supports the following: * `storageAccountUri` (`pulumi.Input[str]`) - The Primary/Secondary Endpoint for the Azure Storage Account which should be used to store Boot Diagnostics, including Console Output and Screenshots from the Hypervisor. 
The **data_disks** object supports the following: * `caching` (`pulumi.Input[str]`) - The type of Caching which should be used for this Data Disk. Possible values are `None`, `ReadOnly` and `ReadWrite`. * `create_option` (`pulumi.Input[str]`) - The create option which should be used for this Data Disk. Possible values are `Empty` and `FromImage`. Defaults to `Empty`. (`FromImage` should only be used if the source image includes data disks). * `disk_encryption_set_id` (`pulumi.Input[str]`) - The ID of the Disk Encryption Set which should be used to encrypt this Data Disk. * `disk_size_gb` (`pulumi.Input[float]`) - The size of the Data Disk which should be created. * `lun` (`pulumi.Input[float]`) - The Logical Unit Number of the Data Disk, which must be unique within the Virtual Machine. * `storage_account_type` (`pulumi.Input[str]`) - The Type of Storage Account which should back this Data Disk. Possible values include `Standard_LRS`, `StandardSSD_LRS`, `Premium_LRS` and `UltraSSD_LRS`. * `write_accelerator_enabled` (`pulumi.Input[bool]`) - Should Write Accelerator be enabled for this Data Disk? Defaults to `false`. The **identity** object supports the following: * `identityIds` (`pulumi.Input[list]`) - A list of User Managed Identity ID's which should be assigned to the Windows Virtual Machine Scale Set. * `principal_id` (`pulumi.Input[str]`) - The ID of the System Managed Service Principal. * `type` (`pulumi.Input[str]`) - The type of Managed Identity which should be assigned to the Windows Virtual Machine Scale Set. Possible values are `SystemAssigned`, `UserAssigned` and `SystemAssigned, UserAssigned`. The **network_interfaces** object supports the following: * `dns_servers` (`pulumi.Input[list]`) - A list of IP Addresses of DNS Servers which should be assigned to the Network Interface. * `enable_accelerated_networking` (`pulumi.Input[bool]`) - Does this Network Interface support Accelerated Networking? Defaults to `false`. 
* `enable_ip_forwarding` (`pulumi.Input[bool]`) - Does this Network Interface support IP Forwarding? Defaults to `false`. * `ip_configurations` (`pulumi.Input[list]`) - One or more `ip_configuration` blocks as defined above. * `applicationGatewayBackendAddressPoolIds` (`pulumi.Input[list]`) - A list of Backend Address Pools ID's from a Application Gateway which this Virtual Machine Scale Set should be connected to. * `applicationSecurityGroupIds` (`pulumi.Input[list]`) - A list of Application Security Group ID's which this Virtual Machine Scale Set should be connected to. * `loadBalancerBackendAddressPoolIds` (`pulumi.Input[list]`) - A list of Backend Address Pools ID's from a Load Balancer which this Virtual Machine Scale Set should be connected to. * `loadBalancerInboundNatRulesIds` (`pulumi.Input[list]`) - A list of NAT Rule ID's from a Load Balancer which this Virtual Machine Scale Set should be connected to. * `name` (`pulumi.Input[str]`) - The Name which should be used for this IP Configuration. * `primary` (`pulumi.Input[bool]`) - Is this the Primary IP Configuration for this Network Interface? Defaults to `false`. * `public_ip_addresses` (`pulumi.Input[list]`) - A `public_ip_address` block as defined below. * `domain_name_label` (`pulumi.Input[str]`) - The Prefix which should be used for the Domain Name Label for each Virtual Machine Instance. Azure concatenates the Domain Name Label and Virtual Machine Index to create a unique Domain Name Label for each Virtual Machine. * `idle_timeout_in_minutes` (`pulumi.Input[float]`) - The Idle Timeout in Minutes for the Public IP Address. Possible values are in the range `4` to `32`. * `ipTags` (`pulumi.Input[list]`) - One or more `ip_tag` blocks as defined above. * `tag` (`pulumi.Input[str]`) - The IP Tag associated with the Public IP, such as `SQL` or `Storage`. * `type` (`pulumi.Input[str]`) - The Type of IP Tag, such as `FirstPartyUsage`. 
* `name` (`pulumi.Input[str]`) - The Name of the Public IP Address Configuration. * `public_ip_prefix_id` (`pulumi.Input[str]`) - The ID of the Public IP Address Prefix from where Public IP Addresses should be allocated. Changing this forces a new resource to be created. * `subnet_id` (`pulumi.Input[str]`) - The ID of the Subnet which this IP Configuration should be connected to. * `version` (`pulumi.Input[str]`) - The Internet Protocol Version which should be used for this IP Configuration. Possible values are `IPv4` and `IPv6`. Defaults to `IPv4`. * `name` (`pulumi.Input[str]`) - The Name which should be used for this Network Interface. Changing this forces a new resource to be created. * `network_security_group_id` (`pulumi.Input[str]`) - The ID of a Network Security Group which should be assigned to this Network Interface. * `primary` (`pulumi.Input[bool]`) - Is this the Primary IP Configuration? The **os_disk** object supports the following: * `caching` (`pulumi.Input[str]`) - The Type of Caching which should be used for the Internal OS Disk. Possible values are `None`, `ReadOnly` and `ReadWrite`. * `diffDiskSettings` (`pulumi.Input[dict]`) - A `diff_disk_settings` block as defined above. Changing this forces a new resource to be created. * `option` (`pulumi.Input[str]`) * `disk_encryption_set_id` (`pulumi.Input[str]`) - The ID of the Disk Encryption Set which should be used to encrypt this OS Disk. * `disk_size_gb` (`pulumi.Input[float]`) - The Size of the Internal OS Disk in GB, if you wish to vary from the size used in the image this Virtual Machine Scale Set is sourced from. * `storage_account_type` (`pulumi.Input[str]`) - The Type of Storage Account which should back this the Internal OS Disk. Possible values include `Standard_LRS`, `StandardSSD_LRS` and `Premium_LRS`. * `write_accelerator_enabled` (`pulumi.Input[bool]`) - Should Write Accelerator be Enabled for this OS Disk? Defaults to `false`. 
The **plan** object supports the following: * `name` (`pulumi.Input[str]`) - The name of the Windows Virtual Machine Scale Set. Changing this forces a new resource to be created. * `product` (`pulumi.Input[str]`) * `publisher` (`pulumi.Input[str]`) - Specifies the publisher of the image used to create the virtual machines. The **rolling_upgrade_policy** object supports the following: * `maxBatchInstancePercent` (`pulumi.Input[float]`) - The maximum percent of total virtual machine instances that will be upgraded simultaneously by the rolling upgrade in one batch. As this is a maximum, unhealthy instances in previous or future batches can cause the percentage of instances in a batch to decrease to ensure higher reliability. Changing this forces a new resource to be created. * `maxUnhealthyInstancePercent` (`pulumi.Input[float]`) - The maximum percentage of the total virtual machine instances in the scale set that can be simultaneously unhealthy, either as a result of being upgraded, or by being found in an unhealthy state by the virtual machine health checks before the rolling upgrade aborts. This constraint will be checked prior to starting any batch. Changing this forces a new resource to be created. * `maxUnhealthyUpgradedInstancePercent` (`pulumi.Input[float]`) - The maximum percentage of upgraded virtual machine instances that can be found to be in an unhealthy state. This check will happen after each batch is upgraded. If this percentage is ever exceeded, the rolling update aborts. Changing this forces a new resource to be created. * `pauseTimeBetweenBatches` (`pulumi.Input[str]`) - The wait time between completing the update for all virtual machines in one batch and starting the next batch. The time duration should be specified in ISO 8601 format. Changing this forces a new resource to be created. The **secrets** object supports the following: * `certificates` (`pulumi.Input[list]`) - One or more `certificate` blocks as defined above. 
* `store` (`pulumi.Input[str]`) - The certificate store on the Virtual Machine where the certificate should be added. * `url` (`pulumi.Input[str]`) - The Secret URL of a Key Vault Certificate. * `key_vault_id` (`pulumi.Input[str]`) - The ID of the Key Vault from which all Secrets should be sourced. The **source_image_reference** object supports the following: * `offer` (`pulumi.Input[str]`) - Specifies the offer of the image used to create the virtual machines. * `publisher` (`pulumi.Input[str]`) - Specifies the publisher of the image used to create the virtual machines. * `sku` (`pulumi.Input[str]`) - Specifies the SKU of the image used to create the virtual machines. * `version` (`pulumi.Input[str]`) - Specifies the version of the image used to create the virtual machines. The **terminate_notification** object supports the following: * `enabled` (`pulumi.Input[bool]`) - Should the terminate notification be enabled on this Virtual Machine Scale Set? Defaults to `false`. * `timeout` (`pulumi.Input[str]`) - Length of time (in minutes, between 5 and 15) a notification to be sent to the VM on the instance metadata server till the VM gets deleted. The time duration should be specified in ISO 8601 format. The **winrm_listeners** object supports the following: * `certificateUrl` (`pulumi.Input[str]`) - The Secret URL of a Key Vault Certificate, which must be specified when `protocol` is set to `Https`. * `protocol` (`pulumi.Input[str]`) - The Protocol of the WinRM Listener. Possible values are `Http` and `Https`. 
""" opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id)) __props__ = dict() __props__["additional_capabilities"] = additional_capabilities __props__["additional_unattend_contents"] = additional_unattend_contents __props__["admin_password"] = admin_password __props__["admin_username"] = admin_username __props__["automatic_instance_repair"] = automatic_instance_repair __props__["automatic_os_upgrade_policy"] = automatic_os_upgrade_policy __props__["boot_diagnostics"] = boot_diagnostics __props__["computer_name_prefix"] = computer_name_prefix __props__["custom_data"] = custom_data __props__["data_disks"] = data_disks __props__["do_not_run_extensions_on_overprovisioned_machines"] = do_not_run_extensions_on_overprovisioned_machines __props__["enable_automatic_updates"] = enable_automatic_updates __props__["eviction_policy"] = eviction_policy __props__["health_probe_id"] = health_probe_id __props__["identity"] = identity __props__["instances"] = instances __props__["license_type"] = license_type __props__["location"] = location __props__["max_bid_price"] = max_bid_price __props__["name"] = name __props__["network_interfaces"] = network_interfaces __props__["os_disk"] = os_disk __props__["overprovision"] = overprovision __props__["plan"] = plan __props__["priority"] = priority __props__["provision_vm_agent"] = provision_vm_agent __props__["proximity_placement_group_id"] = proximity_placement_group_id __props__["resource_group_name"] = resource_group_name __props__["rolling_upgrade_policy"] = rolling_upgrade_policy __props__["scale_in_policy"] = scale_in_policy __props__["secrets"] = secrets __props__["single_placement_group"] = single_placement_group __props__["sku"] = sku __props__["source_image_id"] = source_image_id __props__["source_image_reference"] = source_image_reference __props__["tags"] = tags __props__["terminate_notification"] = terminate_notification __props__["timezone"] = timezone __props__["unique_id"] = unique_id 
__props__["upgrade_mode"] = upgrade_mode __props__["winrm_listeners"] = winrm_listeners __props__["zone_balance"] = zone_balance __props__["zones"] = zones return WindowsVirtualMachineScaleSet(resource_name, opts=opts, __props__=__props__) def translate_output_property(self, prop): return tables._CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop def translate_input_property(self, prop): return tables._SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
90.638217
1,037
0.723757
9,804
71,151
5.123011
0.05712
0.047525
0.03094
0.027595
0.928901
0.92028
0.915621
0.906045
0.899892
0.896866
0
0.002593
0.197791
71,151
784
1,038
90.753827
0.877361
0.575916
0
0.01105
1
0
0.166899
0.059183
0
0
0
0
0
1
0.022099
false
0.044199
0.033149
0.01105
0.314917
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
1
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
8
2941cda90dd30c3093da4519b119dfdef4e8008d
45
py
Python
core.py
Thanos983/ctor
137f2138cbb8fdd88ca8afd666d6734d96329cef
[ "MIT" ]
null
null
null
core.py
Thanos983/ctor
137f2138cbb8fdd88ca8afd666d6734d96329cef
[ "MIT" ]
null
null
null
core.py
Thanos983/ctor
137f2138cbb8fdd88ca8afd666d6734d96329cef
[ "MIT" ]
null
null
null
import requests class Core(object): pass
11.25
19
0.733333
6
45
5.5
1
0
0
0
0
0
0
0
0
0
0
0
0.2
45
4
20
11.25
0.916667
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.333333
0.333333
0
0.666667
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
1
0
1
0
0
7
2953f84503713a99df51509c627ef17a14fac7b1
3,778
py
Python
app/views.py
muneneee/flask
c00ecaaa5dc140e6be9e478df4336951b01f7658
[ "MIT" ]
null
null
null
app/views.py
muneneee/flask
c00ecaaa5dc140e6be9e478df4336951b01f7658
[ "MIT" ]
null
null
null
app/views.py
muneneee/flask
c00ecaaa5dc140e6be9e478df4336951b01f7658
[ "MIT" ]
null
null
null
from flask import render_template from app import app from .request import get_news from newsapi import NewsApiClient #views @app.route('/') def index(): ''' view page function that returns index page ''' newsapi = NewsApiClient(api_key = 'e7f56e932284432c89095aa5928310c7' ) topheadlines = newsapi.get_top_headlines(sources="fox-news") articles = topheadlines['articles'] desc = [] news = [] date = [] link = [] for i in range(len(articles)): myarticles = articles[i] news.append(myarticles['title']) desc.append(myarticles['description']) date.append(myarticles['publishedAt']) link.append(myarticles['url']) mylist = zip(news, desc, date,link) title = 'Home - Welcome to newshub' return render_template('index.html', title = title,context= mylist) @app.route('/cnn') def cnn(): ''' view page function that returns cnn page ''' newsapi = NewsApiClient(api_key = 'e7f56e932284432c89095aa5928310c7' ) topheadlines = newsapi.get_top_headlines(sources="cnn") articles = topheadlines['articles'] desc = [] news = [] date = [] link = [] for i in range(len(articles)): myarticles = articles[i] news.append(myarticles['title']) desc.append(myarticles['description']) date.append(myarticles['publishedAt']) link.append(myarticles['url']) mylist = zip(news, desc, date,link) title = 'Home - Welcome to newshub' return render_template('cnn.html', title = title,context= mylist) @app.route('/espn') def espn(): ''' view page function that returns espn page ''' newsapi = NewsApiClient(api_key = 'e7f56e932284432c89095aa5928310c7' ) topheadlines = newsapi.get_top_headlines(sources="espn") articles = topheadlines['articles'] desc = [] news = [] date = [] link = [] for i in range(len(articles)): myarticles = articles[i] news.append(myarticles['title']) desc.append(myarticles['description']) date.append(myarticles['publishedAt']) link.append(myarticles['url']) mylist = zip(news, desc, date,link) title = 'Home - Welcome to newshub' return render_template('espn.html', title = title,context= mylist) 
@app.route('/fortune') def fortune(): ''' view page function that returns fortune page ''' newsapi = NewsApiClient(api_key = 'e7f56e932284432c89095aa5928310c7' ) topheadlines = newsapi.get_top_headlines(sources="fortune") articles = topheadlines['articles'] desc = [] news = [] date = [] link =[] for i in range(len(articles)): myarticles = articles[i] news.append(myarticles['title']) desc.append(myarticles['description']) date.append(myarticles['publishedAt']) link.append(myarticles['url']) mylist = zip(news, desc, date,link) title = 'Home - Welcome to newshub' return render_template('fortune.html', title = title,context= mylist) @app.route('/ign') def ign(): ''' view page function that returns ign page ''' newsapi = NewsApiClient(api_key = 'e7f56e932284432c89095aa5928310c7' ) topheadlines = newsapi.get_top_headlines(sources="ign") articles = topheadlines['articles'] desc = [] news = [] date = [] link = [] for i in range(len(articles)): myarticles = articles[i] news.append(myarticles['title']) desc.append(myarticles['description']) date.append(myarticles['publishedAt']) link.append(myarticles['url']) mylist = zip(news, desc, date,link) title = 'Home - Welcome to newshub' return render_template('ign.html', title = title,context= mylist)
23.320988
74
0.634727
399
3,778
5.954887
0.140351
0.13468
0.03367
0.042088
0.901515
0.833333
0.833333
0.774411
0.774411
0.774411
0
0.043014
0.23081
3,778
161
75
23.465839
0.774604
0.057438
0
0.744681
0
0
0.163271
0.045911
0
0
0
0
0
1
0.053191
false
0
0.042553
0
0.148936
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
2985d69f18c0b147db06d52cf7f8029cc616ec76
261
py
Python
hapiclient/gallery/gallery_test.py
lkilcommons/client-python
7f8f895bd6b43aa12c4531e4498d1bab91c30691
[ "BSD-3-Clause" ]
null
null
null
hapiclient/gallery/gallery_test.py
lkilcommons/client-python
7f8f895bd6b43aa12c4531e4498d1bab91c30691
[ "BSD-3-Clause" ]
null
null
null
hapiclient/gallery/gallery_test.py
lkilcommons/client-python
7f8f895bd6b43aa12c4531e4498d1bab91c30691
[ "BSD-3-Clause" ]
null
null
null
if True: from hapiclient import gallery gallery('http://hapi-server.org/servers/TestData/hapi', 'dataset1', loglevel='debug') if False: from hapiclient import gallery gallery('http://hapi-server.org/servers/TestData/hapi','dataset1', 'vector')
32.625
89
0.720307
33
261
5.69697
0.515152
0.148936
0.212766
0.287234
0.829787
0.829787
0.829787
0.829787
0.829787
0.829787
0
0.008811
0.130268
261
7
90
37.285714
0.819383
0
0
0.333333
0
0
0.440613
0
0
0
0
0
0
1
0
true
0
0.333333
0
0.333333
0
0
0
0
null
0
1
1
1
1
1
1
1
1
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
11
4696f392b2685c7d3c14d440e6f8fa7b534e9b36
5,184
py
Python
tests/test_apigatewayv2/test_apigatewayv2_vpclinks.py
symroe/moto
4e106995af6f2820273528fca8a4e9ee288690a5
[ "Apache-2.0" ]
null
null
null
tests/test_apigatewayv2/test_apigatewayv2_vpclinks.py
symroe/moto
4e106995af6f2820273528fca8a4e9ee288690a5
[ "Apache-2.0" ]
1
2022-02-19T02:10:45.000Z
2022-02-19T02:15:52.000Z
tests/test_apigatewayv2/test_apigatewayv2_vpclinks.py
symroe/moto
4e106995af6f2820273528fca8a4e9ee288690a5
[ "Apache-2.0" ]
null
null
null
import boto3
import pytest
from botocore.exceptions import ClientError

from moto import mock_apigatewayv2


def _make_vpc_link(client, tags=None):
    """Create the standard fixture vpc link; return the create response."""
    return client.create_vpc_link(
        Name="vpcl",
        SecurityGroupIds=["sg1", "sg2"],
        SubnetIds=["sid1", "sid2"],
        Tags={"key1": "value1"} if tags is None else tags,
    )


def _check_link_fields(resp, name="vpcl", tags=None):
    """Assert the common response fields shared by create/get/update tests."""
    expected_tags = {"key1": "value1"} if tags is None else tags
    resp.should.have.key("CreatedDate")
    resp.should.have.key("Name").equals(name)
    resp.should.have.key("SecurityGroupIds").equals(["sg1", "sg2"])
    resp.should.have.key("SubnetIds").equals(["sid1", "sid2"])
    resp.should.have.key("Tags").equals(expected_tags)
    resp.should.have.key("VpcLinkId")
    resp.should.have.key("VpcLinkStatus").equals("AVAILABLE")
    resp.should.have.key("VpcLinkVersion").equals("V2")


@mock_apigatewayv2
def test_get_vpc_links_empty():
    client = boto3.client("apigatewayv2", region_name="eu-west-1")

    resp = client.get_vpc_links()
    resp.should.have.key("Items").equals([])


@mock_apigatewayv2
def test_create_vpc_links():
    client = boto3.client("apigatewayv2", region_name="eu-west-1")

    resp = _make_vpc_link(client)
    _check_link_fields(resp)


@mock_apigatewayv2
def test_get_vpc_link():
    client = boto3.client("apigatewayv2", region_name="eu-west-1")
    vpc_link_id = _make_vpc_link(client)["VpcLinkId"]

    resp = client.get_vpc_link(VpcLinkId=vpc_link_id)
    _check_link_fields(resp)


@mock_apigatewayv2
def test_get_vpc_link_unknown():
    client = boto3.client("apigatewayv2", region_name="ap-southeast-1")

    with pytest.raises(ClientError) as exc:
        client.get_vpc_link(VpcLinkId="unknown")

    err = exc.value.response["Error"]
    err["Code"].should.equal("NotFoundException")
    err["Message"].should.equal("Invalid VpcLink identifier specified unknown")


@mock_apigatewayv2
def test_get_vpc_links():
    client = boto3.client("apigatewayv2", region_name="eu-west-1")
    vpc_link_id = _make_vpc_link(client)["VpcLinkId"]

    links = client.get_vpc_links()["Items"]
    links.should.have.length_of(1)
    links[0]["VpcLinkId"].should.equal(vpc_link_id)

    _make_vpc_link(client)
    links = client.get_vpc_links()["Items"]
    links.should.have.length_of(2)


@mock_apigatewayv2
def test_delete_vpc_link():
    client = boto3.client("apigatewayv2", region_name="eu-north-1")
    vpc_link_id = _make_vpc_link(client)["VpcLinkId"]

    links = client.get_vpc_links()["Items"]
    links.should.have.length_of(1)

    client.delete_vpc_link(VpcLinkId=vpc_link_id)

    links = client.get_vpc_links()["Items"]
    links.should.have.length_of(0)


@mock_apigatewayv2
def test_update_vpc_link():
    client = boto3.client("apigatewayv2", region_name="eu-north-1")
    vpc_link_id = _make_vpc_link(client)["VpcLinkId"]

    resp = client.update_vpc_link(VpcLinkId=vpc_link_id, Name="vpcl2")
    _check_link_fields(resp, name="vpcl2")


@mock_apigatewayv2
def test_untag_vpc_link():
    client = boto3.client("apigatewayv2", region_name="eu-west-1")
    vpc_link_id = _make_vpc_link(
        client, tags={"Key1": "value1", "key2": "val2"}
    )["VpcLinkId"]

    arn = f"arn:aws:apigateway:eu-west-1::/vpclinks/{vpc_link_id}"
    client.untag_resource(ResourceArn=arn, TagKeys=["Key1"])

    resp = client.get_vpc_link(VpcLinkId=vpc_link_id)
    _check_link_fields(resp, tags={"key2": "val2"})
31.803681
79
0.6576
646
5,184
5.116099
0.131579
0.111952
0.139788
0.169743
0.827837
0.817247
0.790318
0.772163
0.772163
0.772163
0
0.026292
0.15625
5,184
162
80
32
0.72931
0
0
0.725806
0
0
0.20814
0.010224
0
0
0
0
0
1
0.064516
false
0
0.032258
0
0.096774
0
0
0
0
null
0
0
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
8
d3c41c23cc0ce53f18df6af845291e12c4b04c05
5,167
py
Python
Test/test_modify_contact.py
Lagorim/python_test
9c9f5accfba73bb458e3c705bd1b37bd3536d3de
[ "Apache-2.0" ]
1
2019-04-04T08:21:49.000Z
2019-04-04T08:21:49.000Z
Test/test_modify_contact.py
Lagorim/python_test
9c9f5accfba73bb458e3c705bd1b37bd3536d3de
[ "Apache-2.0" ]
null
null
null
Test/test_modify_contact.py
Lagorim/python_test
9c9f5accfba73bb458e3c705bd1b37bd3536d3de
[ "Apache-2.0" ]
null
null
null
from Model_class.contact import Contact
from random import randrange


def test_modify_firstname(app):
    """Modify the firstname of a randomly chosen contact and verify the list.

    If the address book is empty, a placeholder contact is created first so
    there is always something to modify.  The old list, patched locally at the
    modified index, must equal the freshly re-read list (order-insensitive,
    compared after sorting by id).
    """
    if app.contact.count() == 0:
        app.contact.create(Contact(firstname=""))
    old_contacts = app.contact.get_contact_list()
    index = randrange(len(old_contacts))
    contact = Contact(firstname="Firstname")
    # Preserve the original id so the locally patched list is comparable with
    # the list re-read from the app.
    contact.id = old_contacts[index].id
    app.contact.modify_contact_by_index(index, contact)
    new_contacts = app.contact.get_contact_list()
    assert len(old_contacts) == len(new_contacts)
    old_contacts[index] = contact
    assert sorted(old_contacts, key=Contact.id_or_max) == sorted(
        new_contacts, key=Contact.id_or_max
    )

# NOTE(review): ~15 commented-out variants of this test (firstname/lastname
# with empty, Cyrillic and numeric values) were removed as dead code — they
# called modify_first_contact, which the live test no longer uses.  Restore
# from VCS history if those cases are ever reinstated.
39.143939
101
0.715889
676
5,167
5.186391
0.063609
0.199658
0.143754
0.167712
0.926697
0.920422
0.906161
0.906161
0.906161
0.906161
0
0.009905
0.140314
5,167
131
102
39.442748
0.779379
0.833559
0
0
0
0
0.011704
0
0
0
0
0
0.142857
1
0.071429
false
0
0.142857
0
0.214286
0
0
0
0
null
0
0
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
8
d3de1192ebd94457f548114e4d537d268636ebc1
159,066
py
Python
release/scripts/addons/rigify/metarigs/Animals/cat.py
noorbeast/BlenderSource
65ebecc5108388965678b04b43463b85f6c69c1d
[ "Naumen", "Condor-1.1", "MS-PL" ]
3
2019-09-16T10:29:19.000Z
2022-02-11T14:43:18.000Z
engine/2.80/scripts/addons/rigify/metarigs/Animals/cat.py
byteinc/Phasor
f7d23a489c2b4bcc3c1961ac955926484ff8b8d9
[ "Unlicense" ]
null
null
null
engine/2.80/scripts/addons/rigify/metarigs/Animals/cat.py
byteinc/Phasor
f7d23a489c2b4bcc3c1961ac955926484ff8b8d9
[ "Unlicense" ]
null
null
null
import bpy from mathutils import Color def create(obj): # generated by rigify.utils.write_metarig bpy.ops.object.mode_set(mode='EDIT') arm = obj.data for i in range(6): arm.rigify_colors.add() arm.rigify_colors[0].name = "Root" arm.rigify_colors[0].active = Color((0.5490196347236633, 1.0, 1.0)) arm.rigify_colors[0].normal = Color((0.4352940022945404, 0.18431399762630463, 0.4156860113143921)) arm.rigify_colors[0].select = Color((0.31372547149658203, 0.7843138575553894, 1.0)) arm.rigify_colors[0].standard_colors_lock = True arm.rigify_colors[1].name = "IK" arm.rigify_colors[1].active = Color((0.5490196347236633, 1.0, 1.0)) arm.rigify_colors[1].normal = Color((0.6039220094680786, 0.0, 0.0)) arm.rigify_colors[1].select = Color((0.31372547149658203, 0.7843138575553894, 1.0)) arm.rigify_colors[1].standard_colors_lock = True arm.rigify_colors[2].name = "Special" arm.rigify_colors[2].active = Color((0.5490196347236633, 1.0, 1.0)) arm.rigify_colors[2].normal = Color((0.9568629860877991, 0.7882350087165833, 0.04705899953842163)) arm.rigify_colors[2].select = Color((0.31372547149658203, 0.7843138575553894, 1.0)) arm.rigify_colors[2].standard_colors_lock = True arm.rigify_colors[3].name = "Tweak" arm.rigify_colors[3].active = Color((0.5490196347236633, 1.0, 1.0)) arm.rigify_colors[3].normal = Color((0.03921600058674812, 0.21176500618457794, 0.5803920030593872)) arm.rigify_colors[3].select = Color((0.31372547149658203, 0.7843138575553894, 1.0)) arm.rigify_colors[3].standard_colors_lock = True arm.rigify_colors[4].name = "FK" arm.rigify_colors[4].active = Color((0.5490196347236633, 1.0, 1.0)) arm.rigify_colors[4].normal = Color((0.11764699965715408, 0.5686269998550415, 0.035294000059366226)) arm.rigify_colors[4].select = Color((0.31372547149658203, 0.7843138575553894, 1.0)) arm.rigify_colors[4].standard_colors_lock = True arm.rigify_colors[5].name = "Extra" arm.rigify_colors[5].active = Color((0.5490196347236633, 1.0, 1.0)) arm.rigify_colors[5].normal = 
Color((0.9686279892921448, 0.2509799897670746, 0.09411799907684326)) arm.rigify_colors[5].select = Color((0.31372547149658203, 0.7843138575553894, 1.0)) arm.rigify_colors[5].standard_colors_lock = True for i in range(29): arm.rigify_layers.add() arm.rigify_layers[0].name = "Face" arm.rigify_layers[0].row = 1 arm.rigify_layers[0].selset = False arm.rigify_layers[0].group = 5 arm.rigify_layers[1].name = "Face (Primary)" arm.rigify_layers[1].row = 2 arm.rigify_layers[1].selset = False arm.rigify_layers[1].group = 2 arm.rigify_layers[2].name = "Face (Secondary)" arm.rigify_layers[2].row = 2 arm.rigify_layers[2].selset = False arm.rigify_layers[2].group = 3 arm.rigify_layers[3].name = "Spine" arm.rigify_layers[3].row = 3 arm.rigify_layers[3].selset = False arm.rigify_layers[3].group = 3 arm.rigify_layers[4].name = "Spine (Tweak)" arm.rigify_layers[4].row = 4 arm.rigify_layers[4].selset = False arm.rigify_layers[4].group = 4 arm.rigify_layers[5].name = "Paws" arm.rigify_layers[5].row = 5 arm.rigify_layers[5].selset = False arm.rigify_layers[5].group = 6 arm.rigify_layers[6].name = "Paws (Tweak)" arm.rigify_layers[6].row = 6 arm.rigify_layers[6].selset = False arm.rigify_layers[6].group = 4 arm.rigify_layers[7].name = "Arm.L (IK)" arm.rigify_layers[7].row = 7 arm.rigify_layers[7].selset = False arm.rigify_layers[7].group = 2 arm.rigify_layers[8].name = "Arm.L (FK)" arm.rigify_layers[8].row = 8 arm.rigify_layers[8].selset = False arm.rigify_layers[8].group = 5 arm.rigify_layers[9].name = "Arm,L (Tweak)" arm.rigify_layers[9].row = 9 arm.rigify_layers[9].selset = False arm.rigify_layers[9].group = 4 arm.rigify_layers[10].name = "Arm.R (IK)" arm.rigify_layers[10].row = 7 arm.rigify_layers[10].selset = False arm.rigify_layers[10].group = 2 arm.rigify_layers[11].name = "Arm.R (FK)" arm.rigify_layers[11].row = 8 arm.rigify_layers[11].selset = False arm.rigify_layers[11].group = 5 arm.rigify_layers[12].name = "Arm.R (Tweak)" arm.rigify_layers[12].row = 9 
arm.rigify_layers[12].selset = False arm.rigify_layers[12].group = 4 arm.rigify_layers[13].name = "Leg.L (IK)" arm.rigify_layers[13].row = 10 arm.rigify_layers[13].selset = False arm.rigify_layers[13].group = 2 arm.rigify_layers[14].name = "Leg.L (FK)" arm.rigify_layers[14].row = 11 arm.rigify_layers[14].selset = False arm.rigify_layers[14].group = 5 arm.rigify_layers[15].name = "Leg.L (Tweak)" arm.rigify_layers[15].row = 12 arm.rigify_layers[15].selset = False arm.rigify_layers[15].group = 4 arm.rigify_layers[16].name = "Leg.R (IK)" arm.rigify_layers[16].row = 10 arm.rigify_layers[16].selset = False arm.rigify_layers[16].group = 2 arm.rigify_layers[17].name = "Leg.R (FK)" arm.rigify_layers[17].row = 11 arm.rigify_layers[17].selset = False arm.rigify_layers[17].group = 5 arm.rigify_layers[18].name = "Leg.R (Tweak)" arm.rigify_layers[18].row = 12 arm.rigify_layers[18].selset = False arm.rigify_layers[18].group = 4 arm.rigify_layers[19].name = "Tail" arm.rigify_layers[19].row = 13 arm.rigify_layers[19].selset = False arm.rigify_layers[19].group = 3 arm.rigify_layers[20].name = "Tail (Tweaks)" arm.rigify_layers[20].row = 14 arm.rigify_layers[20].selset = False arm.rigify_layers[20].group = 4 arm.rigify_layers[21].name = " " arm.rigify_layers[21].row = 1 arm.rigify_layers[21].selset = False arm.rigify_layers[21].group = 0 arm.rigify_layers[22].name = " " arm.rigify_layers[22].row = 1 arm.rigify_layers[22].selset = False arm.rigify_layers[22].group = 0 arm.rigify_layers[23].name = " " arm.rigify_layers[23].row = 1 arm.rigify_layers[23].selset = False arm.rigify_layers[23].group = 0 arm.rigify_layers[24].name = " " arm.rigify_layers[24].row = 1 arm.rigify_layers[24].selset = False arm.rigify_layers[24].group = 0 arm.rigify_layers[25].name = " " arm.rigify_layers[25].row = 1 arm.rigify_layers[25].selset = False arm.rigify_layers[25].group = 0 arm.rigify_layers[26].name = " " arm.rigify_layers[26].row = 1 arm.rigify_layers[26].selset = False arm.rigify_layers[26].group = 0 
arm.rigify_layers[27].name = " " arm.rigify_layers[27].row = 1 arm.rigify_layers[27].selset = False arm.rigify_layers[27].group = 0 arm.rigify_layers[28].name = "Root" arm.rigify_layers[28].row = 16 arm.rigify_layers[28].selset = False arm.rigify_layers[28].group = 1 bones = {} bone = arm.edit_bones.new('tail.004') bone.head[:] = -0.0000, 0.5531, 0.2488 bone.tail[:] = -0.0000, 0.4543, 0.2321 bone.roll = 0.0000 bone.use_connect = False bones['tail.004'] = bone.name bone = arm.edit_bones.new('tail.003') bone.head[:] = -0.0000, 0.4543, 0.2321 bone.tail[:] = -0.0000, 0.3513, 0.2284 bone.roll = 0.0000 bone.use_connect = True bone.parent = arm.edit_bones[bones['tail.004']] bones['tail.003'] = bone.name bone = arm.edit_bones.new('tail.002') bone.head[:] = -0.0000, 0.3513, 0.2284 bone.tail[:] = -0.0000, 0.2460, 0.2324 bone.roll = 0.0000 bone.use_connect = True bone.parent = arm.edit_bones[bones['tail.003']] bones['tail.002'] = bone.name bone = arm.edit_bones.new('tail.001') bone.head[:] = -0.0000, 0.2460, 0.2324 bone.tail[:] = 0.0000, 0.1499, 0.2500 bone.roll = 0.0000 bone.use_connect = True bone.parent = arm.edit_bones[bones['tail.002']] bones['tail.001'] = bone.name bone = arm.edit_bones.new('spine') bone.head[:] = 0.0000, 0.1499, 0.2500 bone.tail[:] = 0.0000, 0.0769, 0.2272 bone.roll = 0.0000 bone.use_connect = True bone.parent = arm.edit_bones[bones['tail.001']] bones['spine'] = bone.name bone = arm.edit_bones.new('spine.001') bone.head[:] = 0.0000, 0.0769, 0.2272 bone.tail[:] = 0.0000, 0.0180, 0.2240 bone.roll = 0.0000 bone.use_connect = True bone.parent = arm.edit_bones[bones['spine']] bones['spine.001'] = bone.name bone = arm.edit_bones.new('pelvis.L') bone.head[:] = 0.0000, 0.1499, 0.2500 bone.tail[:] = 0.0391, 0.1124, 0.2699 bone.roll = 2.3502 bone.use_connect = False bone.parent = arm.edit_bones[bones['spine']] bones['pelvis.L'] = bone.name bone = arm.edit_bones.new('pelvis.R') bone.head[:] = 0.0000, 0.1499, 0.2500 bone.tail[:] = -0.0391, 0.1124, 0.2699 bone.roll 
= -2.3502 bone.use_connect = False bone.parent = arm.edit_bones[bones['spine']] bones['pelvis.R'] = bone.name bone = arm.edit_bones.new('pelvis.C') bone.head[:] = 0.0000, 0.1499, 0.2500 bone.tail[:] = 0.0000, 0.1344, 0.1727 bone.roll = -0.0000 bone.use_connect = False bone.parent = arm.edit_bones[bones['spine']] bones['pelvis.C'] = bone.name bone = arm.edit_bones.new('spine.002') bone.head[:] = 0.0000, 0.0180, 0.2240 bone.tail[:] = 0.0000, -0.0513, 0.2271 bone.roll = 0.0000 bone.use_connect = True bone.parent = arm.edit_bones[bones['spine.001']] bones['spine.002'] = bone.name bone = arm.edit_bones.new('thigh.L') bone.head[:] = 0.0291, 0.1148, 0.2460 bone.tail[:] = 0.0293, 0.1074, 0.1682 bone.roll = 3.1383 bone.use_connect = False bone.parent = arm.edit_bones[bones['pelvis.L']] bones['thigh.L'] = bone.name bone = arm.edit_bones.new('thigh.R') bone.head[:] = -0.0291, 0.1148, 0.2460 bone.tail[:] = -0.0293, 0.1074, 0.1682 bone.roll = -3.1383 bone.use_connect = False bone.parent = arm.edit_bones[bones['pelvis.R']] bones['thigh.R'] = bone.name bone = arm.edit_bones.new('spine.003') bone.head[:] = 0.0000, -0.0513, 0.2271 bone.tail[:] = 0.0000, -0.1571, 0.2355 bone.roll = 0.0000 bone.use_connect = True bone.parent = arm.edit_bones[bones['spine.002']] bones['spine.003'] = bone.name bone = arm.edit_bones.new('belly.C') bone.head[:] = 0.0000, -0.0081, 0.1829 bone.tail[:] = 0.0000, -0.0081, 0.1383 bone.roll = 6.2832 bone.use_connect = False bone.parent = arm.edit_bones[bones['spine.002']] bones['belly.C'] = bone.name bone = arm.edit_bones.new('shin.L') bone.head[:] = 0.0293, 0.1074, 0.1682 bone.tail[:] = 0.0293, 0.1684, 0.1073 bone.roll = 3.1416 bone.use_connect = True bone.parent = arm.edit_bones[bones['thigh.L']] bones['shin.L'] = bone.name bone = arm.edit_bones.new('shin.R') bone.head[:] = -0.0293, 0.1074, 0.1682 bone.tail[:] = -0.0293, 0.1684, 0.1073 bone.roll = -3.1416 bone.use_connect = True bone.parent = arm.edit_bones[bones['thigh.R']] bones['shin.R'] = bone.name bone 
= arm.edit_bones.new('spine.004') bone.head[:] = 0.0000, -0.1571, 0.2355 bone.tail[:] = 0.0000, -0.1736, 0.2395 bone.roll = 0.0000 bone.use_connect = True bone.parent = arm.edit_bones[bones['spine.003']] bones['spine.004'] = bone.name bone = arm.edit_bones.new('Breast.C') bone.head[:] = 0.0000, -0.1169, 0.1866 bone.tail[:] = 0.0000, -0.1279, 0.1479 bone.roll = -6.2832 bone.use_connect = False bone.parent = arm.edit_bones[bones['spine.003']] bones['Breast.C'] = bone.name bone = arm.edit_bones.new('shoulder.L') bone.head[:] = 0.0111, -0.0973, 0.2645 bone.tail[:] = 0.0346, -0.1427, 0.2105 bone.roll = 2.2707 bone.use_connect = False bone.parent = arm.edit_bones[bones['spine.003']] bones['shoulder.L'] = bone.name bone = arm.edit_bones.new('shoulder.R') bone.head[:] = -0.0111, -0.0973, 0.2645 bone.tail[:] = -0.0346, -0.1427, 0.2105 bone.roll = -2.2707 bone.use_connect = False bone.parent = arm.edit_bones[bones['spine.003']] bones['shoulder.R'] = bone.name bone = arm.edit_bones.new('foot.L') bone.head[:] = 0.0293, 0.1684, 0.1073 bone.tail[:] = 0.0293, 0.1530, 0.0167 bone.roll = 3.1416 bone.use_connect = True bone.parent = arm.edit_bones[bones['shin.L']] bones['foot.L'] = bone.name bone = arm.edit_bones.new('foot.R') bone.head[:] = -0.0293, 0.1684, 0.1073 bone.tail[:] = -0.0293, 0.1530, 0.0167 bone.roll = -3.1416 bone.use_connect = True bone.parent = arm.edit_bones[bones['shin.R']] bones['foot.R'] = bone.name bone = arm.edit_bones.new('spine.005') bone.head[:] = 0.0000, -0.1736, 0.2395 bone.tail[:] = 0.0000, -0.1860, 0.2445 bone.roll = 0.0000 bone.use_connect = True bone.parent = arm.edit_bones[bones['spine.004']] bones['spine.005'] = bone.name bone = arm.edit_bones.new('upper_arm.L') bone.head[:] = 0.0313, -0.1292, 0.2018 bone.tail[:] = 0.0313, -0.0998, 0.1235 bone.roll = 3.1416 bone.use_connect = False bone.parent = arm.edit_bones[bones['shoulder.L']] bones['upper_arm.L'] = bone.name bone = arm.edit_bones.new('upper_arm.R') bone.head[:] = -0.0313, -0.1292, 0.2018 
bone.tail[:] = -0.0313, -0.0998, 0.1235 bone.roll = -3.1416 bone.use_connect = False bone.parent = arm.edit_bones[bones['shoulder.R']] bones['upper_arm.R'] = bone.name bone = arm.edit_bones.new('r_toe.L') bone.head[:] = 0.0293, 0.1530, 0.0167 bone.tail[:] = 0.0293, 0.1334, 0.0039 bone.roll = 3.1416 bone.use_connect = True bone.parent = arm.edit_bones[bones['foot.L']] bones['r_toe.L'] = bone.name bone = arm.edit_bones.new('r_toe.R') bone.head[:] = -0.0293, 0.1530, 0.0167 bone.tail[:] = -0.0293, 0.1334, 0.0039 bone.roll = -3.1416 bone.use_connect = True bone.parent = arm.edit_bones[bones['foot.R']] bones['r_toe.R'] = bone.name bone = arm.edit_bones.new('spine.006') bone.head[:] = 0.0000, -0.1860, 0.2445 bone.tail[:] = 0.0000, -0.2599, 0.2789 bone.roll = 0.0000 bone.use_connect = True bone.parent = arm.edit_bones[bones['spine.005']] bones['spine.006'] = bone.name bone = arm.edit_bones.new('forearm.L') bone.head[:] = 0.0313, -0.0998, 0.1235 bone.tail[:] = 0.0313, -0.1178, 0.0248 bone.roll = 3.1416 bone.use_connect = True bone.parent = arm.edit_bones[bones['upper_arm.L']] bones['forearm.L'] = bone.name bone = arm.edit_bones.new('forearm.R') bone.head[:] = -0.0313, -0.0998, 0.1235 bone.tail[:] = -0.0313, -0.1178, 0.0248 bone.roll = -3.1416 bone.use_connect = True bone.parent = arm.edit_bones[bones['upper_arm.R']] bones['forearm.R'] = bone.name bone = arm.edit_bones.new('r_palm.001.L') bone.head[:] = 0.0220, 0.1457, 0.0123 bone.tail[:] = 0.0215, 0.1401, 0.0123 bone.roll = 0.0014 bone.use_connect = False bone.parent = arm.edit_bones[bones['r_toe.L']] bones['r_palm.001.L'] = bone.name bone = arm.edit_bones.new('r_palm.002.L') bone.head[:] = 0.0297, 0.1458, 0.0123 bone.tail[:] = 0.0311, 0.1393, 0.0123 bone.roll = -0.0005 bone.use_connect = False bone.parent = arm.edit_bones[bones['r_toe.L']] bones['r_palm.002.L'] = bone.name bone = arm.edit_bones.new('r_palm.003.L') bone.head[:] = 0.0363, 0.1473, 0.0123 bone.tail[:] = 0.0376, 0.1407, 0.0123 bone.roll = 0.0000 
bone.use_connect = False bone.parent = arm.edit_bones[bones['r_toe.L']] bones['r_palm.003.L'] = bone.name bone = arm.edit_bones.new('r_palm.004.L') bone.head[:] = 0.0449, 0.1501, 0.0123 bone.tail[:] = 0.0466, 0.1479, 0.0123 bone.roll = -0.0004 bone.use_connect = False bone.parent = arm.edit_bones[bones['r_toe.L']] bones['r_palm.004.L'] = bone.name bone = arm.edit_bones.new('r_palm.001.R') bone.head[:] = -0.0220, 0.1457, 0.0123 bone.tail[:] = -0.0215, 0.1401, 0.0123 bone.roll = -0.0014 bone.use_connect = False bone.parent = arm.edit_bones[bones['r_toe.R']] bones['r_palm.001.R'] = bone.name bone = arm.edit_bones.new('r_palm.002.R') bone.head[:] = -0.0297, 0.1458, 0.0123 bone.tail[:] = -0.0311, 0.1393, 0.0123 bone.roll = 0.0005 bone.use_connect = False bone.parent = arm.edit_bones[bones['r_toe.R']] bones['r_palm.002.R'] = bone.name bone = arm.edit_bones.new('r_palm.003.R') bone.head[:] = -0.0363, 0.1473, 0.0123 bone.tail[:] = -0.0376, 0.1407, 0.0123 bone.roll = -0.0000 bone.use_connect = False bone.parent = arm.edit_bones[bones['r_toe.R']] bones['r_palm.003.R'] = bone.name bone = arm.edit_bones.new('r_palm.004.R') bone.head[:] = -0.0449, 0.1501, 0.0123 bone.tail[:] = -0.0466, 0.1479, 0.0123 bone.roll = 0.0004 bone.use_connect = False bone.parent = arm.edit_bones[bones['r_toe.R']] bones['r_palm.004.R'] = bone.name bone = arm.edit_bones.new('face') bone.head[:] = 0.0000, -0.1860, 0.2445 bone.tail[:] = 0.0000, -0.1860, 0.3056 bone.roll = 0.0000 bone.use_connect = False bone.parent = arm.edit_bones[bones['spine.006']] bones['face'] = bone.name bone = arm.edit_bones.new('hand.L') bone.head[:] = 0.0313, -0.1178, 0.0248 bone.tail[:] = 0.0313, -0.1261, 0.0108 bone.roll = 3.1416 bone.use_connect = True bone.parent = arm.edit_bones[bones['forearm.L']] bones['hand.L'] = bone.name bone = arm.edit_bones.new('hand.R') bone.head[:] = -0.0313, -0.1178, 0.0248 bone.tail[:] = -0.0313, -0.1261, 0.0108 bone.roll = -3.1416 bone.use_connect = True bone.parent = 
arm.edit_bones[bones['forearm.R']] bones['hand.R'] = bone.name bone = arm.edit_bones.new('r_index.001.L') bone.head[:] = 0.0215, 0.1367, 0.0087 bone.tail[:] = 0.0217, 0.1325, 0.0070 bone.roll = -0.3427 bone.use_connect = False bone.parent = arm.edit_bones[bones['r_palm.001.L']] bones['r_index.001.L'] = bone.name bone = arm.edit_bones.new('r_middle.001.L') bone.head[:] = 0.0311, 0.1358, 0.0117 bone.tail[:] = 0.0324, 0.1297, 0.0092 bone.roll = -1.0029 bone.use_connect = False bone.parent = arm.edit_bones[bones['r_palm.002.L']] bones['r_middle.001.L'] = bone.name bone = arm.edit_bones.new('r_ring.001.L') bone.head[:] = 0.0376, 0.1372, 0.0117 bone.tail[:] = 0.0389, 0.1311, 0.0092 bone.roll = -1.0029 bone.use_connect = False bone.parent = arm.edit_bones[bones['r_palm.003.L']] bones['r_ring.001.L'] = bone.name bone = arm.edit_bones.new('r_pinky.001.L') bone.head[:] = 0.0466, 0.1444, 0.0083 bone.tail[:] = 0.0476, 0.1412, 0.0074 bone.roll = -1.7551 bone.use_connect = False bone.parent = arm.edit_bones[bones['r_palm.004.L']] bones['r_pinky.001.L'] = bone.name bone = arm.edit_bones.new('r_index.001.R') bone.head[:] = -0.0215, 0.1367, 0.0087 bone.tail[:] = -0.0217, 0.1325, 0.0070 bone.roll = 0.3427 bone.use_connect = False bone.parent = arm.edit_bones[bones['r_palm.001.R']] bones['r_index.001.R'] = bone.name bone = arm.edit_bones.new('r_middle.001.R') bone.head[:] = -0.0311, 0.1358, 0.0117 bone.tail[:] = -0.0324, 0.1297, 0.0092 bone.roll = 1.0029 bone.use_connect = False bone.parent = arm.edit_bones[bones['r_palm.002.R']] bones['r_middle.001.R'] = bone.name bone = arm.edit_bones.new('r_ring.001.R') bone.head[:] = -0.0376, 0.1372, 0.0117 bone.tail[:] = -0.0389, 0.1311, 0.0092 bone.roll = 1.0029 bone.use_connect = False bone.parent = arm.edit_bones[bones['r_palm.003.R']] bones['r_ring.001.R'] = bone.name bone = arm.edit_bones.new('r_pinky.001.R') bone.head[:] = -0.0466, 0.1444, 0.0083 bone.tail[:] = -0.0476, 0.1412, 0.0074 bone.roll = 1.7551 bone.use_connect = False bone.parent 
= arm.edit_bones[bones['r_palm.004.R']] bones['r_pinky.001.R'] = bone.name bone = arm.edit_bones.new('nose') bone.head[:] = 0.0000, -0.2709, 0.2463 bone.tail[:] = 0.0000, -0.2705, 0.2315 bone.roll = -0.0018 bone.use_connect = False bone.parent = arm.edit_bones[bones['face']] bones['nose'] = bone.name bone = arm.edit_bones.new('lip.T.L') bone.head[:] = 0.0000, -0.2762, 0.2076 bone.tail[:] = 0.0142, -0.2683, 0.2053 bone.roll = 0.0739 bone.use_connect = False bone.parent = arm.edit_bones[bones['face']] bones['lip.T.L'] = bone.name bone = arm.edit_bones.new('lip.B.L') bone.head[:] = 0.0000, -0.2748, 0.2025 bone.tail[:] = 0.0102, -0.2676, 0.2025 bone.roll = 0.0154 bone.use_connect = False bone.parent = arm.edit_bones[bones['face']] bones['lip.B.L'] = bone.name bone = arm.edit_bones.new('jaw') bone.head[:] = 0.0000, -0.2126, 0.2015 bone.tail[:] = 0.0000, -0.2524, 0.1977 bone.roll = 0.0000 bone.use_connect = False bone.parent = arm.edit_bones[bones['face']] bones['jaw'] = bone.name bone = arm.edit_bones.new('ear.L') bone.head[:] = 0.0361, -0.2279, 0.2662 bone.tail[:] = 0.0259, -0.2360, 0.2749 bone.roll = -2.7956 bone.use_connect = False bone.parent = arm.edit_bones[bones['face']] bones['ear.L'] = bone.name bone = arm.edit_bones.new('ear.R') bone.head[:] = -0.0361, -0.2279, 0.2662 bone.tail[:] = -0.0259, -0.2360, 0.2749 bone.roll = 2.7956 bone.use_connect = False bone.parent = arm.edit_bones[bones['face']] bones['ear.R'] = bone.name bone = arm.edit_bones.new('lip.T.R') bone.head[:] = 0.0000, -0.2762, 0.2076 bone.tail[:] = -0.0142, -0.2683, 0.2053 bone.roll = -0.0739 bone.use_connect = False bone.parent = arm.edit_bones[bones['face']] bones['lip.T.R'] = bone.name bone = arm.edit_bones.new('lip.B.R') bone.head[:] = 0.0000, -0.2748, 0.2025 bone.tail[:] = -0.0102, -0.2676, 0.2025 bone.roll = -0.0154 bone.use_connect = False bone.parent = arm.edit_bones[bones['face']] bones['lip.B.R'] = bone.name bone = arm.edit_bones.new('brow.B.L') bone.head[:] = 0.0450, -0.2472, 0.2375 
bone.tail[:] = 0.0336, -0.2577, 0.2472 bone.roll = -0.4810 bone.use_connect = False bone.parent = arm.edit_bones[bones['face']] bones['brow.B.L'] = bone.name bone = arm.edit_bones.new('lid.T.L') bone.head[:] = 0.0398, -0.2525, 0.2396 bone.tail[:] = 0.0327, -0.2600, 0.2432 bone.roll = -0.4405 bone.use_connect = False bone.parent = arm.edit_bones[bones['face']] bones['lid.T.L'] = bone.name bone = arm.edit_bones.new('brow.B.R') bone.head[:] = -0.0450, -0.2472, 0.2375 bone.tail[:] = -0.0336, -0.2577, 0.2472 bone.roll = 0.4810 bone.use_connect = False bone.parent = arm.edit_bones[bones['face']] bones['brow.B.R'] = bone.name bone = arm.edit_bones.new('lid.T.R') bone.head[:] = -0.0398, -0.2525, 0.2396 bone.tail[:] = -0.0327, -0.2600, 0.2432 bone.roll = 0.4405 bone.use_connect = False bone.parent = arm.edit_bones[bones['face']] bones['lid.T.R'] = bone.name bone = arm.edit_bones.new('forehead.L') bone.head[:] = 0.0103, -0.2600, 0.2669 bone.tail[:] = 0.0097, -0.2694, 0.2527 bone.roll = 1.6514 bone.use_connect = False bone.parent = arm.edit_bones[bones['face']] bones['forehead.L'] = bone.name bone = arm.edit_bones.new('forehead.R') bone.head[:] = -0.0103, -0.2600, 0.2669 bone.tail[:] = -0.0097, -0.2694, 0.2527 bone.roll = -1.6514 bone.use_connect = False bone.parent = arm.edit_bones[bones['face']] bones['forehead.R'] = bone.name bone = arm.edit_bones.new('eye.L') bone.head[:] = 0.0170, -0.2441, 0.2385 bone.tail[:] = 0.0170, -0.2738, 0.2385 bone.roll = 0.0000 bone.use_connect = False bone.parent = arm.edit_bones[bones['face']] bones['eye.L'] = bone.name bone = arm.edit_bones.new('eye.R') bone.head[:] = -0.0170, -0.2441, 0.2385 bone.tail[:] = -0.0170, -0.2738, 0.2385 bone.roll = -0.0000 bone.use_connect = False bone.parent = arm.edit_bones[bones['face']] bones['eye.R'] = bone.name bone = arm.edit_bones.new('cheek.T.L') bone.head[:] = 0.0450, -0.2472, 0.2375 bone.tail[:] = 0.0308, -0.2584, 0.2187 bone.roll = 0.3924 bone.use_connect = False bone.parent = 
arm.edit_bones[bones['face']] bones['cheek.T.L'] = bone.name bone = arm.edit_bones.new('cheek.T.R') bone.head[:] = -0.0450, -0.2472, 0.2375 bone.tail[:] = -0.0308, -0.2584, 0.2187 bone.roll = -0.3924 bone.use_connect = False bone.parent = arm.edit_bones[bones['face']] bones['cheek.T.R'] = bone.name bone = arm.edit_bones.new('teeth.T') bone.head[:] = 0.0000, -0.2724, 0.2129 bone.tail[:] = 0.0000, -0.2477, 0.2129 bone.roll = 0.0000 bone.use_connect = False bone.parent = arm.edit_bones[bones['face']] bones['teeth.T'] = bone.name bone = arm.edit_bones.new('teeth.B') bone.head[:] = 0.0000, -0.2709, 0.2076 bone.tail[:] = 0.0000, -0.2463, 0.2076 bone.roll = 0.0000 bone.use_connect = False bone.parent = arm.edit_bones[bones['face']] bones['teeth.B'] = bone.name bone = arm.edit_bones.new('tongue') bone.head[:] = 0.0000, -0.2693, 0.2091 bone.tail[:] = 0.0000, -0.2610, 0.2098 bone.roll = 0.0000 bone.use_connect = False bone.parent = arm.edit_bones[bones['face']] bones['tongue'] = bone.name bone = arm.edit_bones.new('f_toe.L') bone.head[:] = 0.0313, -0.1261, 0.0108 bone.tail[:] = 0.0313, -0.1416, 0.0009 bone.roll = -3.1416 bone.use_connect = True bone.parent = arm.edit_bones[bones['hand.L']] bones['f_toe.L'] = bone.name bone = arm.edit_bones.new('f_toe.R') bone.head[:] = -0.0313, -0.1261, 0.0108 bone.tail[:] = -0.0313, -0.1416, 0.0009 bone.roll = 3.1416 bone.use_connect = True bone.parent = arm.edit_bones[bones['hand.R']] bones['f_toe.R'] = bone.name bone = arm.edit_bones.new('r_index.002.L') bone.head[:] = 0.0217, 0.1325, 0.0070 bone.tail[:] = 0.0221, 0.1271, 0.0038 bone.roll = -0.2465 bone.use_connect = True bone.parent = arm.edit_bones[bones['r_index.001.L']] bones['r_index.002.L'] = bone.name bone = arm.edit_bones.new('r_middle.002.L') bone.head[:] = 0.0324, 0.1297, 0.0092 bone.tail[:] = 0.0343, 0.1210, 0.0039 bone.roll = -0.7479 bone.use_connect = True bone.parent = arm.edit_bones[bones['r_middle.001.L']] bones['r_middle.002.L'] = bone.name bone = 
arm.edit_bones.new('r_ring.002.L') bone.head[:] = 0.0389, 0.1311, 0.0092 bone.tail[:] = 0.0407, 0.1229, 0.0042 bone.roll = -0.7479 bone.use_connect = True bone.parent = arm.edit_bones[bones['r_ring.001.L']] bones['r_ring.002.L'] = bone.name bone = arm.edit_bones.new('r_pinky.002.L') bone.head[:] = 0.0476, 0.1412, 0.0074 bone.tail[:] = 0.0494, 0.1351, 0.0032 bone.roll = -0.8965 bone.use_connect = True bone.parent = arm.edit_bones[bones['r_pinky.001.L']] bones['r_pinky.002.L'] = bone.name bone = arm.edit_bones.new('r_index.002.R') bone.head[:] = -0.0217, 0.1325, 0.0070 bone.tail[:] = -0.0221, 0.1271, 0.0038 bone.roll = 0.2465 bone.use_connect = True bone.parent = arm.edit_bones[bones['r_index.001.R']] bones['r_index.002.R'] = bone.name bone = arm.edit_bones.new('r_middle.002.R') bone.head[:] = -0.0324, 0.1297, 0.0092 bone.tail[:] = -0.0343, 0.1210, 0.0039 bone.roll = 0.7479 bone.use_connect = True bone.parent = arm.edit_bones[bones['r_middle.001.R']] bones['r_middle.002.R'] = bone.name bone = arm.edit_bones.new('r_ring.002.R') bone.head[:] = -0.0389, 0.1311, 0.0092 bone.tail[:] = -0.0407, 0.1229, 0.0042 bone.roll = 0.7479 bone.use_connect = True bone.parent = arm.edit_bones[bones['r_ring.001.R']] bones['r_ring.002.R'] = bone.name bone = arm.edit_bones.new('r_pinky.002.R') bone.head[:] = -0.0476, 0.1412, 0.0074 bone.tail[:] = -0.0494, 0.1351, 0.0032 bone.roll = 0.8965 bone.use_connect = True bone.parent = arm.edit_bones[bones['r_pinky.001.R']] bones['r_pinky.002.R'] = bone.name bone = arm.edit_bones.new('nose.001') bone.head[:] = 0.0000, -0.2705, 0.2315 bone.tail[:] = 0.0000, -0.2804, 0.2205 bone.roll = 0.0253 bone.use_connect = True bone.parent = arm.edit_bones[bones['nose']] bones['nose.001'] = bone.name bone = arm.edit_bones.new('lip.T.L.001') bone.head[:] = 0.0142, -0.2683, 0.2053 bone.tail[:] = 0.0314, -0.2428, 0.2103 bone.roll = -0.0306 bone.use_connect = True bone.parent = arm.edit_bones[bones['lip.T.L']] bones['lip.T.L.001'] = bone.name bone = 
arm.edit_bones.new('lip.B.L.001') bone.head[:] = 0.0102, -0.2676, 0.2025 bone.tail[:] = 0.0314, -0.2428, 0.2103 bone.roll = 0.1360 bone.use_connect = True bone.parent = arm.edit_bones[bones['lip.B.L']] bones['lip.B.L.001'] = bone.name bone = arm.edit_bones.new('chin') bone.head[:] = 0.0000, -0.2524, 0.1977 bone.tail[:] = 0.0000, -0.2681, 0.1948 bone.roll = 0.0000 bone.use_connect = True bone.parent = arm.edit_bones[bones['jaw']] bones['chin'] = bone.name bone = arm.edit_bones.new('ear.L.001') bone.head[:] = 0.0259, -0.2360, 0.2749 bone.tail[:] = 0.0367, -0.2346, 0.2792 bone.roll = 1.1038 bone.use_connect = True bone.parent = arm.edit_bones[bones['ear.L']] bones['ear.L.001'] = bone.name bone = arm.edit_bones.new('ear.R.001') bone.head[:] = -0.0259, -0.2360, 0.2749 bone.tail[:] = -0.0367, -0.2346, 0.2792 bone.roll = -1.1038 bone.use_connect = True bone.parent = arm.edit_bones[bones['ear.R']] bones['ear.R.001'] = bone.name bone = arm.edit_bones.new('lip.T.R.001') bone.head[:] = -0.0142, -0.2683, 0.2053 bone.tail[:] = -0.0314, -0.2428, 0.2103 bone.roll = 0.0306 bone.use_connect = True bone.parent = arm.edit_bones[bones['lip.T.R']] bones['lip.T.R.001'] = bone.name bone = arm.edit_bones.new('lip.B.R.001') bone.head[:] = -0.0102, -0.2676, 0.2025 bone.tail[:] = -0.0314, -0.2428, 0.2103 bone.roll = -0.1360 bone.use_connect = True bone.parent = arm.edit_bones[bones['lip.B.R']] bones['lip.B.R.001'] = bone.name bone = arm.edit_bones.new('brow.B.L.001') bone.head[:] = 0.0336, -0.2577, 0.2472 bone.tail[:] = 0.0242, -0.2642, 0.2478 bone.roll = -0.0715 bone.use_connect = True bone.parent = arm.edit_bones[bones['brow.B.L']] bones['brow.B.L.001'] = bone.name bone = arm.edit_bones.new('lid.T.L.001') bone.head[:] = 0.0327, -0.2600, 0.2432 bone.tail[:] = 0.0236, -0.2656, 0.2440 bone.roll = 0.1058 bone.use_connect = True bone.parent = arm.edit_bones[bones['lid.T.L']] bones['lid.T.L.001'] = bone.name bone = arm.edit_bones.new('brow.B.R.001') bone.head[:] = -0.0336, -0.2577, 0.2472 
bone.tail[:] = -0.0242, -0.2642, 0.2478 bone.roll = 0.0715 bone.use_connect = True bone.parent = arm.edit_bones[bones['brow.B.R']] bones['brow.B.R.001'] = bone.name bone = arm.edit_bones.new('lid.T.R.001') bone.head[:] = -0.0327, -0.2600, 0.2432 bone.tail[:] = -0.0236, -0.2656, 0.2440 bone.roll = -0.1058 bone.use_connect = True bone.parent = arm.edit_bones[bones['lid.T.R']] bones['lid.T.R.001'] = bone.name bone = arm.edit_bones.new('forehead.L.001') bone.head[:] = 0.0287, -0.2477, 0.2649 bone.tail[:] = 0.0241, -0.2601, 0.2567 bone.roll = 2.1575 bone.use_connect = False bone.parent = arm.edit_bones[bones['forehead.L']] bones['forehead.L.001'] = bone.name bone = arm.edit_bones.new('forehead.R.001') bone.head[:] = -0.0287, -0.2477, 0.2649 bone.tail[:] = -0.0241, -0.2601, 0.2567 bone.roll = -2.1575 bone.use_connect = False bone.parent = arm.edit_bones[bones['forehead.R']] bones['forehead.R.001'] = bone.name bone = arm.edit_bones.new('cheek.T.L.001') bone.head[:] = 0.0308, -0.2584, 0.2187 bone.tail[:] = 0.0121, -0.2695, 0.2220 bone.roll = -0.5048 bone.use_connect = True bone.parent = arm.edit_bones[bones['cheek.T.L']] bones['cheek.T.L.001'] = bone.name bone = arm.edit_bones.new('cheek.T.R.001') bone.head[:] = -0.0308, -0.2584, 0.2187 bone.tail[:] = -0.0121, -0.2695, 0.2220 bone.roll = 0.5048 bone.use_connect = True bone.parent = arm.edit_bones[bones['cheek.T.R']] bones['cheek.T.R.001'] = bone.name bone = arm.edit_bones.new('tongue.001') bone.head[:] = 0.0000, -0.2610, 0.2098 bone.tail[:] = 0.0000, -0.2461, 0.2100 bone.roll = 0.0000 bone.use_connect = True bone.parent = arm.edit_bones[bones['tongue']] bones['tongue.001'] = bone.name bone = arm.edit_bones.new('f_palm.004.L') bone.head[:] = 0.0393, -0.1278, 0.0100 bone.tail[:] = 0.0406, -0.1304, 0.0100 bone.roll = -0.0006 bone.use_connect = False bone.parent = arm.edit_bones[bones['f_toe.L']] bones['f_palm.004.L'] = bone.name bone = arm.edit_bones.new('f_palm.001.L') bone.head[:] = 0.0216, -0.1278, 0.0100 bone.tail[:] = 
0.0199, -0.1331, 0.0100 bone.roll = 0.0004 bone.use_connect = False bone.parent = arm.edit_bones[bones['f_toe.L']] bones['f_palm.001.L'] = bone.name bone = arm.edit_bones.new('f_palm.002.L') bone.head[:] = 0.0273, -0.1278, 0.0100 bone.tail[:] = 0.0273, -0.1345, 0.0100 bone.roll = 3.1416 bone.use_connect = False bone.parent = arm.edit_bones[bones['f_toe.L']] bones['f_palm.002.L'] = bone.name bone = arm.edit_bones.new('f_palm.003.L') bone.head[:] = 0.0341, -0.1278, 0.0100 bone.tail[:] = 0.0340, -0.1345, 0.0100 bone.roll = 0.0101 bone.use_connect = False bone.parent = arm.edit_bones[bones['f_toe.L']] bones['f_palm.003.L'] = bone.name bone = arm.edit_bones.new('f_palm.004.R') bone.head[:] = -0.0393, -0.1278, 0.0100 bone.tail[:] = -0.0406, -0.1304, 0.0100 bone.roll = 0.0006 bone.use_connect = False bone.parent = arm.edit_bones[bones['f_toe.R']] bones['f_palm.004.R'] = bone.name bone = arm.edit_bones.new('f_palm.001.R') bone.head[:] = -0.0216, -0.1278, 0.0100 bone.tail[:] = -0.0199, -0.1331, 0.0100 bone.roll = -0.0004 bone.use_connect = False bone.parent = arm.edit_bones[bones['f_toe.R']] bones['f_palm.001.R'] = bone.name bone = arm.edit_bones.new('f_palm.002.R') bone.head[:] = -0.0273, -0.1278, 0.0100 bone.tail[:] = -0.0273, -0.1345, 0.0100 bone.roll = -3.1416 bone.use_connect = False bone.parent = arm.edit_bones[bones['f_toe.R']] bones['f_palm.002.R'] = bone.name bone = arm.edit_bones.new('f_palm.003.R') bone.head[:] = -0.0341, -0.1278, 0.0100 bone.tail[:] = -0.0340, -0.1345, 0.0100 bone.roll = -0.0101 bone.use_connect = False bone.parent = arm.edit_bones[bones['f_toe.R']] bones['f_palm.003.R'] = bone.name bone = arm.edit_bones.new('nose.002') bone.head[:] = 0.0000, -0.2804, 0.2205 bone.tail[:] = 0.0000, -0.2787, 0.2155 bone.roll = 0.0000 bone.use_connect = True bone.parent = arm.edit_bones[bones['nose.001']] bones['nose.002'] = bone.name bone = arm.edit_bones.new('chin.001') bone.head[:] = 0.0000, -0.2681, 0.1948 bone.tail[:] = 0.0000, -0.2749, 0.2015 bone.roll = 
0.0000 bone.use_connect = True bone.parent = arm.edit_bones[bones['chin']] bones['chin.001'] = bone.name bone = arm.edit_bones.new('ear.L.002') bone.head[:] = 0.0367, -0.2346, 0.2792 bone.tail[:] = 0.0513, -0.2371, 0.2879 bone.roll = 1.6702 bone.use_connect = True bone.parent = arm.edit_bones[bones['ear.L.001']] bones['ear.L.002'] = bone.name bone = arm.edit_bones.new('ear.R.002') bone.head[:] = -0.0367, -0.2346, 0.2792 bone.tail[:] = -0.0513, -0.2371, 0.2879 bone.roll = -1.6702 bone.use_connect = True bone.parent = arm.edit_bones[bones['ear.R.001']] bones['ear.R.002'] = bone.name bone = arm.edit_bones.new('brow.B.L.002') bone.head[:] = 0.0242, -0.2642, 0.2478 bone.tail[:] = 0.0131, -0.2694, 0.2432 bone.roll = -0.1515 bone.use_connect = True bone.parent = arm.edit_bones[bones['brow.B.L.001']] bones['brow.B.L.002'] = bone.name bone = arm.edit_bones.new('lid.T.L.002') bone.head[:] = 0.0236, -0.2656, 0.2440 bone.tail[:] = 0.0137, -0.2665, 0.2401 bone.roll = -0.1882 bone.use_connect = True bone.parent = arm.edit_bones[bones['lid.T.L.001']] bones['lid.T.L.002'] = bone.name bone = arm.edit_bones.new('brow.B.R.002') bone.head[:] = -0.0242, -0.2642, 0.2478 bone.tail[:] = -0.0131, -0.2694, 0.2432 bone.roll = 0.1515 bone.use_connect = True bone.parent = arm.edit_bones[bones['brow.B.R.001']] bones['brow.B.R.002'] = bone.name bone = arm.edit_bones.new('lid.T.R.002') bone.head[:] = -0.0236, -0.2656, 0.2440 bone.tail[:] = -0.0137, -0.2665, 0.2401 bone.roll = 0.1882 bone.use_connect = True bone.parent = arm.edit_bones[bones['lid.T.R.001']] bones['lid.T.R.002'] = bone.name bone = arm.edit_bones.new('forehead.L.002') bone.head[:] = 0.0405, -0.2354, 0.2607 bone.tail[:] = 0.0402, -0.2481, 0.2487 bone.roll = 0.5185 bone.use_connect = False bone.parent = arm.edit_bones[bones['forehead.L.001']] bones['forehead.L.002'] = bone.name bone = arm.edit_bones.new('forehead.R.002') bone.head[:] = -0.0405, -0.2354, 0.2607 bone.tail[:] = -0.0402, -0.2481, 0.2487 bone.roll = -0.5185 
bone.use_connect = False bone.parent = arm.edit_bones[bones['forehead.R.001']] bones['forehead.R.002'] = bone.name bone = arm.edit_bones.new('nose.L') bone.head[:] = 0.0121, -0.2695, 0.2220 bone.tail[:] = 0.0062, -0.2742, 0.2210 bone.roll = 2.5249 bone.use_connect = True bone.parent = arm.edit_bones[bones['cheek.T.L.001']] bones['nose.L'] = bone.name bone = arm.edit_bones.new('nose.R') bone.head[:] = -0.0121, -0.2695, 0.2220 bone.tail[:] = -0.0062, -0.2742, 0.2210 bone.roll = -2.5249 bone.use_connect = True bone.parent = arm.edit_bones[bones['cheek.T.R.001']] bones['nose.R'] = bone.name bone = arm.edit_bones.new('tongue.002') bone.head[:] = 0.0000, -0.2461, 0.2100 bone.tail[:] = 0.0000, -0.2309, 0.2083 bone.roll = 0.0000 bone.use_connect = True bone.parent = arm.edit_bones[bones['tongue.001']] bones['tongue.002'] = bone.name bone = arm.edit_bones.new('f_pinky.001.L') bone.head[:] = 0.0406, -0.1304, 0.0074 bone.tail[:] = 0.0408, -0.1337, 0.0065 bone.roll = -0.6234 bone.use_connect = False bone.parent = arm.edit_bones[bones['f_palm.004.L']] bones['f_pinky.001.L'] = bone.name bone = arm.edit_bones.new('f_index.001.L') bone.head[:] = 0.0199, -0.1331, 0.0077 bone.tail[:] = 0.0193, -0.1372, 0.0060 bone.roll = 0.7154 bone.use_connect = False bone.parent = arm.edit_bones[bones['f_palm.001.L']] bones['f_index.001.L'] = bone.name bone = arm.edit_bones.new('f_middle.001.L') bone.head[:] = 0.0273, -0.1345, 0.0107 bone.tail[:] = 0.0273, -0.1407, 0.0082 bone.roll = 0.0000 bone.use_connect = False bone.parent = arm.edit_bones[bones['f_palm.002.L']] bones['f_middle.001.L'] = bone.name bone = arm.edit_bones.new('f_ring.001.L') bone.head[:] = 0.0340, -0.1345, 0.0107 bone.tail[:] = 0.0340, -0.1407, 0.0082 bone.roll = 0.0000 bone.use_connect = False bone.parent = arm.edit_bones[bones['f_palm.003.L']] bones['f_ring.001.L'] = bone.name bone = arm.edit_bones.new('f_pinky.001.R') bone.head[:] = -0.0406, -0.1304, 0.0074 bone.tail[:] = -0.0408, -0.1337, 0.0065 bone.roll = 0.6234 
bone.use_connect = False bone.parent = arm.edit_bones[bones['f_palm.004.R']] bones['f_pinky.001.R'] = bone.name bone = arm.edit_bones.new('f_index.001.R') bone.head[:] = -0.0199, -0.1331, 0.0077 bone.tail[:] = -0.0193, -0.1372, 0.0060 bone.roll = -0.7154 bone.use_connect = False bone.parent = arm.edit_bones[bones['f_palm.001.R']] bones['f_index.001.R'] = bone.name bone = arm.edit_bones.new('f_middle.001.R') bone.head[:] = -0.0273, -0.1345, 0.0107 bone.tail[:] = -0.0273, -0.1407, 0.0082 bone.roll = -0.0000 bone.use_connect = False bone.parent = arm.edit_bones[bones['f_palm.002.R']] bones['f_middle.001.R'] = bone.name bone = arm.edit_bones.new('f_ring.001.R') bone.head[:] = -0.0340, -0.1345, 0.0107 bone.tail[:] = -0.0340, -0.1407, 0.0082 bone.roll = -0.0000 bone.use_connect = False bone.parent = arm.edit_bones[bones['f_palm.003.R']] bones['f_ring.001.R'] = bone.name bone = arm.edit_bones.new('nose.003') bone.head[:] = 0.0000, -0.2787, 0.2155 bone.tail[:] = 0.0000, -0.2788, 0.2123 bone.roll = -0.0000 bone.use_connect = True bone.parent = arm.edit_bones[bones['nose.002']] bones['nose.003'] = bone.name bone = arm.edit_bones.new('ear.L.003') bone.head[:] = 0.0513, -0.2371, 0.2879 bone.tail[:] = 0.0498, -0.2277, 0.2690 bone.roll = -2.2767 bone.use_connect = True bone.parent = arm.edit_bones[bones['ear.L.002']] bones['ear.L.003'] = bone.name bone = arm.edit_bones.new('ear.R.003') bone.head[:] = -0.0513, -0.2371, 0.2879 bone.tail[:] = -0.0498, -0.2277, 0.2690 bone.roll = 2.2767 bone.use_connect = True bone.parent = arm.edit_bones[bones['ear.R.002']] bones['ear.R.003'] = bone.name bone = arm.edit_bones.new('brow.B.L.003') bone.head[:] = 0.0131, -0.2694, 0.2432 bone.tail[:] = 0.0086, -0.2691, 0.2358 bone.roll = -0.2157 bone.use_connect = True bone.parent = arm.edit_bones[bones['brow.B.L.002']] bones['brow.B.L.003'] = bone.name bone = arm.edit_bones.new('lid.T.L.003') bone.head[:] = 0.0137, -0.2665, 0.2401 bone.tail[:] = 0.0097, -0.2657, 0.2291 bone.roll = -0.4253 
bone.use_connect = True bone.parent = arm.edit_bones[bones['lid.T.L.002']] bones['lid.T.L.003'] = bone.name bone = arm.edit_bones.new('brow.B.R.003') bone.head[:] = -0.0131, -0.2694, 0.2432 bone.tail[:] = -0.0086, -0.2691, 0.2358 bone.roll = 0.2157 bone.use_connect = True bone.parent = arm.edit_bones[bones['brow.B.R.002']] bones['brow.B.R.003'] = bone.name bone = arm.edit_bones.new('lid.T.R.003') bone.head[:] = -0.0137, -0.2665, 0.2401 bone.tail[:] = -0.0097, -0.2657, 0.2291 bone.roll = 0.4253 bone.use_connect = True bone.parent = arm.edit_bones[bones['lid.T.R.002']] bones['lid.T.R.003'] = bone.name bone = arm.edit_bones.new('temple.L') bone.head[:] = 0.0367, -0.2123, 0.2525 bone.tail[:] = 0.0380, -0.2135, 0.2277 bone.roll = -0.0789 bone.use_connect = False bone.parent = arm.edit_bones[bones['forehead.L.002']] bones['temple.L'] = bone.name bone = arm.edit_bones.new('temple.R') bone.head[:] = -0.0367, -0.2123, 0.2525 bone.tail[:] = -0.0380, -0.2135, 0.2277 bone.roll = 0.0789 bone.use_connect = False bone.parent = arm.edit_bones[bones['forehead.R.002']] bones['temple.R'] = bone.name bone = arm.edit_bones.new('nose.L.001') bone.head[:] = 0.0062, -0.2742, 0.2210 bone.tail[:] = 0.0000, -0.2804, 0.2205 bone.roll = 0.1646 bone.use_connect = True bone.parent = arm.edit_bones[bones['nose.L']] bones['nose.L.001'] = bone.name bone = arm.edit_bones.new('nose.R.001') bone.head[:] = -0.0062, -0.2742, 0.2210 bone.tail[:] = 0.0000, -0.2804, 0.2205 bone.roll = -0.1646 bone.use_connect = True bone.parent = arm.edit_bones[bones['nose.R']] bones['nose.R.001'] = bone.name bone = arm.edit_bones.new('f_pinky.002.L') bone.head[:] = 0.0408, -0.1337, 0.0065 bone.tail[:] = 0.0413, -0.1400, 0.0023 bone.roll = -0.2560 bone.use_connect = True bone.parent = arm.edit_bones[bones['f_pinky.001.L']] bones['f_pinky.002.L'] = bone.name bone = arm.edit_bones.new('f_index.002.L') bone.head[:] = 0.0193, -0.1372, 0.0060 bone.tail[:] = 0.0186, -0.1427, 0.0028 bone.roll = 0.5229 bone.use_connect = True 
bone.parent = arm.edit_bones[bones['f_index.001.L']] bones['f_index.002.L'] = bone.name bone = arm.edit_bones.new('f_middle.002.L') bone.head[:] = 0.0273, -0.1407, 0.0082 bone.tail[:] = 0.0273, -0.1496, 0.0030 bone.roll = 0.0000 bone.use_connect = True bone.parent = arm.edit_bones[bones['f_middle.001.L']] bones['f_middle.002.L'] = bone.name bone = arm.edit_bones.new('f_ring.002.L') bone.head[:] = 0.0340, -0.1407, 0.0082 bone.tail[:] = 0.0340, -0.1491, 0.0033 bone.roll = 0.0000 bone.use_connect = True bone.parent = arm.edit_bones[bones['f_ring.001.L']] bones['f_ring.002.L'] = bone.name bone = arm.edit_bones.new('f_pinky.002.R') bone.head[:] = -0.0408, -0.1337, 0.0065 bone.tail[:] = -0.0413, -0.1400, 0.0023 bone.roll = 0.2560 bone.use_connect = True bone.parent = arm.edit_bones[bones['f_pinky.001.R']] bones['f_pinky.002.R'] = bone.name bone = arm.edit_bones.new('f_index.002.R') bone.head[:] = -0.0193, -0.1372, 0.0060 bone.tail[:] = -0.0186, -0.1427, 0.0028 bone.roll = -0.5229 bone.use_connect = True bone.parent = arm.edit_bones[bones['f_index.001.R']] bones['f_index.002.R'] = bone.name bone = arm.edit_bones.new('f_middle.002.R') bone.head[:] = -0.0273, -0.1407, 0.0082 bone.tail[:] = -0.0273, -0.1496, 0.0030 bone.roll = -0.0000 bone.use_connect = True bone.parent = arm.edit_bones[bones['f_middle.001.R']] bones['f_middle.002.R'] = bone.name bone = arm.edit_bones.new('f_ring.002.R') bone.head[:] = -0.0340, -0.1407, 0.0082 bone.tail[:] = -0.0340, -0.1491, 0.0033 bone.roll = -0.0000 bone.use_connect = True bone.parent = arm.edit_bones[bones['f_ring.001.R']] bones['f_ring.002.R'] = bone.name bone = arm.edit_bones.new('nose.004') bone.head[:] = 0.0000, -0.2788, 0.2123 bone.tail[:] = 0.0000, -0.2785, 0.2091 bone.roll = 0.0000 bone.use_connect = True bone.parent = arm.edit_bones[bones['nose.003']] bones['nose.004'] = bone.name bone = arm.edit_bones.new('ear.L.004') bone.head[:] = 0.0498, -0.2277, 0.2690 bone.tail[:] = 0.0361, -0.2279, 0.2662 bone.roll = -3.8181 
bone.use_connect = True bone.parent = arm.edit_bones[bones['ear.L.003']] bones['ear.L.004'] = bone.name bone = arm.edit_bones.new('ear.R.004') bone.head[:] = -0.0498, -0.2277, 0.2690 bone.tail[:] = -0.0361, -0.2279, 0.2662 bone.roll = 3.8181 bone.use_connect = True bone.parent = arm.edit_bones[bones['ear.R.003']] bones['ear.R.004'] = bone.name bone = arm.edit_bones.new('lid.B.L') bone.head[:] = 0.0097, -0.2657, 0.2291 bone.tail[:] = 0.0150, -0.2661, 0.2277 bone.roll = 0.0693 bone.use_connect = True bone.parent = arm.edit_bones[bones['lid.T.L.003']] bones['lid.B.L'] = bone.name bone = arm.edit_bones.new('lid.B.R') bone.head[:] = -0.0097, -0.2657, 0.2291 bone.tail[:] = -0.0150, -0.2661, 0.2277 bone.roll = -0.0693 bone.use_connect = True bone.parent = arm.edit_bones[bones['lid.T.R.003']] bones['lid.B.R'] = bone.name bone = arm.edit_bones.new('jaw.L') bone.head[:] = 0.0380, -0.2135, 0.2277 bone.tail[:] = 0.0284, -0.2162, 0.2076 bone.roll = 0.1964 bone.use_connect = True bone.parent = arm.edit_bones[bones['temple.L']] bones['jaw.L'] = bone.name bone = arm.edit_bones.new('jaw.R') bone.head[:] = -0.0380, -0.2135, 0.2277 bone.tail[:] = -0.0284, -0.2162, 0.2076 bone.roll = -0.1964 bone.use_connect = True bone.parent = arm.edit_bones[bones['temple.R']] bones['jaw.R'] = bone.name bone = arm.edit_bones.new('lid.B.L.001') bone.head[:] = 0.0150, -0.2661, 0.2277 bone.tail[:] = 0.0221, -0.2652, 0.2262 bone.roll = 0.1759 bone.use_connect = True bone.parent = arm.edit_bones[bones['lid.B.L']] bones['lid.B.L.001'] = bone.name bone = arm.edit_bones.new('lid.B.R.001') bone.head[:] = -0.0150, -0.2661, 0.2277 bone.tail[:] = -0.0221, -0.2652, 0.2262 bone.roll = -0.1759 bone.use_connect = True bone.parent = arm.edit_bones[bones['lid.B.R']] bones['lid.B.R.001'] = bone.name bone = arm.edit_bones.new('jaw.L.001') bone.head[:] = 0.0284, -0.2162, 0.2076 bone.tail[:] = 0.0235, -0.2371, 0.2014 bone.roll = -0.0005 bone.use_connect = True bone.parent = arm.edit_bones[bones['jaw.L']] 
bones['jaw.L.001'] = bone.name bone = arm.edit_bones.new('jaw.R.001') bone.head[:] = -0.0284, -0.2162, 0.2076 bone.tail[:] = -0.0235, -0.2371, 0.2014 bone.roll = 0.0005 bone.use_connect = True bone.parent = arm.edit_bones[bones['jaw.R']] bones['jaw.R.001'] = bone.name bone = arm.edit_bones.new('lid.B.L.002') bone.head[:] = 0.0221, -0.2652, 0.2262 bone.tail[:] = 0.0333, -0.2602, 0.2291 bone.roll = 0.0161 bone.use_connect = True bone.parent = arm.edit_bones[bones['lid.B.L.001']] bones['lid.B.L.002'] = bone.name bone = arm.edit_bones.new('lid.B.R.002') bone.head[:] = -0.0221, -0.2652, 0.2262 bone.tail[:] = -0.0333, -0.2602, 0.2291 bone.roll = -0.0161 bone.use_connect = True bone.parent = arm.edit_bones[bones['lid.B.R.001']] bones['lid.B.R.002'] = bone.name bone = arm.edit_bones.new('chin.L') bone.head[:] = 0.0235, -0.2371, 0.2014 bone.tail[:] = 0.0287, -0.2395, 0.2103 bone.roll = 0.4176 bone.use_connect = True bone.parent = arm.edit_bones[bones['jaw.L.001']] bones['chin.L'] = bone.name bone = arm.edit_bones.new('chin.R') bone.head[:] = -0.0235, -0.2371, 0.2014 bone.tail[:] = -0.0287, -0.2395, 0.2103 bone.roll = -0.4176 bone.use_connect = True bone.parent = arm.edit_bones[bones['jaw.R.001']] bones['chin.R'] = bone.name bone = arm.edit_bones.new('lid.B.L.003') bone.head[:] = 0.0333, -0.2602, 0.2291 bone.tail[:] = 0.0398, -0.2525, 0.2396 bone.roll = -0.0675 bone.use_connect = True bone.parent = arm.edit_bones[bones['lid.B.L.002']] bones['lid.B.L.003'] = bone.name bone = arm.edit_bones.new('lid.B.R.003') bone.head[:] = -0.0333, -0.2602, 0.2291 bone.tail[:] = -0.0398, -0.2525, 0.2396 bone.roll = 0.0675 bone.use_connect = True bone.parent = arm.edit_bones[bones['lid.B.R.002']] bones['lid.B.R.003'] = bone.name bone = arm.edit_bones.new('cheek.B.L') bone.head[:] = 0.0287, -0.2395, 0.2103 bone.tail[:] = 0.0448, -0.2396, 0.2234 bone.roll = -0.3125 bone.use_connect = True bone.parent = arm.edit_bones[bones['chin.L']] bones['cheek.B.L'] = bone.name bone = 
arm.edit_bones.new('cheek.B.R') bone.head[:] = -0.0287, -0.2395, 0.2103 bone.tail[:] = -0.0448, -0.2396, 0.2234 bone.roll = 0.3125 bone.use_connect = True bone.parent = arm.edit_bones[bones['chin.R']] bones['cheek.B.R'] = bone.name bone = arm.edit_bones.new('cheek.B.L.001') bone.head[:] = 0.0448, -0.2396, 0.2234 bone.tail[:] = 0.0478, -0.2312, 0.2379 bone.roll = -0.0215 bone.use_connect = True bone.parent = arm.edit_bones[bones['cheek.B.L']] bones['cheek.B.L.001'] = bone.name bone = arm.edit_bones.new('cheek.B.R.001') bone.head[:] = -0.0448, -0.2396, 0.2234 bone.tail[:] = -0.0478, -0.2312, 0.2379 bone.roll = 0.0215 bone.use_connect = True bone.parent = arm.edit_bones[bones['cheek.B.R']] bones['cheek.B.R.001'] = bone.name bone = arm.edit_bones.new('brow.T.L') bone.head[:] = 0.0478, -0.2312, 0.2379 bone.tail[:] = 0.0402, -0.2481, 0.2487 bone.roll = -0.6301 bone.use_connect = True bone.parent = arm.edit_bones[bones['cheek.B.L.001']] bones['brow.T.L'] = bone.name bone = arm.edit_bones.new('brow.T.R') bone.head[:] = -0.0478, -0.2312, 0.2379 bone.tail[:] = -0.0402, -0.2481, 0.2487 bone.roll = 0.6301 bone.use_connect = True bone.parent = arm.edit_bones[bones['cheek.B.R.001']] bones['brow.T.R'] = bone.name bone = arm.edit_bones.new('brow.T.L.001') bone.head[:] = 0.0402, -0.2481, 0.2487 bone.tail[:] = 0.0241, -0.2601, 0.2567 bone.roll = 0.3622 bone.use_connect = True bone.parent = arm.edit_bones[bones['brow.T.L']] bones['brow.T.L.001'] = bone.name bone = arm.edit_bones.new('brow.T.R.001') bone.head[:] = -0.0402, -0.2481, 0.2487 bone.tail[:] = -0.0241, -0.2601, 0.2567 bone.roll = -0.3622 bone.use_connect = True bone.parent = arm.edit_bones[bones['brow.T.R']] bones['brow.T.R.001'] = bone.name bone = arm.edit_bones.new('brow.T.L.002') bone.head[:] = 0.0241, -0.2601, 0.2567 bone.tail[:] = 0.0097, -0.2694, 0.2527 bone.roll = 0.0684 bone.use_connect = True bone.parent = arm.edit_bones[bones['brow.T.L.001']] bones['brow.T.L.002'] = bone.name bone = 
arm.edit_bones.new('brow.T.R.002') bone.head[:] = -0.0241, -0.2601, 0.2567 bone.tail[:] = -0.0097, -0.2694, 0.2527 bone.roll = -0.0684 bone.use_connect = True bone.parent = arm.edit_bones[bones['brow.T.R.001']] bones['brow.T.R.002'] = bone.name bone = arm.edit_bones.new('brow.T.L.003') bone.head[:] = 0.0097, -0.2694, 0.2527 bone.tail[:] = 0.0000, -0.2709, 0.2463 bone.roll = 0.0020 bone.use_connect = True bone.parent = arm.edit_bones[bones['brow.T.L.002']] bones['brow.T.L.003'] = bone.name bone = arm.edit_bones.new('brow.T.R.003') bone.head[:] = -0.0097, -0.2694, 0.2527 bone.tail[:] = 0.0000, -0.2709, 0.2463 bone.roll = -0.0020 bone.use_connect = True bone.parent = arm.edit_bones[bones['brow.T.R.002']] bones['brow.T.R.003'] = bone.name bpy.ops.object.mode_set(mode='OBJECT') pbone = obj.pose.bones[bones['tail.004']] pbone.rigify_type = 'spines.super_spine' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False] try: pbone.rigify_parameters.use_tail = True except AttributeError: pass try: pbone.rigify_parameters.pivot_pos = 6 except AttributeError: pass try: pbone.rigify_parameters.neck_pos = 9 except AttributeError: pass try: pbone.rigify_parameters.tail_pos = 4 except AttributeError: pass try: pbone.rigify_parameters.copy_rotation_axes = [True, True, True] except AttributeError: pass pbone = obj.pose.bones[bones['tail.003']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [False, False, False, False, 
False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['tail.002']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['tail.001']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False] try: pbone.rigify_parameters.tweak_layers = [False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False] except AttributeError: pass pbone = obj.pose.bones[bones['spine']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] 
try: pbone.rigify_parameters.neck_pos = 5 except AttributeError: pass try: pbone.rigify_parameters.tweak_layers = [False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] except AttributeError: pass pbone = obj.pose.bones[bones['spine.001']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['pelvis.L']] pbone.rigify_type = 'basic.super_copy' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] try: pbone.rigify_parameters.make_control = False except AttributeError: pass pbone = obj.pose.bones[bones['pelvis.R']] pbone.rigify_type = 'basic.super_copy' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] try: 
pbone.rigify_parameters.make_control = False except AttributeError: pass pbone = obj.pose.bones[bones['pelvis.C']] pbone.rigify_type = 'basic.super_copy' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] try: pbone.rigify_parameters.make_widget = False except AttributeError: pass try: pbone.rigify_parameters.make_control = False except AttributeError: pass pbone = obj.pose.bones[bones['spine.002']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['thigh.L']] pbone.rigify_type = 'limbs.super_limb' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [False, False, False, False, False, False, False, False, False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] try: pbone.rigify_parameters.limb_type = "paw" except AttributeError: pass try: pbone.rigify_parameters.fk_layers = [False, False, False, False, False, False, False, False, False, False, False, False, False, False, True, False, 
False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] except AttributeError: pass try: pbone.rigify_parameters.tweak_layers = [False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] except AttributeError: pass try: pbone.rigify_parameters.segments = 2 except AttributeError: pass pbone = obj.pose.bones[bones['thigh.R']] pbone.rigify_type = 'limbs.super_limb' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] try: pbone.rigify_parameters.limb_type = "paw" except AttributeError: pass try: pbone.rigify_parameters.fk_layers = [False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False] except AttributeError: pass try: pbone.rigify_parameters.tweak_layers = [False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False] except AttributeError: pass pbone = obj.pose.bones[bones['spine.003']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [False, False, False, True, 
False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['belly.C']] pbone.rigify_type = 'basic.super_copy' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['shin.L']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [False, False, False, False, False, False, False, False, False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['shin.R']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['spine.004']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = 
[False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['Breast.C']] pbone.rigify_type = 'basic.super_copy' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['shoulder.L']] pbone.rigify_type = 'basic.super_copy' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'YXZ' pbone.bone.layers = [False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] try: pbone.rigify_parameters.make_control = True except AttributeError: pass try: pbone.rigify_parameters.make_widget = False except AttributeError: pass pbone = obj.pose.bones[bones['shoulder.R']] pbone.rigify_type = 'basic.super_copy' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'YXZ' pbone.bone.layers = [False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] try: pbone.rigify_parameters.make_control = True except AttributeError: 
pass try: pbone.rigify_parameters.make_widget = False except AttributeError: pass pbone = obj.pose.bones[bones['foot.L']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [False, False, False, False, False, False, False, False, False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['foot.R']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['spine.005']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['upper_arm.L']] pbone.rigify_type = 'limbs.super_limb' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [False, False, False, False, False, False, False, True, False, False, False, False, False, False, False, False, 
False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] try: pbone.rigify_parameters.limb_type = "paw" except AttributeError: pass try: pbone.rigify_parameters.fk_layers = [False, False, False, False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] except AttributeError: pass try: pbone.rigify_parameters.tweak_layers = [False, False, False, False, False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] except AttributeError: pass pbone = obj.pose.bones[bones['upper_arm.R']] pbone.rigify_type = 'limbs.super_limb' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [False, False, False, False, False, False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] try: pbone.rigify_parameters.fk_layers = [False, False, False, False, False, False, False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] except AttributeError: pass try: pbone.rigify_parameters.tweak_layers = [False, False, False, False, False, False, False, False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] except AttributeError: pass try: pbone.rigify_parameters.limb_type = "paw" except AttributeError: pass pbone = 
obj.pose.bones[bones['r_toe.L']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [False, False, False, False, False, False, False, False, False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['r_toe.R']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['spine.006']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['forearm.L']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [False, False, False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, 
False] pbone = obj.pose.bones[bones['forearm.R']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [False, False, False, False, False, False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['r_palm.001.L']] pbone.rigify_type = 'limbs.super_palm' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['r_palm.002.L']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['r_palm.003.L']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, 
False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['r_palm.004.L']] pbone.rigify_type = 'limbs.super_palm' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['r_palm.001.R']] pbone.rigify_type = 'limbs.super_palm' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['r_palm.002.R']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['r_palm.003.R']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, 
False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['r_palm.004.R']] pbone.rigify_type = 'limbs.super_palm' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['face']] pbone.rigify_type = 'faces.super_face' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] try: pbone.rigify_parameters.secondary_layers = [False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] except AttributeError: pass pbone = obj.pose.bones[bones['hand.L']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [False, False, False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['hand.R']] 
pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [False, False, False, False, False, False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['r_index.001.L']] pbone.rigify_type = 'limbs.simple_tentacle' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] try: pbone.rigify_parameters.tweak_layers = [False, False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] except AttributeError: pass pbone = obj.pose.bones[bones['r_middle.001.L']] pbone.rigify_type = 'limbs.simple_tentacle' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] try: pbone.rigify_parameters.tweak_layers = [False, False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, 
False, False, False, False, False, False, False, False, False, False, False, False] except AttributeError: pass pbone = obj.pose.bones[bones['r_ring.001.L']] pbone.rigify_type = 'limbs.simple_tentacle' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] try: pbone.rigify_parameters.tweak_layers = [False, False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] except AttributeError: pass pbone = obj.pose.bones[bones['r_pinky.001.L']] pbone.rigify_type = 'limbs.simple_tentacle' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] try: pbone.rigify_parameters.tweak_layers = [False, False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] except AttributeError: pass pbone = obj.pose.bones[bones['r_index.001.R']] pbone.rigify_type = 'limbs.simple_tentacle' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, 
False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] try: pbone.rigify_parameters.tweak_layers = [False, False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] except AttributeError: pass pbone = obj.pose.bones[bones['r_middle.001.R']] pbone.rigify_type = 'limbs.simple_tentacle' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] try: pbone.rigify_parameters.tweak_layers = [False, False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] except AttributeError: pass pbone = obj.pose.bones[bones['r_ring.001.R']] pbone.rigify_type = 'limbs.simple_tentacle' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] try: pbone.rigify_parameters.tweak_layers = [False, 
False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] except AttributeError: pass pbone = obj.pose.bones[bones['r_pinky.001.R']] pbone.rigify_type = 'limbs.simple_tentacle' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] try: pbone.rigify_parameters.tweak_layers = [False, False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] except AttributeError: pass pbone = obj.pose.bones[bones['nose']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['lip.T.L']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, 
False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['lip.B.L']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['jaw']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['ear.L']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['ear.R']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, 
False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['lip.T.R']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['lip.B.R']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['brow.B.L']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['lid.T.L']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, 
False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['brow.B.R']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['lid.T.R']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['forehead.L']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['forehead.R']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, 
False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['eye.L']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['eye.R']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['cheek.T.L']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['cheek.T.R']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, 
False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['teeth.T']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['teeth.B']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['tongue']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['f_toe.L']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [False, False, False, False, False, False, False, True, False, False, False, False, False, False, 
False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['f_toe.R']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [False, False, False, False, False, False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['r_index.002.L']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['r_middle.002.L']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['r_ring.002.L']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [False, False, False, False, False, True, False, False, False, False, 
False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['r_pinky.002.L']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['r_index.002.R']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['r_middle.002.R']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['r_ring.002.R']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [False, False, False, False, False, 
True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['r_pinky.002.R']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [False, False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['nose.001']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['lip.T.L.001']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['lip.B.L.001']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [True, 
False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['chin']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['ear.L.001']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['ear.R.001']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['lip.T.R.001']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' 
# Finish the layer assignment for the bone configured on the preceding line
# (`pbone` is already bound to it there): enable only armature layer 0.
pbone.bone.layers = [i == 0 for i in range(32)]

# Single-layer masks for the 32 armature layers, as used throughout this
# section: layer 0 holds the face-area bones (lip/brow/lid/nose/ear/jaw/...),
# layer 5 the finger/palm bones, and layer 6 the tweak controls of the
# 'limbs.simple_tentacle' finger rigs.
_LAYER_FACE = [i == 0 for i in range(32)]
_LAYER_HAND = [i == 5 for i in range(32)]
_LAYER_TWEAK = [i == 6 for i in range(32)]

# (bone key, rigify type, bone-layer mask) for every bone configured here,
# in the exact order the generated code assigned them.
_BONE_SPECS = [
    ('lip.B.R.001', '', _LAYER_FACE),
    ('brow.B.L.001', '', _LAYER_FACE),
    ('lid.T.L.001', '', _LAYER_FACE),
    ('brow.B.R.001', '', _LAYER_FACE),
    ('lid.T.R.001', '', _LAYER_FACE),
    ('forehead.L.001', '', _LAYER_FACE),
    ('forehead.R.001', '', _LAYER_FACE),
    ('cheek.T.L.001', '', _LAYER_FACE),
    ('cheek.T.R.001', '', _LAYER_FACE),
    ('tongue.001', '', _LAYER_FACE),
    ('f_palm.004.L', 'limbs.super_palm', _LAYER_HAND),
    ('f_palm.001.L', 'limbs.super_palm', _LAYER_HAND),
    ('f_palm.002.L', '', _LAYER_HAND),
    ('f_palm.003.L', '', _LAYER_HAND),
    ('f_palm.004.R', 'limbs.super_palm', _LAYER_HAND),
    ('f_palm.001.R', 'limbs.super_palm', _LAYER_HAND),
    ('f_palm.002.R', '', _LAYER_HAND),
    ('f_palm.003.R', '', _LAYER_HAND),
    ('nose.002', '', _LAYER_FACE),
    ('chin.001', '', _LAYER_FACE),
    ('ear.L.002', '', _LAYER_FACE),
    ('ear.R.002', '', _LAYER_FACE),
    ('brow.B.L.002', '', _LAYER_FACE),
    ('lid.T.L.002', '', _LAYER_FACE),
    ('brow.B.R.002', '', _LAYER_FACE),
    ('lid.T.R.002', '', _LAYER_FACE),
    ('forehead.L.002', '', _LAYER_FACE),
    ('forehead.R.002', '', _LAYER_FACE),
    ('nose.L', '', _LAYER_FACE),
    ('nose.R', '', _LAYER_FACE),
    ('tongue.002', '', _LAYER_FACE),
    ('f_pinky.001.L', 'limbs.simple_tentacle', _LAYER_HAND),
    ('f_index.001.L', 'limbs.simple_tentacle', _LAYER_HAND),
    ('f_middle.001.L', 'limbs.simple_tentacle', _LAYER_HAND),
    ('f_ring.001.L', 'limbs.simple_tentacle', _LAYER_HAND),
    ('f_pinky.001.R', 'limbs.simple_tentacle', _LAYER_HAND),
    ('f_index.001.R', 'limbs.simple_tentacle', _LAYER_HAND),
    ('f_middle.001.R', 'limbs.simple_tentacle', _LAYER_HAND),
    ('f_ring.001.R', 'limbs.simple_tentacle', _LAYER_HAND),
    ('nose.003', '', _LAYER_FACE),
    ('ear.L.003', '', _LAYER_FACE),
    ('ear.R.003', '', _LAYER_FACE),
    ('brow.B.L.003', '', _LAYER_FACE),
    ('lid.T.L.003', '', _LAYER_FACE),
    ('brow.B.R.003', '', _LAYER_FACE),
    ('lid.T.R.003', '', _LAYER_FACE),
    ('temple.L', '', _LAYER_FACE),
    ('temple.R', '', _LAYER_FACE),
    ('nose.L.001', '', _LAYER_FACE),
    ('nose.R.001', '', _LAYER_FACE),
    ('f_pinky.002.L', '', _LAYER_HAND),
    ('f_index.002.L', '', _LAYER_HAND),
    ('f_middle.002.L', '', _LAYER_HAND),
    ('f_ring.002.L', '', _LAYER_HAND),
    ('f_pinky.002.R', '', _LAYER_HAND),
    ('f_index.002.R', '', _LAYER_HAND),
    ('f_middle.002.R', '', _LAYER_HAND),
    ('f_ring.002.R', '', _LAYER_HAND),
    ('nose.004', '', _LAYER_FACE),
    ('ear.L.004', '', _LAYER_FACE),
    ('ear.R.004', '', _LAYER_FACE),
    ('lid.B.L', '', _LAYER_FACE),
    ('lid.B.R', '', _LAYER_FACE),
    ('jaw.L', '', _LAYER_FACE),
    ('jaw.R', '', _LAYER_FACE),
    ('lid.B.L.001', '', _LAYER_FACE),
    ('lid.B.R.001', '', _LAYER_FACE),
    ('jaw.L.001', '', _LAYER_FACE),
    ('jaw.R.001', '', _LAYER_FACE),
]

# Apply the identical 8-assignment configuration the generated code performed
# for each bone: no transform locks, quaternion rotation, one visible layer.
for _bone_key, _rig_type, _layer_mask in _BONE_SPECS:
    pbone = obj.pose.bones[bones[_bone_key]]
    pbone.rigify_type = _rig_type
    pbone.lock_location = (False, False, False)
    pbone.lock_rotation = (False, False, False)
    pbone.lock_rotation_w = False
    pbone.lock_scale = (False, False, False)
    pbone.rotation_mode = 'QUATERNION'
    pbone.bone.layers = _layer_mask
    if _rig_type == 'limbs.simple_tentacle':
        # Only the simple_tentacle bones set tweak layers; the original
        # guards this with try/except because the parameter group may be
        # missing (raising AttributeError) on some rigify builds.
        try:
            pbone.rigify_parameters.tweak_layers = _LAYER_TWEAK
        except AttributeError:
            pass

# Begin configuring the next bone exactly as the original did up to this
# point in the file; its remaining properties (lock_rotation_w, lock_scale,
# rotation_mode, layers) are assigned by the statements that follow.
pbone = obj.pose.bones[bones['lid.B.L.002']]
pbone.rigify_type = ''
pbone.lock_location = (False, False, False)
pbone.lock_rotation = (False, False, False)
pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['lid.B.R.002']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['chin.L']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['chin.R']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['lid.B.L.003']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, 
False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['lid.B.R.003']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['cheek.B.L']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['cheek.B.R']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['cheek.B.L.001']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) 
pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['cheek.B.R.001']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['brow.T.L']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['brow.T.R']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['brow.T.L.001']] pbone.rigify_type = '' pbone.lock_location = 
(False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['brow.T.R.001']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['brow.T.L.002']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['brow.T.R.002']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['brow.T.L.003']] 
pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] pbone = obj.pose.bones[bones['brow.T.R.003']] pbone.rigify_type = '' pbone.lock_location = (False, False, False) pbone.lock_rotation = (False, False, False) pbone.lock_rotation_w = False pbone.lock_scale = (False, False, False) pbone.rotation_mode = 'QUATERNION' pbone.bone.layers = [True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False] bpy.ops.object.mode_set(mode='EDIT') for bone in arm.edit_bones: bone.select = False bone.select_head = False bone.select_tail = False for b in bones: bone = arm.edit_bones[bones[b]] bone.select = True bone.select_head = True bone.select_tail = True arm.edit_bones.active = bone arm.layers = [(x in [0, 3, 5, 7, 10, 13, 16, 19]) for x in range(32)] if __name__ == "__main__": create(bpy.context.active_object)
53.377852
274
0.661826
23,431
159,066
4.396483
0.021766
0.676122
0.893034
1.029375
0.972926
0.945425
0.940125
0.93031
0.918613
0.90611
0
0.063814
0.182126
159,066
2,979
275
53.39577
0.728016
0.000245
0
0.576248
1
0
0.058865
0.002113
0
0
0
0
0
1
0.000337
false
0.01552
0.000675
0
0.001012
0
0
0
0
null
1
1
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
10
d3fad6d59b0cec10fd6d66b97c64b53f3e95e9a3
15,827
py
Python
tests/test_basic.py
grnet/puka
6a0651c48421daac610a63567efc6f6f0e0fe83a
[ "MIT" ]
null
null
null
tests/test_basic.py
grnet/puka
6a0651c48421daac610a63567efc6f6f0e0fe83a
[ "MIT" ]
null
null
null
tests/test_basic.py
grnet/puka
6a0651c48421daac610a63567efc6f6f0e0fe83a
[ "MIT" ]
null
null
null
from __future__ import with_statement import os import puka import base class TestBasic(base.TestCase): def test_simple_roundtrip(self): client = puka.Client(self.amqp_url) promise = client.connect() client.wait(promise) promise = client.queue_declare(queue=self.name) client.wait(promise) promise = client.basic_publish(exchange='', routing_key=self.name, body=self.msg) client.wait(promise) consume_promise = client.basic_consume(queue=self.name, no_ack=True) result = client.wait(consume_promise) self.assertEqual(result['body'], self.msg) promise = client.queue_delete(queue=self.name) client.wait(promise) def test_purge(self): client = puka.Client(self.amqp_url) promise = client.connect() client.wait(promise) promise = client.queue_declare(queue=self.name) client.wait(promise) promise = client.basic_publish(exchange='', routing_key=self.name, body=self.msg) client.wait(promise) promise = client.queue_purge(queue=self.name) r = client.wait(promise) self.assertEqual(r['message_count'], 1) promise = client.queue_purge(queue=self.name) r = client.wait(promise) self.assertEqual(r['message_count'], 0) promise = client.queue_delete(queue=self.name) client.wait(promise) def test_basic_get_ack(self): client = puka.Client(self.amqp_url) promise = client.connect() client.wait(promise) promise = client.queue_declare(queue=self.name) client.wait(promise) for i in range(4): promise = client.basic_publish(exchange='', routing_key=self.name, body=self.msg+str(i)) client.wait(promise) msgs = [] for i in range(4): promise = client.basic_get(queue=self.name) result = client.wait(promise) self.assertEqual(result['body'], self.msg+str(i)) self.assertEqual(result['redelivered'], False) msgs.append( result ) promise = client.basic_get(queue=self.name) result = client.wait(promise) self.assertEqual('body' in result, False) self.assertEqual(len(client.channels.free_channels), 1) self.assertEqual(client.channels.free_channel_numbers[-1], 7) for msg in msgs: client.basic_ack(msg) 
self.assertEqual(len(client.channels.free_channels), 5) self.assertEqual(client.channels.free_channel_numbers[-1], 7) promise = client.queue_delete(queue=self.name) client.wait(promise) def test_basic_publish_bad_exchange(self): client = puka.Client(self.amqp_url) promise = client.connect() client.wait(promise) for i in range(2): promise = client.basic_publish(exchange='invalid_exchange', routing_key='xxx', body='') self.assertEqual(len(client.channels.free_channels), 0) self.assertEqual(client.channels.free_channel_numbers[-1], 2) with self.assertRaises(puka.NotFound) as cm: client.wait(promise) (r,) = cm.exception # unpack args of exception self.assertTrue(r.is_error) self.assertEqual(r['reply_code'], 404) self.assertEqual(len(client.channels.free_channels), 0) self.assertEqual(client.channels.free_channel_numbers[-1], 2) def test_basic_return(self): client = puka.Client(self.amqp_url) promise = client.connect() client.wait(promise) promise = client.basic_publish(exchange='', routing_key=self.name, mandatory=True, body='') with self.assertRaises(puka.NoRoute): client.wait(promise) promise = client.queue_declare(queue=self.name) client.wait(promise) promise = client.basic_publish(exchange='', routing_key=self.name, mandatory=True, body='') client.wait(promise) # no error promise = client.basic_publish(exchange='', routing_key=self.name, immediate=True, body='') with self.assertRaises(puka.NoConsumers): r = client.wait(promise) print r promise = client.queue_delete(queue=self.name) client.wait(promise) def test_persistent(self): client = puka.Client(self.amqp_url) promise = client.connect() client.wait(promise) promise = client.queue_declare(queue=self.name) client.wait(promise) promise = client.basic_publish(exchange='', routing_key=self.name, body=self.msg) # persistence=default client.wait(promise) promise = client.basic_publish(exchange='', routing_key=self.name, body=self.msg, headers={'delivery_mode':2}) client.wait(promise) promise = 
client.basic_publish(exchange='', routing_key=self.name, body=self.msg, headers={'delivery_mode':1}) client.wait(promise) promise = client.basic_get(queue=self.name, no_ack=True) result = client.wait(promise) self.assertTrue('delivery_mode' not in result['headers']) promise = client.basic_get(queue=self.name, no_ack=True) result = client.wait(promise) self.assertTrue('delivery_mode' in result['headers']) self.assertEquals(result['headers']['delivery_mode'], 2) promise = client.basic_get(queue=self.name, no_ack=True) result = client.wait(promise) self.assertTrue('delivery_mode' in result['headers']) self.assertEquals(result['headers']['delivery_mode'], 1) promise = client.queue_delete(queue=self.name) client.wait(promise) def test_basic_reject(self): client = puka.Client(self.amqp_url) promise = client.connect() client.wait(promise) promise = client.queue_declare(queue=self.name) client.wait(promise) promise = client.basic_publish(exchange='', routing_key=self.name, body='a') client.wait(promise) t = client.basic_get(queue=self.name) r = client.wait(t) self.assertEqual(r['body'], 'a') self.assertTrue(not r['redelivered']) client.basic_reject(r) t = client.basic_get(queue=self.name) r = client.wait(t) self.assertEqual(r['body'], 'a') self.assertTrue(r['redelivered']) promise = client.queue_delete(queue=self.name) client.wait(promise) def test_basic_reject_no_requeue(self): client = puka.Client(self.amqp_url) promise = client.connect() client.wait(promise) promise = client.queue_declare(queue=self.name) client.wait(promise) promise = client.basic_publish(exchange='', routing_key=self.name, body='a') client.wait(promise) t = client.basic_get(queue=self.name) r = client.wait(t) self.assertEqual(r['body'], 'a') self.assertTrue(not r['redelivered']) client.basic_reject(r, requeue=False) t = client.basic_get(queue=self.name) r = client.wait(t) self.assertTrue(r['empty']) self.assertFalse('redelivered' in r) self.assertFalse('body' in r) promise = 
client.queue_delete(queue=self.name) client.wait(promise) def test_basic_reject_dead_letter_exchange(self): client = puka.Client(self.amqp_url) promise = client.connect() client.wait(promise) promise = client.exchange_declare(exchange=self.name1, type='fanout') client.wait(promise) promise = client.queue_declare( queue=self.name, arguments={'x-dead-letter-exchange': self.name1}) client.wait(promise) promise = client.queue_declare(exclusive=True) dlxqname = client.wait(promise)['queue'] promise = client.queue_bind(queue=dlxqname, exchange=self.name1) client.wait(promise) promise = client.basic_publish(exchange='', routing_key=self.name, body='a') client.wait(promise) t = client.basic_get(queue=self.name) r = client.wait(t) self.assertEqual(r['body'], 'a') self.assertTrue(not r['redelivered']) client.basic_reject(r, requeue=False) t = client.basic_get(queue=self.name) r = client.wait(t) self.assertTrue(r['empty']) self.assertFalse('redelivered' in r) self.assertFalse('body' in r) t = client.basic_get(queue=dlxqname) r = client.wait(t) self.assertEqual(r['body'], 'a') self.assertEqual(r['headers']['x-death'][0]['reason'], 'rejected') self.assertTrue(not r['redelivered']) promise = client.queue_delete(queue=self.name) client.wait(promise) promise = client.exchange_delete(exchange=self.name1) client.wait(promise) def test_properties(self): client = puka.Client(self.amqp_url) promise = client.connect() client.wait(promise) t = client.queue_declare(queue=self.name) client.wait(t) headers = { "content_type": 'a', "content_encoding": 'b', #"headers": "delivery_mode": 2, "priority": 1, "correlation_id": 'd', "reply_to": 'e', "expiration": 'f', "message_id": 'g', "timestamp": 1, "type_": 'h', "user_id": 'guest', # that one needs to match real user "app_id": 'j', "cluster_id": 'k', "custom": 'l', "blah2": [True, 1, -1, 4611686018427387904L, -4611686018427387904L, [1,2,3,4, {"a":"b", "c":[]}]], } t = client.basic_publish(exchange='', routing_key=self.name, body='a', 
headers=headers.copy()) client.wait(t) t = client.basic_get(queue=self.name, no_ack=True) r = client.wait(t) self.assertEqual(r['body'], 'a') recv_headers = r['headers'] del recv_headers['x-puka-delivery-tag'] self.assertEqual(repr(headers), repr(recv_headers)) promise = client.queue_delete(queue=self.name) client.wait(promise) def test_basic_ack_fail(self): client = puka.Client(self.amqp_url) promise = client.connect() client.wait(promise) promise = client.queue_declare(queue=self.name) client.wait(promise) promise = client.basic_publish(exchange='', routing_key=self.name, body='a') client.wait(promise) promise = client.basic_consume(queue=self.name) result = client.wait(promise) with self.assertRaises(puka.PreconditionFailed): r2 = result.copy() r2['delivery_tag'] = 999 client.basic_ack(r2) client.wait(promise) promise = client.basic_consume(queue=self.name) result = client.wait(promise) client.basic_ack(result) with self.assertRaises(AssertionError): client.basic_ack(result) promise = client.queue_delete(queue=self.name) client.wait(promise) def test_basic_cancel(self): client = puka.Client(self.amqp_url) promise = client.connect() client.wait(promise) promise = client.queue_declare(queue=self.name) client.wait(promise) for i in range(2): promise = client.basic_publish(exchange='', routing_key=self.name, body='a') client.wait(promise) consume_promise = client.basic_consume(queue=self.name) msg1 = client.wait(consume_promise) self.assertEqual(msg1['body'], 'a') client.basic_ack(msg1) promise = client.basic_cancel(consume_promise) result = client.wait(promise) self.assertTrue('consumer_tag' in result) promise = client.basic_publish(exchange='', routing_key=self.name, body='b') client.wait(promise) promise = client.queue_delete(queue=self.name) client.wait(promise) def test_close(self): client = puka.Client(self.amqp_url) promise = client.connect() client.wait(promise) promise = client.queue_declare(queue=self.name) client.wait(promise) promise = 
client.basic_publish(exchange='', routing_key=self.name, body=self.msg) client.wait(promise) consume_promise = client.basic_consume(queue=self.name) msg_result = client.wait(consume_promise) promise = client.queue_delete(self.name) client.wait(promise) promise = client.close() client.wait(promise) def test_basic_consume_fail(self): client = puka.Client(self.amqp_url) promise = client.connect() client.wait(promise) consume_promise = client.basic_consume(queue='bad_q_name') with self.assertRaises(puka.NotFound): msg_result = client.wait(consume_promise) promise = client.close() client.wait(promise) def test_broken_ack_on_close(self): client = puka.Client(self.amqp_url) promise = client.connect() client.wait(promise) promise = client.queue_declare() qname = client.wait(promise)['queue'] promise = client.basic_publish(exchange='', routing_key=qname, body='a') client.wait(promise) promise = client.basic_get(queue=qname) r = client.wait(promise) self.assertEquals(r['body'], 'a') promise = client.queue_delete(queue=qname) client.wait(promise) promise = client.close() client.wait(promise) @base.connect def test_basic_qos(self, client): promise = client.queue_declare(queue=self.name) client.wait(promise) promise = client.basic_publish(exchange='', routing_key=self.name, body='a') client.wait(promise) promise = client.basic_publish(exchange='', routing_key=self.name, body='b') client.wait(promise) promise = client.basic_publish(exchange='', routing_key=self.name, body='c') client.wait(promise) consume_promise = client.basic_consume(queue=self.name, prefetch_count=1) result = client.wait(consume_promise, timeout=0.1) self.assertEqual(result['body'], 'a') result = client.wait(consume_promise, timeout=0.1) self.assertEqual(result, None) promise = client.basic_qos(consume_promise, prefetch_count=2) result = client.wait(promise) result = client.wait(consume_promise, timeout=0.1) self.assertEqual(result['body'], 'b') result = client.wait(consume_promise, timeout=0.1) 
self.assertEqual(result, None) promise = client.queue_delete(queue=self.name) client.wait(promise) if __name__ == '__main__': import tests tests.run_unittests(globals())
32.8361
81
0.590068
1,777
15,827
5.118739
0.097918
0.111038
0.155123
0.100264
0.829815
0.812335
0.782542
0.759675
0.730211
0.681838
0
0.00865
0.291464
15,827
481
82
32.904366
0.802479
0.006129
0
0.675141
0
0
0.041402
0.001399
0
0
0
0
0.149718
0
null
null
0
0.014124
null
null
0.002825
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
7
3108ec5cd474ad13a68450219f7fefea6f9a6cc5
13,984
py
Python
peacemakr/generated/api/key_service_api.py
peacemakr-io/peacemakr-python-sdk
180bbc2e480ea855dddf0e28c2f27e83a17bfb84
[ "Apache-2.0" ]
3
2020-01-27T10:07:29.000Z
2021-05-17T16:45:59.000Z
peacemakr/generated/api/key_service_api.py
peacemakr-io/peacemakr-python-sdk
180bbc2e480ea855dddf0e28c2f27e83a17bfb84
[ "Apache-2.0" ]
7
2020-06-24T03:55:36.000Z
2021-03-30T00:43:51.000Z
peacemakr/generated/api/key_service_api.py
peacemakr-io/peacemakr-python-sdk
180bbc2e480ea855dddf0e28c2f27e83a17bfb84
[ "Apache-2.0" ]
1
2021-04-27T04:12:30.000Z
2021-04-27T04:12:30.000Z
# coding: utf-8 """ Peacemakr This API describes the Peacemakr services, which enable seamless application layer encryption and verification. # noqa: E501 OpenAPI spec version: 1.0.0 Generated by: https://github.com/swagger-api/swagger-codegen.git """ from __future__ import absolute_import import re # noqa: F401 # python 2 and python 3 compatibility library import six from peacemakr.generated.api_client import ApiClient class KeyServiceApi(object): """NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually. Ref: https://github.com/swagger-api/swagger-codegen """ def __init__(self, api_client=None): if api_client is None: api_client = ApiClient() self.api_client = api_client def get_all_encrypted_keys(self, encrypting_key_id, **kwargs): # noqa: E501 """Get all encrypted symmetric keys that are encrypted with this encrypting keyId, optionally limiting the request to a set of symmetric key domains # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_all_encrypted_keys(encrypting_key_id, async_req=True) >>> result = thread.get() :param async_req bool :param str encrypting_key_id: (required) :param list[str] symmetric_key_ids: :return: list[EncryptedSymmetricKey] If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.get_all_encrypted_keys_with_http_info(encrypting_key_id, **kwargs) # noqa: E501 else: (data) = self.get_all_encrypted_keys_with_http_info(encrypting_key_id, **kwargs) # noqa: E501 return data def get_all_encrypted_keys_with_http_info(self, encrypting_key_id, **kwargs): # noqa: E501 """Get all encrypted symmetric keys that are encrypted with this encrypting keyId, optionally limiting the request to a set of symmetric key domains # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_all_encrypted_keys_with_http_info(encrypting_key_id, async_req=True) >>> result = thread.get() :param async_req bool :param str encrypting_key_id: (required) :param list[str] symmetric_key_ids: :return: list[EncryptedSymmetricKey] If the method is called asynchronously, returns the request thread. """ all_params = ['encrypting_key_id', 'symmetric_key_ids'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_all_encrypted_keys" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'encrypting_key_id' is set if ('encrypting_key_id' not in params or params['encrypting_key_id'] is None): raise ValueError("Missing the required parameter `encrypting_key_id` when calling `get_all_encrypted_keys`") # noqa: E501 collection_formats = {} path_params = {} if 'encrypting_key_id' in params: path_params['encryptingKeyId'] = params['encrypting_key_id'] # noqa: E501 query_params = [] if 'symmetric_key_ids' in params: query_params.append(('symmetricKeyIds', params['symmetric_key_ids'])) # noqa: E501 collection_formats['symmetricKeyIds'] = 'csv' # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['header'] # noqa: E501 return self.api_client.call_api( '/crypto/symmetric/{encryptingKeyId}', 'GET', path_params, query_params, header_params, 
body=body_params, post_params=form_params, files=local_var_files, response_type='list[EncryptedSymmetricKey]', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def get_public_key(self, key_id, **kwargs): # noqa: E501 """Get the public key associated with the passed-in key ID # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_public_key(key_id, async_req=True) >>> result = thread.get() :param async_req bool :param str key_id: (required) :return: PublicKey If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.get_public_key_with_http_info(key_id, **kwargs) # noqa: E501 else: (data) = self.get_public_key_with_http_info(key_id, **kwargs) # noqa: E501 return data def get_public_key_with_http_info(self, key_id, **kwargs): # noqa: E501 """Get the public key associated with the passed-in key ID # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_public_key_with_http_info(key_id, async_req=True) >>> result = thread.get() :param async_req bool :param str key_id: (required) :return: PublicKey If the method is called asynchronously, returns the request thread. 
""" all_params = ['key_id'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_public_key" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'key_id' is set if ('key_id' not in params or params['key_id'] is None): raise ValueError("Missing the required parameter `key_id` when calling `get_public_key`") # noqa: E501 collection_formats = {} path_params = {} if 'key_id' in params: path_params['keyID'] = params['key_id'] # noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['header'] # noqa: E501 return self.api_client.call_api( '/crypto/asymmetric/{keyID}', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='PublicKey', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def post_new_encrypted_keys(self, encrypting_key_id, encrypted_symmetric_key, **kwargs): # noqa: E501 """Add a new encrypted key. The encrypting key that protects the encrypted key is identified with encryptingKeyId. Request must come from a registered key manager. # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.post_new_encrypted_keys(encrypting_key_id, encrypted_symmetric_key, async_req=True) >>> result = thread.get() :param async_req bool :param str encrypting_key_id: (required) :param list[EncryptedSymmetricKey] encrypted_symmetric_key: (required) :return: None If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.post_new_encrypted_keys_with_http_info(encrypting_key_id, encrypted_symmetric_key, **kwargs) # noqa: E501 else: (data) = self.post_new_encrypted_keys_with_http_info(encrypting_key_id, encrypted_symmetric_key, **kwargs) # noqa: E501 return data def post_new_encrypted_keys_with_http_info(self, encrypting_key_id, encrypted_symmetric_key, **kwargs): # noqa: E501 """Add a new encrypted key. The encrypting key that protects the encrypted key is identified with encryptingKeyId. Request must come from a registered key manager. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.post_new_encrypted_keys_with_http_info(encrypting_key_id, encrypted_symmetric_key, async_req=True) >>> result = thread.get() :param async_req bool :param str encrypting_key_id: (required) :param list[EncryptedSymmetricKey] encrypted_symmetric_key: (required) :return: None If the method is called asynchronously, returns the request thread. 
""" all_params = ['encrypting_key_id', 'encrypted_symmetric_key'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method post_new_encrypted_keys" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'encrypting_key_id' is set if ('encrypting_key_id' not in params or params['encrypting_key_id'] is None): raise ValueError("Missing the required parameter `encrypting_key_id` when calling `post_new_encrypted_keys`") # noqa: E501 # verify the required parameter 'encrypted_symmetric_key' is set if ('encrypted_symmetric_key' not in params or params['encrypted_symmetric_key'] is None): raise ValueError("Missing the required parameter `encrypted_symmetric_key` when calling `post_new_encrypted_keys`") # noqa: E501 collection_formats = {} path_params = {} if 'encrypting_key_id' in params: path_params['encryptingKeyId'] = params['encrypting_key_id'] # noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None if 'encrypted_symmetric_key' in params: body_params = params['encrypted_symmetric_key'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['header'] # noqa: E501 return self.api_client.call_api( '/crypto/symmetric/{encryptingKeyId}', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type=None, # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), 
_return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats)
41.250737
185
0.636156
1,657
13,984
5.085094
0.108027
0.02789
0.053406
0.025635
0.902326
0.888322
0.867671
0.850225
0.837645
0.826015
0
0.01456
0.278032
13,984
338
186
41.372781
0.820028
0.341104
0
0.715084
0
0
0.21025
0.06771
0
0
0
0
0
1
0.039106
false
0
0.022346
0
0.117318
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
31a0d2c0672e4143fb09368c1389b2593531eeb9
194
py
Python
sdc/tests/tests_perf/__init__.py
samir-nasibli/sdc
b9144c8799d6454dec3e5c550e305963b24c1570
[ "BSD-2-Clause" ]
null
null
null
sdc/tests/tests_perf/__init__.py
samir-nasibli/sdc
b9144c8799d6454dec3e5c550e305963b24c1570
[ "BSD-2-Clause" ]
null
null
null
sdc/tests/tests_perf/__init__.py
samir-nasibli/sdc
b9144c8799d6454dec3e5c550e305963b24c1570
[ "BSD-2-Clause" ]
null
null
null
from sdc.tests.tests_perf.test_perf_unicode import * from sdc.tests.tests_perf.test_perf_series_str import * from sdc.tests.tests_perf.test_perf_series import * from . import test_perf_read_csv
38.8
55
0.850515
34
194
4.470588
0.323529
0.210526
0.236842
0.335526
0.730263
0.730263
0.730263
0.539474
0.539474
0
0
0
0.082474
194
4
56
48.5
0.853933
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
1
1
1
0
1
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
9
31c8ca6eaa93665ae1d3ae7bc7dfabb8031b1c6c
26,122
py
Python
calculator-full.py
MohamedEmad2003/Calculator
4e92382d11b14c22abffc5288126924484c2e7c7
[ "MIT" ]
1
2020-06-05T15:39:18.000Z
2020-06-05T15:39:18.000Z
calculator-full.py
MohamedEmad2003/Calculator
4e92382d11b14c22abffc5288126924484c2e7c7
[ "MIT" ]
null
null
null
calculator-full.py
MohamedEmad2003/Calculator
4e92382d11b14c22abffc5288126924484c2e7c7
[ "MIT" ]
null
null
null
from tkinter import * from tkinter import messagebox window = Tk() window.title('Calculator') window.geometry('250x360+300+100') window.configure(bg='#001a1a') window.resizable(FALSE,FALSE) input_user = StringVar() click = StringVar() comma_write = StringVar() comma_has_written = comma_write.get() comma_write.set('0') #------------------------------------------------------------------------------------------------------ #-------------------------------------Functions-------------------------------------------------------- #------------------------------------------------------------------------------------------------------ def del_bt(): print("delete") x = input_user.get() input_user.set(x[:-1]) def clear_bt(): print("clear") x = input_user.get() input_user.set('') comma_has_written = comma_write.get() comma_write.set('0') def plus_bt(): equal_clicked = click.get() x = input_user.get() if len(x)>=15: messagebox.showwarning("warning", " مش مسموح لحضرتك تكتب أرقام أكتر من كده ، \n امسح أرقام وكمل حساباتك تاني \nYou can't write more number after this,\n This is the maximum length of numbers and operators" ) else: print("plus") x = input_user.get() input_user.set(x+'+') click.set('no') comma_has_written = comma_write.get() comma_write.set('0') def minus_bt(): equal_clicked = click.get() x = input_user.get() if len(x)>=15: messagebox.showwarning("warning", " مش مسموح لحضرتك تكتب أرقام أو عمليات أكتر من كده \n امسح أرقام وبعدين كمل حساباتك تاني ، \nYou can't write more number after this,\n This is the maximum length of numbers and operators" ) else: print("minus") x = input_user.get() input_user.set(x+'-') click.set('no') comma_has_written = comma_write.get() comma_write.set('0') def multi_bt(): x = input_user.get() equal_clicked = click.get() if len(x)>=15: messagebox.showwarning("warning", " مش مسموح لحضرتك تكتب أرقام أكتر من كده ، \n امسح أرقام وكمل حساباتك تاني \nYou can't write more number after this,\n This is the maximum length of numbers and operators" ) else: 
if equal_clicked == 'yes': print("multi") x = input_user.get() input_user.set(x+'*') click.set('no') comma_has_written = comma_write.get() comma_write.set('0') elif len(x)==0 or x[0]== '/' or x[0]== '*': input_user.set('') else: if x[-1]=='/' or x[-1]=='*': messagebox.showwarning("warning", "You mustn't write // or **, you can write only one * or /" ) elif x[-1]== '-' or x[0]== '+': print("can't write *") else: print("multi") x = input_user.get() input_user.set(x+'*') comma_has_written = comma_write.get() comma_write.set('0') def divi_bt(): equal_clicked = click.get() x = input_user.get() if len(x)>=15: messagebox.showwarning("warning", " مش مسموح لحضرتك تكتب أرقام أكتر من كده ، \n امسح أرقام وكمل حساباتك تاني \nYou can't write more number after this,\n This is the maximum length of numbers and operators" ) else: if equal_clicked == 'yes': print("divi") x = input_user.get() input_user.set(x+'/') click.set('no') comma_has_written = comma_write.get() comma_write.set('0') elif len(x)==0 or x[0]== '/' or x[0]== '*' : input_user.set('') else: if x[-1]=='/' or x[-1]=='*': messagebox.showwarning("warning", "You mustn't write // or **, you can write only one * or /" ) elif x[-1]== '-' or x[0]== '+': print("can't write /") else: print("divi") x = input_user.get() input_user.set(x+'/') comma_has_written = comma_write.get() comma_write.set('0') def equal_bt(): x = input_user.get() print('sentence is :',x) print('sentence length equal',len(x)) comma_has_written = comma_write.get() if len(x)==0: input_user.set('0') equal_clicked = click.get() click.set('yes') print(equal_clicked) comma_has_written = comma_write.get() comma_write.set('0') elif x[0]=='+' and len(x)>1: x = input_user.get() print(x) z = eval(x) input_user.set(z) print(z) equal_clicked = click.get() click.set('yes') print(equal_clicked) comma_has_written = comma_write.get() comma_write.set('0') elif (x[0]=='.' 
or x[0]=='+' or x[0]=='-' )and len(x)==1: if x[0]=='.': print('HaHaHa') print("equal") x = input_user.get() input_user.set('0.0') equal_clicked = click.get() click.set('yes') print(equal_clicked) comma_has_written = comma_write.get() comma_write.set('0') #-------------------------------- if equal_clicked == 'yes': x = input_user.get() input_user.set('') click.set('no') print(equal_clicked) equal_bt() #-------------------------------- elif x[0]=='+' or x[0]=='-': input_user.set('') else: equal_clicked = click.get() print("LAaaaaaaaaaaaaaaaaaa") x = input_user.get() print(x) z = eval(x) input_user.set(z) print(z) click.set('yes') print(equal_clicked) comma_write.set('0') #------------------------------------------------ elif x[0]=='.' and len(x)>1: x = input_user.get() equal_clicked = click.get() print(x) z = eval(x) input_user.set(z) print(z) click.set('yes') print(equal_clicked) comma_write.set('0') #------------------------------------------------ else: if x[-1]=='/' or x[-1]=='*' or x[-1]=='+' or x[-1]=='-': #messagebox.showwarning("warning", "You mustn't write / or * or + or - at the end of calculation without number !" ) input_user.set(x[:-1]) x = input_user.get() print(x) z = eval(x) input_user.set(z) print(z) equal_clicked = click.get() click.set('yes') print(equal_clicked) #print("length is :",len(x)) comma_has_written = comma_write.get() comma_write.set('0') elif x[:]=='.': #messagebox.showwarning("warning", "You mustn't write ' , ' (comma) without number !" 
) input_user.set(x[:-1]) x = input_user.get() print(x) z = eval(x) input_user.set(z) print(z) equal_clicked = click.get() click.set('yes') print(equal_clicked) else : print("equal") x = input_user.get() print(x) z = eval(x) input_user.set(z) print(z) equal_clicked = click.get() click.set('yes') print(equal_clicked) comma_write.set('0') def comma_bt(): comma_has_written = comma_write.get() print(comma_has_written) print(comma_write) x = input_user.get() equal_clicked = click.get() if len(x)>=15: messagebox.showwarning("warning", " مش مسموح لحضرتك تكتب أرقام أكتر من كده ، \n امسح أرقام وكمل حساباتك تاني \nYou can't write more number after this,\n This is the maximum length of numbers and operators" ) else: if equal_clicked == 'yes': x = input_user.get() input_user.set('') click.set('no') print(equal_clicked) comma_bt() elif comma_has_written == '0': print("comma") x = input_user.get() input_user.set(x+'.') comma_write.set('1') else: print('can" write comma, LoL') #---------------------------------------------- def num0_bt(): equal_clicked = click.get() x = input_user.get() if len(x)>=15: messagebox.showwarning("warning", " مش مسموح لحضرتك تكتب أرقام أكتر من كده ، \n امسح أرقام وكمل حساباتك تاني \nYou can't write more number after this,\n This is the maximum length of numbers and operators" ) else: if equal_clicked == 'yes': x = input_user.get() input_user.set('') click.set('no') print(equal_clicked) num0_bt() else: print('0') x = input_user.get() input_user.set(x+'0') comma_has_written = comma_write.get() comma_write.set('0') def num1_bt(): equal_clicked = click.get() x = input_user.get() if len(x)>=15: messagebox.showwarning("warning", " مش مسموح لحضرتك تكتب أرقام أكتر من كده ، \n امسح أرقام وكمل حساباتك تاني \nYou can't write more number after this,\n This is the maximum length of numbers and operators" ) else: if equal_clicked == 'yes': x = input_user.get() input_user.set('') click.set('no') print(equal_clicked) num1_bt() else: print("1") x = input_user.get() 
input_user.set(x+'1') comma_has_written = comma_write.get() comma_write.set('0') def num2_bt(): equal_clicked = click.get() x = input_user.get() if len(x)>=15: messagebox.showwarning("warning", " مش مسموح لحضرتك تكتب أرقام أكتر من كده ، \n امسح أرقام وكمل حساباتك تاني \nYou can't write more number after this,\n This is the maximum length of numbers and operators" ) else: if equal_clicked == 'yes': x = input_user.get() input_user.set('') click.set('no') print(equal_clicked) num2_bt() else: print("2") x = input_user.get() input_user.set(x+'2') comma_has_written = comma_write.get() comma_write.set('0') def num3_bt(): equal_clicked = click.get() x = input_user.get() if len(x)>=15: messagebox.showwarning("warning", " مش مسموح لحضرتك تكتب أرقام أكتر من كده ، \n امسح أرقام وكمل حساباتك تاني \nYou can't write more number after this,\n This is the maximum length of numbers and operators" ) else: if equal_clicked == 'yes': x = input_user.get() input_user.set('') click.set('no') print(equal_clicked) num3_bt() else: print("3") x = input_user.get() input_user.set(x+'3') comma_has_written = comma_write.get() comma_write.set('0') def num4_bt(): equal_clicked = click.get() x = input_user.get() if len(x)>=15: messagebox.showwarning("warning", " مش مسموح لحضرتك تكتب أرقام أكتر من كده ، \n امسح أرقام وكمل حساباتك تاني \nYou can't write more number after this,\n This is the maximum length of numbers and operators" ) else: if equal_clicked == 'yes': x = input_user.get() input_user.set('') click.set('no') print(equal_clicked) num4_bt() else: print("4") x = input_user.get() input_user.set(x+'4') comma_has_written = comma_write.get() comma_write.set('0') def num5_bt(): equal_clicked = click.get() x = input_user.get() if len(x)>=15: messagebox.showwarning("warning", " مش مسموح لحضرتك تكتب أرقام أكتر من كده ، \n امسح أرقام وكمل حساباتك تاني \nYou can't write more number after this,\n This is the maximum length of numbers and operators" ) else: if equal_clicked == 'yes': x = input_user.get() 
input_user.set('') click.set('no') print(equal_clicked) num5_bt() else: print("5") x = input_user.get() input_user.set(x+'5') comma_has_written = comma_write.get() comma_write.set('0') def num6_bt(): equal_clicked = click.get() x = input_user.get() if len(x)>=15: messagebox.showwarning("warning", " مش مسموح لحضرتك تكتب أرقام أكتر من كده ، \n امسح أرقام وكمل حساباتك تاني \nYou can't write more number after this,\n This is the maximum length of numbers and operators" ) else: if equal_clicked == 'yes': x = input_user.get() input_user.set('') click.set('no') print(equal_clicked) num6_bt() else: print("6") x = input_user.get() input_user.set(x+'6') comma_has_written = comma_write.get() comma_write.set('0') def num7_bt(): equal_clicked = click.get() x = input_user.get() if len(x)>=15: messagebox.showwarning("warning", " مش مسموح لحضرتك تكتب أرقام أكتر من كده ، \n امسح أرقام وكمل حساباتك تاني \nYou can't write more number after this,\n This is the maximum length of numbers and operators" ) else: if equal_clicked == 'yes': x = input_user.get() input_user.set('') click.set('no') print(equal_clicked) num7_bt() else: print("7") x = input_user.get() input_user.set(x+'7') comma_has_written = comma_write.get() comma_write.set('0') def num8_bt(): equal_clicked = click.get() x = input_user.get() if len(x)>=15: messagebox.showwarning("warning", " مش مسموح لحضرتك تكتب أرقام أكتر من كده ، \n امسح أرقام وكمل حساباتك تاني \nYou can't write more number after this,\n This is the maximum length of numbers and operators" ) else: if equal_clicked == 'yes': x = input_user.get() input_user.set('') click.set('no') print(equal_clicked) num8_bt() else: print("8") x = input_user.get() input_user.set(x+'8') comma_has_written = comma_write.get() comma_write.set('0') def num9_bt(): equal_clicked = click.get() x = input_user.get() if len(x)>=15: messagebox.showwarning("warning", " مش مسموح لحضرتك تكتب أرقام أكتر من كده ، \n امسح أرقام وكمل حساباتك تاني \nYou can't write more number after this,\n This 
is the maximum length of numbers and operators" ) else: if equal_clicked == 'yes': x = input_user.get() input_user.set('') click.set('no') print(equal_clicked) num9_bt() else: print("9") x = input_user.get() input_user.set(x+'9') comma_has_written = comma_write.get() comma_write.set('0') #-------------------------------------------------------------------------------------------------------------- #-------------------------------------------------------------------------------------------------------------- #-------------------------------------------------------------------------------------------------------- #-------------------------------------------------Buttons------------------------------------------------ #-------------------------------------------------------------------------------------------------------- #------------------------------clear_button-------------------------------------------------- clear_button = Button(text='clear', bg='#0066cc', fg='#00ff00', activebackground='#003366', activeforeground='yellow', height=2, width=6, command=clear_bt) clear_button.place(x=10,y=130) #--------------------------------plus_button-------------------------------------------------- plus_button = Button(text='+', bg='#0066cc', fg='white', activebackground='#003366', activeforeground='yellow', height=2, width=6, command=plus_bt) plus_button.place(x=190,y=220) #------------------------------minus_button------------------------------------------------- minus_button = Button(text='-', bg='#0066cc', fg='white', activebackground='#003366', activeforeground='yellow', height=2, width=6, command=minus_bt) minus_button.place(x=190,y=175) #--------------------------------multiplication_button-------------------------------------------------- multiplication_button = Button(text='x', bg='#0066cc', fg='white', activebackground='#003366', activeforeground='yellow', height=2, width=6, command=multi_bt) multiplication_button.place(x=190,y=130) 
#------------------------------division_button-------------------------------------------------- division_button = Button(text='÷', bg='#0066cc', fg='white', activebackground='#003366', activeforeground='yellow', height=2, width=6, command=divi_bt) division_button.place(x=130,y=130) #--------------------------------delete_button-------------------------------------------------- delete_button = Button(text='delete', bg='#0066cc', fg='#00ff00', activebackground='#003366', activeforeground='yellow', height=2, width=6, command=del_bt) delete_button.place(x=70,y=130) #--------------------------------equal_button-------------------------------------------------- equal_button = Button(text='=', bg='#99004d', fg='white', activebackground='#4d0026', activeforeground='yellow', height=5, width=6, command=equal_bt) equal_button.place(x=190,y=265) #--------------------------------number_7_button-------------------------------------------------- number7_button = Button(text='7', bg='#99004d', fg='white', activebackground='#4d0026', activeforeground='green', height=2, width=6, command=num7_bt) number7_button.place(x=10,y=175) #---------------------------------number_8_button------------------------------------------------- number8_button = Button(text='8', bg='#99004d', fg='white', activebackground='#4d0026', activeforeground='green', height=2, width=6, command=num8_bt) number8_button.place(x=70,y=175) #---------------------------------number_9_button--------------------------------------------------------- number9_button = Button(text='9', bg='#99004d', fg='white', activebackground='#4d0026', activeforeground='green', height=2, width=6, command=num9_bt) number9_button.place(x=130,y=175) #-----------------------------------number_4_button----------------------------------------------------------------- number4_button = Button(text='4', bg='#99004d', fg='white', activebackground='#4d0026', activeforeground='green', height=2, width=6, command=num4_bt) 
number4_button.place(x=10,y=220) #----------------------------------number_5_button-------------------------------------------------------------------- number5_button = Button(text='5', bg='#99004d', fg='white', activebackground='#4d0026', activeforeground='green', height=2, width=6, command=num5_bt) number5_button.place(x=70,y=220) #---------------------------------number_6_button--------------------------------------------- number6_button = Button(text='6', bg='#99004d', fg='white', activebackground='#4d0026', activeforeground='green', height=2, width=6, command=num6_bt) number6_button.place(x=130,y=220) #-----------------------------------number_1_button----------------------------------------------------------------- number1_button = Button(text='1', bg='#99004d', fg='white', activebackground='#4d0026', activeforeground='green', height=2, width=6, command=num1_bt) number1_button.place(x=10,y=265) #----------------------------------number_2_button--------------------------------------------------------------------------------- number2_button = Button(text='2', bg='#99004d', fg='white', activebackground='#4d0026', activeforeground='green', height=2, width=6, command=num2_bt) number2_button.place(x=70,y=265) #---------------------------------number_3_button--------------------------------------------- number3_button = Button(text='3', bg='#99004d', fg='white', activebackground='#4d0026', activeforeground='green', height=2, width=6, command=num3_bt) number3_button.place(x=130,y=265) #---------------------------------number_0_button--------------------------------------------- number0_button = Button(text='0', bg='#99004d', fg='white', activebackground='#4d0026', activeforeground='green', height=2, width=15, command=num0_bt) number0_button.place(x=9,y=310) #---------------------------------comma_button--------------------------------------------- comma_button = Button(text=',', bg='#99004d', fg='white', activebackground='#4d0026', activeforeground='green', height=2, 
width=6, command=comma_bt) comma_button.place(x=130,y=310) #-------------------------------------------------------------------------------------------------------- #-------------------------------------------------Lables------------------------------------------------- #-------------------------------------------------------------------------------------------------------- screen = Label(window,textvariable= input_user, bg='#d9d9d9', fg='black', height=3, width=17, anchor=SE, padx=5, pady=5) screen.config(font=("Courier", 16)) screen.place(x=8,y=34) title_programmer = Label(window,text='By Mohamed Emad, Member at IEEE. April 2020', bg='green',fg='white',height=1,width=17) title_programmer.config(font=("Comic Sans MS", 8)) title_programmer.pack(fill='x') #------------------------------------------------------------------------------------------------------ #----------------------------------------------programming--------------------------------------------- #------------------------------------------------------------------------------------------------------ window.iconbitmap(r'C:\Users\comp\Desktop\Tkinter\calc.ico') window.mainloop()
36.739803
236
0.421063
2,555
26,122
4.163992
0.077104
0.084594
0.056396
0.065984
0.804305
0.773381
0.767741
0.75853
0.730144
0.720086
0
0.032197
0.354376
26,122
710
237
36.791549
0.597688
0.142179
0
0.719512
0
0.026132
0.174023
0.001755
0.003484
0
0
0
0
1
0.031359
false
0
0.003484
0
0.034843
0.108014
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
31dc7ce18ae8ce5e62415e150cf7ee84493ee876
44,666
py
Python
modules/__init__.py
shalei120/HopfieldLM
3fba4ee05bfc7f5041593f95457ffdf0bdc094a3
[ "MIT" ]
null
null
null
modules/__init__.py
shalei120/HopfieldLM
3fba4ee05bfc7f5041593f95457ffdf0bdc094a3
[ "MIT" ]
1
2021-04-21T14:46:16.000Z
2021-04-21T14:46:16.000Z
modules/__init__.py
shalei120/HopfieldLM
3fba4ee05bfc7f5041593f95457ffdf0bdc094a3
[ "MIT" ]
null
null
null
import torch import torch.nn as nn from math import sqrt from torch import Tensor from torch.nn import Module, Parameter from typing import Optional, Tuple, Union from .activation import HopfieldCore class Hopfield(Module): """ Module with underlying Hopfield association. """ def __init__(self, input_size: Optional[int] = None, hidden_size: Optional[int] = None, output_size: Optional[int] = None, pattern_size: Optional[int] = None, num_heads: int = 1, scaling: Optional[Union[float, Tensor]] = None, update_steps_max: Optional[Union[int, Tensor]] = 0, update_steps_eps: Union[float, Tensor] = 1e-4, normalize_stored_pattern: bool = True, normalize_stored_pattern_affine: bool = True, normalize_state_pattern: bool = True, normalize_state_pattern_affine: bool = True, normalize_pattern_projection: bool = True, normalize_pattern_projection_affine: bool = True, normalize_hopfield_space: bool = False, normalize_hopfield_space_affine: bool = False, stored_pattern_as_static: bool = False, state_pattern_as_static: bool = False, pattern_projection_as_static: bool = False, pattern_projection_as_connected: bool = False, stored_pattern_size: Optional[int] = None, pattern_projection_size: Optional[int] = None, batch_first: bool = True, association_activation: Optional[str] = None, dropout: float = 0.0, input_bias: bool = True, concat_bias_pattern: bool = False, add_zero_association: bool = False, disable_out_projection: bool = False ): """ Initialise new instance of a Hopfield module. 
:param input_size: depth of the input (state pattern) :param hidden_size: depth of the association space :param output_size: depth of the output projection :param pattern_size: depth of patterns to be selected :param num_heads: amount of parallel association heads :param scaling: scaling of association heads, often represented as beta (one entry per head) :param update_steps_max: maximum count of association update steps (None equals to infinity) :param update_steps_eps: minimum difference threshold between two consecutive association update steps :param normalize_stored_pattern: apply normalization on stored patterns :param normalize_stored_pattern_affine: additionally enable affine normalization of stored patterns :param normalize_state_pattern: apply normalization on state patterns :param normalize_state_pattern_affine: additionally enable affine normalization of state patterns :param normalize_pattern_projection: apply normalization on the pattern projection :param normalize_pattern_projection_affine: additionally enable affine normalization of pattern projection :param normalize_hopfield_space: enable normalization of patterns in the Hopfield space :param normalize_hopfield_space_affine: additionally enable affine normalization of patterns in Hopfield space :param stored_pattern_as_static: interpret specified stored patterns as being static :param state_pattern_as_static: interpret specified state patterns as being static :param pattern_projection_as_static: interpret specified pattern projections as being static :param pattern_projection_as_connected: connect pattern projection with stored pattern :param stored_pattern_size: depth of input (stored pattern) :param pattern_projection_size: depth of input (pattern projection) :param batch_first: flag for specifying if the first dimension of data fed to "forward" reflects the batch size :param association_activation: additional activation to be applied on the result of the Hopfield association :param dropout: 
dropout probability applied on the association matrix :param input_bias: bias to be added to input (state and stored pattern as well as pattern projection) :param concat_bias_pattern: bias to be concatenated to stored pattern as well as pattern projection :param add_zero_association: add a new batch of zeros to stored pattern as well as pattern projection :param disable_out_projection: disable output projection """ super(Hopfield, self).__init__() assert type(batch_first) == bool, f'"batch_first" needs to be a boolean, not {type(batch_first)}.' assert (association_activation is None) or (type(association_activation) == str) # Initialise Hopfield association module. self.association_core = HopfieldCore( embed_dim=input_size, num_heads=num_heads, dropout=dropout, bias=input_bias, add_bias_kv=concat_bias_pattern, add_zero_attn=add_zero_association, kdim=stored_pattern_size, vdim=pattern_projection_size, head_dim=hidden_size, pattern_dim=pattern_size, out_dim=output_size, disable_out_projection=disable_out_projection, key_as_static=stored_pattern_as_static, query_as_static=state_pattern_as_static, value_as_static=pattern_projection_as_static, value_as_connected=pattern_projection_as_connected, normalize_pattern=normalize_hopfield_space, normalize_pattern_affine=normalize_hopfield_space_affine) self.association_activation = None if association_activation is not None: self.association_activation = getattr(torch, association_activation, None) # Initialise stored pattern normalization. self.norm_stored_pattern = None if normalize_stored_pattern_affine: assert normalize_stored_pattern, "affine normalization without normalization has no effect." if normalize_stored_pattern: self.norm_stored_pattern = nn.LayerNorm( normalized_shape=self.hidden_size if stored_pattern_as_static else self.association_core.kdim, elementwise_affine=normalize_stored_pattern_affine) # Initialise state pattern normalization. 
self.norm_state_pattern = None if normalize_state_pattern_affine: assert normalize_state_pattern, "affine normalization without normalization has no effect." if normalize_state_pattern: self.norm_state_pattern = nn.LayerNorm( normalized_shape=self.hidden_size if state_pattern_as_static else self.association_core.embed_dim, elementwise_affine=normalize_state_pattern_affine) # Initialise pattern projection normalization. self.norm_pattern_projection = None if normalize_pattern_projection_affine: assert normalize_pattern_projection, "affine normalization without normalization has no effect." if normalize_pattern_projection: self.norm_pattern_projection = nn.LayerNorm( normalized_shape=self.hidden_size if pattern_projection_as_static else self.association_core.vdim, elementwise_affine=normalize_pattern_projection_affine) # Initialise remaining auxiliary properties. if self.association_core.static_execution: self.__scaling = 1.0 if scaling is None else scaling else: assert self.association_core.head_dim > 0, f'invalid hidden dimension encountered.' self.__scaling = (1.0 / sqrt(self.association_core.head_dim)) if scaling is None else scaling self.__batch_first = batch_first self.__update_steps_max = update_steps_max self.__update_steps_eps = update_steps_eps self.reset_parameters() def reset_parameters(self) -> None: """ Reset Hopfield association. :return: None """ for module in (self.association_core, self.norm_stored_pattern, self.norm_state_pattern, self.norm_pattern_projection): if hasattr(module, r'reset_parameters'): module.reset_parameters() def _maybe_transpose(self, *args: Tuple[Tensor, ...]) -> Union[Tensor, Tuple[Tensor, ...]]: """ Eventually transpose specified data. 
:param args: tensors to eventually transpose (dependent on the state of "batch_first") :return: eventually transposed tensors """ transposed_result = tuple(_.transpose(0, 1) for _ in args) if self.__batch_first else args return transposed_result[0] if len(transposed_result) == 1 else transposed_result def _associate(self, data: Union[Tensor, Tuple[Tensor, Tensor, Tensor]], return_raw_associations: bool = False, return_projected_patterns: bool = False, stored_pattern_padding_mask: Optional[Tensor] = None, association_mask: Optional[Tensor] = None) -> Tuple[Optional[Tensor], ...]: """ Apply Hopfield association module on specified data. :param data: data to be processed by Hopfield core module :param return_raw_associations: return raw association (softmax) values, unmodified :param return_projected_patterns: return pattern projection values, unmodified :param stored_pattern_padding_mask: mask to be applied on stored patterns :param association_mask: mask to be applied on inner association matrix :return: Hopfield-processed input data """ assert (type(data) == Tensor) or ((type(data) == tuple) and (len(data) == 3)), \ r'either one tensor to be used as "stored pattern", "state pattern" and' \ r' "pattern_projection" must be provided, or three separate ones.' if type(data) == Tensor: stored_pattern, state_pattern, pattern_projection = data, data, data else: stored_pattern, state_pattern, pattern_projection = data # Optionally transpose data. stored_pattern, state_pattern, pattern_projection = self._maybe_transpose( stored_pattern, state_pattern, pattern_projection) # Optionally apply stored pattern normalization. if self.norm_stored_pattern is not None: stored_pattern = self.norm_stored_pattern(input=stored_pattern.reshape( shape=(-1, stored_pattern.shape[2]))).reshape(shape=stored_pattern.shape) # Optionally apply state pattern normalization. 
if self.norm_state_pattern is not None: state_pattern = self.norm_state_pattern(input=state_pattern.reshape( shape=(-1, state_pattern.shape[2]))).reshape(shape=state_pattern.shape) # Optionally apply pattern projection normalization. if self.norm_pattern_projection is not None: pattern_projection = self.norm_pattern_projection(input=pattern_projection.reshape( shape=(-1, pattern_projection.shape[2]))).reshape(shape=pattern_projection.shape) # Apply Hopfield association and optional activation function. return self.association_core( query=state_pattern, key=stored_pattern, value=pattern_projection, key_padding_mask=stored_pattern_padding_mask, need_weights=False, attn_mask=association_mask, scaling=self.__scaling, update_steps_max=self.__update_steps_max, update_steps_eps=self.__update_steps_eps, return_raw_associations=return_raw_associations, return_pattern_projections=return_projected_patterns) def forward(self, input: Union[Tensor, Tuple[Tensor, Tensor, Tensor]], stored_pattern_padding_mask: Optional[Tensor] = None, association_mask: Optional[Tensor] = None) -> Tensor: """ Apply Hopfield association on specified data. 
:param input: data to be processed by Hopfield association module :param stored_pattern_padding_mask: mask to be applied on stored patterns :param association_mask: mask to be applied on inner association matrix :return: Hopfield-processed input data """ association_output = self._maybe_transpose(self._associate( data=input, return_raw_associations=False, stored_pattern_padding_mask=stored_pattern_padding_mask, association_mask=association_mask)[0]) if self.association_activation is not None: association_output = self.association_activation(association_output) return association_output def get_association_matrix(self, input: Union[Tensor, Tuple[Tensor, Tensor, Tensor]], stored_pattern_padding_mask: Optional[Tensor] = None, association_mask: Optional[Tensor] = None) -> Tensor: """ Fetch Hopfield association matrix gathered by passing through the specified data. :param input: data to be passed through the Hopfield association :param stored_pattern_padding_mask: mask to be applied on stored patterns :param association_mask: mask to be applied on inner association matrix :return: association matrix as computed by the Hopfield core module """ with torch.no_grad(): return self._associate( data=input, return_raw_associations=True, stored_pattern_padding_mask=stored_pattern_padding_mask, association_mask=association_mask)[2] def get_projected_pattern_matrix(self, input: Union[Tensor, Tuple[Tensor, Tensor, Tensor]], stored_pattern_padding_mask: Optional[Tensor] = None, association_mask: Optional[Tensor] = None) -> Tensor: """ Fetch Hopfield projected pattern matrix gathered by passing through the specified data. 
:param input: data to be passed through the Hopfield association :param stored_pattern_padding_mask: mask to be applied on stored patterns :param association_mask: mask to be applied on inner association matrix :return: pattern projection matrix as computed by the Hopfield core module """ with torch.no_grad(): return self._associate( data=input, return_projected_patterns=True, stored_pattern_padding_mask=stored_pattern_padding_mask, association_mask=association_mask)[3] @property def batch_first(self) -> bool: return self.__batch_first @property def scaling(self) -> Union[float, Tensor]: return self.__scaling.clone() if type(self.__scaling) == Tensor else self.__scaling @property def stored_pattern_dim(self) -> Optional[int]: return self.association_core.kdim @property def state_pattern_dim(self) -> Optional[int]: return self.association_core.embed_dim @property def pattern_projection_dim(self) -> Optional[int]: return self.association_core.vdim @property def input_size(self) -> Optional[int]: return self.state_pattern_dim @property def hidden_size(self) -> Optional[int]: return self.association_core.head_dim @property def output_size(self) -> Optional[int]: return self.association_core.out_dim @property def pattern_size(self) -> Optional[int]: return self.association_core.pattern_dim @property def update_steps_max(self) -> Optional[Union[int, Tensor]]: return self.__update_steps_max.clone() if type(self.__update_steps_max) == Tensor else self.__update_steps_max @property def update_steps_eps(self) -> Optional[Union[float, Tensor]]: return self.__update_steps_eps.clone() if type(self.__update_steps_eps) == Tensor else self.__update_steps_eps @property def stored_pattern_as_static(self) -> bool: return self.association_core.key_as_static @property def state_pattern_as_static(self) -> bool: return self.association_core.query_as_static @property def pattern_projection_as_static(self) -> bool: return self.association_core.value_as_static @property def 
normalize_stored_pattern(self) -> bool: return self.norm_stored_pattern is not None @property def normalize_stored_pattern_affine(self) -> bool: return self.normalize_stored_pattern and self.norm_stored_pattern.elementwise_affine @property def normalize_state_pattern(self) -> bool: return self.norm_state_pattern is not None @property def normalize_state_pattern_affine(self) -> bool: return self.normalize_state_pattern and self.norm_state_pattern.elementwise_affine @property def normalize_pattern_projection(self) -> bool: return self.norm_pattern_projection is not None @property def normalize_pattern_projection_affine(self) -> bool: return self.normalize_pattern_projection and self.norm_pattern_projection.elementwise_affine @property def normalize_hopfield_space(self) -> bool: return self.hopfield.normalize_hopfield_space @property def normalize_hopfield_space_affine(self) -> bool: return self.hopfield.normalize_hopfield_space_affine class HopfieldPooling(Module): """ Wrapper class encapsulating a trainable but fixed state pattern and "Hopfield" in one combined module to be used as a Hopfield-based pooling layer. 
""" def __init__(self, input_size: int, hidden_size: Optional[int] = None, output_size: Optional[int] = None, pattern_size: Optional[int] = None, num_heads: int = 1, scaling: Optional[Union[float, Tensor]] = None, update_steps_max: Optional[Union[int, Tensor]] = 0, update_steps_eps: Union[float, Tensor] = 1e-4, normalize_stored_pattern: bool = True, normalize_stored_pattern_affine: bool = True, normalize_state_pattern: bool = True, normalize_state_pattern_affine: bool = True, normalize_pattern_projection: bool = True, normalize_pattern_projection_affine: bool = True, normalize_hopfield_space: bool = False, normalize_hopfield_space_affine: bool = False, stored_pattern_as_static: bool = False, state_pattern_as_static: bool = False, pattern_projection_as_static: bool = False, pattern_projection_as_connected: bool = False, stored_pattern_size: Optional[int] = None, pattern_projection_size: Optional[int] = None, batch_first: bool = True, association_activation: Optional[str] = None, dropout: float = 0.0, input_bias: bool = True, concat_bias_pattern: bool = False, add_zero_association: bool = False, disable_out_projection: bool = False, quantity: int = 1, trainable: bool = True ): """ Initialise a new instance of a Hopfield-based pooling layer. 
:param input_size: depth of the input (state pattern) :param hidden_size: depth of the association space :param output_size: depth of the output projection :param pattern_size: depth of patterns to be selected :param num_heads: amount of parallel association heads :param scaling: scaling of association heads, often represented as beta (one entry per head) :param update_steps_max: maximum count of association update steps (None equals to infinity) :param update_steps_eps: minimum difference threshold between two consecutive association update steps :param normalize_stored_pattern: apply normalization on stored patterns :param normalize_stored_pattern_affine: additionally enable affine normalization of stored patterns :param normalize_state_pattern: apply normalization on state patterns :param normalize_state_pattern_affine: additionally enable affine normalization of state patterns :param normalize_pattern_projection: apply normalization on the pattern projection :param normalize_pattern_projection_affine: additionally enable affine normalization of pattern projection :param normalize_hopfield_space: enable normalization of patterns in the Hopfield space :param normalize_hopfield_space_affine: additionally enable affine normalization of patterns in Hopfield space :param stored_pattern_as_static: interpret specified stored patterns as being static :param state_pattern_as_static: interpret specified state patterns as being static :param pattern_projection_as_static: interpret specified pattern projections as being static :param pattern_projection_as_connected: connect pattern projection with stored pattern :param stored_pattern_size: depth of input (stored pattern) :param pattern_projection_size: depth of input (pattern projection) :param batch_first: flag for specifying if the first dimension of data fed to "forward" reflects the batch size :param association_activation: additional activation to be applied on the result of the Hopfield association :param dropout: 
dropout probability applied on the association matrix :param input_bias: bias to be added to input (state and stored pattern as well as pattern projection) :param concat_bias_pattern: bias to be concatenated to stored pattern as well as pattern projection :param add_zero_association: add a new batch of zeros to stored pattern as well as pattern projection :param disable_out_projection: disable output projection :param quantity: amount of state patterns :param trainable: state pattern used for pooling is trainable """ super(HopfieldPooling, self).__init__() self.hopfield = Hopfield( input_size=input_size, hidden_size=hidden_size, output_size=output_size, pattern_size=pattern_size, num_heads=num_heads, scaling=scaling, update_steps_max=update_steps_max, update_steps_eps=update_steps_eps, normalize_stored_pattern=normalize_stored_pattern, normalize_stored_pattern_affine=normalize_stored_pattern_affine, normalize_state_pattern=normalize_state_pattern, normalize_state_pattern_affine=normalize_state_pattern_affine, normalize_pattern_projection=normalize_pattern_projection, normalize_pattern_projection_affine=normalize_pattern_projection_affine, normalize_hopfield_space=normalize_hopfield_space, normalize_hopfield_space_affine=normalize_hopfield_space_affine, stored_pattern_as_static=stored_pattern_as_static, state_pattern_as_static=state_pattern_as_static, pattern_projection_as_static=pattern_projection_as_static, pattern_projection_as_connected=pattern_projection_as_connected, stored_pattern_size=stored_pattern_size, pattern_projection_size=pattern_projection_size, batch_first=batch_first, association_activation=association_activation, dropout=dropout, input_bias=input_bias, concat_bias_pattern=concat_bias_pattern, add_zero_association=add_zero_association, disable_out_projection=disable_out_projection) self._quantity = quantity pooling_weight_size = self.hopfield.hidden_size if state_pattern_as_static else self.hopfield.input_size self.pooling_weights = 
nn.Parameter(torch.empty(size=(*( (1, quantity) if batch_first else (quantity, 1) ), input_size if pooling_weight_size is None else pooling_weight_size)), requires_grad=trainable) self.reset_parameters() def reset_parameters(self) -> None: """ Reset pooling weights and underlying Hopfield association. :return: None """ if hasattr(self.hopfield, r'reset_parameters'): self.hopfield.reset_parameters() # Explicitly initialise pooling weights. nn.init.normal_(self.pooling_weights, mean=0.0, std=0.02) def _prepare_input(self, input: Union[Tensor, Tuple[Tensor, Tensor]]) -> Tuple[Tensor, Tensor, Tensor]: """ Prepare input for Hopfield association. :param input: data to be prepared :return: stored pattern, expanded state pattern as well as pattern projection """ assert (type(input) == Tensor) or ((type(input) == tuple) and (len(input) == 2)), \ r'either one tensor to be used as "stored pattern" and' \ r' "pattern_projection" must be provided, or two separate ones.' if type(input) == Tensor: stored_pattern, pattern_projection = input, input else: stored_pattern, pattern_projection = input batch_size = stored_pattern.shape[0 if self.batch_first else 1] return stored_pattern, self.pooling_weights.expand(size=(*( (batch_size, self.quantity) if self.batch_first else (self.quantity, batch_size) ), self.pooling_weights.shape[2])), pattern_projection def forward(self, input: Union[Tensor, Tuple[Tensor, Tensor]], stored_pattern_padding_mask: Optional[Tensor] = None, association_mask: Optional[Tensor] = None) -> Tensor: """ Compute Hopfield-based pooling on specified data. 
:param input: data to be pooled :param stored_pattern_padding_mask: mask to be applied on stored patterns :param association_mask: mask to be applied on inner association matrix :return: Hopfield-pooled input data """ return self.hopfield( input=self._prepare_input(input=input), stored_pattern_padding_mask=stored_pattern_padding_mask, association_mask=association_mask).flatten(start_dim=1) def get_association_matrix(self, input: Union[Tensor, Tuple[Tensor, Tensor]], stored_pattern_padding_mask: Optional[Tensor] = None, association_mask: Optional[Tensor] = None) -> Tensor: """ Fetch Hopfield association matrix used for pooling gathered by passing through the specified data. :param input: data to be passed through the Hopfield association :param stored_pattern_padding_mask: mask to be applied on stored patterns :param association_mask: mask to be applied on inner association matrix :return: association matrix as computed by the Hopfield core module """ with torch.no_grad(): return self.hopfield.get_association_matrix( input=self._prepare_input(input=input), stored_pattern_padding_mask=stored_pattern_padding_mask, association_mask=association_mask) def get_projected_pattern_matrix(self, input: Union[Tensor, Tuple[Tensor, Tensor]], stored_pattern_padding_mask: Optional[Tensor] = None, association_mask: Optional[Tensor] = None) -> Tensor: """ Fetch Hopfield projected pattern matrix gathered by passing through the specified data. 
:param input: data to be passed through the Hopfield association :param stored_pattern_padding_mask: mask to be applied on stored patterns :param association_mask: mask to be applied on inner association matrix :return: pattern projection matrix as computed by the Hopfield core module """ with torch.no_grad(): return self.hopfield.get_projected_pattern_matrix( input=self._prepare_input(input=input), stored_pattern_padding_mask=stored_pattern_padding_mask, association_mask=association_mask) @property def batch_first(self) -> bool: return self.hopfield.batch_first @property def scaling(self) -> Union[float, Tensor]: return self.hopfield.scaling @property def stored_pattern_dim(self) -> Optional[int]: return self.hopfield.stored_pattern_dim @property def state_pattern_dim(self) -> Optional[int]: return self.hopfield.state_pattern_dim @property def pattern_projection_dim(self) -> Optional[int]: return self.hopfield.pattern_projection_dim @property def input_size(self) -> Optional[int]: return self.hopfield.input_size @property def hidden_size(self) -> int: return self.hopfield.hidden_size @property def output_size(self) -> Optional[int]: return self.hopfield.output_size @property def pattern_size(self) -> Optional[int]: return self.hopfield.pattern_size @property def quantity(self) -> int: return self._quantity @property def update_steps_max(self) -> Optional[Union[int, Tensor]]: return self.hopfield.update_steps_max @property def update_steps_eps(self) -> Optional[Union[float, Tensor]]: return self.hopfield.update_steps_eps @property def stored_pattern_as_static(self) -> bool: return self.hopfield.stored_pattern_as_static @property def state_pattern_as_static(self) -> bool: return self.hopfield.state_pattern_as_static @property def pattern_projection_as_static(self) -> bool: return self.hopfield.pattern_projection_as_static @property def normalize_stored_pattern(self) -> bool: return self.hopfield.normalize_stored_pattern @property def 
normalize_stored_pattern_affine(self) -> bool: return self.hopfield.normalize_stored_pattern_affine @property def normalize_state_pattern(self) -> bool: return self.hopfield.normalize_state_pattern @property def normalize_state_pattern_affine(self) -> bool: return self.hopfield.normalize_state_pattern_affine @property def normalize_pattern_projection(self) -> bool: return self.hopfield.normalize_pattern_projection @property def normalize_pattern_projection_affine(self) -> bool: return self.hopfield.normalize_pattern_projection_affine class HopfieldLayer(Module): """ Wrapper class encapsulating a trainable but fixed stored pattern, pattern projection and "Hopfield" in one combined module to be used as a Hopfield-based pooling layer. """ def __init__(self, input_size: int, hidden_size: Optional[int] = None, output_size: Optional[int] = None, pattern_size: Optional[int] = None, num_heads: int = 1, scaling: Optional[Union[float, Tensor]] = None, update_steps_max: Optional[Union[int, Tensor]] = 0, update_steps_eps: Union[float, Tensor] = 1e-4, lookup_weights_as_separated: bool = False, lookup_targets_as_trainable: bool = True, normalize_stored_pattern: bool = True, normalize_stored_pattern_affine: bool = True, normalize_state_pattern: bool = True, normalize_state_pattern_affine: bool = True, normalize_pattern_projection: bool = True, normalize_pattern_projection_affine: bool = True, normalize_hopfield_space: bool = False, normalize_hopfield_space_affine: bool = False, stored_pattern_as_static: bool = False, state_pattern_as_static: bool = False, pattern_projection_as_static: bool = False, pattern_projection_as_connected: bool = False, stored_pattern_size: Optional[int] = None, pattern_projection_size: Optional[int] = None, batch_first: bool = True, association_activation: Optional[str] = None, dropout: float = 0.0, input_bias: bool = True, concat_bias_pattern: bool = False, add_zero_association: bool = False, disable_out_projection: bool = False, quantity: int = 1, 
trainable: bool = True ): """ Initialise a new instance of a Hopfield-based lookup layer. :param input_size: depth of the input (state pattern) :param hidden_size: depth of the association space :param output_size: depth of the output projection :param pattern_size: depth of patterns to be selected :param num_heads: amount of parallel association heads :param scaling: scaling of association heads, often represented as beta (one entry per head) :param update_steps_max: maximum count of association update steps (None equals to infinity) :param update_steps_eps: minimum difference threshold between two consecutive association update steps :param lookup_weights_as_separated: separate lookup weights from lookup target weights :param lookup_targets_as_trainable: employ trainable lookup target weights (used as pattern projection input) :param normalize_stored_pattern: apply normalization on stored patterns :param normalize_stored_pattern_affine: additionally enable affine normalization of stored patterns :param normalize_state_pattern: apply normalization on state patterns :param normalize_state_pattern_affine: additionally enable affine normalization of state patterns :param normalize_pattern_projection: apply normalization on the pattern projection :param normalize_pattern_projection_affine: additionally enable affine normalization of pattern projection :param normalize_hopfield_space: enable normalization of patterns in the Hopfield space :param normalize_hopfield_space_affine: additionally enable affine normalization of patterns in Hopfield space :param stored_pattern_as_static: interpret specified stored patterns as being static :param state_pattern_as_static: interpret specified state patterns as being static :param pattern_projection_as_static: interpret specified pattern projections as being static :param pattern_projection_as_connected: connect pattern projection with stored pattern :param stored_pattern_size: depth of input (stored pattern) :param 
pattern_projection_size: depth of input (pattern projection) :param batch_first: flag for specifying if the first dimension of data fed to "forward" reflects the batch size :param association_activation: additional activation to be applied on the result of the Hopfield association :param dropout: dropout probability applied on the association matrix :param input_bias: bias to be added to input (state and stored pattern as well as pattern projection) :param concat_bias_pattern: bias to be concatenated to stored pattern as well as pattern projection :param add_zero_association: add a new batch of zeros to stored pattern as well as pattern projection :param disable_out_projection: disable output projection :param quantity: amount of stored patterns :param trainable: stored pattern used for lookup is trainable """ super(HopfieldLayer, self).__init__() self.hopfield = Hopfield( input_size=input_size, hidden_size=hidden_size, output_size=output_size, pattern_size=pattern_size, num_heads=num_heads, scaling=scaling, update_steps_max=update_steps_max, update_steps_eps=update_steps_eps, normalize_stored_pattern=normalize_stored_pattern, normalize_stored_pattern_affine=normalize_stored_pattern_affine, normalize_state_pattern=normalize_state_pattern, normalize_state_pattern_affine=normalize_state_pattern_affine, normalize_pattern_projection=normalize_pattern_projection, normalize_pattern_projection_affine=normalize_pattern_projection_affine, normalize_hopfield_space=normalize_hopfield_space, normalize_hopfield_space_affine=normalize_hopfield_space_affine, stored_pattern_as_static=stored_pattern_as_static, state_pattern_as_static=state_pattern_as_static, pattern_projection_as_static=pattern_projection_as_static, pattern_projection_as_connected=pattern_projection_as_connected, stored_pattern_size=stored_pattern_size, pattern_projection_size=pattern_projection_size, batch_first=batch_first, association_activation=association_activation, dropout=dropout, input_bias=input_bias, 
concat_bias_pattern=concat_bias_pattern, add_zero_association=add_zero_association, disable_out_projection=disable_out_projection) self._quantity = quantity lookup_weight_size = self.hopfield.hidden_size if stored_pattern_as_static else self.hopfield.stored_pattern_dim self.lookup_weights = nn.Parameter(torch.empty(size=(*( (1, quantity) if batch_first else (quantity, 1) ), input_size if lookup_weight_size is None else lookup_weight_size)), requires_grad=trainable) if lookup_weights_as_separated: target_weight_size = self.lookup_weights.shape[ 2] if pattern_projection_size is None else pattern_projection_size self.target_weights = nn.Parameter(torch.empty(size=(*( (1, quantity) if batch_first else (quantity, 1) ), target_weight_size)), requires_grad=lookup_targets_as_trainable) else: self.register_parameter(name=r'target_weights', param=None) self.reset_parameters() def reset_parameters(self) -> None: """ Reset lookup and lookup target weights, including underlying Hopfield association. :return: None """ if hasattr(self.hopfield, r'reset_parameters'): self.hopfield.reset_parameters() # Explicitly initialise lookup and target weights. nn.init.normal_(self.lookup_weights, mean=0.0, std=0.02) if self.target_weights is not None: nn.init.normal_(self.target_weights, mean=0.0, std=0.02) def _prepare_input(self, input: Tensor) -> Tuple[Tensor, Tensor, Tensor]: """ Prepare input for Hopfield association. 
:param input: data to be prepared :return: stored pattern, expanded state pattern as well as pattern projection """ batch_size = input.shape[0 if self.batch_first else 1] stored_pattern = self.lookup_weights.expand(size=(*( (batch_size, self.quantity) if self.batch_first else (self.quantity, batch_size) ), self.lookup_weights.shape[2])) if self.target_weights is None: pattern_projection = stored_pattern else: pattern_projection = self.target_weights.expand(size=(*( (batch_size, self.quantity) if self.batch_first else (self.quantity, batch_size) ), self.target_weights.shape[2])) return stored_pattern, input, pattern_projection def forward(self, input: Tensor, stored_pattern_padding_mask: Optional[Tensor] = None, association_mask: Optional[Tensor] = None) -> Tensor: """ Compute Hopfield-based lookup on specified data. :param input: data to used in lookup :param stored_pattern_padding_mask: mask to be applied on stored patterns :param association_mask: mask to be applied on inner association matrix :return: result of Hopfield-based lookup on input data """ return self.hopfield( input=self._prepare_input(input=input), stored_pattern_padding_mask=stored_pattern_padding_mask, association_mask=association_mask) def get_association_matrix(self, input: Tensor, stored_pattern_padding_mask: Optional[Tensor] = None, association_mask: Optional[Tensor] = None) -> Tensor: """ Fetch Hopfield association matrix used for lookup gathered by passing through the specified data. 
:param input: data to be passed through the Hopfield association :param stored_pattern_padding_mask: mask to be applied on stored patterns :param association_mask: mask to be applied on inner association matrix :return: association matrix as computed by the Hopfield core module """ with torch.no_grad(): return self.hopfield.get_association_matrix( input=self._prepare_input(input=input), stored_pattern_padding_mask=stored_pattern_padding_mask, association_mask=association_mask) def get_projected_pattern_matrix(self, input: Union[Tensor, Tuple[Tensor, Tensor]], stored_pattern_padding_mask: Optional[Tensor] = None, association_mask: Optional[Tensor] = None) -> Tensor: """ Fetch Hopfield projected pattern matrix gathered by passing through the specified data. :param input: data to be passed through the Hopfield association :param stored_pattern_padding_mask: mask to be applied on stored patterns :param association_mask: mask to be applied on inner association matrix :return: pattern projection matrix as computed by the Hopfield core module """ with torch.no_grad(): return self.hopfield.get_projected_pattern_matrix( input=self._prepare_input(input=input), stored_pattern_padding_mask=stored_pattern_padding_mask, association_mask=association_mask) @property def batch_first(self) -> bool: return self.hopfield.batch_first @property def scaling(self) -> Union[float, Tensor]: return self.hopfield.scaling @property def stored_pattern_dim(self) -> Optional[int]: return self.hopfield.stored_pattern_dim @property def state_pattern_dim(self) -> Optional[int]: return self.hopfield.state_pattern_dim @property def pattern_projection_dim(self) -> Optional[int]: return self.hopfield.pattern_projection_dim @property def input_size(self) -> Optional[int]: return self.hopfield.input_size @property def hidden_size(self) -> int: return self.hopfield.hidden_size @property def output_size(self) -> Optional[int]: return self.hopfield.output_size @property def pattern_size(self) -> 
Optional[int]: return self.hopfield.pattern_size @property def quantity(self) -> int: return self._quantity @property def update_steps_max(self) -> Optional[Union[int, Tensor]]: return self.hopfield.update_steps_max @property def update_steps_eps(self) -> Optional[Union[float, Tensor]]: return self.hopfield.update_steps_eps @property def stored_pattern_as_static(self) -> bool: return self.hopfield.stored_pattern_as_static @property def state_pattern_as_static(self) -> bool: return self.hopfield.state_pattern_as_static @property def pattern_projection_as_static(self) -> bool: return self.hopfield.pattern_projection_as_static @property def normalize_stored_pattern(self) -> bool: return self.hopfield.normalize_stored_pattern @property def normalize_stored_pattern_affine(self) -> bool: return self.hopfield.normalize_stored_pattern_affine @property def normalize_state_pattern(self) -> bool: return self.hopfield.normalize_state_pattern @property def normalize_state_pattern_affine(self) -> bool: return self.hopfield.normalize_state_pattern_affine @property def normalize_pattern_projection(self) -> bool: return self.hopfield.normalize_pattern_projection @property def normalize_pattern_projection_affine(self) -> bool: return self.hopfield.normalize_pattern_projection_affine
49.794872
120
0.694757
5,222
44,666
5.667369
0.047108
0.068086
0.029194
0.031627
0.850853
0.815003
0.783342
0.76496
0.748099
0.722892
0
0.002071
0.24325
44,666
896
121
49.850446
0.873495
0.289706
0
0.689464
0
0
0.019154
0
0
0
0
0
0.014787
1
0.15342
false
0
0.012939
0.118299
0.314233
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
1
0
0
0
7
9ed10835d366ce1ba65ae31192d030850d57baf5
9,973
py
Python
router/city_visit_heap_test.py
awesome-archive/city_visit_planner
20befca3d70db61bc83356eedd490a298b27f96f
[ "MIT" ]
1
2019-11-14T22:08:59.000Z
2019-11-14T22:08:59.000Z
router/city_visit_heap_test.py
sandoche/city_visit_planner
20befca3d70db61bc83356eedd490a298b27f96f
[ "MIT" ]
null
null
null
router/city_visit_heap_test.py
sandoche/city_visit_planner
20befca3d70db61bc83356eedd490a298b27f96f
[ "MIT" ]
null
null
null
import unittest
from data import city_visit as city_visit_
from router import city_visit_heap as city_visit_heap_
from router import city_visit_points_left


class MockDayVisitParameters(city_visit_.DayVisitParametersInterface):
    """Minimal DayVisitParameters stub identified solely by a hash key."""

    def __init__(self, hash_key):
        assert isinstance(hash_key, str)
        self.hash_key = hash_key

    def DatelessHashKey(self):
        return self.hash_key


class MockDayVisit(city_visit_.DayVisitInterface):
    """Minimal DayVisit stub identified solely by a hash key."""

    def __init__(self, hash_key):
        assert isinstance(hash_key, str)
        self.hash_key = hash_key

    def DatelessHashKey(self):
        return self.hash_key


def MockCityVisitPointsLeft(day_visit_hash_keys, cost):
    """Build a CityVisitPointsLeft whose day visits carry the given hash keys.

    :param day_visit_hash_keys: list of str hash keys, one per mock day visit.
    :param cost: float cost recorded in the CityVisitSummary.
    :return: city_visit_points_left.CityVisitPointsLeft with no points left.
    """
    assert isinstance(day_visit_hash_keys, list)
    assert isinstance(cost, float)
    city_visit = city_visit_.CityVisit(
        [MockDayVisit(day_visit_hash_key)
         for day_visit_hash_key in day_visit_hash_keys],
        city_visit_.CityVisitSummary(cost, 0.))
    return city_visit_points_left.CityVisitPointsLeft(city_visit, [])


class CityVisitHeapTest(unittest.TestCase):

    def testGeneral(self):
        city_visit_heap = city_visit_heap_.CityVisitHeap(
            3, [MockDayVisitParameters('par')])

        self.assertEqual(0, city_visit_heap.Size())
        self.assertEqual([], city_visit_heap.GetCityVisitPointsLeftList())

        visit_a = MockCityVisitPointsLeft(['a'], 10.)
        city_visit_heap.PushCityVisit(visit_a)
        self.assertEqual(1, city_visit_heap.Size())
        self.assertRaises(
            AssertionError, city_visit_heap.GetCityVisitPointsLeftList)

        visit_b = MockCityVisitPointsLeft(['b'], 5.)
        city_visit_heap.PushCityVisit(visit_b)
        self.assertEqual(2, city_visit_heap.Size())
        self.assertRaises(
            AssertionError, city_visit_heap.GetCityVisitPointsLeftList)

        visit_c = MockCityVisitPointsLeft(['c'], 7.)
        city_visit_heap.PushCityVisit(visit_c)
        self.assertEqual(3, city_visit_heap.Size())
        self.assertRaises(
            AssertionError, city_visit_heap.GetCityVisitPointsLeftList)

        city_visit_heap.Shrink()
        self.assertEqual(3, city_visit_heap.Size())
        self.assertEqual([visit_b, visit_c, visit_a],
                         city_visit_heap.GetCityVisitPointsLeftList())

        visit_d = MockCityVisitPointsLeft(['d'], 3.)
        city_visit_heap.PushCityVisit(visit_d)
        self.assertEqual(4, city_visit_heap.Size())
        self.assertRaises(
            AssertionError, city_visit_heap.GetCityVisitPointsLeftList)

        visit_e = MockCityVisitPointsLeft(['e'], 15.)
        city_visit_heap.PushCityVisit(visit_e)
        self.assertEqual(5, city_visit_heap.Size())
        self.assertRaises(
            AssertionError, city_visit_heap.GetCityVisitPointsLeftList)

        city_visit_heap.Shrink()
        self.assertEqual(3, city_visit_heap.Size())
        self.assertEqual([visit_d, visit_b, visit_c],
                         city_visit_heap.GetCityVisitPointsLeftList())

        city_visit_heap.Clear()
        self.assertEqual(0, city_visit_heap.Size())
        self.assertEqual([], city_visit_heap.GetCityVisitPointsLeftList())

    def testAddingSameOrderlessHashKeyShrink(self):
        city_visit_heap = city_visit_heap_.CityVisitHeap(
            3, [MockDayVisitParameters('par')])

        self.assertEqual(0, city_visit_heap.Size())
        self.assertEqual([], city_visit_heap.GetCityVisitPointsLeftList())

        visit_a = MockCityVisitPointsLeft(['adf'], 10.)
        city_visit_heap.PushCityVisit(visit_a)
        self.assertEqual(1, city_visit_heap.Size())
        self.assertRaises(
            AssertionError, city_visit_heap.GetCityVisitPointsLeftList)

        visit_b = MockCityVisitPointsLeft(['bc'], 5.)
        city_visit_heap.PushCityVisit(visit_b)
        self.assertEqual(2, city_visit_heap.Size())
        self.assertRaises(
            AssertionError, city_visit_heap.GetCityVisitPointsLeftList)

        visit_c = MockCityVisitPointsLeft(['bc'], 7.)  # Same day_visit_hash_keys.
        city_visit_heap.PushCityVisit(visit_c)  # cost higher.
        self.assertEqual(2, city_visit_heap.Size())
        self.assertRaises(
            AssertionError, city_visit_heap.GetCityVisitPointsLeftList)

        city_visit_heap.Shrink()
        self.assertEqual(2, city_visit_heap.Size())
        self.assertEqual([visit_b, visit_a],
                         city_visit_heap.GetCityVisitPointsLeftList())

        visit_d = MockCityVisitPointsLeft(['adf'], 3.)  # Same day_visit_hash_keys.
        city_visit_heap.PushCityVisit(visit_d)  # cost lower.
        self.assertEqual(2, city_visit_heap.Size())
        self.assertRaises(
            AssertionError, city_visit_heap.GetCityVisitPointsLeftList)

        city_visit_heap.Shrink()
        self.assertEqual(2, city_visit_heap.Size())
        self.assertEqual([visit_d, visit_b],
                         city_visit_heap.GetCityVisitPointsLeftList())

        visit_e = MockCityVisitPointsLeft(['eg'], 15.)
        city_visit_heap.PushCityVisit(visit_e)
        self.assertEqual(3, city_visit_heap.Size())
        self.assertRaises(
            AssertionError, city_visit_heap.GetCityVisitPointsLeftList)

        city_visit_heap.Shrink()
        self.assertEqual(3, city_visit_heap.Size())
        self.assertEqual([visit_d, visit_b, visit_e],
                         city_visit_heap.GetCityVisitPointsLeftList())

        visit_f = MockCityVisitPointsLeft(['adf'], 1.)  # Same day_visit_hash_keys.
        city_visit_heap.PushCityVisit(visit_f)  # cost lower.
        self.assertEqual(3, city_visit_heap.Size())
        self.assertRaises(
            AssertionError, city_visit_heap.GetCityVisitPointsLeftList)

        visit_g = MockCityVisitPointsLeft(['eg'], 12.)  # Same day_visit_hash_keys.
        city_visit_heap.PushCityVisit(visit_g)  # cost lower.
        self.assertEqual(3, city_visit_heap.Size())
        self.assertRaises(
            AssertionError, city_visit_heap.GetCityVisitPointsLeftList)

        city_visit_heap.Shrink()
        self.assertEqual(3, city_visit_heap.Size())
        self.assertEqual([visit_f, visit_b, visit_g],
                         city_visit_heap.GetCityVisitPointsLeftList())

        city_visit_heap.Clear()
        self.assertEqual(0, city_visit_heap.Size())
        self.assertEqual([], city_visit_heap.GetCityVisitPointsLeftList())

    def testAddingSameOrderlessHashKeyNoShrink(self):
        city_visit_heap = city_visit_heap_.CityVisitHeap(
            3, [MockDayVisitParameters('par')])

        self.assertEqual(0, city_visit_heap.Size())
        self.assertEqual([], city_visit_heap.GetCityVisitPointsLeftList())

        visit_a = MockCityVisitPointsLeft(['adf'], 10.)
        city_visit_heap.PushCityVisit(visit_a)
        self.assertEqual(1, city_visit_heap.Size())
        self.assertRaises(
            AssertionError, city_visit_heap.GetCityVisitPointsLeftList)

        visit_b = MockCityVisitPointsLeft(['bc'], 5.)
        city_visit_heap.PushCityVisit(visit_b)
        self.assertEqual(2, city_visit_heap.Size())
        self.assertRaises(
            AssertionError, city_visit_heap.GetCityVisitPointsLeftList)

        visit_c = MockCityVisitPointsLeft(['bc'], 7.)  # Same day_visit_hash_keys.
        city_visit_heap.PushCityVisit(visit_c)  # cost higher.
        self.assertEqual(2, city_visit_heap.Size())
        self.assertRaises(
            AssertionError, city_visit_heap.GetCityVisitPointsLeftList)

        visit_d = MockCityVisitPointsLeft(['adf'], 3.)  # Same day_visit_hash_keys.
        city_visit_heap.PushCityVisit(visit_d)  # cost lower.
        self.assertEqual(2, city_visit_heap.Size())
        self.assertRaises(
            AssertionError, city_visit_heap.GetCityVisitPointsLeftList)

        visit_e = MockCityVisitPointsLeft(['eg'], 15.)
        city_visit_heap.PushCityVisit(visit_e)
        self.assertEqual(3, city_visit_heap.Size())
        self.assertRaises(
            AssertionError, city_visit_heap.GetCityVisitPointsLeftList)

        visit_f = MockCityVisitPointsLeft(['adf'], 1.)  # Same day_visit_hash_keys.
        city_visit_heap.PushCityVisit(visit_f)  # cost lower.
        self.assertEqual(3, city_visit_heap.Size())
        self.assertRaises(
            AssertionError, city_visit_heap.GetCityVisitPointsLeftList)

        visit_g = MockCityVisitPointsLeft(['eg'], 12.)  # Same day_visit_hash_keys.
        city_visit_heap.PushCityVisit(visit_g)  # cost lower.
        self.assertEqual(3, city_visit_heap.Size())
        self.assertRaises(
            AssertionError, city_visit_heap.GetCityVisitPointsLeftList)

        city_visit_heap.Shrink()
        self.assertEqual(3, city_visit_heap.Size())
        self.assertEqual([visit_f, visit_b, visit_g],
                         city_visit_heap.GetCityVisitPointsLeftList())

        city_visit_heap.Clear()
        self.assertEqual(0, city_visit_heap.Size())
        self.assertEqual([], city_visit_heap.GetCityVisitPointsLeftList())

    def testCityVisitOrderlessHashKey(self):
        city_visit_heap_a = city_visit_heap_.CityVisitHeap(3, [
            MockDayVisitParameters('parX'), MockDayVisitParameters('parY')])
        city_visit_heap_b = city_visit_heap_.CityVisitHeap(3, [
            MockDayVisitParameters('parY'), MockDayVisitParameters('parX')])

        visit_a = MockCityVisitPointsLeft(['dayX', 'dayY'], 10.)
        visit_b = MockCityVisitPointsLeft(['dayY', 'dayX'], 10.)

        # Same pairs of day_visit_parameters and day_visit, but different order.
        self.assertEqual(city_visit_heap_a._CityVisitDatelessHashKey(visit_a),
                         city_visit_heap_b._CityVisitDatelessHashKey(visit_b))
        # Same here.
        self.assertEqual(city_visit_heap_a._CityVisitDatelessHashKey(visit_b),
                         city_visit_heap_b._CityVisitDatelessHashKey(visit_a))
        # Parameters are different for the same day_visits.
        self.assertNotEqual(city_visit_heap_a._CityVisitDatelessHashKey(visit_a),
                            city_visit_heap_b._CityVisitDatelessHashKey(visit_a))
        # Day_visits are different for the same parameters.
        self.assertNotEqual(city_visit_heap_a._CityVisitDatelessHashKey(visit_a),
                            city_visit_heap_a._CityVisitDatelessHashKey(visit_b))


if __name__ == '__main__':
    unittest.main()
41.041152
88
0.720345
1,093
9,973
6.201281
0.089661
0.163323
0.216731
0.08026
0.864562
0.850693
0.823104
0.811744
0.800974
0.780909
0
0.008468
0.182994
9,973
242
89
41.210744
0.823392
0.049634
0
0.768817
0
0
0.009306
0
0
0
0
0
0.387097
1
0.048387
false
0
0.021505
0.010753
0.102151
0
0
0
0
null
0
1
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
8
9ef4f18a543a01acd015667c4b26335039afc45c
153
py
Python
exercicio4/nums.py
laurocruz/MC733
2b29ab120d7c9a3bb679a5c8f746bfcebbf8e924
[ "MIT" ]
null
null
null
exercicio4/nums.py
laurocruz/MC733
2b29ab120d7c9a3bb679a5c8f746bfcebbf8e924
[ "MIT" ]
null
null
null
exercicio4/nums.py
laurocruz/MC733
2b29ab120d7c9a3bb679a5c8f746bfcebbf8e924
[ "MIT" ]
null
null
null
# Print n, then twice: the integers 1..n**2 space-separated on one line.
n = 200
print(n)
for _ in range(2):
    for value in range(1, n * n + 1):
        print(value, end=' ')
    print()
10.928571
27
0.503268
31
153
2.483871
0.354839
0.311688
0.155844
0.285714
0.831169
0.831169
0.831169
0.831169
0.831169
0.831169
0
0.075
0.215686
153
13
28
11.769231
0.566667
0
0
0.75
0
0
0.013245
0
0
0
0
0
0
1
0
false
0
0
0
0
0.625
0
0
0
null
1
0
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
10