blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 4
721
| content_id
stringlengths 40
40
| detected_licenses
listlengths 0
57
| license_type
stringclasses 2
values | repo_name
stringlengths 5
91
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 321
values | visit_date
timestamp[ns]date 2016-08-12 09:31:09
2023-09-06 10:45:07
| revision_date
timestamp[ns]date 2010-09-28 14:01:40
2023-09-06 06:22:19
| committer_date
timestamp[ns]date 2010-09-28 14:01:40
2023-09-06 06:22:19
| github_id
int64 426
681M
| star_events_count
int64 101
243k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 23
values | gha_event_created_at
timestamp[ns]date 2012-06-28 18:51:49
2023-09-14 21:59:16
⌀ | gha_created_at
timestamp[ns]date 2008-02-11 22:55:26
2023-08-10 11:14:58
⌀ | gha_language
stringclasses 147
values | src_encoding
stringclasses 26
values | language
stringclasses 2
values | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 6
10.2M
| extension
stringclasses 115
values | filename
stringlengths 3
113
| content
stringlengths 6
10.2M
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
c11ea35d271d7f8962bd79545e36065ff32ef6e7
|
f509ab9825c542e09b0c6591d86ef1f9feb540a6
|
/pkgs/conf-pkg/src/genie/libs/conf/ospf/iosxe/areanetwork.py
|
82745778b4cd1275cb3c3cfa3d94dde997271999
|
[
"Apache-2.0"
] |
permissive
|
CiscoTestAutomation/genielibs
|
97f597117193aaa18028defeb69078ebb241173a
|
e42e51475cddcb10f5c7814d0fe892ac865742ba
|
refs/heads/master
| 2023-08-11T16:39:41.959947
| 2023-07-27T17:58:42
| 2023-07-27T17:58:42
| 130,717,047
| 109
| 60
|
Apache-2.0
| 2023-08-29T22:32:08
| 2018-04-23T15:21:56
|
Python
|
UTF-8
|
Python
| false
| false
| 1,310
|
py
|
areanetwork.py
|
'''
OSPF Genie Conf Object Implementation for IOSXE:
- AreaNetwork multi-line configuration implementation for IOSXE - CLI
'''
# Python
import warnings
from abc import ABC
# Genie
from genie.conf.base.cli import CliConfigBuilder
from genie.conf.base.attributes import AttributesHelper
class AreaNetwork(ABC):
    """IOSXE CLI builder for OSPF ``network ... area ...`` statements."""

    def build_config(self, apply=True, attributes=None, unconfig=False,
                     **kwargs):
        """Render the area-network configuration line(s) as a string.

        With ``unconfig=True`` the builder emits the ``no ...`` form.
        """
        attributes = AttributesHelper(self, attributes)
        configurations = CliConfigBuilder(unconfig=unconfig)

        # The area id is always passed in by the parent OSPF object.
        area = kwargs['area']

        # router ospf 1
        #  network 192.168.1.0 0.0.0.0 area 2
        #  network 192.168.1.1 1.1.1.1 area 3
        network = attributes.value('area_network')
        wildcard = attributes.value('area_network_wildcard')
        if network and wildcard:
            # Doubled braces stay literal so that attributes.format()
            # can substitute the network/wildcard placeholders afterwards.
            template = ('network {{area_network}}'
                        ' {{area_network_wildcard}} area {0}'.format(area))
            configurations.append_line(attributes.format(template))

        return str(configurations)

    def build_unconfig(self, apply=True, attributes=None, **kwargs):
        """Render the removal form by delegating to build_config."""
        return self.build_config(apply=apply, attributes=attributes,
                                 unconfig=True, **kwargs)
|
b578e449c0664de8f3ce6f280d945937f26e1937
|
5a52ccea88f90dd4f1acc2819997fce0dd5ffb7d
|
/alipay/aop/api/domain/AlipayBossFncInvoiceApplyModel.py
|
99e12846b57264813d4e9171cec1a0ef31536397
|
[
"Apache-2.0"
] |
permissive
|
alipay/alipay-sdk-python-all
|
8bd20882852ffeb70a6e929038bf88ff1d1eff1c
|
1fad300587c9e7e099747305ba9077d4cd7afde9
|
refs/heads/master
| 2023-08-27T21:35:01.778771
| 2023-08-23T07:12:26
| 2023-08-23T07:12:26
| 133,338,689
| 247
| 70
|
Apache-2.0
| 2023-04-25T04:54:02
| 2018-05-14T09:40:54
|
Python
|
UTF-8
|
Python
| false
| false
| 4,985
|
py
|
AlipayBossFncInvoiceApplyModel.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
from alipay.aop.api.domain.MultiCurrencyMoneyOpenApi import MultiCurrencyMoneyOpenApi
class AlipayBossFncInvoiceApplyModel(object):
    """Parameter model for the Alipay boss FNC invoice-apply API.

    Plain data holder: each business field is exposed through a property,
    and ``to_alipay_dict``/``from_alipay_dict`` convert to and from the
    wire-format dictionary used by the OpenAPI gateway.
    """

    # Serialization order of the business fields.
    _FIELD_NAMES = ('biz_id', 'biz_no', 'invoice_amt', 'invoice_type',
                    'memo', 'mthtly_bill_nos', 'operator', 'out_biz_type')

    def __init__(self):
        # Backing slots for the public properties below; all optional.
        self._biz_id = None
        self._biz_no = None
        self._invoice_amt = None
        self._invoice_type = None
        self._memo = None
        self._mthtly_bill_nos = None
        self._operator = None
        self._out_biz_type = None

    @property
    def biz_id(self):
        return self._biz_id

    @biz_id.setter
    def biz_id(self, val):
        self._biz_id = val

    @property
    def biz_no(self):
        return self._biz_no

    @biz_no.setter
    def biz_no(self, val):
        self._biz_no = val

    @property
    def invoice_amt(self):
        return self._invoice_amt

    @invoice_amt.setter
    def invoice_amt(self, val):
        # Accept either a ready-made money object or its dict form.
        if isinstance(val, MultiCurrencyMoneyOpenApi):
            self._invoice_amt = val
        else:
            self._invoice_amt = MultiCurrencyMoneyOpenApi.from_alipay_dict(val)

    @property
    def invoice_type(self):
        return self._invoice_type

    @invoice_type.setter
    def invoice_type(self, val):
        self._invoice_type = val

    @property
    def memo(self):
        return self._memo

    @memo.setter
    def memo(self, val):
        self._memo = val

    @property
    def mthtly_bill_nos(self):
        return self._mthtly_bill_nos

    @mthtly_bill_nos.setter
    def mthtly_bill_nos(self, val):
        # Only list input is accepted; a shallow copy is stored.
        # Non-list input leaves the current value untouched (SDK behavior).
        if isinstance(val, list):
            self._mthtly_bill_nos = list()
            for item in val:
                self._mthtly_bill_nos.append(item)

    @property
    def operator(self):
        return self._operator

    @operator.setter
    def operator(self, val):
        self._operator = val

    @property
    def out_biz_type(self):
        return self._out_biz_type

    @out_biz_type.setter
    def out_biz_type(self, val):
        self._out_biz_type = val

    def to_alipay_dict(self):
        """Serialize every truthy field into the OpenAPI wire dict."""
        params = dict()
        for name in self._FIELD_NAMES:
            val = getattr(self, name)
            if not val:
                # Falsy values (None, '', 0, []) are omitted, matching
                # the SDK's standard serialization behavior.
                continue
            if name == 'mthtly_bill_nos' and isinstance(val, list):
                # Convert nested elements in place, as the SDK does.
                for idx, element in enumerate(val):
                    if hasattr(element, 'to_alipay_dict'):
                        val[idx] = element.to_alipay_dict()
            if hasattr(val, 'to_alipay_dict'):
                params[name] = val.to_alipay_dict()
            else:
                params[name] = val
        return params

    @staticmethod
    def from_alipay_dict(d):
        """Build a model from a wire dict; returns None for empty input."""
        if not d:
            return None
        o = AlipayBossFncInvoiceApplyModel()
        for name in AlipayBossFncInvoiceApplyModel._FIELD_NAMES:
            if name in d:
                # Assignment goes through the property setters, so any
                # per-field conversion (e.g. invoice_amt) still applies.
                setattr(o, name, d[name])
        return o
|
941792efc44e3ad533ff29a69eab2e2703a4b867
|
a29b8d6ae6642ef80d04ae99d721b703de06db69
|
/maro/rl/utils/__init__.py
|
0be12e6bc486c12cc7731a22290250b06d5e0fd2
|
[
"LicenseRef-scancode-generic-cla",
"MIT"
] |
permissive
|
microsoft/maro
|
6aab1a4e86fddabf7f242f0d1020d985a5f7a5f3
|
b3c6a589ad9036b03221e776a6929b2bc1eb4680
|
refs/heads/master
| 2023-08-24T16:52:38.250279
| 2023-05-15T04:31:58
| 2023-05-15T04:31:58
| 230,389,247
| 764
| 158
|
MIT
| 2023-07-25T20:59:06
| 2019-12-27T06:48:27
|
Python
|
UTF-8
|
Python
| false
| false
| 700
|
py
|
__init__.py
|
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
from typing import Union
from .objects import SHAPE_CHECK_FLAG
from .torch_utils import average_grads, get_torch_device, match_shape, ndarray_to_tensor
from .trajectory_computation import discount_cumsum
from .transition_batch import MultiTransitionBatch, TransitionBatch, merge_transition_batches
# Convenience alias: a batch coming from either a single-agent or a
# multi-agent rollout.
AbsTransitionBatch = Union[TransitionBatch, MultiTransitionBatch]
# Public API of maro.rl.utils — re-exports from the submodules above.
__all__ = [
    "SHAPE_CHECK_FLAG",
    "average_grads",
    "get_torch_device",
    "match_shape",
    "ndarray_to_tensor",
    "discount_cumsum",
    "AbsTransitionBatch",
    "MultiTransitionBatch",
    "TransitionBatch",
    "merge_transition_batches",
]
|
3dee2a289052e87910da23a2ff8b3784de538322
|
fbbe424559f64e9a94116a07eaaa555a01b0a7bb
|
/Keras_tensorflow/source/pbr/tests/test_setup.py
|
f3fcd40905df053e1c9691f0987d0e0c773285a8
|
[
"MIT"
] |
permissive
|
ryfeus/lambda-packs
|
6544adb4dec19b8e71d75c24d8ed789b785b0369
|
cabf6e4f1970dc14302f87414f170de19944bac2
|
refs/heads/master
| 2022-12-07T16:18:52.475504
| 2022-11-29T13:35:35
| 2022-11-29T13:35:35
| 71,386,735
| 1,283
| 263
|
MIT
| 2022-11-26T05:02:14
| 2016-10-19T18:22:39
|
Python
|
UTF-8
|
Python
| false
| false
| 18,163
|
py
|
test_setup.py
|
# Copyright (c) 2011 OpenStack Foundation
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import print_function
import os
import sys
import tempfile
import testscenarios
try:
import cStringIO as io
BytesIO = io.StringIO
except ImportError:
import io
BytesIO = io.BytesIO
import fixtures
from pbr import git
from pbr import options
from pbr import packaging
from pbr.tests import base
class SkipFileWrites(base.BaseTestCase):
    """Scenario tests: ChangeLog/AUTHORS generation can be skipped.

    Each scenario pairs a setup.cfg option (skip_changelog / skip_authors)
    with an environment-variable override and asserts whether the target
    file ends up being written.  Scenario values (option_key, env_key,
    pkg_func, filename, ...) are injected as attributes by testscenarios.
    """
    scenarios = [
        ('changelog_option_true',
         dict(option_key='skip_changelog', option_value='True',
              env_key='SKIP_WRITE_GIT_CHANGELOG', env_value=None,
              pkg_func=git.write_git_changelog, filename='ChangeLog')),
        ('changelog_option_false',
         dict(option_key='skip_changelog', option_value='False',
              env_key='SKIP_WRITE_GIT_CHANGELOG', env_value=None,
              pkg_func=git.write_git_changelog, filename='ChangeLog')),
        ('changelog_env_true',
         dict(option_key='skip_changelog', option_value='False',
              env_key='SKIP_WRITE_GIT_CHANGELOG', env_value='True',
              pkg_func=git.write_git_changelog, filename='ChangeLog')),
        ('changelog_both_true',
         dict(option_key='skip_changelog', option_value='True',
              env_key='SKIP_WRITE_GIT_CHANGELOG', env_value='True',
              pkg_func=git.write_git_changelog, filename='ChangeLog')),
        ('authors_option_true',
         dict(option_key='skip_authors', option_value='True',
              env_key='SKIP_GENERATE_AUTHORS', env_value=None,
              pkg_func=git.generate_authors, filename='AUTHORS')),
        ('authors_option_false',
         dict(option_key='skip_authors', option_value='False',
              env_key='SKIP_GENERATE_AUTHORS', env_value=None,
              pkg_func=git.generate_authors, filename='AUTHORS')),
        ('authors_env_true',
         dict(option_key='skip_authors', option_value='False',
              env_key='SKIP_GENERATE_AUTHORS', env_value='True',
              pkg_func=git.generate_authors, filename='AUTHORS')),
        ('authors_both_true',
         dict(option_key='skip_authors', option_value='True',
              env_key='SKIP_GENERATE_AUTHORS', env_value='True',
              pkg_func=git.generate_authors, filename='AUTHORS')),
    ]
    def setUp(self):
        """Prepare a temp dir and wire up the scenario's option/env pair."""
        super(SkipFileWrites, self).setUp()
        self.temp_path = self.useFixture(fixtures.TempDir()).path
        self.root_dir = os.path.abspath(os.path.curdir)
        self.git_dir = os.path.join(self.root_dir, ".git")
        # These checks only make sense when run from a git checkout.
        if not os.path.exists(self.git_dir):
            self.skipTest("%s is missing; skipping git-related checks"
                          % self.git_dir)
            return
        self.filename = os.path.join(self.temp_path, self.filename)
        self.option_dict = dict()
        if self.option_key is not None:
            self.option_dict[self.option_key] = ('setup.cfg',
                                                 self.option_value)
        self.useFixture(
            fixtures.EnvironmentVariable(self.env_key, self.env_value))
    def test_skip(self):
        """The file must be absent iff the option or env var says skip."""
        self.pkg_func(git_dir=self.git_dir,
                      dest_dir=self.temp_path,
                      option_dict=self.option_dict)
        self.assertEqual(
            not os.path.exists(self.filename),
            (self.option_value.lower() in options.TRUE_VALUES
             or self.env_value is not None))
# Canned ``git log`` output (NUL-separated hash/subject/decoration records)
# fed to git.write_git_changelog via FakePopen in GitLogsTest.  It includes
# deliberately pathological tag names — parentheses, commas, leading digits —
# to exercise the changelog parser's tag filtering.
_changelog_content = """7780758\x00Break parser\x00 (tag: 1_foo.1)
04316fe\x00Make python\x00 (review/monty_taylor/27519)
378261a\x00Add an integration test script.\x00
3c373ac\x00Merge "Lib\x00 (HEAD, tag: 2013.2.rc2, tag: 2013.2, mile-proposed)
182feb3\x00Fix pip invocation for old versions of pip.\x00 (tag: 0.5.17)
fa4f46e\x00Remove explicit depend on distribute.\x00 (tag: 0.5.16)
d1c53dd\x00Use pip instead of easy_install for installation.\x00
a793ea1\x00Merge "Skip git-checkout related tests when .git is missing"\x00
6c27ce7\x00Skip git-checkout related tests when .git is missing\x00
451e513\x00Bug fix: create_stack() fails when waiting\x00
4c8cfe4\x00Improve test coverage: network delete API\x00 (tag: (evil))
d7e6167\x00Bug fix: Fix pass thru filtering in list_networks\x00 (tag: ev()il)
c47ec15\x00Consider 'in-use' a non-pending volume for caching\x00 (tag: ev)il)
8696fbd\x00Improve test coverage: private extension API\x00 (tag: ev(il)
f0440f8\x00Improve test coverage: hypervisor list\x00 (tag: e(vi)l)
04984a5\x00Refactor hooks file.\x00 (HEAD, tag: 0.6.7,b, tag: (12), master)
a65e8ee\x00Remove jinja pin.\x00 (tag: 0.5.14, tag: 0.5.13)
"""
class GitLogsTest(base.BaseTestCase):
    """Tests for ChangeLog/AUTHORS generation from (faked) git history."""
    def setUp(self):
        """Use a temp dir and make sure skip-env-vars are unset."""
        super(GitLogsTest, self).setUp()
        self.temp_path = self.useFixture(fixtures.TempDir()).path
        self.root_dir = os.path.abspath(os.path.curdir)
        self.git_dir = os.path.join(self.root_dir, ".git")
        self.useFixture(
            fixtures.EnvironmentVariable('SKIP_GENERATE_AUTHORS'))
        self.useFixture(
            fixtures.EnvironmentVariable('SKIP_WRITE_GIT_CHANGELOG'))
    def test_write_git_changelog(self):
        """ChangeLog keeps real tags/messages and drops noise.

        git output is faked via FakePopen serving _changelog_content.
        """
        self.useFixture(fixtures.FakePopen(lambda _: {
            "stdout": BytesIO(_changelog_content.encode('utf-8'))
        }))
        git.write_git_changelog(git_dir=self.git_dir,
                                dest_dir=self.temp_path)
        with open(os.path.join(self.temp_path, "ChangeLog"), "r") as ch_fh:
            changelog_contents = ch_fh.read()
        self.assertIn("2013.2", changelog_contents)
        self.assertIn("0.5.17", changelog_contents)
        self.assertIn("------", changelog_contents)
        self.assertIn("Refactor hooks file", changelog_contents)
        self.assertIn(
            "Bug fix: create_stack() fails when waiting",
            changelog_contents)
        # Trailing period must be stripped; hashes, review refs, bogus
        # tags and merge commits must not leak into the changelog.
        self.assertNotIn("Refactor hooks file.", changelog_contents)
        self.assertNotIn("182feb3", changelog_contents)
        self.assertNotIn("review/monty_taylor/27519", changelog_contents)
        self.assertNotIn("0.5.13", changelog_contents)
        self.assertNotIn("0.6.7", changelog_contents)
        self.assertNotIn("12", changelog_contents)
        self.assertNotIn("(evil)", changelog_contents)
        self.assertNotIn("ev()il", changelog_contents)
        self.assertNotIn("ev(il", changelog_contents)
        self.assertNotIn("ev)il", changelog_contents)
        self.assertNotIn("e(vi)l", changelog_contents)
        self.assertNotIn('Merge "', changelog_contents)
        self.assertNotIn('1_foo.1', changelog_contents)
    def test_generate_authors(self):
        """AUTHORS merges AUTHORS.in, log authors and co-authors."""
        author_old = u"Foo Foo <email@foo.com>"
        author_new = u"Bar Bar <email@bar.com>"
        co_author = u"Foo Bar <foo@bar.com>"
        co_author_by = u"Co-authored-by: " + co_author
        # Map each exact shell command pbr runs to its faked output.
        git_log_cmd = (
            "git --git-dir=%s log --format=%%aN <%%aE>"
            % self.git_dir)
        git_co_log_cmd = ("git --git-dir=%s log" % self.git_dir)
        git_top_level = "git rev-parse --show-toplevel"
        cmd_map = {
            git_log_cmd: author_new,
            git_co_log_cmd: co_author_by,
            git_top_level: self.root_dir,
        }
        exist_files = [self.git_dir,
                       os.path.join(self.temp_path, "AUTHORS.in")]
        self.useFixture(fixtures.MonkeyPatch(
            "os.path.exists",
            lambda path: os.path.abspath(path) in exist_files))
        def _fake_run_shell_command(cmd, **kwargs):
            return cmd_map[" ".join(cmd)]
        self.useFixture(fixtures.MonkeyPatch(
            "pbr.git._run_shell_command",
            _fake_run_shell_command))
        with open(os.path.join(self.temp_path, "AUTHORS.in"), "w") as auth_fh:
            auth_fh.write("%s\n" % author_old)
        git.generate_authors(git_dir=self.git_dir,
                             dest_dir=self.temp_path)
        with open(os.path.join(self.temp_path, "AUTHORS"), "r") as auth_fh:
            authors = auth_fh.read()
        self.assertTrue(author_old in authors)
        self.assertTrue(author_new in authors)
        self.assertTrue(co_author in authors)
class BuildSphinxTest(base.BaseTestCase):
    """Tests for pbr's Sphinx build integration (autodoc + builders).

    Scenario attributes (has_opt, autodoc, has_autodoc, optional excludes)
    are injected by testscenarios.
    """
    scenarios = [
        ('true_autodoc_caps',
         dict(has_opt=True, autodoc='True', has_autodoc=True)),
        ('true_autodoc_caps_with_excludes',
         dict(has_opt=True, autodoc='True', has_autodoc=True,
              excludes="fake_package.fake_private_module\n"
              "fake_package.another_fake_*\n"
              "fake_package.unknown_module")),
        ('true_autodoc_lower',
         dict(has_opt=True, autodoc='true', has_autodoc=True)),
        ('false_autodoc',
         dict(has_opt=True, autodoc='False', has_autodoc=False)),
        ('no_autodoc',
         dict(has_opt=False, autodoc='False', has_autodoc=False)),
    ]
    def setUp(self):
        """Build a fake package tree and a Distribution configured per-scenario."""
        super(BuildSphinxTest, self).setUp()
        # Stub out the real Sphinx build — only option handling is under test.
        self.useFixture(fixtures.MonkeyPatch(
            "sphinx.setup_command.BuildDoc.run", lambda self: None))
        from distutils import dist
        self.distr = dist.Distribution()
        self.distr.packages = ("fake_package",)
        self.distr.command_options["build_sphinx"] = {
            "source_dir": ["a", "."]}
        pkg_fixture = fixtures.PythonPackage(
            "fake_package", [("fake_module.py", b""),
                             ("another_fake_module_for_testing.py", b""),
                             ("fake_private_module.py", b"")])
        self.useFixture(pkg_fixture)
        self.useFixture(base.DiveDir(pkg_fixture.base))
        self.distr.command_options["pbr"] = {}
        if hasattr(self, "excludes"):
            self.distr.command_options["pbr"]["autodoc_exclude_modules"] = (
                'setup.cfg',
                "fake_package.fake_private_module\n"
                "fake_package.another_fake_*\n"
                "fake_package.unknown_module")
        if self.has_opt:
            options = self.distr.command_options["pbr"]
            options["autodoc_index_modules"] = ('setup.cfg', self.autodoc)
    def test_build_doc(self):
        """autoindex and per-module rst files appear iff autodoc is enabled."""
        build_doc = packaging.LocalBuildDoc(self.distr)
        build_doc.run()
        self.assertTrue(
            os.path.exists("api/autoindex.rst") == self.has_autodoc)
        self.assertTrue(
            os.path.exists(
                "api/fake_package.fake_module.rst") == self.has_autodoc)
        # Excluded modules must not get rst files even with autodoc on.
        if not self.has_autodoc or hasattr(self, "excludes"):
            assertion = self.assertFalse
        else:
            assertion = self.assertTrue
        assertion(
            os.path.exists(
                "api/fake_package.fake_private_module.rst"))
        assertion(
            os.path.exists(
                "api/fake_package.another_fake_module_for_testing.rst"))
    def test_builders_config(self):
        """Default builders are html+man; explicit settings are honored."""
        build_doc = packaging.LocalBuildDoc(self.distr)
        build_doc.finalize_options()
        self.assertEqual(2, len(build_doc.builders))
        self.assertIn('html', build_doc.builders)
        self.assertIn('man', build_doc.builders)
        build_doc = packaging.LocalBuildDoc(self.distr)
        build_doc.builders = ''
        build_doc.finalize_options()
        self.assertEqual('', build_doc.builders)
        build_doc = packaging.LocalBuildDoc(self.distr)
        build_doc.builders = 'man'
        build_doc.finalize_options()
        self.assertEqual(1, len(build_doc.builders))
        self.assertIn('man', build_doc.builders)
        build_doc = packaging.LocalBuildDoc(self.distr)
        build_doc.builders = 'html,man,doctest'
        build_doc.finalize_options()
        self.assertIn('html', build_doc.builders)
        self.assertIn('man', build_doc.builders)
        self.assertIn('doctest', build_doc.builders)
    def test_cmd_builder_override(self):
        """A command-line builder replaces the configured defaults."""
        if self.has_opt:
            self.distr.command_options["pbr"] = {
                "autodoc_index_modules": ('setup.cfg', self.autodoc)
            }
        self.distr.command_options["build_sphinx"]["builder"] = (
            "command line", "non-existing-builder")
        build_doc = packaging.LocalBuildDoc(self.distr)
        self.assertNotIn('non-existing-builder', build_doc.builders)
        self.assertIn('html', build_doc.builders)
        # process command line options which should override config
        build_doc.finalize_options()
        self.assertIn('non-existing-builder', build_doc.builders)
        self.assertNotIn('html', build_doc.builders)
    def test_cmd_builder_override_multiple_builders(self):
        """A comma-separated command-line list yields multiple builders."""
        if self.has_opt:
            self.distr.command_options["pbr"] = {
                "autodoc_index_modules": ('setup.cfg', self.autodoc)
            }
        self.distr.command_options["build_sphinx"]["builder"] = (
            "command line", "builder1,builder2")
        build_doc = packaging.LocalBuildDoc(self.distr)
        build_doc.finalize_options()
        self.assertEqual(["builder1", "builder2"], build_doc.builders)
class ParseRequirementsTestScenarios(base.BaseTestCase):
    """Scenario tests for packaging.parse_requirements.

    ``scenarios`` is built up in stages: plain cases first, then egg-URL
    cases multiplied by the versioned/non-versioned axis (and, for
    git:// URLs, by the editable axis).  testscenarios injects ``url``,
    ``expected`` and the optional ``versioned``/``editable`` flags.
    """
    versioned_scenarios = [
        ('non-versioned', {'versioned': False, 'expected': ['bar']}),
        ('versioned', {'versioned': True, 'expected': ['bar>=1.2.3']})
    ]
    scenarios = [
        ('normal', {'url': "foo\nbar", 'expected': ['foo', 'bar']}),
        ('normal_with_comments', {
            'url': "# this is a comment\nfoo\n# and another one\nbar",
            'expected': ['foo', 'bar']}),
        ('removes_index_lines', {'url': '-f foobar', 'expected': []}),
    ]
    scenarios = scenarios + testscenarios.multiply_scenarios([
        ('ssh_egg_url', {'url': 'git+ssh://foo.com/zipball#egg=bar'}),
        ('git_https_egg_url', {'url': 'git+https://foo.com/zipball#egg=bar'}),
        ('http_egg_url', {'url': 'https://foo.com/zipball#egg=bar'}),
    ], versioned_scenarios)
    scenarios = scenarios + testscenarios.multiply_scenarios(
        [
            ('git_egg_url',
             {'url': 'git://foo.com/zipball#egg=bar', 'name': 'bar'})
        ], [
            ('non-editable', {'editable': False}),
            ('editable', {'editable': True}),
        ],
        versioned_scenarios)
    def test_parse_requirements(self):
        """Write the scenario's requirement string and check the parse."""
        tmp_file = tempfile.NamedTemporaryFile()
        req_string = self.url
        # Decorate the URL according to the scenario's axes.
        if hasattr(self, 'editable') and self.editable:
            req_string = ("-e %s" % req_string)
        if hasattr(self, 'versioned') and self.versioned:
            req_string = ("%s-1.2.3" % req_string)
        with open(tmp_file.name, 'w') as fh:
            fh.write(req_string)
        self.assertEqual(self.expected,
                         packaging.parse_requirements([tmp_file.name]))
class ParseRequirementsTest(base.BaseTestCase):
    """Tests for requirements-file discovery and env-var overrides."""
    def setUp(self):
        super(ParseRequirementsTest, self).setUp()
        # Scratch file used as a stand-in requirements file.
        (fd, self.tmp_file) = tempfile.mkstemp(prefix='openstack',
                                               suffix='.setup')
    def test_parse_requirements_override_with_env(self):
        """PBR_REQUIREMENTS_FILES redirects parsing to the named file."""
        with open(self.tmp_file, 'w') as fh:
            fh.write("foo\nbar")
        self.useFixture(
            fixtures.EnvironmentVariable('PBR_REQUIREMENTS_FILES',
                                         self.tmp_file))
        self.assertEqual(['foo', 'bar'],
                         packaging.parse_requirements())
    def test_parse_requirements_override_with_env_multiple_files(self):
        """The first existing file from a comma-separated list wins."""
        with open(self.tmp_file, 'w') as fh:
            fh.write("foo\nbar")
        self.useFixture(
            fixtures.EnvironmentVariable('PBR_REQUIREMENTS_FILES',
                                         "no-such-file," + self.tmp_file))
        self.assertEqual(['foo', 'bar'],
                         packaging.parse_requirements())
    def test_get_requirement_from_file_empty(self):
        """An empty file list yields an empty requirements list."""
        actual = packaging.get_reqs_from_files([])
        self.assertEqual([], actual)
    def test_parse_requirements_python_version(self):
        """requirements-pyN.txt matching the running major version is used."""
        with open("requirements-py%d.txt" % sys.version_info[0],
                  "w") as fh:
            fh.write("# this is a comment\nfoobar\n# and another one\nfoobaz")
        self.assertEqual(['foobar', 'foobaz'],
                         packaging.parse_requirements())
    def test_parse_requirements_right_python_version(self):
        """A requirements file for a different major version is ignored."""
        with open("requirements-py1.txt", "w") as fh:
            fh.write("thisisatrap")
        with open("requirements-py%d.txt" % sys.version_info[0],
                  "w") as fh:
            fh.write("# this is a comment\nfoobar\n# and another one\nfoobaz")
        self.assertEqual(['foobar', 'foobaz'],
                         packaging.parse_requirements())
class ParseDependencyLinksTest(base.BaseTestCase):
    """Checks dependency-link extraction from requirements-style files."""

    def setUp(self):
        super(ParseDependencyLinksTest, self).setUp()
        # Scratch file that each test fills with requirement text.
        fd, self.tmp_file = tempfile.mkstemp(prefix="openstack",
                                             suffix=".setup")

    def _parse(self, text):
        # Write *text* to the scratch file and run the parser on it.
        with open(self.tmp_file, "w") as handle:
            handle.write(text)
        return packaging.parse_dependency_links([self.tmp_file])

    def test_parse_dependency_normal(self):
        self.assertEqual(["http://test.com"],
                         self._parse("http://test.com\n"))

    def test_parse_dependency_with_git_egg_url(self):
        self.assertEqual(["git://foo.com/zipball#egg=bar"],
                         self._parse("-e git://foo.com/zipball#egg=bar"))
|
35e88a1dbbf80b3e8ee5b03c90698ddf16221d5c
|
5f49680d6c50d554ca25b37798bc3113f637a68c
|
/GQN/model.py
|
628d3cc29ddf18ef6dbe6851ce365b602f587021
|
[] |
no_license
|
masa-su/pixyzoo
|
ad9bd4b28dc80f26d5582129f37d4c34a9ee9312
|
1fa0d9ded00c4e10e35ed1bf99892f81e5a5e3d2
|
refs/heads/master
| 2023-01-19T06:31:40.566390
| 2023-01-10T07:18:44
| 2023-01-10T07:18:44
| 156,181,088
| 107
| 23
| null | 2023-01-10T07:18:45
| 2018-11-05T08:02:08
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 6,792
|
py
|
model.py
|
import torch
from torch import nn
from torch.nn import functional as F
from pixyz.distributions import Normal
from representation import Pyramid, Tower, Pool
from inference import InferenceCore, Inference
from generation import GenerationCore, Prior, Generation
from pixyz.losses import KullbackLeibler
class GQN(nn.Module):
    """Generative Query Network implemented with pixyz distributions.

    Args:
        representation (str): scene encoder type — "pyramid", "tower" or
            "pool".
        L (int): number of generative/inference layers.
        shared_core (bool): if True a single core is shared across the L
            layers, otherwise one core per layer is used.
    """

    def __init__(self, representation="pool", L=12, shared_core=False):
        super(GQN, self).__init__()

        # Number of generative layers
        self.L = L
        self.shared_core = shared_core

        # Representation network
        self.representation = representation
        if representation == "pyramid":
            self.phi = Pyramid()
        elif representation == "tower":
            self.phi = Tower()
        elif representation == "pool":
            self.phi = Pool()

        # Generation network
        if shared_core:
            self.inference_core = InferenceCore()
            self.generation_core = GenerationCore()
        else:
            self.inference_core = nn.ModuleList([InferenceCore() for _ in range(L)])
            self.generation_core = nn.ModuleList([GenerationCore() for _ in range(L)])

        # Distribution
        self.pi = Prior()
        self.q = Inference()
        self.g = Generation()

    def _encode_scene(self, x, v, B, M):
        """Sum the M per-view encodings r_k into one scene representation r.

        Bug fix: the "tower" branch previously called ``x.new_zero`` — a
        method that does not exist on torch tensors (the API is
        ``new_zeros``) — so tower mode crashed with AttributeError.
        """
        if self.representation == "tower":
            r = x.new_zeros((B, 256, 16, 16))
        else:
            r = x.new_zeros((B, 256, 1, 1))
        for k in range(M):
            r_k = self.phi(x[:, k], v[:, k])
            r += r_k
        return r

    def forward(self, x, v, v_q, x_q, sigma):
        """Estimate the ELBO for query image x_q at viewpoint v_q.

        Accumulates -KL(q || pi) per layer and adds the likelihood term
        evaluated on the final canvas u.
        """
        B, M, *_ = x.size()

        # Scene encoder
        r = self._encode_scene(x, v, B, M)

        # Generator initial state
        c_g = x.new_zeros((B, 128, 16, 16))
        h_g = x.new_zeros((B, 128, 16, 16))
        u = x.new_zeros((B, 128, 64, 64))

        # Inference initial state
        c_e = x.new_zeros((B, 128, 16, 16))
        h_e = x.new_zeros((B, 128, 16, 16))

        elbo = 0
        for l in range(self.L):
            # Inference state update
            if self.shared_core:
                c_e, h_e = self.inference_core(x_q, v_q, r, c_e, h_e, h_g, u)
            else:
                c_e, h_e = self.inference_core[l](x_q, v_q, r, c_e, h_e, h_g, u)

            # Posterior sample (reparameterized so gradients flow)
            z = self.q.sample({"h_e": h_e}, reparam=True)["z"]

            # ELBO KL contribution update
            elbo -= KullbackLeibler(self.q, self.pi).eval({"h_e": h_e, "h_g": h_g})

            # Generator state update
            if self.shared_core:
                c_g, h_g, u = self.generation_core(v_q, r, c_g, h_g, u, z)
            else:
                c_g, h_g, u = self.generation_core[l](v_q, r, c_g, h_g, u, z)

        # ELBO likelihood contribution update
        elbo += self.g.log_prob().eval({"u": u, "sigma": sigma, "x_q": x_q})

        return elbo

    def generate(self, x, v, v_q):
        """Sample an image for viewpoint v_q using the prior over z."""
        B, M, *_ = x.size()

        # Scene encoder
        r = self._encode_scene(x, v, B, M)

        # Initial state
        c_g = x.new_zeros((B, 128, 16, 16))
        h_g = x.new_zeros((B, 128, 16, 16))
        u = x.new_zeros((B, 128, 64, 64))

        for l in range(self.L):
            # Prior sample
            z = self.pi.sample({"h_g": h_g})["z"]

            # State update
            if self.shared_core:
                c_g, h_g, u = self.generation_core(v_q, r, c_g, h_g, u, z)
            else:
                c_g, h_g, u = self.generation_core[l](v_q, r, c_g, h_g, u, z)

        # sigma=0 → return the deterministic mean image, clamped to [0, 1]
        x_q_hat = self.g.sample_mean({"u": u, "sigma": 0})

        return torch.clamp(x_q_hat, 0, 1)

    def kl_divergence(self, x, v, v_q, x_q):
        """Return the summed per-layer KL(q || pi) for the query view."""
        B, M, *_ = x.size()

        # Scene encoder
        r = self._encode_scene(x, v, B, M)

        # Generator initial state
        c_g = x.new_zeros((B, 128, 16, 16))
        h_g = x.new_zeros((B, 128, 16, 16))
        u = x.new_zeros((B, 128, 64, 64))

        # Inference initial state
        c_e = x.new_zeros((B, 128, 16, 16))
        h_e = x.new_zeros((B, 128, 16, 16))

        kl = 0
        for l in range(self.L):
            # Inference state update
            if self.shared_core:
                c_e, h_e = self.inference_core(x_q, v_q, r, c_e, h_e, h_g, u)
            else:
                c_e, h_e = self.inference_core[l](x_q, v_q, r, c_e, h_e, h_g, u)

            # Posterior sample
            z = self.q.sample({"h_e": h_e}, reparam=True)["z"]

            # KL divergence
            kl += KullbackLeibler(self.q, self.pi).eval({"h_e": h_e, "h_g": h_g})

            # Generator state update
            if self.shared_core:
                c_g, h_g, u = self.generation_core(v_q, r, c_g, h_g, u, z)
            else:
                c_g, h_g, u = self.generation_core[l](v_q, r, c_g, h_g, u, z)

        return kl

    def reconstruct(self, x, v, v_q, x_q):
        """Reconstruct x_q through the posterior (inference) path."""
        B, M, *_ = x.size()

        # Scene encoder
        r = self._encode_scene(x, v, B, M)

        # Generator initial state
        c_g = x.new_zeros((B, 128, 16, 16))
        h_g = x.new_zeros((B, 128, 16, 16))
        u = x.new_zeros((B, 128, 64, 64))

        # Inference initial state
        c_e = x.new_zeros((B, 128, 16, 16))
        h_e = x.new_zeros((B, 128, 16, 16))

        for l in range(self.L):
            # Inference state update
            if self.shared_core:
                c_e, h_e = self.inference_core(x_q, v_q, r, c_e, h_e, h_g, u)
            else:
                c_e, h_e = self.inference_core[l](x_q, v_q, r, c_e, h_e, h_g, u)

            # Posterior sample
            z = self.q.sample({"h_e": h_e}, reparam=True)["z"]

            # Generator state update
            if self.shared_core:
                c_g, h_g, u = self.generation_core(v_q, r, c_g, h_g, u, z)
            else:
                c_g, h_g, u = self.generation_core[l](v_q, r, c_g, h_g, u, z)

        x_q_rec = self.g.sample_mean({"u": u, "sigma": 0})

        return torch.clamp(x_q_rec, 0, 1)
|
2766a868502fbd829abcc315b609551e4119bf1e
|
7e1c4dd6a2cae0597b4f4e961063cf077acdfd4c
|
/couchbase/tests/rate_limit_t.py
|
200049fa187ea0a7713c83ea03ec9002f40c2038
|
[
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
couchbase/couchbase-python-client
|
753fa434db910d175bf9ea53a5829a40ba36e938
|
c7d80434be3f917d6f25439a918aed30273f63f4
|
refs/heads/master
| 2023-08-29T14:04:13.532717
| 2023-08-24T22:53:30
| 2023-08-25T03:35:21
| 2,122,194
| 223
| 87
|
Apache-2.0
| 2023-05-30T16:05:59
| 2011-07-29T04:24:46
|
Python
|
UTF-8
|
Python
| false
| false
| 18,860
|
py
|
rate_limit_t.py
|
# Copyright 2016-2022. Couchbase, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import random
from datetime import timedelta
import pytest
from couchbase.auth import PasswordAuthenticator
from couchbase.cluster import Cluster
from couchbase.exceptions import (CollectionAlreadyExistsException,
CouchbaseException,
QuotaLimitedException,
RateLimitedException)
from couchbase.management.collections import CollectionSpec
from couchbase.management.search import SearchIndex
from couchbase.options import ClusterOptions, GetOptions
from couchbase.search import SearchOptions, TermQuery
from tests.environments.rate_limit_environment import RateLimitTestEnvironment
from tests.environments.test_environment import TestEnvironment
class RateLimitTestSuite:
TEST_MANIFEST = [
'test_rate_limits',
'test_rate_limits_collections_scopes_limits',
'test_rate_limits_egress',
'test_rate_limits_fts',
'test_rate_limits_fts_scopes',
'test_rate_limits_index_scopes',
'test_rate_limits_ingress',
'test_rate_limits_kv_scopes_data_size',
'test_rate_limits_max_conns',
'test_rate_limits_query',
]
    @pytest.fixture()
    def remove_docs(self, cb_env):
        # Clear test documents from the bucket before the test runs.
        cb_env.remove_docs()
    @pytest.fixture()
    def cleanup_scope_and_collection(self, cb_env):
        # Drop the rate-limit scope both before and after the test so a
        # leftover scope from a previous failed run cannot interfere.
        cb_env.drop_scope()
        yield
        cb_env.drop_scope()
    def test_rate_limits(self, couchbase_config, cb_env):
        """KV ops from a user with tight limits should be rate-limited.

        NOTE(review): if no exception is raised at all the test silently
        passes — only a non-RateLimited exception fails it.
        """
        cb_env.create_rate_limit_user(cb_env.USERNAME,
                                      {'kv_limits': {'num_connections': 10,
                                                     'num_ops_per_min': 10,
                                                     'ingress_mib_per_min': 1,
                                                     'egress_mib_per_min': 10
                                                     }
                                       })
        conn_string = couchbase_config.get_connection_string()
        cluster = None
        try:
            # Connect as the limited user and hammer upserts until the
            # server pushes back.
            cluster = Cluster.connect(conn_string,
                                      ClusterOptions(PasswordAuthenticator(cb_env.USERNAME, 'password')))
            bucket = cluster.bucket('default')
            collection = bucket.default_collection()
            cb_env.try_until_timeout(5, 10, collection.upsert, 'ratelimit', "test")
        except RateLimitedException:
            pass
        except Exception:
            pytest.fail('Expected RateLimitedException')
@pytest.mark.usefixtures('cleanup_scope_and_collection')
def test_rate_limits_collections_scopes_limits(self, cb_env):
scope_name = cb_env.RATE_LIMIT_SCOPE_NAME
cb_env.create_rate_limit_scope(scope_name, {'cluster_mgr_limits': {'num_collections': 1}})
collection_spec = CollectionSpec('rate-limit-collection', scope_name=scope_name)
cb_env.cm.create_collection(collection_spec)
# verify collection exists
TestEnvironment.try_n_times_till_exception(5,
3,
cb_env.cm.create_collection,
collection_spec,
expected_exceptions=(CollectionAlreadyExistsException,))
with pytest.raises(QuotaLimitedException):
collection_spec = CollectionSpec('rate-limit-collection-1', scope_name=scope_name)
cb_env.cm.create_collection(collection_spec)
@pytest.mark.usefixtures('remove_docs')
def test_rate_limits_egress(self, couchbase_config, cb_env):
cb_env.create_rate_limit_user(cb_env.USERNAME, {'kv_limits': {'num_connections': 10,
'num_ops_per_min': 100,
'ingress_mib_per_min': 10,
'egress_mib_per_min': 2}
})
conn_string = couchbase_config.get_connection_string()
cluster = None
try:
cluster = Cluster.connect(conn_string,
ClusterOptions(PasswordAuthenticator(cb_env.USERNAME, 'password')))
bucket = cluster.bucket('default')
collection = bucket.default_collection()
doc = cb_env.random_doc_by_size(1024*512)
key = 'ratelimit-egress'
collection.upsert(key, doc)
for _ in range(3):
collection.get(key, GetOptions(timeout=timedelta(seconds=10)))
except RateLimitedException:
pass
except Exception:
pytest.fail('Expected RateLimitedException')
def test_rate_limits_fts(self, couchbase_config, cb_env):
cb_env.create_rate_limit_user(cb_env.USERNAME, {
'fts_limits': {
'num_queries_per_min': 1,
'num_concurrent_requests': 10,
'ingress_mib_per_min': 10,
'egress_mib_per_min': 10
}
})
conn_string = couchbase_config.get_connection_string()
sixm = cb_env.cluster.search_indexes()
sixm.upsert_index(SearchIndex(name='ratelimit-idx', source_name='default'))
if not cb_env.rate_limit_params.fts_indexes:
cb_env.rate_limit_params.fts_indexes = []
cb_env.rate_limit_params.fts_indexes.append('ratelimit-idx')
try:
cluster = Cluster.connect(conn_string,
ClusterOptions(PasswordAuthenticator(cb_env.USERNAME, 'password')))
cb_env.try_until_timeout(5,
50,
cluster.search_query,
'ratelimit-idx',
TermQuery('auto'),
SearchOptions(limit=1),
fts=True)
except RateLimitedException:
pass
except Exception:
pytest.fail('Expected RateLimitedException')
finally:
sixm.drop_index('ratelimit-idx')
@pytest.mark.usefixtures('cleanup_scope_and_collection') # noqa: C901
def test_rate_limits_fts_scopes(self, cb_env): # noqa: C901
scope_name = cb_env.RATE_LIMIT_SCOPE_NAME
cb_env.create_rate_limit_scope(scope_name, {'fts_limits': {'num_fts_indexes': 1}})
collection_spec = CollectionSpec('rate-limit-collection', scope_name=scope_name)
cb_env.cm.create_collection(collection_spec)
# verify collection exists
TestEnvironment.try_n_times_till_exception(5,
3,
cb_env.cm.create_collection,
collection_spec,
expected_exceptions=(CollectionAlreadyExistsException,))
# see beer-search-coll-index-params.json for ref
idx_name = "{}.{}".format(scope_name, collection_spec.name)
idx_params = {
'doc_config': {
'mode': 'scope.collection.type_field',
'type_field': 'type'
},
'mapping': {
'default_analyzer': 'standard',
'default_datetime_parser': 'dateTimeOptional',
'default_field': '_all',
'default_mapping': {
'dynamic': True,
'enabled': False
},
'default_type': '_default',
'docvalues_dynamic': True,
'index_dynamic': True,
'store_dynamic': False,
'type_field': '_type',
'types': {
idx_name: {
'dynamic': False,
'enabled': True
}
}
}
}
ixm = cb_env.cluster.search_indexes()
if not cb_env.rate_limit_params.fts_indexes:
cb_env.rate_limit_params.fts_indexes = []
with pytest.raises(QuotaLimitedException):
# random helps to avoid "Index already exist" failure
new_idx = SearchIndex(name='rate-limit-idx-{}'.format(random.randrange(0, 50)),
idx_type='fulltext-index',
source_name='default',
source_type='couchbase',
params=json.loads(json.dumps(idx_params)))
cb_env.rate_limit_params.fts_indexes.append(new_idx.name)
# try multiple times to avoid scope not w/in bucket failure
num_tries = 10
success = False
for i in range(num_tries):
try:
ixm.upsert_index(new_idx)
success = True
except CouchbaseException:
if i < (num_tries - 1):
cb_env.sleep(3)
except Exception:
raise
if not success:
ixm.upsert_index(new_idx)
# random helps to avoid "Index already exist" failure
new_idx = SearchIndex(name='rate-limit-idx-{}'.format(random.randrange(51, 100)),
idx_type='fulltext-index',
source_name='default',
source_type='couchbase',
params=json.loads(json.dumps(idx_params)))
cb_env.rate_limit_params.fts_indexes.append(new_idx.name)
ixm.upsert_index(new_idx)
@pytest.mark.usefixtures('cleanup_scope_and_collection') # noqa: C901
def test_rate_limits_index_scopes(self, cb_env): # noqa: C901
scope_name = cb_env.RATE_LIMIT_SCOPE_NAME
cb_env.create_rate_limit_scope(scope_name, {'index_limits': {'num_indexes': 1}})
collection_spec = CollectionSpec('rate-limit-collection', scope_name=scope_name)
cb_env.cm.create_collection(collection_spec)
# verify collection exists
TestEnvironment.try_n_times_till_exception(5,
3,
cb_env.cm.create_collection,
collection_spec,
expected_exceptions=(CollectionAlreadyExistsException,))
# make sure query service sees the new keyspace
# drop the index and then re-create
ixm = cb_env.cluster.query_indexes()
def create_primary_index():
try:
ixm.create_primary_index('default', scope_name=scope_name, collection_name=collection_spec.name)
indexes = ixm.get_all_indexes('default', scope_name=scope_name, collection_name=collection_spec.name)
if len(indexes) == 0:
return False
except CouchbaseException:
return False
return True
count = 1
while not create_primary_index():
if count == 5:
raise pytest.skip('Unable to create primary index.')
count += 1
indexes = ixm.get_all_indexes('default', scope_name=scope_name, collection_name=collection_spec.name)
TestEnvironment.sleep(1)
if len(indexes) > 0:
break
TestEnvironment.try_n_times(10,
3,
ixm.drop_primary_index,
'default',
scope_name=scope_name,
collection_name=collection_spec.name)
scope = cb_env.bucket.scope(scope_name)
with pytest.raises(QuotaLimitedException):
TestEnvironment.try_n_times(10,
3,
ixm.create_primary_index,
'default',
scope_name=scope_name,
collection_name=collection_spec.name)
indexes = ixm.get_all_indexes('default', scope_name=scope_name, collection_name=collection_spec.name)
assert len(indexes) >= 1
assert indexes[0].is_primary is True
assert '#primary' == indexes[0].name
assert collection_spec.name == indexes[0].collection_name
# helps to avoid "Index already exist" failure
idx_name = 'rate-limit-idx-{}'.format(random.randrange(0, 100))
scope.query("CREATE INDEX `{}` ON `{}`(testField)".format(idx_name, collection_spec.name)).execute()
@pytest.mark.usefixtures('remove_docs')
def test_rate_limits_ingress(self, couchbase_config, cb_env):
cb_env.create_rate_limit_user(cb_env.USERNAME, {
'kv_limits': {
'num_connections': 10,
'num_ops_per_min': 100,
'ingress_mib_per_min': 1,
'egress_mib_per_min': 10
}
})
conn_string = couchbase_config.get_connection_string()
cluster = None
try:
cluster = Cluster.connect(conn_string,
ClusterOptions(PasswordAuthenticator(cb_env.USERNAME, 'password')))
bucket = cluster.bucket('default')
collection = bucket.default_collection()
doc = cb_env.random_doc_by_size(1024*512)
for _ in range(3):
collection.upsert('ratelimit-ingress', doc)
except RateLimitedException:
pass
except Exception:
pytest.fail('Expected RateLimitedException')
@pytest.mark.usefixtures('cleanup_scope_and_collection')
def test_rate_limits_kv_scopes_data_size(self, cb_env):
scope_name = cb_env.RATE_LIMIT_SCOPE_NAME
cb_env.create_rate_limit_scope(scope_name, {'kv_limits': {'data_size': 1024*1024}})
collection_spec = CollectionSpec('rate-limit-collection', scope_name=scope_name)
cb_env.cm.create_collection(collection_spec)
# verify collection exists
TestEnvironment.try_n_times_till_exception(5,
3,
cb_env.cm.create_collection,
collection_spec,
expected_exceptions=(CollectionAlreadyExistsException,))
scope = cb_env.bucket.scope(scope_name)
collection = scope.collection(collection_spec.name)
doc = cb_env.random_doc_by_size(1024*512)
with pytest.raises(QuotaLimitedException):
for _ in range(5):
collection.upsert('ratelimit-datasize', doc)
def test_rate_limits_max_conns(self, couchbase_config, cb_env):
cb_env.create_rate_limit_user(cb_env.USERNAME, {
'kv_limits': {
'num_connections': 1,
'num_ops_per_min': 100,
'ingress_mib_per_min': 10,
'egress_mib_per_min': 10
}
})
cluster = None
cluster1 = None
conn_string = couchbase_config.get_connection_string()
try:
cluster = Cluster.connect(conn_string,
ClusterOptions(PasswordAuthenticator(cb_env.USERNAME, 'password')))
bucket = cluster.bucket('default')
collection = bucket.default_collection()
collection.exists('some-key')
cluster1 = Cluster(conn_string,
ClusterOptions(PasswordAuthenticator(cb_env.USERNAME, 'password')))
bucket1 = cluster1.bucket('default')
collection1 = bucket1.default_collection()
collection1.exists('some-key')
except RateLimitedException:
pass
except Exception:
pytest.fail('Expected RateLimitedException')
def test_rate_limits_query(self, couchbase_config, cb_env):
cb_env.create_rate_limit_user(cb_env.USERNAME, {
'query_limits': {
'num_queries_per_min': 1,
'num_concurrent_requests': 10,
'ingress_mib_per_min': 10,
'egress_mib_per_min': 10
}
})
conn_string = couchbase_config.get_connection_string()
cluster = None
qm = None
try:
cluster = Cluster.connect(conn_string,
ClusterOptions(PasswordAuthenticator(cb_env.USERNAME, 'password')))
qm = cluster.query_indexes()
qm.create_primary_index('default', ignore_if_exists=True)
cb_env.try_until_timeout(5, 50, cluster.query, "SELECT 'Hi there!'", query=True)
except RateLimitedException:
pass
except Exception:
pytest.fail('Expected RateLimitedException')
@pytest.mark.flaky(reruns=5, reruns_delay=1)
class ClassicRateLimitTests(RateLimitTestSuite):
@pytest.fixture(scope='class')
def test_manifest_validated(self):
def valid_test_method(meth):
attr = getattr(ClassicRateLimitTests, meth)
return callable(attr) and not meth.startswith('__') and meth.startswith('test')
method_list = [meth for meth in dir(ClassicRateLimitTests) if valid_test_method(meth)]
compare = set(RateLimitTestSuite.TEST_MANIFEST).difference(method_list)
return compare
@pytest.fixture(scope='class', name='cb_env')
def couchbase_test_environment(self, cb_base_env, test_manifest_validated):
if test_manifest_validated:
pytest.fail(f'Test manifest not validated. Missing tests: {test_manifest_validated}.')
cb_env = RateLimitTestEnvironment.from_environment(cb_base_env)
cb_env.setup()
yield cb_env
cb_env.teardown()
|
51ff85ed98c78f0264a84779b839c28edbc445f7
|
974d04d2ea27b1bba1c01015a98112d2afb78fe5
|
/python/paddle/hapi/model_summary.py
|
262547444d8d2f4eb7216f4a2ccdd460d6e26b1d
|
[
"Apache-2.0"
] |
permissive
|
PaddlePaddle/Paddle
|
b3d2583119082c8e4b74331dacc4d39ed4d7cff0
|
22a11a60e0e3d10a3cf610077a3d9942a6f964cb
|
refs/heads/develop
| 2023-08-17T21:27:30.568889
| 2023-08-17T12:38:22
| 2023-08-17T12:38:22
| 65,711,522
| 20,414
| 5,891
|
Apache-2.0
| 2023-09-14T19:20:51
| 2016-08-15T06:59:08
|
C++
|
UTF-8
|
Python
| false
| false
| 23,989
|
py
|
model_summary.py
|
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numbers
import warnings
from collections import OrderedDict
import numpy as np
import paddle
from paddle import nn
from paddle.autograd import no_grad
from paddle.static import InputSpec
__all__ = []
def summary(net, input_size=None, dtypes=None, input=None):
"""Prints a string summary of the network.
Args:
net (Layer): The network which must be a subinstance of Layer.
input_size (tuple|InputSpec|list[tuple|InputSpec], optional): Size of input tensor. if model only
have one input, input_size can be tuple or InputSpec. if model
have multiple input, input_size must be a list which contain
every input's shape. Note that input_size only dim of
batch_size can be None or -1. Default: None. Note that
input_size and input cannot be None at the same time.
dtypes (str, optional): If dtypes is None, 'float32' will be used, Default: None.
input (Tensor, optional): If input is given, input_size and dtype will be ignored, Default: None.
Returns:
Dict: A summary of the network including total params and total trainable params.
Examples:
.. code-block:: python
>>> import paddle
>>> import paddle.nn as nn
>>> paddle.seed(2023)
>>> class LeNet(nn.Layer):
... def __init__(self, num_classes=10):
... super().__init__()
... self.num_classes = num_classes
... self.features = nn.Sequential(
... nn.Conv2D(1, 6, 3, stride=1, padding=1),
... nn.ReLU(),
... nn.MaxPool2D(2, 2),
... nn.Conv2D(6, 16, 5, stride=1, padding=0),
... nn.ReLU(),
... nn.MaxPool2D(2, 2))
...
... if num_classes > 0:
... self.fc = nn.Sequential(
... nn.Linear(400, 120),
... nn.Linear(120, 84),
... nn.Linear(84, 10))
...
... def forward(self, inputs):
... x = self.features(inputs)
...
... if self.num_classes > 0:
... x = paddle.flatten(x, 1)
... x = self.fc(x)
... return x
...
>>> lenet = LeNet()
>>> params_info = paddle.summary(lenet, (1, 1, 28, 28))
>>> # doctest: +SKIP
>>> print(params_info)
---------------------------------------------------------------------------
Layer (type) Input Shape Output Shape Param #
===========================================================================
Conv2D-1 [[1, 1, 28, 28]] [1, 6, 28, 28] 60
ReLU-1 [[1, 6, 28, 28]] [1, 6, 28, 28] 0
MaxPool2D-1 [[1, 6, 28, 28]] [1, 6, 14, 14] 0
Conv2D-2 [[1, 6, 14, 14]] [1, 16, 10, 10] 2,416
ReLU-2 [[1, 16, 10, 10]] [1, 16, 10, 10] 0
MaxPool2D-2 [[1, 16, 10, 10]] [1, 16, 5, 5] 0
Linear-1 [[1, 400]] [1, 120] 48,120
Linear-2 [[1, 120]] [1, 84] 10,164
Linear-3 [[1, 84]] [1, 10] 850
===========================================================================
Total params: 61,610
Trainable params: 61,610
Non-trainable params: 0
---------------------------------------------------------------------------
Input size (MB): 0.00
Forward/backward pass size (MB): 0.11
Params size (MB): 0.24
Estimated Total Size (MB): 0.35
---------------------------------------------------------------------------
{'total_params': 61610, 'trainable_params': 61610}
>>> # doctest: -SKIP
>>> # multi input demo
>>> class LeNetMultiInput(LeNet):
... def forward(self, inputs, y):
... x = self.features(inputs)
...
... if self.num_classes > 0:
... x = paddle.flatten(x, 1)
... x = self.fc(x + y)
... return x
...
>>> lenet_multi_input = LeNetMultiInput()
>>> params_info = paddle.summary(lenet_multi_input,
... [(1, 1, 28, 28), (1, 400)],
... dtypes=['float32', 'float32'])
>>> # doctest: +SKIP
>>> print(params_info)
---------------------------------------------------------------------------
Layer (type) Input Shape Output Shape Param #
===========================================================================
Conv2D-3 [[1, 1, 28, 28]] [1, 6, 28, 28] 60
ReLU-3 [[1, 6, 28, 28]] [1, 6, 28, 28] 0
MaxPool2D-3 [[1, 6, 28, 28]] [1, 6, 14, 14] 0
Conv2D-4 [[1, 6, 14, 14]] [1, 16, 10, 10] 2,416
ReLU-4 [[1, 16, 10, 10]] [1, 16, 10, 10] 0
MaxPool2D-4 [[1, 16, 10, 10]] [1, 16, 5, 5] 0
Linear-4 [[1, 400]] [1, 120] 48,120
Linear-5 [[1, 120]] [1, 84] 10,164
Linear-6 [[1, 84]] [1, 10] 850
===========================================================================
Total params: 61,610
Trainable params: 61,610
Non-trainable params: 0
---------------------------------------------------------------------------
Input size (MB): 0.00
Forward/backward pass size (MB): 0.11
Params size (MB): 0.24
Estimated Total Size (MB): 0.35
---------------------------------------------------------------------------
{'total_params': 61610, 'trainable_params': 61610}
>>> # doctest: -SKIP
>>> # list input demo
>>> class LeNetListInput(LeNet):
... def forward(self, inputs):
... x = self.features(inputs[0])
...
... if self.num_classes > 0:
... x = paddle.flatten(x, 1)
... x = self.fc(x + inputs[1])
... return x
...
>>> lenet_list_input = LeNetListInput()
>>> input_data = [paddle.rand([1, 1, 28, 28]), paddle.rand([1, 400])]
>>> params_info = paddle.summary(lenet_list_input, input=input_data)
>>> # doctest: +SKIP
>>> print(params_info)
---------------------------------------------------------------------------
Layer (type) Input Shape Output Shape Param #
===========================================================================
Conv2D-5 [[1, 1, 28, 28]] [1, 6, 28, 28] 60
ReLU-5 [[1, 6, 28, 28]] [1, 6, 28, 28] 0
MaxPool2D-5 [[1, 6, 28, 28]] [1, 6, 14, 14] 0
Conv2D-6 [[1, 6, 14, 14]] [1, 16, 10, 10] 2,416
ReLU-6 [[1, 16, 10, 10]] [1, 16, 10, 10] 0
MaxPool2D-6 [[1, 16, 10, 10]] [1, 16, 5, 5] 0
Linear-7 [[1, 400]] [1, 120] 48,120
Linear-8 [[1, 120]] [1, 84] 10,164
Linear-9 [[1, 84]] [1, 10] 850
===========================================================================
Total params: 61,610
Trainable params: 61,610
Non-trainable params: 0
---------------------------------------------------------------------------
Input size (MB): 0.00
Forward/backward pass size (MB): 0.11
Params size (MB): 0.24
Estimated Total Size (MB): 0.35
---------------------------------------------------------------------------
{'total_params': 61610, 'trainable_params': 61610}
>>> # doctest: -SKIP
>>> # dict input demo
>>> class LeNetDictInput(LeNet):
... def forward(self, inputs):
... x = self.features(inputs['x1'])
...
... if self.num_classes > 0:
... x = paddle.flatten(x, 1)
... x = self.fc(x + inputs['x2'])
... return x
...
>>> lenet_dict_input = LeNetDictInput()
>>> input_data = {'x1': paddle.rand([1, 1, 28, 28]),
... 'x2': paddle.rand([1, 400])}
>>> params_info = paddle.summary(lenet_dict_input, input=input_data)
>>> # doctest: +SKIP
>>> print(params_info)
---------------------------------------------------------------------------
Layer (type) Input Shape Output Shape Param #
===========================================================================
Conv2D-7 [[1, 1, 28, 28]] [1, 6, 28, 28] 60
ReLU-7 [[1, 6, 28, 28]] [1, 6, 28, 28] 0
MaxPool2D-7 [[1, 6, 28, 28]] [1, 6, 14, 14] 0
Conv2D-8 [[1, 6, 14, 14]] [1, 16, 10, 10] 2,416
ReLU-8 [[1, 16, 10, 10]] [1, 16, 10, 10] 0
MaxPool2D-8 [[1, 16, 10, 10]] [1, 16, 5, 5] 0
Linear-10 [[1, 400]] [1, 120] 48,120
Linear-11 [[1, 120]] [1, 84] 10,164
Linear-12 [[1, 84]] [1, 10] 850
===========================================================================
Total params: 61,610
Trainable params: 61,610
Non-trainable params: 0
---------------------------------------------------------------------------
Input size (MB): 0.00
Forward/backward pass size (MB): 0.11
Params size (MB): 0.24
Estimated Total Size (MB): 0.35
---------------------------------------------------------------------------
{'total_params': 61610, 'trainable_params': 61610}
>>> # doctest: -SKIP
"""
if input_size is None and input is None:
raise ValueError("input_size and input cannot be None at the same time")
if input_size is None and input is not None:
if paddle.is_tensor(input):
input_size = tuple(input.shape)
elif isinstance(input, (list, tuple)):
input_size = []
for x in input:
input_size.append(tuple(x.shape))
elif isinstance(input, dict):
input_size = []
for key in input.keys():
input_size.append(tuple(input[key].shape))
elif isinstance(input, paddle.fluid.framework.Variable):
input_size = tuple(input.shape)
else:
raise ValueError(
"Input is not tensor, list, tuple and dict, unable to determine input_size, please input input_size."
)
if isinstance(input_size, InputSpec):
_input_size = tuple(input_size.shape)
elif isinstance(input_size, list):
_input_size = []
for item in input_size:
if isinstance(item, int):
item = (item,)
assert isinstance(
item, (tuple, InputSpec)
), 'When input_size is list, \
expect item in input_size is a tuple or InputSpec, but got {}'.format(
type(item)
)
if isinstance(item, InputSpec):
_input_size.append(tuple(item.shape))
else:
_input_size.append(item)
elif isinstance(input_size, int):
_input_size = (input_size,)
else:
_input_size = input_size
if not paddle.in_dynamic_mode():
warnings.warn(
"Your model was created in static graph mode, this may not get correct summary information!"
)
in_train_mode = False
else:
in_train_mode = net.training
if in_train_mode:
net.eval()
def _is_shape(shape):
for item in shape:
if isinstance(item, (list, tuple)):
return False
return True
def _check_shape(shape):
num_unknown = 0
new_shape = []
for i in range(len(shape)):
item = shape[i]
if item is None or item == -1:
num_unknown += 1
if num_unknown > 1:
raise ValueError(
'Option input_size only the dim of batch_size can be None or -1.'
)
item = 1
elif isinstance(item, numbers.Number):
if item <= 0:
raise ValueError(
"Expected element in input size greater than zero, but got {}".format(
item
)
)
new_shape.append(item)
return tuple(new_shape)
def _check_input(input_size):
if isinstance(input_size, (list, tuple)) and _is_shape(input_size):
return _check_shape(input_size)
else:
return [_check_input(i) for i in input_size]
_input_size = _check_input(_input_size)
result, params_info = summary_string(net, _input_size, dtypes, input)
print(result)
if in_train_mode:
net.train()
return params_info
@no_grad()
def summary_string(model, input_size=None, dtypes=None, input=None):
def _all_is_numper(items):
for item in items:
if not isinstance(item, numbers.Number):
return False
return True
def _build_dtypes(input_size, dtype):
if dtype is None:
dtype = 'float32'
if isinstance(input_size, (list, tuple)) and _all_is_numper(input_size):
return [dtype]
else:
return [_build_dtypes(i, dtype) for i in input_size]
if not isinstance(dtypes, (list, tuple)):
dtypes = _build_dtypes(input_size, dtypes)
batch_size = 1
summary_str = ''
depth = len(list(model.sublayers()))
def _get_shape_from_tensor(x):
if isinstance(
x, (paddle.fluid.Variable, paddle.fluid.core.eager.Tensor)
):
return list(x.shape)
elif isinstance(x, (list, tuple)):
return [_get_shape_from_tensor(xx) for xx in x]
def _get_output_shape(output):
if isinstance(output, (list, tuple)):
output_shape = [_get_output_shape(o) for o in output]
elif hasattr(output, 'shape'):
output_shape = list(output.shape)
else:
output_shape = []
return output_shape
def register_hook(layer):
def hook(layer, input, output):
class_name = str(layer.__class__).split(".")[-1].split("'")[0]
try:
layer_idx = int(layer._full_name.split('_')[-1])
except:
layer_idx = len(summary)
m_key = "%s-%i" % (class_name, layer_idx + 1)
summary[m_key] = OrderedDict()
try:
summary[m_key]["input_shape"] = _get_shape_from_tensor(input)
except:
warnings.warn('Get layer {} input shape failed!')
summary[m_key]["input_shape"] = []
try:
summary[m_key]["output_shape"] = _get_output_shape(output)
except:
warnings.warn('Get layer {} output shape failed!')
summary[m_key]["output_shape"]
params = 0
if paddle.in_dynamic_mode():
layer_state_dict = layer._parameters
else:
layer_state_dict = layer.state_dict()
summary[m_key]["trainable_params"] = 0
trainable_flag = False
for k, v in layer_state_dict.items():
params += np.prod(v.shape)
try:
if (getattr(layer, k).trainable) and (
not getattr(layer, k).stop_gradient
):
summary[m_key]["trainable_params"] += np.prod(v.shape)
summary[m_key]["trainable"] = True
trainable_flag = True
elif not trainable_flag:
summary[m_key]["trainable"] = False
except:
summary[m_key]["trainable"] = True
summary[m_key]["nb_params"] = params
if (
not isinstance(layer, nn.Sequential)
and not isinstance(layer, nn.LayerList)
and (not (layer == model) or depth < 1)
):
hooks.append(layer.register_forward_post_hook(hook))
# For rnn, gru and lstm layer
elif hasattr(layer, 'could_use_cudnn') and layer.could_use_cudnn:
hooks.append(layer.register_forward_post_hook(hook))
if isinstance(input_size, tuple):
input_size = [input_size]
def build_input(input_size, dtypes):
if isinstance(input_size, (list, tuple)) and _all_is_numper(input_size):
if isinstance(dtypes, (list, tuple)):
dtype = dtypes[0]
else:
dtype = dtypes
return paddle.cast(paddle.rand(list(input_size)), dtype)
else:
return [
build_input(i, dtype) for i, dtype in zip(input_size, dtypes)
]
# create properties
summary = OrderedDict()
hooks = []
# register hook
model.apply(register_hook)
if input is not None:
x = input
model(x)
else:
x = build_input(input_size, dtypes)
# make a forward pass
model(*x)
# remove these hooks
for h in hooks:
h.remove()
def _get_str_length(summary):
head_length = {
'layer_width': 15,
'input_shape_width': 20,
'output_shape_width': 20,
'params_width': 15,
'table_width': 75,
}
for layer in summary:
if head_length['output_shape_width'] < len(
str(summary[layer]["output_shape"])
):
head_length['output_shape_width'] = len(
str(summary[layer]["output_shape"])
)
if head_length['input_shape_width'] < len(
str(summary[layer]["input_shape"])
):
head_length['input_shape_width'] = len(
str(summary[layer]["input_shape"])
)
if head_length['layer_width'] < len(str(layer)):
head_length['layer_width'] = len(str(layer))
if head_length['params_width'] < len(
str(summary[layer]["nb_params"])
):
head_length['params_width'] = len(
str(summary[layer]["nb_params"])
)
_temp_width = 0
for k, v in head_length.items():
if k != 'table_width':
_temp_width += v
if head_length['table_width'] < _temp_width + 5:
head_length['table_width'] = _temp_width + 5
return head_length
table_width = _get_str_length(summary)
summary_str += "-" * table_width['table_width'] + "\n"
line_new = "{:^{}} {:^{}} {:^{}} {:^{}}".format(
"Layer (type)",
table_width['layer_width'],
"Input Shape",
table_width['input_shape_width'],
"Output Shape",
table_width['output_shape_width'],
"Param #",
table_width['params_width'],
)
summary_str += line_new + "\n"
summary_str += "=" * table_width['table_width'] + "\n"
total_params = 0
total_output = 0
trainable_params = 0
max_length = 0
for layer in summary:
# input_shape, output_shape, trainable, nb_params
line_new = "{:^{}} {:^{}} {:^{}} {:^{}}".format(
layer,
table_width['layer_width'],
str(summary[layer]["input_shape"]),
table_width['input_shape_width'],
str(summary[layer]["output_shape"]),
table_width['output_shape_width'],
"{:,}".format(summary[layer]["nb_params"]),
table_width['params_width'],
)
total_params += summary[layer]["nb_params"]
try:
total_output += np.sum(
np.prod(summary[layer]["output_shape"], axis=-1)
)
except:
for output_shape in summary[layer]["output_shape"]:
total_output += np.sum(np.prod(output_shape, axis=-1))
if "trainable" in summary[layer]:
if summary[layer]["trainable"]:
trainable_params += summary[layer]["trainable_params"]
summary_str += line_new + "\n"
def _get_input_size(input_size, size):
if isinstance(input_size, (list, tuple)) and _all_is_numper(input_size):
size = abs(np.prod(input_size) * 4.0 / (1024**2.0))
else:
size = sum([_get_input_size(i, size) for i in input_size])
return size
total_input_size = _get_input_size(input_size, 0)
total_output_size = abs(
2.0 * total_output * 4.0 / (1024**2.0)
) # x2 for gradients
total_params_size = abs(total_params * 4.0 / (1024**2.0))
total_size = total_params_size + total_output_size + total_input_size
summary_str += "=" * table_width['table_width'] + "\n"
summary_str += f"Total params: {total_params:,}" + "\n"
summary_str += f"Trainable params: {trainable_params:,}" + "\n"
summary_str += (
f"Non-trainable params: {total_params - trainable_params:,}" + "\n"
)
summary_str += "-" * table_width['table_width'] + "\n"
summary_str += "Input size (MB): %0.2f" % total_input_size + "\n"
summary_str += (
"Forward/backward pass size (MB): %0.2f" % total_output_size + "\n"
)
summary_str += "Params size (MB): %0.2f" % total_params_size + "\n"
summary_str += "Estimated Total Size (MB): %0.2f" % total_size + "\n"
summary_str += "-" * table_width['table_width'] + "\n"
# return summary
return summary_str, {
'total_params': total_params,
'trainable_params': trainable_params,
}
|
a18ddd65d043f15b63afdff80bfb762e4829b30c
|
b4c3019ba274e696985efa4c8c71956f98fa93c0
|
/dosagelib/plugins/wumo.py
|
832994a06b1852c1661bc745616ecbb0ee232d1b
|
[
"MIT"
] |
permissive
|
webcomics/dosage
|
35c3ba5dc8b9d26f2e730d7072e8977f9ff35c18
|
6f721b0634e1850f60d6bcf12a2858059198f251
|
refs/heads/master
| 2023-08-31T01:36:30.934158
| 2023-08-27T16:19:33
| 2023-08-27T16:19:33
| 33,205,825
| 120
| 74
|
MIT
| 2023-09-05T09:41:04
| 2015-03-31T19:40:19
|
Python
|
UTF-8
|
Python
| false
| false
| 690
|
py
|
wumo.py
|
# SPDX-License-Identifier: MIT
# Copyright (C) 2004-2008 Tristan Seligmann and Jonathan Jacobs
# Copyright (C) 2012-2014 Bastian Kleineidam
# Copyright (C) 2015-2022 Tobias Gruetzmacher
from ..scraper import ParserScraper
class KindOfNormal(ParserScraper):
imageSearch = '//article[1]//div[@class="box-content"]//img'
prevSearch = '//a[@class="prev"]'
def __init__(self, name, url):
super(KindOfNormal, self).__init__(name)
self.url = 'http://wumo.com/' + url
@classmethod
def getmodules(cls):
return (
cls('MeAndDanielle', 'meanddanielle'),
cls('TruthFacts', 'truthfacts'),
cls('Wumo', 'wumo'),
)
|
aececff08dfcc5fd38f37bff3290444d3315fc66
|
f9308d5a8efe2dbb48e9cc87cd06405b60a9dc7b
|
/samples/python/guides/images02.py
|
89abd4c77bd6d7f1965573318478c8e679ded245
|
[
"Apache-2.0",
"CC-BY-4.0"
] |
permissive
|
google/earthengine-community
|
4e054b421f66f03507d58668084aee981062fc24
|
ce931040c518860f8788b4888c0acfdebd2952fc
|
refs/heads/master
| 2023-09-01T14:47:54.812703
| 2023-08-31T23:01:00
| 2023-08-31T23:01:39
| 200,732,820
| 428
| 552
|
Apache-2.0
| 2023-09-13T21:46:51
| 2019-08-05T21:42:11
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 2,524
|
py
|
images02.py
|
# Copyright 2020 The Google Earth Engine Community Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Google Earth Engine Developer's Guide examples for 'Images - Creating images'."""
# [START earthengine__images02__load_image]
loaded_image = ee.Image('JAXA/ALOS/AW3D30/V2_2')
# [END earthengine__images02__load_image]
# [START earthengine__images02__find_image]
first = (ee.ImageCollection('COPERNICUS/S2_SR')
.filterBounds(ee.Geometry.Point(-70.48, 43.3631))
.filterDate('2019-01-01', '2019-12-31')
.sort('CLOUDY_PIXEL_PERCENTAGE')
.first())
# Define a map centered on southern Maine.
map_s2 = folium.Map(location=[43.7516, -70.8155], zoom_start=11)
# Add the image layer to the map and display it.
map_s2.add_ee_layer(
first, {'bands': ['B4', 'B3', 'B2'], 'min': 0, 'max': 2000}, 'first')
display(map_s2)
# [END earthengine__images02__find_image]
# [START earthengine__images02__cloud_image]
uri = ('gs://gcp-public-data-landsat/LC08/01/001/002/'
'LC08_L1GT_001002_20160817_20170322_01_T2/'
'LC08_L1GT_001002_20160817_20170322_01_T2_B5.TIF')
cloud_image = ee.Image.loadGeoTIFF(uri)
print(cloud_image.getInfo())
# [END earthengine__images02__cloud_image]
# [START earthengine__images02__create_image]
from pprint import pprint
print('Create a constant image:')
image_1 = ee.Image(1)
pprint(image_1.getInfo())
print('\nConcatenate two images into one multi-band image:')
image_2 = ee.Image(2)
image_3 = ee.Image.cat([image_1, image_2])
pprint(image_3.getInfo())
print('\nCreate a multi-band image from a list of constants:')
multiband = ee.Image([1, 2, 3])
pprint(multiband.getInfo())
print('\nSelect and (optionally) rename bands:')
renamed = multiband.select(
['constant', 'constant_1', 'constant_2'], # old names
['band1', 'band2', 'band3']) # new names
pprint(renamed.getInfo())
print('\nAdd bands to an image:')
image_4 = image_3.addBands(ee.Image(42))
pprint(image_4.getInfo())
# [END earthengine__images02__create_image]
|
dd549db60e942d05e9749cca6da8c030dd76eecc
|
c530897cb72b6943c7226b25824444cad5f3503b
|
/usaspending_api/common/cache.py
|
562c56ce7fbb59887c7e73c7b1805165184120c1
|
[
"CC0-1.0"
] |
permissive
|
fedspendingtransparency/usaspending-api
|
fc63a22d32ea0207b7273d3e1ef26ba9dbabc42a
|
38f920438697930ae3ac57bbcaae9034877d8fb7
|
refs/heads/master
| 2023-09-01T22:00:36.633612
| 2023-08-29T18:39:18
| 2023-08-29T18:39:18
| 65,394,827
| 276
| 118
|
CC0-1.0
| 2023-09-14T20:33:15
| 2016-08-10T15:39:45
|
Python
|
UTF-8
|
Python
| false
| false
| 2,215
|
py
|
cache.py
|
import hashlib
import json
from rest_framework_extensions.key_constructor import bits
from rest_framework_extensions.key_constructor.constructors import DefaultKeyConstructor
from usaspending_api.common.helpers.dict_helpers import order_nested_object
class PathKeyBit(bits.QueryParamsKeyBit):
    """Key bit that mixes the request's URL path into the cache key."""

    def get_source_dict(self, params, view_instance, view_method, request, args, kwargs):
        """Return a one-entry dict so identical params on different endpoints map to distinct keys."""
        return dict(path=request.path)
class GetPostQueryParamsKeyBit(bits.QueryParamsKeyBit):
    """
    Override QueryParamsKey method in drf-extensions to ensure that the query params part of our cache key includes
    directives in a POST request (i.e., request.data) as well as GET parameters
    """
    def get_source_dict(self, params, view_instance, view_method, request, args, kwargs):
        # Views may opt in to a restricted key: only whitelisted parameters
        # contribute, which keeps the cache from fragmenting on irrelevant ones.
        if hasattr(view_instance, "cache_key_whitelist"):
            whitelist = view_instance.cache_key_whitelist
            params = {}
            for param in whitelist:
                if param in request.query_params:
                    params[param] = request.query_params[param]
                # POST body value wins over the query-string value when both
                # are present (assigned second, so it overwrites).
                if param in request.data:
                    params[param] = request.data[param]
        else:
            # No whitelist: merge all GET params with the POST body
            # (body values overwrite query-string values on key collision).
            params = dict(request.query_params)
            params.update(dict(request.data))
        # auditTrail is excluded so it never fragments the cache.
        if "auditTrail" in params:
            del params["auditTrail"]
        # Canonical ordering makes logically-equal requests serialize to the
        # same JSON string, and therefore to the same cache key.
        return {"request": json.dumps(order_nested_object(params))}
class USAspendingKeyConstructor(DefaultKeyConstructor):
    """
    Cache-key constructor for API requests: combines the URL path and the
    GET/POST parameters into a single hashed key. If more nuanced keys are
    ever needed, see the drf-extensions documentation:
    http://chibisov.github.io/drf-extensions/docs/#default-key-constructor
    """

    path_bit = PathKeyBit()
    request_params = GetPostQueryParamsKeyBit()

    def prepare_key(self, key_dict):
        """Hash the canonically-ordered key dict into a stable hex digest."""
        # Canonical ordering via order_nested_object guarantees identical
        # requests always serialize to the same JSON, hence the same key.
        canonical = json.dumps(order_nested_object(key_dict))
        return hashlib.md5(canonical.encode("utf-8")).hexdigest()


# Shared instance wired up as the key_func for cached API views.
usaspending_key_func = USAspendingKeyConstructor()
|
85ad0aa7e943deab65b6fc12ac97df52794f6e87
|
f305f84ea6f721c2391300f0a60e21d2ce14f2a5
|
/19_数学/数论/BSGS/bsgs.py
|
17897b18c47180acc93a4dc194cff165e21c84d5
|
[] |
no_license
|
981377660LMT/algorithm-study
|
f2ada3e6959338ae1bc21934a84f7314a8ecff82
|
7e79e26bb8f641868561b186e34c1127ed63c9e0
|
refs/heads/master
| 2023-09-01T18:26:16.525579
| 2023-09-01T12:21:58
| 2023-09-01T12:21:58
| 385,861,235
| 225
| 24
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,791
|
py
|
bsgs.py
|
"""bsgs与exbsgs 离散对数(Discrete Logarithm)
https://dianhsu.com/2022/08/27/template-math/#bsgs
"""
from math import ceil, gcd, sqrt
from typing import Tuple
def bsgs(base: int, target: int, mod: int) -> int:
    """Baby-step giant-step: smallest x >= 0 with base**x ≡ target (mod mod).

    Requires gcd(base, mod) == 1 (use ``exbsgs`` otherwise).

    Args:
        base: base of the power.
        target: desired residue.
        mod: modulus, assumed >= 1.

    Returns:
        The minimal non-negative solution x, or -1 if none exists.

    Time complexity: O(sqrt(mod)).
    https://dianhsu.com/2022/08/27/template-math/#bsgs
    """
    from math import isqrt

    # Block size t = ceil(sqrt(mod)).  Computed with integer isqrt because
    # float sqrt() can round down for mod beyond ~2**52, making t too small
    # and silently missing valid solutions.
    t = isqrt(mod - 1) + 1
    target %= mod
    # Baby steps: record target * base**j -> j for j in [0, t).
    table = {}
    val = 1
    for j in range(t):
        table[target * val % mod] = j
        val = val * base % mod
    # Giant steps: val now holds base**t; walk base**(i*t) looking for a
    # collision.  A hit at (i, j) means base**(i*t - j) ≡ target (mod mod).
    giant, val = val, 1
    if giant == 0:
        return 1 if target == 0 else -1
    for i in range(t + 1):
        j = table.get(val, -1)
        if j != -1 and i * t - j >= 0:  # >= 0 admits the trivial solution x == 0
            return i * t - j
        val = val * giant % mod
    return -1
def exgcd(a: int, b: int) -> Tuple[int, int, int]:
    """Extended Euclid: return ``(g, x, y)`` with ``g == gcd(a, b)`` and ``a*x + b*y == g``."""
    # Base case: gcd(a, 0) = a = a*1 + 0*0.
    if b == 0:
        return a, 1, 0
    # Recurse on (b, a mod b), then lift the Bézout coefficients one level:
    # if b*x1 + (a % b)*y1 == g, then a*y1 + b*(x1 - (a // b)*y1) == g.
    g, x1, y1 = exgcd(b, a % b)
    return g, y1, x1 - (a // b) * y1
def exbsgs(base: int, target: int, p: int) -> int:
    """Extended baby-step giant-step: smallest x >= 0 with base**x ≡ target (mod p).

    Unlike ``bsgs`` this does not require gcd(base, p) == 1.  Returns -1
    when no solution exists.

    Time complexity: O(sqrt(p)).
    https://dianhsu.com/2022/08/27/template-math/#exbsgs
    """
    base %= p
    target %= p
    # Trivial solution: base**0 == 1 (and everything is congruent mod 1).
    if target == 1 or p == 1:
        return 0
    # Strip common factors g = gcd(base, p) one at a time.  Each round divides
    # one g out of target and p and folds base/g into `coeff`, reducing the
    # problem to coeff * base**x' ≡ target (mod p) with x = x' + shifts.
    shifts = 0
    coeff = 1
    while True:
        g = gcd(base, p)
        if g == 1:
            break
        if target % g:
            return -1  # target not divisible by g: no solution exists
        shifts += 1
        target //= g
        p //= g
        coeff = coeff * (base // g) % p
        if coeff == target:
            return shifts  # base**shifts already equals target
    # Now gcd(coeff, p) == 1: multiply through by coeff^-1 and run plain BSGS.
    _, inv, _ = exgcd(coeff, p)
    ans = bsgs(base, target * (inv % p) % p, p)
    return ans + shifts if ans != -1 else -1
if __name__ == "__main__":
# https://judge.yosupo.jp/problem/discrete_logarithm_mod
T = int(input())
for _ in range(T):
base, target, mod = map(int, input().split())
res = exbsgs(base, target, mod)
print(res)
# https://www.luogu.com.cn/problem/P4195
# !给定a,p,b,求满足a**x ≡ b (mod p)的最小自然数x。
while True:
base, p, target = map(int, input().split())
if base == target == p == 0:
break
res = exbsgs(base, target, p)
print(res if res != -1 else "No Solution")
|
de139ed27a1cda0256e693a34e33ad430d9e6004
|
fd8ef75bb06383538cdb21ed2a0ef88e570179b7
|
/src/openfermion/circuits/trotter/algorithms/split_operator.py
|
7fe83fa449aaf8e1060cd4b2c6feed1367c1af27
|
[
"LicenseRef-scancode-generic-cla",
"Apache-2.0"
] |
permissive
|
quantumlib/OpenFermion
|
d1147383f99573d19005bd0f3e0120e9e9bed04c
|
788481753c798a72c5cb3aa9f2aa9da3ce3190b0
|
refs/heads/master
| 2023-09-04T11:00:32.124157
| 2023-08-24T21:54:30
| 2023-08-24T21:54:30
| 104,403,768
| 1,481
| 406
|
Apache-2.0
| 2023-08-24T21:54:31
| 2017-09-21T22:10:28
|
Python
|
UTF-8
|
Python
| false
| false
| 11,908
|
py
|
split_operator.py
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A Trotter algorithm using a split-operator approach."""
from typing import cast, Optional, Sequence, Tuple
import cirq
# import openfermion.circuits.gates as gates
from openfermion.circuits.gates import rot11, rot111
import openfermion.ops as ops
# import openfermion.circuits.primitives as primitives
from openfermion.circuits.primitives import bogoliubov_transform, swap_network
from openfermion.circuits.trotter.trotter_algorithm import (Hamiltonian,
TrotterStep,
TrotterAlgorithm)
class SplitOperatorTrotterAlgorithm(TrotterAlgorithm):
    """A Trotter algorithm using a split-operator approach.
    This algorithm simulates a DiagonalCoulombHamiltonian. It uses Bogoliubov
    transformations to switch between a basis in which the one-body terms are
    convenient to simulate and a basis in which the two-body terms are
    convenient to simulate. The Bogoliubov transformations are implemented
    using Givens rotations.
    This algorithm is described in arXiv:1706.00023.
    """
    # TODO Maybe use FFFT
    supported_types = {ops.DiagonalCoulombHamiltonian}
    # Factory methods: one per (symmetric/asymmetric) x (controlled or not)
    # combination, each returning the matching TrotterStep implementation.
    def symmetric(self, hamiltonian: Hamiltonian) -> Optional[TrotterStep]:
        """Return a symmetric split-operator step for `hamiltonian`."""
        return SymmetricSplitOperatorTrotterStep(hamiltonian)
    def asymmetric(self, hamiltonian: Hamiltonian) -> Optional[TrotterStep]:
        """Return an asymmetric split-operator step for `hamiltonian`."""
        return AsymmetricSplitOperatorTrotterStep(hamiltonian)
    def controlled_symmetric(self,
                             hamiltonian: Hamiltonian) -> Optional[TrotterStep]:
        """Return a controlled symmetric split-operator step for `hamiltonian`."""
        return ControlledSymmetricSplitOperatorTrotterStep(hamiltonian)
    def controlled_asymmetric(self, hamiltonian: Hamiltonian
                             ) -> Optional[TrotterStep]:
        """Return a controlled asymmetric split-operator step for `hamiltonian`."""
        return ControlledAsymmetricSplitOperatorTrotterStep(hamiltonian)
# Ready-to-use shared instance of the algorithm.
SPLIT_OPERATOR = SplitOperatorTrotterAlgorithm()
class SplitOperatorTrotterStep(TrotterStep):
    """Base class for split-operator Trotter steps.
    Diagonalizes the one-body part of the Hamiltonian once at construction
    time; the resulting orbital energies and basis change matrix are reused
    by every concrete step implementation below.
    """
    def __init__(self,
                 hamiltonian: 'openfermion.DiagonalCoulombHamiltonian') -> None:
        quad_ham = ops.QuadraticHamiltonian(hamiltonian.one_body)
        # Get the basis change matrix that diagonalizes the one-body term
        # and associated orbital energies
        self.orbital_energies, self.basis_change_matrix, _ = (
            quad_ham.diagonalizing_bogoliubov_transform())
        super().__init__(hamiltonian)
class SymmetricSplitOperatorTrotterStep(SplitOperatorTrotterStep):
    """Symmetric split-operator step: half one-body, full two-body, half one-body."""
    def prepare(self,
                qubits: Sequence[cirq.Qid],
                control_qubits: Optional[cirq.Qid] = None) -> cirq.OP_TREE:
        """Rotate into the basis that diagonalizes the one-body term."""
        # Change to the basis in which the one-body term is diagonal
        yield cirq.inverse(
            bogoliubov_transform(qubits, self.basis_change_matrix))
    def trotter_step(self,
                     qubits: Sequence[cirq.Qid],
                     time: float,
                     control_qubit: Optional[cirq.Qid] = None) -> cirq.OP_TREE:
        """Yield the operations for one Trotter step of duration `time`."""
        n_qubits = len(qubits)
        # Simulate the one-body terms for half of the full time
        yield (cirq.rz(rads=-0.5 * self.orbital_energies[i] * time).on(
            qubits[i]) for i in range(n_qubits))
        # Rotate to the computational basis
        yield bogoliubov_transform(qubits, self.basis_change_matrix)
        # Simulate the two-body terms for the full time
        def two_body_interaction(p, q, a, b) -> cirq.OP_TREE:
            yield rot11(rads=-2 * self.hamiltonian.two_body[p, q] * time).on(
                a, b)
        yield swap_network(qubits, two_body_interaction)
        # The qubit ordering has been reversed
        qubits = qubits[::-1]
        # Rotate back to the basis in which the one-body term is diagonal
        yield cirq.inverse(
            bogoliubov_transform(qubits, self.basis_change_matrix))
        # Simulate the one-body terms for half of the full time
        yield (cirq.rz(rads=-0.5 * self.orbital_energies[i] * time).on(
            qubits[i]) for i in range(n_qubits))
    def step_qubit_permutation(
            self,
            qubits: Sequence[cirq.Qid],
            control_qubit: Optional[cirq.Qid] = None
    ) -> Tuple[Sequence[cirq.Qid], Optional[cirq.Qid]]:
        """Report how a single step permutes the qubits (order is reversed)."""
        # A Trotter step reverses the qubit ordering
        return qubits[::-1], None
    def finish(self,
               qubits: Sequence[cirq.Qid],
               n_steps: int,
               control_qubit: Optional[cirq.Qid] = None,
               omit_final_swaps: bool = False) -> cirq.OP_TREE:
        """Undo the basis change and (optionally) restore the qubit order."""
        # Rotate back to the computational basis
        yield bogoliubov_transform(qubits, self.basis_change_matrix)
        # If the number of Trotter steps is odd, possibly swap qubits back
        if n_steps & 1 and not omit_final_swaps:
            yield swap_network(qubits)
class ControlledSymmetricSplitOperatorTrotterStep(SplitOperatorTrotterStep):
    """Symmetric split-operator step with every rotation conditioned on a control qubit."""
    def prepare(self,
                qubits: Sequence[cirq.Qid],
                control_qubits: Optional[cirq.Qid] = None) -> cirq.OP_TREE:
        """Rotate into the basis that diagonalizes the one-body term."""
        # Change to the basis in which the one-body term is diagonal
        yield cirq.inverse(
            bogoliubov_transform(qubits, self.basis_change_matrix))
    def trotter_step(self,
                     qubits: Sequence[cirq.Qid],
                     time: float,
                     control_qubit: Optional[cirq.Qid] = None) -> cirq.OP_TREE:
        """Yield one controlled Trotter step of duration `time`.
        Raises:
            TypeError: if `control_qubit` is not a cirq.Qid.
        """
        n_qubits = len(qubits)
        if not isinstance(control_qubit, cirq.Qid):
            raise TypeError('Control qudit must be specified.')
        # Simulate the one-body terms for half of the full time
        yield (rot11(rads=-0.5 * self.orbital_energies[i] * time).on(
            control_qubit, qubits[i]) for i in range(n_qubits))
        # Rotate to the computational basis
        yield bogoliubov_transform(qubits, self.basis_change_matrix)
        # Simulate the two-body terms for the full time
        def two_body_interaction(p, q, a, b) -> cirq.OP_TREE:
            yield rot111(-2 * self.hamiltonian.two_body[p, q] * time).on(
                cast(cirq.Qid, control_qubit), a, b)
        yield swap_network(qubits, two_body_interaction)
        # The qubit ordering has been reversed
        qubits = qubits[::-1]
        # Rotate back to the basis in which the one-body term is diagonal
        yield cirq.inverse(
            bogoliubov_transform(qubits, self.basis_change_matrix))
        # Simulate the one-body terms for half of the full time
        yield (rot11(rads=-0.5 * self.orbital_energies[i] * time).on(
            control_qubit, qubits[i]) for i in range(n_qubits))
        # Apply phase from constant term
        yield cirq.rz(rads=-self.hamiltonian.constant * time).on(control_qubit)
    def step_qubit_permutation(
            self,
            qubits: Sequence[cirq.Qid],
            control_qubit: Optional[cirq.Qid] = None
    ) -> Tuple[Sequence[cirq.Qid], Optional[cirq.Qid]]:
        """Report how a single step permutes the qubits (order is reversed)."""
        # A Trotter step reverses the qubit ordering
        return qubits[::-1], control_qubit
    def finish(self,
               qubits: Sequence[cirq.Qid],
               n_steps: int,
               control_qubit: Optional[cirq.Qid] = None,
               omit_final_swaps: bool = False) -> cirq.OP_TREE:
        """Undo the basis change and (optionally) restore the qubit order."""
        # Rotate back to the computational basis
        yield bogoliubov_transform(qubits, self.basis_change_matrix)
        # If the number of Trotter steps is odd, possibly swap qubits back
        if n_steps & 1 and not omit_final_swaps:
            yield swap_network(qubits)
class AsymmetricSplitOperatorTrotterStep(SplitOperatorTrotterStep):
    """Asymmetric split-operator step: full two-body then full one-body evolution.
    No `prepare` override: each step starts and ends in the computational basis.
    """
    def trotter_step(self,
                     qubits: Sequence[cirq.Qid],
                     time: float,
                     control_qubit: Optional[cirq.Qid] = None) -> cirq.OP_TREE:
        """Yield the operations for one Trotter step of duration `time`."""
        n_qubits = len(qubits)
        # Simulate the two-body terms for the full time
        def two_body_interaction(p, q, a, b) -> cirq.OP_TREE:
            yield rot11(rads=-2 * self.hamiltonian.two_body[p, q] * time).on(
                a, b)
        yield swap_network(qubits, two_body_interaction)
        # The qubit ordering has been reversed
        qubits = qubits[::-1]
        # Rotate to the basis in which the one-body term is diagonal
        yield cirq.inverse(
            bogoliubov_transform(qubits, self.basis_change_matrix))
        # Simulate the one-body terms for the full time
        yield (cirq.rz(rads=-self.orbital_energies[i] * time).on(qubits[i])
               for i in range(n_qubits))
        # Rotate back to the computational basis
        yield bogoliubov_transform(qubits, self.basis_change_matrix)
    def step_qubit_permutation(
            self,
            qubits: Sequence[cirq.Qid],
            control_qubit: Optional[cirq.Qid] = None
    ) -> Tuple[Sequence[cirq.Qid], Optional[cirq.Qid]]:
        """Report how a single step permutes the qubits (order is reversed)."""
        # A Trotter step reverses the qubit ordering
        return qubits[::-1], None
    def finish(self,
               qubits: Sequence[cirq.Qid],
               n_steps: int,
               control_qubit: Optional[cirq.Qid] = None,
               omit_final_swaps: bool = False) -> cirq.OP_TREE:
        """Optionally restore the original qubit order after all steps."""
        # If the number of Trotter steps is odd, possibly swap qubits back
        if n_steps & 1 and not omit_final_swaps:
            yield swap_network(qubits)
class ControlledAsymmetricSplitOperatorTrotterStep(SplitOperatorTrotterStep):
    """Asymmetric split-operator step with every rotation conditioned on a control qubit."""
    def trotter_step(self,
                     qubits: Sequence[cirq.Qid],
                     time: float,
                     control_qubit: Optional[cirq.Qid] = None) -> cirq.OP_TREE:
        """Yield one controlled Trotter step of duration `time`.
        Raises:
            TypeError: if `control_qubit` is not a cirq.Qid.
        """
        n_qubits = len(qubits)
        if not isinstance(control_qubit, cirq.Qid):
            raise TypeError('Control qudit must be specified.')
        # Simulate the two-body terms for the full time
        def two_body_interaction(p, q, a, b) -> cirq.OP_TREE:
            yield rot111(-2 * self.hamiltonian.two_body[p, q] * time).on(
                cast(cirq.Qid, control_qubit), a, b)
        yield swap_network(qubits, two_body_interaction)
        # The qubit ordering has been reversed
        qubits = qubits[::-1]
        # Rotate to the basis in which the one-body term is diagonal
        yield cirq.inverse(
            bogoliubov_transform(qubits, self.basis_change_matrix))
        # Simulate the one-body terms for the full time
        yield (rot11(rads=-self.orbital_energies[i] * time).on(
            control_qubit, qubits[i]) for i in range(n_qubits))
        # Rotate back to the computational basis
        yield bogoliubov_transform(qubits, self.basis_change_matrix)
        # Apply phase from constant term
        yield cirq.rz(rads=-self.hamiltonian.constant * time).on(control_qubit)
    def step_qubit_permutation(
            self,
            qubits: Sequence[cirq.Qid],
            control_qubit: Optional[cirq.Qid] = None
    ) -> Tuple[Sequence[cirq.Qid], Optional[cirq.Qid]]:
        """Report how a single step permutes the qubits (order is reversed)."""
        # A Trotter step reverses the qubit ordering
        return qubits[::-1], control_qubit
    def finish(self,
               qubits: Sequence[cirq.Qid],
               n_steps: int,
               control_qubit: Optional[cirq.Qid] = None,
               omit_final_swaps: bool = False) -> cirq.OP_TREE:
        """Optionally restore the original qubit order after all steps."""
        # If the number of Trotter steps is odd, possibly swap qubits back
        if n_steps & 1 and not omit_final_swaps:
            yield swap_network(qubits)
|
f3d4da332c6237160cea3370eac5fce382d4e24c
|
67f6ca6dd3f8fb1d3104f931546c50445846083c
|
/src/main/python/smart/smartplots2_run.py
|
6edfcd66b7a7d1ef58965a6f017bbf73cafea8b2
|
[
"BSD-3-Clause-LBNL"
] |
permissive
|
LBNL-UCB-STI/beam
|
7e63cf28854a0b78e5f123629f5ff84966d75deb
|
ca433c85c592285cf4ff6c28620b3538fe9cc9ba
|
refs/heads/develop
| 2023-09-01T03:51:59.353627
| 2023-08-31T15:04:26
| 2023-08-31T15:04:26
| 73,118,824
| 142
| 71
|
NOASSERTION
| 2023-09-11T14:53:58
| 2016-11-07T20:38:29
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 2,169
|
py
|
smartplots2_run.py
|
import pandas as pd
import smartplots2_setup
# NOTE(review): `pandas` is imported but unused in this script — confirm
# before removing (it may be relied on by interactive use).
# Plot configuration WITHOUT the futuristic base scenarios.
# NOTE(review): expansion_factor presumably scales sampled agents up to the
# full population — verify against smartplots2_setup before changing.
plt_setup_smart2 = {
    'name': 'no-futuristic-base',
    'expansion_factor': (7.75/0.315) * 27.0 / 21.3,
    'scenarios_itr': [15, 15, 15, 15, 15, 15, 15],
    'scenarios_id': [1, 6, 7, 8, 9, 10, 11],
    'scenarios_year': [2010, 2025, 2025, 2025, 2025, 2040, 2040],
    'plot_size': (5, 4.5),
    'bottom_labels': ['Base\n2010', 'Sharing is Caring\n2025', 'Technology Takeover\n2025', "All About Me\n2040"],
    'top_labels': ["Base", "BAU", "VTO", "BAU", "VTO", "BAU", "VTO"],
}
# NOTE(review): machine-specific absolute path — parameterize (CLI arg or env
# var) before running anywhere but the original author's machine.
output_folder = "/Users/haitam/workspace/pyscripts/data/smart/15thSep2019"
# Generate the first batch of plots/tables for the no-futuristic-base setup.
smartplots2_setup.pltModeSplitByTrips(plt_setup_smart2, output_folder)
smartplots2_setup.tableSummary(plt_setup_smart2, output_folder)
smartplots2_setup.pltLdvRhOccupancy(plt_setup_smart2, output_folder)
smartplots2_setup.pltModeSplitInPMT(plt_setup_smart2, output_folder)
smartplots2_setup.pltModeSplitInPMTPerCapita(plt_setup_smart2, output_folder)
smartplots2_setup.pltLdvTechnologySplitInVMT(plt_setup_smart2, output_folder)
smartplots2_setup.pltModeSplitInVMT(plt_setup_smart2, output_folder)
smartplots2_setup.pltModeSplitInVMTPerCapita(plt_setup_smart2, output_folder)
smartplots2_setup.pltRHEmptyPooled(plt_setup_smart2, output_folder)
smartplots2_setup.pltLdvRhOccupancyByVMT(plt_setup_smart2, output_folder)
# Plot configuration WITH the futuristic base scenarios included.
plt_setup_smart2_base = {
    'name': 'with-futuristic-base',
    'expansion_factor': (7.75/0.315) * 27.0 / 21.3,
    'scenarios_itr': [15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15],
    'scenarios_id': [1, 2, 3, 6, 7, 8, 9, 4, 5, 10, 11],
    'scenarios_year': [2010, 2025, 2025, 2025, 2025, 2025, 2025, 2040, 2040, 2040, 2040],
    'plot_size': (7.5, 4.5),
    'bottom_labels': ['Base\n2010', 'Base Short\n2025', 'Sharing is Caring\n2025', 'Technology Takeover\n2025', "Base Long\n2040", "All About Me\n2040"],
    'top_labels': ["Base", "BAU", "VTO", "BAU", "VTO", "BAU", "VTO", "BAU", "VTO", "BAU", "VTO"],
}
# Plots that need the extended (with-futuristic-base) scenario list.
smartplots2_setup.pltEnergyPerCapita(plt_setup_smart2_base, output_folder)
smartplots2_setup.pltLdvPersonHourTraveled(plt_setup_smart2_base, output_folder)
smartplots2_setup.pltRHWaitTime(plt_setup_smart2_base, output_folder)
|
a57c790a4086b7ad622a20d85820a00bed003f6a
|
e8b38b8dfa348ff006eb197a7906ca8e491a23dc
|
/tests/epyccel/modules/loops.py
|
6f6fec112e9ba62d32c28f97b81b334b9b8b39ef
|
[
"MIT"
] |
permissive
|
pyccel/pyccel
|
d79a81dbdff1172839a6a1227abfcc1f97e6c97b
|
1896b761ba662c90b14c195bbb6eb5cddc57cbfc
|
refs/heads/devel
| 2023-08-30T12:15:25.244401
| 2023-08-28T09:31:32
| 2023-08-28T09:31:32
| 100,463,736
| 307
| 39
|
MIT
| 2023-09-14T19:29:26
| 2017-08-16T07:59:14
|
Python
|
UTF-8
|
Python
| false
| false
| 4,482
|
py
|
loops.py
|
# pylint: disable=missing-function-docstring, missing-module-docstring
#==============================================================================
# NOTE(review): kernels for epyccel translation tests (tests/epyccel/modules/
# loops.py).  The specific loop constructs (range forms, itertools.product,
# string shape annotations like 'int[:,:](order=C)') are the features under
# test — keep the shapes exactly as written.
# Sum of 1..n via an accumulating for-loop.
def sum_natural_numbers(n : int):
    x = 0
    for i in range( 1, n+1 ):
        x += i
    return x
# ...
# n! via an accumulating for-loop starting at 2.
def factorial(n : int):
    x = 1
    for i in range( 2, n+1 ):
        x *= i
    return x
# ...
# n-th Fibonacci number (fib(0) == 0) via iterative update.
def fibonacci(n : int):
    x = 0
    y = 1
    for i in range( n ): # pylint: disable=unused-variable
        z = x+y
        x = y
        y = z
    return x
# ...
# Two sequential loops; exercises variables defined inside loop bodies.
def double_loop(n : int):
    x = 0
    for i in range( 3, 10 ): # pylint: disable=unused-variable
        x += 1
    y = n*x
    for j in range( 4, 15 ): # pylint: disable=unused-variable
        z = x-y
    return z
# ...
# Nested index loops writing into a C-ordered 2D array in place.
def double_loop_on_2d_array_C(z : 'int[:,:](order=C)'):
    from numpy import shape
    m, n = shape( z )
    for i in range( m ):
        for j in range( n ):
            z[i,j] = i-j
# ...
# Same kernel on a Fortran-ordered array.
def double_loop_on_2d_array_F(z : 'int[:,:](order=F)'):
    from numpy import shape
    m, n = shape( z )
    for i in range( m ):
        for j in range( n ):
            z[i,j] = i-j
# ...
# itertools.product over index lists, C-ordered array.
def product_loop_on_2d_array_C(z : 'int[:,:](order=C)'):
    from numpy import shape
    from itertools import product
    m, n = shape( z )
    x = [i for i in range(m)]
    y = [j for j in range(n)]
    for i,j in product( x, y ):
        z[i,j] = i-j
# ...
# itertools.product over index lists, Fortran-ordered array.
def product_loop_on_2d_array_F(z : 'int[:,:](order=F)'):
    from numpy import shape
    from itertools import product
    m, n = shape( z )
    x = [i for i in range(m)]
    y = [j for j in range(n)]
    for i,j in product( x, y ):
        z[i,j] = i-j
# ...
# itertools.product with a separate running write index k.
def product_loop( z : 'float[:]', m : int, n : int ):
    from itertools import product
    x = [i*3+2 for i in range(m)]
    y = [j*7+6 for j in range(n)]
    k = 0
    for i,j in product( x, y ):
        z[k] = i-j
        k += 1
# ...
# map() over an array with a nested helper function.
# NOTE(review): res starts at 0 so the product is always 0 — presumably fine
# for a translation-equivalence test, but confirm it is intentional.
def map_on_1d_array(z : 'int[:]'):
    def f(x : int):
        return x+5
    res = 0
    for v in map( f, z ):
        res *= v
    return res
# ...
# enumerate() with default start.
def enumerate_on_1d_array(z : 'int[:]'):
    res = 0
    for i,v in enumerate( z ):
        res += v*i
    return res
# ...
# enumerate() with an explicit start argument.
def enumerate_on_1d_array_with_start(z : 'int[:]', k : 'int'):
    res = 0
    for i,v in enumerate( z, k ):
        res += v*i
    return res
# ...
# zip() over two comprehension-built lists.
def zip_prod(m : int):
    x = [ i for i in range(m)]
    y = [2*j for j in range(m)]
    res = 0
    for i1,i2 in zip( x, y ):
        res += i1*i2
    return res
# ...
# Element-wise square of a float array written into `out`.
def product_loop_on_real_array(z : 'float[:]', out : 'float[:]'):
    from numpy import shape
    n, = shape( z )
    for i in range(n):
        out[i] = z[i]**2
# ...
# `break` out of a loop; returns the loop variable after the break.
def fizzbuzz_search_with_breaks(fizz : 'int', buzz : 'int', max_val : 'int'):
    for i in range(1,max_val+1):
        if i%fizz == 0 and i%buzz == 0:
            break
    return i
# ...
# `continue` used to skip non-multiples.
def fizzbuzz_sum_with_continue(fizz : int, buzz : int, max_val : int):
    fizzbuzz_sum = 0
    for i in range(1,max_val+1):
        if i%fizz != 0:
            continue
        if i%buzz != 0:
            continue
        fizzbuzz_sum += i
    return fizzbuzz_sum
# ...
# Fibonacci again, but with a while-loop (while-construct test).
def fibonacci_while(n : int):
    x = 0
    y = 1
    i = 1
    while i <= n:
        z = x+y
        x = y
        y = z
        i = i + 1
    return x
# ...
# Sum of 0..n with a while-loop.
def sum_nat_numbers_while(n : int):
    x = 0
    i = 0
    while i <= n:
        x += i
        i = i + 1
    return x
# ...
# Nested while-loops; inner loop only runs fully on the first outer pass
# (y is never reset), which is part of the construct being exercised.
def double_while_sum(n : int, m : int):
    x = 0
    y = 0
    i = 0
    while x <= n:
        while y <= m:
            i += y
            y = y + 1
        i += x
        x = x + 1
    return i
# ...
# n! with a while-loop.
def factorial_while(n : int):
    x = 1
    i = 1
    while i <= n:
        x = i * x
        i = i + 1
    return x
# Truthiness of an int as a while condition; counts n down to 0.
def while_not_0(n : int):
    while n:
        n -= 1
    return n
# range() with fully parameterized start/stop/step.
def for_loop1(start : int, stop : int, step : int):
    x = 0
    for i in range(start, stop, step):
        x += i
    return x
# range() with literal positive step.
def for_loop2():
    x = 0
    for i in range(1, 10, 1):
        x += i
    return x
# range() with a negative step.
def for_loop3():
    x = 0
    for i in range(10, 1, -2):
        x += i
    return x
# Temporary-array handling inside loops: d1 uses an explicit temporary `c`,
# d2 computes the same result with an inline expression; both returned so the
# test can compare them.
def temp_array_in_loop(a : 'int[:]', b : 'int[:]'):
    import numpy as np
    c = np.zeros_like(a)
    d1 = np.zeros_like(a)
    d2 = np.zeros_like(a)
    for _ in range(1):
        for d in range(2):
            b[d] += d
        c[:] = b - a
        d1[:] = np.abs(c)
        d2[:] = np.abs(b - a)
    return d1, d2
|
3b312679edf153f2be3c7801064089cdb91b47ae
|
01184c7098e40569dd48219fbe3012321cf31244
|
/rdflib/query.py
|
9ea5c2b0f2303154da3550fc20668b932dba25f6
|
[
"BSD-3-Clause"
] |
permissive
|
RDFLib/rdflib
|
1c81136f2656207042f81374540d8e1f02be28f5
|
077f4ac3abb3038b266f40dc95a8ccf9f4e9a84c
|
refs/heads/main
| 2023-08-30T11:22:00.041615
| 2023-08-29T21:31:43
| 2023-08-29T21:31:43
| 3,342,046
| 1,754
| 562
|
BSD-3-Clause
| 2023-09-12T14:58:35
| 2012-02-03T05:49:13
|
Python
|
UTF-8
|
Python
| false
| false
| 15,019
|
py
|
query.py
|
from __future__ import annotations
import itertools
import types
import warnings
from io import BytesIO
from typing import (
IO,
TYPE_CHECKING,
Any,
BinaryIO,
Dict,
Iterator,
List,
Mapping,
MutableSequence,
Optional,
Tuple,
Union,
cast,
overload,
)
from urllib.parse import urlparse
from urllib.request import url2pathname
__all__ = [
"Processor",
"UpdateProcessor",
"Result",
"ResultRow",
"ResultParser",
"ResultSerializer",
"ResultException",
"EncodeOnlyUnicode",
]
if TYPE_CHECKING:
from rdflib.graph import Graph, _TripleType
from rdflib.plugins.sparql.sparql import Query, Update
from rdflib.term import Identifier, Variable
class Processor:
    """
    Query plugin interface.
    This module is useful for those wanting to write a query processor
    that can plugin to rdf. If you are wanting to execute a query you
    likely want to do so through the Graph class query method.
    """
    def __init__(self, graph: "Graph"):
        pass
    # NOTE(review): the mutable {} defaults below are shared across calls —
    # safe only as long as implementations never mutate them.
    # type error: Missing return statement
    def query( # type: ignore[empty-body]
        self,
        strOrQuery: Union[str, "Query"], # noqa: N803
        initBindings: Mapping["str", "Identifier"] = {}, # noqa: N803
        initNs: Mapping[str, Any] = {}, # noqa: N803
        DEBUG: bool = False, # noqa: N803
    ) -> Mapping[str, Any]:
        pass
class UpdateProcessor:
    """
    Update plugin interface.
    This module is useful for those wanting to write an update
    processor that can plugin to rdflib. If you are wanting to execute
    an update statement you likely want to do so through the Graph
    class update method.
    .. versionadded:: 4.0
    """
    def __init__(self, graph: "Graph"):
        pass
    # NOTE(review): the mutable {} defaults below are shared across calls —
    # safe only as long as implementations never mutate them.
    def update(
        self,
        strOrQuery: Union[str, "Update"], # noqa: N803
        initBindings: Mapping["str", "Identifier"] = {}, # noqa: N803
        initNs: Mapping[str, Any] = {}, # noqa: N803
    ) -> None:
        pass
class ResultException(Exception): # noqa: N818
    """Raised for errors around query Results (e.g. an unknown result type)."""
    pass
class EncodeOnlyUnicode:
    """Wrap a binary stream so that ``write`` accepts ``str`` as well as bytes.

    Work-around for http://bugs.python.org/issue11649: text arguments are
    encoded as UTF-8 before being written; anything else is passed through
    untouched.  Every other attribute access is delegated to the wrapped
    stream.
    """

    def __init__(self, stream: BinaryIO):
        self.__stream = stream

    def write(self, arg):
        """Write ``arg`` to the underlying stream, UTF-8-encoding strings."""
        data = arg.encode("utf-8") if isinstance(arg, str) else arg
        self.__stream.write(data)

    def __getattr__(self, name: str) -> Any:
        # Fall through to the wrapped stream for everything else
        # (getvalue, flush, close, ...).
        return getattr(self.__stream, name)
class ResultRow(Tuple["Identifier", ...]):
"""
a single result row
allows accessing bindings as attributes or with []
>>> from rdflib import URIRef, Variable
>>> rr=ResultRow({ Variable('a'): URIRef('urn:cake') }, [Variable('a')])
>>> rr[0]
rdflib.term.URIRef(u'urn:cake')
>>> rr[1]
Traceback (most recent call last):
...
IndexError: tuple index out of range
>>> rr.a
rdflib.term.URIRef(u'urn:cake')
>>> rr.b
Traceback (most recent call last):
...
AttributeError: b
>>> rr['a']
rdflib.term.URIRef(u'urn:cake')
>>> rr['b']
Traceback (most recent call last):
...
KeyError: 'b'
>>> rr[Variable('a')]
rdflib.term.URIRef(u'urn:cake')
.. versionadded:: 4.0
"""
labels: Mapping[str, int]
def __new__(
cls, values: Mapping["Variable", "Identifier"], labels: List["Variable"]
):
# type error: Value of type variable "Self" of "__new__" of "tuple" cannot be "ResultRow" [type-var]
# type error: Generator has incompatible item type "Optional[Identifier]"; expected "_T_co" [misc]
instance = super(ResultRow, cls).__new__(cls, (values.get(v) for v in labels)) # type: ignore[type-var, misc]
instance.labels = dict((str(x[1]), x[0]) for x in enumerate(labels))
return instance
def __getattr__(self, name: str) -> "Identifier":
if name not in self.labels:
raise AttributeError(name)
return tuple.__getitem__(self, self.labels[name])
# type error: Signature of "__getitem__" incompatible with supertype "tuple"
# type error: Signature of "__getitem__" incompatible with supertype "Sequence"
def __getitem__(self, name: Union[str, int, Any]) -> "Identifier": # type: ignore[override]
try:
# type error: Invalid index type "Union[str, int, Any]" for "tuple"; expected type "int"
return tuple.__getitem__(self, name) # type: ignore[index]
except TypeError:
if name in self.labels:
# type error: Invalid index type "Union[str, int, slice, Any]" for "Mapping[str, int]"; expected type "str"
return tuple.__getitem__(self, self.labels[name]) # type: ignore[index]
if str(name) in self.labels: # passing in variable object
return tuple.__getitem__(self, self.labels[str(name)])
raise KeyError(name)
@overload
def get(self, name: str, default: "Identifier") -> "Identifier":
...
@overload
def get(
self, name: str, default: Optional["Identifier"] = ...
) -> Optional["Identifier"]:
...
def get(
self, name: str, default: Optional["Identifier"] = None
) -> Optional["Identifier"]:
try:
return self[name]
except KeyError:
return default
def asdict(self) -> Dict[str, "Identifier"]:
return dict((v, self[v]) for v in self.labels if self[v] is not None)
class Result:
"""
A common class for representing query result.
There is a bit of magic here that makes this appear like different
Python objects, depending on the type of result.
If the type is "SELECT", iterating will yield lists of ResultRow objects
If the type is "ASK", iterating will yield a single bool (or
bool(result) will return the same bool)
If the type is "CONSTRUCT" or "DESCRIBE" iterating will yield the
triples.
len(result) also works.
"""
def __init__(self, type_: str):
if type_ not in ("CONSTRUCT", "DESCRIBE", "SELECT", "ASK"):
raise ResultException("Unknown Result type: %s" % type_)
self.type = type_
#: variables contained in the result.
self.vars: Optional[List["Variable"]] = None
self._bindings: MutableSequence[Mapping["Variable", "Identifier"]] = None # type: ignore[assignment]
self._genbindings: Optional[Iterator[Mapping["Variable", "Identifier"]]] = None
self.askAnswer: Optional[bool] = None
self.graph: Optional["Graph"] = None
@property
def bindings(self) -> MutableSequence[Mapping[Variable, Identifier]]:
"""
a list of variable bindings as dicts
"""
if self._genbindings:
self._bindings += list(self._genbindings)
self._genbindings = None
return self._bindings
@bindings.setter
def bindings(
self,
b: Union[
MutableSequence[Mapping["Variable", "Identifier"]],
Iterator[Mapping[Variable, Identifier]],
],
) -> None:
if isinstance(b, (types.GeneratorType, itertools.islice)):
self._genbindings = b
self._bindings = []
else:
# type error: Incompatible types in assignment (expression has type "Union[MutableSequence[Mapping[Variable, Identifier]], Iterator[Mapping[Variable, Identifier]]]", variable has type "MutableSequence[Mapping[Variable, Identifier]]")
self._bindings = b # type: ignore[assignment]
@staticmethod
def parse(
source: Optional[IO] = None,
format: Optional[str] = None,
content_type: Optional[str] = None,
**kwargs: Any,
) -> "Result":
from rdflib import plugin
if format:
plugin_key = format
elif content_type:
plugin_key = content_type.split(";", 1)[0]
else:
plugin_key = "xml"
parser = plugin.get(plugin_key, ResultParser)()
# type error: Argument 1 to "parse" of "ResultParser" has incompatible type "Optional[IO[Any]]"; expected "IO[Any]"
return parser.parse(
source, content_type=content_type, **kwargs # type:ignore[arg-type]
)
def serialize(
self,
destination: Optional[Union[str, IO]] = None,
encoding: str = "utf-8",
format: str = "xml",
**args: Any,
) -> Optional[bytes]:
"""
Serialize the query result.
The :code:`format` argument determines the Serializer class to use.
- csv: :class:`~rdflib.plugins.sparql.results.csvresults.CSVResultSerializer`
- json: :class:`~rdflib.plugins.sparql.results.jsonresults.JSONResultSerializer`
- txt: :class:`~rdflib.plugins.sparql.results.txtresults.TXTResultSerializer`
- xml: :class:`~rdflib.plugins.sparql.results.xmlresults.XMLResultSerializer`
:param destination: Path of file output or BufferedIOBase object to write the output to.
:param encoding: Encoding of output.
:param format: One of ['csv', 'json', 'txt', xml']
:param args:
:return: bytes
"""
if self.type in ("CONSTRUCT", "DESCRIBE"):
# type error: Item "None" of "Optional[Graph]" has no attribute "serialize"
# type error: Incompatible return value type (got "Union[bytes, str, Graph, Any]", expected "Optional[bytes]")
return self.graph.serialize( # type: ignore[union-attr,return-value]
destination, encoding=encoding, format=format, **args
)
"""stolen wholesale from graph.serialize"""
from rdflib import plugin
serializer = plugin.get(format, ResultSerializer)(self)
if destination is None:
streamb: BytesIO = BytesIO()
stream2 = EncodeOnlyUnicode(streamb)
# type error: Argument 1 to "serialize" of "ResultSerializer" has incompatible type "EncodeOnlyUnicode"; expected "IO[Any]"
serializer.serialize(stream2, encoding=encoding, **args) # type: ignore[arg-type]
return streamb.getvalue()
if hasattr(destination, "write"):
stream = cast(IO[bytes], destination)
serializer.serialize(stream, encoding=encoding, **args)
else:
location = cast(str, destination)
scheme, netloc, path, params, query, fragment = urlparse(location)
if scheme == "file":
if netloc != "":
raise ValueError(
f"the file URI {location!r} has an authority component which is not supported"
)
os_path = url2pathname(path)
else:
os_path = location
with open(os_path, "wb") as stream:
serializer.serialize(stream, encoding=encoding, **args)
return None
def __len__(self) -> int:
if self.type == "ASK":
return 1
elif self.type == "SELECT":
return len(self.bindings)
else:
# type error: Argument 1 to "len" has incompatible type "Optional[Graph]"; expected "Sized"
return len(self.graph) # type: ignore[arg-type]
def __bool__(self) -> bool:
if self.type == "ASK":
# type error: Incompatible return value type (got "Optional[bool]", expected "bool")
return self.askAnswer # type: ignore[return-value]
else:
return len(self) > 0
    def __iter__(
        self,
    ) -> Iterator[Union["_TripleType", bool, ResultRow]]:
        """Iterate over the result payload: triples for CONSTRUCT/DESCRIBE,
        the single boolean answer for ASK, and :class:`ResultRow` objects
        for SELECT."""
        if self.type in ("CONSTRUCT", "DESCRIBE"):
            # type error: Item "None" of "Optional[Graph]" has no attribute "__iter__" (not iterable)
            for t in self.graph:  # type: ignore[union-attr]
                yield t
        elif self.type == "ASK":
            # type error: Incompatible types in "yield" (actual type "Optional[bool]", expected type "Union[Tuple[Identifier, Identifier, Identifier], bool, ResultRow]") [misc]
            yield self.askAnswer  # type: ignore[misc]
        elif self.type == "SELECT":
            # this iterates over ResultRows of variable bindings
            if self._genbindings:
                # First pass: drain the lazy generator, caching every non-empty
                # binding in self._bindings so later iterations can replay it.
                for b in self._genbindings:
                    if b:  # don't add a result row in case of empty binding {}
                        self._bindings.append(b)
                        # type error: Argument 2 to "ResultRow" has incompatible type "Optional[List[Variable]]"; expected "List[Variable]"
                        yield ResultRow(b, self.vars)  # type: ignore[arg-type]
                # Generator exhausted; subsequent iterations replay the cache.
                self._genbindings = None
            else:
                for b in self._bindings:
                    if b:  # don't add a result row in case of empty binding {}
                        # type error: Argument 2 to "ResultRow" has incompatible type "Optional[List[Variable]]"; expected "List[Variable]"
                        yield ResultRow(b, self.vars)  # type: ignore[arg-type]
    def __getattr__(self, name: str) -> Any:
        """Fallback attribute access: delegate unknown attributes to the
        underlying graph for CONSTRUCT/DESCRIBE results, and support the
        deprecated ``result`` attribute on SELECT results."""
        if self.type in ("CONSTRUCT", "DESCRIBE") and self.graph is not None:
            # NOTE(review): Graph does not define __getattr__ (see the mypy
            # error below), and passing `self` as an explicit argument looks
            # suspicious — confirm this branch actually works before relying
            # on it.
            # type error: "Graph" has no attribute "__getattr__"
            return self.graph.__getattr__(self, name)  # type: ignore[attr-defined]
        elif self.type == "SELECT" and name == "result":
            warnings.warn(
                "accessing the 'result' attribute is deprecated."
                " Iterate over the object instead.",
                DeprecationWarning,
                stacklevel=2,
            )
            # copied from __iter__, above: tuples of values in self.vars order
            # type error: Item "None" of "Optional[List[Variable]]" has no attribute "__iter__" (not iterable)
            return [(tuple(b[v] for v in self.vars)) for b in self.bindings]  # type: ignore[union-attr]
        else:
            raise AttributeError("'%s' object has no attribute '%s'" % (self, name))
def __eq__(self, other: Any) -> bool:
try:
if self.type != other.type:
return False
if self.type == "ASK":
return self.askAnswer == other.askAnswer
elif self.type == "SELECT":
return self.vars == other.vars and self.bindings == other.bindings
else:
return self.graph == other.graph
except Exception:
return False
class ResultParser:
    """Abstract base class for SPARQL result parsers."""

    def __init__(self):
        pass

    # type error: Missing return statement
    def parse(self, source: IO, **kwargs: Any) -> Result:  # type: ignore[empty-body]
        """Parse *source* and return a :class:`Result` (abstract)."""
        pass
class ResultSerializer:
    """Abstract base class for SPARQL result serializers."""

    def __init__(self, result: Result):
        # The Result instance this serializer will write out.
        self.result = result

    def serialize(self, stream: IO, encoding: str = "utf-8", **kwargs: Any) -> None:
        """Write a properly serialized form of the result to *stream* (abstract)."""
        pass
|
3c2b575628ed3c9674453bff1a7c1dd5130ea86d
|
e986b7650d2317f740d65f44b19e663cb646d85e
|
/jdaviz/configs/imviz/plugins/footprints/__init__.py
|
069469d820330eac60eafb898abf2d2ac28de97b
|
[
"BSD-3-Clause"
] |
permissive
|
spacetelescope/jdaviz
|
a223230d2296f7fdee17a43ae1a4bee45452ec13
|
17a864ed7d64cece18fbc29f3561c137e6bf0942
|
refs/heads/main
| 2023-08-17T05:59:55.109052
| 2023-08-15T19:46:49
| 2023-08-15T19:46:49
| 185,452,341
| 105
| 70
|
BSD-3-Clause
| 2023-09-14T15:20:16
| 2019-05-07T17:54:06
|
Python
|
UTF-8
|
Python
| false
| false
| 35
|
py
|
__init__.py
|
from .footprints import * # noqa
|
e17fe135223a68a5f0a433cb0bd021277d6e0922
|
7eae8668c2e35bfbbca526f47685702e5311cdfa
|
/tests/pubsub/test_gossipsub.py
|
20611300c861113f4a033286ec39460237237001
|
[
"Apache-2.0",
"MIT"
] |
permissive
|
libp2p/py-libp2p
|
2fcfab669e0c3a959b63a4855403d958e5ebccab
|
b38b36862f44421aec998e438b668cff265de75c
|
refs/heads/master
| 2023-09-04T20:28:42.952740
| 2023-06-28T08:54:08
| 2023-06-28T08:54:08
| 149,072,635
| 399
| 98
|
MIT
| 2022-12-08T21:49:47
| 2018-09-17T05:20:41
|
Python
|
UTF-8
|
Python
| false
| false
| 18,278
|
py
|
test_gossipsub.py
|
import random
import pytest
import trio
from libp2p.pubsub.gossipsub import PROTOCOL_ID
from libp2p.tools.factories import IDFactory, PubsubFactory
from libp2p.tools.pubsub.utils import dense_connect, one_to_all_connect
from libp2p.tools.utils import connect
@pytest.mark.trio
async def test_join():
    """Publishing to an unsubscribed topic creates a fanout entry; a later
    subscribe converts that fanout into a mesh containing exactly the peers
    that were already subscribed to the topic."""
    async with PubsubFactory.create_batch_with_gossipsub(
        4, degree=4, degree_low=3, degree_high=5
    ) as pubsubs_gsub:
        gossipsubs = [pubsub.router for pubsub in pubsubs_gsub]
        hosts = [pubsub.host for pubsub in pubsubs_gsub]
        hosts_indices = list(range(len(pubsubs_gsub)))
        topic = "test_join"
        central_node_index = 0
        # Remove index of central host from the indices
        hosts_indices.remove(central_node_index)
        num_subscribed_peer = 2
        subscribed_peer_indices = random.sample(hosts_indices, num_subscribed_peer)
        # All pubsub except the one of central node subscribe to topic
        for i in subscribed_peer_indices:
            await pubsubs_gsub[i].subscribe(topic)
        # Connect central host to all other hosts
        await one_to_all_connect(hosts, central_node_index)
        # Wait 2 seconds for heartbeat to allow mesh to connect
        await trio.sleep(2)
        # Central node publishes to the topic so that this topic
        # is added to central node's fanout
        await pubsubs_gsub[central_node_index].publish(topic, b"data")
        # Check that the gossipsub of central node has fanout for the topic
        assert topic in gossipsubs[central_node_index].fanout
        # Check that the gossipsub of central node does not have a mesh for the topic
        assert topic not in gossipsubs[central_node_index].mesh
        # Central node subscribes the topic
        await pubsubs_gsub[central_node_index].subscribe(topic)
        await trio.sleep(2)
        # Check that the gossipsub of central node no longer has fanout for the topic
        assert topic not in gossipsubs[central_node_index].fanout
        for i in hosts_indices:
            if i in subscribed_peer_indices:
                # Subscribed peers must be in the central node's mesh, and vice versa.
                assert hosts[i].get_id() in gossipsubs[central_node_index].mesh[topic]
                assert hosts[central_node_index].get_id() in gossipsubs[i].mesh[topic]
            else:
                # Non-subscribed peers must not appear in any mesh for the topic.
                assert (
                    hosts[i].get_id() not in gossipsubs[central_node_index].mesh[topic]
                )
                assert topic not in gossipsubs[i].mesh
@pytest.mark.trio
async def test_leave():
    """Joining a topic creates a mesh entry; leaving removes it; leaving
    an already-left topic is a harmless no-op."""
    async with PubsubFactory.create_batch_with_gossipsub(1) as pubsubs_gsub:
        router = pubsubs_gsub[0].router
        topic = "test_leave"
        # No mesh entry exists before joining.
        assert topic not in router.mesh
        await router.join(topic)
        assert topic in router.mesh
        await router.leave(topic)
        assert topic not in router.mesh
        # Leaving a second time must not raise.
        await router.leave(topic)
@pytest.mark.trio
async def test_handle_graft(monkeypatch):
    """A GRAFT for a topic the receiver is not subscribed to triggers a
    PRUNE reply; a GRAFT for a subscribed topic adds the sender to the
    receiver's mesh."""
    async with PubsubFactory.create_batch_with_gossipsub(2) as pubsubs_gsub:
        gossipsubs = tuple(pubsub.router for pubsub in pubsubs_gsub)
        index_alice = 0
        id_alice = pubsubs_gsub[index_alice].my_id
        index_bob = 1
        id_bob = pubsubs_gsub[index_bob].my_id
        await connect(pubsubs_gsub[index_alice].host, pubsubs_gsub[index_bob].host)
        # Wait 2 seconds for heartbeat to allow mesh to connect
        await trio.sleep(2)
        topic = "test_handle_graft"
        # Only alice subscribes to the topic
        await gossipsubs[index_alice].join(topic)
        # Monkey patch bob's `emit_prune` function so we can
        # check if it is called in `handle_graft`
        event_emit_prune = trio.Event()
        async def emit_prune(topic, sender_peer_id):
            event_emit_prune.set()
            await trio.lowlevel.checkpoint()
        monkeypatch.setattr(gossipsubs[index_bob], "emit_prune", emit_prune)
        # Check that alice is bob's peer but not his mesh peer
        assert gossipsubs[index_bob].peer_protocol[id_alice] == PROTOCOL_ID
        assert topic not in gossipsubs[index_bob].mesh
        await gossipsubs[index_alice].emit_graft(topic, id_bob)
        # Check that `emit_prune` is called: bob is not subscribed, so he prunes.
        await event_emit_prune.wait()
        # Check that bob is alice's peer but not her mesh peer
        assert topic in gossipsubs[index_alice].mesh
        assert id_bob not in gossipsubs[index_alice].mesh[topic]
        assert gossipsubs[index_alice].peer_protocol[id_bob] == PROTOCOL_ID
        await gossipsubs[index_bob].emit_graft(topic, id_alice)
        await trio.sleep(1)
        # Check that bob is now alice's mesh peer (alice IS subscribed)
        assert id_bob in gossipsubs[index_alice].mesh[topic]
@pytest.mark.trio
async def test_handle_prune():
    """A PRUNE removes the sender from the receiver's mesh, but does not
    remove the receiver from the sender's mesh."""
    async with PubsubFactory.create_batch_with_gossipsub(
        2, heartbeat_interval=3
    ) as pubsubs_gsub:
        gossipsubs = tuple(pubsub.router for pubsub in pubsubs_gsub)
        index_alice = 0
        id_alice = pubsubs_gsub[index_alice].my_id
        index_bob = 1
        id_bob = pubsubs_gsub[index_bob].my_id
        topic = "test_handle_prune"
        for pubsub in pubsubs_gsub:
            await pubsub.subscribe(topic)
        await connect(pubsubs_gsub[index_alice].host, pubsubs_gsub[index_bob].host)
        # Wait for heartbeat to allow mesh to connect
        await trio.sleep(1)
        # Check that they are each other's mesh peer
        assert id_alice in gossipsubs[index_bob].mesh[topic]
        assert id_bob in gossipsubs[index_alice].mesh[topic]
        # alice emit prune message to bob, alice should be removed
        # from bob's mesh peer
        await gossipsubs[index_alice].emit_prune(topic, id_bob)
        # `emit_prune` does not remove bob from alice's mesh peers
        assert id_bob in gossipsubs[index_alice].mesh[topic]
        # NOTE: We increase `heartbeat_interval` to 3 seconds so that bob will not
        # add alice back to his mesh after heartbeat.
        # Wait for bob to `handle_prune`
        await trio.sleep(0.1)
        # Check that alice is no longer bob's mesh peer
        assert id_alice not in gossipsubs[index_bob].mesh[topic]
@pytest.mark.trio
async def test_dense():
    """Publish several messages from random origins in a densely connected
    network and verify every subscriber receives each one."""
    async with PubsubFactory.create_batch_with_gossipsub(10) as pubsubs_gsub:
        peers = [pubsub.host for pubsub in pubsubs_gsub]
        total_msgs = 5
        # Every node subscribes to the same topic.
        subscriptions = [await pubsub.subscribe("foobar") for pubsub in pubsubs_gsub]
        # Randomly wire the hosts into a dense topology.
        await dense_connect(peers)
        # Give the heartbeat two seconds to build the mesh.
        await trio.sleep(2)
        for seq in range(total_msgs):
            payload = b"foo " + seq.to_bytes(1, "big")
            # Publish from a randomly selected node.
            publisher = random.randint(0, len(peers) - 1)
            await pubsubs_gsub[publisher].publish("foobar", payload)
            await trio.sleep(0.5)
            # Every subscription must yield exactly this message.
            for subscription in subscriptions:
                received = await subscription.get()
                assert received.data == payload
@pytest.mark.trio
async def test_fanout():
    """Messages published by an unsubscribed origin still reach all
    subscribers (via fanout); after the origin subscribes, delivery —
    including to the origin itself — keeps working."""
    async with PubsubFactory.create_batch_with_gossipsub(10) as pubsubs_gsub:
        hosts = [pubsub.host for pubsub in pubsubs_gsub]
        num_msgs = 5
        # All pubsub subscribe to foobar except for `pubsubs_gsub[0]`
        subs = [await pubsub.subscribe("foobar") for pubsub in pubsubs_gsub[1:]]
        # Densely connect libp2p hosts in a random way
        await dense_connect(hosts)
        # Wait 2 seconds for heartbeat to allow mesh to connect
        await trio.sleep(2)
        topic = "foobar"
        # Send messages with origin not subscribed
        for i in range(num_msgs):
            msg_content = b"foo " + i.to_bytes(1, "big")
            # Pick the message origin to the node that is not subscribed to 'foobar'
            origin_idx = 0
            await pubsubs_gsub[origin_idx].publish(topic, msg_content)
            await trio.sleep(0.5)
            # Assert that all blocking queues receive the message
            for sub in subs:
                msg = await sub.get()
                assert msg.data == msg_content
        # Subscribe message origin
        subs.insert(0, await pubsubs_gsub[0].subscribe(topic))
        # Send messages again
        for i in range(num_msgs):
            msg_content = b"bar " + i.to_bytes(1, "big")
            # Keep publishing from the (now subscribed) origin node
            origin_idx = 0
            await pubsubs_gsub[origin_idx].publish(topic, msg_content)
            await trio.sleep(0.5)
            # Assert that all blocking queues receive the message
            for sub in subs:
                msg = await sub.get()
                assert msg.data == msg_content
@pytest.mark.trio
@pytest.mark.slow
async def test_fanout_maintenance():
    """Fanout delivery from an unsubscribed origin keeps working across a
    full unsubscribe/resubscribe cycle of all receivers."""
    async with PubsubFactory.create_batch_with_gossipsub(10) as pubsubs_gsub:
        hosts = [pubsub.host for pubsub in pubsubs_gsub]
        num_msgs = 5
        # All pubsub (except index 0, the origin) subscribe to foobar
        queues = []
        topic = "foobar"
        for i in range(1, len(pubsubs_gsub)):
            q = await pubsubs_gsub[i].subscribe(topic)
            # Add each blocking queue to an array of blocking queues
            queues.append(q)
        # Densely connect libp2p hosts in a random way
        await dense_connect(hosts)
        # Wait 2 seconds for heartbeat to allow mesh to connect
        await trio.sleep(2)
        # Send messages with origin not subscribed
        for i in range(num_msgs):
            msg_content = b"foo " + i.to_bytes(1, "big")
            # Pick the message origin to the node that is not subscribed to 'foobar'
            origin_idx = 0
            await pubsubs_gsub[origin_idx].publish(topic, msg_content)
            await trio.sleep(0.5)
            # Assert that all blocking queues receive the message
            for queue in queues:
                msg = await queue.get()
                assert msg.data == msg_content
        # Everyone (including the origin) drops the topic.
        for sub in pubsubs_gsub:
            await sub.unsubscribe(topic)
        queues = []
        await trio.sleep(2)
        # Resub and repeat
        for i in range(1, len(pubsubs_gsub)):
            q = await pubsubs_gsub[i].subscribe(topic)
            # Add each blocking queue to an array of blocking queues
            queues.append(q)
        await trio.sleep(2)
        # Check messages can still be sent
        for i in range(num_msgs):
            msg_content = b"bar " + i.to_bytes(1, "big")
            # Pick the message origin to the node that is not subscribed to 'foobar'
            origin_idx = 0
            await pubsubs_gsub[origin_idx].publish(topic, msg_content)
            await trio.sleep(0.5)
            # Assert that all blocking queues receive the message
            for queue in queues:
                msg = await queue.get()
                assert msg.data == msg_content
@pytest.mark.trio
async def test_gossip_propagation():
    """A node that publishes to a topic it subscribes to receives its own
    message back through its subscription queue."""
    async with PubsubFactory.create_batch_with_gossipsub(
        2, degree=1, degree_low=0, degree_high=2, gossip_window=50, gossip_history=100
    ) as pubsubs_gsub:
        topic = "foo"
        subscription = await pubsubs_gsub[0].subscribe(topic)
        payload = b"foo_msg"
        # Node 0 publishes to the topic it just subscribed to.
        await pubsubs_gsub[0].publish(topic, payload)
        await trio.sleep(0.5)
        # The message must arrive on node 0's own subscription queue.
        received = await subscription.get()
        assert received.data == payload
@pytest.mark.parametrize("initial_mesh_peer_count", (7, 10, 13))
@pytest.mark.trio
async def test_mesh_heartbeat(initial_mesh_peer_count, monkeypatch):
    """mesh_heartbeat must GRAFT up to `degree` peers when the mesh is too
    small, PRUNE down to `degree` when it is too large, and do nothing when
    the mesh size equals `degree`."""
    async with PubsubFactory.create_batch_with_gossipsub(
        1, heartbeat_initial_delay=100
    ) as pubsubs_gsub:
        # It's difficult to set up the initial peer subscription condition.
        # Ideally I would like to have initial mesh peer count that's below ``GossipSubDegree``
        # so I can test if `mesh_heartbeat` return correct peers to GRAFT.
        # The problem is that I can not set it up so that we have peers subscribe to the topic
        # but not being part of our mesh peers (as these peers are the peers to GRAFT).
        # So I monkeypatch the peer subscriptions and our mesh peers.
        total_peer_count = 14
        topic = "TEST_MESH_HEARTBEAT"
        fake_peer_ids = [IDFactory() for _ in range(total_peer_count)]
        peer_protocol = {peer_id: PROTOCOL_ID for peer_id in fake_peer_ids}
        monkeypatch.setattr(pubsubs_gsub[0].router, "peer_protocol", peer_protocol)
        peer_topics = {topic: set(fake_peer_ids)}
        # Monkeypatch the peer subscriptions
        monkeypatch.setattr(pubsubs_gsub[0], "peer_topics", peer_topics)
        mesh_peer_indices = random.sample(
            range(total_peer_count), initial_mesh_peer_count
        )
        mesh_peers = [fake_peer_ids[i] for i in mesh_peer_indices]
        router_mesh = {topic: set(mesh_peers)}
        # Monkeypatch our mesh peers
        monkeypatch.setattr(pubsubs_gsub[0].router, "mesh", router_mesh)
        peers_to_graft, peers_to_prune = pubsubs_gsub[0].router.mesh_heartbeat()
        if initial_mesh_peer_count > pubsubs_gsub[0].router.degree:
            # If number of initial mesh peers is more than `GossipSubDegree`,
            # we should PRUNE mesh peers
            assert len(peers_to_graft) == 0
            assert (
                len(peers_to_prune)
                == initial_mesh_peer_count - pubsubs_gsub[0].router.degree
            )
            # Pruned peers must come from the current mesh.
            for peer in peers_to_prune:
                assert peer in mesh_peers
        elif initial_mesh_peer_count < pubsubs_gsub[0].router.degree:
            # If number of initial mesh peers is less than `GossipSubDegree`,
            # we should GRAFT more peers
            assert len(peers_to_prune) == 0
            assert (
                len(peers_to_graft)
                == pubsubs_gsub[0].router.degree - initial_mesh_peer_count
            )
            # Grafted peers must be subscribed peers not already in the mesh.
            for peer in peers_to_graft:
                assert peer not in mesh_peers
        else:
            # Mesh already at `degree`: no changes expected.
            assert len(peers_to_prune) == 0 and len(peers_to_graft) == 0
@pytest.mark.parametrize("initial_peer_count", (1, 4, 7))
@pytest.mark.trio
async def test_gossip_heartbeat(initial_peer_count, monkeypatch):
    """gossip_heartbeat must select up to `degree` non-mesh / non-fanout
    peers per topic to gossip IHAVE messages to, for both a mesh topic and
    a fanout topic."""
    async with PubsubFactory.create_batch_with_gossipsub(
        1, heartbeat_initial_delay=100
    ) as pubsubs_gsub:
        # It is difficult to arrange peers that subscribe to the topic
        # but are not part of our mesh/fanout peers (the gossip targets).
        # So I monkeypatch the peer subscriptions and our mesh peers.
        total_peer_count = 28
        topic_mesh = "TEST_GOSSIP_HEARTBEAT_1"
        topic_fanout = "TEST_GOSSIP_HEARTBEAT_2"
        fake_peer_ids = [IDFactory() for _ in range(total_peer_count)]
        peer_protocol = {peer_id: PROTOCOL_ID for peer_id in fake_peer_ids}
        monkeypatch.setattr(pubsubs_gsub[0].router, "peer_protocol", peer_protocol)
        topic_mesh_peer_count = 14
        # Split into mesh peers and fanout peers
        peer_topics = {
            topic_mesh: set(fake_peer_ids[:topic_mesh_peer_count]),
            topic_fanout: set(fake_peer_ids[topic_mesh_peer_count:]),
        }
        # Monkeypatch the peer subscriptions
        monkeypatch.setattr(pubsubs_gsub[0], "peer_topics", peer_topics)
        mesh_peer_indices = random.sample(
            range(topic_mesh_peer_count), initial_peer_count
        )
        mesh_peers = [fake_peer_ids[i] for i in mesh_peer_indices]
        router_mesh = {topic_mesh: set(mesh_peers)}
        # Monkeypatch our mesh peers
        monkeypatch.setattr(pubsubs_gsub[0].router, "mesh", router_mesh)
        fanout_peer_indices = random.sample(
            range(topic_mesh_peer_count, total_peer_count), initial_peer_count
        )
        fanout_peers = [fake_peer_ids[i] for i in fanout_peer_indices]
        router_fanout = {topic_fanout: set(fanout_peers)}
        # Monkeypatch our fanout peers
        monkeypatch.setattr(pubsubs_gsub[0].router, "fanout", router_fanout)
        def window(topic):
            # Pretend the message cache holds one entry per known topic.
            if topic == topic_mesh:
                return [topic_mesh]
            elif topic == topic_fanout:
                return [topic_fanout]
            else:
                return []
        # Monkeypatch the memory cache messages
        monkeypatch.setattr(pubsubs_gsub[0].router.mcache, "window", window)
        peers_to_gossip = pubsubs_gsub[0].router.gossip_heartbeat()
        # If our mesh peer count is less than `GossipSubDegree`, we should gossip to up to
        # `GossipSubDegree` peers (exclude mesh peers).
        if topic_mesh_peer_count - initial_peer_count < pubsubs_gsub[0].router.degree:
            # The same goes for fanout so it's two times the number of peers to gossip.
            assert len(peers_to_gossip) == 2 * (
                topic_mesh_peer_count - initial_peer_count
            )
        elif (
            topic_mesh_peer_count - initial_peer_count >= pubsubs_gsub[0].router.degree
        ):
            assert len(peers_to_gossip) == 2 * (pubsubs_gsub[0].router.degree)
        for peer in peers_to_gossip:
            if peer in peer_topics[topic_mesh]:
                # Check that the peer to gossip to is not in our mesh peers
                assert peer not in mesh_peers
                assert topic_mesh in peers_to_gossip[peer]
            elif peer in peer_topics[topic_fanout]:
                # Check that the peer to gossip to is not in our fanout peers
                assert peer not in fanout_peers
                assert topic_fanout in peers_to_gossip[peer]
|
5e225091b01cbce2530241d2a40735724c0de856
|
fda6a1be714d8e27a5d8dd3df795df45538f2fe7
|
/graphene/types/tests/test_type_map.py
|
55665b6b8888efe0b71d31653cc6aa352e329355
|
[
"MIT"
] |
permissive
|
graphql-python/graphene
|
6badaaa97c8ad78552a656f9da9ed577cfc37add
|
93cb33d359bf2109d1b81eaeaf052cdb06f93f49
|
refs/heads/master
| 2023-08-05T02:48:36.967050
| 2023-07-26T07:43:40
| 2023-07-26T07:43:40
| 43,056,951
| 8,187
| 1,088
|
MIT
| 2023-09-01T19:59:19
| 2015-09-24T09:18:18
|
Python
|
UTF-8
|
Python
| false
| false
| 10,417
|
py
|
test_type_map.py
|
from graphql import Undefined
from graphql.type import (
GraphQLArgument,
GraphQLEnumType,
GraphQLEnumValue,
GraphQLField,
GraphQLInputField,
GraphQLInputObjectType,
GraphQLInterfaceType,
GraphQLNonNull,
GraphQLObjectType,
GraphQLString,
)
from ..dynamic import Dynamic
from ..enum import Enum
from ..field import Field
from ..inputfield import InputField
from ..inputobjecttype import InputObjectType
from ..interface import Interface
from ..objecttype import ObjectType
from ..scalars import Int, String
from ..schema import Schema
from ..structures import List, NonNull
def create_type_map(types, auto_camelcase=True):
    """Build a Schema around an empty Query type plus *types* and return
    the resulting graphql-core type map."""
    empty_query = type("Query", (ObjectType,), {})
    schema = Schema(empty_query, types=types, auto_camelcase=auto_camelcase)
    return schema.graphql_schema.type_map
def test_enum():
    """An Enum with per-value description/deprecation properties maps to a
    GraphQLEnumType carrying those values."""
    class MyEnum(Enum):
        """Description"""
        foo = 1
        bar = 2
        @property
        def description(self):
            # Per-value description, computed from name and value.
            return f"Description {self.name}={self.value}"
        @property
        def deprecation_reason(self):
            # Only `foo` is deprecated; `bar` returns None implicitly.
            if self == MyEnum.foo:
                return "Is deprecated"
    type_map = create_type_map([MyEnum])
    assert "MyEnum" in type_map
    graphql_enum = type_map["MyEnum"]
    assert isinstance(graphql_enum, GraphQLEnumType)
    assert graphql_enum.name == "MyEnum"
    assert graphql_enum.description == "Description"
    assert graphql_enum.values == {
        "foo": GraphQLEnumValue(
            value=1, description="Description foo=1", deprecation_reason="Is deprecated"
        ),
        "bar": GraphQLEnumValue(value=2, description="Description bar=2"),
    }
def test_objecttype():
    """An ObjectType with documented fields/arguments and a renamed field
    maps to a GraphQLObjectType preserving descriptions, defaults and the
    custom field name."""
    class MyObjectType(ObjectType):
        """Description"""
        foo = String(
            bar=String(description="Argument description", default_value="x"),
            description="Field description",
        )
        bar = String(name="gizmo")
        def resolve_foo(self, bar):
            return bar
    type_map = create_type_map([MyObjectType])
    assert "MyObjectType" in type_map
    graphql_type = type_map["MyObjectType"]
    assert isinstance(graphql_type, GraphQLObjectType)
    assert graphql_type.name == "MyObjectType"
    assert graphql_type.description == "Description"
    fields = graphql_type.fields
    # `bar` is exposed under its explicit name "gizmo".
    assert list(fields) == ["foo", "gizmo"]
    foo_field = fields["foo"]
    assert isinstance(foo_field, GraphQLField)
    assert foo_field.description == "Field description"
    assert foo_field.args == {
        "bar": GraphQLArgument(
            GraphQLString,
            description="Argument description",
            default_value="x",
            out_name="bar",
        )
    }
def test_required_argument_with_default_value():
    """A required argument may still carry a default value; it maps to a
    NonNull(String) argument with that default."""
    class MyObjectType(ObjectType):
        foo = String(bar=String(required=True, default_value="x"))

    arg = (
        create_type_map([MyObjectType])["MyObjectType"]
        .fields["foo"]
        .args["bar"]
    )
    assert arg.default_value == "x"
    assert isinstance(arg.type, GraphQLNonNull)
    assert arg.type.of_type == GraphQLString
def test_dynamic_objecttype():
    """Dynamic fields and self-referencing lambda fields resolve correctly
    in the type map."""
    class MyObjectType(ObjectType):
        """Description"""
        bar = Dynamic(lambda: Field(String))
        # Lazy self-reference to the enclosing type.
        own = Field(lambda: MyObjectType)
    type_map = create_type_map([MyObjectType])
    assert "MyObjectType" in type_map
    assert list(MyObjectType._meta.fields) == ["bar", "own"]
    graphql_type = type_map["MyObjectType"]
    fields = graphql_type.fields
    assert list(fields) == ["bar", "own"]
    assert fields["bar"].type == GraphQLString
    # The self-reference maps back to the same GraphQL type object.
    assert fields["own"].type == graphql_type
def test_interface():
    """An Interface maps to a GraphQLInterfaceType; resolvers are NOT
    attached to interface fields, and argument auto-camelcasing respects
    explicit names."""
    class MyInterface(Interface):
        """Description"""
        foo = String(
            bar=String(description="Argument description", default_value="x"),
            description="Field description",
        )
        bar = String(name="gizmo", first_arg=String(), other_arg=String(name="oth_arg"))
        own = Field(lambda: MyInterface)
        def resolve_foo(self, args, info):
            return args.get("bar")
    type_map = create_type_map([MyInterface])
    assert "MyInterface" in type_map
    graphql_type = type_map["MyInterface"]
    assert isinstance(graphql_type, GraphQLInterfaceType)
    assert graphql_type.name == "MyInterface"
    assert graphql_type.description == "Description"
    fields = graphql_type.fields
    assert list(fields) == ["foo", "gizmo", "own"]
    assert fields["own"].type == graphql_type
    # `first_arg` is camelcased; `other_arg` keeps its explicit name.
    assert list(fields["gizmo"].args) == ["firstArg", "oth_arg"]
    foo_field = fields["foo"]
    assert isinstance(foo_field, GraphQLField)
    assert foo_field.description == "Field description"
    assert not foo_field.resolve  # Resolver not attached in interfaces
    assert foo_field.args == {
        "bar": GraphQLArgument(
            GraphQLString,
            description="Argument description",
            default_value="x",
            out_name="bar",
        )
    }
def test_inputobject():
    """Nested InputObjectTypes map to GraphQLInputObjectType; their
    `out_type` containers behave like instances with attribute access,
    returning None for unset fields (default behaviour)."""
    class OtherObjectType(InputObjectType):
        thingy = NonNull(Int)
    class MyInnerObjectType(InputObjectType):
        some_field = String()
        some_other_field = List(OtherObjectType)
    class MyInputObjectType(InputObjectType):
        """Description"""
        foo_bar = String(description="Field description")
        bar = String(name="gizmo")
        baz = NonNull(MyInnerObjectType)
        own = InputField(lambda: MyInputObjectType)
        def resolve_foo_bar(self, args, info):
            return args.get("bar")
    type_map = create_type_map([MyInputObjectType])
    assert "MyInputObjectType" in type_map
    graphql_type = type_map["MyInputObjectType"]
    assert isinstance(graphql_type, GraphQLInputObjectType)
    assert graphql_type.name == "MyInputObjectType"
    assert graphql_type.description == "Description"
    other_graphql_type = type_map["OtherObjectType"]
    inner_graphql_type = type_map["MyInnerObjectType"]
    # Build a nested container the way graphql-core would from parsed input.
    container = graphql_type.out_type(
        {
            "bar": "oh!",
            "baz": inner_graphql_type.out_type(
                {
                    "some_other_field": [
                        other_graphql_type.out_type({"thingy": 1}),
                        other_graphql_type.out_type({"thingy": 2}),
                    ]
                }
            ),
        }
    )
    assert isinstance(container, MyInputObjectType)
    assert "bar" in container
    assert container.bar == "oh!"
    # Unset fields are absent from the mapping but readable as None.
    assert "foo_bar" not in container
    assert container.foo_bar is None
    assert container.baz.some_field is None
    assert container.baz.some_other_field[0].thingy == 1
    assert container.baz.some_other_field[1].thingy == 2
    fields = graphql_type.fields
    assert list(fields) == ["fooBar", "gizmo", "baz", "own"]
    own_field = fields["own"]
    assert own_field.type == graphql_type
    foo_field = fields["fooBar"]
    assert isinstance(foo_field, GraphQLInputField)
    assert foo_field.description == "Field description"
def test_inputobject_undefined(set_default_input_object_type_to_undefined):
    """With the undefined-default fixture active, unset optional input
    fields read back as ``Undefined`` instead of ``None``."""
    class OtherObjectType(InputObjectType):
        optional_field = String()

    type_map = create_type_map([OtherObjectType])
    assert "OtherObjectType" in type_map
    empty_container = type_map["OtherObjectType"].out_type({})
    assert empty_container.optional_field is Undefined
def test_objecttype_camelcase():
    """With auto_camelcase on (the default), snake_case field and argument
    names are camelcased, while out_name keeps the original Python name."""
    class MyObjectType(ObjectType):
        """Description"""
        foo_bar = String(bar_foo=String())
    type_map = create_type_map([MyObjectType])
    assert "MyObjectType" in type_map
    graphql_type = type_map["MyObjectType"]
    assert isinstance(graphql_type, GraphQLObjectType)
    assert graphql_type.name == "MyObjectType"
    assert graphql_type.description == "Description"
    fields = graphql_type.fields
    assert list(fields) == ["fooBar"]
    foo_field = fields["fooBar"]
    assert isinstance(foo_field, GraphQLField)
    assert foo_field.args == {
        "barFoo": GraphQLArgument(
            GraphQLString, default_value=Undefined, out_name="bar_foo"
        )
    }
def test_objecttype_camelcase_disabled():
    """With auto_camelcase off, field and argument names keep their
    original snake_case spelling."""
    class MyObjectType(ObjectType):
        """Description"""
        foo_bar = String(bar_foo=String())
    type_map = create_type_map([MyObjectType], auto_camelcase=False)
    assert "MyObjectType" in type_map
    graphql_type = type_map["MyObjectType"]
    assert isinstance(graphql_type, GraphQLObjectType)
    assert graphql_type.name == "MyObjectType"
    assert graphql_type.description == "Description"
    fields = graphql_type.fields
    assert list(fields) == ["foo_bar"]
    foo_field = fields["foo_bar"]
    assert isinstance(foo_field, GraphQLField)
    assert foo_field.args == {
        "bar_foo": GraphQLArgument(
            GraphQLString, default_value=Undefined, out_name="bar_foo"
        )
    }
def test_objecttype_with_possible_types():
    """Meta.possible_types drives is_type_of: dict instances match, while
    instances of the Graphene class itself do not."""
    class MyObjectType(ObjectType):
        """Description"""
        class Meta:
            possible_types = (dict,)
        foo_bar = String()

    graphql_type = create_type_map([MyObjectType])["MyObjectType"]
    assert graphql_type.is_type_of
    assert graphql_type.is_type_of({}, None) is True
    assert graphql_type.is_type_of(MyObjectType(), None) is False
def test_interface_with_interfaces():
    """An Interface may itself implement another Interface; the child's
    type map entry lists the parent in its `interfaces`."""
    class FooInterface(Interface):
        foo = String()
    class BarInterface(Interface):
        class Meta:
            interfaces = [FooInterface]
        foo = String()
        bar = String()
    type_map = create_type_map([FooInterface, BarInterface])
    assert "FooInterface" in type_map
    foo_graphql_type = type_map["FooInterface"]
    assert isinstance(foo_graphql_type, GraphQLInterfaceType)
    assert foo_graphql_type.name == "FooInterface"
    assert "BarInterface" in type_map
    bar_graphql_type = type_map["BarInterface"]
    assert isinstance(bar_graphql_type, GraphQLInterfaceType)
    assert bar_graphql_type.name == "BarInterface"
    fields = bar_graphql_type.fields
    assert list(fields) == ["foo", "bar"]
    assert isinstance(fields["foo"], GraphQLField)
    assert isinstance(fields["bar"], GraphQLField)
    # The inherited interface shows up in the child's interface list.
    assert list(bar_graphql_type.interfaces) == list([foo_graphql_type])
|
67378211176e98a26fa1325000d3c5bd724d539f
|
88ae8695987ada722184307301e221e1ba3cc2fa
|
/third_party/libdrm/src/symbols-check.py
|
2e7ba68d10229cfb48a7694cd9b7123d143f3c3c
|
[
"BSD-3-Clause",
"Apache-2.0",
"LGPL-2.0-or-later",
"MIT",
"GPL-1.0-or-later",
"GPL-2.0-or-later"
] |
permissive
|
iridium-browser/iridium-browser
|
71d9c5ff76e014e6900b825f67389ab0ccd01329
|
5ee297f53dc7f8e70183031cff62f37b0f19d25f
|
refs/heads/master
| 2023-08-03T16:44:16.844552
| 2023-07-20T15:17:00
| 2023-07-23T16:09:30
| 220,016,632
| 341
| 40
|
BSD-3-Clause
| 2021-08-13T13:54:45
| 2019-11-06T14:32:31
| null |
UTF-8
|
Python
| false
| false
| 3,785
|
py
|
symbols-check.py
|
#!/usr/bin/env python3
import argparse
import os
import platform
import subprocess
# This list contains symbols that _might_ be exported for some platforms:
# loader/linker-inserted section markers and C runtime entry points.
# get_symbols() filters them out on Linux so they don't count as
# "unknown" exports.
PLATFORM_SYMBOLS = [
    '__bss_end__',
    '__bss_start__',
    '__bss_start',
    '__end__',
    '_bss_end__',
    '_edata',
    '_end',
    '_fini',
    '_init',
]
def get_symbols(nm, lib):
    '''
    List all the (non platform-specific) symbols exported by the library.

    Runs ``nm -gP`` on *lib* and returns the exported symbol names as a
    list of strings.  Undefined ('U') symbols are skipped; on Linux the
    loader-inserted PLATFORM_SYMBOLS are filtered out; on Darwin the
    leading underscore is stripped from every name.
    '''
    symbols = []
    platform_name = platform.system()
    # subprocess.DEVNULL instead of open(os.devnull, 'w'): the original
    # leaked the devnull file handle (it was never closed).
    output = subprocess.check_output([nm, '-gP', lib],
                                     stderr=subprocess.DEVNULL).decode("ascii")
    for line in output.splitlines():
        fields = line.split()
        # Skip undefined symbols (exactly two fields: "name U") and any
        # short/blank line.  The original `len(fields) == 2` test raised
        # IndexError on lines with fewer than two fields; `<= 2` keeps
        # identical behaviour for well-formed output while tolerating them.
        if len(fields) <= 2 or fields[1] == 'U':
            continue
        symbol_name = fields[0]
        if platform_name == 'Linux':
            # Ignore loader/runtime symbols the platform adds for us.
            if symbol_name in PLATFORM_SYMBOLS:
                continue
        elif platform_name == 'Darwin':
            # Mach-O prefixes every C symbol with '_'; strip it.
            assert symbol_name[0] == '_'
            symbol_name = symbol_name[1:]
        symbols.append(symbol_name)
    return symbols
def main():
    """Compare the symbols exported by a library against a symbols file.

    Exits 0 when the library exports exactly the mandatory symbols (plus
    any subset of the optional ones), 1 on mismatch or a malformed
    symbols file, and 77 (the GNU automake "skip" code) when nm cannot
    be run on the library.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--symbols-file',
                        action='store',
                        required=True,
                        help='path to file containing symbols')
    parser.add_argument('--lib',
                        action='store',
                        required=True,
                        help='path to library')
    parser.add_argument('--nm',
                        action='store',
                        required=True,
                        help='path to binary (or name in $PATH)')
    args = parser.parse_args()

    try:
        lib_symbols = get_symbols(args.nm, args.lib)
    except Exception:
        # We can't run this test, but we haven't technically failed it
        # either; return the GNU "skip" error code.  (`except Exception`
        # rather than a bare `except:` so KeyboardInterrupt/SystemExit
        # still propagate.)
        exit(77)

    mandatory_symbols = []
    optional_symbols = []
    with open(args.symbols_file) as symbols_file:
        qualifier_optional = '(optional)'
        for line in symbols_file.readlines():
            # Strip comments
            line = line.split('#')[0]
            line = line.strip()
            if not line:
                continue

            # Line format:
            # [qualifier] symbol
            fields = line.split()
            if len(fields) == 1:
                qualifier = None
                symbol = fields[0]
            elif len(fields) == 2:
                qualifier = fields[0]
                symbol = fields[1]
            else:
                print(args.symbols_file + ': invalid format: ' + line)
                exit(1)

            # The only supported qualifier is 'optional', which means the
            # symbol doesn't have to be exported by the library
            if qualifier and not qualifier == qualifier_optional:
                print(args.symbols_file + ': invalid qualifier: ' + qualifier)
                exit(1)

            if qualifier == qualifier_optional:
                optional_symbols.append(symbol)
            else:
                mandatory_symbols.append(symbol)

    # Sets give O(1) membership tests; the lists preserve reporting order.
    known_symbols = set(mandatory_symbols) | set(optional_symbols)
    unknown_symbols = [sym for sym in lib_symbols if sym not in known_symbols]
    exported_symbols = set(lib_symbols)
    missing_symbols = [
        sym for sym in mandatory_symbols if sym not in exported_symbols
    ]

    for symbol in unknown_symbols:
        print(args.lib + ': unknown symbol exported: ' + symbol)
    for symbol in missing_symbols:
        print(args.lib + ': missing symbol: ' + symbol)

    if unknown_symbols or missing_symbols:
        exit(1)
    exit(0)
# Run the symbol check when invoked as a script.
if __name__ == '__main__':
    main()
|
5c8ad240ab60f50624fb865575809ae866e1ba7f
|
54292bb222c6525217458e92ddacfc4e2635b83e
|
/python/phonenumbers/carrierdata/data1.py
|
865b7c3d57bdfc7c4ba9f77e023089f2f17cd42e
|
[
"Apache-2.0"
] |
permissive
|
daviddrysdale/python-phonenumbers
|
0d69b48033d1464c0a6c358274062f1db2ee8c4a
|
2f06ef6db2ca83f3856fbb8019a0c665f5971b13
|
refs/heads/dev
| 2023-08-31T09:37:20.570690
| 2023-08-22T05:18:22
| 2023-08-22T05:18:22
| 1,643,611
| 2,944
| 406
|
Apache-2.0
| 2023-08-08T06:49:07
| 2011-04-21T03:06:38
|
Python
|
UTF-8
|
Python
| false
| false
| 356,575
|
py
|
data1.py
|
"""Per-prefix data, mapping each prefix to a dict of locale:name.
Auto-generated file, do not edit by hand.
"""
from ..util import u
# Copyright (C) 2011-2023 The Libphonenumber Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
data = {
'553199642':{'en': 'Telemig Celular'},
'553199643':{'en': 'Telemig Celular'},
'553199644':{'en': 'Telemig Celular'},
'553199645':{'en': 'Telemig Celular'},
'553199646':{'en': 'Telemig Celular'},
'553199647':{'en': 'Telemig Celular'},
'553199648':{'en': 'Telemig Celular'},
'553199649':{'en': 'Telemig Celular'},
'553199651':{'en': 'Telemig Celular'},
'553199652':{'en': 'Telemig Celular'},
'553199653':{'en': 'Telemig Celular'},
'553199654':{'en': 'Telemig Celular'},
'553199655':{'en': 'Telemig Celular'},
'553199656':{'en': 'Telemig Celular'},
'553199657':{'en': 'Telemig Celular'},
'553199658':{'en': 'Telemig Celular'},
'553199659':{'en': 'Telemig Celular'},
'553199661':{'en': 'Telemig Celular'},
'553199662':{'en': 'Telemig Celular'},
'553199663':{'en': 'Telemig Celular'},
'553199664':{'en': 'Telemig Celular'},
'553199665':{'en': 'Telemig Celular'},
'553199666':{'en': 'Telemig Celular'},
'553199667':{'en': 'Telemig Celular'},
'553199668':{'en': 'Telemig Celular'},
'553199669':{'en': 'Telemig Celular'},
'553199671':{'en': 'Telemig Celular'},
'553199672':{'en': 'Telemig Celular'},
'553199673':{'en': 'Telemig Celular'},
'553199674':{'en': 'Telemig Celular'},
'553199675':{'en': 'Telemig Celular'},
'553199676':{'en': 'Telemig Celular'},
'553199677':{'en': 'Telemig Celular'},
'553199678':{'en': 'Telemig Celular'},
'553199679':{'en': 'Telemig Celular'},
'553199681':{'en': 'Telemig Celular'},
'553199682':{'en': 'Telemig Celular'},
'553199683':{'en': 'Telemig Celular'},
'553199684':{'en': 'Telemig Celular'},
'553199685':{'en': 'Telemig Celular'},
'553199686':{'en': 'Telemig Celular'},
'553199687':{'en': 'Telemig Celular'},
'553199688':{'en': 'Telemig Celular'},
'553199689':{'en': 'Telemig Celular'},
'553199691':{'en': 'Telemig Celular'},
'553199692':{'en': 'Telemig Celular'},
'553199693':{'en': 'Telemig Celular'},
'553199694':{'en': 'Telemig Celular'},
'553199695':{'en': 'Telemig Celular'},
'553199696':{'en': 'Telemig Celular'},
'553199697':{'en': 'Telemig Celular'},
'553199698':{'en': 'Telemig Celular'},
'553199699':{'en': 'Telemig Celular'},
'553199701':{'en': 'Telemig Celular'},
'553199702':{'en': 'Telemig Celular'},
'553199703':{'en': 'Telemig Celular'},
'553199704':{'en': 'Telemig Celular'},
'553199705':{'en': 'Telemig Celular'},
'553199706':{'en': 'Telemig Celular'},
'553199707':{'en': 'Telemig Celular'},
'553199708':{'en': 'Telemig Celular'},
'553199709':{'en': 'Telemig Celular'},
'553199711':{'en': 'Telemig Celular'},
'553199712':{'en': 'Telemig Celular'},
'553199713':{'en': 'Telemig Celular'},
'553199714':{'en': 'Telemig Celular'},
'553199715':{'en': 'Telemig Celular'},
'553199717':{'en': 'Telemig Celular'},
'553199718':{'en': 'Telemig Celular'},
'553199719':{'en': 'Telemig Celular'},
'553199721':{'en': 'Telemig Celular'},
'553199722':{'en': 'Telemig Celular'},
'553199723':{'en': 'Telemig Celular'},
'553199724':{'en': 'Telemig Celular'},
'553199725':{'en': 'Telemig Celular'},
'553199726':{'en': 'Telemig Celular'},
'553199728':{'en': 'Telemig Celular'},
'553199729':{'en': 'Telemig Celular'},
'553199731':{'en': 'Telemig Celular'},
'553199732':{'en': 'Telemig Celular'},
'553199733':{'en': 'Telemig Celular'},
'553199734':{'en': 'Telemig Celular'},
'553199735':{'en': 'Telemig Celular'},
'553199736':{'en': 'Telemig Celular'},
'553199737':{'en': 'Telemig Celular'},
'553199738':{'en': 'Telemig Celular'},
'553199739':{'en': 'Telemig Celular'},
'553199741':{'en': 'Telemig Celular'},
'553199742':{'en': 'Telemig Celular'},
'553199743':{'en': 'Telemig Celular'},
'553199744':{'en': 'Telemig Celular'},
'553199745':{'en': 'Telemig Celular'},
'553199746':{'en': 'Telemig Celular'},
'553199747':{'en': 'Telemig Celular'},
'553199748':{'en': 'Telemig Celular'},
'553199749':{'en': 'Telemig Celular'},
'553199751':{'en': 'Telemig Celular'},
'553199752':{'en': 'Telemig Celular'},
'553199753':{'en': 'Telemig Celular'},
'553199755':{'en': 'Telemig Celular'},
'553199756':{'en': 'Telemig Celular'},
'553199757':{'en': 'Telemig Celular'},
'553199758':{'en': 'Telemig Celular'},
'553199759':{'en': 'Telemig Celular'},
'553199761':{'en': 'Telemig Celular'},
'553199762':{'en': 'Telemig Celular'},
'553199763':{'en': 'Telemig Celular'},
'553199764':{'en': 'Telemig Celular'},
'553199765':{'en': 'Telemig Celular'},
'553199766':{'en': 'Telemig Celular'},
'553199767':{'en': 'Telemig Celular'},
'553199768':{'en': 'Telemig Celular'},
'553199769':{'en': 'Telemig Celular'},
'553199771':{'en': 'Telemig Celular'},
'553199772':{'en': 'Telemig Celular'},
'553199773':{'en': 'Telemig Celular'},
'553199774':{'en': 'Telemig Celular'},
'553199775':{'en': 'Telemig Celular'},
'553199776':{'en': 'Telemig Celular'},
'553199777':{'en': 'Telemig Celular'},
'553199778':{'en': 'Telemig Celular'},
'553199779':{'en': 'Telemig Celular'},
'553199781':{'en': 'Telemig Celular'},
'553199782':{'en': 'Telemig Celular'},
'553199783':{'en': 'Telemig Celular'},
'553199784':{'en': 'Telemig Celular'},
'553199785':{'en': 'Telemig Celular'},
'553199786':{'en': 'Telemig Celular'},
'553199787':{'en': 'Telemig Celular'},
'553199788':{'en': 'Telemig Celular'},
'553199789':{'en': 'Telemig Celular'},
'553199791':{'en': 'Telemig Celular'},
'553199792':{'en': 'Telemig Celular'},
'553199793':{'en': 'Telemig Celular'},
'553199794':{'en': 'Telemig Celular'},
'553199795':{'en': 'Telemig Celular'},
'553199796':{'en': 'Telemig Celular'},
'553199797':{'en': 'Telemig Celular'},
'553199798':{'en': 'Telemig Celular'},
'553199799':{'en': 'Telemig Celular'},
'5531998':{'en': 'Telemig Celular'},
'553199800':{'en': 'TIM'},
'553199810':{'en': 'TIM'},
'553199820':{'en': 'TIM'},
'553199830':{'en': 'TIM'},
'553199840':{'en': 'TIM'},
'553199850':{'en': 'TIM'},
'553199860':{'en': 'TIM'},
'553199870':{'en': 'TIM'},
'553199880':{'en': 'TIM'},
'553199890':{'en': 'TIM'},
'553199901':{'en': 'Telemig Celular'},
'553199902':{'en': 'Telemig Celular'},
'553199903':{'en': 'Telemig Celular'},
'553199904':{'en': 'Telemig Celular'},
'553199905':{'en': 'Telemig Celular'},
'553199906':{'en': 'Telemig Celular'},
'553199907':{'en': 'Telemig Celular'},
'553199908':{'en': 'Telemig Celular'},
'553199909':{'en': 'Telemig Celular'},
'553199911':{'en': 'Telemig Celular'},
'553199912':{'en': 'Telemig Celular'},
'553199913':{'en': 'Telemig Celular'},
'553199914':{'en': 'Telemig Celular'},
'553199915':{'en': 'Telemig Celular'},
'553199916':{'en': 'Telemig Celular'},
'553199917':{'en': 'Telemig Celular'},
'553199918':{'en': 'Telemig Celular'},
'553199919':{'en': 'Telemig Celular'},
'553199921':{'en': 'Telemig Celular'},
'553199922':{'en': 'Telemig Celular'},
'553199923':{'en': 'Telemig Celular'},
'553199924':{'en': 'Telemig Celular'},
'553199925':{'en': 'Telemig Celular'},
'553199926':{'en': 'Telemig Celular'},
'553199927':{'en': 'Telemig Celular'},
'553199928':{'en': 'Telemig Celular'},
'553199929':{'en': 'Telemig Celular'},
'553199931':{'en': 'Telemig Celular'},
'553199932':{'en': 'Telemig Celular'},
'553199933':{'en': 'Telemig Celular'},
'553199934':{'en': 'Telemig Celular'},
'553199935':{'en': 'Telemig Celular'},
'553199936':{'en': 'Telemig Celular'},
'553199937':{'en': 'Telemig Celular'},
'553199938':{'en': 'Telemig Celular'},
'553199939':{'en': 'Telemig Celular'},
'553199941':{'en': 'Telemig Celular'},
'553199942':{'en': 'Telemig Celular'},
'553199943':{'en': 'Telemig Celular'},
'553199944':{'en': 'Telemig Celular'},
'553199945':{'en': 'Telemig Celular'},
'553199946':{'en': 'Telemig Celular'},
'553199947':{'en': 'Telemig Celular'},
'553199948':{'en': 'Telemig Celular'},
'553199949':{'en': 'Telemig Celular'},
'55319995':{'en': 'Telemig Celular'},
'55319996':{'en': 'Telemig Celular'},
'55319997':{'en': 'Telemig Celular'},
'55319998':{'en': 'Telemig Celular'},
'55319999':{'en': 'Telemig Celular'},
'55329840':{'en': 'Claro'},
'55329841':{'en': 'Claro'},
'55329842':{'en': 'Claro'},
'55329843':{'en': 'Claro'},
'55329844':{'en': 'Claro'},
'55329845':{'en': 'Claro'},
'55329846':{'en': 'Claro'},
'55329847':{'en': 'Claro'},
'553298480':{'en': 'Claro'},
'553298481':{'en': 'Claro'},
'553298482':{'en': 'Claro'},
'553298483':{'en': 'Claro'},
'553298484':{'en': 'Claro'},
'553298485':{'en': 'Claro'},
'5532985':{'en': 'Oi'},
'5532986':{'en': 'Oi'},
'5532987':{'en': 'Oi'},
'5532988':{'en': 'Oi'},
'5532989':{'en': 'Oi'},
'553299101':{'en': 'TIM'},
'553299102':{'en': 'TIM'},
'553299103':{'en': 'TIM'},
'553299104':{'en': 'TIM'},
'553299105':{'en': 'TIM'},
'553299106':{'en': 'TIM'},
'553299107':{'en': 'TIM'},
'553299108':{'en': 'TIM'},
'553299109':{'en': 'TIM'},
'553299111':{'en': 'TIM'},
'553299112':{'en': 'TIM'},
'553299113':{'en': 'TIM'},
'553299114':{'en': 'TIM'},
'553299115':{'en': 'TIM'},
'553299116':{'en': 'TIM'},
'553299117':{'en': 'TIM'},
'553299118':{'en': 'TIM'},
'553299119':{'en': 'TIM'},
'553299121':{'en': 'TIM'},
'553299122':{'en': 'TIM'},
'553299123':{'en': 'TIM'},
'553299124':{'en': 'TIM'},
'553299125':{'en': 'TIM'},
'553299126':{'en': 'TIM'},
'553299127':{'en': 'TIM'},
'553299128':{'en': 'TIM'},
'553299129':{'en': 'TIM'},
'553299131':{'en': 'TIM'},
'553299132':{'en': 'TIM'},
'553299133':{'en': 'TIM'},
'553299134':{'en': 'TIM'},
'553299135':{'en': 'TIM'},
'553299136':{'en': 'TIM'},
'553299137':{'en': 'TIM'},
'553299138':{'en': 'TIM'},
'553299139':{'en': 'TIM'},
'553299141':{'en': 'TIM'},
'553299142':{'en': 'TIM'},
'553299143':{'en': 'TIM'},
'553299144':{'en': 'TIM'},
'553299145':{'en': 'TIM'},
'553299146':{'en': 'TIM'},
'553299193':{'en': 'TIM'},
'553299194':{'en': 'TIM'},
'553299195':{'en': 'TIM'},
'553299197':{'en': 'TIM'},
'553299198':{'en': 'TIM'},
'553299199':{'en': 'TIM'},
'553299901':{'en': 'Telemig Celular'},
'553299902':{'en': 'Telemig Celular'},
'553299903':{'en': 'Telemig Celular'},
'553299904':{'en': 'Telemig Celular'},
'553299905':{'en': 'Telemig Celular'},
'553299906':{'en': 'Telemig Celular'},
'553299907':{'en': 'Telemig Celular'},
'553299908':{'en': 'Telemig Celular'},
'553299909':{'en': 'Telemig Celular'},
'553299911':{'en': 'Telemig Celular'},
'553299912':{'en': 'Telemig Celular'},
'553299913':{'en': 'Telemig Celular'},
'553299914':{'en': 'Telemig Celular'},
'553299917':{'en': 'Telemig Celular'},
'553299918':{'en': 'Telemig Celular'},
'553299919':{'en': 'Telemig Celular'},
'553299921':{'en': 'Telemig Celular'},
'553299922':{'en': 'Telemig Celular'},
'553299923':{'en': 'Telemig Celular'},
'553299924':{'en': 'Telemig Celular'},
'553299925':{'en': 'Telemig Celular'},
'553299931':{'en': 'Telemig Celular'},
'553299932':{'en': 'Telemig Celular'},
'553299933':{'en': 'Telemig Celular'},
'553299934':{'en': 'Telemig Celular'},
'553299935':{'en': 'Telemig Celular'},
'553299936':{'en': 'Telemig Celular'},
'553299937':{'en': 'Telemig Celular'},
'553299938':{'en': 'Telemig Celular'},
'553299939':{'en': 'Telemig Celular'},
'553299941':{'en': 'Telemig Celular'},
'553299942':{'en': 'Telemig Celular'},
'553299943':{'en': 'Telemig Celular'},
'553299944':{'en': 'Telemig Celular'},
'553299945':{'en': 'Telemig Celular'},
'553299946':{'en': 'Telemig Celular'},
'553299947':{'en': 'Telemig Celular'},
'553299948':{'en': 'Telemig Celular'},
'553299949':{'en': 'Telemig Celular'},
'553299951':{'en': 'Telemig Celular'},
'553299952':{'en': 'Telemig Celular'},
'553299953':{'en': 'Telemig Celular'},
'553299954':{'en': 'Telemig Celular'},
'553299955':{'en': 'Telemig Celular'},
'553299956':{'en': 'Telemig Celular'},
'553299957':{'en': 'Telemig Celular'},
'553299958':{'en': 'Telemig Celular'},
'553299959':{'en': 'Telemig Celular'},
'55329996':{'en': 'Telemig Celular'},
'553299971':{'en': 'Telemig Celular'},
'553299972':{'en': 'Telemig Celular'},
'553299973':{'en': 'Telemig Celular'},
'553299974':{'en': 'Telemig Celular'},
'553299975':{'en': 'Telemig Celular'},
'553299976':{'en': 'Telemig Celular'},
'553299977':{'en': 'Telemig Celular'},
'553299979':{'en': 'Telemig Celular'},
'55329998':{'en': 'Telemig Celular'},
'553299991':{'en': 'Telemig Celular'},
'553299992':{'en': 'Telemig Celular'},
'553299993':{'en': 'Telemig Celular'},
'553299994':{'en': 'Telemig Celular'},
'553299995':{'en': 'Telemig Celular'},
'553299996':{'en': 'Telemig Celular'},
'553299997':{'en': 'Telemig Celular'},
'553299998':{'en': 'Telemig Celular'},
'553398401':{'en': 'Claro'},
'553398402':{'en': 'Claro'},
'553398403':{'en': 'Claro'},
'553398404':{'en': 'Claro'},
'553398405':{'en': 'Claro'},
'553398406':{'en': 'Claro'},
'553398407':{'en': 'Claro'},
'553398408':{'en': 'Claro'},
'553398409':{'en': 'Claro'},
'553398411':{'en': 'Claro'},
'553398412':{'en': 'Claro'},
'553398413':{'en': 'Claro'},
'553398414':{'en': 'Claro'},
'553398415':{'en': 'Claro'},
'553398416':{'en': 'Claro'},
'553398417':{'en': 'Claro'},
'553398418':{'en': 'Claro'},
'553398419':{'en': 'Claro'},
'553398421':{'en': 'Claro'},
'553398422':{'en': 'Claro'},
'553398423':{'en': 'Claro'},
'553398424':{'en': 'Claro'},
'553398425':{'en': 'Claro'},
'553398426':{'en': 'Claro'},
'553398427':{'en': 'Claro'},
'553398428':{'en': 'Claro'},
'553398429':{'en': 'Claro'},
'553398431':{'en': 'Claro'},
'553398432':{'en': 'Claro'},
'553398433':{'en': 'Claro'},
'553398434':{'en': 'Claro'},
'553398435':{'en': 'Claro'},
'553398436':{'en': 'Claro'},
'553398437':{'en': 'Claro'},
'553398438':{'en': 'Claro'},
'553398439':{'en': 'Claro'},
'553398441':{'en': 'Claro'},
'553398442':{'en': 'Claro'},
'553398443':{'en': 'Claro'},
'553398444':{'en': 'Claro'},
'553398445':{'en': 'Claro'},
'553398446':{'en': 'Claro'},
'553398447':{'en': 'Claro'},
'553398448':{'en': 'Claro'},
'553398449':{'en': 'Claro'},
'553398451':{'en': 'Claro'},
'553398452':{'en': 'Claro'},
'553398453':{'en': 'Claro'},
'553398454':{'en': 'Claro'},
'553398455':{'en': 'Claro'},
'553398456':{'en': 'Claro'},
'5533985':{'en': 'Oi'},
'5533986':{'en': 'Oi'},
'5533987':{'en': 'Oi'},
'5533988':{'en': 'Oi'},
'5533989':{'en': 'Oi'},
'553399101':{'en': 'TIM'},
'553399102':{'en': 'TIM'},
'553399103':{'en': 'TIM'},
'553399104':{'en': 'TIM'},
'553399105':{'en': 'TIM'},
'553399106':{'en': 'TIM'},
'553399107':{'en': 'TIM'},
'553399108':{'en': 'TIM'},
'553399109':{'en': 'TIM'},
'553399111':{'en': 'TIM'},
'553399112':{'en': 'TIM'},
'553399113':{'en': 'TIM'},
'553399114':{'en': 'TIM'},
'553399115':{'en': 'TIM'},
'553399116':{'en': 'TIM'},
'553399117':{'en': 'TIM'},
'553399118':{'en': 'TIM'},
'553399119':{'en': 'TIM'},
'553399121':{'en': 'TIM'},
'553399122':{'en': 'TIM'},
'553399123':{'en': 'TIM'},
'553399124':{'en': 'TIM'},
'553399125':{'en': 'TIM'},
'553399126':{'en': 'TIM'},
'553399127':{'en': 'TIM'},
'553399128':{'en': 'TIM'},
'553399129':{'en': 'TIM'},
'553399136':{'en': 'TIM'},
'553399137':{'en': 'TIM'},
'553399138':{'en': 'TIM'},
'553399139':{'en': 'TIM'},
'553399168':{'en': 'TIM'},
'553399191':{'en': 'TIM'},
'553399193':{'en': 'TIM'},
'553399197':{'en': 'TIM'},
'553399198':{'en': 'TIM'},
'553399199':{'en': 'TIM'},
'553399901':{'en': 'Telemig Celular'},
'553399902':{'en': 'Telemig Celular'},
'553399903':{'en': 'Telemig Celular'},
'553399904':{'en': 'Telemig Celular'},
'553399905':{'en': 'Telemig Celular'},
'553399906':{'en': 'Telemig Celular'},
'553399907':{'en': 'Telemig Celular'},
'553399908':{'en': 'Telemig Celular'},
'553399909':{'en': 'Telemig Celular'},
'553399911':{'en': 'Telemig Celular'},
'553399912':{'en': 'Telemig Celular'},
'553399913':{'en': 'Telemig Celular'},
'553399914':{'en': 'Telemig Celular'},
'553399915':{'en': 'Telemig Celular'},
'553399916':{'en': 'Telemig Celular'},
'553399917':{'en': 'Telemig Celular'},
'553399918':{'en': 'Telemig Celular'},
'553399919':{'en': 'Telemig Celular'},
'553399921':{'en': 'Telemig Celular'},
'553399922':{'en': 'Telemig Celular'},
'553399933':{'en': 'Telemig Celular'},
'553399951':{'en': 'Telemig Celular'},
'553399952':{'en': 'Telemig Celular'},
'553399953':{'en': 'Telemig Celular'},
'553399954':{'en': 'Telemig Celular'},
'553399955':{'en': 'Telemig Celular'},
'553399956':{'en': 'Telemig Celular'},
'553399957':{'en': 'Telemig Celular'},
'553399958':{'en': 'Telemig Celular'},
'553399959':{'en': 'Telemig Celular'},
'553399961':{'en': 'Telemig Celular'},
'553399962':{'en': 'Telemig Celular'},
'553399963':{'en': 'Telemig Celular'},
'553399964':{'en': 'Telemig Celular'},
'553399965':{'en': 'Telemig Celular'},
'553399966':{'en': 'Telemig Celular'},
'553399967':{'en': 'Telemig Celular'},
'553399968':{'en': 'Telemig Celular'},
'553399969':{'en': 'Telemig Celular'},
'553399971':{'en': 'Telemig Celular'},
'553399972':{'en': 'Telemig Celular'},
'553399973':{'en': 'Telemig Celular'},
'553399974':{'en': 'Telemig Celular'},
'553399975':{'en': 'Telemig Celular'},
'553399976':{'en': 'Telemig Celular'},
'553399977':{'en': 'Telemig Celular'},
'553399978':{'en': 'Telemig Celular'},
'553399979':{'en': 'Telemig Celular'},
'55339998':{'en': 'Telemig Celular'},
'55349840':{'en': 'Claro'},
'55349841':{'en': 'Claro'},
'553498420':{'en': 'Claro'},
'553498421':{'en': 'Claro'},
'553498422':{'en': 'Claro'},
'5534985':{'en': 'Oi'},
'5534986':{'en': 'Oi'},
'5534987':{'en': 'Oi'},
'5534988':{'en': 'Oi'},
'5534989':{'en': 'Oi'},
'553499101':{'en': 'TIM'},
'553499102':{'en': 'TIM'},
'553499103':{'en': 'TIM'},
'553499104':{'en': 'TIM'},
'553499105':{'en': 'TIM'},
'553499106':{'en': 'TIM'},
'553499107':{'en': 'TIM'},
'553499108':{'en': 'TIM'},
'553499109':{'en': 'TIM'},
'55349911':{'en': 'TIM'},
'55349912':{'en': 'TIM'},
'55349913':{'en': 'TIM'},
'55349914':{'en': 'TIM'},
'55349915':{'en': 'TIM'},
'55349916':{'en': 'TIM'},
'55349917':{'en': 'TIM'},
'553499181':{'en': 'TIM'},
'553499182':{'en': 'TIM'},
'553499183':{'en': 'TIM'},
'553499184':{'en': 'TIM'},
'553499185':{'en': 'TIM'},
'553499186':{'en': 'TIM'},
'553499187':{'en': 'TIM'},
'553499188':{'en': 'TIM'},
'553499189':{'en': 'TIM'},
'553499191':{'en': 'TIM'},
'553499192':{'en': 'TIM'},
'553499193':{'en': 'TIM'},
'553499194':{'en': 'TIM'},
'553499195':{'en': 'TIM'},
'553499196':{'en': 'TIM'},
'553499197':{'en': 'TIM'},
'553499198':{'en': 'TIM'},
'553499199':{'en': 'TIM'},
'553499202':{'en': 'TIM'},
'553499203':{'en': 'TIM'},
'553499204':{'en': 'TIM'},
'553499205':{'en': 'TIM'},
'553499206':{'en': 'TIM'},
'553499207':{'en': 'TIM'},
'553499208':{'en': 'TIM'},
'553499209':{'en': 'TIM'},
'553499211':{'en': 'TIM'},
'553499212':{'en': 'TIM'},
'553499213':{'en': 'TIM'},
'553499214':{'en': 'TIM'},
'553499215':{'en': 'TIM'},
'553499216':{'en': 'TIM'},
'553499217':{'en': 'TIM'},
'553499218':{'en': 'TIM'},
'553499229':{'en': 'TIM'},
'553499801':{'en': 'Telemig Celular'},
'553499802':{'en': 'Telemig Celular'},
'553499803':{'en': 'Telemig Celular'},
'553499804':{'en': 'Telemig Celular'},
'553499805':{'en': 'Telemig Celular'},
'553499806':{'en': 'Telemig Celular'},
'553499807':{'en': 'Telemig Celular'},
'553499808':{'en': 'Telemig Celular'},
'553499809':{'en': 'Telemig Celular'},
'553499811':{'en': 'Telemig Celular'},
'553499812':{'en': 'Telemig Celular'},
'553499813':{'en': 'Telemig Celular'},
'553499814':{'en': 'Telemig Celular'},
'553499815':{'en': 'Telemig Celular'},
'553499816':{'en': 'Telemig Celular'},
'553499817':{'en': 'Telemig Celular'},
'553499821':{'en': 'Telemig Celular'},
'553499822':{'en': 'Telemig Celular'},
'553499823':{'en': 'Telemig Celular'},
'553499824':{'en': 'Telemig Celular'},
'553499825':{'en': 'Telemig Celular'},
'553499901':{'en': 'Telemig Celular'},
'553499902':{'en': 'Telemig Celular'},
'553499903':{'en': 'Telemig Celular'},
'553499904':{'en': 'Telemig Celular'},
'553499905':{'en': 'Telemig Celular'},
'553499906':{'en': 'Telemig Celular'},
'553499907':{'en': 'Telemig Celular'},
'553499908':{'en': 'Telemig Celular'},
'553499909':{'en': 'Telemig Celular'},
'553499911':{'en': 'Telemig Celular'},
'553499912':{'en': 'Telemig Celular'},
'553499913':{'en': 'Telemig Celular'},
'553499914':{'en': 'Telemig Celular'},
'553499915':{'en': 'Telemig Celular'},
'553499916':{'en': 'Telemig Celular'},
'553499917':{'en': 'Telemig Celular'},
'553499918':{'en': 'Telemig Celular'},
'553499919':{'en': 'Telemig Celular'},
'553499921':{'en': 'Telemig Celular'},
'553499922':{'en': 'Telemig Celular'},
'553499923':{'en': 'Telemig Celular'},
'553499924':{'en': 'Telemig Celular'},
'553499925':{'en': 'Telemig Celular'},
'553499926':{'en': 'Telemig Celular'},
'553499927':{'en': 'Telemig Celular'},
'553499928':{'en': 'Telemig Celular'},
'553499929':{'en': 'Telemig Celular'},
'553499931':{'en': 'Telemig Celular'},
'553499932':{'en': 'Telemig Celular'},
'553499933':{'en': 'Telemig Celular'},
'553499934':{'en': 'Telemig Celular'},
'553499935':{'en': 'Telemig Celular'},
'553499936':{'en': 'Telemig Celular'},
'553499937':{'en': 'Telemig Celular'},
'553499938':{'en': 'Telemig Celular'},
'553499939':{'en': 'Telemig Celular'},
'553499941':{'en': 'Telemig Celular'},
'553499942':{'en': 'Telemig Celular'},
'553499943':{'en': 'Telemig Celular'},
'553499944':{'en': 'Telemig Celular'},
'553499945':{'en': 'Telemig Celular'},
'553499946':{'en': 'Telemig Celular'},
'553499947':{'en': 'Telemig Celular'},
'553499948':{'en': 'Telemig Celular'},
'553499949':{'en': 'Telemig Celular'},
'553499951':{'en': 'Telemig Celular'},
'553499952':{'en': 'Telemig Celular'},
'553499953':{'en': 'Telemig Celular'},
'553499954':{'en': 'Telemig Celular'},
'553499955':{'en': 'Telemig Celular'},
'553499956':{'en': 'Telemig Celular'},
'553499957':{'en': 'Telemig Celular'},
'553499958':{'en': 'Telemig Celular'},
'553499959':{'en': 'Telemig Celular'},
'553499981':{'en': 'Telemig Celular'},
'553499982':{'en': 'Telemig Celular'},
'553499983':{'en': 'Telemig Celular'},
'553499984':{'en': 'Telemig Celular'},
'553499985':{'en': 'Telemig Celular'},
'553499986':{'en': 'Telemig Celular'},
'553499987':{'en': 'Telemig Celular'},
'553499988':{'en': 'Telemig Celular'},
'553499989':{'en': 'Telemig Celular'},
'553598401':{'en': 'Claro'},
'553598402':{'en': 'Claro'},
'553598403':{'en': 'Claro'},
'553598404':{'en': 'Claro'},
'553598405':{'en': 'Claro'},
'553598406':{'en': 'Claro'},
'553598407':{'en': 'Claro'},
'553598408':{'en': 'Claro'},
'553598409':{'en': 'Claro'},
'553598411':{'en': 'Claro'},
'553598412':{'en': 'Claro'},
'553598413':{'en': 'Claro'},
'553598414':{'en': 'Claro'},
'553598415':{'en': 'Claro'},
'553598416':{'en': 'Claro'},
'553598417':{'en': 'Claro'},
'553598418':{'en': 'Claro'},
'553598419':{'en': 'Claro'},
'553598421':{'en': 'Claro'},
'553598422':{'en': 'Claro'},
'553598423':{'en': 'Claro'},
'553598424':{'en': 'Claro'},
'553598425':{'en': 'Claro'},
'553598426':{'en': 'Claro'},
'553598427':{'en': 'Claro'},
'553598428':{'en': 'Claro'},
'553598429':{'en': 'Claro'},
'553598431':{'en': 'Claro'},
'553598432':{'en': 'Claro'},
'553598433':{'en': 'Claro'},
'553598434':{'en': 'Claro'},
'553598435':{'en': 'Claro'},
'553598436':{'en': 'Claro'},
'553598437':{'en': 'Claro'},
'553598438':{'en': 'Claro'},
'553598439':{'en': 'Claro'},
'553598441':{'en': 'Claro'},
'553598442':{'en': 'Claro'},
'553598443':{'en': 'Claro'},
'553598444':{'en': 'Claro'},
'553598445':{'en': 'Claro'},
'553598446':{'en': 'Claro'},
'553598447':{'en': 'Claro'},
'553598448':{'en': 'Claro'},
'553598449':{'en': 'Claro'},
'553598451':{'en': 'Claro'},
'553598452':{'en': 'Claro'},
'553598453':{'en': 'Claro'},
'553598454':{'en': 'Claro'},
'553598455':{'en': 'Claro'},
'553598456':{'en': 'Claro'},
'553598457':{'en': 'Claro'},
'553598458':{'en': 'Claro'},
'553598459':{'en': 'Claro'},
'553598461':{'en': 'Claro'},
'553598462':{'en': 'Claro'},
'553598463':{'en': 'Claro'},
'553598464':{'en': 'Claro'},
'553598465':{'en': 'Claro'},
'553598466':{'en': 'Claro'},
'553598467':{'en': 'Claro'},
'553598468':{'en': 'Claro'},
'553598469':{'en': 'Claro'},
'553598471':{'en': 'Claro'},
'553598472':{'en': 'Claro'},
'553598473':{'en': 'Claro'},
'553598474':{'en': 'Claro'},
'553598475':{'en': 'Claro'},
'553598476':{'en': 'Claro'},
'553598477':{'en': 'Claro'},
'553598478':{'en': 'Claro'},
'553598479':{'en': 'Claro'},
'5535985':{'en': 'Oi'},
'5535986':{'en': 'Oi'},
'5535987':{'en': 'Oi'},
'5535988':{'en': 'Oi'},
'5535989':{'en': 'Oi'},
'553599101':{'en': 'TIM'},
'553599102':{'en': 'TIM'},
'553599103':{'en': 'TIM'},
'553599104':{'en': 'TIM'},
'553599105':{'en': 'TIM'},
'553599106':{'en': 'TIM'},
'553599107':{'en': 'TIM'},
'553599108':{'en': 'TIM'},
'553599109':{'en': 'TIM'},
'553599111':{'en': 'TIM'},
'553599112':{'en': 'TIM'},
'553599113':{'en': 'TIM'},
'553599114':{'en': 'TIM'},
'553599115':{'en': 'TIM'},
'553599116':{'en': 'TIM'},
'553599117':{'en': 'TIM'},
'553599118':{'en': 'TIM'},
'553599119':{'en': 'TIM'},
'553599121':{'en': 'TIM'},
'553599122':{'en': 'TIM'},
'553599123':{'en': 'TIM'},
'553599124':{'en': 'TIM'},
'553599125':{'en': 'TIM'},
'553599126':{'en': 'TIM'},
'553599127':{'en': 'TIM'},
'553599128':{'en': 'TIM'},
'553599129':{'en': 'TIM'},
'553599131':{'en': 'TIM'},
'553599132':{'en': 'TIM'},
'553599133':{'en': 'TIM'},
'553599134':{'en': 'TIM'},
'553599135':{'en': 'TIM'},
'553599136':{'en': 'TIM'},
'553599137':{'en': 'TIM'},
'553599138':{'en': 'TIM'},
'553599139':{'en': 'TIM'},
'553599141':{'en': 'TIM'},
'553599142':{'en': 'TIM'},
'553599143':{'en': 'TIM'},
'553599144':{'en': 'TIM'},
'553599145':{'en': 'TIM'},
'553599146':{'en': 'TIM'},
'553599147':{'en': 'TIM'},
'553599148':{'en': 'TIM'},
'553599149':{'en': 'TIM'},
'553599151':{'en': 'TIM'},
'553599152':{'en': 'TIM'},
'553599153':{'en': 'TIM'},
'553599154':{'en': 'TIM'},
'553599155':{'en': 'TIM'},
'553599156':{'en': 'TIM'},
'553599157':{'en': 'TIM'},
'553599158':{'en': 'TIM'},
'553599159':{'en': 'TIM'},
'553599161':{'en': 'TIM'},
'553599162':{'en': 'TIM'},
'553599163':{'en': 'TIM'},
'553599164':{'en': 'TIM'},
'553599165':{'en': 'TIM'},
'553599166':{'en': 'TIM'},
'553599167':{'en': 'TIM'},
'553599168':{'en': 'TIM'},
'553599169':{'en': 'TIM'},
'553599171':{'en': 'TIM'},
'553599172':{'en': 'TIM'},
'553599173':{'en': 'TIM'},
'553599174':{'en': 'TIM'},
'553599175':{'en': 'TIM'},
'553599176':{'en': 'TIM'},
'553599177':{'en': 'TIM'},
'553599178':{'en': 'TIM'},
'553599179':{'en': 'TIM'},
'553599181':{'en': 'TIM'},
'553599187':{'en': 'TIM'},
'553599188':{'en': 'TIM'},
'553599191':{'en': 'TIM'},
'553599192':{'en': 'TIM'},
'553599193':{'en': 'TIM'},
'553599197':{'en': 'TIM'},
'553599198':{'en': 'TIM'},
'553599199':{'en': 'TIM'},
'553599801':{'en': 'Telemig Celular'},
'553599802':{'en': 'Telemig Celular'},
'553599803':{'en': 'Telemig Celular'},
'553599804':{'en': 'Telemig Celular'},
'553599805':{'en': 'Telemig Celular'},
'553599806':{'en': 'Telemig Celular'},
'553599807':{'en': 'Telemig Celular'},
'553599808':{'en': 'Telemig Celular'},
'553599809':{'en': 'Telemig Celular'},
'553599811':{'en': 'Telemig Celular'},
'553599812':{'en': 'Telemig Celular'},
'553599813':{'en': 'Telemig Celular'},
'553599814':{'en': 'Telemig Celular'},
'553599815':{'en': 'Telemig Celular'},
'553599816':{'en': 'Telemig Celular'},
'553599817':{'en': 'Telemig Celular'},
'553599818':{'en': 'Telemig Celular'},
'553599819':{'en': 'Telemig Celular'},
'553599821':{'en': 'Telemig Celular'},
'553599822':{'en': 'Telemig Celular'},
'553599823':{'en': 'Telemig Celular'},
'553599824':{'en': 'Telemig Celular'},
'553599825':{'en': 'Telemig Celular'},
'553599826':{'en': 'Telemig Celular'},
'553599827':{'en': 'Telemig Celular'},
'553599828':{'en': 'Telemig Celular'},
'553599829':{'en': 'Telemig Celular'},
'553599831':{'en': 'Telemig Celular'},
'553599832':{'en': 'Telemig Celular'},
'553599833':{'en': 'Telemig Celular'},
'553599834':{'en': 'Telemig Celular'},
'553599835':{'en': 'Telemig Celular'},
'553599836':{'en': 'Telemig Celular'},
'553599837':{'en': 'Telemig Celular'},
'553599838':{'en': 'Telemig Celular'},
'553599839':{'en': 'Telemig Celular'},
'553599841':{'en': 'Telemig Celular'},
'553599842':{'en': 'Telemig Celular'},
'553599843':{'en': 'Telemig Celular'},
'553599844':{'en': 'Telemig Celular'},
'553599845':{'en': 'Telemig Celular'},
'553599846':{'en': 'Telemig Celular'},
'553599901':{'en': 'Telemig Celular'},
'553599902':{'en': 'Telemig Celular'},
'553599903':{'en': 'Telemig Celular'},
'553599904':{'en': 'Telemig Celular'},
'553599905':{'en': 'Telemig Celular'},
'553599906':{'en': 'Telemig Celular'},
'553599907':{'en': 'Telemig Celular'},
'553599908':{'en': 'Telemig Celular'},
'553599911':{'en': 'Telemig Celular'},
'553599912':{'en': 'Telemig Celular'},
'553599913':{'en': 'Telemig Celular'},
'553599914':{'en': 'Telemig Celular'},
'553599915':{'en': 'Telemig Celular'},
'553599916':{'en': 'Telemig Celular'},
'553599917':{'en': 'Telemig Celular'},
'553599918':{'en': 'Telemig Celular'},
'553599919':{'en': 'Telemig Celular'},
'553599921':{'en': 'Telemig Celular'},
'553599922':{'en': 'Telemig Celular'},
'553599923':{'en': 'Telemig Celular'},
'553599924':{'en': 'Telemig Celular'},
'553599925':{'en': 'Telemig Celular'},
'553599926':{'en': 'Telemig Celular'},
'553599927':{'en': 'Telemig Celular'},
'553599928':{'en': 'Telemig Celular'},
'553599929':{'en': 'Telemig Celular'},
'553599931':{'en': 'Telemig Celular'},
'553599932':{'en': 'Telemig Celular'},
'553599933':{'en': 'Telemig Celular'},
'553599934':{'en': 'Telemig Celular'},
'553599935':{'en': 'Telemig Celular'},
'553599936':{'en': 'Telemig Celular'},
'553599937':{'en': 'Telemig Celular'},
'553599938':{'en': 'Telemig Celular'},
'553599939':{'en': 'Telemig Celular'},
'553599941':{'en': 'Telemig Celular'},
'553599942':{'en': 'Telemig Celular'},
'553599943':{'en': 'Telemig Celular'},
'553599944':{'en': 'Telemig Celular'},
'553599945':{'en': 'Telemig Celular'},
'553599946':{'en': 'Telemig Celular'},
'553599947':{'en': 'Telemig Celular'},
'553599948':{'en': 'Telemig Celular'},
'553599949':{'en': 'Telemig Celular'},
'553599951':{'en': 'Telemig Celular'},
'553599952':{'en': 'Telemig Celular'},
'553599953':{'en': 'Telemig Celular'},
'553599954':{'en': 'Telemig Celular'},
'553599955':{'en': 'Telemig Celular'},
'553599956':{'en': 'Telemig Celular'},
'553599957':{'en': 'Telemig Celular'},
'553599958':{'en': 'Telemig Celular'},
'553599959':{'en': 'Telemig Celular'},
'55359996':{'en': 'Telemig Celular'},
'55359997':{'en': 'Telemig Celular'},
'55359998':{'en': 'Telemig Celular'},
'553599992':{'en': 'Telemig Celular'},
'553599993':{'en': 'Telemig Celular'},
'553599994':{'en': 'Telemig Celular'},
'553599995':{'en': 'Telemig Celular'},
'553599996':{'en': 'Telemig Celular'},
'553599997':{'en': 'Telemig Celular'},
'553798401':{'en': 'Claro'},
'553798402':{'en': 'Claro'},
'553798403':{'en': 'Claro'},
'553798404':{'en': 'Claro'},
'553798405':{'en': 'Claro'},
'553798406':{'en': 'Claro'},
'553798407':{'en': 'Claro'},
'553798408':{'en': 'Claro'},
'553798409':{'en': 'Claro'},
'553798411':{'en': 'Claro'},
'553798412':{'en': 'Claro'},
'553798413':{'en': 'Claro'},
'553798414':{'en': 'Claro'},
'553798415':{'en': 'Claro'},
'553798416':{'en': 'Claro'},
'553798417':{'en': 'Claro'},
'553798418':{'en': 'Claro'},
'553798419':{'en': 'Claro'},
'553798421':{'en': 'Claro'},
'553798422':{'en': 'Claro'},
'553798423':{'en': 'Claro'},
'553798424':{'en': 'Claro'},
'553798425':{'en': 'Claro'},
'553798426':{'en': 'Claro'},
'5537985':{'en': 'Oi'},
'5537986':{'en': 'Oi'},
'5537987':{'en': 'Oi'},
'5537988':{'en': 'Oi'},
'5537989':{'en': 'Oi'},
'553799101':{'en': 'TIM'},
'553799102':{'en': 'TIM'},
'553799103':{'en': 'TIM'},
'553799104':{'en': 'TIM'},
'553799105':{'en': 'TIM'},
'553799106':{'en': 'TIM'},
'553799107':{'en': 'TIM'},
'553799108':{'en': 'TIM'},
'553799109':{'en': 'TIM'},
'553799111':{'en': 'TIM'},
'553799112':{'en': 'TIM'},
'553799113':{'en': 'TIM'},
'553799114':{'en': 'TIM'},
'553799115':{'en': 'TIM'},
'553799116':{'en': 'TIM'},
'553799117':{'en': 'TIM'},
'553799118':{'en': 'TIM'},
'553799119':{'en': 'TIM'},
'553799121':{'en': 'TIM'},
'553799122':{'en': 'TIM'},
'553799123':{'en': 'TIM'},
'553799124':{'en': 'TIM'},
'553799125':{'en': 'TIM'},
'553799126':{'en': 'TIM'},
'553799127':{'en': 'TIM'},
'553799128':{'en': 'TIM'},
'553799129':{'en': 'TIM'},
'553799131':{'en': 'TIM'},
'553799132':{'en': 'TIM'},
'553799133':{'en': 'TIM'},
'553799134':{'en': 'TIM'},
'553799135':{'en': 'TIM'},
'553799136':{'en': 'TIM'},
'553799137':{'en': 'TIM'},
'553799138':{'en': 'TIM'},
'553799139':{'en': 'TIM'},
'553799141':{'en': 'TIM'},
'553799143':{'en': 'TIM'},
'553799144':{'en': 'TIM'},
'553799145':{'en': 'TIM'},
'553799146':{'en': 'TIM'},
'553799147':{'en': 'TIM'},
'553799154':{'en': 'TIM'},
'553799186':{'en': 'TIM'},
'553799191':{'en': 'TIM'},
'553799192':{'en': 'TIM'},
'553799193':{'en': 'TIM'},
'553799194':{'en': 'TIM'},
'553799197':{'en': 'TIM'},
'553799198':{'en': 'TIM'},
'553799199':{'en': 'TIM'},
'553799801':{'en': 'Telemig Celular'},
'553799802':{'en': 'Telemig Celular'},
'553799803':{'en': 'Telemig Celular'},
'553799804':{'en': 'Telemig Celular'},
'553799805':{'en': 'Telemig Celular'},
'553799901':{'en': 'Telemig Celular'},
'553799902':{'en': 'Telemig Celular'},
'553799903':{'en': 'Telemig Celular'},
'553799904':{'en': 'Telemig Celular'},
'553799905':{'en': 'Telemig Celular'},
'553799906':{'en': 'Telemig Celular'},
'553799907':{'en': 'Telemig Celular'},
'553799908':{'en': 'Telemig Celular'},
'553799909':{'en': 'Telemig Celular'},
'553799911':{'en': 'Telemig Celular'},
'553799912':{'en': 'Telemig Celular'},
'553799913':{'en': 'Telemig Celular'},
'553799914':{'en': 'Telemig Celular'},
'553799915':{'en': 'Telemig Celular'},
'553799916':{'en': 'Telemig Celular'},
'553799917':{'en': 'Telemig Celular'},
'553799918':{'en': 'Telemig Celular'},
'553799919':{'en': 'Telemig Celular'},
'553799921':{'en': 'Telemig Celular'},
'553799922':{'en': 'Telemig Celular'},
'553799923':{'en': 'Telemig Celular'},
'553799924':{'en': 'Telemig Celular'},
'553799925':{'en': 'Telemig Celular'},
'553799926':{'en': 'Telemig Celular'},
'553799927':{'en': 'Telemig Celular'},
'553799928':{'en': 'Telemig Celular'},
'553799929':{'en': 'Telemig Celular'},
'553799931':{'en': 'Telemig Celular'},
'553799932':{'en': 'Telemig Celular'},
'553799933':{'en': 'Telemig Celular'},
'553799934':{'en': 'Telemig Celular'},
'553799935':{'en': 'Telemig Celular'},
'553799936':{'en': 'Telemig Celular'},
'553799937':{'en': 'Telemig Celular'},
'553799938':{'en': 'Telemig Celular'},
'553799939':{'en': 'Telemig Celular'},
'553799941':{'en': 'Telemig Celular'},
'553799942':{'en': 'Telemig Celular'},
'553799943':{'en': 'Telemig Celular'},
'553799944':{'en': 'Telemig Celular'},
'553799945':{'en': 'Telemig Celular'},
'553799946':{'en': 'Telemig Celular'},
'553799947':{'en': 'Telemig Celular'},
'553799948':{'en': 'Telemig Celular'},
'553799949':{'en': 'Telemig Celular'},
'553799951':{'en': 'Telemig Celular'},
'553799952':{'en': 'Telemig Celular'},
'553799953':{'en': 'Telemig Celular'},
'553799954':{'en': 'Telemig Celular'},
'553799955':{'en': 'Telemig Celular'},
'553799956':{'en': 'Telemig Celular'},
'553799957':{'en': 'Telemig Celular'},
'553799958':{'en': 'Telemig Celular'},
'553799959':{'en': 'Telemig Celular'},
'553799961':{'en': 'Telemig Celular'},
'553799962':{'en': 'Telemig Celular'},
'553799963':{'en': 'Telemig Celular'},
'553799964':{'en': 'Telemig Celular'},
'553799965':{'en': 'Telemig Celular'},
'553799966':{'en': 'Telemig Celular'},
'553799967':{'en': 'Telemig Celular'},
'553799968':{'en': 'Telemig Celular'},
'553799969':{'en': 'Telemig Celular'},
'55379998':{'en': 'Telemig Celular'},
'553799991':{'en': 'Telemig Celular'},
'553799992':{'en': 'Telemig Celular'},
'553799993':{'en': 'Telemig Celular'},
'553799994':{'en': 'Telemig Celular'},
'553799995':{'en': 'Telemig Celular'},
'553799996':{'en': 'Telemig Celular'},
'553799997':{'en': 'Telemig Celular'},
'553799998':{'en': 'Telemig Celular'},
'553799999':{'en': 'Telemig Celular'},
'553898401':{'en': 'Claro'},
'553898402':{'en': 'Claro'},
'553898403':{'en': 'Claro'},
'553898404':{'en': 'Claro'},
'553898405':{'en': 'Claro'},
'553898406':{'en': 'Claro'},
'553898407':{'en': 'Claro'},
'553898408':{'en': 'Claro'},
'553898409':{'en': 'Claro'},
'553898411':{'en': 'Claro'},
'553898412':{'en': 'Claro'},
'553898413':{'en': 'Claro'},
'553898414':{'en': 'Claro'},
'553898415':{'en': 'Claro'},
'553898416':{'en': 'Claro'},
'553898417':{'en': 'Claro'},
'553898418':{'en': 'Claro'},
'553898419':{'en': 'Claro'},
'553898421':{'en': 'Claro'},
'553898422':{'en': 'Claro'},
'553898423':{'en': 'Claro'},
'553898424':{'en': 'Claro'},
'553898425':{'en': 'Claro'},
'553898426':{'en': 'Claro'},
'553898427':{'en': 'Claro'},
'553898428':{'en': 'Claro'},
'553898429':{'en': 'Claro'},
'553898431':{'en': 'Claro'},
'553898432':{'en': 'Claro'},
'553898433':{'en': 'Claro'},
'553898434':{'en': 'Claro'},
'553898435':{'en': 'Claro'},
'553898436':{'en': 'Claro'},
'553898437':{'en': 'Claro'},
'5538985':{'en': 'Oi'},
'5538986':{'en': 'Oi'},
'5538987':{'en': 'Oi'},
'5538988':{'en': 'Oi'},
'5538989':{'en': 'Oi'},
'553899101':{'en': 'TIM'},
'553899102':{'en': 'TIM'},
'553899103':{'en': 'TIM'},
'553899104':{'en': 'TIM'},
'553899105':{'en': 'TIM'},
'553899106':{'en': 'TIM'},
'553899107':{'en': 'TIM'},
'553899108':{'en': 'TIM'},
'553899109':{'en': 'TIM'},
'553899111':{'en': 'TIM'},
'553899112':{'en': 'TIM'},
'553899113':{'en': 'TIM'},
'553899114':{'en': 'TIM'},
'553899115':{'en': 'TIM'},
'553899116':{'en': 'TIM'},
'553899117':{'en': 'TIM'},
'553899118':{'en': 'TIM'},
'553899119':{'en': 'TIM'},
'553899121':{'en': 'TIM'},
'553899122':{'en': 'TIM'},
'553899123':{'en': 'TIM'},
'553899124':{'en': 'TIM'},
'553899125':{'en': 'TIM'},
'553899126':{'en': 'TIM'},
'553899127':{'en': 'TIM'},
'553899128':{'en': 'TIM'},
'553899129':{'en': 'TIM'},
'553899131':{'en': 'TIM'},
'553899132':{'en': 'TIM'},
'553899133':{'en': 'TIM'},
'553899134':{'en': 'TIM'},
'553899135':{'en': 'TIM'},
'553899136':{'en': 'TIM'},
'553899137':{'en': 'TIM'},
'553899138':{'en': 'TIM'},
'553899139':{'en': 'TIM'},
'553899141':{'en': 'TIM'},
'553899142':{'en': 'TIM'},
'553899143':{'en': 'TIM'},
'553899144':{'en': 'TIM'},
'553899145':{'en': 'TIM'},
'553899146':{'en': 'TIM'},
'553899147':{'en': 'TIM'},
'553899148':{'en': 'TIM'},
'553899149':{'en': 'TIM'},
'553899151':{'en': 'TIM'},
'553899152':{'en': 'TIM'},
'553899153':{'en': 'TIM'},
'553899154':{'en': 'TIM'},
'553899155':{'en': 'TIM'},
'553899156':{'en': 'TIM'},
'553899157':{'en': 'TIM'},
'553899158':{'en': 'TIM'},
'553899159':{'en': 'TIM'},
'553899161':{'en': 'TIM'},
'553899162':{'en': 'TIM'},
'553899163':{'en': 'TIM'},
'553899164':{'en': 'TIM'},
'553899165':{'en': 'TIM'},
'553899166':{'en': 'TIM'},
'553899167':{'en': 'TIM'},
'553899168':{'en': 'TIM'},
'553899169':{'en': 'TIM'},
'553899171':{'en': 'TIM'},
'553899172':{'en': 'TIM'},
'553899173':{'en': 'TIM'},
'553899174':{'en': 'TIM'},
'553899175':{'en': 'TIM'},
'553899176':{'en': 'TIM'},
'553899177':{'en': 'TIM'},
'553899178':{'en': 'TIM'},
'553899179':{'en': 'TIM'},
'553899181':{'en': 'TIM'},
'553899182':{'en': 'TIM'},
'553899183':{'en': 'TIM'},
'553899184':{'en': 'TIM'},
'553899191':{'en': 'TIM'},
'553899192':{'en': 'TIM'},
'553899193':{'en': 'TIM'},
'553899194':{'en': 'TIM'},
'553899195':{'en': 'TIM'},
'553899196':{'en': 'TIM'},
'553899197':{'en': 'TIM'},
'553899801':{'en': 'Telemig Celular'},
'553899802':{'en': 'Telemig Celular'},
'553899803':{'en': 'Telemig Celular'},
'553899804':{'en': 'Telemig Celular'},
'553899805':{'en': 'Telemig Celular'},
'553899806':{'en': 'Telemig Celular'},
'553899807':{'en': 'Telemig Celular'},
'553899808':{'en': 'Telemig Celular'},
'553899809':{'en': 'Telemig Celular'},
'553899810':{'en': 'Telemig Celular'},
'553899901':{'en': 'Telemig Celular'},
'553899902':{'en': 'Telemig Celular'},
'553899903':{'en': 'Telemig Celular'},
'553899904':{'en': 'Telemig Celular'},
'553899905':{'en': 'Telemig Celular'},
'553899906':{'en': 'Telemig Celular'},
'553899907':{'en': 'Telemig Celular'},
'553899908':{'en': 'Telemig Celular'},
'553899909':{'en': 'Telemig Celular'},
'55389991':{'en': 'Telemig Celular'},
'55389992':{'en': 'Telemig Celular'},
'55389993':{'en': 'Telemig Celular'},
'55389994':{'en': 'Telemig Celular'},
'55389995':{'en': 'Telemig Celular'},
'55389996':{'en': 'Telemig Celular'},
'55389997':{'en': 'Telemig Celular'},
'55389998':{'en': 'Telemig Celular'},
'553899990':{'en': 'Telemig Celular'},
'553899991':{'en': 'Telemig Celular'},
'553899992':{'en': 'Telemig Celular'},
'553899993':{'en': 'Telemig Celular'},
'553899995':{'en': 'Telemig Celular'},
'553899996':{'en': 'Telemig Celular'},
'553899997':{'en': 'Telemig Celular'},
'553899998':{'en': 'Telemig Celular'},
'553899999':{'en': 'Telemig Celular'},
'554198401':{'en': 'Brasil Telecom GSM'},
'554198402':{'en': 'Brasil Telecom GSM'},
'554198403':{'en': 'Brasil Telecom GSM'},
'554198404':{'en': 'Brasil Telecom GSM'},
'554198405':{'en': 'Brasil Telecom GSM'},
'554198406':{'en': 'Brasil Telecom GSM'},
'554198407':{'en': 'Brasil Telecom GSM'},
'554198408':{'en': 'Brasil Telecom GSM'},
'554198409':{'en': 'Brasil Telecom GSM'},
'554198411':{'en': 'Brasil Telecom GSM'},
'554198412':{'en': 'Brasil Telecom GSM'},
'554198413':{'en': 'Brasil Telecom GSM'},
'554198414':{'en': 'Brasil Telecom GSM'},
'554198415':{'en': 'Brasil Telecom GSM'},
'554198416':{'en': 'Brasil Telecom GSM'},
'554198417':{'en': 'Brasil Telecom GSM'},
'554198418':{'en': 'Brasil Telecom GSM'},
'554198419':{'en': 'Brasil Telecom GSM'},
'554198421':{'en': 'Brasil Telecom GSM'},
'554198422':{'en': 'Brasil Telecom GSM'},
'554198423':{'en': 'Brasil Telecom GSM'},
'554198424':{'en': 'Brasil Telecom GSM'},
'554198425':{'en': 'Brasil Telecom GSM'},
'554198426':{'en': 'Brasil Telecom GSM'},
'554198427':{'en': 'Brasil Telecom GSM'},
'554198428':{'en': 'Brasil Telecom GSM'},
'554198429':{'en': 'Brasil Telecom GSM'},
'554198431':{'en': 'Brasil Telecom GSM'},
'554198432':{'en': 'Brasil Telecom GSM'},
'554198433':{'en': 'Brasil Telecom GSM'},
'554198434':{'en': 'Brasil Telecom GSM'},
'554198435':{'en': 'Brasil Telecom GSM'},
'554198436':{'en': 'Brasil Telecom GSM'},
'554198437':{'en': 'Brasil Telecom GSM'},
'554198438':{'en': 'Brasil Telecom GSM'},
'554198439':{'en': 'Brasil Telecom GSM'},
'554198441':{'en': 'Brasil Telecom GSM'},
'554198442':{'en': 'Brasil Telecom GSM'},
'554198443':{'en': 'Brasil Telecom GSM'},
'554198444':{'en': 'Brasil Telecom GSM'},
'554198445':{'en': 'Brasil Telecom GSM'},
'554198446':{'en': 'Brasil Telecom GSM'},
'554198447':{'en': 'Brasil Telecom GSM'},
'554198448':{'en': 'Brasil Telecom GSM'},
'554198449':{'en': 'Brasil Telecom GSM'},
'554198451':{'en': 'Brasil Telecom GSM'},
'554198452':{'en': 'Brasil Telecom GSM'},
'554198453':{'en': 'Brasil Telecom GSM'},
'554198454':{'en': 'Brasil Telecom GSM'},
'554198455':{'en': 'Brasil Telecom GSM'},
'554198456':{'en': 'Brasil Telecom GSM'},
'554198457':{'en': 'Brasil Telecom GSM'},
'554198458':{'en': 'Brasil Telecom GSM'},
'554198459':{'en': 'Brasil Telecom GSM'},
'554198461':{'en': 'Brasil Telecom GSM'},
'554198462':{'en': 'Brasil Telecom GSM'},
'554198463':{'en': 'Brasil Telecom GSM'},
'554198464':{'en': 'Brasil Telecom GSM'},
'554198465':{'en': 'Brasil Telecom GSM'},
'554198466':{'en': 'Brasil Telecom GSM'},
'554198467':{'en': 'Brasil Telecom GSM'},
'554198468':{'en': 'Brasil Telecom GSM'},
'554198469':{'en': 'Brasil Telecom GSM'},
'554198471':{'en': 'Brasil Telecom GSM'},
'554198472':{'en': 'Brasil Telecom GSM'},
'554198473':{'en': 'Brasil Telecom GSM'},
'554198474':{'en': 'Brasil Telecom GSM'},
'554198475':{'en': 'Brasil Telecom GSM'},
'554198476':{'en': 'Brasil Telecom GSM'},
'554198477':{'en': 'Brasil Telecom GSM'},
'554198478':{'en': 'Brasil Telecom GSM'},
'55419870':{'en': 'Claro'},
'55419871':{'en': 'Claro'},
'55419872':{'en': 'Claro'},
'55419873':{'en': 'Claro'},
'55419874':{'en': 'Claro'},
'554198750':{'en': 'Claro'},
'554198751':{'en': 'Claro'},
'554198752':{'en': 'Claro'},
'5541988':{'en': 'Claro'},
'55419910':{'en': 'Vivo'},
'554199111':{'en': 'Vivo'},
'554199112':{'en': 'Vivo'},
'554199113':{'en': 'Vivo'},
'554199114':{'en': 'Vivo'},
'554199115':{'en': 'Vivo'},
'554199116':{'en': 'Vivo'},
'554199117':{'en': 'Vivo'},
'554199118':{'en': 'Vivo'},
'554199119':{'en': 'Vivo'},
'554199121':{'en': 'Vivo'},
'554199122':{'en': 'Vivo'},
'554199123':{'en': 'Vivo'},
'554199124':{'en': 'Vivo'},
'554199125':{'en': 'Vivo'},
'554199126':{'en': 'Vivo'},
'554199127':{'en': 'Vivo'},
'554199128':{'en': 'Vivo'},
'554199129':{'en': 'Vivo'},
'554199131':{'en': 'Vivo'},
'554199132':{'en': 'Vivo'},
'554199133':{'en': 'Vivo'},
'554199134':{'en': 'Vivo'},
'554199135':{'en': 'Vivo'},
'554199136':{'en': 'Vivo'},
'554199137':{'en': 'Vivo'},
'554199138':{'en': 'Vivo'},
'554199139':{'en': 'Vivo'},
'554199141':{'en': 'Vivo'},
'554199142':{'en': 'Vivo'},
'554199143':{'en': 'Vivo'},
'554199144':{'en': 'Vivo'},
'554199145':{'en': 'Vivo'},
'554199146':{'en': 'Vivo'},
'554199147':{'en': 'Vivo'},
'554199148':{'en': 'Vivo'},
'554199149':{'en': 'Vivo'},
'554199151':{'en': 'Vivo'},
'554199152':{'en': 'Vivo'},
'554199153':{'en': 'Vivo'},
'554199154':{'en': 'Vivo'},
'554199155':{'en': 'Vivo'},
'554199156':{'en': 'Vivo'},
'554199157':{'en': 'Vivo'},
'554199158':{'en': 'Vivo'},
'554199159':{'en': 'Vivo'},
'554199161':{'en': 'Vivo'},
'554199162':{'en': 'Vivo'},
'554199163':{'en': 'Vivo'},
'554199164':{'en': 'Vivo'},
'554199165':{'en': 'Vivo'},
'554199166':{'en': 'Vivo'},
'554199167':{'en': 'Vivo'},
'554199168':{'en': 'Vivo'},
'554199169':{'en': 'Vivo'},
'554199171':{'en': 'Vivo'},
'554199172':{'en': 'Vivo'},
'554199173':{'en': 'Vivo'},
'554199174':{'en': 'Vivo'},
'554199175':{'en': 'Vivo'},
'554199176':{'en': 'Vivo'},
'554199177':{'en': 'Vivo'},
'554199178':{'en': 'Vivo'},
'554199179':{'en': 'Vivo'},
'554199181':{'en': 'Vivo'},
'554199182':{'en': 'Vivo'},
'554199183':{'en': 'Vivo'},
'554199184':{'en': 'Vivo'},
'554199185':{'en': 'Vivo'},
'554199186':{'en': 'Vivo'},
'554199187':{'en': 'Vivo'},
'554199188':{'en': 'Vivo'},
'554199189':{'en': 'Vivo'},
'554199191':{'en': 'Vivo'},
'554199192':{'en': 'Vivo'},
'554199193':{'en': 'Vivo'},
'554199194':{'en': 'Vivo'},
'554199195':{'en': 'Vivo'},
'554199196':{'en': 'Vivo'},
'554199197':{'en': 'Vivo'},
'554199198':{'en': 'Vivo'},
'554199199':{'en': 'Vivo'},
'554199201':{'en': 'Vivo'},
'554199202':{'en': 'Vivo'},
'554199203':{'en': 'Vivo'},
'554199204':{'en': 'Vivo'},
'554199205':{'en': 'Vivo'},
'554199206':{'en': 'Vivo'},
'554199207':{'en': 'Vivo'},
'554199208':{'en': 'Vivo'},
'554199209':{'en': 'Vivo'},
'554199211':{'en': 'Vivo'},
'554199212':{'en': 'Vivo'},
'554199213':{'en': 'Vivo'},
'554199214':{'en': 'Vivo'},
'554199215':{'en': 'Vivo'},
'554199216':{'en': 'Vivo'},
'554199217':{'en': 'Vivo'},
'554199218':{'en': 'Vivo'},
'554199219':{'en': 'Vivo'},
'554199221':{'en': 'Vivo'},
'554199222':{'en': 'Vivo'},
'554199223':{'en': 'Vivo'},
'554199224':{'en': 'Vivo'},
'554199225':{'en': 'Vivo'},
'554199226':{'en': 'Vivo'},
'554199227':{'en': 'Vivo'},
'554199228':{'en': 'Vivo'},
'554199229':{'en': 'Vivo'},
'554199231':{'en': 'Vivo'},
'554199232':{'en': 'Vivo'},
'554199233':{'en': 'Vivo'},
'554199234':{'en': 'Vivo'},
'554199235':{'en': 'Vivo'},
'554199236':{'en': 'Vivo'},
'554199237':{'en': 'Vivo'},
'554199238':{'en': 'Vivo'},
'554199239':{'en': 'Vivo'},
'554199241':{'en': 'Vivo'},
'554199242':{'en': 'Vivo'},
'554199243':{'en': 'Vivo'},
'554199244':{'en': 'Vivo'},
'554199245':{'en': 'Vivo'},
'554199246':{'en': 'Vivo'},
'554199247':{'en': 'Vivo'},
'554199248':{'en': 'Vivo'},
'554199249':{'en': 'Vivo'},
'554199251':{'en': 'Vivo'},
'554199252':{'en': 'Vivo'},
'554199253':{'en': 'Vivo'},
'554199254':{'en': 'Vivo'},
'554199255':{'en': 'Vivo'},
'554199256':{'en': 'Vivo'},
'554199257':{'en': 'Vivo'},
'554199258':{'en': 'Vivo'},
'554199259':{'en': 'Vivo'},
'554199261':{'en': 'Vivo'},
'554199262':{'en': 'Vivo'},
'554199263':{'en': 'Vivo'},
'554199264':{'en': 'Vivo'},
'554199265':{'en': 'Vivo'},
'554199266':{'en': 'Vivo'},
'5541996':{'en': 'TIM'},
'554199800':{'en': 'TIM'},
'554199801':{'en': 'TIM'},
'554199802':{'en': 'TIM'},
'554199803':{'en': 'TIM'},
'554199804':{'en': 'TIM'},
'5541999':{'en': 'TIM'},
'554298401':{'en': 'Brasil Telecom GSM'},
'554298402':{'en': 'Brasil Telecom GSM'},
'554298403':{'en': 'Brasil Telecom GSM'},
'554298404':{'en': 'Brasil Telecom GSM'},
'554298405':{'en': 'Brasil Telecom GSM'},
'554298406':{'en': 'Brasil Telecom GSM'},
'554298407':{'en': 'Brasil Telecom GSM'},
'554298408':{'en': 'Brasil Telecom GSM'},
'554298409':{'en': 'Brasil Telecom GSM'},
'554298411':{'en': 'Brasil Telecom GSM'},
'554298412':{'en': 'Brasil Telecom GSM'},
'554298413':{'en': 'Brasil Telecom GSM'},
'554298414':{'en': 'Brasil Telecom GSM'},
'554298415':{'en': 'Brasil Telecom GSM'},
'554298416':{'en': 'Brasil Telecom GSM'},
'554298417':{'en': 'Brasil Telecom GSM'},
'55429880':{'en': 'Claro'},
'55429881':{'en': 'Claro'},
'55429882':{'en': 'Claro'},
'55429883':{'en': 'Claro'},
'55429884':{'en': 'Claro'},
'55429885':{'en': 'Claro'},
'55429886':{'en': 'Claro'},
'554298870':{'en': 'Claro'},
'554298871':{'en': 'Claro'},
'554298872':{'en': 'Claro'},
'554298873':{'en': 'Claro'},
'554298874':{'en': 'Claro'},
'55429910':{'en': 'Vivo'},
'554299111':{'en': 'Vivo'},
'554299112':{'en': 'Vivo'},
'554299113':{'en': 'Vivo'},
'554299114':{'en': 'Vivo'},
'554299115':{'en': 'Vivo'},
'554299116':{'en': 'Vivo'},
'554299117':{'en': 'Vivo'},
'554299118':{'en': 'Vivo'},
'554299119':{'en': 'Vivo'},
'554299121':{'en': 'Vivo'},
'554299122':{'en': 'Vivo'},
'554299123':{'en': 'Vivo'},
'554299124':{'en': 'Vivo'},
'554299125':{'en': 'Vivo'},
'554299126':{'en': 'Vivo'},
'554299127':{'en': 'Vivo'},
'554299128':{'en': 'Vivo'},
'554299129':{'en': 'Vivo'},
'554299131':{'en': 'Vivo'},
'554299132':{'en': 'Vivo'},
'554299133':{'en': 'Vivo'},
'554299134':{'en': 'Vivo'},
'554299135':{'en': 'Vivo'},
'554299136':{'en': 'Vivo'},
'554299137':{'en': 'Vivo'},
'554299138':{'en': 'Vivo'},
'554299139':{'en': 'Vivo'},
'554299141':{'en': 'Vivo'},
'554299142':{'en': 'Vivo'},
'554299143':{'en': 'Vivo'},
'554299144':{'en': 'Vivo'},
'554299145':{'en': 'Vivo'},
'554299146':{'en': 'Vivo'},
'554299147':{'en': 'Vivo'},
'554299148':{'en': 'Vivo'},
'554299149':{'en': 'Vivo'},
'554299151':{'en': 'Vivo'},
'554299152':{'en': 'Vivo'},
'554299153':{'en': 'Vivo'},
'554299154':{'en': 'Vivo'},
'554299155':{'en': 'Vivo'},
'554299156':{'en': 'Vivo'},
'554299157':{'en': 'Vivo'},
'554299158':{'en': 'Vivo'},
'554299159':{'en': 'Vivo'},
'554299161':{'en': 'Vivo'},
'554299162':{'en': 'Vivo'},
'554299163':{'en': 'Vivo'},
'554299164':{'en': 'Vivo'},
'554299165':{'en': 'Vivo'},
'554299166':{'en': 'Vivo'},
'554299901':{'en': 'TIM'},
'554299902':{'en': 'TIM'},
'554299903':{'en': 'TIM'},
'554299904':{'en': 'TIM'},
'554299905':{'en': 'TIM'},
'554299906':{'en': 'TIM'},
'554299907':{'en': 'TIM'},
'554299908':{'en': 'TIM'},
'554299911':{'en': 'TIM'},
'554299912':{'en': 'TIM'},
'554299913':{'en': 'TIM'},
'554299914':{'en': 'TIM'},
'554299915':{'en': 'TIM'},
'554299916':{'en': 'TIM'},
'554299917':{'en': 'TIM'},
'554299918':{'en': 'TIM'},
'554299919':{'en': 'TIM'},
'554299921':{'en': 'TIM'},
'554299922':{'en': 'TIM'},
'554299923':{'en': 'TIM'},
'554299924':{'en': 'TIM'},
'554299925':{'en': 'TIM'},
'554299926':{'en': 'TIM'},
'554299927':{'en': 'TIM'},
'554299928':{'en': 'TIM'},
'554299929':{'en': 'TIM'},
'554299931':{'en': 'TIM'},
'554299932':{'en': 'TIM'},
'554299933':{'en': 'TIM'},
'554299934':{'en': 'TIM'},
'554299935':{'en': 'TIM'},
'554299936':{'en': 'TIM'},
'554299937':{'en': 'TIM'},
'554299938':{'en': 'TIM'},
'554299939':{'en': 'TIM'},
'554299941':{'en': 'TIM'},
'554299942':{'en': 'TIM'},
'554299943':{'en': 'TIM'},
'554299944':{'en': 'TIM'},
'554299945':{'en': 'TIM'},
'554299946':{'en': 'TIM'},
'554299947':{'en': 'TIM'},
'554299948':{'en': 'TIM'},
'554299949':{'en': 'TIM'},
'554299961':{'en': 'TIM'},
'554299962':{'en': 'TIM'},
'554299963':{'en': 'TIM'},
'554299964':{'en': 'TIM'},
'554299965':{'en': 'TIM'},
'554299966':{'en': 'TIM'},
'554299967':{'en': 'TIM'},
'554299969':{'en': 'TIM'},
'554299971':{'en': 'TIM'},
'554299972':{'en': 'TIM'},
'554299973':{'en': 'TIM'},
'554299974':{'en': 'TIM'},
'554299975':{'en': 'TIM'},
'554299976':{'en': 'TIM'},
'554299977':{'en': 'TIM'},
'554299978':{'en': 'TIM'},
'554299979':{'en': 'TIM'},
'554299981':{'en': 'TIM'},
'554299982':{'en': 'TIM'},
'554398111':{'en': 'TIM'},
'554398401':{'en': 'Brasil Telecom GSM'},
'554398402':{'en': 'Brasil Telecom GSM'},
'554398403':{'en': 'Brasil Telecom GSM'},
'554398404':{'en': 'Brasil Telecom GSM'},
'554398405':{'en': 'Brasil Telecom GSM'},
'554398406':{'en': 'Brasil Telecom GSM'},
'554398407':{'en': 'Brasil Telecom GSM'},
'554398408':{'en': 'Brasil Telecom GSM'},
'554398409':{'en': 'Brasil Telecom GSM'},
'554398411':{'en': 'Brasil Telecom GSM'},
'554398412':{'en': 'Brasil Telecom GSM'},
'554398413':{'en': 'Brasil Telecom GSM'},
'554398414':{'en': 'Brasil Telecom GSM'},
'554398415':{'en': 'Brasil Telecom GSM'},
'554398416':{'en': 'Brasil Telecom GSM'},
'554398417':{'en': 'Brasil Telecom GSM'},
'554398418':{'en': 'Brasil Telecom GSM'},
'554398419':{'en': 'Brasil Telecom GSM'},
'554398421':{'en': 'Brasil Telecom GSM'},
'554398422':{'en': 'Brasil Telecom GSM'},
'554398423':{'en': 'Brasil Telecom GSM'},
'554398424':{'en': 'Brasil Telecom GSM'},
'554398425':{'en': 'Brasil Telecom GSM'},
'554398426':{'en': 'Brasil Telecom GSM'},
'554398427':{'en': 'Brasil Telecom GSM'},
'554398428':{'en': 'Brasil Telecom GSM'},
'554398429':{'en': 'Brasil Telecom GSM'},
'554398431':{'en': 'Brasil Telecom GSM'},
'554398432':{'en': 'Brasil Telecom GSM'},
'554398433':{'en': 'Brasil Telecom GSM'},
'554398434':{'en': 'Brasil Telecom GSM'},
'554398435':{'en': 'Brasil Telecom GSM'},
'55439880':{'en': 'Claro'},
'55439881':{'en': 'Claro'},
'55439882':{'en': 'Claro'},
'55439883':{'en': 'Claro'},
'55439884':{'en': 'Claro'},
'554398850':{'en': 'Claro'},
'554398851':{'en': 'Claro'},
'554398852':{'en': 'Claro'},
'55439910':{'en': 'Vivo'},
'554399111':{'en': 'Vivo'},
'554399112':{'en': 'Vivo'},
'554399113':{'en': 'Vivo'},
'554399114':{'en': 'Vivo'},
'554399115':{'en': 'Vivo'},
'554399116':{'en': 'Vivo'},
'554399117':{'en': 'Vivo'},
'554399118':{'en': 'Vivo'},
'554399119':{'en': 'Vivo'},
'554399121':{'en': 'Vivo'},
'554399122':{'en': 'Vivo'},
'554399123':{'en': 'Vivo'},
'554399124':{'en': 'Vivo'},
'554399125':{'en': 'Vivo'},
'554399126':{'en': 'Vivo'},
'554399127':{'en': 'Vivo'},
'554399128':{'en': 'Vivo'},
'554399129':{'en': 'Vivo'},
'554399131':{'en': 'Vivo'},
'554399132':{'en': 'Vivo'},
'554399133':{'en': 'Vivo'},
'554399134':{'en': 'Vivo'},
'554399135':{'en': 'Vivo'},
'554399136':{'en': 'Vivo'},
'554399137':{'en': 'Vivo'},
'554399138':{'en': 'Vivo'},
'554399139':{'en': 'Vivo'},
'554399141':{'en': 'Vivo'},
'554399142':{'en': 'Vivo'},
'554399143':{'en': 'Vivo'},
'554399144':{'en': 'Vivo'},
'554399145':{'en': 'Vivo'},
'554399146':{'en': 'Vivo'},
'554399147':{'en': 'Vivo'},
'554399148':{'en': 'Vivo'},
'554399149':{'en': 'Vivo'},
'554399151':{'en': 'Vivo'},
'554399152':{'en': 'Vivo'},
'554399153':{'en': 'Vivo'},
'554399154':{'en': 'Vivo'},
'554399155':{'en': 'Vivo'},
'554399156':{'en': 'Vivo'},
'554399157':{'en': 'Vivo'},
'554399158':{'en': 'Vivo'},
'554399159':{'en': 'Vivo'},
'554399161':{'en': 'Vivo'},
'554399162':{'en': 'Vivo'},
'554399163':{'en': 'Vivo'},
'554399164':{'en': 'Vivo'},
'554399165':{'en': 'Vivo'},
'554399166':{'en': 'Vivo'},
'554399167':{'en': 'Vivo'},
'554399168':{'en': 'Vivo'},
'554399169':{'en': 'Vivo'},
'554399171':{'en': 'Vivo'},
'554399172':{'en': 'Vivo'},
'554399173':{'en': 'Vivo'},
'554399174':{'en': 'Vivo'},
'554399175':{'en': 'Vivo'},
'554399176':{'en': 'Vivo'},
'554399177':{'en': 'Vivo'},
'554399178':{'en': 'Vivo'},
'554399179':{'en': 'Vivo'},
'554399181':{'en': 'Vivo'},
'554399182':{'en': 'Vivo'},
'554399183':{'en': 'Vivo'},
'554399184':{'en': 'Vivo'},
'554399185':{'en': 'Vivo'},
'554399186':{'en': 'Vivo'},
'554399187':{'en': 'Vivo'},
'554399188':{'en': 'Vivo'},
'554399189':{'en': 'Vivo'},
'554399191':{'en': 'Vivo'},
'554399192':{'en': 'Vivo'},
'554399193':{'en': 'Vivo'},
'554399194':{'en': 'Vivo'},
'554399195':{'en': 'Vivo'},
'55439960':{'en': 'TIM'},
'554399610':{'en': 'TIM'},
'554399611':{'en': 'TIM'},
'554399612':{'en': 'TIM'},
'55439990':{'en': 'TIM'},
'55439991':{'en': 'TIM'},
'55439992':{'en': 'TIM'},
'55439993':{'en': 'TIM'},
'55439995':{'en': 'TIM'},
'55439996':{'en': 'TIM'},
'55439997':{'en': 'TIM'},
'55439998':{'en': 'TIM'},
'554498401':{'en': 'Brasil Telecom GSM'},
'554498402':{'en': 'Brasil Telecom GSM'},
'554498403':{'en': 'Brasil Telecom GSM'},
'554498404':{'en': 'Brasil Telecom GSM'},
'554498405':{'en': 'Brasil Telecom GSM'},
'554498406':{'en': 'Brasil Telecom GSM'},
'554498407':{'en': 'Brasil Telecom GSM'},
'554498408':{'en': 'Brasil Telecom GSM'},
'554498409':{'en': 'Brasil Telecom GSM'},
'554498411':{'en': 'Brasil Telecom GSM'},
'554498412':{'en': 'Brasil Telecom GSM'},
'554498413':{'en': 'Brasil Telecom GSM'},
'554498414':{'en': 'Brasil Telecom GSM'},
'554498415':{'en': 'Brasil Telecom GSM'},
'554498416':{'en': 'Brasil Telecom GSM'},
'554498417':{'en': 'Brasil Telecom GSM'},
'554498418':{'en': 'Brasil Telecom GSM'},
'554498419':{'en': 'Brasil Telecom GSM'},
'554498421':{'en': 'Brasil Telecom GSM'},
'554498422':{'en': 'Brasil Telecom GSM'},
'554498423':{'en': 'Brasil Telecom GSM'},
'554498424':{'en': 'Brasil Telecom GSM'},
'554498425':{'en': 'Brasil Telecom GSM'},
'554498426':{'en': 'Brasil Telecom GSM'},
'554498427':{'en': 'Brasil Telecom GSM'},
'554498428':{'en': 'Brasil Telecom GSM'},
'554498429':{'en': 'Brasil Telecom GSM'},
'55449880':{'en': 'Claro'},
'55449881':{'en': 'Claro'},
'55449882':{'en': 'Claro'},
'55449883':{'en': 'Claro'},
'55449884':{'en': 'Claro'},
'55449885':{'en': 'Claro'},
'554498860':{'en': 'Claro'},
'55449910':{'en': 'Vivo'},
'554499111':{'en': 'Vivo'},
'554499112':{'en': 'Vivo'},
'554499113':{'en': 'Vivo'},
'554499114':{'en': 'Vivo'},
'554499115':{'en': 'Vivo'},
'554499116':{'en': 'Vivo'},
'554499117':{'en': 'Vivo'},
'554499118':{'en': 'Vivo'},
'554499119':{'en': 'Vivo'},
'554499121':{'en': 'Vivo'},
'554499122':{'en': 'Vivo'},
'554499123':{'en': 'Vivo'},
'554499124':{'en': 'Vivo'},
'554499125':{'en': 'Vivo'},
'554499126':{'en': 'Vivo'},
'554499127':{'en': 'Vivo'},
'554499128':{'en': 'Vivo'},
'554499129':{'en': 'Vivo'},
'554499131':{'en': 'Vivo'},
'554499132':{'en': 'Vivo'},
'554499133':{'en': 'Vivo'},
'554499134':{'en': 'Vivo'},
'554499135':{'en': 'Vivo'},
'554499136':{'en': 'Vivo'},
'554499137':{'en': 'Vivo'},
'554499138':{'en': 'Vivo'},
'554499139':{'en': 'Vivo'},
'554499141':{'en': 'Vivo'},
'554499142':{'en': 'Vivo'},
'554499143':{'en': 'Vivo'},
'554499144':{'en': 'Vivo'},
'554499145':{'en': 'Vivo'},
'554499146':{'en': 'Vivo'},
'554499147':{'en': 'Vivo'},
'554499148':{'en': 'Vivo'},
'554499149':{'en': 'Vivo'},
'554499151':{'en': 'Vivo'},
'554499152':{'en': 'Vivo'},
'554499153':{'en': 'Vivo'},
'554499154':{'en': 'Vivo'},
'554499155':{'en': 'Vivo'},
'554499156':{'en': 'Vivo'},
'554499157':{'en': 'Vivo'},
'554499158':{'en': 'Vivo'},
'554499159':{'en': 'Vivo'},
'554499161':{'en': 'Vivo'},
'554499162':{'en': 'Vivo'},
'554499163':{'en': 'Vivo'},
'554499164':{'en': 'Vivo'},
'554499165':{'en': 'Vivo'},
'554499166':{'en': 'Vivo'},
'554499167':{'en': 'Vivo'},
'554499168':{'en': 'Vivo'},
'554499169':{'en': 'Vivo'},
'554499171':{'en': 'Vivo'},
'554499172':{'en': 'Vivo'},
'554499173':{'en': 'Vivo'},
'554499174':{'en': 'Vivo'},
'5544999':{'en': 'TIM'},
'554598401':{'en': 'Brasil Telecom GSM'},
'554598402':{'en': 'Brasil Telecom GSM'},
'554598403':{'en': 'Brasil Telecom GSM'},
'554598404':{'en': 'Brasil Telecom GSM'},
'554598405':{'en': 'Brasil Telecom GSM'},
'554598406':{'en': 'Brasil Telecom GSM'},
'554598407':{'en': 'Brasil Telecom GSM'},
'554598408':{'en': 'Brasil Telecom GSM'},
'554598409':{'en': 'Brasil Telecom GSM'},
'554598411':{'en': 'Brasil Telecom GSM'},
'554598412':{'en': 'Brasil Telecom GSM'},
'554598413':{'en': 'Brasil Telecom GSM'},
'554598414':{'en': 'Brasil Telecom GSM'},
'554598415':{'en': 'Brasil Telecom GSM'},
'554598416':{'en': 'Brasil Telecom GSM'},
'554598417':{'en': 'Brasil Telecom GSM'},
'554598418':{'en': 'Brasil Telecom GSM'},
'554598801':{'en': 'Claro'},
'554598802':{'en': 'Claro'},
'554598803':{'en': 'Claro'},
'554598804':{'en': 'Claro'},
'554598805':{'en': 'Claro'},
'554598806':{'en': 'Claro'},
'554598807':{'en': 'Claro'},
'554598808':{'en': 'Claro'},
'554598809':{'en': 'Claro'},
'554598811':{'en': 'Claro'},
'554598812':{'en': 'Claro'},
'554598813':{'en': 'Claro'},
'554598814':{'en': 'Claro'},
'554598815':{'en': 'Claro'},
'554598816':{'en': 'Claro'},
'554598817':{'en': 'Claro'},
'554598818':{'en': 'Claro'},
'554598819':{'en': 'Claro'},
'554598821':{'en': 'Claro'},
'554598822':{'en': 'Claro'},
'554598823':{'en': 'Claro'},
'554598824':{'en': 'Claro'},
'554598825':{'en': 'Claro'},
'554598826':{'en': 'Claro'},
'554598827':{'en': 'Claro'},
'554598828':{'en': 'Claro'},
'554598829':{'en': 'Claro'},
'554598831':{'en': 'Claro'},
'554598832':{'en': 'Claro'},
'554598833':{'en': 'Claro'},
'554598834':{'en': 'Claro'},
'554598835':{'en': 'Claro'},
'554598836':{'en': 'Claro'},
'554598837':{'en': 'Claro'},
'554598838':{'en': 'Claro'},
'554598839':{'en': 'Claro'},
'554598841':{'en': 'Claro'},
'554598842':{'en': 'Claro'},
'55459910':{'en': 'Vivo'},
'554599111':{'en': 'Vivo'},
'554599112':{'en': 'Vivo'},
'554599113':{'en': 'Vivo'},
'554599114':{'en': 'Vivo'},
'554599115':{'en': 'Vivo'},
'554599116':{'en': 'Vivo'},
'554599117':{'en': 'Vivo'},
'554599118':{'en': 'Vivo'},
'554599119':{'en': 'Vivo'},
'554599121':{'en': 'Vivo'},
'554599122':{'en': 'Vivo'},
'554599123':{'en': 'Vivo'},
'554599124':{'en': 'Vivo'},
'554599125':{'en': 'Vivo'},
'554599126':{'en': 'Vivo'},
'554599127':{'en': 'Vivo'},
'554599128':{'en': 'Vivo'},
'554599129':{'en': 'Vivo'},
'554599131':{'en': 'Vivo'},
'554599132':{'en': 'Vivo'},
'554599133':{'en': 'Vivo'},
'554599134':{'en': 'Vivo'},
'554599135':{'en': 'Vivo'},
'554599136':{'en': 'Vivo'},
'554599137':{'en': 'Vivo'},
'554599138':{'en': 'Vivo'},
'554599139':{'en': 'Vivo'},
'554599141':{'en': 'Vivo'},
'554599142':{'en': 'Vivo'},
'554599143':{'en': 'Vivo'},
'554599144':{'en': 'Vivo'},
'554599145':{'en': 'Vivo'},
'554599146':{'en': 'Vivo'},
'554599147':{'en': 'Vivo'},
'554599148':{'en': 'Vivo'},
'554599149':{'en': 'Vivo'},
'554599151':{'en': 'Vivo'},
'554599152':{'en': 'Vivo'},
'554599153':{'en': 'Vivo'},
'554599154':{'en': 'Vivo'},
'554599155':{'en': 'Vivo'},
'554599156':{'en': 'Vivo'},
'554599157':{'en': 'Vivo'},
'554599158':{'en': 'Vivo'},
'554599911':{'en': 'TIM'},
'554599912':{'en': 'TIM'},
'554599913':{'en': 'TIM'},
'554599914':{'en': 'TIM'},
'554599915':{'en': 'TIM'},
'554599916':{'en': 'TIM'},
'554599917':{'en': 'TIM'},
'554599918':{'en': 'TIM'},
'554599919':{'en': 'TIM'},
'554599921':{'en': 'TIM'},
'554599922':{'en': 'TIM'},
'554599923':{'en': 'TIM'},
'554599924':{'en': 'TIM'},
'554599925':{'en': 'TIM'},
'554599926':{'en': 'TIM'},
'554599927':{'en': 'TIM'},
'554599928':{'en': 'TIM'},
'554599929':{'en': 'TIM'},
'554599931':{'en': 'TIM'},
'554599932':{'en': 'TIM'},
'554599933':{'en': 'TIM'},
'554599934':{'en': 'TIM'},
'554599935':{'en': 'TIM'},
'554599936':{'en': 'TIM'},
'554599937':{'en': 'TIM'},
'554599938':{'en': 'TIM'},
'554599939':{'en': 'TIM'},
'554599941':{'en': 'TIM'},
'554599942':{'en': 'TIM'},
'554599943':{'en': 'TIM'},
'554599944':{'en': 'TIM'},
'554599945':{'en': 'TIM'},
'554599946':{'en': 'TIM'},
'554599947':{'en': 'TIM'},
'554599948':{'en': 'TIM'},
'554599949':{'en': 'TIM'},
'554599951':{'en': 'TIM'},
'554599952':{'en': 'TIM'},
'554599953':{'en': 'TIM'},
'554599954':{'en': 'TIM'},
'554599961':{'en': 'TIM'},
'554599962':{'en': 'TIM'},
'554599963':{'en': 'TIM'},
'554599964':{'en': 'TIM'},
'554599965':{'en': 'TIM'},
'554599966':{'en': 'TIM'},
'554599967':{'en': 'TIM'},
'554599968':{'en': 'TIM'},
'554599969':{'en': 'TIM'},
'554599971':{'en': 'TIM'},
'554599972':{'en': 'TIM'},
'554599973':{'en': 'TIM'},
'554599974':{'en': 'TIM'},
'554599975':{'en': 'TIM'},
'554599976':{'en': 'TIM'},
'554599977':{'en': 'TIM'},
'554599978':{'en': 'TIM'},
'554599979':{'en': 'TIM'},
'554599981':{'en': 'TIM'},
'554599982':{'en': 'TIM'},
'554599983':{'en': 'TIM'},
'554599984':{'en': 'TIM'},
'554698401':{'en': 'Brasil Telecom GSM'},
'554698402':{'en': 'Brasil Telecom GSM'},
'554698403':{'en': 'Brasil Telecom GSM'},
'554698404':{'en': 'Brasil Telecom GSM'},
'554698405':{'en': 'Brasil Telecom GSM'},
'554698406':{'en': 'Brasil Telecom GSM'},
'554698407':{'en': 'Brasil Telecom GSM'},
'554698801':{'en': 'Claro'},
'554698802':{'en': 'Claro'},
'554698803':{'en': 'Claro'},
'554698804':{'en': 'Claro'},
'554698805':{'en': 'Claro'},
'554698806':{'en': 'Claro'},
'554698807':{'en': 'Claro'},
'554698808':{'en': 'Claro'},
'554698809':{'en': 'Claro'},
'55469881':{'en': 'Claro'},
'554698821':{'en': 'Claro'},
'554698822':{'en': 'Claro'},
'554698823':{'en': 'Claro'},
'554698824':{'en': 'Claro'},
'554698825':{'en': 'Claro'},
'554698826':{'en': 'Claro'},
'554698827':{'en': 'Claro'},
'55469910':{'en': 'Vivo'},
'554699111':{'en': 'Vivo'},
'554699112':{'en': 'Vivo'},
'554699113':{'en': 'Vivo'},
'554699114':{'en': 'Vivo'},
'554699115':{'en': 'Vivo'},
'554699116':{'en': 'Vivo'},
'554699117':{'en': 'Vivo'},
'554699118':{'en': 'Vivo'},
'554699119':{'en': 'Vivo'},
'554699121':{'en': 'Vivo'},
'554699122':{'en': 'Vivo'},
'554699123':{'en': 'Vivo'},
'554699124':{'en': 'Vivo'},
'554699125':{'en': 'Vivo'},
'554699126':{'en': 'Vivo'},
'554699127':{'en': 'Vivo'},
'554699128':{'en': 'Vivo'},
'554699129':{'en': 'Vivo'},
'554699131':{'en': 'Vivo'},
'554699132':{'en': 'Vivo'},
'554699911':{'en': 'TIM'},
'554699912':{'en': 'TIM'},
'554699913':{'en': 'TIM'},
'554699914':{'en': 'TIM'},
'554699915':{'en': 'TIM'},
'554699916':{'en': 'TIM'},
'554699917':{'en': 'TIM'},
'554699918':{'en': 'TIM'},
'554699919':{'en': 'TIM'},
'554699921':{'en': 'TIM'},
'554699922':{'en': 'TIM'},
'554699923':{'en': 'TIM'},
'554699939':{'en': 'TIM'},
'554699971':{'en': 'TIM'},
'554699972':{'en': 'TIM'},
'554699973':{'en': 'TIM'},
'554699974':{'en': 'TIM'},
'554699975':{'en': 'TIM'},
'554699976':{'en': 'TIM'},
'554699978':{'en': 'TIM'},
'554798401':{'en': 'Brasil Telecom GSM'},
'554798402':{'en': 'Brasil Telecom GSM'},
'554798403':{'en': 'Brasil Telecom GSM'},
'554798404':{'en': 'Brasil Telecom GSM'},
'554798405':{'en': 'Brasil Telecom GSM'},
'554798406':{'en': 'Brasil Telecom GSM'},
'554798407':{'en': 'Brasil Telecom GSM'},
'554798408':{'en': 'Brasil Telecom GSM'},
'554798409':{'en': 'Brasil Telecom GSM'},
'554798411':{'en': 'Brasil Telecom GSM'},
'554798412':{'en': 'Brasil Telecom GSM'},
'554798413':{'en': 'Brasil Telecom GSM'},
'554798414':{'en': 'Brasil Telecom GSM'},
'554798415':{'en': 'Brasil Telecom GSM'},
'554798416':{'en': 'Brasil Telecom GSM'},
'554798417':{'en': 'Brasil Telecom GSM'},
'554798418':{'en': 'Brasil Telecom GSM'},
'554798419':{'en': 'Brasil Telecom GSM'},
'554798421':{'en': 'Brasil Telecom GSM'},
'554798422':{'en': 'Brasil Telecom GSM'},
'554798423':{'en': 'Brasil Telecom GSM'},
'554798424':{'en': 'Brasil Telecom GSM'},
'554798425':{'en': 'Brasil Telecom GSM'},
'554798426':{'en': 'Brasil Telecom GSM'},
'554798427':{'en': 'Brasil Telecom GSM'},
'554798428':{'en': 'Brasil Telecom GSM'},
'554798429':{'en': 'Brasil Telecom GSM'},
'554798431':{'en': 'Brasil Telecom GSM'},
'554798432':{'en': 'Brasil Telecom GSM'},
'554798433':{'en': 'Brasil Telecom GSM'},
'554798434':{'en': 'Brasil Telecom GSM'},
'554798435':{'en': 'Brasil Telecom GSM'},
'554798436':{'en': 'Brasil Telecom GSM'},
'554798437':{'en': 'Brasil Telecom GSM'},
'554798438':{'en': 'Brasil Telecom GSM'},
'554798439':{'en': 'Brasil Telecom GSM'},
'554798441':{'en': 'Brasil Telecom GSM'},
'554798442':{'en': 'Brasil Telecom GSM'},
'554798443':{'en': 'Brasil Telecom GSM'},
'554798444':{'en': 'Brasil Telecom GSM'},
'554798445':{'en': 'Brasil Telecom GSM'},
'554798446':{'en': 'Brasil Telecom GSM'},
'554798447':{'en': 'Brasil Telecom GSM'},
'554798448':{'en': 'Brasil Telecom GSM'},
'554798449':{'en': 'Brasil Telecom GSM'},
'554798451':{'en': 'Brasil Telecom GSM'},
'554798452':{'en': 'Brasil Telecom GSM'},
'554798453':{'en': 'Brasil Telecom GSM'},
'554798454':{'en': 'Brasil Telecom GSM'},
'554798455':{'en': 'Brasil Telecom GSM'},
'554798456':{'en': 'Brasil Telecom GSM'},
'554798457':{'en': 'Brasil Telecom GSM'},
'5547988':{'en': 'Claro'},
'554798900':{'en': 'Claro'},
'554798901':{'en': 'Claro'},
'5547991':{'en': 'Vivo'},
'55479920':{'en': 'Vivo'},
'55479921':{'en': 'Vivo'},
'55479922':{'en': 'Vivo'},
'55479923':{'en': 'Vivo'},
'554799240':{'en': 'Vivo'},
'554799241':{'en': 'Vivo'},
'554799242':{'en': 'Vivo'},
'554799243':{'en': 'Vivo'},
'554799244':{'en': 'Vivo'},
'554799245':{'en': 'Vivo'},
'554799246':{'en': 'Vivo'},
'554799601':{'en': 'TIM'},
'554799602':{'en': 'TIM'},
'554799603':{'en': 'TIM'},
'554799604':{'en': 'TIM'},
'554799605':{'en': 'TIM'},
'554799606':{'en': 'TIM'},
'554799607':{'en': 'TIM'},
'554799608':{'en': 'TIM'},
'554799609':{'en': 'TIM'},
'554799611':{'en': 'TIM'},
'554799612':{'en': 'TIM'},
'554799613':{'en': 'TIM'},
'554799614':{'en': 'TIM'},
'554799615':{'en': 'TIM'},
'554799616':{'en': 'TIM'},
'554799617':{'en': 'TIM'},
'554799618':{'en': 'TIM'},
'554799619':{'en': 'TIM'},
'554799621':{'en': 'TIM'},
'554799622':{'en': 'TIM'},
'554799623':{'en': 'TIM'},
'554799624':{'en': 'TIM'},
'554799625':{'en': 'TIM'},
'554799626':{'en': 'TIM'},
'554799627':{'en': 'TIM'},
'554799628':{'en': 'TIM'},
'554799629':{'en': 'TIM'},
'554799631':{'en': 'TIM'},
'554799632':{'en': 'TIM'},
'554799633':{'en': 'TIM'},
'554799634':{'en': 'TIM'},
'554799635':{'en': 'TIM'},
'554799636':{'en': 'TIM'},
'554799637':{'en': 'TIM'},
'554799638':{'en': 'TIM'},
'554799651':{'en': 'TIM'},
'554799652':{'en': 'TIM'},
'554799653':{'en': 'TIM'},
'554799654':{'en': 'TIM'},
'554799655':{'en': 'TIM'},
'554799656':{'en': 'TIM'},
'554799657':{'en': 'TIM'},
'554799658':{'en': 'TIM'},
'554799901':{'en': 'TIM'},
'554799902':{'en': 'TIM'},
'554799903':{'en': 'TIM'},
'554799904':{'en': 'TIM'},
'554799905':{'en': 'TIM'},
'554799906':{'en': 'TIM'},
'554799907':{'en': 'TIM'},
'554799909':{'en': 'TIM'},
'554799911':{'en': 'TIM'},
'554799912':{'en': 'TIM'},
'554799913':{'en': 'TIM'},
'554799914':{'en': 'TIM'},
'554799915':{'en': 'TIM'},
'554799916':{'en': 'TIM'},
'554799917':{'en': 'TIM'},
'554799918':{'en': 'TIM'},
'554799919':{'en': 'TIM'},
'554799921':{'en': 'TIM'},
'554799922':{'en': 'TIM'},
'554799923':{'en': 'TIM'},
'554799924':{'en': 'TIM'},
'554799925':{'en': 'TIM'},
'554799926':{'en': 'TIM'},
'554799927':{'en': 'TIM'},
'554799928':{'en': 'TIM'},
'554799929':{'en': 'TIM'},
'554799931':{'en': 'TIM'},
'554799932':{'en': 'TIM'},
'554799933':{'en': 'TIM'},
'554799934':{'en': 'TIM'},
'554799935':{'en': 'TIM'},
'554799936':{'en': 'TIM'},
'554799937':{'en': 'TIM'},
'554799938':{'en': 'TIM'},
'554799939':{'en': 'TIM'},
'554799941':{'en': 'TIM'},
'554799942':{'en': 'TIM'},
'554799943':{'en': 'TIM'},
'554799944':{'en': 'TIM'},
'554799945':{'en': 'TIM'},
'554799946':{'en': 'TIM'},
'554799947':{'en': 'TIM'},
'554799948':{'en': 'TIM'},
'554799949':{'en': 'TIM'},
'554799951':{'en': 'TIM'},
'554799952':{'en': 'TIM'},
'554799953':{'en': 'TIM'},
'554799954':{'en': 'TIM'},
'554799955':{'en': 'TIM'},
'554799956':{'en': 'TIM'},
'554799957':{'en': 'TIM'},
'554799958':{'en': 'TIM'},
'554799959':{'en': 'TIM'},
'554799961':{'en': 'TIM'},
'554799962':{'en': 'TIM'},
'554799963':{'en': 'TIM'},
'554799964':{'en': 'TIM'},
'554799965':{'en': 'TIM'},
'554799966':{'en': 'TIM'},
'554799967':{'en': 'TIM'},
'554799968':{'en': 'TIM'},
'554799969':{'en': 'TIM'},
'554799971':{'en': 'TIM'},
'554799972':{'en': 'TIM'},
'554799973':{'en': 'TIM'},
'554799974':{'en': 'TIM'},
'554799975':{'en': 'TIM'},
'554799976':{'en': 'TIM'},
'554799977':{'en': 'TIM'},
'554799978':{'en': 'TIM'},
'554799979':{'en': 'TIM'},
'55479998':{'en': 'TIM'},
'554799991':{'en': 'TIM'},
'554799992':{'en': 'TIM'},
'554799993':{'en': 'TIM'},
'554799994':{'en': 'TIM'},
'554799995':{'en': 'TIM'},
'554799996':{'en': 'TIM'},
'554799997':{'en': 'TIM'},
'554799998':{'en': 'TIM'},
'554898401':{'en': 'Brasil Telecom GSM'},
'554898402':{'en': 'Brasil Telecom GSM'},
'554898403':{'en': 'Brasil Telecom GSM'},
'554898404':{'en': 'Brasil Telecom GSM'},
'554898405':{'en': 'Brasil Telecom GSM'},
'554898406':{'en': 'Brasil Telecom GSM'},
'554898407':{'en': 'Brasil Telecom GSM'},
'554898408':{'en': 'Brasil Telecom GSM'},
'554898409':{'en': 'Brasil Telecom GSM'},
'554898411':{'en': 'Brasil Telecom GSM'},
'554898412':{'en': 'Brasil Telecom GSM'},
'554898413':{'en': 'Brasil Telecom GSM'},
'554898414':{'en': 'Brasil Telecom GSM'},
'554898415':{'en': 'Brasil Telecom GSM'},
'554898416':{'en': 'Brasil Telecom GSM'},
'554898417':{'en': 'Brasil Telecom GSM'},
'554898418':{'en': 'Brasil Telecom GSM'},
'554898419':{'en': 'Brasil Telecom GSM'},
'554898421':{'en': 'Brasil Telecom GSM'},
'554898422':{'en': 'Brasil Telecom GSM'},
'554898423':{'en': 'Brasil Telecom GSM'},
'554898424':{'en': 'Brasil Telecom GSM'},
'554898425':{'en': 'Brasil Telecom GSM'},
'554898426':{'en': 'Brasil Telecom GSM'},
'554898427':{'en': 'Brasil Telecom GSM'},
'554898428':{'en': 'Brasil Telecom GSM'},
'554898429':{'en': 'Brasil Telecom GSM'},
'554898431':{'en': 'Brasil Telecom GSM'},
'554898432':{'en': 'Brasil Telecom GSM'},
'554898433':{'en': 'Brasil Telecom GSM'},
'554898434':{'en': 'Brasil Telecom GSM'},
'554898435':{'en': 'Brasil Telecom GSM'},
'554898436':{'en': 'Brasil Telecom GSM'},
'554898437':{'en': 'Brasil Telecom GSM'},
'554898438':{'en': 'Brasil Telecom GSM'},
'554898439':{'en': 'Brasil Telecom GSM'},
'554898441':{'en': 'Brasil Telecom GSM'},
'554898442':{'en': 'Brasil Telecom GSM'},
'554898443':{'en': 'Brasil Telecom GSM'},
'554898444':{'en': 'Brasil Telecom GSM'},
'554898445':{'en': 'Brasil Telecom GSM'},
'554898446':{'en': 'Brasil Telecom GSM'},
'554898447':{'en': 'Brasil Telecom GSM'},
'554898448':{'en': 'Brasil Telecom GSM'},
'554898449':{'en': 'Brasil Telecom GSM'},
'554898451':{'en': 'Brasil Telecom GSM'},
'554898452':{'en': 'Brasil Telecom GSM'},
'55489880':{'en': 'Claro'},
'55489881':{'en': 'Claro'},
'55489882':{'en': 'Claro'},
'55489883':{'en': 'Claro'},
'55489884':{'en': 'Claro'},
'55489885':{'en': 'Claro'},
'554898860':{'en': 'Claro'},
'554898861':{'en': 'Claro'},
'554898862':{'en': 'Claro'},
'554898863':{'en': 'Claro'},
'554898864':{'en': 'Claro'},
'554898865':{'en': 'Claro'},
'554898866':{'en': 'Claro'},
'55489910':{'en': 'Vivo'},
'554899111':{'en': 'Vivo'},
'554899112':{'en': 'Vivo'},
'554899113':{'en': 'Vivo'},
'554899114':{'en': 'Vivo'},
'554899115':{'en': 'Vivo'},
'554899116':{'en': 'Vivo'},
'554899117':{'en': 'Vivo'},
'554899118':{'en': 'Vivo'},
'554899119':{'en': 'Vivo'},
'554899121':{'en': 'Vivo'},
'554899122':{'en': 'Vivo'},
'554899123':{'en': 'Vivo'},
'554899124':{'en': 'Vivo'},
'554899125':{'en': 'Vivo'},
'554899126':{'en': 'Vivo'},
'554899127':{'en': 'Vivo'},
'554899128':{'en': 'Vivo'},
'554899129':{'en': 'Vivo'},
'554899131':{'en': 'Vivo'},
'554899132':{'en': 'Vivo'},
'554899133':{'en': 'Vivo'},
'554899134':{'en': 'Vivo'},
'554899135':{'en': 'Vivo'},
'554899136':{'en': 'Vivo'},
'554899137':{'en': 'Vivo'},
'554899138':{'en': 'Vivo'},
'554899139':{'en': 'Vivo'},
'554899141':{'en': 'Vivo'},
'554899142':{'en': 'Vivo'},
'554899143':{'en': 'Vivo'},
'554899144':{'en': 'Vivo'},
'554899145':{'en': 'Vivo'},
'554899146':{'en': 'Vivo'},
'554899147':{'en': 'Vivo'},
'554899148':{'en': 'Vivo'},
'554899149':{'en': 'Vivo'},
'554899151':{'en': 'Vivo'},
'554899152':{'en': 'Vivo'},
'554899153':{'en': 'Vivo'},
'554899154':{'en': 'Vivo'},
'554899155':{'en': 'Vivo'},
'554899156':{'en': 'Vivo'},
'554899157':{'en': 'Vivo'},
'554899158':{'en': 'Vivo'},
'554899159':{'en': 'Vivo'},
'554899161':{'en': 'Vivo'},
'554899162':{'en': 'Vivo'},
'554899163':{'en': 'Vivo'},
'554899164':{'en': 'Vivo'},
'554899165':{'en': 'Vivo'},
'554899166':{'en': 'Vivo'},
'554899167':{'en': 'Vivo'},
'554899168':{'en': 'Vivo'},
'554899169':{'en': 'Vivo'},
'554899171':{'en': 'Vivo'},
'554899172':{'en': 'Vivo'},
'554899173':{'en': 'Vivo'},
'554899174':{'en': 'Vivo'},
'554899175':{'en': 'Vivo'},
'554899176':{'en': 'Vivo'},
'554899177':{'en': 'Vivo'},
'554899178':{'en': 'Vivo'},
'554899179':{'en': 'Vivo'},
'554899181':{'en': 'Vivo'},
'554899182':{'en': 'Vivo'},
'554899183':{'en': 'Vivo'},
'554899184':{'en': 'Vivo'},
'554899185':{'en': 'Vivo'},
'554899186':{'en': 'Vivo'},
'554899601':{'en': 'TIM'},
'554899602':{'en': 'TIM'},
'554899603':{'en': 'TIM'},
'554899604':{'en': 'TIM'},
'554899606':{'en': 'TIM'},
'554899607':{'en': 'TIM'},
'554899608':{'en': 'TIM'},
'554899609':{'en': 'TIM'},
'554899611':{'en': 'TIM'},
'554899612':{'en': 'TIM'},
'554899613':{'en': 'TIM'},
'554899614':{'en': 'TIM'},
'554899615':{'en': 'TIM'},
'554899616':{'en': 'TIM'},
'554899617':{'en': 'TIM'},
'554899618':{'en': 'TIM'},
'554899619':{'en': 'TIM'},
'554899621':{'en': 'TIM'},
'554899622':{'en': 'TIM'},
'554899623':{'en': 'TIM'},
'554899624':{'en': 'TIM'},
'554899625':{'en': 'TIM'},
'554899626':{'en': 'TIM'},
'554899627':{'en': 'TIM'},
'554899628':{'en': 'TIM'},
'554899629':{'en': 'TIM'},
'554899631':{'en': 'TIM'},
'554899632':{'en': 'TIM'},
'554899633':{'en': 'TIM'},
'554899634':{'en': 'TIM'},
'554899635':{'en': 'TIM'},
'554899636':{'en': 'TIM'},
'554899637':{'en': 'TIM'},
'554899638':{'en': 'TIM'},
'554899639':{'en': 'TIM'},
'554899641':{'en': 'TIM'},
'554899642':{'en': 'TIM'},
'554899643':{'en': 'TIM'},
'554899901':{'en': 'TIM'},
'554899902':{'en': 'TIM'},
'554899903':{'en': 'TIM'},
'554899904':{'en': 'TIM'},
'554899905':{'en': 'TIM'},
'554899906':{'en': 'TIM'},
'554899907':{'en': 'TIM'},
'554899908':{'en': 'TIM'},
'554899909':{'en': 'TIM'},
'554899911':{'en': 'TIM'},
'554899912':{'en': 'TIM'},
'554899913':{'en': 'TIM'},
'554899914':{'en': 'TIM'},
'554899915':{'en': 'TIM'},
'554899916':{'en': 'TIM'},
'554899917':{'en': 'TIM'},
'554899918':{'en': 'TIM'},
'554899919':{'en': 'TIM'},
'554899921':{'en': 'TIM'},
'554899922':{'en': 'TIM'},
'554899923':{'en': 'TIM'},
'554899924':{'en': 'TIM'},
'554899925':{'en': 'TIM'},
'554899926':{'en': 'TIM'},
'554899927':{'en': 'TIM'},
'554899928':{'en': 'TIM'},
'554899929':{'en': 'TIM'},
'554899931':{'en': 'TIM'},
'554899932':{'en': 'TIM'},
'554899933':{'en': 'TIM'},
'554899934':{'en': 'TIM'},
'554899935':{'en': 'TIM'},
'554899936':{'en': 'TIM'},
'554899937':{'en': 'TIM'},
'554899938':{'en': 'TIM'},
'554899939':{'en': 'TIM'},
'554899941':{'en': 'TIM'},
'554899942':{'en': 'TIM'},
'554899943':{'en': 'TIM'},
'554899944':{'en': 'TIM'},
'554899945':{'en': 'TIM'},
'554899946':{'en': 'TIM'},
'554899947':{'en': 'TIM'},
'554899948':{'en': 'TIM'},
'554899949':{'en': 'TIM'},
'554899951':{'en': 'TIM'},
'554899952':{'en': 'TIM'},
'554899953':{'en': 'TIM'},
'554899954':{'en': 'TIM'},
'554899955':{'en': 'TIM'},
'554899956':{'en': 'TIM'},
'554899957':{'en': 'TIM'},
'554899958':{'en': 'TIM'},
'554899959':{'en': 'TIM'},
'55489996':{'en': 'TIM'},
'554899971':{'en': 'TIM'},
'554899972':{'en': 'TIM'},
'554899973':{'en': 'TIM'},
'554899974':{'en': 'TIM'},
'554899975':{'en': 'TIM'},
'554899976':{'en': 'TIM'},
'554899977':{'en': 'TIM'},
'554899978':{'en': 'TIM'},
'554899979':{'en': 'TIM'},
'55489998':{'en': 'TIM'},
'554899991':{'en': 'TIM'},
'554899992':{'en': 'TIM'},
'554899993':{'en': 'TIM'},
'554899994':{'en': 'TIM'},
'554899995':{'en': 'TIM'},
'554899996':{'en': 'TIM'},
'554899997':{'en': 'TIM'},
'554899998':{'en': 'TIM'},
'554998401':{'en': 'Brasil Telecom GSM'},
'554998402':{'en': 'Brasil Telecom GSM'},
'554998403':{'en': 'Brasil Telecom GSM'},
'554998404':{'en': 'Brasil Telecom GSM'},
'554998405':{'en': 'Brasil Telecom GSM'},
'554998406':{'en': 'Brasil Telecom GSM'},
'554998407':{'en': 'Brasil Telecom GSM'},
'554998408':{'en': 'Brasil Telecom GSM'},
'554998409':{'en': 'Brasil Telecom GSM'},
'554998411':{'en': 'Brasil Telecom GSM'},
'554998412':{'en': 'Brasil Telecom GSM'},
'554998413':{'en': 'Brasil Telecom GSM'},
'554998414':{'en': 'Brasil Telecom GSM'},
'554998415':{'en': 'Brasil Telecom GSM'},
'554998416':{'en': 'Brasil Telecom GSM'},
'554998417':{'en': 'Brasil Telecom GSM'},
'554998418':{'en': 'Brasil Telecom GSM'},
'554998419':{'en': 'Brasil Telecom GSM'},
'55499880':{'en': 'Claro'},
'55499881':{'en': 'Claro'},
'55499882':{'en': 'Claro'},
'55499883':{'en': 'Claro'},
'55499884':{'en': 'Claro'},
'55499885':{'en': 'Claro'},
'55499886':{'en': 'Claro'},
'55499887':{'en': 'Claro'},
'554998880':{'en': 'Claro'},
'554998881':{'en': 'Claro'},
'554998882':{'en': 'Claro'},
'55499910':{'en': 'Vivo'},
'554999111':{'en': 'Vivo'},
'554999112':{'en': 'Vivo'},
'554999113':{'en': 'Vivo'},
'554999114':{'en': 'Vivo'},
'554999115':{'en': 'Vivo'},
'554999116':{'en': 'Vivo'},
'554999117':{'en': 'Vivo'},
'554999118':{'en': 'Vivo'},
'554999119':{'en': 'Vivo'},
'554999121':{'en': 'Vivo'},
'554999122':{'en': 'Vivo'},
'554999123':{'en': 'Vivo'},
'554999124':{'en': 'Vivo'},
'554999125':{'en': 'Vivo'},
'554999126':{'en': 'Vivo'},
'554999127':{'en': 'Vivo'},
'554999128':{'en': 'Vivo'},
'554999129':{'en': 'Vivo'},
'554999131':{'en': 'Vivo'},
'554999132':{'en': 'Vivo'},
'554999133':{'en': 'Vivo'},
'554999134':{'en': 'Vivo'},
'554999135':{'en': 'Vivo'},
'554999136':{'en': 'Vivo'},
'554999137':{'en': 'Vivo'},
'554999138':{'en': 'Vivo'},
'554999139':{'en': 'Vivo'},
'554999141':{'en': 'Vivo'},
'554999142':{'en': 'Vivo'},
'554999143':{'en': 'Vivo'},
'554999144':{'en': 'Vivo'},
'554999145':{'en': 'Vivo'},
'554999146':{'en': 'Vivo'},
'554999147':{'en': 'Vivo'},
'554999148':{'en': 'Vivo'},
'554999149':{'en': 'Vivo'},
'554999151':{'en': 'Vivo'},
'554999152':{'en': 'Vivo'},
'554999153':{'en': 'Vivo'},
'554999154':{'en': 'Vivo'},
'554999155':{'en': 'Vivo'},
'554999156':{'en': 'Vivo'},
'554999157':{'en': 'Vivo'},
'554999158':{'en': 'Vivo'},
'554999159':{'en': 'Vivo'},
'554999161':{'en': 'Vivo'},
'554999162':{'en': 'Vivo'},
'554999163':{'en': 'Vivo'},
'554999164':{'en': 'Vivo'},
'554999165':{'en': 'Vivo'},
'554999166':{'en': 'Vivo'},
'554999167':{'en': 'Vivo'},
'554999168':{'en': 'Vivo'},
'554999169':{'en': 'Vivo'},
'554999171':{'en': 'Vivo'},
'554999172':{'en': 'Vivo'},
'554999173':{'en': 'Vivo'},
'554999174':{'en': 'Vivo'},
'554999175':{'en': 'Vivo'},
'554999176':{'en': 'Vivo'},
'554999177':{'en': 'Vivo'},
'554999911':{'en': 'TIM'},
'554999912':{'en': 'TIM'},
'554999913':{'en': 'TIM'},
'554999914':{'en': 'TIM'},
'554999915':{'en': 'TIM'},
'554999916':{'en': 'TIM'},
'554999917':{'en': 'TIM'},
'554999918':{'en': 'TIM'},
'554999919':{'en': 'TIM'},
'554999921':{'en': 'TIM'},
'554999922':{'en': 'TIM'},
'554999923':{'en': 'TIM'},
'554999924':{'en': 'TIM'},
'554999925':{'en': 'TIM'},
'554999926':{'en': 'TIM'},
'554999927':{'en': 'TIM'},
'554999928':{'en': 'TIM'},
'554999929':{'en': 'TIM'},
'554999931':{'en': 'TIM'},
'554999932':{'en': 'TIM'},
'554999939':{'en': 'TIM'},
'554999951':{'en': 'TIM'},
'554999955':{'en': 'TIM'},
'554999963':{'en': 'TIM'},
'554999964':{'en': 'TIM'},
'554999965':{'en': 'TIM'},
'554999966':{'en': 'TIM'},
'554999967':{'en': 'TIM'},
'554999968':{'en': 'TIM'},
'554999969':{'en': 'TIM'},
'554999971':{'en': 'TIM'},
'554999972':{'en': 'TIM'},
'554999973':{'en': 'TIM'},
'554999974':{'en': 'TIM'},
'554999975':{'en': 'TIM'},
'554999976':{'en': 'TIM'},
'554999977':{'en': 'TIM'},
'554999978':{'en': 'TIM'},
'554999979':{'en': 'TIM'},
'55499998':{'en': 'TIM'},
'554999991':{'en': 'TIM'},
'554999992':{'en': 'TIM'},
'554999993':{'en': 'TIM'},
'554999994':{'en': 'TIM'},
'554999995':{'en': 'TIM'},
'554999996':{'en': 'TIM'},
'554999997':{'en': 'TIM'},
'554999998':{'en': 'TIM'},
'5551981':{'en': 'TIM'},
'555198201':{'en': 'TIM'},
'555198202':{'en': 'TIM'},
'555198203':{'en': 'TIM'},
'555198204':{'en': 'TIM'},
'555198205':{'en': 'TIM'},
'555198206':{'en': 'TIM'},
'555198207':{'en': 'TIM'},
'555198208':{'en': 'TIM'},
'555198209':{'en': 'TIM'},
'555198211':{'en': 'TIM'},
'555198212':{'en': 'TIM'},
'555198213':{'en': 'TIM'},
'555198214':{'en': 'TIM'},
'555198215':{'en': 'TIM'},
'555198216':{'en': 'TIM'},
'555198217':{'en': 'TIM'},
'555198218':{'en': 'TIM'},
'555198226':{'en': 'TIM'},
'555198227':{'en': 'TIM'},
'5551984':{'en': 'Brasil Telecom GSM'},
'5551985':{'en': 'Brasil Telecom GSM'},
'5551991':{'en': 'Claro'},
'5551992':{'en': 'Claro'},
'5551993':{'en': 'Claro'},
'55519940':{'en': 'Claro'},
'55519941':{'en': 'Claro'},
'55519942':{'en': 'Claro'},
'55519943':{'en': 'Claro'},
'55519944':{'en': 'Claro'},
'555199450':{'en': 'Claro'},
'55519950':{'en': 'Vivo'},
'55519951':{'en': 'Vivo'},
'55519952':{'en': 'Vivo'},
'55519953':{'en': 'Vivo'},
'55519954':{'en': 'Vivo'},
'55519955':{'en': 'Vivo'},
'555199560':{'en': 'Vivo'},
'555199561':{'en': 'Vivo'},
'555199562':{'en': 'Vivo'},
'555199563':{'en': 'Vivo'},
'5551996':{'en': 'Vivo'},
'5551997':{'en': 'Vivo'},
'5551998':{'en': 'Vivo'},
'5551999':{'en': 'Vivo'},
'555398111':{'en': 'TIM'},
'555398112':{'en': 'TIM'},
'555398113':{'en': 'TIM'},
'555398114':{'en': 'TIM'},
'555398115':{'en': 'TIM'},
'555398116':{'en': 'TIM'},
'555398117':{'en': 'TIM'},
'555398118':{'en': 'TIM'},
'555398119':{'en': 'TIM'},
'555398121':{'en': 'TIM'},
'555398122':{'en': 'TIM'},
'555398123':{'en': 'TIM'},
'555398124':{'en': 'TIM'},
'555398125':{'en': 'TIM'},
'555398126':{'en': 'TIM'},
'555398127':{'en': 'TIM'},
'555398128':{'en': 'TIM'},
'555398129':{'en': 'TIM'},
'555398131':{'en': 'TIM'},
'555398132':{'en': 'TIM'},
'555398133':{'en': 'TIM'},
'555398134':{'en': 'TIM'},
'555398135':{'en': 'TIM'},
'555398136':{'en': 'TIM'},
'555398137':{'en': 'TIM'},
'555398138':{'en': 'TIM'},
'555398139':{'en': 'TIM'},
'555398141':{'en': 'TIM'},
'555398401':{'en': 'Brasil Telecom GSM'},
'555398402':{'en': 'Brasil Telecom GSM'},
'555398403':{'en': 'Brasil Telecom GSM'},
'555398404':{'en': 'Brasil Telecom GSM'},
'555398405':{'en': 'Brasil Telecom GSM'},
'555398406':{'en': 'Brasil Telecom GSM'},
'555398407':{'en': 'Brasil Telecom GSM'},
'555398408':{'en': 'Brasil Telecom GSM'},
'555398409':{'en': 'Brasil Telecom GSM'},
'555398411':{'en': 'Brasil Telecom GSM'},
'555398412':{'en': 'Brasil Telecom GSM'},
'555398413':{'en': 'Brasil Telecom GSM'},
'555398414':{'en': 'Brasil Telecom GSM'},
'555398415':{'en': 'Brasil Telecom GSM'},
'555398416':{'en': 'Brasil Telecom GSM'},
'555398417':{'en': 'Brasil Telecom GSM'},
'555398418':{'en': 'Brasil Telecom GSM'},
'555398419':{'en': 'Brasil Telecom GSM'},
'555398421':{'en': 'Brasil Telecom GSM'},
'555398422':{'en': 'Brasil Telecom GSM'},
'555398423':{'en': 'Brasil Telecom GSM'},
'555398424':{'en': 'Brasil Telecom GSM'},
'555398425':{'en': 'Brasil Telecom GSM'},
'555398426':{'en': 'Brasil Telecom GSM'},
'555398427':{'en': 'Brasil Telecom GSM'},
'555398428':{'en': 'Brasil Telecom GSM'},
'555398429':{'en': 'Brasil Telecom GSM'},
'555398431':{'en': 'Brasil Telecom GSM'},
'555398432':{'en': 'Brasil Telecom GSM'},
'555398433':{'en': 'Brasil Telecom GSM'},
'555398434':{'en': 'Brasil Telecom GSM'},
'555398435':{'en': 'Brasil Telecom GSM'},
'555398436':{'en': 'Brasil Telecom GSM'},
'555398437':{'en': 'Brasil Telecom GSM'},
'55539910':{'en': 'Claro'},
'55539911':{'en': 'Claro'},
'55539912':{'en': 'Claro'},
'55539913':{'en': 'Claro'},
'55539914':{'en': 'Claro'},
'55539915':{'en': 'Claro'},
'55539916':{'en': 'Claro'},
'555399170':{'en': 'Claro'},
'555399171':{'en': 'Claro'},
'555399172':{'en': 'Claro'},
'555399173':{'en': 'Claro'},
'555399174':{'en': 'Claro'},
'555399175':{'en': 'Claro'},
'555399176':{'en': 'Claro'},
'555399177':{'en': 'Claro'},
'555399241':{'en': 'Claro'},
'555399911':{'en': 'TIM'},
'555399913':{'en': 'TIM'},
'555399927':{'en': 'Vivo'},
'555399928':{'en': 'Vivo'},
'555399929':{'en': 'Vivo'},
'555399931':{'en': 'Vivo'},
'555399932':{'en': 'Vivo'},
'555399933':{'en': 'Vivo'},
'555399934':{'en': 'Vivo'},
'555399935':{'en': 'Vivo'},
'555399936':{'en': 'Vivo'},
'555399937':{'en': 'Vivo'},
'555399938':{'en': 'Vivo'},
'555399939':{'en': 'TIM'},
'555399941':{'en': 'Vivo'},
'555399942':{'en': 'Vivo'},
'555399943':{'en': 'Vivo'},
'555399944':{'en': 'Vivo'},
'555399945':{'en': 'Vivo'},
'555399946':{'en': 'Vivo'},
'555399947':{'en': 'Vivo'},
'555399948':{'en': 'Vivo'},
'555399949':{'en': 'Vivo'},
'555399951':{'en': 'Vivo'},
'555399952':{'en': 'Vivo'},
'555399953':{'en': 'Vivo'},
'555399954':{'en': 'Vivo'},
'555399955':{'en': 'Vivo'},
'555399956':{'en': 'Vivo'},
'555399957':{'en': 'Vivo'},
'555399958':{'en': 'Vivo'},
'555399959':{'en': 'Vivo'},
'555399961':{'en': 'Vivo'},
'555399962':{'en': 'Vivo'},
'555399963':{'en': 'Vivo'},
'555399964':{'en': 'Vivo'},
'555399965':{'en': 'Vivo'},
'555399966':{'en': 'Vivo'},
'555399967':{'en': 'Vivo'},
'555399968':{'en': 'Vivo'},
'555399969':{'en': 'Vivo'},
'555399971':{'en': 'Vivo'},
'555399972':{'en': 'Vivo'},
'555399973':{'en': 'Vivo'},
'555399974':{'en': 'Vivo'},
'555399975':{'en': 'Vivo'},
'555399976':{'en': 'Vivo'},
'555399977':{'en': 'Vivo'},
'555399978':{'en': 'Vivo'},
'555399979':{'en': 'Vivo'},
'555399981':{'en': 'TIM'},
'555399982':{'en': 'TIM'},
'555399983':{'en': 'TIM'},
'555399985':{'en': 'TIM'},
'555399986':{'en': 'TIM'},
'555399987':{'en': 'TIM'},
'555399988':{'en': 'TIM'},
'555399989':{'en': 'TIM'},
'555399991':{'en': 'Vivo'},
'555399992':{'en': 'Vivo'},
'555399993':{'en': 'Vivo'},
'555399994':{'en': 'Vivo'},
'555399995':{'en': 'Vivo'},
'555399996':{'en': 'Vivo'},
'555399997':{'en': 'Vivo'},
'555399998':{'en': 'Vivo'},
'555399999':{'en': 'Vivo'},
'555498111':{'en': 'TIM'},
'555498112':{'en': 'TIM'},
'555498113':{'en': 'TIM'},
'555498114':{'en': 'TIM'},
'555498115':{'en': 'TIM'},
'555498116':{'en': 'TIM'},
'555498117':{'en': 'TIM'},
'555498118':{'en': 'TIM'},
'555498119':{'en': 'TIM'},
'555498121':{'en': 'TIM'},
'555498122':{'en': 'TIM'},
'555498123':{'en': 'TIM'},
'555498124':{'en': 'TIM'},
'555498125':{'en': 'TIM'},
'555498126':{'en': 'TIM'},
'555498127':{'en': 'TIM'},
'555498128':{'en': 'TIM'},
'555498129':{'en': 'TIM'},
'555498131':{'en': 'TIM'},
'555498132':{'en': 'TIM'},
'555498133':{'en': 'TIM'},
'555498134':{'en': 'TIM'},
'555498135':{'en': 'TIM'},
'555498136':{'en': 'TIM'},
'555498137':{'en': 'TIM'},
'555498138':{'en': 'TIM'},
'555498139':{'en': 'TIM'},
'555498141':{'en': 'TIM'},
'555498142':{'en': 'TIM'},
'555498143':{'en': 'TIM'},
'555498144':{'en': 'TIM'},
'555498145':{'en': 'TIM'},
'555498146':{'en': 'TIM'},
'555498147':{'en': 'TIM'},
'555498148':{'en': 'TIM'},
'555498149':{'en': 'TIM'},
'555498401':{'en': 'Brasil Telecom GSM'},
'555498402':{'en': 'Brasil Telecom GSM'},
'555498403':{'en': 'Brasil Telecom GSM'},
'555498404':{'en': 'Brasil Telecom GSM'},
'555498405':{'en': 'Brasil Telecom GSM'},
'555498406':{'en': 'Brasil Telecom GSM'},
'555498407':{'en': 'Brasil Telecom GSM'},
'555498408':{'en': 'Brasil Telecom GSM'},
'555498409':{'en': 'Brasil Telecom GSM'},
'555498411':{'en': 'Brasil Telecom GSM'},
'555498412':{'en': 'Brasil Telecom GSM'},
'555498413':{'en': 'Brasil Telecom GSM'},
'555498414':{'en': 'Brasil Telecom GSM'},
'555498415':{'en': 'Brasil Telecom GSM'},
'555498416':{'en': 'Brasil Telecom GSM'},
'555498417':{'en': 'Brasil Telecom GSM'},
'555498418':{'en': 'Brasil Telecom GSM'},
'5554991':{'en': 'Claro'},
'55549920':{'en': 'Claro'},
'55549921':{'en': 'Claro'},
'555499220':{'en': 'Claro'},
'555499221':{'en': 'Claro'},
'555499601':{'en': 'Vivo'},
'555499602':{'en': 'Vivo'},
'555499603':{'en': 'Vivo'},
'555499604':{'en': 'Vivo'},
'555499605':{'en': 'Vivo'},
'555499606':{'en': 'Vivo'},
'555499607':{'en': 'Vivo'},
'555499608':{'en': 'Vivo'},
'555499609':{'en': 'Vivo'},
'555499611':{'en': 'Vivo'},
'555499612':{'en': 'Vivo'},
'555499613':{'en': 'Vivo'},
'555499614':{'en': 'Vivo'},
'555499615':{'en': 'Vivo'},
'555499616':{'en': 'Vivo'},
'555499617':{'en': 'Vivo'},
'555499618':{'en': 'Vivo'},
'555499619':{'en': 'Vivo'},
'555499621':{'en': 'Vivo'},
'555499622':{'en': 'Vivo'},
'555499623':{'en': 'Vivo'},
'555499624':{'en': 'Vivo'},
'555499625':{'en': 'Vivo'},
'555499626':{'en': 'Vivo'},
'555499627':{'en': 'Vivo'},
'555499628':{'en': 'Vivo'},
'555499629':{'en': 'Vivo'},
'555499631':{'en': 'Vivo'},
'555499632':{'en': 'Vivo'},
'555499633':{'en': 'Vivo'},
'555499634':{'en': 'Vivo'},
'555499635':{'en': 'Vivo'},
'555499636':{'en': 'Vivo'},
'555499637':{'en': 'Vivo'},
'555499638':{'en': 'Vivo'},
'555499639':{'en': 'Vivo'},
'555499641':{'en': 'Vivo'},
'555499642':{'en': 'Vivo'},
'555499643':{'en': 'Vivo'},
'555499644':{'en': 'Vivo'},
'555499645':{'en': 'Vivo'},
'555499646':{'en': 'Vivo'},
'555499647':{'en': 'Vivo'},
'555499648':{'en': 'Vivo'},
'555499649':{'en': 'Vivo'},
'555499651':{'en': 'Vivo'},
'555499652':{'en': 'Vivo'},
'555499653':{'en': 'Vivo'},
'555499654':{'en': 'Vivo'},
'555499655':{'en': 'Vivo'},
'555499656':{'en': 'Vivo'},
'555499657':{'en': 'Vivo'},
'555499658':{'en': 'Vivo'},
'555499659':{'en': 'Vivo'},
'555499661':{'en': 'Vivo'},
'555499662':{'en': 'Vivo'},
'555499663':{'en': 'Vivo'},
'555499664':{'en': 'Vivo'},
'555499665':{'en': 'Vivo'},
'555499666':{'en': 'Vivo'},
'555499667':{'en': 'Vivo'},
'555499668':{'en': 'Vivo'},
'555499669':{'en': 'Vivo'},
'555499671':{'en': 'Vivo'},
'555499672':{'en': 'Vivo'},
'555499673':{'en': 'Vivo'},
'555499674':{'en': 'Vivo'},
'555499675':{'en': 'Vivo'},
'555499676':{'en': 'Vivo'},
'555499677':{'en': 'Vivo'},
'555499678':{'en': 'Vivo'},
'555499679':{'en': 'Vivo'},
'555499681':{'en': 'Vivo'},
'555499682':{'en': 'Vivo'},
'555499683':{'en': 'Vivo'},
'555499684':{'en': 'Vivo'},
'555499901':{'en': 'Vivo'},
'555499902':{'en': 'Vivo'},
'555499903':{'en': 'Vivo'},
'555499904':{'en': 'Vivo'},
'555499905':{'en': 'Vivo'},
'555499906':{'en': 'Vivo'},
'555499907':{'en': 'Vivo'},
'555499908':{'en': 'Vivo'},
'555499909':{'en': 'Vivo'},
'555499911':{'en': 'Vivo'},
'555499912':{'en': 'Vivo'},
'555499913':{'en': 'Vivo'},
'555499914':{'en': 'Vivo'},
'555499915':{'en': 'Vivo'},
'555499916':{'en': 'Vivo'},
'555499917':{'en': 'Vivo'},
'555499918':{'en': 'Vivo'},
'555499919':{'en': 'Vivo'},
'555499921':{'en': 'Vivo'},
'555499922':{'en': 'Vivo'},
'555499923':{'en': 'Vivo'},
'555499924':{'en': 'Vivo'},
'555499925':{'en': 'Vivo'},
'555499926':{'en': 'Vivo'},
'555499927':{'en': 'Vivo'},
'555499928':{'en': 'Vivo'},
'555499929':{'en': 'Vivo'},
'555499931':{'en': 'Vivo'},
'555499932':{'en': 'Vivo'},
'555499933':{'en': 'Vivo'},
'555499934':{'en': 'Vivo'},
'555499935':{'en': 'Vivo'},
'555499936':{'en': 'Vivo'},
'555499937':{'en': 'Vivo'},
'555499938':{'en': 'Vivo'},
'555499939':{'en': 'Vivo'},
'555499941':{'en': 'Vivo'},
'555499942':{'en': 'Vivo'},
'555499943':{'en': 'Vivo'},
'555499944':{'en': 'Vivo'},
'555499945':{'en': 'Vivo'},
'555499946':{'en': 'Vivo'},
'555499947':{'en': 'Vivo'},
'555499948':{'en': 'Vivo'},
'555499949':{'en': 'Vivo'},
'555499951':{'en': 'Vivo'},
'555499952':{'en': 'Vivo'},
'555499953':{'en': 'Vivo'},
'555499954':{'en': 'Vivo'},
'555499955':{'en': 'Vivo'},
'555499956':{'en': 'Vivo'},
'555499957':{'en': 'Vivo'},
'555499958':{'en': 'Vivo'},
'555499959':{'en': 'Vivo'},
'555499961':{'en': 'Vivo'},
'555499962':{'en': 'Vivo'},
'555499963':{'en': 'Vivo'},
'555499964':{'en': 'Vivo'},
'555499965':{'en': 'Vivo'},
'555499966':{'en': 'Vivo'},
'555499967':{'en': 'Vivo'},
'555499968':{'en': 'Vivo'},
'555499969':{'en': 'Vivo'},
'555499971':{'en': 'Vivo'},
'555499972':{'en': 'Vivo'},
'555499973':{'en': 'Vivo'},
'555499974':{'en': 'Vivo'},
'555499975':{'en': 'Vivo'},
'555499976':{'en': 'Vivo'},
'555499977':{'en': 'Vivo'},
'555499978':{'en': 'Vivo'},
'555499979':{'en': 'Vivo'},
'555499981':{'en': 'Vivo'},
'555499982':{'en': 'Vivo'},
'555499983':{'en': 'Vivo'},
'555499984':{'en': 'Vivo'},
'555499985':{'en': 'Vivo'},
'555499986':{'en': 'Vivo'},
'555499987':{'en': 'Vivo'},
'555499988':{'en': 'Vivo'},
'555499989':{'en': 'Vivo'},
'555499991':{'en': 'Vivo'},
'555499992':{'en': 'Vivo'},
'555499993':{'en': 'Vivo'},
'555499994':{'en': 'Vivo'},
'555499995':{'en': 'Vivo'},
'555499996':{'en': 'Vivo'},
'555499997':{'en': 'Vivo'},
'555499998':{'en': 'Vivo'},
'555499999':{'en': 'Vivo'},
'555598111':{'en': 'TIM'},
'555598112':{'en': 'TIM'},
'555598113':{'en': 'TIM'},
'555598114':{'en': 'TIM'},
'555598115':{'en': 'TIM'},
'555598116':{'en': 'TIM'},
'555598117':{'en': 'TIM'},
'555598118':{'en': 'TIM'},
'555598119':{'en': 'TIM'},
'555598121':{'en': 'TIM'},
'555598122':{'en': 'TIM'},
'555598123':{'en': 'TIM'},
'555598124':{'en': 'TIM'},
'555598125':{'en': 'TIM'},
'555598126':{'en': 'TIM'},
'555598127':{'en': 'TIM'},
'555598128':{'en': 'TIM'},
'555598129':{'en': 'TIM'},
'555598131':{'en': 'TIM'},
'555598132':{'en': 'TIM'},
'555598133':{'en': 'TIM'},
'555598134':{'en': 'TIM'},
'555598135':{'en': 'TIM'},
'555598136':{'en': 'TIM'},
'555598137':{'en': 'TIM'},
'555598138':{'en': 'TIM'},
'555598139':{'en': 'TIM'},
'555598141':{'en': 'TIM'},
'555598142':{'en': 'TIM'},
'555598143':{'en': 'TIM'},
'555598401':{'en': 'Brasil Telecom GSM'},
'555598402':{'en': 'Brasil Telecom GSM'},
'555598403':{'en': 'Brasil Telecom GSM'},
'555598404':{'en': 'Brasil Telecom GSM'},
'555598405':{'en': 'Brasil Telecom GSM'},
'555598406':{'en': 'Brasil Telecom GSM'},
'555598407':{'en': 'Brasil Telecom GSM'},
'555598408':{'en': 'Brasil Telecom GSM'},
'555598409':{'en': 'Brasil Telecom GSM'},
'555598411':{'en': 'Brasil Telecom GSM'},
'555598412':{'en': 'Brasil Telecom GSM'},
'555598413':{'en': 'Brasil Telecom GSM'},
'555598414':{'en': 'Brasil Telecom GSM'},
'555598415':{'en': 'Brasil Telecom GSM'},
'555598416':{'en': 'Brasil Telecom GSM'},
'555598417':{'en': 'Brasil Telecom GSM'},
'555598418':{'en': 'Brasil Telecom GSM'},
'555598419':{'en': 'Brasil Telecom GSM'},
'555598421':{'en': 'Brasil Telecom GSM'},
'555598422':{'en': 'Brasil Telecom GSM'},
'555598423':{'en': 'Brasil Telecom GSM'},
'555598424':{'en': 'Brasil Telecom GSM'},
'555598425':{'en': 'Brasil Telecom GSM'},
'555598426':{'en': 'Brasil Telecom GSM'},
'555598427':{'en': 'Brasil Telecom GSM'},
'5555991':{'en': 'Claro'},
'5555996':{'en': 'Vivo'},
'55559990':{'en': 'Vivo'},
'55559991':{'en': 'Vivo'},
'55559992':{'en': 'Vivo'},
'555599931':{'en': 'Vivo'},
'555599932':{'en': 'Vivo'},
'555599933':{'en': 'Vivo'},
'555599934':{'en': 'Vivo'},
'555599935':{'en': 'Vivo'},
'555599936':{'en': 'Vivo'},
'555599937':{'en': 'Vivo'},
'555599938':{'en': 'Vivo'},
'555599939':{'en': 'Vivo'},
'555599941':{'en': 'Vivo'},
'555599942':{'en': 'Vivo'},
'555599943':{'en': 'Vivo'},
'555599944':{'en': 'Vivo'},
'555599945':{'en': 'Vivo'},
'555599946':{'en': 'Vivo'},
'555599947':{'en': 'Vivo'},
'555599948':{'en': 'Vivo'},
'555599949':{'en': 'Vivo'},
'555599951':{'en': 'Vivo'},
'555599952':{'en': 'Vivo'},
'555599953':{'en': 'Vivo'},
'555599954':{'en': 'Vivo'},
'555599955':{'en': 'Vivo'},
'555599956':{'en': 'Vivo'},
'555599957':{'en': 'Vivo'},
'555599958':{'en': 'Vivo'},
'555599959':{'en': 'Vivo'},
'555599961':{'en': 'Vivo'},
'555599962':{'en': 'Vivo'},
'555599963':{'en': 'Vivo'},
'555599964':{'en': 'Vivo'},
'555599965':{'en': 'Vivo'},
'555599966':{'en': 'Vivo'},
'555599967':{'en': 'Vivo'},
'555599968':{'en': 'Vivo'},
'555599969':{'en': 'Vivo'},
'555599971':{'en': 'Vivo'},
'555599972':{'en': 'Vivo'},
'555599973':{'en': 'Vivo'},
'555599974':{'en': 'Vivo'},
'555599975':{'en': 'Vivo'},
'555599976':{'en': 'Vivo'},
'555599977':{'en': 'Vivo'},
'555599978':{'en': 'Vivo'},
'555599979':{'en': 'Vivo'},
'555599981':{'en': 'Vivo'},
'555599982':{'en': 'Vivo'},
'555599983':{'en': 'Vivo'},
'555599984':{'en': 'Vivo'},
'555599985':{'en': 'Vivo'},
'555599986':{'en': 'Vivo'},
'555599987':{'en': 'Vivo'},
'555599988':{'en': 'Vivo'},
'555599989':{'en': 'Vivo'},
'555599991':{'en': 'Vivo'},
'555599992':{'en': 'Vivo'},
'555599993':{'en': 'Vivo'},
'555599994':{'en': 'Vivo'},
'555599995':{'en': 'Vivo'},
'555599996':{'en': 'Vivo'},
'555599997':{'en': 'Vivo'},
'555599998':{'en': 'Vivo'},
'555599999':{'en': 'Vivo'},
'556198101':{'en': 'TIM'},
'556198102':{'en': 'TIM'},
'556198103':{'en': 'TIM'},
'556198104':{'en': 'TIM'},
'556198105':{'en': 'TIM'},
'556198106':{'en': 'TIM'},
'556198107':{'en': 'TIM'},
'556198108':{'en': 'TIM'},
'556198109':{'en': 'TIM'},
'55619811':{'en': 'TIM'},
'55619812':{'en': 'TIM'},
'55619813':{'en': 'TIM'},
'55619814':{'en': 'TIM'},
'55619815':{'en': 'TIM'},
'55619816':{'en': 'TIM'},
'556198171':{'en': 'TIM'},
'556198172':{'en': 'TIM'},
'556198173':{'en': 'TIM'},
'556198174':{'en': 'TIM'},
'556198175':{'en': 'TIM'},
'556198176':{'en': 'TIM'},
'556198177':{'en': 'TIM'},
'556198178':{'en': 'TIM'},
'556198179':{'en': 'TIM'},
'556198181':{'en': 'TIM'},
'556198182':{'en': 'TIM'},
'556198183':{'en': 'TIM'},
'556198184':{'en': 'TIM'},
'556198185':{'en': 'TIM'},
'556198186':{'en': 'TIM'},
'556198187':{'en': 'TIM'},
'556198188':{'en': 'TIM'},
'556198189':{'en': 'TIM'},
'556198191':{'en': 'TIM'},
'556198192':{'en': 'TIM'},
'556198193':{'en': 'TIM'},
'556198194':{'en': 'TIM'},
'556198195':{'en': 'TIM'},
'556198196':{'en': 'TIM'},
'556198197':{'en': 'TIM'},
'556198198':{'en': 'TIM'},
'556198199':{'en': 'TIM'},
'5561984':{'en': 'Brasil Telecom GSM'},
'556198501':{'en': 'Brasil Telecom GSM'},
'556198502':{'en': 'Brasil Telecom GSM'},
'556198503':{'en': 'Brasil Telecom GSM'},
'556198504':{'en': 'Brasil Telecom GSM'},
'556198505':{'en': 'Brasil Telecom GSM'},
'556198506':{'en': 'Brasil Telecom GSM'},
'556198507':{'en': 'Brasil Telecom GSM'},
'556198508':{'en': 'Brasil Telecom GSM'},
'556198509':{'en': 'Brasil Telecom GSM'},
'556198511':{'en': 'Brasil Telecom GSM'},
'556198512':{'en': 'Brasil Telecom GSM'},
'556198513':{'en': 'Brasil Telecom GSM'},
'556198514':{'en': 'Brasil Telecom GSM'},
'556198515':{'en': 'Brasil Telecom GSM'},
'556198516':{'en': 'Brasil Telecom GSM'},
'556198517':{'en': 'Brasil Telecom GSM'},
'556198518':{'en': 'Brasil Telecom GSM'},
'556198519':{'en': 'Brasil Telecom GSM'},
'556198521':{'en': 'Brasil Telecom GSM'},
'556198522':{'en': 'Brasil Telecom GSM'},
'556198523':{'en': 'Brasil Telecom GSM'},
'556198524':{'en': 'Brasil Telecom GSM'},
'556198525':{'en': 'Brasil Telecom GSM'},
'556198526':{'en': 'Brasil Telecom GSM'},
'556198527':{'en': 'Brasil Telecom GSM'},
'556198528':{'en': 'Brasil Telecom GSM'},
'556198529':{'en': 'Brasil Telecom GSM'},
'556198531':{'en': 'Brasil Telecom GSM'},
'556198532':{'en': 'Brasil Telecom GSM'},
'556198533':{'en': 'Brasil Telecom GSM'},
'556198534':{'en': 'Brasil Telecom GSM'},
'556198535':{'en': 'Brasil Telecom GSM'},
'556198536':{'en': 'Brasil Telecom GSM'},
'556198537':{'en': 'Brasil Telecom GSM'},
'556198538':{'en': 'Brasil Telecom GSM'},
'556198539':{'en': 'Brasil Telecom GSM'},
'556198541':{'en': 'Brasil Telecom GSM'},
'556198542':{'en': 'Brasil Telecom GSM'},
'556198543':{'en': 'Brasil Telecom GSM'},
'556198544':{'en': 'Brasil Telecom GSM'},
'556198545':{'en': 'Brasil Telecom GSM'},
'556198546':{'en': 'Brasil Telecom GSM'},
'556198547':{'en': 'Brasil Telecom GSM'},
'556198548':{'en': 'Brasil Telecom GSM'},
'556198549':{'en': 'Brasil Telecom GSM'},
'556198551':{'en': 'Brasil Telecom GSM'},
'556198552':{'en': 'Brasil Telecom GSM'},
'556198553':{'en': 'Brasil Telecom GSM'},
'556198554':{'en': 'Brasil Telecom GSM'},
'556198555':{'en': 'Brasil Telecom GSM'},
'556198556':{'en': 'Brasil Telecom GSM'},
'556198557':{'en': 'Brasil Telecom GSM'},
'556198558':{'en': 'Brasil Telecom GSM'},
'556198559':{'en': 'Brasil Telecom GSM'},
'556198561':{'en': 'Brasil Telecom GSM'},
'556198562':{'en': 'Brasil Telecom GSM'},
'556198563':{'en': 'Brasil Telecom GSM'},
'556198564':{'en': 'Brasil Telecom GSM'},
'556198565':{'en': 'Brasil Telecom GSM'},
'556198566':{'en': 'Brasil Telecom GSM'},
'556198567':{'en': 'Brasil Telecom GSM'},
'556198568':{'en': 'Brasil Telecom GSM'},
'556198569':{'en': 'Brasil Telecom GSM'},
'556198571':{'en': 'Brasil Telecom GSM'},
'556198572':{'en': 'Brasil Telecom GSM'},
'556198573':{'en': 'Brasil Telecom GSM'},
'556198574':{'en': 'Brasil Telecom GSM'},
'556198576':{'en': 'Brasil Telecom GSM'},
'556198577':{'en': 'Brasil Telecom GSM'},
'556198579':{'en': 'Brasil Telecom GSM'},
'556198581':{'en': 'Brasil Telecom GSM'},
'556198582':{'en': 'Brasil Telecom GSM'},
'556199601':{'en': 'Vivo'},
'556199602':{'en': 'Vivo'},
'556199603':{'en': 'Vivo'},
'556199604':{'en': 'Vivo'},
'556199605':{'en': 'Vivo'},
'556199606':{'en': 'Vivo'},
'556199607':{'en': 'Vivo'},
'556199608':{'en': 'Vivo'},
'556199609':{'en': 'Vivo'},
'556199611':{'en': 'Vivo'},
'556199612':{'en': 'Vivo'},
'556199613':{'en': 'Vivo'},
'556199614':{'en': 'Vivo'},
'556199615':{'en': 'Vivo'},
'556199616':{'en': 'Vivo'},
'556199617':{'en': 'Vivo'},
'556199618':{'en': 'Vivo'},
'556199619':{'en': 'Vivo'},
'556199621':{'en': 'Vivo'},
'556199622':{'en': 'Vivo'},
'556199623':{'en': 'Vivo'},
'556199624':{'en': 'Vivo'},
'556199625':{'en': 'Vivo'},
'556199626':{'en': 'Vivo'},
'556199627':{'en': 'Vivo'},
'556199628':{'en': 'Vivo'},
'556199629':{'en': 'Vivo'},
'556199631':{'en': 'Vivo'},
'556199632':{'en': 'Vivo'},
'556199633':{'en': 'Vivo'},
'556199634':{'en': 'Vivo'},
'556199635':{'en': 'Vivo'},
'556199636':{'en': 'Vivo'},
'556199637':{'en': 'Vivo'},
'556199638':{'en': 'Vivo'},
'556199639':{'en': 'Vivo'},
'556199641':{'en': 'Vivo'},
'556199642':{'en': 'Vivo'},
'556199643':{'en': 'Vivo'},
'556199644':{'en': 'Vivo'},
'556199645':{'en': 'Vivo'},
'556199646':{'en': 'Vivo'},
'556199647':{'en': 'Vivo'},
'556199648':{'en': 'Vivo'},
'556199649':{'en': 'Vivo'},
'556199651':{'en': 'Vivo'},
'556199652':{'en': 'Vivo'},
'556199653':{'en': 'Vivo'},
'556199654':{'en': 'Vivo'},
'556199655':{'en': 'Vivo'},
'556199656':{'en': 'Vivo'},
'556199657':{'en': 'Vivo'},
'556199658':{'en': 'Vivo'},
'556199659':{'en': 'Vivo'},
'556199661':{'en': 'Vivo'},
'556199662':{'en': 'Vivo'},
'556199663':{'en': 'Vivo'},
'556199664':{'en': 'Vivo'},
'556199665':{'en': 'Vivo'},
'556199666':{'en': 'Vivo'},
'556199667':{'en': 'Vivo'},
'556199668':{'en': 'Vivo'},
'556199669':{'en': 'Vivo'},
'556199671':{'en': 'Vivo'},
'556199672':{'en': 'Vivo'},
'556199673':{'en': 'Vivo'},
'556199674':{'en': 'Vivo'},
'556199675':{'en': 'Vivo'},
'556199676':{'en': 'Vivo'},
'556199677':{'en': 'Vivo'},
'556199678':{'en': 'Vivo'},
'556199679':{'en': 'Vivo'},
'556199681':{'en': 'Vivo'},
'556199682':{'en': 'Vivo'},
'556199683':{'en': 'Vivo'},
'556199684':{'en': 'Vivo'},
'556199685':{'en': 'Vivo'},
'556199686':{'en': 'Vivo'},
'556199687':{'en': 'Vivo'},
'556199688':{'en': 'Vivo'},
'556199689':{'en': 'Vivo'},
'556199691':{'en': 'Vivo'},
'556199692':{'en': 'Vivo'},
'556199693':{'en': 'Vivo'},
'556199694':{'en': 'Vivo'},
'556199695':{'en': 'Vivo'},
'556199696':{'en': 'Vivo'},
'556199697':{'en': 'Vivo'},
'556199698':{'en': 'Vivo'},
'556199699':{'en': 'Vivo'},
'556199801':{'en': 'Vivo'},
'556199802':{'en': 'Vivo'},
'556199803':{'en': 'Vivo'},
'556199804':{'en': 'Vivo'},
'556199805':{'en': 'Vivo'},
'556199806':{'en': 'Vivo'},
'556199807':{'en': 'Vivo'},
'556199808':{'en': 'Vivo'},
'556199809':{'en': 'Vivo'},
'556199811':{'en': 'Vivo'},
'556199812':{'en': 'Vivo'},
'556199813':{'en': 'Vivo'},
'556199814':{'en': 'Vivo'},
'556199815':{'en': 'Vivo'},
'556199816':{'en': 'Vivo'},
'556199817':{'en': 'Vivo'},
'556199818':{'en': 'Vivo'},
'556199819':{'en': 'Vivo'},
'556199821':{'en': 'Vivo'},
'556199822':{'en': 'Vivo'},
'556199823':{'en': 'Vivo'},
'556199824':{'en': 'Vivo'},
'556199825':{'en': 'Vivo'},
'556199826':{'en': 'Vivo'},
'556199827':{'en': 'Vivo'},
'556199828':{'en': 'Vivo'},
'556199829':{'en': 'Vivo'},
'556199831':{'en': 'Vivo'},
'556199832':{'en': 'Vivo'},
'556199838':{'en': 'Vivo'},
'556199839':{'en': 'Vivo'},
'556199841':{'en': 'Vivo'},
'556199842':{'en': 'Vivo'},
'556199843':{'en': 'Vivo'},
'556199844':{'en': 'Vivo'},
'556199901':{'en': 'Vivo'},
'556199902':{'en': 'Vivo'},
'556199903':{'en': 'Vivo'},
'556199904':{'en': 'Vivo'},
'556199905':{'en': 'Vivo'},
'556199906':{'en': 'Vivo'},
'556199907':{'en': 'Vivo'},
'556199908':{'en': 'Vivo'},
'556199909':{'en': 'Vivo'},
'556199911':{'en': 'Vivo'},
'556199912':{'en': 'Vivo'},
'556199913':{'en': 'Vivo'},
'556199914':{'en': 'Vivo'},
'556199915':{'en': 'Vivo'},
'556199916':{'en': 'Vivo'},
'556199917':{'en': 'Vivo'},
'556199918':{'en': 'Vivo'},
'556199919':{'en': 'Vivo'},
'556199921':{'en': 'Vivo'},
'556199922':{'en': 'Vivo'},
'556199923':{'en': 'Vivo'},
'556199924':{'en': 'Vivo'},
'556199925':{'en': 'Vivo'},
'556199926':{'en': 'Vivo'},
'556199927':{'en': 'Vivo'},
'556199928':{'en': 'Vivo'},
'556199929':{'en': 'Vivo'},
'556199931':{'en': 'Vivo'},
'556199932':{'en': 'Vivo'},
'556199933':{'en': 'Vivo'},
'556199934':{'en': 'Vivo'},
'556199935':{'en': 'Vivo'},
'556199936':{'en': 'Vivo'},
'556199937':{'en': 'Vivo'},
'556199938':{'en': 'Vivo'},
'556199939':{'en': 'Vivo'},
'556199941':{'en': 'Vivo'},
'556199942':{'en': 'Vivo'},
'556199943':{'en': 'Vivo'},
'556199944':{'en': 'Vivo'},
'556199945':{'en': 'Vivo'},
'556199946':{'en': 'Vivo'},
'556199947':{'en': 'Vivo'},
'556199948':{'en': 'Vivo'},
'556199949':{'en': 'Vivo'},
'556199951':{'en': 'Vivo'},
'556199952':{'en': 'Vivo'},
'556199953':{'en': 'Vivo'},
'556199954':{'en': 'Vivo'},
'556199955':{'en': 'Vivo'},
'556199956':{'en': 'Vivo'},
'556199957':{'en': 'Vivo'},
'556199958':{'en': 'Vivo'},
'556199959':{'en': 'Vivo'},
'55619996':{'en': 'Vivo'},
'55619997':{'en': 'Vivo'},
'55619998':{'en': 'Vivo'},
'55619999':{'en': 'Vivo'},
'556298101':{'en': 'TIM'},
'556298102':{'en': 'TIM'},
'556298103':{'en': 'TIM'},
'556298104':{'en': 'TIM'},
'556298111':{'en': 'TIM'},
'556298112':{'en': 'TIM'},
'556298113':{'en': 'TIM'},
'556298114':{'en': 'TIM'},
'556298115':{'en': 'TIM'},
'556298116':{'en': 'TIM'},
'556298117':{'en': 'TIM'},
'556298118':{'en': 'TIM'},
'556298119':{'en': 'TIM'},
'556298121':{'en': 'TIM'},
'556298122':{'en': 'TIM'},
'556298123':{'en': 'TIM'},
'556298124':{'en': 'TIM'},
'556298125':{'en': 'TIM'},
'556298126':{'en': 'TIM'},
'556298127':{'en': 'TIM'},
'556298128':{'en': 'TIM'},
'556298129':{'en': 'TIM'},
'556298131':{'en': 'TIM'},
'556298132':{'en': 'TIM'},
'556298133':{'en': 'TIM'},
'556298134':{'en': 'TIM'},
'556298135':{'en': 'TIM'},
'556298136':{'en': 'TIM'},
'556298137':{'en': 'TIM'},
'556298138':{'en': 'TIM'},
'556298139':{'en': 'TIM'},
'556298141':{'en': 'TIM'},
'556298142':{'en': 'TIM'},
'556298143':{'en': 'TIM'},
'556298144':{'en': 'TIM'},
'556298145':{'en': 'TIM'},
'556298146':{'en': 'TIM'},
'556298147':{'en': 'TIM'},
'556298148':{'en': 'TIM'},
'556298149':{'en': 'TIM'},
'556298151':{'en': 'TIM'},
'556298152':{'en': 'TIM'},
'556298153':{'en': 'TIM'},
'556298154':{'en': 'TIM'},
'556298155':{'en': 'TIM'},
'556298156':{'en': 'TIM'},
'556298157':{'en': 'TIM'},
'556298158':{'en': 'TIM'},
'556298159':{'en': 'TIM'},
'556298161':{'en': 'TIM'},
'556298162':{'en': 'TIM'},
'556298163':{'en': 'TIM'},
'556298164':{'en': 'TIM'},
'556298165':{'en': 'TIM'},
'556298166':{'en': 'TIM'},
'556298167':{'en': 'TIM'},
'556298168':{'en': 'TIM'},
'556298169':{'en': 'TIM'},
'556298171':{'en': 'TIM'},
'556298172':{'en': 'TIM'},
'556298173':{'en': 'TIM'},
'556298174':{'en': 'TIM'},
'556298175':{'en': 'TIM'},
'556298176':{'en': 'TIM'},
'556298177':{'en': 'TIM'},
'556298178':{'en': 'TIM'},
'556298179':{'en': 'TIM'},
'556298181':{'en': 'TIM'},
'556298182':{'en': 'TIM'},
'556298183':{'en': 'TIM'},
'556298184':{'en': 'TIM'},
'556298185':{'en': 'TIM'},
'556298186':{'en': 'TIM'},
'556298187':{'en': 'TIM'},
'556298188':{'en': 'TIM'},
'556298189':{'en': 'TIM'},
'556298191':{'en': 'TIM'},
'556298192':{'en': 'TIM'},
'556298193':{'en': 'TIM'},
'556298194':{'en': 'TIM'},
'556298195':{'en': 'TIM'},
'556298196':{'en': 'TIM'},
'556298197':{'en': 'TIM'},
'556298198':{'en': 'TIM'},
'556298199':{'en': 'TIM'},
'5562984':{'en': 'Brasil Telecom GSM'},
'5562985':{'en': 'Brasil Telecom GSM'},
'55629960':{'en': 'Vivo'},
'55629961':{'en': 'Vivo'},
'55629962':{'en': 'Vivo'},
'55629963':{'en': 'Vivo'},
'55629964':{'en': 'Vivo'},
'55629965':{'en': 'Vivo'},
'556299661':{'en': 'Vivo'},
'556299662':{'en': 'Vivo'},
'556299663':{'en': 'Vivo'},
'556299664':{'en': 'Vivo'},
'556299665':{'en': 'Vivo'},
'556299666':{'en': 'Vivo'},
'556299667':{'en': 'Vivo'},
'556299668':{'en': 'Vivo'},
'556299669':{'en': 'Vivo'},
'556299671':{'en': 'Vivo'},
'556299672':{'en': 'Vivo'},
'556299673':{'en': 'Vivo'},
'556299674':{'en': 'Vivo'},
'556299675':{'en': 'Vivo'},
'556299676':{'en': 'Vivo'},
'556299677':{'en': 'Vivo'},
'556299678':{'en': 'Vivo'},
'556299679':{'en': 'Vivo'},
'556299681':{'en': 'Vivo'},
'556299682':{'en': 'Vivo'},
'556299683':{'en': 'Vivo'},
'556299684':{'en': 'Vivo'},
'556299685':{'en': 'Vivo'},
'556299686':{'en': 'Vivo'},
'556299687':{'en': 'Vivo'},
'556299688':{'en': 'Vivo'},
'556299689':{'en': 'Vivo'},
'556299691':{'en': 'Vivo'},
'556299692':{'en': 'Vivo'},
'556299693':{'en': 'Vivo'},
'556299694':{'en': 'Vivo'},
'556299695':{'en': 'Vivo'},
'556299696':{'en': 'Vivo'},
'556299697':{'en': 'Vivo'},
'556299698':{'en': 'Vivo'},
'556299699':{'en': 'Vivo'},
'556299801':{'en': 'Vivo'},
'556299802':{'en': 'Vivo'},
'556299803':{'en': 'Vivo'},
'556299804':{'en': 'Vivo'},
'556299805':{'en': 'Vivo'},
'556299806':{'en': 'Vivo'},
'556299807':{'en': 'Vivo'},
'556299808':{'en': 'Vivo'},
'556299809':{'en': 'Vivo'},
'556299811':{'en': 'Vivo'},
'556299812':{'en': 'Vivo'},
'556299813':{'en': 'Vivo'},
'556299814':{'en': 'Vivo'},
'556299815':{'en': 'Vivo'},
'556299816':{'en': 'Vivo'},
'556299817':{'en': 'Vivo'},
'556299818':{'en': 'Vivo'},
'556299901':{'en': 'Vivo'},
'556299902':{'en': 'Vivo'},
'556299903':{'en': 'Vivo'},
'556299904':{'en': 'Vivo'},
'556299905':{'en': 'Vivo'},
'556299906':{'en': 'Vivo'},
'556299907':{'en': 'Vivo'},
'556299908':{'en': 'Vivo'},
'556299909':{'en': 'Vivo'},
'556299911':{'en': 'Vivo'},
'556299912':{'en': 'Vivo'},
'556299913':{'en': 'Vivo'},
'556299914':{'en': 'Vivo'},
'556299915':{'en': 'Vivo'},
'556299916':{'en': 'Vivo'},
'556299917':{'en': 'Vivo'},
'556299918':{'en': 'Vivo'},
'556299919':{'en': 'Vivo'},
'556299921':{'en': 'Vivo'},
'556299922':{'en': 'Vivo'},
'556299923':{'en': 'Vivo'},
'556299924':{'en': 'Vivo'},
'556299925':{'en': 'Vivo'},
'556299926':{'en': 'Vivo'},
'556299927':{'en': 'Vivo'},
'556299928':{'en': 'Vivo'},
'556299929':{'en': 'Vivo'},
'556299931':{'en': 'Vivo'},
'556299932':{'en': 'Vivo'},
'556299933':{'en': 'Vivo'},
'556299934':{'en': 'Vivo'},
'556299935':{'en': 'Vivo'},
'556299936':{'en': 'Vivo'},
'556299937':{'en': 'Vivo'},
'556299938':{'en': 'Vivo'},
'556299939':{'en': 'Vivo'},
'556299941':{'en': 'Vivo'},
'556299942':{'en': 'Vivo'},
'556299943':{'en': 'Vivo'},
'556299944':{'en': 'Vivo'},
'556299945':{'en': 'Vivo'},
'556299946':{'en': 'Vivo'},
'556299947':{'en': 'Vivo'},
'556299948':{'en': 'Vivo'},
'556299949':{'en': 'Vivo'},
'556299951':{'en': 'Vivo'},
'556299952':{'en': 'Vivo'},
'556299953':{'en': 'Vivo'},
'556299954':{'en': 'Vivo'},
'556299955':{'en': 'Vivo'},
'556299956':{'en': 'Vivo'},
'556299957':{'en': 'Vivo'},
'556299958':{'en': 'Vivo'},
'556299959':{'en': 'Vivo'},
'55629996':{'en': 'Vivo'},
'55629997':{'en': 'Vivo'},
'55629998':{'en': 'Vivo'},
'556299991':{'en': 'Vivo'},
'556299992':{'en': 'Vivo'},
'556299993':{'en': 'Vivo'},
'556299994':{'en': 'Vivo'},
'556299995':{'en': 'Vivo'},
'556299996':{'en': 'Vivo'},
'556299997':{'en': 'Vivo'},
'556299998':{'en': 'Vivo'},
'556299999':{'en': 'Vivo'},
'556398111':{'en': 'TIM'},
'556398112':{'en': 'TIM'},
'556398113':{'en': 'TIM'},
'556398114':{'en': 'TIM'},
'556398115':{'en': 'TIM'},
'556398116':{'en': 'TIM'},
'556398117':{'en': 'TIM'},
'556398118':{'en': 'TIM'},
'556398119':{'en': 'TIM'},
'556398121':{'en': 'TIM'},
'556398122':{'en': 'TIM'},
'556398123':{'en': 'TIM'},
'556398124':{'en': 'TIM'},
'556398125':{'en': 'TIM'},
'556398126':{'en': 'TIM'},
'556398127':{'en': 'TIM'},
'556398128':{'en': 'TIM'},
'556398129':{'en': 'TIM'},
'556398131':{'en': 'TIM'},
'556398132':{'en': 'TIM'},
'556398133':{'en': 'TIM'},
'556398401':{'en': 'Brasil Telecom GSM'},
'556398402':{'en': 'Brasil Telecom GSM'},
'556398403':{'en': 'Brasil Telecom GSM'},
'556398404':{'en': 'Brasil Telecom GSM'},
'556398405':{'en': 'Brasil Telecom GSM'},
'556398406':{'en': 'Brasil Telecom GSM'},
'556398407':{'en': 'Brasil Telecom GSM'},
'556398408':{'en': 'Brasil Telecom GSM'},
'556398409':{'en': 'Brasil Telecom GSM'},
'556398411':{'en': 'Brasil Telecom GSM'},
'556398412':{'en': 'Brasil Telecom GSM'},
'556398413':{'en': 'Brasil Telecom GSM'},
'556398414':{'en': 'Brasil Telecom GSM'},
'556398415':{'en': 'Brasil Telecom GSM'},
'556398416':{'en': 'Brasil Telecom GSM'},
'556398417':{'en': 'Brasil Telecom GSM'},
'556398418':{'en': 'Brasil Telecom GSM'},
'556398419':{'en': 'Brasil Telecom GSM'},
'556398421':{'en': 'Brasil Telecom GSM'},
'556398422':{'en': 'Brasil Telecom GSM'},
'556398423':{'en': 'Brasil Telecom GSM'},
'556398424':{'en': 'Brasil Telecom GSM'},
'556398425':{'en': 'Brasil Telecom GSM'},
'556398426':{'en': 'Brasil Telecom GSM'},
'556398427':{'en': 'Brasil Telecom GSM'},
'556398428':{'en': 'Brasil Telecom GSM'},
'556398429':{'en': 'Brasil Telecom GSM'},
'556398431':{'en': 'Brasil Telecom GSM'},
'556398432':{'en': 'Brasil Telecom GSM'},
'556398433':{'en': 'Brasil Telecom GSM'},
'556398434':{'en': 'Brasil Telecom GSM'},
'556398435':{'en': 'Brasil Telecom GSM'},
'556398436':{'en': 'Brasil Telecom GSM'},
'556398437':{'en': 'Brasil Telecom GSM'},
'556398438':{'en': 'Brasil Telecom GSM'},
'556398439':{'en': 'Brasil Telecom GSM'},
'556398441':{'en': 'Brasil Telecom GSM'},
'556398442':{'en': 'Brasil Telecom GSM'},
'556398443':{'en': 'Brasil Telecom GSM'},
'556399911':{'en': 'Vivo'},
'556399941':{'en': 'Vivo'},
'556399942':{'en': 'Vivo'},
'556399943':{'en': 'Vivo'},
'556399944':{'en': 'Vivo'},
'556399945':{'en': 'Vivo'},
'556399946':{'en': 'Vivo'},
'556399947':{'en': 'Vivo'},
'556399948':{'en': 'Vivo'},
'556399949':{'en': 'Vivo'},
'556399951':{'en': 'Vivo'},
'556399952':{'en': 'Vivo'},
'556399953':{'en': 'Vivo'},
'556399954':{'en': 'Vivo'},
'556399955':{'en': 'Vivo'},
'556399956':{'en': 'Vivo'},
'556399957':{'en': 'Vivo'},
'556399958':{'en': 'Vivo'},
'556399959':{'en': 'Vivo'},
'556399961':{'en': 'Vivo'},
'556399962':{'en': 'Vivo'},
'556399963':{'en': 'Vivo'},
'556399964':{'en': 'Vivo'},
'556399965':{'en': 'Vivo'},
'556399966':{'en': 'Vivo'},
'556399967':{'en': 'Vivo'},
'556399968':{'en': 'Vivo'},
'556399969':{'en': 'Vivo'},
'55639997':{'en': 'Vivo'},
'55639998':{'en': 'Vivo'},
'556399991':{'en': 'Vivo'},
'556399992':{'en': 'Vivo'},
'556399993':{'en': 'Vivo'},
'556399994':{'en': 'Vivo'},
'556399995':{'en': 'Vivo'},
'556399996':{'en': 'Vivo'},
'556399997':{'en': 'Vivo'},
'556399998':{'en': 'Vivo'},
'556399999':{'en': 'Vivo'},
'556498111':{'en': 'TIM'},
'556498112':{'en': 'TIM'},
'556498113':{'en': 'TIM'},
'556498114':{'en': 'TIM'},
'556498115':{'en': 'TIM'},
'556498116':{'en': 'TIM'},
'556498117':{'en': 'TIM'},
'556498118':{'en': 'TIM'},
'556498119':{'en': 'TIM'},
'556498121':{'en': 'TIM'},
'556498122':{'en': 'TIM'},
'556498123':{'en': 'TIM'},
'556498124':{'en': 'TIM'},
'556498125':{'en': 'TIM'},
'556498126':{'en': 'TIM'},
'556498127':{'en': 'TIM'},
'556498128':{'en': 'TIM'},
'556498129':{'en': 'TIM'},
'556498131':{'en': 'TIM'},
'556498132':{'en': 'TIM'},
'556498133':{'en': 'TIM'},
'556498134':{'en': 'TIM'},
'556498135':{'en': 'TIM'},
'556498136':{'en': 'TIM'},
'556498137':{'en': 'TIM'},
'556498138':{'en': 'TIM'},
'556498139':{'en': 'TIM'},
'556498141':{'en': 'TIM'},
'556498401':{'en': 'Brasil Telecom GSM'},
'556498402':{'en': 'Brasil Telecom GSM'},
'556498403':{'en': 'Brasil Telecom GSM'},
'556498404':{'en': 'Brasil Telecom GSM'},
'556498405':{'en': 'Brasil Telecom GSM'},
'556498406':{'en': 'Brasil Telecom GSM'},
'556498407':{'en': 'Brasil Telecom GSM'},
'556498408':{'en': 'Brasil Telecom GSM'},
'556498409':{'en': 'Brasil Telecom GSM'},
'556498411':{'en': 'Brasil Telecom GSM'},
'556498412':{'en': 'Brasil Telecom GSM'},
'556498413':{'en': 'Brasil Telecom GSM'},
'556498414':{'en': 'Brasil Telecom GSM'},
'556498415':{'en': 'Brasil Telecom GSM'},
'556498416':{'en': 'Brasil Telecom GSM'},
'556498417':{'en': 'Brasil Telecom GSM'},
'556498418':{'en': 'Brasil Telecom GSM'},
'556498419':{'en': 'Brasil Telecom GSM'},
'556498421':{'en': 'Brasil Telecom GSM'},
'556498422':{'en': 'Brasil Telecom GSM'},
'556498423':{'en': 'Brasil Telecom GSM'},
'556498424':{'en': 'Brasil Telecom GSM'},
'556498425':{'en': 'Brasil Telecom GSM'},
'556498426':{'en': 'Brasil Telecom GSM'},
'556498427':{'en': 'Brasil Telecom GSM'},
'556498428':{'en': 'Brasil Telecom GSM'},
'556498429':{'en': 'Brasil Telecom GSM'},
'556498431':{'en': 'Brasil Telecom GSM'},
'556498432':{'en': 'Brasil Telecom GSM'},
'556498433':{'en': 'Brasil Telecom GSM'},
'556498434':{'en': 'Brasil Telecom GSM'},
'556498435':{'en': 'Brasil Telecom GSM'},
'556498436':{'en': 'Brasil Telecom GSM'},
'556498437':{'en': 'Brasil Telecom GSM'},
'556498438':{'en': 'Brasil Telecom GSM'},
'556498439':{'en': 'Brasil Telecom GSM'},
'556498441':{'en': 'Brasil Telecom GSM'},
'556499606':{'en': 'Vivo'},
'556499607':{'en': 'Vivo'},
'556499611':{'en': 'Vivo'},
'556499618':{'en': 'Vivo'},
'556499623':{'en': 'Vivo'},
'556499624':{'en': 'Vivo'},
'556499625':{'en': 'Vivo'},
'556499626':{'en': 'Vivo'},
'556499627':{'en': 'Vivo'},
'556499641':{'en': 'Vivo'},
'556499642':{'en': 'Vivo'},
'556499643':{'en': 'Vivo'},
'556499644':{'en': 'Vivo'},
'556499645':{'en': 'Vivo'},
'556499646':{'en': 'Vivo'},
'556499647':{'en': 'Vivo'},
'556499648':{'en': 'Vivo'},
'556499652':{'en': 'Vivo'},
'556499653':{'en': 'Vivo'},
'556499654':{'en': 'Vivo'},
'556499655':{'en': 'Vivo'},
'556499658':{'en': 'Vivo'},
'556499671':{'en': 'Vivo'},
'556499675':{'en': 'Vivo'},
'556499676':{'en': 'Vivo'},
'556499695':{'en': 'Vivo'},
'556499699':{'en': 'Vivo'},
'556499902':{'en': 'Vivo'},
'556499905':{'en': 'Vivo'},
'556499906':{'en': 'Vivo'},
'556499907':{'en': 'Vivo'},
'556499911':{'en': 'Vivo'},
'556499931':{'en': 'Vivo'},
'556499935':{'en': 'Vivo'},
'556499937':{'en': 'Vivo'},
'556499938':{'en': 'Vivo'},
'556499939':{'en': 'Vivo'},
'556499941':{'en': 'Vivo'},
'556499942':{'en': 'Vivo'},
'556499943':{'en': 'Vivo'},
'556499944':{'en': 'Vivo'},
'556499945':{'en': 'Vivo'},
'556499946':{'en': 'Vivo'},
'556499947':{'en': 'Vivo'},
'556499948':{'en': 'Vivo'},
'556499949':{'en': 'Vivo'},
'556499951':{'en': 'Vivo'},
'556499952':{'en': 'Vivo'},
'556499953':{'en': 'Vivo'},
'556499954':{'en': 'Vivo'},
'556499955':{'en': 'Vivo'},
'556499956':{'en': 'Vivo'},
'556499957':{'en': 'Vivo'},
'556499958':{'en': 'Vivo'},
'556499959':{'en': 'Vivo'},
'556499961':{'en': 'Vivo'},
'556499962':{'en': 'Vivo'},
'556499963':{'en': 'Vivo'},
'556499964':{'en': 'Vivo'},
'556499965':{'en': 'Vivo'},
'556499966':{'en': 'Vivo'},
'556499967':{'en': 'Vivo'},
'556499968':{'en': 'Vivo'},
'556499969':{'en': 'Vivo'},
'55649997':{'en': 'Vivo'},
'55649998':{'en': 'Vivo'},
'556499991':{'en': 'Vivo'},
'556499994':{'en': 'Vivo'},
'556499995':{'en': 'Vivo'},
'556499996':{'en': 'Vivo'},
'556499997':{'en': 'Vivo'},
'556499998':{'en': 'Vivo'},
'556598111':{'en': 'TIM'},
'556598112':{'en': 'TIM'},
'556598113':{'en': 'TIM'},
'556598114':{'en': 'TIM'},
'556598115':{'en': 'TIM'},
'556598116':{'en': 'TIM'},
'556598117':{'en': 'TIM'},
'556598118':{'en': 'TIM'},
'556598119':{'en': 'TIM'},
'556598121':{'en': 'TIM'},
'556598122':{'en': 'TIM'},
'556598123':{'en': 'TIM'},
'556598124':{'en': 'TIM'},
'556598125':{'en': 'TIM'},
'556598126':{'en': 'TIM'},
'556598127':{'en': 'TIM'},
'556598128':{'en': 'TIM'},
'556598129':{'en': 'TIM'},
'556598131':{'en': 'TIM'},
'556598132':{'en': 'TIM'},
'556598133':{'en': 'TIM'},
'556598134':{'en': 'TIM'},
'556598135':{'en': 'TIM'},
'556598136':{'en': 'TIM'},
'556598137':{'en': 'TIM'},
'556598138':{'en': 'TIM'},
'556598139':{'en': 'TIM'},
'556598141':{'en': 'TIM'},
'556598142':{'en': 'TIM'},
'556598401':{'en': 'Brasil Telecom GSM'},
'556598402':{'en': 'Brasil Telecom GSM'},
'556598403':{'en': 'Brasil Telecom GSM'},
'556598404':{'en': 'Brasil Telecom GSM'},
'556598405':{'en': 'Brasil Telecom GSM'},
'556598406':{'en': 'Brasil Telecom GSM'},
'556598407':{'en': 'Brasil Telecom GSM'},
'556598408':{'en': 'Brasil Telecom GSM'},
'556598409':{'en': 'Brasil Telecom GSM'},
'556598411':{'en': 'Brasil Telecom GSM'},
'556598412':{'en': 'Brasil Telecom GSM'},
'556598413':{'en': 'Brasil Telecom GSM'},
'556598414':{'en': 'Brasil Telecom GSM'},
'556598415':{'en': 'Brasil Telecom GSM'},
'556598416':{'en': 'Brasil Telecom GSM'},
'556598417':{'en': 'Brasil Telecom GSM'},
'556598418':{'en': 'Brasil Telecom GSM'},
'556598419':{'en': 'Brasil Telecom GSM'},
'556598421':{'en': 'Brasil Telecom GSM'},
'556598422':{'en': 'Brasil Telecom GSM'},
'556598423':{'en': 'Brasil Telecom GSM'},
'556598424':{'en': 'Brasil Telecom GSM'},
'556598425':{'en': 'Brasil Telecom GSM'},
'556598426':{'en': 'Brasil Telecom GSM'},
'556598427':{'en': 'Brasil Telecom GSM'},
'556598428':{'en': 'Brasil Telecom GSM'},
'556598429':{'en': 'Brasil Telecom GSM'},
'556598431':{'en': 'Brasil Telecom GSM'},
'556598432':{'en': 'Brasil Telecom GSM'},
'556598433':{'en': 'Brasil Telecom GSM'},
'556598434':{'en': 'Brasil Telecom GSM'},
'556598435':{'en': 'Brasil Telecom GSM'},
'556598436':{'en': 'Brasil Telecom GSM'},
'556598437':{'en': 'Brasil Telecom GSM'},
'556598438':{'en': 'Brasil Telecom GSM'},
'556598439':{'en': 'Brasil Telecom GSM'},
'556598441':{'en': 'Brasil Telecom GSM'},
'556598442':{'en': 'Brasil Telecom GSM'},
'556598443':{'en': 'Brasil Telecom GSM'},
'556598444':{'en': 'Brasil Telecom GSM'},
'556598445':{'en': 'Brasil Telecom GSM'},
'556598446':{'en': 'Brasil Telecom GSM'},
'556598447':{'en': 'Brasil Telecom GSM'},
'556598448':{'en': 'Brasil Telecom GSM'},
'556598449':{'en': 'Brasil Telecom GSM'},
'556598451':{'en': 'Brasil Telecom GSM'},
'556598452':{'en': 'Brasil Telecom GSM'},
'556598453':{'en': 'Brasil Telecom GSM'},
'556598454':{'en': 'Brasil Telecom GSM'},
'556598455':{'en': 'Brasil Telecom GSM'},
'556598456':{'en': 'Brasil Telecom GSM'},
'556598457':{'en': 'Brasil Telecom GSM'},
'556599601':{'en': 'Vivo'},
'556599602':{'en': 'Vivo'},
'556599603':{'en': 'Vivo'},
'556599604':{'en': 'Vivo'},
'556599605':{'en': 'Vivo'},
'556599606':{'en': 'Vivo'},
'556599607':{'en': 'Vivo'},
'556599608':{'en': 'Vivo'},
'556599609':{'en': 'Vivo'},
'556599611':{'en': 'Vivo'},
'556599612':{'en': 'Vivo'},
'556599613':{'en': 'Vivo'},
'556599614':{'en': 'Vivo'},
'556599615':{'en': 'Vivo'},
'556599616':{'en': 'Vivo'},
'556599617':{'en': 'Vivo'},
'556599618':{'en': 'Vivo'},
'556599619':{'en': 'Vivo'},
'556599621':{'en': 'Vivo'},
'556599622':{'en': 'Vivo'},
'556599623':{'en': 'Vivo'},
'556599624':{'en': 'Vivo'},
'556599625':{'en': 'Vivo'},
'556599626':{'en': 'Vivo'},
'556599627':{'en': 'Vivo'},
'556599628':{'en': 'Vivo'},
'556599629':{'en': 'Vivo'},
'556599631':{'en': 'Vivo'},
'556599632':{'en': 'Vivo'},
'556599633':{'en': 'Vivo'},
'556599634':{'en': 'Vivo'},
'556599635':{'en': 'Vivo'},
'556599636':{'en': 'Vivo'},
'556599637':{'en': 'Vivo'},
'556599638':{'en': 'Vivo'},
'556599639':{'en': 'Vivo'},
'556599641':{'en': 'Vivo'},
'556599642':{'en': 'Vivo'},
'556599643':{'en': 'Vivo'},
'556599644':{'en': 'Vivo'},
'556599645':{'en': 'Vivo'},
'556599646':{'en': 'Vivo'},
'556599647':{'en': 'Vivo'},
'556599648':{'en': 'Vivo'},
'556599649':{'en': 'Vivo'},
'556599651':{'en': 'Vivo'},
'556599652':{'en': 'Vivo'},
'556599653':{'en': 'Vivo'},
'556599654':{'en': 'Vivo'},
'556599655':{'en': 'Vivo'},
'556599656':{'en': 'Vivo'},
'556599657':{'en': 'Vivo'},
'556599658':{'en': 'Vivo'},
'556599659':{'en': 'Vivo'},
'556599661':{'en': 'Vivo'},
'556599662':{'en': 'Vivo'},
'556599663':{'en': 'Vivo'},
'556599664':{'en': 'Vivo'},
'556599665':{'en': 'Vivo'},
'556599666':{'en': 'Vivo'},
'556599667':{'en': 'Vivo'},
'556599668':{'en': 'Vivo'},
'556599669':{'en': 'Vivo'},
'556599671':{'en': 'Vivo'},
'556599672':{'en': 'Vivo'},
'556599673':{'en': 'Vivo'},
'556599674':{'en': 'Vivo'},
'556599675':{'en': 'Vivo'},
'556599676':{'en': 'Vivo'},
'556599677':{'en': 'Vivo'},
'556599901':{'en': 'Vivo'},
'556599902':{'en': 'Vivo'},
'556599903':{'en': 'Vivo'},
'556599904':{'en': 'Vivo'},
'556599905':{'en': 'Vivo'},
'556599906':{'en': 'Vivo'},
'556599907':{'en': 'Vivo'},
'556599908':{'en': 'Vivo'},
'556599909':{'en': 'Vivo'},
'556599911':{'en': 'Vivo'},
'556599912':{'en': 'Vivo'},
'556599913':{'en': 'Vivo'},
'556599914':{'en': 'Vivo'},
'556599915':{'en': 'Vivo'},
'556599916':{'en': 'Vivo'},
'556599917':{'en': 'Vivo'},
'556599918':{'en': 'Vivo'},
'556599919':{'en': 'Vivo'},
'556599921':{'en': 'Vivo'},
'556599922':{'en': 'Vivo'},
'556599923':{'en': 'Vivo'},
'556599924':{'en': 'Vivo'},
'556599925':{'en': 'Vivo'},
'556599926':{'en': 'Vivo'},
'556599927':{'en': 'Vivo'},
'556599928':{'en': 'Vivo'},
'556599929':{'en': 'Vivo'},
'556599931':{'en': 'Vivo'},
'556599932':{'en': 'Vivo'},
'556599933':{'en': 'Vivo'},
'556599934':{'en': 'Vivo'},
'556599935':{'en': 'Vivo'},
'556599936':{'en': 'Vivo'},
'556599937':{'en': 'Vivo'},
'556599938':{'en': 'Vivo'},
'556599939':{'en': 'Vivo'},
'556599941':{'en': 'Vivo'},
'556599942':{'en': 'Vivo'},
'556599943':{'en': 'Vivo'},
'556599944':{'en': 'Vivo'},
'556599945':{'en': 'Vivo'},
'556599946':{'en': 'Vivo'},
'556599947':{'en': 'Vivo'},
'556599948':{'en': 'Vivo'},
'556599949':{'en': 'Vivo'},
'556599951':{'en': 'Vivo'},
'556599952':{'en': 'Vivo'},
'556599953':{'en': 'Vivo'},
'556599954':{'en': 'Vivo'},
'556599955':{'en': 'Vivo'},
'556599956':{'en': 'Vivo'},
'556599957':{'en': 'Vivo'},
'556599958':{'en': 'Vivo'},
'556599959':{'en': 'Vivo'},
'55659996':{'en': 'Vivo'},
'55659997':{'en': 'Vivo'},
'55659998':{'en': 'Vivo'},
'556599991':{'en': 'Vivo'},
'556599992':{'en': 'Vivo'},
'556599993':{'en': 'Vivo'},
'556599994':{'en': 'Vivo'},
'556599995':{'en': 'Vivo'},
'556599996':{'en': 'Vivo'},
'556599997':{'en': 'Vivo'},
'556599998':{'en': 'Vivo'},
'556599999':{'en': 'Vivo'},
'556698111':{'en': 'TIM'},
'556698112':{'en': 'TIM'},
'556698113':{'en': 'TIM'},
'556698114':{'en': 'TIM'},
'556698115':{'en': 'TIM'},
'556698116':{'en': 'TIM'},
'556698117':{'en': 'TIM'},
'556698118':{'en': 'TIM'},
'556698119':{'en': 'TIM'},
'556698121':{'en': 'TIM'},
'556698122':{'en': 'TIM'},
'556698123':{'en': 'TIM'},
'556698124':{'en': 'TIM'},
'556698125':{'en': 'TIM'},
'556698126':{'en': 'TIM'},
'556698127':{'en': 'TIM'},
'556698128':{'en': 'TIM'},
'556698129':{'en': 'TIM'},
'556698131':{'en': 'TIM'},
'556698132':{'en': 'TIM'},
'556698401':{'en': 'Brasil Telecom GSM'},
'556698402':{'en': 'Brasil Telecom GSM'},
'556698403':{'en': 'Brasil Telecom GSM'},
'556698404':{'en': 'Brasil Telecom GSM'},
'556698405':{'en': 'Brasil Telecom GSM'},
'556698406':{'en': 'Brasil Telecom GSM'},
'556698407':{'en': 'Brasil Telecom GSM'},
'556698408':{'en': 'Brasil Telecom GSM'},
'556698409':{'en': 'Brasil Telecom GSM'},
'556698411':{'en': 'Brasil Telecom GSM'},
'556698412':{'en': 'Brasil Telecom GSM'},
'556698413':{'en': 'Brasil Telecom GSM'},
'556698414':{'en': 'Brasil Telecom GSM'},
'556698415':{'en': 'Brasil Telecom GSM'},
'556698416':{'en': 'Brasil Telecom GSM'},
'556698417':{'en': 'Brasil Telecom GSM'},
'556698418':{'en': 'Brasil Telecom GSM'},
'556698419':{'en': 'Brasil Telecom GSM'},
'556698421':{'en': 'Brasil Telecom GSM'},
'556698422':{'en': 'Brasil Telecom GSM'},
'556698423':{'en': 'Brasil Telecom GSM'},
'556698424':{'en': 'Brasil Telecom GSM'},
'556698425':{'en': 'Brasil Telecom GSM'},
'556698426':{'en': 'Brasil Telecom GSM'},
'556698427':{'en': 'Brasil Telecom GSM'},
'556698428':{'en': 'Brasil Telecom GSM'},
'5566996':{'en': 'Vivo'},
'556699901':{'en': 'Vivo'},
'556699902':{'en': 'Vivo'},
'556699903':{'en': 'Vivo'},
'556699904':{'en': 'Vivo'},
'556699905':{'en': 'Vivo'},
'556699906':{'en': 'Vivo'},
'556699907':{'en': 'Vivo'},
'556699908':{'en': 'Vivo'},
'556699909':{'en': 'Vivo'},
'556699911':{'en': 'Vivo'},
'556699912':{'en': 'Vivo'},
'556699913':{'en': 'Vivo'},
'556699951':{'en': 'Vivo'},
'556699952':{'en': 'Vivo'},
'556699953':{'en': 'Vivo'},
'556699954':{'en': 'Vivo'},
'556699955':{'en': 'Vivo'},
'556699956':{'en': 'Vivo'},
'556699957':{'en': 'Vivo'},
'556699958':{'en': 'Vivo'},
'556699959':{'en': 'Vivo'},
'556699961':{'en': 'Vivo'},
'556699962':{'en': 'Vivo'},
'556699963':{'en': 'Vivo'},
'556699964':{'en': 'Vivo'},
'556699965':{'en': 'Vivo'},
'556699966':{'en': 'Vivo'},
'556699967':{'en': 'Vivo'},
'556699968':{'en': 'Vivo'},
'556699969':{'en': 'Vivo'},
'55669997':{'en': 'Vivo'},
'55669998':{'en': 'Vivo'},
'556699991':{'en': 'Vivo'},
'556699992':{'en': 'Vivo'},
'556699993':{'en': 'Vivo'},
'556699994':{'en': 'Vivo'},
'556699995':{'en': 'Vivo'},
'556699996':{'en': 'Vivo'},
'556699997':{'en': 'Vivo'},
'556699998':{'en': 'Vivo'},
'556699999':{'en': 'Vivo'},
'556798111':{'en': 'TIM'},
'556798112':{'en': 'TIM'},
'556798113':{'en': 'TIM'},
'556798114':{'en': 'TIM'},
'556798115':{'en': 'TIM'},
'556798116':{'en': 'TIM'},
'556798117':{'en': 'TIM'},
'556798118':{'en': 'TIM'},
'556798119':{'en': 'TIM'},
'556798121':{'en': 'TIM'},
'556798122':{'en': 'TIM'},
'556798123':{'en': 'TIM'},
'556798124':{'en': 'TIM'},
'556798125':{'en': 'TIM'},
'556798126':{'en': 'TIM'},
'556798127':{'en': 'TIM'},
'556798128':{'en': 'TIM'},
'556798129':{'en': 'TIM'},
'556798131':{'en': 'TIM'},
'556798132':{'en': 'TIM'},
'556798133':{'en': 'TIM'},
'556798134':{'en': 'TIM'},
'556798135':{'en': 'TIM'},
'556798136':{'en': 'TIM'},
'556798137':{'en': 'TIM'},
'556798138':{'en': 'TIM'},
'556798139':{'en': 'TIM'},
'556798141':{'en': 'TIM'},
'556798142':{'en': 'TIM'},
'556798143':{'en': 'TIM'},
'556798144':{'en': 'TIM'},
'556798145':{'en': 'TIM'},
'556798146':{'en': 'TIM'},
'556798147':{'en': 'TIM'},
'556798148':{'en': 'TIM'},
'556798149':{'en': 'TIM'},
'556798151':{'en': 'TIM'},
'556798152':{'en': 'TIM'},
'556798153':{'en': 'TIM'},
'556798154':{'en': 'TIM'},
'556798155':{'en': 'TIM'},
'556798156':{'en': 'TIM'},
'556798157':{'en': 'TIM'},
'556798158':{'en': 'TIM'},
'556798159':{'en': 'TIM'},
'556798161':{'en': 'TIM'},
'556798162':{'en': 'TIM'},
'556798163':{'en': 'TIM'},
'556798164':{'en': 'TIM'},
'556798167':{'en': 'TIM'},
'556798401':{'en': 'Brasil Telecom GSM'},
'556798402':{'en': 'Brasil Telecom GSM'},
'556798403':{'en': 'Brasil Telecom GSM'},
'556798404':{'en': 'Brasil Telecom GSM'},
'556798405':{'en': 'Brasil Telecom GSM'},
'556798406':{'en': 'Brasil Telecom GSM'},
'556798407':{'en': 'Brasil Telecom GSM'},
'556798408':{'en': 'Brasil Telecom GSM'},
'556798409':{'en': 'Brasil Telecom GSM'},
'556798411':{'en': 'Brasil Telecom GSM'},
'556798412':{'en': 'Brasil Telecom GSM'},
'556798413':{'en': 'Brasil Telecom GSM'},
'556798414':{'en': 'Brasil Telecom GSM'},
'556798415':{'en': 'Brasil Telecom GSM'},
'556798416':{'en': 'Brasil Telecom GSM'},
'556798417':{'en': 'Brasil Telecom GSM'},
'556798418':{'en': 'Brasil Telecom GSM'},
'556798419':{'en': 'Brasil Telecom GSM'},
'556798421':{'en': 'Brasil Telecom GSM'},
'556798422':{'en': 'Brasil Telecom GSM'},
'556798423':{'en': 'Brasil Telecom GSM'},
'556798424':{'en': 'Brasil Telecom GSM'},
'556798425':{'en': 'Brasil Telecom GSM'},
'556798426':{'en': 'Brasil Telecom GSM'},
'556798427':{'en': 'Brasil Telecom GSM'},
'556798428':{'en': 'Brasil Telecom GSM'},
'556798429':{'en': 'Brasil Telecom GSM'},
'556798431':{'en': 'Brasil Telecom GSM'},
'556798432':{'en': 'Brasil Telecom GSM'},
'556798433':{'en': 'Brasil Telecom GSM'},
'556798434':{'en': 'Brasil Telecom GSM'},
'556798435':{'en': 'Brasil Telecom GSM'},
'556798436':{'en': 'Brasil Telecom GSM'},
'556798437':{'en': 'Brasil Telecom GSM'},
'556798438':{'en': 'Brasil Telecom GSM'},
'556798439':{'en': 'Brasil Telecom GSM'},
'556798441':{'en': 'Brasil Telecom GSM'},
'556798442':{'en': 'Brasil Telecom GSM'},
'556798443':{'en': 'Brasil Telecom GSM'},
'556798444':{'en': 'Brasil Telecom GSM'},
'556798445':{'en': 'Brasil Telecom GSM'},
'556798446':{'en': 'Brasil Telecom GSM'},
'556798447':{'en': 'Brasil Telecom GSM'},
'556798448':{'en': 'Brasil Telecom GSM'},
'556798449':{'en': 'Brasil Telecom GSM'},
'556798451':{'en': 'Brasil Telecom GSM'},
'556798452':{'en': 'Brasil Telecom GSM'},
'556798453':{'en': 'Brasil Telecom GSM'},
'556798454':{'en': 'Brasil Telecom GSM'},
'5567996':{'en': 'Vivo'},
'55679980':{'en': 'Vivo'},
'556799810':{'en': 'Vivo'},
'556799811':{'en': 'Vivo'},
'556799812':{'en': 'Vivo'},
'556799813':{'en': 'Vivo'},
'556799814':{'en': 'Vivo'},
'5567999':{'en': 'Vivo'},
'556898111':{'en': 'TIM'},
'556898112':{'en': 'TIM'},
'556898113':{'en': 'TIM'},
'556898114':{'en': 'TIM'},
'556898115':{'en': 'TIM'},
'556898117':{'en': 'TIM'},
'556898118':{'en': 'TIM'},
'556898119':{'en': 'TIM'},
'556898121':{'en': 'TIM'},
'556898401':{'en': 'Brasil Telecom GSM'},
'556898402':{'en': 'Brasil Telecom GSM'},
'556898403':{'en': 'Brasil Telecom GSM'},
'556898404':{'en': 'Brasil Telecom GSM'},
'556898405':{'en': 'Brasil Telecom GSM'},
'556898406':{'en': 'Brasil Telecom GSM'},
'556898407':{'en': 'Brasil Telecom GSM'},
'556898408':{'en': 'Brasil Telecom GSM'},
'556898409':{'en': 'Brasil Telecom GSM'},
'556898411':{'en': 'Brasil Telecom GSM'},
'556898412':{'en': 'Brasil Telecom GSM'},
'556898413':{'en': 'Brasil Telecom GSM'},
'556898414':{'en': 'Brasil Telecom GSM'},
'556898415':{'en': 'Brasil Telecom GSM'},
'556898416':{'en': 'Brasil Telecom GSM'},
'556898417':{'en': 'Brasil Telecom GSM'},
'556898418':{'en': 'Brasil Telecom GSM'},
'556899911':{'en': 'Vivo'},
'556899931':{'en': 'Vivo'},
'556899932':{'en': 'Vivo'},
'556899933':{'en': 'Vivo'},
'556899934':{'en': 'Vivo'},
'556899935':{'en': 'Vivo'},
'556899936':{'en': 'Vivo'},
'556899937':{'en': 'Vivo'},
'556899938':{'en': 'Vivo'},
'556899939':{'en': 'Vivo'},
'556899941':{'en': 'Vivo'},
'556899942':{'en': 'Vivo'},
'556899943':{'en': 'Vivo'},
'556899944':{'en': 'Vivo'},
'556899945':{'en': 'Vivo'},
'556899946':{'en': 'Vivo'},
'556899947':{'en': 'Vivo'},
'556899948':{'en': 'Vivo'},
'556899949':{'en': 'Vivo'},
'556899951':{'en': 'Vivo'},
'556899952':{'en': 'Vivo'},
'556899953':{'en': 'Vivo'},
'556899954':{'en': 'Vivo'},
'556899955':{'en': 'Vivo'},
'556899956':{'en': 'Vivo'},
'556899957':{'en': 'Vivo'},
'556899958':{'en': 'Vivo'},
'556899959':{'en': 'Vivo'},
'556899961':{'en': 'Vivo'},
'556899962':{'en': 'Vivo'},
'556899963':{'en': 'Vivo'},
'556899964':{'en': 'Vivo'},
'556899965':{'en': 'Vivo'},
'556899966':{'en': 'Vivo'},
'556899967':{'en': 'Vivo'},
'556899968':{'en': 'Vivo'},
'556899969':{'en': 'Vivo'},
'55689997':{'en': 'Vivo'},
'55689998':{'en': 'Vivo'},
'556899991':{'en': 'Vivo'},
'556899992':{'en': 'Vivo'},
'556899993':{'en': 'Vivo'},
'556899994':{'en': 'Vivo'},
'556899995':{'en': 'Vivo'},
'556899996':{'en': 'Vivo'},
'556899997':{'en': 'Vivo'},
'556899998':{'en': 'Vivo'},
'556899999':{'en': 'Vivo'},
'556998111':{'en': 'TIM'},
'556998112':{'en': 'TIM'},
'556998113':{'en': 'TIM'},
'556998114':{'en': 'TIM'},
'556998115':{'en': 'TIM'},
'556998116':{'en': 'TIM'},
'556998117':{'en': 'TIM'},
'556998118':{'en': 'TIM'},
'556998119':{'en': 'TIM'},
'556998121':{'en': 'TIM'},
'556998122':{'en': 'TIM'},
'556998123':{'en': 'TIM'},
'556998124':{'en': 'TIM'},
'556998125':{'en': 'TIM'},
'556998126':{'en': 'TIM'},
'556998127':{'en': 'TIM'},
'556998128':{'en': 'TIM'},
'556998401':{'en': 'Brasil Telecom GSM'},
'556998402':{'en': 'Brasil Telecom GSM'},
'556998403':{'en': 'Brasil Telecom GSM'},
'556998404':{'en': 'Brasil Telecom GSM'},
'556998405':{'en': 'Brasil Telecom GSM'},
'556998406':{'en': 'Brasil Telecom GSM'},
'556998407':{'en': 'Brasil Telecom GSM'},
'556998408':{'en': 'Brasil Telecom GSM'},
'556998409':{'en': 'Brasil Telecom GSM'},
'556998411':{'en': 'Brasil Telecom GSM'},
'556998412':{'en': 'Brasil Telecom GSM'},
'556998413':{'en': 'Brasil Telecom GSM'},
'556998414':{'en': 'Brasil Telecom GSM'},
'556998415':{'en': 'Brasil Telecom GSM'},
'556998416':{'en': 'Brasil Telecom GSM'},
'556998417':{'en': 'Brasil Telecom GSM'},
'556998418':{'en': 'Brasil Telecom GSM'},
'556998419':{'en': 'Brasil Telecom GSM'},
'556998421':{'en': 'Brasil Telecom GSM'},
'556998422':{'en': 'Brasil Telecom GSM'},
'556998423':{'en': 'Brasil Telecom GSM'},
'556998424':{'en': 'Brasil Telecom GSM'},
'556998425':{'en': 'Brasil Telecom GSM'},
'556998426':{'en': 'Brasil Telecom GSM'},
'556998427':{'en': 'Brasil Telecom GSM'},
'556998428':{'en': 'Brasil Telecom GSM'},
'556998429':{'en': 'Brasil Telecom GSM'},
'556998431':{'en': 'Brasil Telecom GSM'},
'556998432':{'en': 'Brasil Telecom GSM'},
'556998433':{'en': 'Brasil Telecom GSM'},
'556998434':{'en': 'Brasil Telecom GSM'},
'556998435':{'en': 'Brasil Telecom GSM'},
'556998436':{'en': 'Brasil Telecom GSM'},
'556998437':{'en': 'Brasil Telecom GSM'},
'556998438':{'en': 'Brasil Telecom GSM'},
'556998439':{'en': 'Brasil Telecom GSM'},
'556998441':{'en': 'Brasil Telecom GSM'},
'556998442':{'en': 'Brasil Telecom GSM'},
'556998443':{'en': 'Brasil Telecom GSM'},
'556998444':{'en': 'Brasil Telecom GSM'},
'556998445':{'en': 'Brasil Telecom GSM'},
'556998446':{'en': 'Brasil Telecom GSM'},
'556998447':{'en': 'Brasil Telecom GSM'},
'556998448':{'en': 'Brasil Telecom GSM'},
'556998449':{'en': 'Brasil Telecom GSM'},
'556998451':{'en': 'Brasil Telecom GSM'},
'556998452':{'en': 'Brasil Telecom GSM'},
'556998453':{'en': 'Brasil Telecom GSM'},
'556998454':{'en': 'Brasil Telecom GSM'},
'556998455':{'en': 'Brasil Telecom GSM'},
'556998456':{'en': 'Brasil Telecom GSM'},
'556998457':{'en': 'Brasil Telecom GSM'},
'556998458':{'en': 'Brasil Telecom GSM'},
'556998459':{'en': 'Brasil Telecom GSM'},
'556998461':{'en': 'Brasil Telecom GSM'},
'556998462':{'en': 'Brasil Telecom GSM'},
'556998463':{'en': 'Brasil Telecom GSM'},
'556998465':{'en': 'Brasil Telecom GSM'},
'556998466':{'en': 'Brasil Telecom GSM'},
'556998467':{'en': 'Brasil Telecom GSM'},
'556999901':{'en': 'Vivo'},
'556999902':{'en': 'Vivo'},
'556999903':{'en': 'Vivo'},
'556999904':{'en': 'Vivo'},
'556999905':{'en': 'Vivo'},
'556999906':{'en': 'Vivo'},
'556999907':{'en': 'Vivo'},
'556999908':{'en': 'Vivo'},
'556999909':{'en': 'Vivo'},
'556999911':{'en': 'Vivo'},
'556999912':{'en': 'Vivo'},
'556999913':{'en': 'Vivo'},
'556999914':{'en': 'Vivo'},
'556999915':{'en': 'Vivo'},
'556999916':{'en': 'Vivo'},
'556999917':{'en': 'Vivo'},
'556999918':{'en': 'Vivo'},
'556999919':{'en': 'Vivo'},
'556999921':{'en': 'Vivo'},
'556999922':{'en': 'Vivo'},
'556999923':{'en': 'Vivo'},
'556999924':{'en': 'Vivo'},
'556999925':{'en': 'Vivo'},
'556999926':{'en': 'Vivo'},
'556999927':{'en': 'Vivo'},
'556999928':{'en': 'Vivo'},
'556999929':{'en': 'Vivo'},
'556999931':{'en': 'Vivo'},
'556999932':{'en': 'Vivo'},
'556999951':{'en': 'Vivo'},
'556999952':{'en': 'Vivo'},
'556999953':{'en': 'Vivo'},
'556999954':{'en': 'Vivo'},
'556999955':{'en': 'Vivo'},
'556999956':{'en': 'Vivo'},
'556999957':{'en': 'Vivo'},
'556999958':{'en': 'Vivo'},
'556999959':{'en': 'Vivo'},
'55699996':{'en': 'Vivo'},
'55699997':{'en': 'Vivo'},
'55699998':{'en': 'Vivo'},
'556999991':{'en': 'Vivo'},
'556999992':{'en': 'Vivo'},
'556999993':{'en': 'Vivo'},
'556999994':{'en': 'Vivo'},
'556999995':{'en': 'Vivo'},
'556999996':{'en': 'Vivo'},
'556999997':{'en': 'Vivo'},
'556999998':{'en': 'Vivo'},
'556999999':{'en': 'Vivo'},
'5571981':{'en': 'Claro'},
'5571982':{'en': 'Claro'},
'55719830':{'en': 'Claro'},
'55719831':{'en': 'Claro'},
'55719832':{'en': 'Claro'},
'55719833':{'en': 'Claro'},
'55719834':{'en': 'Claro'},
'55719835':{'en': 'Claro'},
'557198360':{'en': 'Claro'},
'557198361':{'en': 'Claro'},
'557198362':{'en': 'Claro'},
'5571985':{'en': 'Oi'},
'5571986':{'en': 'Oi'},
'5571987':{'en': 'Oi'},
'5571988':{'en': 'Oi'},
'5571989':{'en': 'Oi'},
'5571991':{'en': 'TIM'},
'557199201':{'en': 'TIM'},
'557199202':{'en': 'TIM'},
'557199203':{'en': 'TIM'},
'557199204':{'en': 'TIM'},
'557199205':{'en': 'TIM'},
'557199206':{'en': 'TIM'},
'557199207':{'en': 'TIM'},
'557199208':{'en': 'TIM'},
'557199209':{'en': 'TIM'},
'557199211':{'en': 'TIM'},
'557199212':{'en': 'TIM'},
'557199213':{'en': 'TIM'},
'557199214':{'en': 'TIM'},
'557199215':{'en': 'TIM'},
'557199216':{'en': 'TIM'},
'557199217':{'en': 'TIM'},
'557199218':{'en': 'TIM'},
'557199219':{'en': 'TIM'},
'557199221':{'en': 'TIM'},
'557199222':{'en': 'TIM'},
'557199223':{'en': 'TIM'},
'557199224':{'en': 'TIM'},
'557199225':{'en': 'TIM'},
'557199226':{'en': 'TIM'},
'557199227':{'en': 'TIM'},
'557199228':{'en': 'TIM'},
'557199229':{'en': 'TIM'},
'557199231':{'en': 'TIM'},
'557199232':{'en': 'TIM'},
'557199233':{'en': 'TIM'},
'557199234':{'en': 'TIM'},
'557199235':{'en': 'TIM'},
'557199236':{'en': 'TIM'},
'557199237':{'en': 'TIM'},
'557199238':{'en': 'TIM'},
'557199239':{'en': 'TIM'},
'557199241':{'en': 'TIM'},
'557199242':{'en': 'TIM'},
'557199243':{'en': 'TIM'},
'557199244':{'en': 'TIM'},
'557199245':{'en': 'TIM'},
'557199246':{'en': 'TIM'},
'557199247':{'en': 'TIM'},
'557199248':{'en': 'TIM'},
'557199249':{'en': 'TIM'},
'557199251':{'en': 'TIM'},
'557199252':{'en': 'TIM'},
'557199253':{'en': 'TIM'},
'557199254':{'en': 'TIM'},
'557199255':{'en': 'TIM'},
'557199256':{'en': 'TIM'},
'557199257':{'en': 'TIM'},
'557199258':{'en': 'TIM'},
'557199259':{'en': 'TIM'},
'557199261':{'en': 'TIM'},
'557199262':{'en': 'TIM'},
'557199263':{'en': 'TIM'},
'557199264':{'en': 'TIM'},
'557199265':{'en': 'TIM'},
'557199266':{'en': 'TIM'},
'557199267':{'en': 'TIM'},
'557199268':{'en': 'TIM'},
'557199269':{'en': 'TIM'},
'557199271':{'en': 'TIM'},
'557199272':{'en': 'TIM'},
'557199273':{'en': 'TIM'},
'557199274':{'en': 'TIM'},
'557199275':{'en': 'TIM'},
'557199276':{'en': 'TIM'},
'557199277':{'en': 'TIM'},
'557199278':{'en': 'TIM'},
'557199279':{'en': 'TIM'},
'557199287':{'en': 'TIM'},
'55719960':{'en': 'Vivo'},
'55719961':{'en': 'Vivo'},
'55719962':{'en': 'Vivo'},
'55719963':{'en': 'Vivo'},
'55719964':{'en': 'Vivo'},
'55719965':{'en': 'Vivo'},
'55719966':{'en': 'Vivo'},
'55719967':{'en': 'Vivo'},
'55719968':{'en': 'Vivo'},
'557199690':{'en': 'Vivo'},
'557199691':{'en': 'Vivo'},
'557199692':{'en': 'Vivo'},
'557199901':{'en': 'Vivo'},
'557199902':{'en': 'Vivo'},
'557199903':{'en': 'Vivo'},
'557199904':{'en': 'Vivo'},
'557199905':{'en': 'Vivo'},
'557199906':{'en': 'Vivo'},
'557199907':{'en': 'Vivo'},
'557199908':{'en': 'Vivo'},
'557199909':{'en': 'Vivo'},
'557199911':{'en': 'Vivo'},
'557199912':{'en': 'Vivo'},
'557199913':{'en': 'Vivo'},
'557199914':{'en': 'Vivo'},
'557199915':{'en': 'Vivo'},
'557199916':{'en': 'Vivo'},
'557199917':{'en': 'Vivo'},
'557199918':{'en': 'Vivo'},
'557199919':{'en': 'Vivo'},
'557199921':{'en': 'Vivo'},
'557199922':{'en': 'Vivo'},
'557199923':{'en': 'Vivo'},
'557199924':{'en': 'Vivo'},
'557199925':{'en': 'Vivo'},
'557199926':{'en': 'Vivo'},
'557199927':{'en': 'Vivo'},
'557199928':{'en': 'Vivo'},
'557199929':{'en': 'Vivo'},
'557199931':{'en': 'Vivo'},
'557199932':{'en': 'Vivo'},
'557199933':{'en': 'Vivo'},
'557199934':{'en': 'Vivo'},
'557199935':{'en': 'Vivo'},
'557199936':{'en': 'Vivo'},
'557199937':{'en': 'Vivo'},
'557199938':{'en': 'Vivo'},
'557199939':{'en': 'Vivo'},
'557199941':{'en': 'Vivo'},
'557199942':{'en': 'Vivo'},
'557199943':{'en': 'Vivo'},
'557199944':{'en': 'Vivo'},
'557199945':{'en': 'Vivo'},
'557199946':{'en': 'Vivo'},
'557199947':{'en': 'Vivo'},
'557199948':{'en': 'Vivo'},
'557199949':{'en': 'Vivo'},
'557199951':{'en': 'Vivo'},
'557199952':{'en': 'Vivo'},
'557199953':{'en': 'Vivo'},
'557199954':{'en': 'Vivo'},
'557199955':{'en': 'Vivo'},
'557199956':{'en': 'Vivo'},
'557199957':{'en': 'Vivo'},
'557199958':{'en': 'Vivo'},
'557199959':{'en': 'Vivo'},
'55719996':{'en': 'Vivo'},
'557199971':{'en': 'Vivo'},
'557199972':{'en': 'Vivo'},
'557199973':{'en': 'Vivo'},
'557199974':{'en': 'Vivo'},
'557199975':{'en': 'Vivo'},
'557199976':{'en': 'Vivo'},
'557199977':{'en': 'Vivo'},
'557199978':{'en': 'Vivo'},
'557199979':{'en': 'Vivo'},
'557199981':{'en': 'Vivo'},
'557199982':{'en': 'Vivo'},
'557199983':{'en': 'Vivo'},
'557199984':{'en': 'Vivo'},
'557199985':{'en': 'Vivo'},
'557199986':{'en': 'Vivo'},
'557199987':{'en': 'Vivo'},
'557199988':{'en': 'Vivo'},
'557199989':{'en': 'Vivo'},
'557199991':{'en': 'Vivo'},
'557199992':{'en': 'Vivo'},
'557199993':{'en': 'Vivo'},
'557199994':{'en': 'Vivo'},
'557199995':{'en': 'Vivo'},
'557199996':{'en': 'Vivo'},
'557199997':{'en': 'Vivo'},
'557199998':{'en': 'Vivo'},
'557199999':{'en': 'Vivo'},
'55739981':{'en': 'Claro'},
'55739985':{'en': 'Oi'},
'55739986':{'en': 'Oi'},
'55739987':{'en': 'Oi'},
'55739988':{'en': 'Oi'},
'55739989':{'en': 'Oi'},
'557399911':{'en': 'TIM'},
'557399912':{'en': 'TIM'},
'557399913':{'en': 'TIM'},
'557399914':{'en': 'TIM'},
'557399915':{'en': 'TIM'},
'557399919':{'en': 'TIM'},
'557399980':{'en': 'Vivo'},
'55739999':{'en': 'Vivo'},
'55749810':{'en': 'Claro'},
'55749811':{'en': 'Claro'},
'55749812':{'en': 'Claro'},
'557498130':{'en': 'Claro'},
'557498131':{'en': 'Claro'},
'5574985':{'en': 'Oi'},
'5574986':{'en': 'Oi'},
'5574987':{'en': 'Oi'},
'5574988':{'en': 'Oi'},
'5574989':{'en': 'Oi'},
'557499115':{'en': 'TIM'},
'557499116':{'en': 'TIM'},
'557499121':{'en': 'TIM'},
'557499122':{'en': 'TIM'},
'557499123':{'en': 'TIM'},
'557499124':{'en': 'TIM'},
'557499125':{'en': 'TIM'},
'557499135':{'en': 'TIM'},
'557499147':{'en': 'TIM'},
'557499148':{'en': 'TIM'},
'557499149':{'en': 'TIM'},
'557499188':{'en': 'TIM'},
'557499189':{'en': 'TIM'},
'557499191':{'en': 'TIM'},
'557499193':{'en': 'TIM'},
'557499194':{'en': 'TIM'},
'557499195':{'en': 'TIM'},
'557499198':{'en': 'TIM'},
'557499199':{'en': 'TIM'},
'557499941':{'en': 'Vivo'},
'557499942':{'en': 'Vivo'},
'557499943':{'en': 'Vivo'},
'557499944':{'en': 'Vivo'},
'557499945':{'en': 'Vivo'},
'557499946':{'en': 'Vivo'},
'557499947':{'en': 'Vivo'},
'557499948':{'en': 'Vivo'},
'557499949':{'en': 'Vivo'},
'55749995':{'en': 'Vivo'},
'557499961':{'en': 'Vivo'},
'557499962':{'en': 'Vivo'},
'557499963':{'en': 'Vivo'},
'557499964':{'en': 'Vivo'},
'557499965':{'en': 'Vivo'},
'557499967':{'en': 'Vivo'},
'557499968':{'en': 'Vivo'},
'557499969':{'en': 'Vivo'},
'557499970':{'en': 'Vivo'},
'557499971':{'en': 'Vivo'},
'557499972':{'en': 'Vivo'},
'557499973':{'en': 'Vivo'},
'557499975':{'en': 'Vivo'},
'557499976':{'en': 'Vivo'},
'557499977':{'en': 'Vivo'},
'557499978':{'en': 'Vivo'},
'557499979':{'en': 'Vivo'},
'55749998':{'en': 'Vivo'},
'557499991':{'en': 'Vivo'},
'557499995':{'en': 'Vivo'},
'557499996':{'en': 'Vivo'},
'557499998':{'en': 'Vivo'},
'557499999':{'en': 'Vivo'},
'5575981':{'en': 'Claro'},
'55759820':{'en': 'Claro'},
'55759821':{'en': 'Claro'},
'55759822':{'en': 'Claro'},
'55759823':{'en': 'Claro'},
'55759824':{'en': 'Claro'},
'55759825':{'en': 'Claro'},
'55759826':{'en': 'Claro'},
'557598270':{'en': 'Claro'},
'557598271':{'en': 'Claro'},
'557598272':{'en': 'Claro'},
'557598273':{'en': 'Claro'},
'557598274':{'en': 'Claro'},
'557598275':{'en': 'Claro'},
'557598276':{'en': 'Claro'},
'5575985':{'en': 'Oi'},
'5575986':{'en': 'Oi'},
'5575987':{'en': 'Oi'},
'5575988':{'en': 'Oi'},
'5575989':{'en': 'Oi'},
'557599111':{'en': 'TIM'},
'557599112':{'en': 'TIM'},
'557599113':{'en': 'TIM'},
'557599114':{'en': 'TIM'},
'557599115':{'en': 'TIM'},
'557599116':{'en': 'TIM'},
'557599117':{'en': 'TIM'},
'557599118':{'en': 'TIM'},
'557599119':{'en': 'TIM'},
'557599121':{'en': 'TIM'},
'557599122':{'en': 'TIM'},
'557599123':{'en': 'TIM'},
'557599124':{'en': 'TIM'},
'557599125':{'en': 'TIM'},
'557599126':{'en': 'TIM'},
'557599127':{'en': 'TIM'},
'557599129':{'en': 'TIM'},
'557599131':{'en': 'TIM'},
'557599132':{'en': 'TIM'},
'557599133':{'en': 'TIM'},
'557599134':{'en': 'TIM'},
'557599135':{'en': 'TIM'},
'557599136':{'en': 'TIM'},
'557599137':{'en': 'TIM'},
'557599138':{'en': 'TIM'},
'557599139':{'en': 'TIM'},
'557599141':{'en': 'TIM'},
'557599142':{'en': 'TIM'},
'557599143':{'en': 'TIM'},
'557599144':{'en': 'TIM'},
'557599145':{'en': 'TIM'},
'557599146':{'en': 'TIM'},
'557599147':{'en': 'TIM'},
'557599148':{'en': 'TIM'},
'557599149':{'en': 'TIM'},
'557599165':{'en': 'TIM'},
'557599168':{'en': 'TIM'},
'557599169':{'en': 'TIM'},
'557599172':{'en': 'TIM'},
'557599173':{'en': 'TIM'},
'557599174':{'en': 'TIM'},
'557599175':{'en': 'TIM'},
'557599176':{'en': 'TIM'},
'557599177':{'en': 'TIM'},
'557599178':{'en': 'TIM'},
'557599179':{'en': 'TIM'},
'557599181':{'en': 'TIM'},
'557599182':{'en': 'TIM'},
'557599183':{'en': 'TIM'},
'557599191':{'en': 'TIM'},
'557599192':{'en': 'TIM'},
'557599193':{'en': 'TIM'},
'557599194':{'en': 'TIM'},
'557599198':{'en': 'TIM'},
'557599199':{'en': 'TIM'},
'557599801':{'en': 'Vivo'},
'557599802':{'en': 'Vivo'},
'557599803':{'en': 'Vivo'},
'557599804':{'en': 'Vivo'},
'557599805':{'en': 'Vivo'},
'557599806':{'en': 'Vivo'},
'557599807':{'en': 'Vivo'},
'557599808':{'en': 'Vivo'},
'557599809':{'en': 'Vivo'},
'55759981':{'en': 'Vivo'},
'55759982':{'en': 'Vivo'},
'55759983':{'en': 'Vivo'},
'557599840':{'en': 'Vivo'},
'557599841':{'en': 'Vivo'},
'557599842':{'en': 'Vivo'},
'557599843':{'en': 'Vivo'},
'55759990':{'en': 'Vivo'},
'55759991':{'en': 'Vivo'},
'55759992':{'en': 'Vivo'},
'55759993':{'en': 'Vivo'},
'55759994':{'en': 'Vivo'},
'55759995':{'en': 'Vivo'},
'557599961':{'en': 'Vivo'},
'557599962':{'en': 'Vivo'},
'557599963':{'en': 'Vivo'},
'557599964':{'en': 'Vivo'},
'557599965':{'en': 'Vivo'},
'557599966':{'en': 'Vivo'},
'557599967':{'en': 'Vivo'},
'557599970':{'en': 'Vivo'},
'557599972':{'en': 'Vivo'},
'557599973':{'en': 'Vivo'},
'557599975':{'en': 'Vivo'},
'557599976':{'en': 'Vivo'},
'557599977':{'en': 'Vivo'},
'557599978':{'en': 'Vivo'},
'557599979':{'en': 'Vivo'},
'557599980':{'en': 'Vivo'},
'557599981':{'en': 'Vivo'},
'557599982':{'en': 'Vivo'},
'557599983':{'en': 'Vivo'},
'557599985':{'en': 'Vivo'},
'557599986':{'en': 'Vivo'},
'557599987':{'en': 'Vivo'},
'557599988':{'en': 'Vivo'},
'557599990':{'en': 'Vivo'},
'557599992':{'en': 'Vivo'},
'557599993':{'en': 'Vivo'},
'557599994':{'en': 'Vivo'},
'557599996':{'en': 'Vivo'},
'557599997':{'en': 'Vivo'},
'557599998':{'en': 'Vivo'},
'557599999':{'en': 'Vivo'},
'55779810':{'en': 'Claro'},
'55779811':{'en': 'Claro'},
'55779812':{'en': 'Claro'},
'55779813':{'en': 'Claro'},
'55779814':{'en': 'Claro'},
'557798150':{'en': 'Claro'},
'557798151':{'en': 'Claro'},
'557798152':{'en': 'Claro'},
'557798153':{'en': 'Claro'},
'5577985':{'en': 'Oi'},
'5577986':{'en': 'Oi'},
'5577987':{'en': 'Oi'},
'5577988':{'en': 'Oi'},
'5577989':{'en': 'Oi'},
'557799115':{'en': 'TIM'},
'557799116':{'en': 'TIM'},
'557799117':{'en': 'TIM'},
'557799118':{'en': 'TIM'},
'557799119':{'en': 'TIM'},
'557799121':{'en': 'TIM'},
'557799125':{'en': 'TIM'},
'557799127':{'en': 'TIM'},
'557799128':{'en': 'TIM'},
'557799129':{'en': 'TIM'},
'557799131':{'en': 'TIM'},
'557799135':{'en': 'TIM'},
'557799136':{'en': 'TIM'},
'557799148':{'en': 'TIM'},
'557799149':{'en': 'TIM'},
'557799191':{'en': 'TIM'},
'557799193':{'en': 'TIM'},
'557799194':{'en': 'TIM'},
'557799198':{'en': 'TIM'},
'557799199':{'en': 'TIM'},
'55779980':{'en': 'Vivo'},
'557799810':{'en': 'Vivo'},
'557799811':{'en': 'Vivo'},
'557799812':{'en': 'Vivo'},
'557799813':{'en': 'Vivo'},
'557799814':{'en': 'Vivo'},
'557799815':{'en': 'Vivo'},
'55779990':{'en': 'Vivo'},
'55779991':{'en': 'Vivo'},
'55779992':{'en': 'Vivo'},
'55779993':{'en': 'Vivo'},
'55779994':{'en': 'Vivo'},
'55779995':{'en': 'Vivo'},
'557799961':{'en': 'Vivo'},
'557799962':{'en': 'Vivo'},
'557799963':{'en': 'Vivo'},
'557799964':{'en': 'Vivo'},
'557799966':{'en': 'Vivo'},
'557799967':{'en': 'Vivo'},
'557799968':{'en': 'Vivo'},
'557799969':{'en': 'Vivo'},
'557799970':{'en': 'Vivo'},
'557799971':{'en': 'Vivo'},
'557799972':{'en': 'Vivo'},
'557799973':{'en': 'Vivo'},
'557799974':{'en': 'Vivo'},
'557799975':{'en': 'Vivo'},
'557799976':{'en': 'Vivo'},
'557799977':{'en': 'Vivo'},
'557799978':{'en': 'Vivo'},
'55779998':{'en': 'Vivo'},
'557799990':{'en': 'Vivo'},
'557799992':{'en': 'Vivo'},
'557799993':{'en': 'Vivo'},
'557799999':{'en': 'Vivo'},
'55799810':{'en': 'Claro'},
'55799811':{'en': 'Claro'},
'55799812':{'en': 'Claro'},
'55799813':{'en': 'Claro'},
'55799814':{'en': 'Claro'},
'55799815':{'en': 'Claro'},
'55799816':{'en': 'Claro'},
'557998171':{'en': 'Claro'},
'557998172':{'en': 'Claro'},
'5579985':{'en': 'Oi'},
'5579986':{'en': 'Oi'},
'5579987':{'en': 'Oi'},
'5579988':{'en': 'Oi'},
'5579989':{'en': 'Oi'},
'557999116':{'en': 'TIM'},
'557999121':{'en': 'TIM'},
'557999124':{'en': 'TIM'},
'557999131':{'en': 'TIM'},
'557999132':{'en': 'TIM'},
'557999133':{'en': 'TIM'},
'557999134':{'en': 'TIM'},
'557999135':{'en': 'TIM'},
'557999136':{'en': 'TIM'},
'557999137':{'en': 'TIM'},
'557999138':{'en': 'TIM'},
'557999139':{'en': 'TIM'},
'557999141':{'en': 'TIM'},
'557999142':{'en': 'TIM'},
'557999143':{'en': 'TIM'},
'557999145':{'en': 'TIM'},
'557999147':{'en': 'TIM'},
'557999148':{'en': 'TIM'},
'557999149':{'en': 'TIM'},
'557999151':{'en': 'TIM'},
'557999152':{'en': 'TIM'},
'557999153':{'en': 'TIM'},
'557999154':{'en': 'TIM'},
'557999159':{'en': 'TIM'},
'557999191':{'en': 'TIM'},
'557999192':{'en': 'TIM'},
'557999193':{'en': 'TIM'},
'557999198':{'en': 'TIM'},
'557999199':{'en': 'TIM'},
'557999600':{'en': 'Vivo'},
'557999601':{'en': 'Vivo'},
'557999602':{'en': 'Vivo'},
'557999603':{'en': 'Vivo'},
'557999604':{'en': 'Vivo'},
'557999605':{'en': 'Vivo'},
'557999606':{'en': 'Vivo'},
'557999607':{'en': 'Vivo'},
'5579998':{'en': 'Vivo'},
'5579999':{'en': 'Vivo'},
'5581981':{'en': 'Vivo'},
'55819820':{'en': 'Vivo'},
'55819821':{'en': 'Vivo'},
'55819822':{'en': 'Vivo'},
'55819823':{'en': 'Vivo'},
'55819824':{'en': 'Vivo'},
'55819825':{'en': 'Vivo'},
'558198260':{'en': 'Vivo'},
'558198261':{'en': 'Vivo'},
'558198262':{'en': 'Vivo'},
'558198263':{'en': 'Vivo'},
'5581985':{'en': 'Oi'},
'5581986':{'en': 'Oi'},
'5581987':{'en': 'Oi'},
'5581988':{'en': 'Oi'},
'5581989':{'en': 'Oi'},
'5581991':{'en': 'Claro'},
'5581992':{'en': 'Claro'},
'55819930':{'en': 'Claro'},
'55819931':{'en': 'Claro'},
'55819932':{'en': 'Claro'},
'55819933':{'en': 'Claro'},
'55819934':{'en': 'Claro'},
'55819935':{'en': 'Claro'},
'55819936':{'en': 'Claro'},
'558199370':{'en': 'Claro'},
'558199371':{'en': 'Claro'},
'558199372':{'en': 'Claro'},
'558199373':{'en': 'Claro'},
'558199374':{'en': 'Claro'},
'558199375':{'en': 'Claro'},
'5581994':{'en': 'Claro'},
'5581996':{'en': 'TIM'},
'55819970':{'en': 'TIM'},
'55819971':{'en': 'TIM'},
'55819972':{'en': 'TIM'},
'558199730':{'en': 'TIM'},
'558199731':{'en': 'TIM'},
'558199732':{'en': 'TIM'},
'558199733':{'en': 'TIM'},
'558199734':{'en': 'TIM'},
'558199735':{'en': 'TIM'},
'558199736':{'en': 'TIM'},
'558199737':{'en': 'TIM'},
'558199738':{'en': 'TIM'},
'558199747':{'en': 'TIM'},
'558199748':{'en': 'TIM'},
'558199749':{'en': 'TIM'},
'558199750':{'en': 'TIM'},
'5581999':{'en': 'TIM'},
'55829810':{'en': 'Vivo'},
'55829811':{'en': 'Vivo'},
'55829812':{'en': 'Vivo'},
'55829813':{'en': 'Vivo'},
'558298140':{'en': 'Vivo'},
'558298141':{'en': 'Vivo'},
'558298142':{'en': 'Vivo'},
'558298143':{'en': 'Vivo'},
'5582985':{'en': 'Oi'},
'5582986':{'en': 'Oi'},
'5582987':{'en': 'Oi'},
'5582988':{'en': 'Oi'},
'5582989':{'en': 'Oi'},
'5582991':{'en': 'Claro'},
'55829930':{'en': 'Claro'},
'558299310':{'en': 'Claro'},
'558299311':{'en': 'Claro'},
'558299312':{'en': 'Claro'},
'558299313':{'en': 'Claro'},
'558299314':{'en': 'Claro'},
'558299315':{'en': 'Claro'},
'558299316':{'en': 'Claro'},
'558299317':{'en': 'Claro'},
'558299318':{'en': 'Claro'},
'55829932':{'en': 'Claro'},
'55829933':{'en': 'Claro'},
'558299340':{'en': 'Claro'},
'558299341':{'en': 'Claro'},
'558299342':{'en': 'Claro'},
'558299343':{'en': 'Claro'},
'558299351':{'en': 'Claro'},
'558299361':{'en': 'Claro'},
'558299371':{'en': 'Claro'},
'558299381':{'en': 'Claro'},
'558299444':{'en': 'Claro'},
'558299600':{'en': 'TIM'},
'558299601':{'en': 'TIM'},
'558299602':{'en': 'TIM'},
'558299603':{'en': 'TIM'},
'558299604':{'en': 'TIM'},
'558299605':{'en': 'TIM'},
'558299606':{'en': 'TIM'},
'558299607':{'en': 'TIM'},
'558299608':{'en': 'TIM'},
'558299621':{'en': 'TIM'},
'558299622':{'en': 'TIM'},
'558299623':{'en': 'TIM'},
'558299624':{'en': 'TIM'},
'558299625':{'en': 'TIM'},
'558299627':{'en': 'TIM'},
'558299628':{'en': 'TIM'},
'558299629':{'en': 'TIM'},
'558299631':{'en': 'TIM'},
'55829990':{'en': 'TIM'},
'55829991':{'en': 'TIM'},
'55829992':{'en': 'TIM'},
'55829993':{'en': 'TIM'},
'55829994':{'en': 'TIM'},
'55829995':{'en': 'TIM'},
'55829996':{'en': 'TIM'},
'55829997':{'en': 'TIM'},
'55829998':{'en': 'TIM'},
'558299991':{'en': 'TIM'},
'558299992':{'en': 'TIM'},
'558299993':{'en': 'TIM'},
'558299994':{'en': 'TIM'},
'558299995':{'en': 'TIM'},
'558299996':{'en': 'TIM'},
'558299997':{'en': 'TIM'},
'558299998':{'en': 'TIM'},
'558299999':{'en': 'TIM'},
'55839810':{'en': 'Vivo'},
'55839811':{'en': 'Vivo'},
'55839812':{'en': 'Vivo'},
'55839813':{'en': 'Vivo'},
'55839814':{'en': 'Vivo'},
'55839815':{'en': 'Vivo'},
'558398160':{'en': 'Vivo'},
'558398161':{'en': 'Vivo'},
'558398162':{'en': 'Vivo'},
'558398163':{'en': 'Vivo'},
'558398164':{'en': 'Vivo'},
'558398165':{'en': 'Vivo'},
'5583985':{'en': 'Oi'},
'5583986':{'en': 'Oi'},
'5583987':{'en': 'Oi'},
'5583988':{'en': 'Oi'},
'5583989':{'en': 'Oi'},
'5583991':{'en': 'Claro'},
'55839930':{'en': 'Claro'},
'55839931':{'en': 'Claro'},
'55839932':{'en': 'Claro'},
'558399330':{'en': 'Claro'},
'558399331':{'en': 'Claro'},
'558399332':{'en': 'Claro'},
'558399333':{'en': 'Claro'},
'558399334':{'en': 'Claro'},
'558399342':{'en': 'Claro'},
'558399352':{'en': 'Claro'},
'558399362':{'en': 'Claro'},
'558399372':{'en': 'Claro'},
'558399382':{'en': 'Claro'},
'558399444':{'en': 'Claro'},
'558399600':{'en': 'TIM'},
'558399601':{'en': 'TIM'},
'55839990':{'en': 'TIM'},
'55839991':{'en': 'TIM'},
'55839992':{'en': 'TIM'},
'55839993':{'en': 'TIM'},
'558399940':{'en': 'TIM'},
'558399941':{'en': 'TIM'},
'558399942':{'en': 'TIM'},
'558399943':{'en': 'TIM'},
'558399944':{'en': 'TIM'},
'558399945':{'en': 'TIM'},
'558399946':{'en': 'TIM'},
'558399947':{'en': 'TIM'},
'558399948':{'en': 'TIM'},
'55839995':{'en': 'TIM'},
'55839996':{'en': 'TIM'},
'55839997':{'en': 'TIM'},
'55839998':{'en': 'TIM'},
'55839999':{'en': 'TIM'},
'55849810':{'en': 'Vivo'},
'55849811':{'en': 'Vivo'},
'55849812':{'en': 'Vivo'},
'55849813':{'en': 'Vivo'},
'558498140':{'en': 'Vivo'},
'558498141':{'en': 'Vivo'},
'558498142':{'en': 'Vivo'},
'558498143':{'en': 'Vivo'},
'558498144':{'en': 'Vivo'},
'558498145':{'en': 'Vivo'},
'558498146':{'en': 'Vivo'},
'558498147':{'en': 'Vivo'},
'5584985':{'en': 'Oi'},
'5584986':{'en': 'Oi'},
'5584987':{'en': 'Oi'},
'5584988':{'en': 'Oi'},
'5584989':{'en': 'Oi'},
'5584991':{'en': 'Claro'},
'55849940':{'en': 'Claro'},
'55849941':{'en': 'Claro'},
'55849942':{'en': 'Claro'},
'55849943':{'en': 'Claro'},
'55849944':{'en': 'Claro'},
'55849945':{'en': 'Claro'},
'558499461':{'en': 'Claro'},
'558499462':{'en': 'Claro'},
'558499463':{'en': 'Claro'},
'558499464':{'en': 'Claro'},
'558499465':{'en': 'Claro'},
'558499466':{'en': 'Claro'},
'558499467':{'en': 'Claro'},
'558499468':{'en': 'Claro'},
'558499471':{'en': 'Claro'},
'558499481':{'en': 'Claro'},
'55849960':{'en': 'TIM'},
'558499610':{'en': 'TIM'},
'558499611':{'en': 'TIM'},
'558499612':{'en': 'TIM'},
'558499615':{'en': 'TIM'},
'558499616':{'en': 'TIM'},
'558499617':{'en': 'TIM'},
'558499618':{'en': 'TIM'},
'558499619':{'en': 'TIM'},
'558499620':{'en': 'TIM'},
'5584999':{'en': 'TIM'},
'5585981':{'en': 'Vivo'},
'55859820':{'en': 'Vivo'},
'558598210':{'en': 'Vivo'},
'558598211':{'en': 'Vivo'},
'558598212':{'en': 'Vivo'},
'558598213':{'en': 'Vivo'},
'558598214':{'en': 'Vivo'},
'558598215':{'en': 'Vivo'},
'558598216':{'en': 'Vivo'},
'558598217':{'en': 'Vivo'},
'5585985':{'en': 'Oi'},
'5585986':{'en': 'Oi'},
'5585987':{'en': 'Oi'},
'5585988':{'en': 'Oi'},
'5585989':{'en': 'Oi'},
'5585991':{'en': 'Claro'},
'55859920':{'en': 'Claro'},
'55859921':{'en': 'Claro'},
'55859922':{'en': 'Claro'},
'55859923':{'en': 'Claro'},
'55859924':{'en': 'Claro'},
'558599401':{'en': 'Claro'},
'558599402':{'en': 'Claro'},
'558599403':{'en': 'Claro'},
'558599404':{'en': 'Claro'},
'558599444':{'en': 'Claro'},
'558599601':{'en': 'TIM'},
'558599602':{'en': 'TIM'},
'558599603':{'en': 'TIM'},
'558599604':{'en': 'TIM'},
'558599605':{'en': 'TIM'},
'558599606':{'en': 'TIM'},
'558599607':{'en': 'TIM'},
'558599608':{'en': 'TIM'},
'558599609':{'en': 'TIM'},
'558599611':{'en': 'TIM'},
'558599612':{'en': 'TIM'},
'558599613':{'en': 'TIM'},
'558599614':{'en': 'TIM'},
'558599615':{'en': 'TIM'},
'558599616':{'en': 'TIM'},
'558599617':{'en': 'TIM'},
'558599618':{'en': 'TIM'},
'558599619':{'en': 'TIM'},
'558599621':{'en': 'TIM'},
'558599622':{'en': 'TIM'},
'558599623':{'en': 'TIM'},
'558599624':{'en': 'TIM'},
'558599625':{'en': 'TIM'},
'558599626':{'en': 'TIM'},
'558599627':{'en': 'TIM'},
'558599628':{'en': 'TIM'},
'558599629':{'en': 'TIM'},
'558599631':{'en': 'TIM'},
'558599632':{'en': 'TIM'},
'558599633':{'en': 'TIM'},
'558599634':{'en': 'TIM'},
'558599635':{'en': 'TIM'},
'558599636':{'en': 'TIM'},
'558599637':{'en': 'TIM'},
'558599638':{'en': 'TIM'},
'558599639':{'en': 'TIM'},
'558599641':{'en': 'TIM'},
'558599642':{'en': 'TIM'},
'558599643':{'en': 'TIM'},
'558599644':{'en': 'TIM'},
'558599645':{'en': 'TIM'},
'558599646':{'en': 'TIM'},
'558599647':{'en': 'TIM'},
'558599648':{'en': 'TIM'},
'558599649':{'en': 'TIM'},
'558599651':{'en': 'TIM'},
'558599652':{'en': 'TIM'},
'558599653':{'en': 'TIM'},
'558599654':{'en': 'TIM'},
'558599655':{'en': 'TIM'},
'558599656':{'en': 'TIM'},
'558599657':{'en': 'TIM'},
'558599658':{'en': 'TIM'},
'558599659':{'en': 'TIM'},
'558599661':{'en': 'TIM'},
'558599662':{'en': 'TIM'},
'558599663':{'en': 'TIM'},
'558599664':{'en': 'TIM'},
'558599665':{'en': 'TIM'},
'558599666':{'en': 'TIM'},
'558599667':{'en': 'TIM'},
'558599668':{'en': 'TIM'},
'558599669':{'en': 'TIM'},
'558599671':{'en': 'TIM'},
'558599672':{'en': 'TIM'},
'558599673':{'en': 'TIM'},
'558599674':{'en': 'TIM'},
'558599675':{'en': 'TIM'},
'558599676':{'en': 'TIM'},
'558599677':{'en': 'TIM'},
'558599901':{'en': 'TIM'},
'558599902':{'en': 'TIM'},
'558599903':{'en': 'TIM'},
'558599904':{'en': 'TIM'},
'558599905':{'en': 'TIM'},
'558599906':{'en': 'TIM'},
'558599907':{'en': 'TIM'},
'558599908':{'en': 'TIM'},
'558599909':{'en': 'TIM'},
'558599911':{'en': 'TIM'},
'558599912':{'en': 'TIM'},
'558599913':{'en': 'TIM'},
'558599914':{'en': 'TIM'},
'558599915':{'en': 'TIM'},
'558599916':{'en': 'TIM'},
'558599917':{'en': 'TIM'},
'558599918':{'en': 'TIM'},
'558599919':{'en': 'TIM'},
'558599921':{'en': 'TIM'},
'558599922':{'en': 'TIM'},
'558599923':{'en': 'TIM'},
'558599924':{'en': 'TIM'},
'558599925':{'en': 'TIM'},
'558599926':{'en': 'TIM'},
'558599927':{'en': 'TIM'},
'558599928':{'en': 'TIM'},
'558599929':{'en': 'TIM'},
'558599931':{'en': 'TIM'},
'558599932':{'en': 'TIM'},
'558599933':{'en': 'TIM'},
'558599934':{'en': 'TIM'},
'558599935':{'en': 'TIM'},
'558599936':{'en': 'TIM'},
'558599937':{'en': 'TIM'},
'558599938':{'en': 'TIM'},
'558599939':{'en': 'TIM'},
'558599941':{'en': 'TIM'},
'558599942':{'en': 'TIM'},
'558599943':{'en': 'TIM'},
'558599944':{'en': 'TIM'},
'558599945':{'en': 'TIM'},
'558599946':{'en': 'TIM'},
'558599947':{'en': 'TIM'},
'558599948':{'en': 'TIM'},
'558599949':{'en': 'TIM'},
'558599951':{'en': 'TIM'},
'558599952':{'en': 'TIM'},
'558599953':{'en': 'TIM'},
'558599954':{'en': 'TIM'},
'558599955':{'en': 'TIM'},
'558599956':{'en': 'TIM'},
'558599957':{'en': 'TIM'},
'558599958':{'en': 'TIM'},
'558599959':{'en': 'TIM'},
'558599961':{'en': 'TIM'},
'558599962':{'en': 'TIM'},
'558599963':{'en': 'TIM'},
'558599969':{'en': 'TIM'},
'558599971':{'en': 'TIM'},
'558599972':{'en': 'TIM'},
'558599973':{'en': 'TIM'},
'558599974':{'en': 'TIM'},
'558599975':{'en': 'TIM'},
'558599976':{'en': 'TIM'},
'558599977':{'en': 'TIM'},
'558599978':{'en': 'TIM'},
'558599979':{'en': 'TIM'},
'558599981':{'en': 'TIM'},
'558599982':{'en': 'TIM'},
'558599983':{'en': 'TIM'},
'558599984':{'en': 'TIM'},
'558599985':{'en': 'TIM'},
'558599986':{'en': 'TIM'},
'558599987':{'en': 'TIM'},
'558599988':{'en': 'TIM'},
'558599989':{'en': 'TIM'},
'55859999':{'en': 'TIM'},
'55869810':{'en': 'Vivo'},
'55869811':{'en': 'Vivo'},
'55869812':{'en': 'Vivo'},
'55869813':{'en': 'Vivo'},
'558698140':{'en': 'Vivo'},
'558698141':{'en': 'Vivo'},
'558698142':{'en': 'Vivo'},
'558698143':{'en': 'Vivo'},
'558698144':{'en': 'Vivo'},
'5586985':{'en': 'Oi'},
'5586986':{'en': 'Oi'},
'5586987':{'en': 'Oi'},
'5586988':{'en': 'Oi'},
'5586989':{'en': 'Oi'},
'5586994':{'en': 'Claro'},
'55869950':{'en': 'Claro'},
'558699510':{'en': 'Claro'},
'558699511':{'en': 'Claro'},
'558699512':{'en': 'Claro'},
'558699513':{'en': 'Claro'},
'558699514':{'en': 'Claro'},
'558699515':{'en': 'Claro'},
'558699516':{'en': 'Claro'},
'558699517':{'en': 'Claro'},
'55869992':{'en': 'TIM'},
'55869993':{'en': 'TIM'},
'55869994':{'en': 'TIM'},
'558699950':{'en': 'TIM'},
'558699951':{'en': 'TIM'},
'558699971':{'en': 'TIM'},
'558699972':{'en': 'TIM'},
'558699973':{'en': 'TIM'},
'558699974':{'en': 'TIM'},
'558699975':{'en': 'TIM'},
'558699976':{'en': 'TIM'},
'558699977':{'en': 'TIM'},
'558699978':{'en': 'TIM'},
'558699979':{'en': 'TIM'},
'558699981':{'en': 'TIM'},
'558699982':{'en': 'TIM'},
'558699983':{'en': 'TIM'},
'558699984':{'en': 'TIM'},
'558699985':{'en': 'TIM'},
'558699986':{'en': 'TIM'},
'558699987':{'en': 'TIM'},
'558699988':{'en': 'TIM'},
'558699989':{'en': 'TIM'},
'558699991':{'en': 'TIM'},
'558699992':{'en': 'TIM'},
'558699993':{'en': 'TIM'},
'558699994':{'en': 'TIM'},
'558699995':{'en': 'TIM'},
'558699996':{'en': 'TIM'},
'558699997':{'en': 'TIM'},
'558699998':{'en': 'TIM'},
'558699999':{'en': 'TIM'},
'55879810':{'en': 'Vivo'},
'55879811':{'en': 'Vivo'},
'558798120':{'en': 'Vivo'},
'558798121':{'en': 'Vivo'},
'558798122':{'en': 'Vivo'},
'558798123':{'en': 'Vivo'},
'558798124':{'en': 'Vivo'},
'558798125':{'en': 'Vivo'},
'5587985':{'en': 'Oi'},
'5587986':{'en': 'Oi'},
'5587987':{'en': 'Oi'},
'5587988':{'en': 'Oi'},
'5587989':{'en': 'Oi'},
'55879910':{'en': 'Claro'},
'55879911':{'en': 'Claro'},
'55879912':{'en': 'Claro'},
'55879913':{'en': 'Claro'},
'55879914':{'en': 'Claro'},
'55879915':{'en': 'Claro'},
'55879916':{'en': 'Claro'},
'558799170':{'en': 'Claro'},
'558799171':{'en': 'Claro'},
'558799172':{'en': 'Claro'},
'558799173':{'en': 'Claro'},
'558799174':{'en': 'Claro'},
'558799175':{'en': 'Claro'},
'558799243':{'en': 'Claro'},
'558799253':{'en': 'Claro'},
'558799601':{'en': 'TIM'},
'558799602':{'en': 'TIM'},
'558799603':{'en': 'TIM'},
'558799604':{'en': 'TIM'},
'558799605':{'en': 'TIM'},
'558799606':{'en': 'TIM'},
'558799607':{'en': 'TIM'},
'558799608':{'en': 'TIM'},
'558799609':{'en': 'TIM'},
'558799611':{'en': 'TIM'},
'558799612':{'en': 'TIM'},
'558799613':{'en': 'TIM'},
'558799614':{'en': 'TIM'},
'558799615':{'en': 'TIM'},
'558799616':{'en': 'TIM'},
'558799617':{'en': 'TIM'},
'558799618':{'en': 'TIM'},
'558799619':{'en': 'TIM'},
'558799621':{'en': 'TIM'},
'558799622':{'en': 'TIM'},
'558799623':{'en': 'TIM'},
'558799624':{'en': 'TIM'},
'558799625':{'en': 'TIM'},
'558799626':{'en': 'TIM'},
'558799627':{'en': 'TIM'},
'558799628':{'en': 'TIM'},
'558799629':{'en': 'TIM'},
'558799631':{'en': 'TIM'},
'558799632':{'en': 'TIM'},
'558799633':{'en': 'TIM'},
'558799634':{'en': 'TIM'},
'558799635':{'en': 'TIM'},
'558799636':{'en': 'TIM'},
'558799637':{'en': 'TIM'},
'558799638':{'en': 'TIM'},
'558799639':{'en': 'TIM'},
'558799901':{'en': 'TIM'},
'558799902':{'en': 'TIM'},
'558799911':{'en': 'TIM'},
'558799912':{'en': 'TIM'},
'558799913':{'en': 'TIM'},
'558799914':{'en': 'TIM'},
'558799915':{'en': 'TIM'},
'558799916':{'en': 'TIM'},
'558799917':{'en': 'TIM'},
'558799918':{'en': 'TIM'},
'558799919':{'en': 'TIM'},
'558799921':{'en': 'TIM'},
'558799922':{'en': 'TIM'},
'558799923':{'en': 'TIM'},
'558799924':{'en': 'TIM'},
'558799925':{'en': 'TIM'},
'558799926':{'en': 'TIM'},
'558799927':{'en': 'TIM'},
'558799928':{'en': 'TIM'},
'558799929':{'en': 'TIM'},
'558799931':{'en': 'TIM'},
'558799932':{'en': 'TIM'},
'558799933':{'en': 'TIM'},
'558799934':{'en': 'TIM'},
'558799935':{'en': 'TIM'},
'558799936':{'en': 'TIM'},
'558799937':{'en': 'TIM'},
'558799938':{'en': 'TIM'},
'558799939':{'en': 'TIM'},
'558799988':{'en': 'TIM'},
'558799991':{'en': 'TIM'},
'558799992':{'en': 'TIM'},
'558799993':{'en': 'TIM'},
'558799994':{'en': 'TIM'},
'558799995':{'en': 'TIM'},
'558799996':{'en': 'TIM'},
'558799997':{'en': 'TIM'},
'558799998':{'en': 'TIM'},
'558799999':{'en': 'TIM'},
'55889810':{'en': 'Vivo'},
'55889811':{'en': 'Vivo'},
'55889812':{'en': 'Vivo'},
'558898130':{'en': 'Vivo'},
'5588985':{'en': 'Oi'},
'5588986':{'en': 'Oi'},
'5588987':{'en': 'Oi'},
'5588988':{'en': 'Oi'},
'5588989':{'en': 'Oi'},
'5588992':{'en': 'Claro'},
'55889940':{'en': 'Claro'},
'55889941':{'en': 'Claro'},
'55889942':{'en': 'Claro'},
'55889943':{'en': 'Claro'},
'55889944':{'en': 'Claro'},
'55889945':{'en': 'Claro'},
'558899460':{'en': 'Claro'},
'558899461':{'en': 'Claro'},
'558899462':{'en': 'Claro'},
'558899463':{'en': 'Claro'},
'558899464':{'en': 'Claro'},
'558899601':{'en': 'TIM'},
'558899602':{'en': 'TIM'},
'558899603':{'en': 'TIM'},
'558899604':{'en': 'TIM'},
'558899605':{'en': 'TIM'},
'558899606':{'en': 'TIM'},
'558899607':{'en': 'TIM'},
'558899608':{'en': 'TIM'},
'558899609':{'en': 'TIM'},
'55889961':{'en': 'TIM'},
'558899621':{'en': 'TIM'},
'558899622':{'en': 'TIM'},
'558899623':{'en': 'TIM'},
'558899624':{'en': 'TIM'},
'558899625':{'en': 'TIM'},
'558899626':{'en': 'TIM'},
'558899627':{'en': 'TIM'},
'558899628':{'en': 'TIM'},
'558899630':{'en': 'TIM'},
'558899634':{'en': 'TIM'},
'558899635':{'en': 'TIM'},
'558899636':{'en': 'TIM'},
'558899640':{'en': 'TIM'},
'558899650':{'en': 'TIM'},
'558899901':{'en': 'TIM'},
'558899902':{'en': 'TIM'},
'558899903':{'en': 'TIM'},
'558899904':{'en': 'TIM'},
'558899905':{'en': 'TIM'},
'558899906':{'en': 'TIM'},
'558899907':{'en': 'TIM'},
'558899908':{'en': 'TIM'},
'558899909':{'en': 'TIM'},
'558899911':{'en': 'TIM'},
'558899912':{'en': 'TIM'},
'558899913':{'en': 'TIM'},
'558899914':{'en': 'TIM'},
'558899915':{'en': 'TIM'},
'558899916':{'en': 'TIM'},
'558899917':{'en': 'TIM'},
'558899918':{'en': 'TIM'},
'558899919':{'en': 'TIM'},
'558899921':{'en': 'TIM'},
'558899922':{'en': 'TIM'},
'558899923':{'en': 'TIM'},
'558899924':{'en': 'TIM'},
'558899925':{'en': 'TIM'},
'558899926':{'en': 'TIM'},
'558899927':{'en': 'TIM'},
'558899928':{'en': 'TIM'},
'558899929':{'en': 'TIM'},
'558899931':{'en': 'TIM'},
'558899932':{'en': 'TIM'},
'558899933':{'en': 'TIM'},
'558899934':{'en': 'TIM'},
'558899935':{'en': 'TIM'},
'558899936':{'en': 'TIM'},
'558899937':{'en': 'TIM'},
'558899938':{'en': 'TIM'},
'558899939':{'en': 'TIM'},
'558899941':{'en': 'TIM'},
'558899942':{'en': 'TIM'},
'558899943':{'en': 'TIM'},
'558899944':{'en': 'TIM'},
'558899945':{'en': 'TIM'},
'558899946':{'en': 'TIM'},
'558899947':{'en': 'TIM'},
'558899951':{'en': 'TIM'},
'558899952':{'en': 'TIM'},
'558899953':{'en': 'TIM'},
'558899954':{'en': 'TIM'},
'558899955':{'en': 'TIM'},
'558899956':{'en': 'TIM'},
'558899957':{'en': 'TIM'},
'558899958':{'en': 'TIM'},
'558899959':{'en': 'TIM'},
'558899961':{'en': 'TIM'},
'558899962':{'en': 'TIM'},
'558899963':{'en': 'TIM'},
'558899964':{'en': 'TIM'},
'558899965':{'en': 'TIM'},
'558899966':{'en': 'TIM'},
'558899967':{'en': 'TIM'},
'558899968':{'en': 'TIM'},
'558899969':{'en': 'TIM'},
'55899810':{'en': 'Vivo'},
'55899811':{'en': 'Vivo'},
'558998120':{'en': 'Vivo'},
'5589985':{'en': 'Oi'},
'5589986':{'en': 'Oi'},
'5589987':{'en': 'Oi'},
'5589988':{'en': 'Oi'},
'5589989':{'en': 'Oi'},
'55899940':{'en': 'Claro'},
'55899941':{'en': 'Claro'},
'55899942':{'en': 'Claro'},
'55899943':{'en': 'Claro'},
'558999443':{'en': 'Claro'},
'558999444':{'en': 'Claro'},
'558999445':{'en': 'Claro'},
'558999446':{'en': 'Claro'},
'558999921':{'en': 'TIM'},
'558999922':{'en': 'TIM'},
'558999924':{'en': 'TIM'},
'558999925':{'en': 'TIM'},
'558999926':{'en': 'TIM'},
'558999927':{'en': 'TIM'},
'558999928':{'en': 'TIM'},
'558999929':{'en': 'TIM'},
'558999971':{'en': 'TIM'},
'558999972':{'en': 'TIM'},
'558999973':{'en': 'TIM'},
'558999974':{'en': 'TIM'},
'558999975':{'en': 'TIM'},
'558999976':{'en': 'TIM'},
'558999978':{'en': 'TIM'},
'558999979':{'en': 'TIM'},
'558999984':{'en': 'TIM'},
'558999985':{'en': 'TIM'},
'558999997':{'en': 'TIM'},
'5591981':{'en': 'TIM'},
'559198201':{'en': 'TIM'},
'559198202':{'en': 'TIM'},
'559198203':{'en': 'TIM'},
'559198204':{'en': 'TIM'},
'559198205':{'en': 'TIM'},
'559198206':{'en': 'TIM'},
'559198207':{'en': 'TIM'},
'559198208':{'en': 'TIM'},
'559198209':{'en': 'TIM'},
'559198211':{'en': 'TIM'},
'559198212':{'en': 'TIM'},
'559198213':{'en': 'TIM'},
'559198214':{'en': 'TIM'},
'559198215':{'en': 'TIM'},
'559198216':{'en': 'TIM'},
'559198217':{'en': 'TIM'},
'559198218':{'en': 'TIM'},
'559198219':{'en': 'TIM'},
'559198221':{'en': 'TIM'},
'559198222':{'en': 'TIM'},
'559198223':{'en': 'TIM'},
'559198224':{'en': 'TIM'},
'559198225':{'en': 'TIM'},
'559198226':{'en': 'TIM'},
'559198227':{'en': 'TIM'},
'559198228':{'en': 'TIM'},
'559198229':{'en': 'TIM'},
'559198231':{'en': 'TIM'},
'559198232':{'en': 'TIM'},
'559198233':{'en': 'TIM'},
'559198234':{'en': 'TIM'},
'559198235':{'en': 'TIM'},
'559198236':{'en': 'TIM'},
'559198237':{'en': 'TIM'},
'559198238':{'en': 'TIM'},
'559198239':{'en': 'TIM'},
'559198241':{'en': 'TIM'},
'559198242':{'en': 'TIM'},
'559198243':{'en': 'TIM'},
'559198244':{'en': 'TIM'},
'559198245':{'en': 'TIM'},
'559198246':{'en': 'TIM'},
'55919840':{'en': 'Claro'},
'55919841':{'en': 'Claro'},
'55919842':{'en': 'Claro'},
'55919843':{'en': 'Claro'},
'55919844':{'en': 'Claro'},
'55919845':{'en': 'Claro'},
'55919846':{'en': 'Claro'},
'55919847':{'en': 'Claro'},
'559198480':{'en': 'Claro'},
'559198481':{'en': 'Claro'},
'559198482':{'en': 'Claro'},
'559198483':{'en': 'Claro'},
'559198484':{'en': 'Claro'},
'559198485':{'en': 'Claro'},
'559198486':{'en': 'Claro'},
'5591985':{'en': 'Oi'},
'5591986':{'en': 'Oi'},
'5591987':{'en': 'Oi'},
'5591988':{'en': 'Oi'},
'5591989':{'en': 'Oi'},
'5591991':{'en': 'Vivo'},
'55919920':{'en': 'Vivo'},
'55919921':{'en': 'Vivo'},
'55919922':{'en': 'Vivo'},
'55919923':{'en': 'Vivo'},
'55919924':{'en': 'Vivo'},
'55919925':{'en': 'Vivo'},
'55919926':{'en': 'Vivo'},
'55919927':{'en': 'Vivo'},
'559199280':{'en': 'Vivo'},
'559199281':{'en': 'Vivo'},
'559199282':{'en': 'Vivo'},
'559199283':{'en': 'Vivo'},
'559199284':{'en': 'Vivo'},
'559199285':{'en': 'Vivo'},
'559199286':{'en': 'Vivo'},
'559199601':{'en': 'Oi'},
'559199602':{'en': 'Oi'},
'559199603':{'en': 'Oi'},
'559199604':{'en': 'Oi'},
'559199605':{'en': 'Oi'},
'559199606':{'en': 'Oi'},
'559199607':{'en': 'Oi'},
'559199608':{'en': 'Oi'},
'559199609':{'en': 'Oi'},
'559199611':{'en': 'Oi'},
'559199612':{'en': 'Oi'},
'559199613':{'en': 'Oi'},
'559199614':{'en': 'Oi'},
'559199615':{'en': 'Oi'},
'559199616':{'en': 'Oi'},
'559199617':{'en': 'Oi'},
'559199618':{'en': 'Oi'},
'559199619':{'en': 'Oi'},
'559199621':{'en': 'Oi'},
'559199622':{'en': 'Oi'},
'559199623':{'en': 'Oi'},
'559199624':{'en': 'Oi'},
'559199625':{'en': 'Oi'},
'559199626':{'en': 'Oi'},
'559199627':{'en': 'Oi'},
'559199628':{'en': 'Oi'},
'559199629':{'en': 'Oi'},
'559199631':{'en': 'Oi'},
'559199632':{'en': 'Oi'},
'559199633':{'en': 'Oi'},
'559199634':{'en': 'Oi'},
'559199635':{'en': 'Oi'},
'559199636':{'en': 'Oi'},
'559199637':{'en': 'Oi'},
'559199638':{'en': 'Oi'},
'559199639':{'en': 'Oi'},
'559199641':{'en': 'Oi'},
'559199642':{'en': 'Oi'},
'559199901':{'en': 'Oi'},
'559199902':{'en': 'Oi'},
'559199903':{'en': 'Oi'},
'559199904':{'en': 'Oi'},
'559199905':{'en': 'Oi'},
'559199906':{'en': 'Oi'},
'559199907':{'en': 'Oi'},
'559199908':{'en': 'Oi'},
'559199912':{'en': 'Oi'},
'559199913':{'en': 'Oi'},
'559199914':{'en': 'Oi'},
'559199915':{'en': 'Oi'},
'559199916':{'en': 'Oi'},
'559199917':{'en': 'Oi'},
'559199918':{'en': 'Oi'},
'559199919':{'en': 'Oi'},
'559199921':{'en': 'Oi'},
'559199922':{'en': 'Oi'},
'559199923':{'en': 'Oi'},
'559199924':{'en': 'Oi'},
'559199925':{'en': 'Oi'},
'559199926':{'en': 'Oi'},
'559199933':{'en': 'Oi'},
'559199940':{'en': 'Oi'},
'559199941':{'en': 'Oi'},
'559199942':{'en': 'Oi'},
'559199943':{'en': 'Oi'},
'559199944':{'en': 'Oi'},
'559199949':{'en': 'Oi'},
'55919996':{'en': 'Oi'},
'559199971':{'en': 'Oi'},
'559199975':{'en': 'Oi'},
'559199977':{'en': 'Oi'},
'559199978':{'en': 'Oi'},
'559199979':{'en': 'Oi'},
'55919998':{'en': 'Oi'},
'5592981':{'en': 'TIM'},
'559298211':{'en': 'TIM'},
'55929840':{'en': 'Claro'},
'55929841':{'en': 'Claro'},
'55929842':{'en': 'Claro'},
'55929843':{'en': 'Claro'},
'55929844':{'en': 'Claro'},
'559298450':{'en': 'Claro'},
'559298451':{'en': 'Claro'},
'5592985':{'en': 'Oi'},
'5592986':{'en': 'Oi'},
'5592987':{'en': 'Oi'},
'5592988':{'en': 'Oi'},
'5592989':{'en': 'Oi'},
'5592991':{'en': 'Vivo'},
'5592992':{'en': 'Vivo'},
'55929930':{'en': 'Vivo'},
'55929931':{'en': 'Vivo'},
'55929932':{'en': 'Vivo'},
'55929933':{'en': 'Vivo'},
'55929934':{'en': 'Vivo'},
'55929935':{'en': 'Vivo'},
'55929936':{'en': 'Vivo'},
'55929937':{'en': 'Vivo'},
'55929938':{'en': 'Vivo'},
'559299390':{'en': 'Vivo'},
'559299391':{'en': 'Vivo'},
'559299392':{'en': 'Vivo'},
'559299393':{'en': 'Vivo'},
'559299394':{'en': 'Vivo'},
'559299395':{'en': 'Vivo'},
'559299631':{'en': 'Oi'},
'559299901':{'en': 'Oi'},
'559299902':{'en': 'Oi'},
'559299903':{'en': 'Oi'},
'559299904':{'en': 'Oi'},
'559299905':{'en': 'Oi'},
'559299906':{'en': 'Oi'},
'559299907':{'en': 'Oi'},
'559299908':{'en': 'Oi'},
'559299909':{'en': 'Oi'},
'559299911':{'en': 'Oi'},
'559299912':{'en': 'Oi'},
'559299913':{'en': 'Oi'},
'559299933':{'en': 'Oi'},
'559299944':{'en': 'Oi'},
'559299951':{'en': 'Oi'},
'559299961':{'en': 'Oi'},
'559299962':{'en': 'Oi'},
'559299963':{'en': 'Oi'},
'559299964':{'en': 'Oi'},
'559299965':{'en': 'Oi'},
'559299966':{'en': 'Oi'},
'559299967':{'en': 'Oi'},
'559299968':{'en': 'Oi'},
'559299969':{'en': 'Oi'},
'559299971':{'en': 'Oi'},
'559299972':{'en': 'Oi'},
'559299973':{'en': 'Oi'},
'559299974':{'en': 'Oi'},
'559299975':{'en': 'Oi'},
'559299976':{'en': 'Oi'},
'559299977':{'en': 'Oi'},
'559299978':{'en': 'Oi'},
'55929998':{'en': 'Oi'},
'559299991':{'en': 'Oi'},
'559299994':{'en': 'Oi'},
'559299995':{'en': 'Oi'},
'559299996':{'en': 'Oi'},
'559299997':{'en': 'Oi'},
'559299998':{'en': 'Oi'},
'559299999':{'en': 'Oi'},
'559398111':{'en': 'TIM'},
'559398112':{'en': 'TIM'},
'559398113':{'en': 'TIM'},
'559398114':{'en': 'TIM'},
'559398115':{'en': 'TIM'},
'559398116':{'en': 'TIM'},
'559398117':{'en': 'TIM'},
'559398118':{'en': 'TIM'},
'559398119':{'en': 'TIM'},
'559398121':{'en': 'TIM'},
'559398122':{'en': 'TIM'},
'559398123':{'en': 'TIM'},
'559398124':{'en': 'TIM'},
'559398125':{'en': 'TIM'},
'559398401':{'en': 'Claro'},
'559398402':{'en': 'Claro'},
'559398403':{'en': 'Claro'},
'559398404':{'en': 'Claro'},
'559398405':{'en': 'Claro'},
'559398406':{'en': 'Claro'},
'559398407':{'en': 'Claro'},
'559398408':{'en': 'Claro'},
'559398409':{'en': 'Claro'},
'559398411':{'en': 'Claro'},
'559398412':{'en': 'Claro'},
'559398413':{'en': 'Claro'},
'559398414':{'en': 'Claro'},
'559398415':{'en': 'Claro'},
'559398416':{'en': 'Claro'},
'5593985':{'en': 'Oi'},
'5593986':{'en': 'Oi'},
'5593987':{'en': 'Oi'},
'5593988':{'en': 'Oi'},
'5593989':{'en': 'Oi'},
'559399101':{'en': 'Vivo'},
'559399102':{'en': 'Vivo'},
'559399103':{'en': 'Vivo'},
'559399104':{'en': 'Vivo'},
'559399105':{'en': 'Vivo'},
'559399106':{'en': 'Vivo'},
'559399107':{'en': 'Vivo'},
'559399111':{'en': 'Vivo'},
'559399112':{'en': 'Vivo'},
'559399113':{'en': 'Vivo'},
'559399114':{'en': 'Vivo'},
'559399115':{'en': 'Vivo'},
'559399116':{'en': 'Vivo'},
'559399117':{'en': 'Vivo'},
'559399118':{'en': 'Vivo'},
'559399119':{'en': 'Vivo'},
'559399121':{'en': 'Vivo'},
'559399122':{'en': 'Vivo'},
'559399123':{'en': 'Vivo'},
'559399124':{'en': 'Vivo'},
'559399125':{'en': 'Vivo'},
'559399126':{'en': 'Vivo'},
'559399127':{'en': 'Vivo'},
'559399128':{'en': 'Vivo'},
'559399129':{'en': 'Vivo'},
'559399131':{'en': 'Vivo'},
'559399132':{'en': 'Vivo'},
'559399133':{'en': 'Vivo'},
'559399134':{'en': 'Vivo'},
'559399135':{'en': 'Vivo'},
'559399136':{'en': 'Vivo'},
'559399137':{'en': 'Vivo'},
'559399138':{'en': 'Vivo'},
'559399139':{'en': 'Vivo'},
'559399141':{'en': 'Vivo'},
'559399142':{'en': 'Vivo'},
'559399143':{'en': 'Vivo'},
'559399144':{'en': 'Vivo'},
'559399145':{'en': 'Vivo'},
'559399146':{'en': 'Vivo'},
'559399147':{'en': 'Vivo'},
'559399148':{'en': 'Vivo'},
'559399149':{'en': 'Vivo'},
'559399151':{'en': 'Vivo'},
'559399152':{'en': 'Vivo'},
'559399153':{'en': 'Vivo'},
'559399154':{'en': 'Vivo'},
'559399155':{'en': 'Vivo'},
'559399156':{'en': 'Vivo'},
'559399157':{'en': 'Vivo'},
'559399158':{'en': 'Vivo'},
'559399159':{'en': 'Vivo'},
'559399161':{'en': 'Vivo'},
'559399162':{'en': 'Vivo'},
'559399163':{'en': 'Vivo'},
'559399164':{'en': 'Vivo'},
'559399165':{'en': 'Vivo'},
'559399166':{'en': 'Vivo'},
'559399167':{'en': 'Vivo'},
'559399168':{'en': 'Vivo'},
'559399169':{'en': 'Vivo'},
'559399171':{'en': 'Vivo'},
'559399172':{'en': 'Vivo'},
'559399173':{'en': 'Vivo'},
'559399174':{'en': 'Vivo'},
'559399175':{'en': 'Vivo'},
'559399176':{'en': 'Vivo'},
'559399177':{'en': 'Vivo'},
'559399178':{'en': 'Vivo'},
'559399179':{'en': 'Vivo'},
'559399181':{'en': 'Vivo'},
'559399182':{'en': 'Vivo'},
'559399183':{'en': 'Vivo'},
'559399184':{'en': 'Vivo'},
'559399185':{'en': 'Vivo'},
'559399186':{'en': 'Vivo'},
'559399187':{'en': 'Vivo'},
'559399188':{'en': 'Vivo'},
'559399189':{'en': 'Vivo'},
'559399191':{'en': 'Vivo'},
'559399192':{'en': 'Vivo'},
'559399193':{'en': 'Vivo'},
'559399194':{'en': 'Vivo'},
'559399195':{'en': 'Vivo'},
'559399196':{'en': 'Vivo'},
'559399197':{'en': 'Vivo'},
'559399198':{'en': 'Vivo'},
'559399199':{'en': 'Vivo'},
'559399651':{'en': 'Oi'},
'559399652':{'en': 'Oi'},
'559399653':{'en': 'Oi'},
'559399654':{'en': 'Oi'},
'559399655':{'en': 'Oi'},
'559399656':{'en': 'Oi'},
'559399657':{'en': 'Oi'},
'559399658':{'en': 'Oi'},
'559399659':{'en': 'Oi'},
'559399901':{'en': 'Oi'},
'559399902':{'en': 'Oi'},
'559399903':{'en': 'Oi'},
'559399904':{'en': 'Oi'},
'559399908':{'en': 'Oi'},
'559399933':{'en': 'Oi'},
'559399951':{'en': 'Oi'},
'559399952':{'en': 'Oi'},
'559399954':{'en': 'Oi'},
'559399970':{'en': 'Oi'},
'559399973':{'en': 'Oi'},
'559399974':{'en': 'Oi'},
'559399975':{'en': 'Oi'},
'559399976':{'en': 'Oi'},
'559399977':{'en': 'Oi'},
'559399978':{'en': 'Oi'},
'559399979':{'en': 'Oi'},
'559498111':{'en': 'TIM'},
'559498112':{'en': 'TIM'},
'559498113':{'en': 'TIM'},
'559498114':{'en': 'TIM'},
'559498115':{'en': 'TIM'},
'559498116':{'en': 'TIM'},
'559498117':{'en': 'TIM'},
'559498118':{'en': 'TIM'},
'559498119':{'en': 'TIM'},
'559498121':{'en': 'TIM'},
'559498122':{'en': 'TIM'},
'559498123':{'en': 'TIM'},
'559498124':{'en': 'TIM'},
'559498125':{'en': 'TIM'},
'559498126':{'en': 'TIM'},
'559498127':{'en': 'TIM'},
'559498128':{'en': 'TIM'},
'559498129':{'en': 'TIM'},
'559498131':{'en': 'TIM'},
'559498132':{'en': 'TIM'},
'559498133':{'en': 'TIM'},
'559498134':{'en': 'TIM'},
'559498135':{'en': 'TIM'},
'559498136':{'en': 'TIM'},
'559498137':{'en': 'TIM'},
'559498138':{'en': 'TIM'},
'559498139':{'en': 'TIM'},
'559498141':{'en': 'TIM'},
'559498142':{'en': 'TIM'},
'559498143':{'en': 'TIM'},
'559498144':{'en': 'TIM'},
'559498145':{'en': 'TIM'},
'559498146':{'en': 'TIM'},
'559498147':{'en': 'TIM'},
'559498148':{'en': 'TIM'},
'559498149':{'en': 'TIM'},
'559498154':{'en': 'TIM'},
'559498155':{'en': 'TIM'},
'559498156':{'en': 'TIM'},
'559498157':{'en': 'TIM'},
'559498401':{'en': 'Claro'},
'559498402':{'en': 'Claro'},
'559498403':{'en': 'Claro'},
'559498404':{'en': 'Claro'},
'559498405':{'en': 'Claro'},
'559498406':{'en': 'Claro'},
'559498407':{'en': 'Claro'},
'559498408':{'en': 'Claro'},
'559498409':{'en': 'Claro'},
'559498411':{'en': 'Claro'},
'559498412':{'en': 'Claro'},
'559498413':{'en': 'Claro'},
'559498414':{'en': 'Claro'},
'5594985':{'en': 'Oi'},
'5594986':{'en': 'Oi'},
'5594987':{'en': 'Oi'},
'5594988':{'en': 'Oi'},
'5594989':{'en': 'Oi'},
'5594991':{'en': 'Vivo'},
'55949920':{'en': 'Vivo'},
'55949921':{'en': 'Vivo'},
'55949922':{'en': 'Vivo'},
'559499230':{'en': 'Vivo'},
'559499231':{'en': 'Vivo'},
'559499232':{'en': 'Vivo'},
'559499233':{'en': 'Vivo'},
'559499661':{'en': 'Oi'},
'559499662':{'en': 'Oi'},
'559499663':{'en': 'Oi'},
'559499664':{'en': 'Oi'},
'559499665':{'en': 'Oi'},
'559499666':{'en': 'Oi'},
'559499667':{'en': 'Oi'},
'559499668':{'en': 'Oi'},
'559499901':{'en': 'Oi'},
'559499903':{'en': 'Oi'},
'559499904':{'en': 'Oi'},
'559499908':{'en': 'Oi'},
'559499909':{'en': 'Oi'},
'559499933':{'en': 'Oi'},
'559499934':{'en': 'Oi'},
'559499944':{'en': 'Oi'},
'559499953':{'en': 'Oi'},
'559499955':{'en': 'Oi'},
'559499969':{'en': 'Oi'},
'559499970':{'en': 'Oi'},
'559499972':{'en': 'Oi'},
'559499973':{'en': 'Oi'},
'559499974':{'en': 'Oi'},
'559499975':{'en': 'Oi'},
'559499977':{'en': 'Oi'},
'559499979':{'en': 'Oi'},
'559598111':{'en': 'TIM'},
'559598112':{'en': 'TIM'},
'559598113':{'en': 'TIM'},
'559598114':{'en': 'TIM'},
'559598115':{'en': 'TIM'},
'559598116':{'en': 'TIM'},
'559598117':{'en': 'TIM'},
'559598118':{'en': 'TIM'},
'559598119':{'en': 'TIM'},
'559598121':{'en': 'TIM'},
'559598122':{'en': 'TIM'},
'559598123':{'en': 'TIM'},
'55959840':{'en': 'Claro'},
'559598410':{'en': 'Claro'},
'559598411':{'en': 'Claro'},
'559598412':{'en': 'Claro'},
'5595985':{'en': 'Oi'},
'5595986':{'en': 'Oi'},
'5595987':{'en': 'Oi'},
'5595988':{'en': 'Oi'},
'5595989':{'en': 'Oi'},
'559599111':{'en': 'Vivo'},
'559599112':{'en': 'Vivo'},
'559599113':{'en': 'Vivo'},
'559599114':{'en': 'Vivo'},
'559599115':{'en': 'Vivo'},
'559599116':{'en': 'Vivo'},
'559599117':{'en': 'Vivo'},
'559599118':{'en': 'Vivo'},
'559599119':{'en': 'Vivo'},
'559599121':{'en': 'Vivo'},
'559599122':{'en': 'Vivo'},
'559599123':{'en': 'Vivo'},
'559599124':{'en': 'Vivo'},
'559599125':{'en': 'Vivo'},
'559599126':{'en': 'Vivo'},
'559599127':{'en': 'Vivo'},
'559599128':{'en': 'Vivo'},
'559599129':{'en': 'Vivo'},
'559599131':{'en': 'Vivo'},
'559599132':{'en': 'Vivo'},
'559599133':{'en': 'Vivo'},
'559599134':{'en': 'Vivo'},
'559599135':{'en': 'Vivo'},
'559599136':{'en': 'Vivo'},
'559599137':{'en': 'Vivo'},
'559599138':{'en': 'Vivo'},
'559599139':{'en': 'Vivo'},
'559599141':{'en': 'Vivo'},
'559599142':{'en': 'Vivo'},
'559599143':{'en': 'Vivo'},
'559599144':{'en': 'Vivo'},
'559599145':{'en': 'Vivo'},
'559599146':{'en': 'Vivo'},
'559599147':{'en': 'Vivo'},
'559599148':{'en': 'Vivo'},
'559599149':{'en': 'Vivo'},
'559599151':{'en': 'Vivo'},
'559599152':{'en': 'Vivo'},
'559599153':{'en': 'Vivo'},
'559599154':{'en': 'Vivo'},
'559599155':{'en': 'Vivo'},
'559599156':{'en': 'Vivo'},
'559599157':{'en': 'Vivo'},
'559599158':{'en': 'Vivo'},
'559599159':{'en': 'Vivo'},
'559599161':{'en': 'Vivo'},
'559599162':{'en': 'Vivo'},
'559599163':{'en': 'Vivo'},
'559599164':{'en': 'Vivo'},
'559599165':{'en': 'Vivo'},
'559599166':{'en': 'Vivo'},
'559599167':{'en': 'Vivo'},
'559599168':{'en': 'Vivo'},
'559599901':{'en': 'Oi'},
'559599902':{'en': 'Oi'},
'559599903':{'en': 'Oi'},
'559599904':{'en': 'Oi'},
'559599905':{'en': 'Oi'},
'559599933':{'en': 'Oi'},
'559599959':{'en': 'Oi'},
'559599961':{'en': 'Oi'},
'559599962':{'en': 'Oi'},
'559599963':{'en': 'Oi'},
'559599964':{'en': 'Oi'},
'559599965':{'en': 'Oi'},
'559599967':{'en': 'Oi'},
'559599970':{'en': 'Oi'},
'559599971':{'en': 'Oi'},
'559599972':{'en': 'Oi'},
'559599974':{'en': 'Oi'},
'559599976':{'en': 'Oi'},
'559599977':{'en': 'Oi'},
'559599981':{'en': 'Oi'},
'559698111':{'en': 'TIM'},
'559698112':{'en': 'TIM'},
'559698113':{'en': 'TIM'},
'559698114':{'en': 'TIM'},
'559698115':{'en': 'TIM'},
'559698116':{'en': 'TIM'},
'559698117':{'en': 'TIM'},
'559698118':{'en': 'TIM'},
'559698119':{'en': 'TIM'},
'559698121':{'en': 'TIM'},
'559698122':{'en': 'TIM'},
'559698123':{'en': 'TIM'},
'559698124':{'en': 'TIM'},
'559698125':{'en': 'TIM'},
'559698126':{'en': 'TIM'},
'559698127':{'en': 'TIM'},
'559698128':{'en': 'TIM'},
'559698129':{'en': 'TIM'},
'559698131':{'en': 'TIM'},
'559698132':{'en': 'TIM'},
'559698133':{'en': 'TIM'},
'559698134':{'en': 'TIM'},
'559698135':{'en': 'TIM'},
'55969840':{'en': 'Claro'},
'559698410':{'en': 'Claro'},
'559698411':{'en': 'Claro'},
'559698412':{'en': 'Claro'},
'559698413':{'en': 'Claro'},
'5596985':{'en': 'Oi'},
'5596986':{'en': 'Oi'},
'5596987':{'en': 'Oi'},
'5596988':{'en': 'Oi'},
'5596989':{'en': 'Oi'},
'559699111':{'en': 'Vivo'},
'559699112':{'en': 'Vivo'},
'559699113':{'en': 'Vivo'},
'559699114':{'en': 'Vivo'},
'559699115':{'en': 'Vivo'},
'559699116':{'en': 'Vivo'},
'559699117':{'en': 'Vivo'},
'559699118':{'en': 'Vivo'},
'559699119':{'en': 'Vivo'},
'559699121':{'en': 'Vivo'},
'559699122':{'en': 'Vivo'},
'559699123':{'en': 'Vivo'},
'559699124':{'en': 'Vivo'},
'559699125':{'en': 'Vivo'},
'559699126':{'en': 'Vivo'},
'559699127':{'en': 'Vivo'},
'559699128':{'en': 'Vivo'},
'559699129':{'en': 'Vivo'},
'559699131':{'en': 'Vivo'},
'559699132':{'en': 'Vivo'},
'559699133':{'en': 'Vivo'},
'559699134':{'en': 'Vivo'},
'559699135':{'en': 'Vivo'},
'559699136':{'en': 'Vivo'},
'559699137':{'en': 'Vivo'},
'559699138':{'en': 'Vivo'},
'559699139':{'en': 'Vivo'},
'559699141':{'en': 'Vivo'},
'559699142':{'en': 'Vivo'},
'559699143':{'en': 'Vivo'},
'559699144':{'en': 'Vivo'},
'559699145':{'en': 'Vivo'},
'559699146':{'en': 'Vivo'},
'559699147':{'en': 'Vivo'},
'559699148':{'en': 'Vivo'},
'559699149':{'en': 'Vivo'},
'559699151':{'en': 'Vivo'},
'559699152':{'en': 'Vivo'},
'559699153':{'en': 'Vivo'},
'559699154':{'en': 'Vivo'},
'559699155':{'en': 'Vivo'},
'559699156':{'en': 'Vivo'},
'559699157':{'en': 'Vivo'},
'559699158':{'en': 'Vivo'},
'559699159':{'en': 'Vivo'},
'559699161':{'en': 'Vivo'},
'559699162':{'en': 'Vivo'},
'559699163':{'en': 'Vivo'},
'559699164':{'en': 'Vivo'},
'559699165':{'en': 'Vivo'},
'559699166':{'en': 'Vivo'},
'559699167':{'en': 'Vivo'},
'559699168':{'en': 'Vivo'},
'559699169':{'en': 'Vivo'},
'559699171':{'en': 'Vivo'},
'559699172':{'en': 'Vivo'},
'559699173':{'en': 'Vivo'},
'559699174':{'en': 'Vivo'},
'559699175':{'en': 'Vivo'},
'559699176':{'en': 'Vivo'},
'559699177':{'en': 'Vivo'},
'559699178':{'en': 'Vivo'},
'559699179':{'en': 'Vivo'},
'559699181':{'en': 'Vivo'},
'559699901':{'en': 'Oi'},
'559699902':{'en': 'Oi'},
'559699903':{'en': 'Oi'},
'559699904':{'en': 'Oi'},
'559699905':{'en': 'Oi'},
'559699906':{'en': 'Oi'},
'559699907':{'en': 'Oi'},
'559699908':{'en': 'Oi'},
'559699909':{'en': 'Oi'},
'559699911':{'en': 'Oi'},
'559699912':{'en': 'Oi'},
'559699913':{'en': 'Oi'},
'559699914':{'en': 'Oi'},
'559699915':{'en': 'Oi'},
'559699916':{'en': 'Oi'},
'559699917':{'en': 'Oi'},
'559699918':{'en': 'Oi'},
'559699933':{'en': 'Oi'},
'559699961':{'en': 'Oi'},
'559699962':{'en': 'Oi'},
'559699963':{'en': 'Oi'},
'559699964':{'en': 'Oi'},
'559699965':{'en': 'Oi'},
'559699966':{'en': 'Oi'},
'559699967':{'en': 'Oi'},
'559699968':{'en': 'Oi'},
'559699970':{'en': 'Oi'},
'559699971':{'en': 'Oi'},
'559699972':{'en': 'Oi'},
'559699973':{'en': 'Oi'},
'559699974':{'en': 'Oi'},
'559699975':{'en': 'Oi'},
'559699976':{'en': 'Oi'},
'559699981':{'en': 'Oi'},
'559798111':{'en': 'TIM'},
'559798113':{'en': 'TIM'},
'559798114':{'en': 'TIM'},
'559798115':{'en': 'TIM'},
'559798116':{'en': 'TIM'},
'559798117':{'en': 'TIM'},
'559798118':{'en': 'TIM'},
'559798119':{'en': 'TIM'},
'559798121':{'en': 'TIM'},
'55979840':{'en': 'Claro'},
'559798410':{'en': 'Claro'},
'559798411':{'en': 'Claro'},
'5597985':{'en': 'Oi'},
'5597986':{'en': 'Oi'},
'5597987':{'en': 'Oi'},
'5597988':{'en': 'Oi'},
'5597989':{'en': 'Oi'},
'559799151':{'en': 'Vivo'},
'559799152':{'en': 'Vivo'},
'559799153':{'en': 'Vivo'},
'559799154':{'en': 'Vivo'},
'559799155':{'en': 'Vivo'},
'559799156':{'en': 'Vivo'},
'559799157':{'en': 'Vivo'},
'559799158':{'en': 'Vivo'},
'559799159':{'en': 'Vivo'},
'559799161':{'en': 'Vivo'},
'559799162':{'en': 'Vivo'},
'559799163':{'en': 'Vivo'},
'559799164':{'en': 'Vivo'},
'559799165':{'en': 'Vivo'},
'559799166':{'en': 'Vivo'},
'559799167':{'en': 'Vivo'},
'559799168':{'en': 'Vivo'},
'559799169':{'en': 'Vivo'},
'559799171':{'en': 'Vivo'},
'559799172':{'en': 'Vivo'},
'559799173':{'en': 'Vivo'},
'559799174':{'en': 'Vivo'},
'559799175':{'en': 'Vivo'},
'559799176':{'en': 'Vivo'},
'559799177':{'en': 'Vivo'},
'559799178':{'en': 'Vivo'},
'559799179':{'en': 'Vivo'},
'559799181':{'en': 'Vivo'},
'559799182':{'en': 'Vivo'},
'559799183':{'en': 'Vivo'},
'559799184':{'en': 'Vivo'},
'559799185':{'en': 'Vivo'},
'559799186':{'en': 'Vivo'},
'559799187':{'en': 'Vivo'},
'559799188':{'en': 'Vivo'},
'559799611':{'en': 'Oi'},
'559799612':{'en': 'Oi'},
'559799613':{'en': 'Oi'},
'559799901':{'en': 'Oi'},
'559799902':{'en': 'Oi'},
'559799903':{'en': 'Oi'},
'559799933':{'en': 'Oi'},
'559799953':{'en': 'Oi'},
'559799957':{'en': 'Oi'},
'559799958':{'en': 'Oi'},
'559799959':{'en': 'Oi'},
'559799978':{'en': 'Oi'},
'559799979':{'en': 'Oi'},
'559898111':{'en': 'TIM'},
'559898112':{'en': 'TIM'},
'559898113':{'en': 'TIM'},
'559898114':{'en': 'TIM'},
'559898115':{'en': 'TIM'},
'559898116':{'en': 'TIM'},
'559898117':{'en': 'TIM'},
'559898118':{'en': 'TIM'},
'559898119':{'en': 'TIM'},
'559898121':{'en': 'TIM'},
'559898122':{'en': 'TIM'},
'559898123':{'en': 'TIM'},
'559898124':{'en': 'TIM'},
'559898125':{'en': 'TIM'},
'559898126':{'en': 'TIM'},
'559898127':{'en': 'TIM'},
'559898128':{'en': 'TIM'},
'559898129':{'en': 'TIM'},
'559898131':{'en': 'TIM'},
'559898132':{'en': 'TIM'},
'559898133':{'en': 'TIM'},
'559898134':{'en': 'TIM'},
'559898135':{'en': 'TIM'},
'559898136':{'en': 'TIM'},
'559898137':{'en': 'TIM'},
'559898138':{'en': 'TIM'},
'559898139':{'en': 'TIM'},
'559898141':{'en': 'TIM'},
'559898142':{'en': 'TIM'},
'559898143':{'en': 'TIM'},
'559898144':{'en': 'TIM'},
'559898145':{'en': 'TIM'},
'559898146':{'en': 'TIM'},
'559898147':{'en': 'TIM'},
'559898148':{'en': 'TIM'},
'559898149':{'en': 'TIM'},
'559898151':{'en': 'TIM'},
'559898152':{'en': 'TIM'},
'559898153':{'en': 'TIM'},
'559898154':{'en': 'TIM'},
'559898155':{'en': 'TIM'},
'559898156':{'en': 'TIM'},
'559898157':{'en': 'TIM'},
'559898158':{'en': 'TIM'},
'559898159':{'en': 'TIM'},
'559898161':{'en': 'TIM'},
'559898162':{'en': 'TIM'},
'559898163':{'en': 'TIM'},
'559898164':{'en': 'TIM'},
'559898165':{'en': 'TIM'},
'559898166':{'en': 'TIM'},
'559898167':{'en': 'TIM'},
'559898168':{'en': 'TIM'},
'559898169':{'en': 'TIM'},
'559898171':{'en': 'TIM'},
'559898172':{'en': 'TIM'},
'559898173':{'en': 'TIM'},
'559898174':{'en': 'TIM'},
'559898175':{'en': 'TIM'},
'559898176':{'en': 'TIM'},
'559898177':{'en': 'TIM'},
'559898178':{'en': 'TIM'},
'559898179':{'en': 'TIM'},
'559898181':{'en': 'TIM'},
'559898182':{'en': 'TIM'},
'559898183':{'en': 'TIM'},
'559898184':{'en': 'TIM'},
'559898185':{'en': 'TIM'},
'55989840':{'en': 'Claro'},
'55989841':{'en': 'Claro'},
'55989842':{'en': 'Claro'},
'55989843':{'en': 'Claro'},
'55989844':{'en': 'Claro'},
'55989845':{'en': 'Claro'},
'559898460':{'en': 'Claro'},
'559898461':{'en': 'Claro'},
'5598985':{'en': 'Oi'},
'5598986':{'en': 'Oi'},
'5598987':{'en': 'Oi'},
'5598988':{'en': 'Oi'},
'5598989':{'en': 'Oi'},
'559899101':{'en': 'Vivo'},
'559899102':{'en': 'Vivo'},
'559899103':{'en': 'Vivo'},
'559899104':{'en': 'Vivo'},
'559899105':{'en': 'Vivo'},
'559899106':{'en': 'Vivo'},
'559899107':{'en': 'Vivo'},
'559899108':{'en': 'Vivo'},
'559899111':{'en': 'Vivo'},
'559899112':{'en': 'Vivo'},
'559899113':{'en': 'Vivo'},
'559899114':{'en': 'Vivo'},
'559899115':{'en': 'Vivo'},
'559899116':{'en': 'Vivo'},
'559899117':{'en': 'Vivo'},
'559899118':{'en': 'Vivo'},
'559899119':{'en': 'Vivo'},
'559899121':{'en': 'Vivo'},
'559899122':{'en': 'Vivo'},
'559899123':{'en': 'Vivo'},
'559899124':{'en': 'Vivo'},
'559899125':{'en': 'Vivo'},
'559899126':{'en': 'Vivo'},
'559899127':{'en': 'Vivo'},
'559899128':{'en': 'Vivo'},
'559899129':{'en': 'Vivo'},
'559899131':{'en': 'Vivo'},
'559899132':{'en': 'Vivo'},
'559899133':{'en': 'Vivo'},
'559899134':{'en': 'Vivo'},
'559899135':{'en': 'Vivo'},
'559899136':{'en': 'Vivo'},
'559899137':{'en': 'Vivo'},
'559899138':{'en': 'Vivo'},
'559899139':{'en': 'Vivo'},
'559899141':{'en': 'Vivo'},
'559899142':{'en': 'Vivo'},
'559899143':{'en': 'Vivo'},
'559899144':{'en': 'Vivo'},
'559899145':{'en': 'Vivo'},
'559899146':{'en': 'Vivo'},
'559899147':{'en': 'Vivo'},
'559899148':{'en': 'Vivo'},
'559899149':{'en': 'Vivo'},
'559899151':{'en': 'Vivo'},
'559899152':{'en': 'Vivo'},
'559899153':{'en': 'Vivo'},
'559899154':{'en': 'Vivo'},
'559899155':{'en': 'Vivo'},
'559899156':{'en': 'Vivo'},
'559899157':{'en': 'Vivo'},
'559899158':{'en': 'Vivo'},
'559899159':{'en': 'Vivo'},
'559899161':{'en': 'Vivo'},
'559899162':{'en': 'Vivo'},
'559899163':{'en': 'Vivo'},
'559899164':{'en': 'Vivo'},
'559899165':{'en': 'Vivo'},
'559899166':{'en': 'Vivo'},
'559899167':{'en': 'Vivo'},
'559899168':{'en': 'Vivo'},
'559899169':{'en': 'Vivo'},
'559899171':{'en': 'Vivo'},
'559899172':{'en': 'Vivo'},
'559899173':{'en': 'Vivo'},
'559899174':{'en': 'Vivo'},
'559899175':{'en': 'Vivo'},
'559899176':{'en': 'Vivo'},
'559899177':{'en': 'Vivo'},
'559899178':{'en': 'Vivo'},
'559899179':{'en': 'Vivo'},
'559899181':{'en': 'Vivo'},
'559899182':{'en': 'Vivo'},
'559899183':{'en': 'Vivo'},
'559899184':{'en': 'Vivo'},
'559899185':{'en': 'Vivo'},
'559899186':{'en': 'Vivo'},
'559899187':{'en': 'Vivo'},
'559899188':{'en': 'Vivo'},
'559899189':{'en': 'Vivo'},
'559899191':{'en': 'Vivo'},
'559899192':{'en': 'Vivo'},
'559899193':{'en': 'Vivo'},
'559899194':{'en': 'Vivo'},
'559899195':{'en': 'Vivo'},
'559899196':{'en': 'Vivo'},
'559899197':{'en': 'Vivo'},
'559899198':{'en': 'Vivo'},
'559899199':{'en': 'Vivo'},
'559899601':{'en': 'Oi'},
'559899602':{'en': 'Oi'},
'559899603':{'en': 'Oi'},
'559899604':{'en': 'Oi'},
'559899605':{'en': 'Oi'},
'559899606':{'en': 'Oi'},
'559899607':{'en': 'Oi'},
'559899608':{'en': 'Oi'},
'559899609':{'en': 'Oi'},
'559899611':{'en': 'Oi'},
'559899612':{'en': 'Oi'},
'559899613':{'en': 'Oi'},
'559899614':{'en': 'Oi'},
'559899615':{'en': 'Oi'},
'559899616':{'en': 'Oi'},
'559899617':{'en': 'Oi'},
'559899618':{'en': 'Oi'},
'559899619':{'en': 'Oi'},
'559899621':{'en': 'Oi'},
'559899901':{'en': 'Oi'},
'559899902':{'en': 'Oi'},
'559899903':{'en': 'Oi'},
'559899904':{'en': 'Oi'},
'559899905':{'en': 'Oi'},
'559899906':{'en': 'Oi'},
'559899907':{'en': 'Oi'},
'559899908':{'en': 'Oi'},
'559899909':{'en': 'Oi'},
'559899911':{'en': 'Oi'},
'559899912':{'en': 'Oi'},
'559899913':{'en': 'Oi'},
'559899933':{'en': 'Oi'},
'559899934':{'en': 'Oi'},
'559899944':{'en': 'Oi'},
'559899961':{'en': 'Oi'},
'559899962':{'en': 'Oi'},
'559899963':{'en': 'Oi'},
'559899964':{'en': 'Oi'},
'559899965':{'en': 'Oi'},
'559899966':{'en': 'Oi'},
'559899967':{'en': 'Oi'},
'559899968':{'en': 'Oi'},
'559899969':{'en': 'Oi'},
'559899970':{'en': 'Oi'},
'559899971':{'en': 'Oi'},
'559899972':{'en': 'Oi'},
'559899973':{'en': 'Oi'},
'559899974':{'en': 'Oi'},
'559899975':{'en': 'Oi'},
'559899976':{'en': 'Oi'},
'559899981':{'en': 'Oi'},
'559899982':{'en': 'Oi'},
'559899983':{'en': 'Oi'},
'559899984':{'en': 'Oi'},
'559899985':{'en': 'Oi'},
'559899988':{'en': 'Oi'},
'559899991':{'en': 'Oi'},
'559899992':{'en': 'Oi'},
'559899993':{'en': 'Oi'},
'559899994':{'en': 'Oi'},
'559899995':{'en': 'Oi'},
'559998111':{'en': 'TIM'},
'559998112':{'en': 'TIM'},
'559998113':{'en': 'TIM'},
'559998114':{'en': 'TIM'},
'559998115':{'en': 'TIM'},
'559998116':{'en': 'TIM'},
'559998117':{'en': 'TIM'},
'559998118':{'en': 'TIM'},
'559998119':{'en': 'TIM'},
'559998121':{'en': 'TIM'},
'559998122':{'en': 'TIM'},
'559998123':{'en': 'TIM'},
'559998124':{'en': 'TIM'},
'559998125':{'en': 'TIM'},
'559998126':{'en': 'TIM'},
'559998127':{'en': 'TIM'},
'559998128':{'en': 'TIM'},
'559998129':{'en': 'TIM'},
'559998131':{'en': 'TIM'},
'559998132':{'en': 'TIM'},
'559998133':{'en': 'TIM'},
'559998134':{'en': 'TIM'},
'559998135':{'en': 'TIM'},
'559998136':{'en': 'TIM'},
'559998137':{'en': 'TIM'},
'559998138':{'en': 'TIM'},
'559998139':{'en': 'TIM'},
'559998141':{'en': 'TIM'},
'559998142':{'en': 'TIM'},
'559998143':{'en': 'TIM'},
'559998144':{'en': 'TIM'},
'559998145':{'en': 'TIM'},
'559998146':{'en': 'TIM'},
'559998147':{'en': 'TIM'},
'559998148':{'en': 'TIM'},
'559998149':{'en': 'TIM'},
'559998151':{'en': 'TIM'},
'559998152':{'en': 'TIM'},
'559998153':{'en': 'TIM'},
'55999840':{'en': 'Claro'},
'55999841':{'en': 'Claro'},
'559998420':{'en': 'Claro'},
'559998421':{'en': 'Claro'},
'559998422':{'en': 'Claro'},
'5599985':{'en': 'Oi'},
'5599986':{'en': 'Oi'},
'5599987':{'en': 'Oi'},
'5599988':{'en': 'Oi'},
'5599989':{'en': 'Oi'},
'559999101':{'en': 'Vivo'},
'559999102':{'en': 'Vivo'},
'559999103':{'en': 'Vivo'},
'559999104':{'en': 'Vivo'},
'559999105':{'en': 'Vivo'},
'559999106':{'en': 'Vivo'},
'559999107':{'en': 'Vivo'},
'559999108':{'en': 'Vivo'},
'559999109':{'en': 'Vivo'},
'559999111':{'en': 'Vivo'},
'559999112':{'en': 'Vivo'},
'559999113':{'en': 'Vivo'},
'559999114':{'en': 'Vivo'},
'559999115':{'en': 'Vivo'},
'559999121':{'en': 'Vivo'},
'559999122':{'en': 'Vivo'},
'559999123':{'en': 'Vivo'},
'559999124':{'en': 'Vivo'},
'559999125':{'en': 'Vivo'},
'559999126':{'en': 'Vivo'},
'559999127':{'en': 'Vivo'},
'559999128':{'en': 'Vivo'},
'559999129':{'en': 'Vivo'},
'559999131':{'en': 'Vivo'},
'559999132':{'en': 'Vivo'},
'559999133':{'en': 'Vivo'},
'559999134':{'en': 'Vivo'},
'559999135':{'en': 'Vivo'},
'559999136':{'en': 'Vivo'},
'559999137':{'en': 'Vivo'},
'559999138':{'en': 'Vivo'},
'559999139':{'en': 'Vivo'},
'559999141':{'en': 'Vivo'},
'559999142':{'en': 'Vivo'},
'559999143':{'en': 'Vivo'},
'559999144':{'en': 'Vivo'},
'559999145':{'en': 'Vivo'},
'559999146':{'en': 'Vivo'},
'559999147':{'en': 'Vivo'},
'559999148':{'en': 'Vivo'},
'559999149':{'en': 'Vivo'},
'559999151':{'en': 'Vivo'},
'559999152':{'en': 'Vivo'},
'559999153':{'en': 'Vivo'},
'559999154':{'en': 'Vivo'},
'559999155':{'en': 'Vivo'},
'559999156':{'en': 'Vivo'},
'559999157':{'en': 'Vivo'},
'559999158':{'en': 'Vivo'},
'559999159':{'en': 'Vivo'},
'559999161':{'en': 'Vivo'},
'559999162':{'en': 'Vivo'},
'559999163':{'en': 'Vivo'},
'559999164':{'en': 'Vivo'},
'559999165':{'en': 'Vivo'},
'559999166':{'en': 'Vivo'},
'559999167':{'en': 'Vivo'},
'559999168':{'en': 'Vivo'},
'559999169':{'en': 'Vivo'},
'559999171':{'en': 'Vivo'},
'559999172':{'en': 'Vivo'},
'559999173':{'en': 'Vivo'},
'559999174':{'en': 'Vivo'},
'559999175':{'en': 'Vivo'},
'559999176':{'en': 'Vivo'},
'559999177':{'en': 'Vivo'},
'559999178':{'en': 'Vivo'},
'559999179':{'en': 'Vivo'},
'559999181':{'en': 'Vivo'},
'559999182':{'en': 'Vivo'},
'559999183':{'en': 'Vivo'},
'559999184':{'en': 'Vivo'},
'559999185':{'en': 'Vivo'},
'559999186':{'en': 'Vivo'},
'559999187':{'en': 'Vivo'},
'559999188':{'en': 'Vivo'},
'559999189':{'en': 'Vivo'},
'559999191':{'en': 'Vivo'},
'559999192':{'en': 'Vivo'},
'559999193':{'en': 'Vivo'},
'559999194':{'en': 'Vivo'},
'559999195':{'en': 'Vivo'},
'559999196':{'en': 'Vivo'},
'559999197':{'en': 'Vivo'},
'559999198':{'en': 'Vivo'},
'559999631':{'en': 'Oi'},
'559999641':{'en': 'Oi'},
'559999642':{'en': 'Oi'},
'559999643':{'en': 'Oi'},
'559999644':{'en': 'Oi'},
'559999645':{'en': 'Oi'},
'559999646':{'en': 'Oi'},
'559999647':{'en': 'Oi'},
'559999649':{'en': 'Oi'},
'559999651':{'en': 'Oi'},
'559999901':{'en': 'Oi'},
'559999902':{'en': 'Oi'},
'559999903':{'en': 'Oi'},
'559999904':{'en': 'Oi'},
'559999905':{'en': 'Oi'},
'559999933':{'en': 'Oi'},
'559999934':{'en': 'Oi'},
'559999935':{'en': 'Oi'},
'559999951':{'en': 'Oi'},
'559999952':{'en': 'Oi'},
'559999953':{'en': 'Oi'},
'559999954':{'en': 'Oi'},
'559999955':{'en': 'Oi'},
'559999970':{'en': 'Oi'},
'559999977':{'en': 'Oi'},
'559999978':{'en': 'Oi'},
'559999979':{'en': 'Oi'},
'559999984':{'en': 'Oi'},
'559999985':{'en': 'Oi'},
'559999986':{'en': 'Oi'},
'559999987':{'en': 'Oi'},
'559999989':{'en': 'Oi'},
'56211':{'en': 'Rural Telecommunications Chile S.A.'},
'562198':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'562220':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'562221':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'562222':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'562223':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'562224':{'en': 'Gtd Manquehue S.A.'},
'562225':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'562226':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'562227':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'562228':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5622290':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5622291':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5622292':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5622293':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5622294':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5622295':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5622296':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5622297':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56222981':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56222982':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56222983':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56222985':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56222986':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56222987':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56222988':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56222989':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5622299':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5622300':{'en': 'Entel'},
'5622301':{'en': 'Entel'},
'5622302':{'en': 'Entel'},
'5622303':{'en': 'Entel'},
'5622304':{'en': 'Cibeles Telecom'},
'56223050':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'56223051':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'56223052':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'56223053':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5622306':{'en': 'Gtd Manquehue S.A.'},
'5622307':{'en': 'Gtd Manquehue S.A.'},
'5622308':{'en': 'Entel'},
'56223093':{'en': 'Entel'},
'56223099':{'en': 'Gtd Telesat S.A.'},
'562231':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'562232':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5622323':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5622324':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5622325':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5622326':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'562233':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'562234':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'562235':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5622356':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5622357':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5622358':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5622359':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'562236':{'en': 'Entel'},
'562237':{'en': 'Entel'},
'562238':{'en': 'Gtd Telesat S.A.'},
'562239':{'en': 'Gtd Telesat S.A.'},
'5622400':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5622401':{'en': 'Claro'},
'5622402':{'en': 'Claro'},
'5622403':{'en': 'Claro'},
'5622404':{'en': 'Claro'},
'5622405':{'en': 'Claro'},
'56224060':{'en': 'Claro'},
'56224061':{'en': 'Claro'},
'56224062':{'en': 'Claro'},
'56224063':{'en': 'Claro'},
'56224064':{'en': 'Claro'},
'56224065':{'en': 'Entel'},
'56224066':{'en': 'Entel'},
'56224067':{'en': 'Entel'},
'56224068':{'en': 'Entel'},
'56224069':{'en': 'Entel'},
'5622407':{'en': 'Entel'},
'5622408':{'en': 'Entel'},
'5622409':{'en': 'Entel'},
'5622410':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5622411':{'en': 'Gtd Telesat S.A.'},
'5622412':{'en': 'Gtd Telesat S.A.'},
'5622413':{'en': 'Gtd Telesat S.A.'},
'5622414':{'en': 'Telefonica Del Sur S.A.'},
'5622415':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5622416':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5622417':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5622418':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5622419':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'562242':{'en': 'Entel'},
'562243':{'en': 'Entel'},
'562244':{'en': 'Entel'},
'5622450':{'en': 'Entel'},
'5622451':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'5622452':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'5622453':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5622454':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5622455':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'56224565':{'en': 'Claro'},
'56224566':{'en': 'Claro'},
'56224567':{'en': 'Claro'},
'56224568':{'en': 'Claro'},
'56224569':{'en': 'Claro'},
'5622457':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5622458':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5622459':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'562246':{'en': 'Entel'},
'562247':{'en': 'Entel'},
'5622474':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5622475':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'562248':{'en': 'Gtd Telesat S.A.'},
'5622480':{'en': 'Entel'},
'5622481':{'en': 'Entel'},
'562249':{'en': 'Gtd Telesat S.A.'},
'5622491':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5622492':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5622493':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5622494':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5622500':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5622501':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5622502':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5622503':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5622504':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5622505':{'en': 'Claro'},
'5622506':{'en': 'Claro'},
'56225070':{'en': 'Claro'},
'56225071':{'en': 'Claro'},
'56225072':{'en': 'Claro'},
'56225073':{'en': 'Claro'},
'56225074':{'en': 'Claro'},
'5622508':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5622509':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'562251':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'562252':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5622530':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5622531':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5622532':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5622533':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5622534':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5622535':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56225360':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56225361':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56225362':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56225363':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56225364':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56225365':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56225366':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56225367':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5622537':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5622538':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5622539':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'562254':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'562255':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5622560':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5622561':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5622562':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5622563':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5622564':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5622565':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5622566':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56225671':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'56225672':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'56225673':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'56225675':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'56225676':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'56225677':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'56225678':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'56225679':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5622568':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5622569':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'562257':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5622570':{'en': 'Claro'},
'5622571':{'en': 'Claro'},
'5622572':{'en': 'Will S.A.'},
'56225730':{'en': 'Stel Access S.A.'},
'56225731':{'en': 'Stel Access S.A.'},
'56225732':{'en': 'Stel Access S.A.'},
'56225733':{'en': 'Stel Access S.A.'},
'56225734':{'en': 'Stel Access S.A.'},
'56225735':{'en': 'Claro'},
'56225736':{'en': 'Claro'},
'56225737':{'en': 'Claro'},
'56225738':{'en': 'Claro'},
'56225739':{'en': 'Claro'},
'562258':{'en': 'Claro'},
'5622587':{'en': 'Gtd Telesat S.A.'},
'5622588':{'en': 'Chile.Com'},
'5622589':{'en': 'Gtd Telesat S.A.'},
'5622590':{'en': 'Ifx Networks Chile S.A.'},
'5622591':{'en': 'Gtd Telesat S.A.'},
'5622592':{'en': 'Gtd Telesat S.A.'},
'5622593':{'en': 'Will S.A.'},
'5622594':{'en': 'Claro'},
'5622595':{'en': 'Claro'},
'5622596':{'en': 'Entel'},
'5622597':{'en': 'Entel'},
'5622598':{'en': 'Gtd Telesat S.A.'},
'5622599':{'en': 'Gtd Telesat S.A.'},
'5622601':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56226020':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56226021':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56226022':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56226023':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56226024':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56226025':{'en': 'Claro'},
'56226026':{'en': 'Claro'},
'56226027':{'en': 'Claro'},
'56226028':{'en': 'Claro'},
'56226029':{'en': 'Claro'},
'5622603':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5622604':{'en': 'Entel'},
'5622605':{'en': 'Entel'},
'5622606':{'en': 'Entel'},
'5622607':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5622608':{'en': 'Entel'},
'5622609':{'en': 'Entel'},
'5622610':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5622611':{'en': 'Entel'},
'5622612':{'en': 'Entel'},
'5622613':{'en': 'Claro'},
'5622614':{'en': 'Claro'},
'5622615':{'en': 'Claro'},
'5622616':{'en': 'Gtd Telesat S.A.'},
'5622617':{'en': 'Gtd Telesat S.A.'},
'5622618':{'en': 'Gtd Telesat S.A.'},
'5622619':{'en': 'Claro'},
'562262':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'562263':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'562264':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'562265':{'en': 'Entel'},
'5622656':{'en': 'Netline'},
'5622657':{'en': 'Fullcom S.A.'},
'5622658':{'en': 'Fullcom S.A.'},
'5622659':{'en': 'Claro'},
'562266':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5622666':{'en': 'Claro'},
'562267':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'562268':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'562269':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'562270':{'en': 'Entel'},
'5622707':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5622710':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'56227115':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'56227116':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'56227117':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'56227118':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'56227119':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5622712':{'en': 'Quantax'},
'5622713':{'en': 'Gtd Telesat S.A.'},
'5622714':{'en': 'Claro'},
'5622715':{'en': 'Gtd Telesat S.A.'},
'5622716':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5622717':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5622718':{'en': 'Claro'},
'5622719':{'en': 'Claro'},
'5622720':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5622721':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5622722':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5622723':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5622724':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5622725':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5622726':{'en': 'Gtd Telesat S.A.'},
'5622727':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5622728':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5622729':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'562273':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'562274':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'562275':{'en': 'Gtd Manquehue S.A.'},
'562276':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5622760':{'en': 'Convergia Telecom S.A.'},
'5622761':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5622769':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'562277':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'562278':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5622784':{'en': 'Entel'},
'5622785':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5622788':{'en': 'Will S.A.'},
'5622789':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5622790':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5622791':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5622792':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5622793':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5622794':{'en': 'Rural Telecommunications Chile S.A.'},
'5622795':{'en': 'Entel'},
'5622796':{'en': 'Entel'},
'5622797':{'en': 'Gtd Telesat S.A.'},
'5622798':{'en': 'Entel'},
'5622799':{'en': 'Entel'},
'5622800':{'en': 'Entel'},
'5622807':{'en': 'Claro'},
'5622808':{'en': 'Claro'},
'5622809':{'en': 'Claro'},
'562281':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5622813':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5622816':{'en': 'Entel'},
'5622820':{'en': 'Entel'},
'5622821':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5622822':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5622823':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5622824':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5622825':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5622826':{'en': 'Gtd Telesat S.A.'},
'5622827':{'en': 'Gtd Telesat S.A.'},
'5622828':{'en': 'Gtd Telesat S.A.'},
'5622829':{'en': 'Gtd Telesat S.A.'},
'562283':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5622830':{'en': 'Claro'},
'5622833':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5622839':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'562284':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5622840':{'en': 'Telestar'},
'5622848':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'562285':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5622860':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5622861':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5622862':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5622863':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5622864':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5622865':{'en': 'Netel S.A.'},
'5622866':{'en': 'Claro'},
'5622867':{'en': 'Claro'},
'5622868':{'en': 'Will S.A.'},
'5622869':{'en': 'Netline'},
'562287':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5622877':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5622878':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5622880':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5622881':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5622882':{'en': 'Pronto Ip Ltda.'},
'5622883':{'en': 'Gtd Manquehue S.A.'},
'5622884':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5622885':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5622886':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5622887':{'en': 'Gtd Telesat S.A.'},
'5622888':{'en': 'Claro'},
'5622889':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56228900':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'56228901':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'56228902':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'56228903':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'56228904':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'56228905':{'en': 'Will S.A.'},
'56228906':{'en': 'Will S.A.'},
'56228907':{'en': 'Will S.A.'},
'56228908':{'en': 'Will S.A.'},
'56228909':{'en': 'Will S.A.'},
'5622891':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5622892':{'en': 'Gtd Telesat S.A.'},
'5622893':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5622894':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5622895':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5622896':{'en': 'Entel'},
'5622897':{'en': 'Claro'},
'5622898':{'en': 'Claro'},
'5622899':{'en': 'Claro'},
'5622900':{'en': 'Gtd Manquehue S.A.'},
'5622901':{'en': 'Gtd Telesat S.A.'},
'5622902':{'en': 'Claro'},
'5622903':{'en': 'Claro'},
'5622904':{'en': 'Claro'},
'5622905':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5622906':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5622907':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5622908':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5622909':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5622910':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5622912':{'en': 'Entel'},
'5622913':{'en': 'Entel'},
'5622914':{'en': 'Movistar'},
'5622915':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5622916':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5622917':{'en': 'Will S.A.'},
'5622918':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5622919':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'562292':{'en': 'Entel'},
'5622920':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5622921':{'en': 'Claro'},
'5622926':{'en': 'Claro'},
'5622929':{'en': 'Will S.A.'},
'5622930':{'en': 'Will S.A.'},
'5622931':{'en': 'Claro'},
'5622932':{'en': 'Claro'},
'5622933':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5622934':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'56229353':{'en': 'Gtd Telesat S.A.'},
'56229355':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'56229356':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'56229357':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'56229358':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'56229359':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'56229363':{'en': 'Gtd Telesat S.A.'},
'56229365':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'56229366':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'56229367':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'56229368':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'56229369':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5622937':{'en': 'Entel'},
'56229380':{'en': 'Entel'},
'56229381':{'en': 'Entel'},
'56229382':{'en': 'Entel'},
'56229383':{'en': 'Entel'},
'56229384':{'en': 'Entel'},
'56229385':{'en': 'Claro'},
'56229386':{'en': 'Claro'},
'56229387':{'en': 'Claro'},
'56229388':{'en': 'Claro'},
'56229389':{'en': 'Claro'},
'5622939':{'en': 'Gtd Telesat S.A.'},
'562294':{'en': 'Gtd Manquehue S.A.'},
'562295':{'en': 'Gtd Manquehue S.A.'},
'5622962':{'en': 'Telefonica Del Sur S.A.'},
'5622963':{'en': 'Entel'},
'5622964':{'en': 'Entel'},
'5622965':{'en': 'Entel'},
'5622966':{'en': 'Claro'},
'5622967':{'en': 'Claro'},
'5622968':{'en': 'Claro'},
'5622969':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5622970':{'en': 'Claro'},
'5622971':{'en': 'Claro'},
'5622972':{'en': 'Convergia Telecom S.A.'},
'5622973':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5622974':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5622975':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5622976':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5622977':{'en': 'Claro'},
'5622978':{'en': 'Claro'},
'5622979':{'en': 'Claro'},
'562298':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5622987':{'en': 'Claro'},
'5622988':{'en': 'Gtd Telesat S.A.'},
'5622989':{'en': 'Gtd Telesat S.A.'},
'5622993':{'en': 'Will S.A.'},
'5622994':{'en': 'Telecomunicaciones Net Uno Ltda.'},
'5622995':{'en': 'Claro'},
'5622996':{'en': 'Claro'},
'5622997':{'en': 'Will S.A.'},
'5622998':{'en': 'Claro'},
'5622999':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'562320':{'en': 'Entel'},
'5623210':{'en': 'Redvoiss'},
'5623211':{'en': 'Claro'},
'5623212':{'en': 'Claro'},
'5623213':{'en': 'Gtd Manquehue S.A.'},
'56232140':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'5623215':{'en': 'Cibeles Telecom'},
'5623216':{'en': 'Entel'},
'5623217':{'en': 'Entel'},
'5623218':{'en': 'Entel'},
'5623219':{'en': 'Entel'},
'562322':{'en': 'Entel'},
'562323':{'en': 'Entel'},
'5623236':{'en': 'Servicios Internet Limitada'},
'5623237':{'en': 'Claro'},
'5623238':{'en': 'Claro'},
'5623239':{'en': 'Claro'},
'562324':{'en': 'Entel'},
'5623240':{'en': 'Claro'},
'5623241':{'en': 'Claro'},
'562325':{'en': 'Entel'},
'562326':{'en': 'Vtr Comunicaciones Spa.'},
'5623260':{'en': 'Entel'},
'5623261':{'en': 'Entel'},
'5623262':{'en': 'Gtd Manquehue S.A.'},
'5623263':{'en': 'Gtd Manquehue S.A.'},
'562327':{'en': 'Movistar'},
'5623270':{'en': 'Vtr Comunicaciones Spa.'},
'5623271':{'en': 'Vtr Comunicaciones Spa.'},
'5623272':{'en': 'Optic Telecomunicaciones Ltda.'},
'562328':{'en': 'Movistar'},
'562329':{'en': 'Movistar'},
'562330':{'en': 'Movistar'},
'562331':{'en': 'Movistar'},
'5623321':{'en': 'Movistar'},
'5623322':{'en': 'Movistar'},
'5623323':{'en': 'Gtd Manquehue S.A.'},
'5623324':{'en': 'Gtd Manquehue S.A.'},
'5623325':{'en': 'Gtd Telesat S.A.'},
'5623326':{'en': 'Gtd Telesat S.A.'},
'5623327':{'en': 'Claro'},
'5623328':{'en': 'Claro'},
'5623329':{'en': 'Claro'},
'5623330':{'en': 'Claro'},
'5623331':{'en': 'Linksat Comunicaciones Spa.'},
'5623332':{'en': 'Linksat Comunicaciones Spa.'},
'5623333':{'en': 'Entel'},
'5623334':{'en': 'Claro'},
'5623335':{'en': 'Claro'},
'5623336':{'en': 'Claro'},
'5623337':{'en': 'Claro'},
'5623338':{'en': 'Claro'},
'562334':{'en': 'Entel'},
'5623348':{'en': 'Claro'},
'5623349':{'en': 'Claro'},
'562335':{'en': 'Movistar'},
'5623350':{'en': 'Claro'},
'5623351':{'en': 'Claro'},
'5623352':{'en': 'Claro'},
'562336':{'en': 'Movistar'},
'562337':{'en': 'Claro'},
'5623370':{'en': 'Movistar'},
'5623371':{'en': 'Movistar'},
'5623372':{'en': 'Movistar'},
'5623373':{'en': 'Movistar'},
'562338':{'en': 'Claro'},
'56236':{'en': 'Entel'},
'56322162':{'en': 'Entel'},
'56322163':{'en': 'Entel'},
'56322164':{'en': 'Entel'},
'56322165':{'en': 'Entel'},
'56322166':{'en': 'Entel'},
'56322167':{'en': 'Entel'},
'56322168':{'en': 'Entel'},
'56322169':{'en': 'Entel'},
'5632217':{'en': 'Entel'},
'5632218':{'en': 'Entel'},
'5632219':{'en': 'Entel'},
'563222':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5632230':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5632231':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5632232':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5632233':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5632234':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5632235':{'en': 'Gtd Telesat S.A.'},
'56322360':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56322361':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56322362':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56322363':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56322364':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56322365':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'56322366':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'56322367':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'56322368':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'56322369':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5632237':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5632238':{'en': 'Gtd Telesat S.A.'},
'5632239':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5632240':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'5632241':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'5632242':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'5632243':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'5632244':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'5632245':{'en': 'Entel'},
'5632246':{'en': 'Entel'},
'5632247':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5632248':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5632249':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'563225':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56322510':{'en': 'Gtd Telesat S.A.'},
'56322511':{'en': 'Gtd Telesat S.A.'},
'56322512':{'en': 'Gtd Telesat S.A.'},
'56322513':{'en': 'Gtd Telesat S.A.'},
'56322514':{'en': 'Gtd Telesat S.A.'},
'56322515':{'en': 'Claro'},
'56322516':{'en': 'Claro'},
'56322517':{'en': 'Claro'},
'56322518':{'en': 'Claro'},
'56322519':{'en': 'Claro'},
'5632252':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'5632254':{'en': 'Entel'},
'5632255':{'en': 'Entel'},
'563226':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56322700':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56322701':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56322702':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56322703':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56322704':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56322707':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56322708':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56322709':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5632271':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5632272':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5632273':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5632274':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5632275':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5632276':{'en': 'Claro'},
'5632277':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5632278':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5632279':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'563228':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'563229':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5632290':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'5632296':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5632311':{'en': 'Entel'},
'5632312':{'en': 'Entel'},
'56323130':{'en': 'Entel'},
'56323132':{'en': 'Entel'},
'56323133':{'en': 'Chile.Com'},
'56323134':{'en': 'Chile.Com'},
'56323135':{'en': 'Chile.Com'},
'56323136':{'en': 'Chile.Com'},
'56323137':{'en': 'Chile.Com'},
'56323138':{'en': 'Chile.Com'},
'56323139':{'en': 'Chile.Com'},
'5632314':{'en': 'Claro'},
'5632315':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5632316':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5632317':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5632318':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5632319':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5632320':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5632321':{'en': 'Claro'},
'5632322':{'en': 'Claro'},
'5632323':{'en': 'Convergia Telecom S.A.'},
'5632324':{'en': 'Claro'},
'5632325':{'en': 'Claro'},
'5632326':{'en': 'Entel'},
'5632327':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5632328':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5632329':{'en': 'Cibeles Telecom'},
'5632330':{'en': 'Cibeles Telecom'},
'5632331':{'en': 'Entel'},
'5632332':{'en': 'Entel'},
'5632333':{'en': 'Claro'},
'5632334':{'en': 'Claro'},
'5632335':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5632336':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5632337':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5632338':{'en': 'Claro'},
'5632339':{'en': 'Claro'},
'5632340':{'en': 'Claro'},
'5632341':{'en': 'Claro'},
'5632342':{'en': 'Entel'},
'5632343':{'en': 'Entel'},
'5632344':{'en': 'Entel'},
'5632345':{'en': 'Entel'},
'5632346':{'en': 'Entel'},
'5632347':{'en': 'Claro'},
'5632348':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'5632349':{'en': 'Servicios Internet Limitada'},
'563235':{'en': 'Entel'},
'563236':{'en': 'Movistar'},
'563237':{'en': 'Movistar'},
'5632377':{'en': 'Telestar'},
'5632380':{'en': 'Movistar'},
'5632381':{'en': 'Gtd Telesat S.A.'},
'56323820':{'en': 'Redvoiss'},
'5632383':{'en': 'Linksat Comunicaciones Spa.'},
'5632384':{'en': 'Linksat Comunicaciones Spa.'},
'5632385':{'en': 'Gtd Manquehue S.A.'},
'5632386':{'en': 'Gtd Manquehue S.A.'},
'56331980':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56331981':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56331982':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56331983':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56331984':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56331985':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56331986':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'563322':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5633220':{'en': 'Entel'},
'5633223':{'en': 'Claro'},
'5633224':{'en': 'Claro'},
'5633225':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5633231':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5633232':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5633233':{'en': 'Entel'},
'5633234':{'en': 'Entel'},
'5633235':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'5633236':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'5633237':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'5633238':{'en': 'Entel'},
'56332390':{'en': 'Claro'},
'56332391':{'en': 'Claro'},
'56332392':{'en': 'Claro'},
'56332393':{'en': 'Claro'},
'56332394':{'en': 'Claro'},
'56332395':{'en': 'Claro'},
'5633240':{'en': 'Servicios Internet Limitada'},
'5633241':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5633242':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'563324320':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'563324321':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'563324322':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'563324323':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'563324324':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'56332433':{'en': 'Entel'},
'56332434':{'en': 'Chile.Com'},
'5633244':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5633245':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5633246':{'en': 'Claro'},
'5633247':{'en': 'Claro'},
'5633248':{'en': 'Entel'},
'5633249':{'en': 'Entel'},
'5633250':{'en': 'Entel'},
'5633251':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5633252':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5633253':{'en': 'Entel'},
'5633254':{'en': 'Entel'},
'56332550':{'en': 'Redvoiss'},
'5633256':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5633271':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5633274':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5633276':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5633277':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56332781':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5633279':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5633281':{'en': 'Telestar'},
'56341980':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56341981':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56341982':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56341983':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56341984':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56341985':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56341986':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5634220':{'en': 'Entel'},
'5634221':{'en': 'Entel'},
'56342220':{'en': 'Claro'},
'56342221':{'en': 'Claro'},
'56342222':{'en': 'Claro'},
'56342223':{'en': 'Claro'},
'56342224':{'en': 'Claro'},
'56342225':{'en': 'Claro'},
'5634223':{'en': 'Entel'},
'5634224':{'en': 'Entel'},
'5634225':{'en': 'Entel'},
'5634226':{'en': 'Entel'},
'5634227':{'en': 'Vtr Comunicaciones Spa.'},
'5634228':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5634229':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5634231':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5634232':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'563423330':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'563423331':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'563423332':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'563423333':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'563423334':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'56342334':{'en': 'Entel'},
'5634234':{'en': 'Entel'},
'5634235':{'en': 'Entel'},
'5634236':{'en': 'Claro'},
'5634237':{'en': 'Claro'},
'5634238':{'en': 'Claro'},
'5634239':{'en': 'Entel'},
'5634240':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56342400':{'en': 'Chile.Com'},
'5634241':{'en': 'Claro'},
'5634242':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5634243':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5634244':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5634246':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5634247':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5634248':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5634249':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5634250':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5634251':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56342520':{'en': 'Redvoiss'},
'5634253':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5634257':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5634258':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5634259':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5634261':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5634263':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5634268':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5634277':{'en': 'Telestar'},
'5634290':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'5634291':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'56351980':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56351981':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56351982':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56351983':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56351984':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56351985':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56351986':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'563522':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5635227':{'en': 'Claro'},
'5635229':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5635231':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5635232':{'en': 'Claro'},
'5635233':{'en': 'Claro'},
'5635234':{'en': 'Claro'},
'5635235':{'en': 'Entel'},
'5635236':{'en': 'Entel'},
'5635237':{'en': 'Entel'},
'5635238':{'en': 'Entel'},
'56352390':{'en': 'Claro'},
'56352391':{'en': 'Claro'},
'56352392':{'en': 'Claro'},
'56352393':{'en': 'Claro'},
'56352394':{'en': 'Claro'},
'56352395':{'en': 'Claro'},
'5635240':{'en': 'Entel'},
'5635241':{'en': 'Entel'},
'56352420':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56352421':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56352422':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56352425':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'563524270':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'563524271':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'563524272':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'563524273':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'563524274':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'56352429':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56352430':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56352431':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56352432':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56352436':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56352437':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56352438':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56352439':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5635244':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5635245':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56352460':{'en': 'Chile.Com'},
'56352461':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56352462':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56352463':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56352466':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56352467':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56352469':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5635247':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5635248':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56352490':{'en': 'Redvoiss'},
'56352535':{'en': 'Entel'},
'5635256':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'5635257':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'5635258':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'5635277':{'en': 'Telestar'},
'5635279':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5635288':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56411970':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56411971':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56411972':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56411973':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56411974':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'5641210':{'en': 'Entel'},
'56412110':{'en': 'Quantax'},
'56412111':{'en': 'Quantax'},
'56412112':{'en': 'Quantax'},
'56412113':{'en': 'Quantax'},
'56412114':{'en': 'Quantax'},
'5641212':{'en': 'Entel'},
'5641213':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'56412140':{'en': 'Entel'},
'56412141':{'en': 'Entel'},
'5641215':{'en': 'Entel'},
'5641216':{'en': 'Entel'},
'5641217':{'en': 'Entel'},
'5641218':{'en': 'Entel'},
'5641219':{'en': 'Entel'},
'564122':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5641229':{'en': 'Gtd Telesat S.A.'},
'564123':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'564124':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5641246':{'en': 'Telefonica Del Sur S.A.'},
'564125':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5641256':{'en': 'Claro'},
'564126':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5641262':{'en': 'Gtd Telesat S.A.'},
'5641263':{'en': 'Gtd Telesat S.A.'},
'5641271':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5641272':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5641273':{'en': 'Telefonica Del Sur S.A.'},
'5641274':{'en': 'Telefonica Del Sur S.A.'},
'56412751':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56412752':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56412753':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56412754':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56412755':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56412756':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56412757':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56412758':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56412759':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5641276':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5641277':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56412780':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56412781':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56412782':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56412783':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56412784':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56412785':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56412786':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56412787':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56412788':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5641279':{'en': 'Telefonica Del Sur S.A.'},
'56412800':{'en': 'Quantax'},
'56412801':{'en': 'Quantax'},
'56412802':{'en': 'Quantax'},
'56412803':{'en': 'Quantax'},
'56412804':{'en': 'Quantax'},
'5641281':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'5641282':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'5641283':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'56412840':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'56412841':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'56412842':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'56412843':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'56412844':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'56412845':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'5641285':{'en': 'Entel'},
'5641286':{'en': 'Entel'},
'5641287':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5641288':{'en': 'Entel'},
'5641289':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'564129':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5641290':{'en': 'Claro'},
'5641291':{'en': 'Entel'},
'5641292':{'en': 'Entel'},
'5641296':{'en': 'Claro'},
'5641311':{'en': 'Claro'},
'5641312':{'en': 'Claro'},
'5641313':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5641314':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'56413150':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56413154':{'en': 'Chile.Com'},
'56413155':{'en': 'Chile.Com'},
'56413156':{'en': 'Chile.Com'},
'56413157':{'en': 'Chile.Com'},
'56413158':{'en': 'Chile.Com'},
'56413159':{'en': 'Chile.Com'},
'5641316':{'en': 'Telefonica Del Sur S.A.'},
'5641317':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5641318':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5641319':{'en': 'Claro'},
'5641320':{'en': 'Convergia Telecom S.A.'},
'5641321':{'en': 'Claro'},
'5641322':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5641323':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5641324':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5641325':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'56413260':{'en': 'Claro'},
'56413261':{'en': 'Claro'},
'56413262':{'en': 'Claro'},
'56413263':{'en': 'Claro'},
'56413264':{'en': 'Claro'},
'56413265':{'en': 'Claro'},
'5641327':{'en': 'Entel'},
'5641328':{'en': 'Entel'},
'56413290':{'en': 'Redvoiss'},
'56413300':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'56413302':{'en': 'Gtd Telesat S.A.'},
'5641331':{'en': 'Movistar'},
'5641332':{'en': 'Entel'},
'5641333':{'en': 'Entel'},
'5641334':{'en': 'Entel'},
'5641335':{'en': 'Entel'},
'5641336':{'en': 'Entel'},
'5641337':{'en': 'Servicios Internet Limitada'},
'56413380':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56413381':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56413382':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56413383':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56413384':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5641339':{'en': 'Telefonica Del Sur S.A.'},
'5641350':{'en': 'Entel'},
'5641351':{'en': 'Entel'},
'5641352':{'en': 'Entel'},
'5641355':{'en': 'Claro'},
'5641356':{'en': 'Claro'},
'5641357':{'en': 'Claro'},
'5641358':{'en': 'Claro'},
'5641359':{'en': 'Cibeles Telecom'},
'5641360':{'en': 'Entel'},
'5641361':{'en': 'Entel'},
'5641362':{'en': 'Entel'},
'5641363':{'en': 'Entel'},
'5641364':{'en': 'Entel'},
'5641377':{'en': 'Telestar'},
'56413800':{'en': 'Gtd Telesat S.A.'},
'56413801':{'en': 'Gtd Telesat S.A.'},
'56413802':{'en': 'Gtd Telesat S.A.'},
'56413803':{'en': 'Gtd Telesat S.A.'},
'56413804':{'en': 'Gtd Telesat S.A.'},
'5641383':{'en': 'Telefonica Del Sur S.A.'},
'5641397':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5641398':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'56421970':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56421971':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56421972':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56421973':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56421974':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56421975':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56421976':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56421980':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56421981':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56421982':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56421983':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56421984':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56421985':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56421986':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'564222':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5642225':{'en': 'Telefonica Del Sur S.A.'},
'5642231':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5642232':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5642233':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5642234':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56422351':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56422354':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'564223570':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'564223571':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'564223572':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'564223573':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'564223574':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'5642236':{'en': 'Claro'},
'56422370':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56422371':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56422372':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56422373':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56422374':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56422375':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56422378':{'en': 'Chile.Com'},
'56422379':{'en': 'Chile.Com'},
'5642238':{'en': 'Claro'},
'56422390':{'en': 'Gtd Telesat S.A.'},
'56422391':{'en': 'Gtd Telesat S.A.'},
'56422400':{'en': 'Claro'},
'56422401':{'en': 'Claro'},
'56422402':{'en': 'Claro'},
'56422403':{'en': 'Claro'},
'5642241':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5642242':{'en': 'Entel'},
'5642243':{'en': 'Entel'},
'5642244':{'en': 'Claro'},
'5642245':{'en': 'Claro'},
'5642246':{'en': 'Claro'},
'56422471':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56422481':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5642249':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5642250':{'en': 'Entel'},
'56422510':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56422511':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5642252':{'en': 'Entel'},
'5642253':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5642254':{'en': 'Entel'},
'5642255':{'en': 'Entel'},
'56422561':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56422565':{'en': 'Redvoiss'},
'56422571':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5642258':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5642259':{'en': 'Entel'},
'5642261':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5642262':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5642263':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56422641':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5642265':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56422661':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56422662':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56422663':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56422680':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56422681':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5642277':{'en': 'Telestar'},
'5642283':{'en': 'Entel'},
'56422842':{'en': 'Entel'},
'5642285':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'5642287':{'en': 'Entel'},
'5642296':{'en': 'Telefonica Del Sur S.A.'},
'56422970':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56422971':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56422972':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56431970':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56431971':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56431972':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56431973':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56431974':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56431975':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56431980':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56431981':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56431982':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56431983':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56431984':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56431985':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56431986':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5643221':{'en': 'Telefonica Del Sur S.A.'},
'56432220':{'en': 'Claro'},
'56432221':{'en': 'Claro'},
'56432222':{'en': 'Claro'},
'56432223':{'en': 'Claro'},
'5643223':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'56432240':{'en': 'Chile.Com'},
'56432241':{'en': 'Chile.Com'},
'56432243':{'en': 'Entel'},
'5643225':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5643226':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5643227':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5643228':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5643229':{'en': 'Entel'},
'5643230':{'en': 'Entel'},
'5643231':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5643232':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5643233':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5643234':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5643235':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5643236':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56432370':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56432371':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56432372':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56432373':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56432374':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56432375':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56432380':{'en': 'Gtd Telesat S.A.'},
'56432381':{'en': 'Gtd Telesat S.A.'},
'5643239':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5643240':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56432410':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56432411':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56432412':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56432415':{'en': 'Redvoiss'},
'56432419':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5643242':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56432431':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56432432':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'564324370':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'564324371':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'564324372':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'564324373':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'564324374':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'56432439':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5643244':{'en': 'Claro'},
'5643245':{'en': 'Claro'},
'5643246':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5643247':{'en': 'Claro'},
'5643248':{'en': 'Claro'},
'5643249':{'en': 'Claro'},
'5643251':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5643252':{'en': 'Entel'},
'5643253':{'en': 'Entel'},
'5643254':{'en': 'Entel'},
'56432551':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56432559':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5643256':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5643257':{'en': 'Entel'},
'56432581':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56432588':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56432589':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56432591':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56432599':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56432611':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56432619':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56432621':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56432629':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5643263':{'en': 'Entel'},
'5643264':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5643265':{'en': 'Entel'},
'5643266':{'en': 'Entel'},
'5643269':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5643277':{'en': 'Telestar'},
'5643287':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'5643296':{'en': 'Telefonica Del Sur S.A.'},
'56432970':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56432971':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56432972':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56451970':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56451971':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56451972':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56451973':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56451974':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56451975':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56451976':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56451980':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56451981':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56451982':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56451983':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56451984':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56451985':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56451986':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'564522':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5645230':{'en': 'Entel'},
'5645231':{'en': 'Telefonica Del Sur S.A.'},
'5645232':{'en': 'Telefonica Del Sur S.A.'},
'56452320':{'en': 'Entel'},
'56452321':{'en': 'Entel'},
'56452322':{'en': 'Entel'},
'56452323':{'en': 'Entel'},
'5645233':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5645234':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56452350':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56452351':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56452352':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56452353':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56452354':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56452355':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'564523570':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'564523571':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'564523572':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'564523573':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'564523574':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'5645236':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5645237':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5645238':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56452390':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56452391':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56452392':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56452393':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56452394':{'en': 'Claro'},
'56452395':{'en': 'Claro'},
'56452396':{'en': 'Claro'},
'56452399':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5645240':{'en': 'Telefonica Del Sur S.A.'},
'5645241':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5645242':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56452430':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56452431':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56452432':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56452433':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56452435':{'en': 'Claro'},
'56452436':{'en': 'Claro'},
'56452437':{'en': 'Claro'},
'56452438':{'en': 'Claro'},
'56452439':{'en': 'Claro'},
'5645244':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5645245':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5645246':{'en': 'Entel'},
'56452471':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56452472':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56452473':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56452474':{'en': 'Servicios Internet Limitada'},
'56452475':{'en': 'Servicios Internet Limitada'},
'56452476':{'en': 'Servicios Internet Limitada'},
'56452477':{'en': 'Servicios Internet Limitada'},
'56452478':{'en': 'Servicios Internet Limitada'},
'56452479':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5645248':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'56452490':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56452491':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56452492':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56452495':{'en': 'Claro'},
'56452496':{'en': 'Claro'},
'56452497':{'en': 'Claro'},
'56452498':{'en': 'Claro'},
'56452499':{'en': 'Claro'},
'56452503':{'en': 'Entel'},
'56452504':{'en': 'Entel'},
'56452505':{'en': 'Entel'},
'56452506':{'en': 'Entel'},
'56452507':{'en': 'Entel'},
'56452508':{'en': 'Entel'},
'56452509':{'en': 'Entel'},
'56452513':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56452515':{'en': 'Entel'},
'56452516':{'en': 'Entel'},
'56452517':{'en': 'Entel'},
'56452518':{'en': 'Entel'},
'56452519':{'en': 'Entel'},
'5645252':{'en': 'Telefonica Del Sur S.A.'},
'56452531':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56452532':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56452533':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56452534':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56452535':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56452537':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56452539':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5645254':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5645255':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56452562':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56452566':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56452568':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56452573':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56452574':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56452578':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56452581':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56452583':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56452585':{'en': 'Claro'},
'56452586':{'en': 'Claro'},
'56452587':{'en': 'Claro'},
'56452588':{'en': 'Claro'},
'56452589':{'en': 'Claro'},
'5645259':{'en': 'Telefonica Del Sur S.A.'},
'5645260':{'en': 'Entel'},
'56452611':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56452612':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56452613':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56452614':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56452615':{'en': 'Claro'},
'56452616':{'en': 'Claro'},
'56452617':{'en': 'Claro'},
'56452618':{'en': 'Claro'},
'56452619':{'en': 'Claro'},
'56452620':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'56452621':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'56452622':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'56452623':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'56452624':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'56452625':{'en': 'Entel'},
'56452626':{'en': 'Entel'},
'56452627':{'en': 'Entel'},
'56452628':{'en': 'Entel'},
'56452629':{'en': 'Entel'},
'56452631':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56452634':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56452635':{'en': 'Entel'},
'56452636':{'en': 'Entel'},
'56452637':{'en': 'Entel'},
'56452638':{'en': 'Entel'},
'56452639':{'en': 'Entel'},
'5645264':{'en': 'Telefonica Del Sur S.A.'},
'56452651':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56452652':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56452655':{'en': 'Entel'},
'56452656':{'en': 'Entel'},
'56452657':{'en': 'Entel'},
'56452658':{'en': 'Entel'},
'56452659':{'en': 'Entel'},
'56452661':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56452664':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56452665':{'en': 'Entel'},
'56452666':{'en': 'Entel'},
'56452667':{'en': 'Entel'},
'56452668':{'en': 'Entel'},
'56452669':{'en': 'Entel'},
'56452671':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56452673':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56452675':{'en': 'Entel'},
'56452676':{'en': 'Entel'},
'56452677':{'en': 'Entel'},
'56452678':{'en': 'Entel'},
'56452679':{'en': 'Entel'},
'5645268':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5645269':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5645271':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5645272':{'en': 'Telefonica Del Sur S.A.'},
'5645273':{'en': 'Telefonica Del Sur S.A.'},
'5645274':{'en': 'Telefonica Del Sur S.A.'},
'56452753':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56452755':{'en': 'Telefonica Del Sur S.A.'},
'56452756':{'en': 'Telefonica Del Sur S.A.'},
'56452757':{'en': 'Telefonica Del Sur S.A.'},
'56452758':{'en': 'Telefonica Del Sur S.A.'},
'56452759':{'en': 'Telefonica Del Sur S.A.'},
'5645276':{'en': 'Telestar'},
'56452770':{'en': 'Claro'},
'56452771':{'en': 'Claro'},
'56452772':{'en': 'Claro'},
'56452774':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56452775':{'en': 'Entel'},
'56452776':{'en': 'Entel'},
'56452777':{'en': 'Entel'},
'56452778':{'en': 'Entel'},
'56452779':{'en': 'Entel'},
'56452781':{'en': 'Redvoiss'},
'56452783':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56452785':{'en': 'Claro'},
'56452786':{'en': 'Claro'},
'56452787':{'en': 'Claro'},
'56452788':{'en': 'Claro'},
'56452789':{'en': 'Claro'},
'56452793':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56452794':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56452795':{'en': 'Entel'},
'56452796':{'en': 'Entel'},
'56452797':{'en': 'Entel'},
'56452798':{'en': 'Entel'},
'56452799':{'en': 'Entel'},
'5645280':{'en': 'Chile.Com'},
'5645281':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5645282':{'en': 'Claro'},
'5645283':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5645284':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56452851':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56452855':{'en': 'Claro'},
'56452856':{'en': 'Claro'},
'56452857':{'en': 'Claro'},
'56452858':{'en': 'Claro'},
'56452859':{'en': 'Claro'},
'5645286':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5645287':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56452881':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56452882':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56452885':{'en': 'Telefonica Del Sur S.A.'},
'56452886':{'en': 'Telefonica Del Sur S.A.'},
'56452887':{'en': 'Telefonica Del Sur S.A.'},
'56452888':{'en': 'Telefonica Del Sur S.A.'},
'56452889':{'en': 'Telefonica Del Sur S.A.'},
'56452891':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56452892':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56452895':{'en': 'Entel'},
'56452896':{'en': 'Entel'},
'56452897':{'en': 'Entel'},
'56452898':{'en': 'Entel'},
'56452899':{'en': 'Entel'},
'5645290':{'en': 'Movistar'},
'5645291':{'en': 'Entel'},
'56452920':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56452921':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56452922':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56452923':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56452924':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56452925':{'en': 'Gtd Telesat S.A.'},
'56452926':{'en': 'Gtd Telesat S.A.'},
'56452927':{'en': 'Gtd Telesat S.A.'},
'56452928':{'en': 'Gtd Telesat S.A.'},
'56452929':{'en': 'Gtd Telesat S.A.'},
'5645293':{'en': 'Claro'},
'5645294':{'en': 'Claro'},
'5645295':{'en': 'Gtd Telesat S.A.'},
'5645296':{'en': 'Gtd Telesat S.A.'},
'5645297':{'en': 'Gtd Telesat S.A.'},
'5645298':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5645299':{'en': 'Entel'},
'56511980':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56511981':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56511982':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56511983':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56511984':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56511985':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56511986':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'565122':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5651231':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5651232':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5651233':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5651234':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5651235':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5651236':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5651238':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5651239':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5651240':{'en': 'Entel'},
'5651241':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5651242':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5651243':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5651244':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56512451':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56512453':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'56512455':{'en': 'Claro'},
'56512456':{'en': 'Claro'},
'56512457':{'en': 'Claro'},
'56512458':{'en': 'Claro'},
'56512459':{'en': 'Claro'},
'5651246':{'en': 'Claro'},
'5651247':{'en': 'Claro'},
'5651248':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5651249':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'56512505':{'en': 'Redvoiss'},
'56512510':{'en': 'Claro'},
'56512511':{'en': 'Claro'},
'56512512':{'en': 'Claro'},
'56512513':{'en': 'Claro'},
'56512514':{'en': 'Claro'},
'56512518':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56512519':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5651252':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5651253':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5651254':{'en': 'Entel'},
'5651255':{'en': 'Entel'},
'5651256':{'en': 'Entel'},
'5651257':{'en': 'Entel'},
'5651258':{'en': 'Claro'},
'5651259':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5651260':{'en': 'Entel'},
'5651261':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5651262':{'en': 'Claro'},
'5651263':{'en': 'Claro'},
'5651264':{'en': 'Claro'},
'56512651':{'en': 'Entel'},
'56512652':{'en': 'Chile.Com'},
'56512653':{'en': 'Chile.Com'},
'56512654':{'en': 'Chile.Com'},
'56512655':{'en': 'Chile.Com'},
'5651266':{'en': 'Entel'},
'5651267':{'en': 'Entel'},
'5651268':{'en': 'Entel'},
'5651269':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5651271':{'en': 'Claro'},
'5651272':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5651273':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'56512740':{'en': 'Claro'},
'56512741':{'en': 'Claro'},
'56512742':{'en': 'Claro'},
'56512743':{'en': 'Claro'},
'56512744':{'en': 'Claro'},
'56512745':{'en': 'Claro'},
'5651275':{'en': 'Entel'},
'5651276':{'en': 'Telestar'},
'5651277':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5651278':{'en': 'Entel'},
'5651279':{'en': 'Entel'},
'56521980':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56521981':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56521982':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56521983':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56521984':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56521985':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56521986':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'565222':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5652227':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5652229':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5652231':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5652232':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5652233':{'en': 'Claro'},
'5652234':{'en': 'Claro'},
'5652235':{'en': 'Claro'},
'5652236':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5652237':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5652238':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'56522390':{'en': 'Gtd Telesat S.A.'},
'56522391':{'en': 'Gtd Telesat S.A.'},
'56522392':{'en': 'Chile.Com'},
'56522393':{'en': 'Chile.Com'},
'56522394':{'en': 'Chile.Com'},
'56522395':{'en': 'Chile.Com'},
'5652240':{'en': 'Entel'},
'5652241':{'en': 'Claro'},
'5652242':{'en': 'Entel'},
'5652243':{'en': 'Entel'},
'5652244':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5652245':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5652246':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5652247':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5652248':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56522490':{'en': 'Claro'},
'56522491':{'en': 'Claro'},
'56522492':{'en': 'Claro'},
'56522493':{'en': 'Claro'},
'56522494':{'en': 'Claro'},
'56522495':{'en': 'Claro'},
'5652250':{'en': 'Entel'},
'5652251':{'en': 'Entel'},
'5652252':{'en': 'Entel'},
'5652253':{'en': 'Entel'},
'5652254':{'en': 'Entel'},
'56522552':{'en': 'Entel'},
'56522555':{'en': 'Redvoiss'},
'565225570':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'565225571':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'565225572':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'565225573':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'565225574':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'5652256':{'en': 'Entel'},
'5652257':{'en': 'Entel'},
'5652258':{'en': 'Movistar'},
'5652259':{'en': 'Movistar'},
'5652260':{'en': 'Movistar'},
'5652261':{'en': 'Movistar'},
'5652262':{'en': 'Movistar'},
'5652268':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56531980':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56531981':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56531982':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56531983':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56531984':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56531985':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56531986':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5653220':{'en': 'Entel'},
'5653221':{'en': 'Entel'},
'56532220':{'en': 'Claro'},
'56532221':{'en': 'Claro'},
'56532222':{'en': 'Claro'},
'56532223':{'en': 'Claro'},
'5653232':{'en': 'Claro'},
'5653233':{'en': 'Claro'},
'5653234':{'en': 'Claro'},
'5653235':{'en': 'Claro'},
'5653236':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5653237':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5653242':{'en': 'Entel'},
'5653243':{'en': 'Entel'},
'5653244':{'en': 'Entel'},
'56532453':{'en': 'Entel'},
'56532455':{'en': 'Redvoiss'},
'5653246':{'en': 'Entel'},
'5653247':{'en': 'Entel'},
'5653248':{'en': 'Entel'},
'5653249':{'en': 'Entel'},
'5653252':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56532531':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56532536':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56532538':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56532539':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5653254':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56532551':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56532552':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56532553':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'565325570':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'565325571':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'565325572':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'565325573':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'565325574':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'56532558':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5653259':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5653262':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5653263':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5653264':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5653265':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5653266':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56532681':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56532686':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56532691':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56532693':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56532696':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56532711':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56532712':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56532718':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56532721':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56532726':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56532728':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56532731':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56532738':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56532741':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56532748':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56551980':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56551981':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56551982':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56551983':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56551984':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56551985':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56551986':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'565522':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5655231':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56552320':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56552321':{'en': 'Entel'},
'56552322':{'en': 'Entel'},
'56552323':{'en': 'Entel'},
'56552324':{'en': 'Entel'},
'56552325':{'en': 'Servicios Internet Limitada'},
'56552326':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56552327':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56552328':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56552329':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5655233':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5655234':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5655235':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5655236':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5655237':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5655238':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5655239':{'en': 'Gtd Telesat S.A.'},
'5655240':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5655241':{'en': 'Entel'},
'5655242':{'en': 'Entel'},
'5655243':{'en': 'Entel'},
'5655244':{'en': 'Entel'},
'5655245':{'en': 'Gtd Telesat S.A.'},
'5655246':{'en': 'Gtd Telesat S.A.'},
'5655247':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5655248':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5655249':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5655250':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5655251':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5655252':{'en': 'Claro'},
'5655253':{'en': 'Claro'},
'5655254':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5655255':{'en': 'Entel'},
'5655256':{'en': 'Entel'},
'5655257':{'en': 'Entel'},
'5655258':{'en': 'Netglobalis Telecom S.A.'},
'5655259':{'en': 'Entel'},
'56552601':{'en': 'Movistar'},
'56552602':{'en': 'Movistar'},
'56552603':{'en': 'Movistar'},
'56552604':{'en': 'Movistar'},
'56552605':{'en': 'Movistar'},
'56552606':{'en': 'Movistar'},
'56552607':{'en': 'Movistar'},
'56552608':{'en': 'Movistar'},
'56552609':{'en': 'Movistar'},
'5655261':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5655262':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5655263':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5655264':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5655265':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5655266':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5655267':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5655268':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5655269':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56552705':{'en': 'Claro'},
'56552706':{'en': 'Claro'},
'56552707':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'5655271':{'en': 'Claro'},
'5655272':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5655273':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5655274':{'en': 'Claro'},
'56552750':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'56552751':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'56552752':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'56552753':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'56552754':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'56552755':{'en': 'Entel'},
'56552756':{'en': 'Claro'},
'56552757':{'en': 'Claro'},
'56552758':{'en': 'Claro'},
'56552759':{'en': 'Claro'},
'5655276':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5655277':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5655278':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5655279':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5655280':{'en': 'Cibeles Telecom'},
'5655281':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5655282':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5655283':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5655284':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5655285':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5655286':{'en': 'Claro'},
'5655287':{'en': 'Convergia Telecom S.A.'},
'5655288':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5655289':{'en': 'Entel'},
'565529':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5655290':{'en': 'Movistar'},
'5655291':{'en': 'Gtd Telesat S.A.'},
'5655297':{'en': 'Telestar'},
'5655298':{'en': 'Entel'},
'56553280':{'en': 'Redvoiss'},
'56553320':{'en': 'Entel'},
'5655334':{'en': 'Movistar'},
'5655335':{'en': 'Movistar'},
'5655336':{'en': 'Movistar'},
'5655337':{'en': 'Movistar'},
'5655338':{'en': 'Movistar'},
'5657221':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5657222':{'en': 'Claro'},
'5657223':{'en': 'Convergia Telecom S.A.'},
'5657224':{'en': 'Entel'},
'5657225':{'en': 'Chile.Com'},
'5657226':{'en': 'Entel'},
'5657227':{'en': 'Entel'},
'5657228':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5657229':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5657231':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5657232':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5657233':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5657234':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5657235':{'en': 'Claro'},
'5657236':{'en': 'Claro'},
'5657237':{'en': 'Gtd Telesat S.A.'},
'5657238':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5657239':{'en': 'Gtd Telesat S.A.'},
'565724':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5657250':{'en': 'Movistar'},
'5657251':{'en': 'Entel'},
'5657252':{'en': 'Entel'},
'5657253':{'en': 'Entel'},
'5657254':{'en': 'Entel'},
'56572550':{'en': 'Redvoiss'},
'56572557':{'en': 'Entel'},
'5657256':{'en': 'Claro'},
'5657257':{'en': 'Entel'},
'5657258':{'en': 'Gtd Telesat S.A.'},
'5657259':{'en': 'Claro'},
'56572610':{'en': 'Claro'},
'56572611':{'en': 'Claro'},
'56572612':{'en': 'Claro'},
'56572613':{'en': 'Claro'},
'5657262':{'en': 'Entel'},
'5657263':{'en': 'Entel'},
'5657271':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5657272':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5657273':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56572741':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56572742':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56572743':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56572744':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56572747':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'56572751':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56572752':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56572753':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56572754':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56572756':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56572757':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56572758':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56572759':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5657276':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5657277':{'en': 'Telestar'},
'5657278':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5657279':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5657281':{'en': 'Gtd Telesat S.A.'},
'5657283':{'en': 'Movistar'},
'5657284':{'en': 'Gtd Manquehue S.A.'},
'5657285':{'en': 'Entel'},
'5657286':{'en': 'Entel'},
'5657287':{'en': 'Entel'},
'5657288':{'en': 'Entel'},
'565822':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5658231':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5658232':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5658233':{'en': 'Claro'},
'5658234':{'en': 'Claro'},
'5658235':{'en': 'Claro'},
'5658236':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5658237':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5658238':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56582390':{'en': 'Gtd Telesat S.A.'},
'56582391':{'en': 'Gtd Telesat S.A.'},
'56582395':{'en': 'Redvoiss'},
'5658240':{'en': 'Entel'},
'5658241':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5658242':{'en': 'Claro'},
'5658243':{'en': 'Claro'},
'56582440':{'en': 'Claro'},
'56582441':{'en': 'Claro'},
'56582442':{'en': 'Claro'},
'56582443':{'en': 'Claro'},
'56582458':{'en': 'Entel'},
'5658246':{'en': 'Convergia Telecom S.A.'},
'5658247':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5658248':{'en': 'Chile.Com'},
'5658249':{'en': 'Entel'},
'5658250':{'en': 'Entel'},
'5658251':{'en': 'Entel'},
'5658252':{'en': 'Entel'},
'5658253':{'en': 'Entel'},
'5658256':{'en': 'Entel'},
'5658257':{'en': 'Entel'},
'5658258':{'en': 'Entel'},
'5658259':{'en': 'Entel'},
'56582747':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'5658277':{'en': 'Telestar'},
'5658283':{'en': 'Movistar'},
'56582892':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5658298':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'56611980':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56611981':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56611982':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56611983':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56611984':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56611985':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56611986':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'566122':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56612311':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56612315':{'en': 'Redvoiss'},
'5661232':{'en': 'Claro'},
'56612331':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'566123330':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'566123331':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'566123332':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'566123333':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'566123334':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'5661234':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5661235':{'en': 'Claro'},
'5661236':{'en': 'Claro'},
'5661237':{'en': 'Claro'},
'5661238':{'en': 'Chile.Com'},
'5661239':{'en': 'Claro'},
'5661241':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5661242':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5661245':{'en': 'Entel'},
'56612580':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56612581':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5661261':{'en': 'Entel'},
'5661262':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5661263':{'en': 'Entel'},
'5661264':{'en': 'Entel'},
'5661269':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5661271':{'en': 'Entel'},
'5661272':{'en': 'Entel'},
'5661273':{'en': 'Entel'},
'5661274':{'en': 'Entel'},
'56612761':{'en': 'Entel'},
'56631970':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56631971':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56631972':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56631973':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56631974':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56631975':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56632174':{'en': 'Entel'},
'566322':{'en': 'Telefonica Del Sur S.A.'},
'5663225':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5663226':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5663231':{'en': 'Telefonica Del Sur S.A.'},
'5663232':{'en': 'Entel'},
'5663233':{'en': 'Entel'},
'5663234':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5663235':{'en': 'Claro'},
'5663236':{'en': 'Claro'},
'56632370':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56632371':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56632372':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56632373':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56632374':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56632375':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'5663238':{'en': 'Claro'},
'56632392':{'en': 'Chile.Com'},
'56632393':{'en': 'Chile.Com'},
'56632394':{'en': 'Chile.Com'},
'56632395':{'en': 'Chile.Com'},
'56632411':{'en': 'Telefonica Del Sur S.A.'},
'56632420':{'en': 'Telefonica Del Sur S.A.'},
'56632421':{'en': 'Telefonica Del Sur S.A.'},
'56632422':{'en': 'Telefonica Del Sur S.A.'},
'56632425':{'en': 'Telefonica Del Sur S.A.'},
'56632426':{'en': 'Telefonica Del Sur S.A.'},
'56632427':{'en': 'Telefonica Del Sur S.A.'},
'56632428':{'en': 'Telefonica Del Sur S.A.'},
'56632429':{'en': 'Telefonica Del Sur S.A.'},
'5663243':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'56632441':{'en': 'Telefonica Del Sur S.A.'},
'56632442':{'en': 'Telefonica Del Sur S.A.'},
'56632444':{'en': 'Telefonica Del Sur S.A.'},
'5663245':{'en': 'Telefonica Del Sur S.A.'},
'56632460':{'en': 'Telefonica Del Sur S.A.'},
'56632461':{'en': 'Telefonica Del Sur S.A.'},
'56632462':{'en': 'Telefonica Del Sur S.A.'},
'56632465':{'en': 'Telefonica Del Sur S.A.'},
'56632467':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'56632471':{'en': 'Telefonica Del Sur S.A.'},
'56632480':{'en': 'Telefonica Del Sur S.A.'},
'56632481':{'en': 'Telefonica Del Sur S.A.'},
'56632482':{'en': 'Telefonica Del Sur S.A.'},
'56632491':{'en': 'Telefonica Del Sur S.A.'},
'5663251':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5663252':{'en': 'Telefonica Del Sur S.A.'},
'5663253':{'en': 'Entel'},
'56632540':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'56632541':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'56632542':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'56632543':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'56632544':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5663255':{'en': 'Entel'},
'56632563':{'en': 'Entel'},
'5663257':{'en': 'Telefonica Del Sur S.A.'},
'5663258':{'en': 'Telefonica Del Sur S.A.'},
'5663259':{'en': 'Claro'},
'56632600':{'en': 'Claro'},
'56632601':{'en': 'Claro'},
'56632602':{'en': 'Claro'},
'56632603':{'en': 'Claro'},
'5663261':{'en': 'Claro'},
'5663262':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5663263':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5663264':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5663265':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5663266':{'en': 'Entel'},
'5663267':{'en': 'Entel'},
'5663268':{'en': 'Entel'},
'5663269':{'en': 'Entel'},
'56632710':{'en': 'Entel'},
'56632711':{'en': 'Entel'},
'56632712':{'en': 'Entel'},
'56632713':{'en': 'Entel'},
'56632715':{'en': 'Entel'},
'56632716':{'en': 'Entel'},
'56632717':{'en': 'Entel'},
'56632718':{'en': 'Entel'},
'56632719':{'en': 'Entel'},
'5663272':{'en': 'Entel'},
'56632730':{'en': 'Redvoiss'},
'5663277':{'en': 'Telestar'},
'5663296':{'en': 'Telefonica Del Sur S.A.'},
'56632970':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56632971':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56632979':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56641974':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56641975':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56641976':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56641977':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56641978':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'5664220':{'en': 'Telefonica Del Sur S.A.'},
'5664221':{'en': 'Telefonica Del Sur S.A.'},
'5664222':{'en': 'Telefonica Del Sur S.A.'},
'5664223':{'en': 'Telefonica Del Sur S.A.'},
'5664224':{'en': 'Telefonica Del Sur S.A.'},
'5664225':{'en': 'Telefonica Del Sur S.A.'},
'5664226':{'en': 'Telefonica Del Sur S.A.'},
'5664227':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'56642280':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'56642281':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'56642282':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'56642283':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'56642284':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'56642290':{'en': 'Redvoiss'},
'56642294':{'en': 'Telefonica Del Sur S.A.'},
'56642295':{'en': 'Telefonica Del Sur S.A.'},
'56642296':{'en': 'Telefonica Del Sur S.A.'},
'5664231':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5664232':{'en': 'Telefonica Del Sur S.A.'},
'5664233':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5664234':{'en': 'Telefonica Del Sur S.A.'},
'56642350':{'en': 'Telefonica Del Sur S.A.'},
'56642351':{'en': 'Telefonica Del Sur S.A.'},
'56642352':{'en': 'Telefonica Del Sur S.A.'},
'56642353':{'en': 'Telefonica Del Sur S.A.'},
'56642354':{'en': 'Telefonica Del Sur S.A.'},
'56642355':{'en': 'Telefonica Del Sur S.A.'},
'566423570':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'566423571':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'566423572':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'566423573':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'566423574':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'5664236':{'en': 'Telefonica Del Sur S.A.'},
'56642370':{'en': 'Telefonica Del Sur S.A.'},
'56642371':{'en': 'Telefonica Del Sur S.A.'},
'56642372':{'en': 'Telefonica Del Sur S.A.'},
'56642374':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56642375':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56642376':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56642377':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56642378':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56642379':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'5664238':{'en': 'Telefonica Del Sur S.A.'},
'56642391':{'en': 'Telefonica Del Sur S.A.'},
'56642392':{'en': 'Chile.Com'},
'56642393':{'en': 'Chile.Com'},
'56642394':{'en': 'Chile.Com'},
'56642395':{'en': 'Chile.Com'},
'56642396':{'en': 'Telefonica Del Sur S.A.'},
'56642400':{'en': 'Claro'},
'56642401':{'en': 'Claro'},
'56642402':{'en': 'Claro'},
'56642403':{'en': 'Claro'},
'5664241':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5664242':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5664243':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5664244':{'en': 'Claro'},
'5664245':{'en': 'Claro'},
'5664246':{'en': 'Claro'},
'5664247':{'en': 'Telefonica Del Sur S.A.'},
'5664248':{'en': 'Claro'},
'5664249':{'en': 'Claro'},
'5664251':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5664252':{'en': 'Telefonica Del Sur S.A.'},
'5664253':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5664254':{'en': 'Entel'},
'5664255':{'en': 'Entel'},
'5664256':{'en': 'Entel'},
'5664257':{'en': 'Telefonica Del Sur S.A.'},
'5664258':{'en': 'Entel'},
'5664259':{'en': 'Entel'},
'5664261':{'en': 'Entel'},
'5664262':{'en': 'Entel'},
'5664263':{'en': 'Entel'},
'5664264':{'en': 'Entel'},
'56642664':{'en': 'Entel'},
'5664267':{'en': 'Entel'},
'5664268':{'en': 'Entel'},
'5664277':{'en': 'Telestar'},
'5664296':{'en': 'Telefonica Del Sur S.A.'},
'56642970':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56642971':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56642979':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56651970':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56651971':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56651972':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56651973':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56651979':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56651980':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56651981':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56651982':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56651983':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56651984':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56651985':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56651986':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5665220':{'en': 'Telefonica Del Sur S.A.'},
'5665221':{'en': 'Telefonica Del Sur S.A.'},
'5665222':{'en': 'Telefonica Del Sur S.A.'},
'5665223':{'en': 'Telefonica Del Sur S.A.'},
'56652240':{'en': 'Telefonica Del Sur S.A.'},
'56652241':{'en': 'Telefonica Del Sur S.A.'},
'56652242':{'en': 'Telefonica Del Sur S.A.'},
'56652243':{'en': 'Telefonica Del Sur S.A.'},
'56652244':{'en': 'Telefonica Del Sur S.A.'},
'56652245':{'en': 'Telefonica Del Sur S.A.'},
'56652246':{'en': 'Telefonica Del Sur S.A.'},
'5665225':{'en': 'Telefonica Del Sur S.A.'},
'5665226':{'en': 'Telefonica Del Sur S.A.'},
'5665227':{'en': 'Telefonica Del Sur S.A.'},
'5665228':{'en': 'Telefonica Del Sur S.A.'},
'5665229':{'en': 'Telefonica Del Sur S.A.'},
'5665231':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5665232':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5665233':{'en': 'Telefonica Del Sur S.A.'},
'5665234':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5665235':{'en': 'Entel'},
'5665236':{'en': 'Entel'},
'56652370':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56652371':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56652372':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56652373':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56652374':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'5665238':{'en': 'Entel'},
'5665239':{'en': 'Claro'},
'56652400':{'en': 'Claro'},
'56652401':{'en': 'Claro'},
'56652402':{'en': 'Claro'},
'56652403':{'en': 'Claro'},
'56652404':{'en': 'Claro'},
'56652405':{'en': 'Claro'},
'5665241':{'en': 'Claro'},
'56652420':{'en': 'Telefonica Del Sur S.A.'},
'56652421':{'en': 'Telefonica Del Sur S.A.'},
'56652422':{'en': 'Telefonica Del Sur S.A.'},
'56652423':{'en': 'Telefonica Del Sur S.A.'},
'56652424':{'en': 'Telefonica Del Sur S.A.'},
'56652425':{'en': 'Telefonica Del Sur S.A.'},
'56652426':{'en': 'Telefonica Del Sur S.A.'},
'566524270':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'566524271':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'566524272':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'566524273':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'566524274':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'5665243':{'en': 'Telefonica Del Sur S.A.'},
'56652440':{'en': 'Telefonica Del Sur S.A.'},
'56652441':{'en': 'Telefonica Del Sur S.A.'},
'56652442':{'en': 'Telefonica Del Sur S.A.'},
'56652443':{'en': 'Telefonica Del Sur S.A.'},
'56652444':{'en': 'Telefonica Del Sur S.A.'},
'56652445':{'en': 'Telefonica Del Sur S.A.'},
'56652446':{'en': 'Telefonica Del Sur S.A.'},
'56652451':{'en': 'Telefonica Del Sur S.A.'},
'56652456':{'en': 'Telefonica Del Sur S.A.'},
'5665246':{'en': 'Telefonica Del Sur S.A.'},
'5665247':{'en': 'Telefonica Del Sur S.A.'},
'5665248':{'en': 'Telefonica Del Sur S.A.'},
'5665249':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5665250':{'en': 'Entel'},
'5665251':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5665252':{'en': 'Telefonica Del Sur S.A.'},
'5665253':{'en': 'Telefonica Del Sur S.A.'},
'5665254':{'en': 'Telefonica Del Sur S.A.'},
'5665255':{'en': 'Claro'},
'5665256':{'en': 'Claro'},
'5665257':{'en': 'Telefonica Del Sur S.A.'},
'5665258':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5665259':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5665261':{'en': 'Telefonica Del Sur S.A.'},
'5665262':{'en': 'Telefonica Del Sur S.A.'},
'5665263':{'en': 'Telefonica Del Sur S.A.'},
'56652640':{'en': 'Telefonica Del Sur S.A.'},
'56652641':{'en': 'Telefonica Del Sur S.A.'},
'56652642':{'en': 'Telefonica Del Sur S.A.'},
'56652643':{'en': 'Telefonica Del Sur S.A.'},
'56652651':{'en': 'Telefonica Del Sur S.A.'},
'56652661':{'en': 'Telefonica Del Sur S.A.'},
'56652667':{'en': 'Telefonica Del Sur S.A.'},
'56652671':{'en': 'Telefonica Del Sur S.A.'},
'56652672':{'en': 'Telefonica Del Sur S.A.'},
'56652673':{'en': 'Telefonica Del Sur S.A.'},
'56652677':{'en': 'Telefonica Del Sur S.A.'},
'5665268':{'en': 'Telefonica Del Sur S.A.'},
'56652691':{'en': 'Telefonica Del Sur S.A.'},
'56652696':{'en': 'Telefonica Del Sur S.A.'},
'5665271':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'56652721':{'en': 'Telefonica Del Sur S.A.'},
'56652725':{'en': 'Chile.Com'},
'56652726':{'en': 'Chile.Com'},
'56652731':{'en': 'Telefonica Del Sur S.A.'},
'56652741':{'en': 'Telefonica Del Sur S.A.'},
'5665275':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'56652765':{'en': 'Entel'},
'5665277':{'en': 'Entel'},
'5665278':{'en': 'Claro'},
'5665279':{'en': 'Telestar'},
'5665280':{'en': 'Entel'},
'5665281':{'en': 'Convergia Telecom S.A.'},
'5665282':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5665283':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5665284':{'en': 'Entel'},
'5665285':{'en': 'Entel'},
'5665286':{'en': 'Entel'},
'56652870':{'en': 'Redvoiss'},
'5665288':{'en': 'Entel'},
'5665289':{'en': 'Entel'},
'5665296':{'en': 'Telefonica Del Sur S.A.'},
'56652970':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56652971':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56652972':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56671980':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56671981':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56671982':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56671983':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56671984':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56671985':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56671986':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5667221':{'en': u('Compania De Tel\u00e9fonos De Coyhaique S.A.')},
'5667222':{'en': 'Telefonica Del Sur S.A.'},
'5667223':{'en': u('Compania De Tel\u00e9fonos De Coyhaique S.A.')},
'5667224':{'en': u('Compania De Tel\u00e9fonos De Coyhaique S.A.')},
'5667225':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5667226':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5667227':{'en': u('Compania De Tel\u00e9fonos De Coyhaique S.A.')},
'5667228':{'en': 'Claro'},
'5667229':{'en': 'Chile.Com'},
'5667231':{'en': u('Compania De Tel\u00e9fonos De Coyhaique S.A.')},
'5667232':{'en': u('Compania De Tel\u00e9fonos De Coyhaique S.A.')},
'56672330':{'en': u('Compania De Tel\u00e9fonos De Coyhaique S.A.')},
'56672331':{'en': u('Compania De Tel\u00e9fonos De Coyhaique S.A.')},
'56672332':{'en': u('Compania De Tel\u00e9fonos De Coyhaique S.A.')},
'56672333':{'en': u('Compania De Tel\u00e9fonos De Coyhaique S.A.')},
'56672334':{'en': u('Compania De Tel\u00e9fonos De Coyhaique S.A.')},
'56672335':{'en': u('Compania De Tel\u00e9fonos De Coyhaique S.A.')},
'56672336':{'en': u('Compania De Tel\u00e9fonos De Coyhaique S.A.')},
'5667234':{'en': u('Compania De Tel\u00e9fonos De Coyhaique S.A.')},
'5667235':{'en': u('Compania De Tel\u00e9fonos De Coyhaique S.A.')},
'56672360':{'en': u('Compania De Tel\u00e9fonos De Coyhaique S.A.')},
'56672361':{'en': u('Compania De Tel\u00e9fonos De Coyhaique S.A.')},
'5667237':{'en': u('Compania De Tel\u00e9fonos De Coyhaique S.A.')},
'56672380':{'en': 'Claro'},
'56672381':{'en': 'Claro'},
'56672382':{'en': 'Claro'},
'56672383':{'en': 'Claro'},
'56672385':{'en': 'Redvoiss'},
'5667239':{'en': 'Entel'},
'5667241':{'en': u('Compania De Tel\u00e9fonos De Coyhaique S.A.')},
'56672423':{'en': u('Compania De Tel\u00e9fonos De Coyhaique S.A.')},
'566724270':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'566724271':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'566724272':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'566724273':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'566724274':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'56672430':{'en': u('Compania De Tel\u00e9fonos De Coyhaique S.A.')},
'56672431':{'en': u('Compania De Tel\u00e9fonos De Coyhaique S.A.')},
'56672435':{'en': 'Entel'},
'56672436':{'en': 'Entel'},
'56672437':{'en': 'Entel'},
'56672438':{'en': 'Entel'},
'56672439':{'en': 'Entel'},
'5667244':{'en': 'Claro'},
'5667245':{'en': 'Claro'},
'5667246':{'en': 'Claro'},
'5667252':{'en': u('Compania De Tel\u00e9fonos De Coyhaique S.A.')},
'56672567':{'en': 'Entel'},
'5667257':{'en': 'Entel'},
'5667258':{'en': 'Entel'},
'5667261':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5667262':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5667263':{'en': 'Entel'},
'5667267':{'en': 'Entel'},
'56711970':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56711971':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56711972':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56711973':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56711974':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56711980':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56711981':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56711982':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56711983':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56711984':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56711985':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56711986':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'567122':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5671231':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5671232':{'en': 'Claro'},
'5671233':{'en': 'Claro'},
'5671234':{'en': 'Claro'},
'5671235':{'en': 'Claro'},
'56712360':{'en': 'Claro'},
'56712361':{'en': 'Claro'},
'56712363':{'en': 'Claro'},
'56712364':{'en': 'Claro'},
'56712365':{'en': 'Claro'},
'56712370':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56712371':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56712372':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56712373':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56712374':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'5671238':{'en': 'Entel'},
'56712390':{'en': 'Gtd Telesat S.A.'},
'56712391':{'en': 'Gtd Telesat S.A.'},
'56712392':{'en': 'Chile.Com'},
'56712393':{'en': 'Chile.Com'},
'56712394':{'en': 'Chile.Com'},
'56712395':{'en': 'Chile.Com'},
'5671240':{'en': 'Entel'},
'5671241':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5671242':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5671243':{'en': 'Entel'},
'5671244':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5671245':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'56712460':{'en': 'Redvoiss'},
'5671247':{'en': 'Entel'},
'5671248':{'en': 'Entel'},
'5671251':{'en': 'Entel'},
'5671252':{'en': 'Entel'},
'5671253':{'en': 'Entel'},
'56712571':{'en': 'Entel'},
'5671261':{'en': 'Entel'},
'5671262':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5671263':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5671264':{'en': 'Claro'},
'56712651':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'567126550':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'567126551':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'567126552':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'567126553':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'567126554':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'56712657':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5671267':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5671268':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5671269':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5671271':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'56712720':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'56712721':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'56712722':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'56712723':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'56712724':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'56712725':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'56712726':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'56712727':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'5671273':{'en': 'Entel'},
'5671274':{'en': 'Entel'},
'5671277':{'en': 'Telestar'},
'5671279':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5671280':{'en': 'Entel'},
'5671281':{'en': 'Entel'},
'5671282':{'en': 'Entel'},
'5671283':{'en': 'Entel'},
'5671288':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56712970':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56712971':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56712972':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'5671298':{'en': 'Telefonica Del Sur S.A.'},
'56721980':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56721981':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56721982':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56721983':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56721984':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56721985':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56721986':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56721987':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'567222':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5672231':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5672232':{'en': 'Entel'},
'5672233':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5672234':{'en': 'Entel'},
'5672235':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5672236':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56722370':{'en': 'Claro'},
'56722371':{'en': 'Claro'},
'56722372':{'en': 'Claro'},
'56722373':{'en': 'Claro'},
'56722374':{'en': 'Claro'},
'56722375':{'en': 'Claro'},
'56722376':{'en': 'Claro'},
'56722377':{'en': 'Claro'},
'56722381':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56722384':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56722387':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56722390':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56722391':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56722392':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56722393':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56722395':{'en': 'Entel'},
'56722396':{'en': 'Entel'},
'56722397':{'en': 'Entel'},
'56722398':{'en': 'Entel'},
'56722399':{'en': 'Entel'},
'5672240':{'en': 'Chile.Com'},
'5672241':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5672242':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5672243':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5672244':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56722450':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56722451':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56722452':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56722455':{'en': 'Entel'},
'56722456':{'en': 'Entel'},
'56722457':{'en': 'Entel'},
'56722458':{'en': 'Entel'},
'56722459':{'en': 'Entel'},
'56722461':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56722462':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56722463':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56722466':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56722467':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56722468':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5672247':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56722481':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56722485':{'en': 'Entel'},
'56722486':{'en': 'Entel'},
'56722487':{'en': 'Entel'},
'56722488':{'en': 'Entel'},
'56722489':{'en': 'Entel'},
'56722491':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56722492':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56722495':{'en': 'Entel'},
'56722496':{'en': 'Entel'},
'56722497':{'en': 'Entel'},
'56722498':{'en': 'Entel'},
'56722499':{'en': 'Entel'},
'56722501':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56722502':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56722506':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56722510':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56722511':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56722512':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56722513':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56722514':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56722515':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56722516':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56722521':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56722522':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56722525':{'en': 'Entel'},
'56722526':{'en': 'Entel'},
'56722527':{'en': 'Entel'},
'56722528':{'en': 'Entel'},
'56722529':{'en': 'Entel'},
'5672253':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'56722540':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56722541':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56722545':{'en': 'Entel'},
'56722546':{'en': 'Entel'},
'56722547':{'en': 'Entel'},
'56722548':{'en': 'Entel'},
'56722549':{'en': 'Entel'},
'56722551':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56722552':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56722553':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56722554':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56722555':{'en': 'Entel'},
'56722556':{'en': 'Entel'},
'56722557':{'en': 'Entel'},
'56722558':{'en': 'Entel'},
'56722559':{'en': 'Entel'},
'56722561':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56722562':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56722565':{'en': 'Entel'},
'56722566':{'en': 'Entel'},
'56722567':{'en': 'Entel'},
'56722568':{'en': 'Entel'},
'56722569':{'en': 'Entel'},
'56722570':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56722571':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56722572':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56722573':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56722574':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56722575':{'en': 'Entel'},
'56722576':{'en': 'Entel'},
'56722577':{'en': 'Entel'},
'56722578':{'en': 'Entel'},
'56722579':{'en': 'Entel'},
'5672258':{'en': 'Entel'},
'56722591':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56722596':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'567226':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'5672260':{'en': 'Entel'},
'5672261':{'en': 'Claro'},
'5672269':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5672271':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5672272':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5672273':{'en': 'Claro'},
'5672274':{'en': 'Claro'},
'5672275':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5672276':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5672277':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5672278':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56722790':{'en': 'Claro'},
'56722791':{'en': 'Claro'},
'56722792':{'en': 'Claro'},
'5672280':{'en': 'Cibeles Telecom'},
'56722810':{'en': 'Redvoiss'},
'56722817':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5672282':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56722831':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56722833':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'56722835':{'en': 'Entel'},
'56722836':{'en': 'Entel'},
'56722837':{'en': 'Entel'},
'56722838':{'en': 'Entel'},
'56722839':{'en': 'Entel'},
'56722841':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56722842':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56722843':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56722845':{'en': 'Telefonica Del Sur S.A.'},
'56722846':{'en': 'Telefonica Del Sur S.A.'},
'56722847':{'en': 'Telefonica Del Sur S.A.'},
'56722848':{'en': 'Telefonica Del Sur S.A.'},
'56722849':{'en': 'Telefonica Del Sur S.A.'},
'56722851':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56722856':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56722857':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56722858':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56722859':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56722861':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56722865':{'en': 'Telefonica Del Sur S.A.'},
'56722866':{'en': 'Telefonica Del Sur S.A.'},
'56722867':{'en': 'Telefonica Del Sur S.A.'},
'56722868':{'en': 'Telefonica Del Sur S.A.'},
'56722869':{'en': 'Telefonica Del Sur S.A.'},
'56722871':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56722874':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56722875':{'en': 'Convergia Telecom S.A.'},
'56722876':{'en': 'Convergia Telecom S.A.'},
'56722877':{'en': 'Convergia Telecom S.A.'},
'56722878':{'en': 'Convergia Telecom S.A.'},
'56722879':{'en': 'Convergia Telecom S.A.'},
'5672288':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5672289':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5672290':{'en': 'Entel'},
'5672291':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'5672292':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'5672293':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'5672294':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'5672295':{'en': 'Entel'},
'5672296':{'en': 'Claro'},
'5672297':{'en': 'Entel'},
'5672298':{'en': 'Entel'},
'5672299':{'en': 'Telestar'},
'56731970':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56731971':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56731972':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56731973':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56731974':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56731975':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56731980':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56731981':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56731982':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56731983':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56731984':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56731985':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56731986':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5673221':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5673222':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5673223':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56732240':{'en': 'Claro'},
'56732241':{'en': 'Claro'},
'56732242':{'en': 'Claro'},
'56732243':{'en': 'Claro'},
'5673225':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5673226':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'56732270':{'en': 'Chile.Com'},
'56732271':{'en': 'Chile.Com'},
'56732272':{'en': 'Chile.Com'},
'56732279':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5673228':{'en': 'Entel'},
'5673229':{'en': 'Entel'},
'5673231':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5673232':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5673233':{'en': 'Claro'},
'56732346':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56732351':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'567323570':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'567323571':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'567323572':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'567323573':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'567323574':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'56732361':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56732370':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56732371':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56732372':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56732373':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56732374':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56732375':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56732381':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56732382':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56732390':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56732391':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56732400':{'en': 'Gtd Telesat S.A.'},
'56732401':{'en': 'Gtd Telesat S.A.'},
'56732411':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56732412':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56732421':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5673243':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5673244':{'en': 'Claro'},
'5673245':{'en': 'Claro'},
'5673246':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5673247':{'en': 'Claro'},
'5673248':{'en': 'Entel'},
'5673249':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5673250':{'en': 'Entel'},
'56732511':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56732512':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56732513':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56732514':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56732515':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56732516':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56732517':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56732520':{'en': 'Redvoiss'},
'56732541':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56732551':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56732556':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5673256':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5673261':{'en': 'Entel'},
'5673262':{'en': 'Entel'},
'5673263':{'en': 'Entel'},
'56732641':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56732646':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5673265':{'en': 'Entel'},
'56732673':{'en': 'Entel'},
'5673273':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'56732740':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'56732741':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'56732750':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'56732751':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'56732760':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'56732761':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'56732766':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'56732767':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'56732770':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'56732771':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'56732970':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56732971':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56732979':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56751975':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56751976':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56751977':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56751978':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56751979':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56751980':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56751981':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56751982':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56751983':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56751984':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56751985':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56751986':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5675220':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56752210':{'en': 'Gtd Telesat S.A.'},
'56752211':{'en': 'Gtd Telesat S.A.'},
'56752217':{'en': 'Chile.Com'},
'56752218':{'en': 'Chile.Com'},
'56752219':{'en': 'Chile.Com'},
'5675222':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5675223':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5675224':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5675225':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5675226':{'en': 'Claro'},
'56752275':{'en': 'Entel'},
'5675228':{'en': 'Entel'},
'5675229':{'en': 'Entel'},
'5675231':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5675232':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5675233':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5675234':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5675235':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5675236':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56752371':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56752379':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5675238':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5675239':{'en': 'Entel'},
'56752400':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56752405':{'en': 'Redvoiss'},
'5675241':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5675242':{'en': 'Entel'},
'5675243':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5675244':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56752451':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56752454':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'567524550':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'567524551':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'567524552':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'567524553':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'567524554':{'en': 'Compania Chilena De Comunicaciones Parallel S.A.'},
'56752460':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5675247':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56752481':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5675249':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5675250':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'56752510':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'56752511':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'5675252':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'56752530':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'56752531':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'56752532':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'56752533':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'56752534':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'56752535':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'56752536':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'56752537':{'en': 'Complejo Manufacturero De Equipos Telefonicos S.A.C.I.'},
'5675254':{'en': 'Entel'},
'5675255':{'en': 'Entel'},
'5675256':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5675257':{'en': 'Entel'},
'56752580':{'en': 'Claro'},
'56752581':{'en': 'Claro'},
'56752582':{'en': 'Claro'},
'56752583':{'en': 'Claro'},
'5675259':{'en': 'Entel'},
'5675260':{'en': 'Entel'},
'5675261':{'en': 'Entel'},
'56752661':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56752690':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'5675273':{'en': 'Claro'},
'5675274':{'en': 'Claro'},
'5675275':{'en': 'Claro'},
'5675276':{'en': 'Telefonica Del Sur S.A.'},
'5675277':{'en': 'Telestar'},
'5675289':{'en': 'Compania De Telecomunicaciones De Chile S.A.'},
'56752972':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56752973':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56752974':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56752978':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'56752979':{'en': 'Comunicacion Y Telefonia Rural S.A.'},
'5675298':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'56930':{'en': 'WOM'},
'569304':{'en': 'Claro'},
'569310':{'en': 'WOM'},
'569311':{'en': 'WOM'},
'569312':{'en': 'WOM'},
'569313':{'en': 'WOM'},
'569314':{'en': 'WOM'},
'569315':{'en': 'Ops Ingenieria Ltda.'},
'569316':{'en': 'Ops Ingenieria Ltda.'},
'569317':{'en': 'Entel'},
'569318':{'en': 'Entel'},
'569319':{'en': 'Entel'},
'569320':{'en': 'Entel'},
'569321':{'en': 'Entel'},
'569322':{'en': 'Entel'},
'569323':{'en': 'Entel'},
'5693242':{'en': 'Entel'},
'5693243':{'en': 'Entel'},
'5693244':{'en': 'Entel'},
'5693245':{'en': 'Entel'},
'5693246':{'en': 'Entel'},
'5693247':{'en': 'Entel'},
'5693248':{'en': 'Entel'},
'5693249':{'en': 'Entel'},
'569325':{'en': 'Entel'},
'569326':{'en': 'Entel'},
'569327':{'en': 'WOM'},
'569328':{'en': 'WOM'},
'569329':{'en': 'WOM'},
'56933':{'en': 'WOM'},
'569337':{'en': 'Entel'},
'569338':{'en': 'Entel'},
'569339':{'en': 'Entel'},
'56934':{'en': 'Entel'},
'569347':{'en': 'Inversiones Santa Fe Ltda.'},
'569348':{'en': 'WOM'},
'569349':{'en': 'WOM'},
'56935':{'en': 'WOM'},
'56936':{'en': 'WOM'},
'5693600':{'en': 'Pacifico Cable S.P.A.'},
'5693602':{'en': 'Belink Spa.'},
'5693603':{'en': 'Ingenium Outsourcing Services Chile Spa.'},
'5693604':{'en': 'Ingenium Outsourcing Services Chile Spa.'},
'5693605':{'en': 'Cellpluss Spa.'},
'5693606':{'en': 'Cellpluss Spa.'},
'5693607':{'en': 'Cellpluss Spa.'},
'5693608':{'en': 'Cellpluss Spa.'},
'5693609':{'en': 'Cellpluss Spa.'},
'56937':{'en': 'WOM'},
'569379':{'en': 'Ops Ingenieria Ltda.'},
'569380':{'en': 'Ops Ingenieria Ltda.'},
'569381':{'en': 'Ops Ingenieria Ltda.'},
'56940':{'en': 'Entel'},
'56941':{'en': 'Movistar'},
'56942':{'en': 'Entel'},
'569434':{'en': 'Movistar'},
'569435':{'en': 'Movistar'},
'569436':{'en': u('Telef\u00c3\u00b3nica Uno Uno Cuatro')},
'569437':{'en': u('Telef\u00c3\u00b3nica Uno Uno Cuatro')},
'569438':{'en': 'Tribe Mobile Chile Spa'},
'569439':{'en': 'Tribe Mobile Chile Spa'},
'56944':{'en': 'Entel'},
'56945':{'en': 'WOM'},
'5694600':{'en': 'Voip Analysis S.A'},
'5694601':{'en': 'Voip Analysis S.A'},
'5694602':{'en': 'Voip Analysis S.A'},
'5694603':{'en': 'Voip Analysis S.A'},
'5694604':{'en': 'Voip Analysis S.A'},
'56946090':{'en': 'Compatel Chile Limitada'},
'56946091':{'en': 'Compatel Chile Limitada'},
'56946092':{'en': 'Compatel Chile Limitada'},
'56946093':{'en': 'Compatel Chile Limitada'},
'56946094':{'en': 'Compatel Chile Limitada'},
'569461':{'en': 'Claro'},
'569462':{'en': 'Claro'},
'569463':{'en': 'Claro'},
'569464':{'en': 'Claro'},
'569465':{'en': 'Claro'},
'569466':{'en': 'Claro'},
'569467':{'en': 'Claro'},
'569468':{'en': 'Claro'},
'569469':{'en': 'Claro'},
'56947':{'en': 'Movistar'},
'569470':{'en': 'Claro'},
'569471':{'en': 'Claro'},
'569472':{'en': 'Claro'},
'56948':{'en': 'WOM'},
'569480':{'en': 'Movistar'},
'569481':{'en': 'Movistar'},
'569482':{'en': 'Movistar'},
'569483':{'en': 'Empresas Bunker S.A.'},
'56949':{'en': 'WOM'},
'56950':{'en': 'Claro'},
'56951':{'en': 'Entel'},
'569510':{'en': 'Movistar'},
'569511':{'en': 'Movistar'},
'569512':{'en': 'Movistar'},
'569513':{'en': 'Movistar'},
'56952':{'en': 'Entel'},
'569524':{'en': 'Movistar'},
'569525':{'en': 'Movistar'},
'569526':{'en': 'Sociedad Falabella Movil Spa.'},
'569527':{'en': 'Sociedad Falabella Movil Spa.'},
'56953':{'en': 'Movistar'},
'56954':{'en': 'Claro'},
'569550':{'en': 'Ops Ingenieria Ltda.'},
'569551':{'en': 'WOM'},
'569552':{'en': 'WOM'},
'569553':{'en': 'WOM'},
'569554':{'en': 'Movistar'},
'5695550':{'en': 'Movistar'},
'5695551':{'en': 'Movistar'},
'5695552':{'en': 'Movistar'},
'5695553':{'en': 'Movistar'},
'5695554':{'en': 'Movistar'},
'5695555':{'en': 'Redvoiss'},
'5695556':{'en': 'Redvoiss'},
'5695557':{'en': 'Industel Chile Ltda'},
'5695558':{'en': 'Industel Chile Ltda'},
'5695559':{'en': 'Industel Chile Ltda'},
'569556':{'en': 'Entel'},
'569557':{'en': 'Movistar'},
'569558':{'en': 'Sociedad Falabella Movil Spa.'},
'569559':{'en': 'Sociedad Falabella Movil Spa.'},
'56956':{'en': 'Entel'},
'56957':{'en': 'Entel'},
'56958':{'en': 'Movistar'},
'56959':{'en': 'Claro'},
'56961':{'en': 'Movistar'},
'569620':{'en': 'Entel'},
'569621':{'en': 'Entel'},
'569622':{'en': 'Entel'},
'569623':{'en': 'Entel'},
'569624':{'en': 'Entel'},
'569625':{'en': 'Claro'},
'569626':{'en': 'Claro'},
'569627':{'en': 'Claro'},
'569628':{'en': 'Movistar'},
'569629':{'en': 'Movistar'},
'569630':{'en': 'Movistar'},
'569631':{'en': 'Movistar'},
'569632':{'en': 'Movistar'},
'569633':{'en': 'Movistar'},
'569634':{'en': u('VTR M\u00f3vil')},
'569635':{'en': u('VTR M\u00f3vil')},
'569636':{'en': u('VTR M\u00f3vil')},
'569637':{'en': u('VTR M\u00f3vil')},
'569638':{'en': u('VTR M\u00f3vil')},
'569639':{'en': 'Movistar'},
'569640':{'en': 'Movistar'},
'569641':{'en': 'WOM'},
'569642':{'en': 'WOM'},
'569643':{'en': 'WOM'},
'569644':{'en': 'WOM'},
'569645':{'en': 'WOM'},
'569646':{'en': 'Movistar'},
'569647':{'en': 'Movistar'},
'569648':{'en': 'Movistar'},
'569649':{'en': 'Movistar'},
'56965':{'en': 'Claro'},
'569650':{'en': 'Entel'},
'569651':{'en': 'Entel'},
'569658':{'en': 'Entel'},
'569659':{'en': 'Entel'},
'56966':{'en': 'Entel'},
'569670':{'en': 'Claro'},
'569671':{'en': 'Claro'},
'569672':{'en': 'Claro'},
'569673':{'en': 'Claro'},
'569674':{'en': 'Claro'},
'569675':{'en': 'Claro'},
'5696760':{'en': 'Movistar'},
'5696761':{'en': 'Movistar'},
'5696762':{'en': 'Movistar'},
'5696763':{'en': 'Telefonica Del Sur S.A.'},
'5696764':{'en': 'Cibeles Telecom'},
'5696765':{'en': 'Entel'},
'5696766':{'en': 'Entel'},
'5696767':{'en': 'Entel'},
'5696768':{'en': 'Entel'},
'5696769':{'en': 'Entel'},
'5696770':{'en': 'Celupago'},
'5696775':{'en': 'Entel'},
'5696776':{'en': 'Entel'},
'5696777':{'en': 'Entel'},
'5696778':{'en': 'Entel'},
'5696779':{'en': 'Entel'},
'5696780':{'en': 'Mavi'},
'5696785':{'en': 'Entel'},
'5696786':{'en': 'Entel'},
'5696787':{'en': 'Entel'},
'5696788':{'en': 'Entel'},
'5696789':{'en': 'Entel'},
'5696790':{'en': 'Movistar'},
'5696795':{'en': 'Entel'},
'5696796':{'en': 'Entel'},
'5696797':{'en': 'Entel'},
'5696798':{'en': 'Entel'},
'5696799':{'en': 'Entel'},
'56968':{'en': 'Movistar'},
'569688':{'en': u('N\u00c3\u00b3made Telecomunicaciones')},
'5696890':{'en': 'Netline'},
'5696891':{'en': 'Netline'},
'5696892':{'en': 'Netline'},
'5696893':{'en': 'Netline'},
'5696894':{'en': 'Netline'},
'5696895':{'en': 'Entel'},
'5696896':{'en': 'Entel'},
'5696897':{'en': 'Entel'},
'5696898':{'en': 'Entel'},
'5696899':{'en': 'Entel'},
'569690':{'en': 'Entel'},
'5696910':{'en': u('Sociedad Comercial y de Ingenier\u00c3\u00ada Swedcom')},
'5696915':{'en': 'Entel'},
'5696916':{'en': 'Entel'},
'5696917':{'en': 'Entel'},
'5696918':{'en': 'Entel'},
'5696919':{'en': 'Entel'},
'5696920':{'en': 'Television Interactiva'},
'5696930':{'en': 'Quantax'},
'5696940':{'en': 'Telecomunicaciones Net Uno Ltda.'},
'5696950':{'en': 'Blue Two'},
'5696965':{'en': 'Movistar'},
'5696970':{'en': 'Vtr Banda Ancha (Chile) S.A.'},
'5696990':{'en': 'Telecomunicaciones Dotcom'},
'569710':{'en': 'Entel'},
'5697110':{'en': 'Telecomunicaciones Max Ltda.'},
'569712':{'en': 'Movistar'},
'569713':{'en': 'Movistar'},
'569714':{'en': 'Movistar'},
'569715':{'en': 'Movistar'},
'569716':{'en': 'Movistar'},
'569717':{'en': 'Claro'},
'569718':{'en': 'Claro'},
'569719':{'en': 'Claro'},
'56972':{'en': 'Claro'},
'56973':{'en': 'Claro'},
'569737':{'en': 'Entel'},
'569738':{'en': 'Entel'},
'569739':{'en': 'Entel'},
'56974':{'en': 'Movistar'},
'569740':{'en': 'Claro'},
'56975':{'en': 'Entel'},
'569750':{'en': 'WOM'},
'569759':{'en': 'Claro'},
'569760':{'en': 'Entel'},
'569761':{'en': 'Movistar'},
'569762':{'en': 'Movistar'},
'569763':{'en': 'Movistar'},
'569764':{'en': 'Entel'},
'569765':{'en': 'Entel'},
'569766':{'en': 'Entel'},
'569767':{'en': 'Claro'},
'569768':{'en': 'Claro'},
'569769':{'en': 'Claro'},
'569770':{'en': 'Entel'},
'569771':{'en': 'Movistar'},
'569772':{'en': 'Movistar'},
'569773':{'en': 'Movistar'},
'569774':{'en': 'Entel'},
'569775':{'en': 'Entel'},
'569776':{'en': 'Entel'},
'569777':{'en': 'Claro'},
'569778':{'en': 'Claro'},
'569779':{'en': 'Claro'},
'56978':{'en': 'Claro'},
'569780':{'en': 'Entel'},
'569787':{'en': 'Entel'},
'569788':{'en': 'Entel'},
'569789':{'en': 'Entel'},
'569790':{'en': 'Claro'},
'569791':{'en': 'Claro'},
'569792':{'en': 'Claro'},
'569793':{'en': 'Claro'},
'569794':{'en': 'Claro'},
'569795':{'en': 'Entel'},
'569796':{'en': 'Entel'},
'569797':{'en': 'Entel'},
'569798':{'en': 'Entel'},
'569799':{'en': 'Entel'},
'569801':{'en': 'Entel'},
'5698100':{'en': 'Viva'},
'5698101':{'en': 'Viva'},
'5698102':{'en': 'Viva'},
'5698103':{'en': 'Viva'},
'5698104':{'en': 'Viva'},
'5698105':{'en': 'WOM'},
'5698106':{'en': 'WOM'},
'5698107':{'en': 'WOM'},
'5698108':{'en': 'WOM'},
'5698109':{'en': 'WOM'},
'569811':{'en': 'Entel'},
'569812':{'en': 'Movistar'},
'569813':{'en': 'Entel'},
'569814':{'en': 'Claro'},
'569815':{'en': 'Entel'},
'569816':{'en': 'Movistar'},
'569817':{'en': 'Movistar'},
'569818':{'en': 'Entel'},
'569819':{'en': 'Entel'},
'56982':{'en': 'Entel'},
'56983':{'en': 'Movistar'},
'56984':{'en': 'Entel'},
'569848':{'en': 'Claro'},
'569849':{'en': 'Claro'},
'56985':{'en': 'Movistar'},
'56986':{'en': 'Claro'},
'569870':{'en': 'Claro'},
'569871':{'en': 'Claro'},
'569872':{'en': 'Entel'},
'569873':{'en': 'Entel'},
'569874':{'en': 'Entel'},
'569875':{'en': 'Entel'},
'569876':{'en': 'Entel'},
'569877':{'en': 'Claro'},
'569878':{'en': 'Movistar'},
'569879':{'en': 'Movistar'},
'569880':{'en': 'Claro'},
'569881':{'en': 'Entel'},
'569882':{'en': 'Entel'},
'569883':{'en': 'Entel'},
'569884':{'en': 'Movistar'},
'569885':{'en': 'Movistar'},
'569886':{'en': 'Movistar'},
'569887':{'en': 'Movistar'},
'569888':{'en': 'Entel'},
'569889':{'en': 'Entel'},
'56989':{'en': 'Movistar'},
'569900':{'en': 'Claro'},
'569901':{'en': 'Movistar'},
'569902':{'en': 'Movistar'},
'5699026':{'en': 'Mobilink'},
'5699027':{'en': 'Mobilink'},
'5699028':{'en': 'WOM'},
'5699029':{'en': 'WOM'},
'569903':{'en': 'Movistar'},
'569904':{'en': 'Movistar'},
'5699048':{'en': 'WOM'},
'5699049':{'en': 'WOM'},
'569905':{'en': 'Claro'},
'569906':{'en': 'Claro'},
'569907':{'en': 'Entel'},
'569908':{'en': 'Entel'},
'569909':{'en': 'Entel'},
'569910':{'en': 'Claro'},
'569912':{'en': 'Entel'},
'569913':{'en': 'Entel'},
'569914':{'en': 'Movistar'},
'5699140':{'en': 'Entel'},
'5699141':{'en': 'Entel'},
'5699142':{'en': 'Entel'},
'569915':{'en': 'Entel'},
'569916':{'en': 'Movistar'},
'569917':{'en': 'Claro'},
'569918':{'en': 'Movistar'},
'569919':{'en': 'Movistar'},
'569920':{'en': 'Claro'},
'569921':{'en': 'Entel'},
'569922':{'en': 'Movistar'},
'569923':{'en': 'Movistar'},
'569924':{'en': 'Movistar'},
'569925':{'en': 'Movistar'},
'569926':{'en': 'Movistar'},
'569927':{'en': 'Movistar'},
'5699280':{'en': 'Movistar'},
'5699281':{'en': 'Movistar'},
'5699282':{'en': 'Movistar'},
'5699283':{'en': 'Movistar'},
'5699284':{'en': 'Movistar'},
'5699286':{'en': 'Movistar'},
'5699287':{'en': 'Movistar'},
'5699289':{'en': 'Entel'},
'569929':{'en': 'Entel'},
'569930':{'en': 'Entel'},
'569931':{'en': 'Entel'},
'569932':{'en': 'Movistar'},
'569933':{'en': 'Movistar'},
'569934':{'en': 'Movistar'},
'569935':{'en': 'Entel'},
'569936':{'en': 'Movistar'},
'569937':{'en': 'Movistar'},
'5699380':{'en': 'Movistar'},
'5699381':{'en': 'Movistar'},
'5699382':{'en': 'Entel'},
'5699383':{'en': 'Entel'},
'5699385':{'en': 'Claro'},
'5699386':{'en': 'Claro'},
'5699387':{'en': 'Claro'},
'5699388':{'en': 'Claro'},
'5699389':{'en': 'Movistar'},
'569939':{'en': 'Claro'},
'56994':{'en': 'Movistar'},
'569941':{'en': 'Entel'},
'569947':{'en': 'Entel'},
'569948':{'en': 'Entel'},
'569949':{'en': 'Entel'},
'569950':{'en': 'Entel'},
'569951':{'en': 'Entel'},
'569952':{'en': 'Movistar'},
'569953':{'en': 'Movistar'},
'569954':{'en': 'Movistar'},
'569955':{'en': 'Movistar'},
'569956':{'en': 'Movistar'},
'569957':{'en': 'Entel'},
'5699580':{'en': 'Movistar'},
'5699581':{'en': 'Movistar'},
'5699582':{'en': 'Movistar'},
'5699583':{'en': 'Movistar'},
'5699584':{'en': 'Movistar'},
'5699585':{'en': 'Claro'},
'5699586':{'en': 'Claro'},
'5699587':{'en': 'Claro'},
'5699588':{'en': 'Claro'},
'5699589':{'en': 'Claro'},
'569959':{'en': 'Entel'},
'5699600':{'en': 'Movistar'},
'5699601':{'en': 'Entel'},
'5699602':{'en': 'Movistar'},
'5699603':{'en': 'Movistar'},
'5699604':{'en': 'Movistar'},
'56996055':{'en': 'Movistar'},
'56996056':{'en': 'Movistar'},
'56996057':{'en': 'Movistar'},
'56996058':{'en': 'Movistar'},
'56996059':{'en': 'Movistar'},
'5699606':{'en': 'Movistar'},
'5699607':{'en': 'Entel'},
'56996080':{'en': 'Entel'},
'56996081':{'en': 'Entel'},
'56996082':{'en': 'Entel'},
'56996083':{'en': 'Entel'},
'56996084':{'en': 'Entel'},
'56996085':{'en': 'Movistar'},
'56996086':{'en': 'Movistar'},
'56996087':{'en': 'Movistar'},
'56996088':{'en': 'Movistar'},
'56996089':{'en': 'Movistar'},
'5699609':{'en': 'Claro'},
'569961':{'en': 'Entel'},
'569962':{'en': 'Movistar'},
'569963':{'en': 'Movistar'},
'569964':{'en': 'Movistar'},
'569965':{'en': 'Movistar'},
'569966':{'en': 'Movistar'},
'569967':{'en': 'Entel'},
'569968':{'en': 'Claro'},
'5699680':{'en': 'Movistar'},
'5699681':{'en': 'Movistar'},
'569969':{'en': 'Entel'},
'56997':{'en': 'Movistar'},
'569971':{'en': 'Entel'},
'569977':{'en': 'Entel'},
'569978':{'en': 'Entel'},
'569979':{'en': 'Entel'},
'56998':{'en': 'Entel'},
'569980':{'en': 'Claro'},
'569984':{'en': 'Claro'},
'569985':{'en': 'Claro'},
'569986':{'en': 'Claro'},
'569990':{'en': 'Movistar'},
'569991':{'en': 'Entel'},
'569992':{'en': 'Claro'},
'569993':{'en': 'Entel'},
'569994':{'en': 'Entel'},
'569995':{'en': 'Movistar'},
'5699960':{'en': 'Movistar'},
'5699961':{'en': 'Movistar'},
'5699962':{'en': 'Movistar'},
'5699963':{'en': 'Movistar'},
'5699964':{'en': 'Movistar'},
'5699965':{'en': 'Entel'},
'5699966':{'en': 'Entel'},
'5699967':{'en': 'Entel'},
'5699968':{'en': 'Entel'},
'5699969':{'en': 'Entel'},
'5699970':{'en': 'Movistar'},
'5699971':{'en': 'Movistar'},
'5699972':{'en': 'Movistar'},
'5699973':{'en': 'Movistar'},
'5699974':{'en': 'Movistar'},
'5699975':{'en': 'Claro'},
'5699976':{'en': 'Claro'},
'5699977':{'en': 'Claro'},
'5699978':{'en': 'Claro'},
'5699979':{'en': 'Claro'},
'569998':{'en': 'Claro'},
'569999':{'en': 'Entel'},
'5699990':{'en': 'Tesacom'},
'573002':{'en': 'Tigo'},
'573003':{'en': 'Tigo'},
'573004':{'en': 'Tigo'},
'573005':{'en': 'Tigo'},
'573006':{'en': 'Tigo'},
'573007':{'en': 'Tigo'},
'573008':{'en': 'Tigo'},
'573009':{'en': 'Tigo'},
'573012':{'en': 'Tigo'},
'573013':{'en': 'Tigo'},
'573014':{'en': 'Tigo'},
'573015':{'en': 'Tigo'},
'573016':{'en': 'Tigo'},
'573017':{'en': 'Tigo'},
'573022':{'en': 'Tigo'},
'573023':{'en': 'Tigo'},
'573024':{'en': 'Tigo'},
}
|
7df781e150f083ad6c3f299471621947eac6efef
|
8d585fa3b2419d9b993be2f2652e448cfeedc8b2
|
/utils/kubernetes/__init__.py
|
f070f0ec97696bab109000dcfc8f7f9796e9c80b
|
[
"BSD-3-Clause",
"BSD-2-Clause"
] |
permissive
|
DataDog/dd-agent
|
bd4ef0edb234293b51d30894a529ce94b37060f8
|
16fa4ec9ae11ca0adfffbd260c5b4899dc73509f
|
refs/heads/master
| 2023-08-16T09:52:21.816487
| 2023-07-11T15:37:34
| 2023-07-11T15:37:34
| 1,210,071
| 1,227
| 991
|
NOASSERTION
| 2023-06-28T12:20:19
| 2010-12-31T03:02:47
|
Python
|
UTF-8
|
Python
| false
| false
| 384
|
py
|
__init__.py
|
# (C) Datadog, Inc. 2016
# All rights reserved
# Licensed under Simplified BSD License (see LICENSE)
from .leader_elector import LeaderElector # noqa: F401
from .kube_event_retriever import KubeEventRetriever # noqa: F401
from .pod_service_mapper import PodServiceMapper # noqa: F401
from .kubeutil import detect_is_k8s # noqa: F401
from .kubeutil import KubeUtil # noqa: F401
|
1a18ca3b1e38c18bba305cd3f1886f385284c119
|
a9139fc3f4762b657d141651829db7ccb6da1bb7
|
/tests/algorithms/test_occupation_classifiers.py
|
a3e3d961764e35964bae1eb367c50fc2fd3abeb7
|
[
"MIT"
] |
permissive
|
workforce-data-initiative/skills-ml
|
25d8239f0ead0d94ceba32c97f447abd58d8ab23
|
feffead90815ccdecf24bf1a995f79683442b046
|
refs/heads/master
| 2023-08-31T11:45:59.352116
| 2023-05-05T15:29:54
| 2023-05-05T15:29:54
| 71,592,191
| 164
| 76
|
NOASSERTION
| 2023-09-04T22:29:18
| 2016-10-21T19:51:14
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 9,317
|
py
|
test_occupation_classifiers.py
|
from skills_ml.algorithms.occupation_classifiers.classifiers import CombinedClassifier, KNNDoc2VecClassifier, SocClassifier
from skills_ml.algorithms.embedding.train import EmbeddingTrainer
from skills_ml.algorithms.occupation_classifiers import SOCMajorGroup, DesignMatrix
from skills_ml.algorithms.embedding.models import Doc2VecModel, Word2VecModel, EmbeddingTransformer
from skills_ml.job_postings.common_schema import JobPostingCollectionSample
from skills_ml.job_postings.corpora import Word2VecGensimCorpusCreator
from skills_ml.storage import ProxyObjectWithStorage, ModelStorage, S3Store, FSStore
from skills_ml.algorithms import nlp
from skills_ml.algorithms.preprocessing import IterablePipeline
from skills_utils.s3 import upload
import gensim
from gensim.similarities.index import AnnoyIndexer
from sklearn.ensemble import RandomForestClassifier
from moto import mock_s3
from descriptors import cachedproperty
from functools import partial
import mock
import boto3
import s3fs
import tempfile
import os
import unittest
import json
docs = """licensed practical nurse licensed practical and licensed
vocational nurses licensed practical nurse department family
birthing center schedule part time shift hr day night rotation
hours hrs pp wknd rot holidays minimum salary minimum requisition
number job details provides direct nursing care for individual
patients undergoing cesarean section under the direction of the
surgeon also is involved with assisting with vaginal deliveries
recovery and transferring of newly delivered patient and their
families under the direction of the registered nurse to achieve
the hospital mission of competent christian holistic care patients
cared for include childbearing women and newborn infants the licensed
practical nurse can be responsible for newborn testing such as hearing
screening and car seat testing implements and abides by customer
service standards supports and implements patient safety and other
safety practices as appropriate supports and demonstrates family centered
care principles when interacting with patients and their families and
with coworkers education graduate of an approved school of practical
nursing required experience previous lpn experience preferred special
requirements current licensure as practical nurse lpn in the state of
minnesota required current american heart association aha bls healthcare
provider card required prior to completion of unit orientation eeo aa
graduate of an approved school of practical nursing required,29,29-2061.00"""
def get_corpus(num):
lines = [docs]*num
for line in lines:
yield line
class FakeCorpusGenerator(object):
def __init__(self , num=25):
self.num = num
self.lookup = {}
def __iter__(self):
k = 1
corpus_memory_friendly = get_corpus(num=100)
for data in corpus_memory_friendly:
data = gensim.utils.to_unicode(data).split(',')
words = data[0].split()
label = [str(k)]
self.lookup[str(k)] = data[2]
yield gensim.models.doc2vec.TaggedDocument(words, label)
k += 1
class TestCombinedClassifier(unittest.TestCase):
    """End-to-end test of CombinedClassifier: train an embedding plus a
    random forest on sample job postings, then predict a SOC code."""

    def basic_filter(self, doc):
        """Return *doc* if it passes the SOC major-group filter, else None."""
        if self.major_group.filter_func(doc):
            return doc
        else:
            return None

    @property
    def pipe_x(self):
        """Feature pipeline: filter -> join schema fields -> clean -> tokenize -> vectorize."""
        document_schema_fields = ['description', 'experienceRequirements', 'qualifications', 'skills']
        pipe_x = IterablePipeline(
            self.basic_filter,
            partial(nlp.fields_join, document_schema_fields=document_schema_fields),
            nlp.clean_str,
            nlp.word_tokenize,
            partial(nlp.vectorize, embedding_model=Word2VecModel(size=10))
        )
        return pipe_x

    @property
    def pipe_y(self):
        """Target pipeline: filter -> SOC major-group label transformer."""
        pipe_y = IterablePipeline(
            self.basic_filter,
            self.major_group.transformer
        )
        return pipe_y

    @cachedproperty
    def major_group(self):
        # Cached so filter_func and transformer share one SOCMajorGroup instance.
        return SOCMajorGroup()

    @mock.patch('os.getcwd')
    def test_combined_cls_local(self, mock_getcwd):
        """Train w2v + RandomForest against local FS storage; a prediction
        should be a (soc_code, score)-style pair of length 2."""
        with tempfile.TemporaryDirectory() as td:
            mock_getcwd.return_value = td
            model_storage = ModelStorage(FSStore(td))
            jobpostings = JobPostingCollectionSample()
            corpus_generator = Word2VecGensimCorpusCreator(jobpostings, raw=True)
            w2v = Word2VecModel(size=10, min_count=0, alpha=0.025, min_alpha=0.025)
            trainer = EmbeddingTrainer(w2v, model_storage=model_storage)
            trainer.train(corpus_generator, lookup=True)
            matrix = DesignMatrix(jobpostings, self.major_group, self.pipe_x, self.pipe_y)
            matrix.build()
            X = matrix.X
            rf = ProxyObjectWithStorage(RandomForestClassifier(), None, None, matrix.target_variable)
            rf.fit(X, matrix.y)
            # NOTE(review): proxy_rf is assigned but never used below — confirm intent.
            proxy_rf = ProxyObjectWithStorage(rf, None, None, matrix.target_variable)
            # Remove the last step in the pipe_x
            # the input of predict_soc should be tokenized words
            new_pipe_x = self.pipe_x
            new_pipe_x.generators.pop()
            new_matrix = DesignMatrix(JobPostingCollectionSample(), self.major_group, new_pipe_x)
            new_matrix.build()
            ccls = CombinedClassifier(w2v, rf)
            assert len(ccls.predict_soc([new_matrix.X[0]])[0]) == 2
class TestKNNDoc2VecClassifier(unittest.TestCase):
    """Tests for KNNDoc2VecClassifier against local-FS and (mock) S3 storage."""

    @mock.patch('os.getcwd')
    def test_knn_doc2vec_cls_local(self, mock_getcwd):
        """Train doc2vec locally, classify, index with Annoy, save/load roundtrip."""
        with tempfile.TemporaryDirectory() as td:
            mock_getcwd.return_value = td
            model_storage = ModelStorage(FSStore(td))
            corpus_generator = FakeCorpusGenerator()
            d2v = Doc2VecModel(size=10, min_count=1, dm=0, alpha=0.025, min_alpha=0.025)
            trainer = EmbeddingTrainer(d2v, model_storage=model_storage)
            trainer.train(corpus_generator, lookup=True)
            # KNNDoc2VecClassifier only supports doc2vec now
            self.assertRaises(NotImplementedError, lambda: KNNDoc2VecClassifier(Word2VecModel()))
            doc = docs.split(',')[0].split()
            # k must be a positive neighbor count
            knn = KNNDoc2VecClassifier(embedding_model=d2v, k=0)
            self.assertRaises(ValueError, lambda: knn.predict_soc([doc]))
            knn = KNNDoc2VecClassifier(embedding_model=d2v, k=1)
            soc_cls = SocClassifier(knn)
            # The SocClassifier wrapper must delegate to the same prediction
            assert knn.predict_soc([doc])[0][0] == soc_cls.predict_soc([doc])[0][0]
            # Build Annoy index
            knn.build_ann_indexer(num_trees=5)
            assert isinstance(knn.indexer, AnnoyIndexer)
            # Save
            model_storage.save_model(knn, knn.model_name)
            assert set(os.listdir(os.getcwd())) == set([knn.model_name])
            assert isinstance(knn.indexer, AnnoyIndexer)
            # Load
            new_knn = model_storage.load_model(knn.model_name)
            assert new_knn.model_name == knn.model_name
            assert new_knn.predict_soc([doc])[0][0] == '29-2061.00'
            # Have to re-build the index whenever ones load the knn model to the memory
            assert new_knn.indexer == None

    @mock_s3
    def test_knn_doc2vec_cls_s3(self):
        """Same flow as the local test, but persisting via a moto-mocked S3 bucket."""
        client = boto3.client('s3')
        client.create_bucket(Bucket='fake-open-skills', ACL='public-read-write')
        s3_path = f"s3://fake-open-skills/model_cache/soc_classifiers"
        s3_storage = S3Store(path=s3_path)
        model_storage = ModelStorage(s3_storage)
        corpus_generator = FakeCorpusGenerator()
        # Embedding has no lookup_dict
        d2v = Doc2VecModel(size=10, min_count=1, dm=0, alpha=0.025, min_alpha=0.025)
        trainer = EmbeddingTrainer(d2v, model_storage=model_storage)
        trainer.train(corpus_generator, lookup=False)
        # Classifier construction requires a lookup on the embedding
        self.assertRaises(ValueError, lambda: KNNDoc2VecClassifier(embedding_model=d2v))
        d2v = Doc2VecModel(size=10, min_count=1, dm=0, alpha=0.025, min_alpha=0.025)
        trainer = EmbeddingTrainer(d2v, model_storage=model_storage)
        trainer.train(corpus_generator, lookup=True)
        # KNNDoc2VecClassifier only supports doc2vec now
        self.assertRaises(NotImplementedError, lambda: KNNDoc2VecClassifier(Word2VecModel()))
        doc = docs.split(',')[0].split()
        knn = KNNDoc2VecClassifier(embedding_model=d2v, k=0)
        self.assertRaises(ValueError, lambda: knn.predict_soc([doc]))
        knn = KNNDoc2VecClassifier(embedding_model=d2v, k=10)
        soc_cls = SocClassifier(knn)
        assert knn.predict_soc([doc])[0][0] == soc_cls.predict_soc([doc])[0][0]
        # Build Annoy index
        knn.build_ann_indexer(num_trees=5)
        assert isinstance(knn.indexer, AnnoyIndexer)
        # Save
        s3 = s3fs.S3FileSystem()
        model_storage.save_model(knn, knn.model_name)
        files = [f.split('/')[-1] for f in s3.ls(s3_path)]
        assert set(files) == set([knn.model_name])
        # Load
        new_knn = model_storage.load_model(knn.model_name)
        assert new_knn.model_name == knn.model_name
        assert new_knn.predict_soc([doc])[0][0] == '29-2061.00'
        # Have to re-build the index whenever ones load the knn model to the memory
        assert new_knn.indexer == None
|
0559cebde8a49618ac782242eabe9690b2bb5260
|
649b0c3fda1546d933c862fa13a96684447a94c8
|
/tomviz/python/BinTiltSeriesByTwo.py
|
febbc434a9d151ba12b67c97eb7ced62135d9788
|
[
"BSD-3-Clause"
] |
permissive
|
OpenChemistry/tomviz
|
fa181db37eb5992dff6cae7a55d982495eabdba9
|
dd39cc4cf43ce1ea9224602508d372e5c5a5fd66
|
refs/heads/master
| 2023-06-01T13:51:40.416494
| 2023-04-20T19:21:12
| 2023-04-20T19:21:12
| 17,905,329
| 330
| 85
|
BSD-3-Clause
| 2023-05-22T21:34:33
| 2014-03-19T13:35:20
|
C++
|
UTF-8
|
Python
| false
| false
| 815
|
py
|
BinTiltSeriesByTwo.py
|
def transform(dataset):
    """Downsample tilt images by a factor of 2 (in x and y; tilt axis kept)."""
    from tomviz import utils
    import scipy.ndimage
    import numpy as np
    import warnings

    array = dataset.active_scalars

    zoom = (0.5, 0.5, 1)
    result_shape = utils.zoom_shape(array, zoom)
    # Fortran order to match the layout tomviz expects for scalars.
    result = np.empty(result_shape, array.dtype, order='F')

    # Downsample the dataset x2 using order 1 spline (linear)
    warnings.filterwarnings('ignore', '.*output shape of zoom.*')
    # FIX: scipy.ndimage.interpolation is a deprecated alias namespace that
    # has been removed in modern SciPy; call zoom via the public API instead.
    scipy.ndimage.zoom(array, zoom,
                       output=result,
                       order=1,
                       mode='constant',
                       cval=0.0, prefilter=False)

    # Set the result as the new scalars.
    dataset.active_scalars = result
|
cc5840da1098581c71e7e036cae86ae8bf23673e
|
3256af0d6c19732bb84b256a9f792aaf7f3d901a
|
/f5/bigip/tm/sys/file.py
|
4f90e62b8af7dae5483b71d194f80dea4b784c7a
|
[
"Apache-2.0",
"LicenseRef-scancode-generic-cla"
] |
permissive
|
F5Networks/f5-common-python
|
73e33ea489d989399d205077163f24ce584d83b9
|
3050df0079c2426af99b9a1b8f93d0b512468ff4
|
refs/heads/development
| 2023-08-29T10:11:23.713392
| 2022-09-21T02:45:03
| 2022-09-21T02:45:03
| 45,062,555
| 286
| 180
|
Apache-2.0
| 2023-05-12T23:13:03
| 2015-10-27T18:48:06
|
Python
|
UTF-8
|
Python
| false
| false
| 7,894
|
py
|
file.py
|
# coding=utf-8
#
# Copyright 2019 F5 Networks Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""BIG-IP® system file module
REST URI
``http://localhost/mgmt/tm/sys/file``
GUI Path
N/A
REST Kind
``tm:sys:file:*``
"""
from distutils.version import LooseVersion
from f5.bigip.resource import Collection
from f5.bigip.resource import OrganizingCollection
from f5.bigip.resource import Resource
from f5.sdk_exception import UnsupportedMethod
class File(OrganizingCollection):
    """BIG-IP® System sys file collection.

    Organizing collection exposing the file sub-collections (data-groups,
    external monitors, iFiles, and SSL certs/CSRs/CRLs/keys).
    """
    def __init__(self, sys):
        super(File, self).__init__(sys)
        # Sub-collections reachable lazily via attribute access.
        self._meta_data['allowed_lazy_attributes'] = [
            Data_Groups,
            External_Monitors,
            Ifiles,
            Ssl_Certs,
            Ssl_Csrs,
            Ssl_Crls,
            Ssl_Keys]
class Data_Groups(Collection):
    """BIG-IP® System sys file data-groups collection."""
    def __init__(self, File):
        super(Data_Groups, self).__init__(File)
        self._meta_data['allowed_lazy_attributes'] = [Data_Group]
        # Maps the REST kind of contained resources to their Python class.
        self._meta_data['attribute_registry'] =\
            {'tm:sys:file:data-group:data-groupstate': Data_Group}
class Data_Group(Resource):
    """BIG-IP® System sys file data-groups resource."""
    def __init__(self, data_groups):
        super(Data_Group, self).__init__(data_groups)
        self._meta_data['required_json_kind'] =\
            'tm:sys:file:data-group:data-groupstate'
        # Creation requires a name, an uploaded source file, and the
        # data-group value type.
        self._meta_data['required_creation_parameters'].update(
            ('name', 'sourcePath', 'type'))
    def update(self, **kwargs):
        # TMOS versions before 12.0.0 reject 'type' on update, so strip it
        # from the payload there.
        if LooseVersion(self._meta_data['bigip']._meta_data['tmos_version']) \
                < LooseVersion('12.0.0'):
            if 'type' in self.__dict__:
                del self.__dict__['type']
        return self._update(**kwargs)
class External_Monitors(Collection):
    """BIG-IP® System sys file external-monitors collection."""
    # NOTE: docstring previously said "data-groups" (copy/paste error).
    def __init__(self, File):
        super(External_Monitors, self).__init__(File)
        self._meta_data['allowed_lazy_attributes'] = [External_Monitor]
        self._meta_data['attribute_registry'] =\
            {'tm:sys:file:external-monitor:external-monitorstate': External_Monitor}
class External_Monitor(Resource):
    """BIG-IP® System sys file external-monitor resource."""
    # NOTE: docstring previously said "data-groups" (copy/paste error).
    def __init__(self, external_monitors):
        super(External_Monitor, self).__init__(external_monitors)
        self._meta_data['required_json_kind'] =\
            'tm:sys:file:external-monitor:external-monitorstate'
        # Creation requires a name and an uploaded monitor script.
        self._meta_data['required_creation_parameters'].update(
            ('name', 'sourcePath'))
class Ifiles(Collection):
    """BIG-IP® System sys file iFiles collection."""
    def __init__(self, File):
        super(Ifiles, self).__init__(File)
        self._meta_data['allowed_lazy_attributes'] = [Ifile]
        self._meta_data['attribute_registry'] = \
            {'tm:sys:file:ifile:ifilestate': Ifile}
class Ifile(Resource):
    """BIG-IP® System sys file iFiles resource."""
    def __init__(self, ifiles):
        super(Ifile, self).__init__(ifiles)
        self._meta_data['required_json_kind'] =\
            'tm:sys:file:ifile:ifilestate'
        # iFiles are created from an uploaded source file.
        self._meta_data['required_creation_parameters'].update(
            ('name', 'sourcePath'))
    def modify(self, **kwargs):
        '''Modify is not supported for iFiles

        :raises: UnsupportedMethod
        '''
        # FIX: the message previously said "update method" although the
        # unsupported operation here is modify.
        raise UnsupportedMethod(
            "%s does not support the modify method" % self.__class__.__name__
        )
class Ssl_Certs(Collection):
    """BIG-IP® System sys file ssl-certs collection."""
    def __init__(self, File):
        super(Ssl_Certs, self).__init__(File)
        self._meta_data['allowed_lazy_attributes'] = [Ssl_Cert]
        self._meta_data['attribute_registry'] =\
            {'tm:sys:file:ssl-cert:ssl-certstate': Ssl_Cert}
class Ssl_Cert(Resource):
    """BIG-IP® System sys file ssl-certs resource."""
    def __init__(self, ssl_certs):
        super(Ssl_Cert, self).__init__(ssl_certs)
        self._meta_data['required_json_kind'] =\
            'tm:sys:file:ssl-cert:ssl-certstate'
        # Certificates are created from an uploaded source file.
        self._meta_data['required_creation_parameters'].update(
            ('name', 'sourcePath'))
    def modify(self, **kwargs):
        '''Modify is not supported for SSL certificates

        :raises: UnsupportedMethod
        '''
        # FIX: docstring said "iFiles" and the message said "update method"
        # (copy/paste); the unsupported operation here is modify.
        raise UnsupportedMethod(
            "%s does not support the modify method" % self.__class__.__name__
        )
class Ssl_Crls(Collection):
    """BIG-IP® System sys file ssl-crls collection."""
    def __init__(self, File):
        super(Ssl_Crls, self).__init__(File)
        self._meta_data['allowed_lazy_attributes'] = [Ssl_Crl]
        self._meta_data['attribute_registry'] =\
            {'tm:sys:file:ssl-crl:ssl-crlstate': Ssl_Crl}
class Ssl_Crl(Resource):
    """BIG-IP® System sys file ssl-crls resource."""
    def __init__(self, ssl_crls):
        super(Ssl_Crl, self).__init__(ssl_crls)
        self._meta_data['required_json_kind'] =\
            'tm:sys:file:ssl-crl:ssl-crlstate'
        # CRLs are created from an uploaded source file.
        self._meta_data['required_creation_parameters'].update(
            ('name', 'sourcePath'))
    def modify(self, **kwargs):
        '''Modify is not supported for SSL CRLs

        :raises: UnsupportedMethod
        '''
        # FIX: docstring said "iFiles" and the message said "update method"
        # (copy/paste); the unsupported operation here is modify.
        raise UnsupportedMethod(
            "%s does not support the modify method" % self.__class__.__name__
        )
class Ssl_Csrs(Collection):
    """BIG-IP® System sys file ssl-csrs collection."""
    def __init__(self, File):
        super(Ssl_Csrs, self).__init__(File)
        self._meta_data['allowed_lazy_attributes'] = [Ssl_Csr]
        self._meta_data['attribute_registry'] =\
            {'tm:sys:file:ssl-csr:ssl-csrstate': Ssl_Csr}
        # The ssl-csr endpoint first appeared in TMOS 12.0.0.
        self._meta_data['minimum_version'] = '12.0.0'
class Ssl_Csr(Resource):
    """BIG-IP® System sys file ssl-csrs resource."""
    def __init__(self, ssl_csrs):
        super(Ssl_Csr, self).__init__(ssl_csrs)
        self._meta_data['required_json_kind'] =\
            'tm:sys:file:ssl-csr:ssl-csrstate'
        # CSRs are created from an uploaded source file.
        self._meta_data['required_creation_parameters'].update(
            ('name', 'sourcePath'))
    def modify(self, **kwargs):
        '''Modify is not supported for SSL CSRs

        :raises: UnsupportedMethod
        '''
        # FIX: docstring said "iFiles" and the message said "update method"
        # (copy/paste); the unsupported operation here is modify.
        raise UnsupportedMethod(
            "%s does not support the modify method" % self.__class__.__name__
        )
class Ssl_Keys(Collection):
    """BIG-IP® System sys file ssl-keys collection."""
    def __init__(self, File):
        super(Ssl_Keys, self).__init__(File)
        self._meta_data['allowed_lazy_attributes'] = [Ssl_Key]
        self._meta_data['attribute_registry'] =\
            {'tm:sys:file:ssl-key:ssl-keystate': Ssl_Key}
class Ssl_Key(Resource):
    """BIG-IP® System sys file ssl-keys resource."""
    def __init__(self, ssl_keys):
        super(Ssl_Key, self).__init__(ssl_keys)
        self._meta_data['required_json_kind'] =\
            'tm:sys:file:ssl-key:ssl-keystate'
        # Keys are created from an uploaded source file.
        self._meta_data['required_creation_parameters'].update(
            ('name', 'sourcePath'))
    def modify(self, **kwargs):
        '''Modify is not supported for SSL keys

        :raises: UnsupportedMethod
        '''
        # FIX: docstring said "iFiles" and the message said "update method"
        # (copy/paste); the unsupported operation here is modify.
        raise UnsupportedMethod(
            "%s does not support the modify method" % self.__class__.__name__
        )
|
3572c556a96f23671d4c375ccf77c04b60c52aa3
|
3a8678a73ff5caa3df02da97a0a0b49ab4482994
|
/python/pyiceberg/avro/codecs/snappy_codec.py
|
2da8ed8f720d99b0af532b3b2c17faed7063d62a
|
[
"Apache-2.0"
] |
permissive
|
apache/iceberg
|
b21a9c1bfbb328919f51cd257772dfd1bd86aaff
|
c9ce6a123b49c1c4e5bd950b388d69e6ff849b5d
|
refs/heads/master
| 2023-09-03T15:54:18.098529
| 2023-09-03T12:37:39
| 2023-09-03T12:37:39
| 158,256,479
| 4,358
| 1,659
|
Apache-2.0
| 2023-09-14T16:31:51
| 2018-11-19T16:26:46
|
Java
|
UTF-8
|
Python
| false
| false
| 2,702
|
py
|
snappy_codec.py
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import binascii
import struct
from pyiceberg.avro.codecs.codec import Codec
STRUCT_CRC32 = struct.Struct(">I")  # big-endian unsigned int

try:
    import snappy

    class SnappyCodec(Codec):
        """Avro snappy codec: snappy-compressed payload followed by a
        4-byte big-endian CRC32 of the *uncompressed* bytes."""

        @staticmethod
        def _check_crc32(bytes_: bytes, checksum: bytes) -> None:
            """Compute CRC-32 of *bytes_* and compare it to *checksum*.

            Args:
                bytes_ (bytes): The bytes to check against `checksum`
                checksum (bytes): Byte representation of a checksum

            Raises:
                ValueError: If the computed CRC-32 does not match the checksum
            """
            if binascii.crc32(bytes_) & 0xFFFFFFFF != STRUCT_CRC32.unpack(checksum)[0]:
                raise ValueError("Checksum failure")

        @staticmethod
        def compress(data: bytes) -> tuple[bytes, int]:
            compressed_data = snappy.compress(data)
            # A 4-byte, big-endian CRC32 checksum of the uncompressed data
            compressed_data += STRUCT_CRC32.pack(binascii.crc32(data) & 0xFFFFFFFF)
            return compressed_data, len(compressed_data)

        @staticmethod
        def decompress(data: bytes) -> bytes:
            # BUG FIX: the checksum must be taken from the *original* tail of
            # the buffer. Previously `data` was truncated first and the
            # checksum sliced from the truncated payload, so the last 4 bytes
            # of the compressed data were mistaken for the CRC32.
            checksum = data[-4:]
            payload = data[:-4]
            uncompressed = snappy.decompress(payload)
            SnappyCodec._check_crc32(uncompressed, checksum)
            return uncompressed

except ImportError:

    class SnappyCodec(Codec):  # type: ignore
        """Stub that raises when the optional python-snappy is missing."""

        @staticmethod
        def compress(data: bytes) -> tuple[bytes, int]:
            raise ImportError("Snappy support not installed, please install using `pip install pyiceberg[snappy]`")

        @staticmethod
        def decompress(data: bytes) -> bytes:
            raise ImportError("Snappy support not installed, please install using `pip install pyiceberg[snappy]`")
|
83915d16baa7fc50804f4a27e78232447edc7ea8
|
d05ff6dda43729011b7d469b0a2bc02ed66b6342
|
/frappe/patches/v11_0/update_list_user_settings.py
|
5209b9e384e969e9fc0ef7dff7677aebb660b7b7
|
[
"MIT"
] |
permissive
|
frappe/frappe
|
520c14bed3810c3360629a81dcc33f0ebe21ac4d
|
dd8f314bf4a8a4739eebbfac741abc533ac58bc1
|
refs/heads/develop
| 2023-08-30T19:29:10.406706
| 2023-08-30T11:20:40
| 2023-08-30T11:20:40
| 1,864,194
| 5,955
| 3,735
|
MIT
| 2023-09-14T16:08:04
| 2011-06-08T08:14:16
|
Python
|
UTF-8
|
Python
| false
| false
| 1,107
|
py
|
update_list_user_settings.py
|
import json
import frappe
from frappe.model.utils.user_settings import sync_user_settings, update_user_settings
def execute():
    """Update list_view's order by property from __UserSettings

    Migrates each user's per-doctype List settings: the legacy single
    ``order_by`` string (e.g. "`tabX`.`modified` desc") is split into the
    newer ``sort_by`` + ``sort_order`` pair, then persisted and synced.
    """
    users = frappe.db.sql("select distinct(user) from `__UserSettings`", as_dict=True)
    for user in users:
        # get user_settings for each user
        # NOTE: the user value is escaped via frappe.db.escape before being
        # formatted into the query string.
        settings = frappe.db.sql(
            "select * from `__UserSettings` \
            where user={}".format(
                frappe.db.escape(user.user)
            ),
            as_dict=True,
        )
        # traverse through each doctype's settings for a user
        for d in settings:
            data = json.loads(d["data"])
            if data and ("List" in data) and ("order_by" in data["List"]) and data["List"]["order_by"]:
                # convert order_by to sort_order & sort_by and delete order_by
                order_by = data["List"]["order_by"]
                # Strip backticks and the table qualifier: "`tabX`.`field` asc"
                # becomes "field` asc" style before the final split on space.
                if "`" in order_by and "." in order_by:
                    order_by = order_by.replace("`", "").split(".")[1]
                data["List"]["sort_by"], data["List"]["sort_order"] = order_by.split(" ")
                data["List"].pop("order_by")
                update_user_settings(d["doctype"], json.dumps(data), for_update=True)
    sync_user_settings()
|
bc3b47d3138ab50cf6591982c4e01423582e52c1
|
53a83642c01a8828e3d7bd0b18e33c3b694c2b84
|
/Python/GeeksforGeeks/sum-of-ap-series.py
|
76c7bbb2e3247db7c290306286f2b15534037d1f
|
[] |
no_license
|
anantkaushik/Competitive_Programming
|
1dcd60a28b5b951c23024d6090942be081ad249f
|
6dba38fd7aa4e71b5196d01d64e81f9336d08b13
|
refs/heads/master
| 2022-03-06T15:36:23.797340
| 2022-02-21T12:00:37
| 2022-02-21T12:00:37
| 82,700,948
| 271
| 95
| null | 2020-10-27T17:34:39
| 2017-02-21T16:18:16
|
Python
|
UTF-8
|
Python
| false
| false
| 763
|
py
|
sum-of-ap-series.py
|
"""
A series with same common difference is known as arithmetic series.
The first term of series is 'a' and common difference is d. The series looks like
a, a + d, a + 2d, a + 3d, . . . Find the sum of series.
Input : a = 1
d = 2
n = 4
Output : 16
1 + 3 + 5 + 7 = 16
Input : a = 2.5
d = 1.5
n = 20
Output : 335
Input:
The first line consists of an integer T i.e number of test cases. The first line and only
line of each test case consists of three values a,d,n.
Output:
Print the sum of the series. With two decimal places.
Example:
Input:
2
1 2 4
2.5 1.5 20
Output:
16.00
335.00
"""
t = int(input())
while t > 0:
a,d,n = map(float,input().split())
s = (n/2)*(2*a + (n- 1)*d)
print(format(s,'.2f'))
t -= 1
|
2efbf2c533d3a5cfeb53292dddcd25f8bd5e9306
|
3750387e046dfd287d02decc846860fae874bf3e
|
/hydra/core/utils.py
|
43ff3649a76c6bb7f82a37b9877de2a2075bd030
|
[
"MIT"
] |
permissive
|
facebookresearch/hydra
|
baf152caa30cd1d8a7e76ba2111fb9a49ecbe18c
|
b5ff66134f268164a20712d18b1230f4dd737444
|
refs/heads/main
| 2023-08-28T02:33:18.063795
| 2023-07-25T17:58:43
| 2023-07-25T17:58:43
| 191,632,914
| 7,667
| 692
|
MIT
| 2023-09-11T15:38:52
| 2019-06-12T19:33:15
|
Python
|
UTF-8
|
Python
| false
| false
| 10,502
|
py
|
utils.py
|
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
import copy
import logging
import os
import re
import sys
from contextlib import contextmanager
from dataclasses import dataclass
from datetime import datetime
from enum import Enum
from os.path import splitext
from pathlib import Path
from textwrap import dedent
from typing import Any, Dict, Optional, Sequence, Union, cast
from omegaconf import DictConfig, OmegaConf, open_dict, read_write
from hydra import version
from hydra._internal.deprecation_warning import deprecation_warning
from hydra.core.hydra_config import HydraConfig
from hydra.core.singleton import Singleton
from hydra.types import HydraContext, TaskFunction
log = logging.getLogger(__name__)
def simple_stdout_log_config(level: int = logging.INFO) -> None:
    """Configure the root logger to emit bare messages to stdout."""
    stream_handler = logging.StreamHandler(sys.stdout)
    stream_handler.setFormatter(logging.Formatter("%(message)s"))
    root_logger = logging.getLogger()
    root_logger.setLevel(level)
    root_logger.addHandler(stream_handler)
def configure_log(
    log_config: DictConfig,
    verbose_config: Union[bool, str, Sequence[str]] = False,
) -> None:
    """Apply a Hydra logging config (or a stdout fallback), then raise the
    configured loggers to DEBUG according to *verbose_config*.

    verbose_config may be a bool (True -> root at DEBUG), a single logger
    name, or a list of logger names.
    """
    assert isinstance(verbose_config, (bool, str)) or OmegaConf.is_list(verbose_config)
    if log_config is None:
        # No config supplied: default logging to stdout.
        fallback = logging.StreamHandler(sys.stdout)
        fallback.setFormatter(
            logging.Formatter("[%(asctime)s][%(name)s][%(levelname)s] - %(message)s")
        )
        root = logging.getLogger()
        root.setLevel(logging.INFO)
        root.addHandler(fallback)
    else:
        conf: Dict[str, Any] = OmegaConf.to_container(  # type: ignore
            log_config, resolve=True
        )
        # Only install the dictConfig when a root section is present.
        # NOTE(review): relies on logging.config being importable via
        # `import logging` side effects elsewhere — confirm `import
        # logging.config` is reachable.
        if conf["root"] is not None:
            logging.config.dictConfig(conf)

    if isinstance(verbose_config, bool):
        if verbose_config:
            logging.getLogger().setLevel(logging.DEBUG)
        return

    if isinstance(verbose_config, str):
        verbose_names = OmegaConf.create([verbose_config])
    elif OmegaConf.is_list(verbose_config):
        verbose_names = verbose_config  # type: ignore
    else:
        assert False
    for logger_name in verbose_names:
        logging.getLogger(logger_name).setLevel(logging.DEBUG)
def _save_config(cfg: DictConfig, filename: str, output_dir: Path) -> None:
    """Serialize *cfg* as YAML to output_dir/filename, creating directories."""
    output_dir.mkdir(parents=True, exist_ok=True)
    target = output_dir / filename
    target.write_text(OmegaConf.to_yaml(cfg), encoding="utf-8")
def filter_overrides(overrides: Sequence[str]) -> Sequence[str]:
    """
    :param overrides: overrides list
    :return: a new list with every ``hydra.``-prefixed override removed
    """
    kept = []
    for override in overrides:
        if not override.startswith("hydra."):
            kept.append(override)
    return kept
def _check_hydra_context(hydra_context: Optional[HydraContext]) -> None:
    """Reject a missing hydra_context with a helpful TypeError.

    hydra_context became a required argument of run_job in Hydra 1.2; this
    guard can be removed in Hydra 1.3.
    """
    if hydra_context is not None:
        return
    raise TypeError(
        dedent(
            """
            run_job's signature has changed: the `hydra_context` arg is now required.
            For more info, check https://github.com/facebookresearch/hydra/pull/1581."""
        ),
    )
def run_job(
    task_function: TaskFunction,
    config: DictConfig,
    job_dir_key: str,
    job_subdir_key: Optional[str],
    hydra_context: HydraContext,
    configure_logging: bool = True,
) -> "JobReturn":
    """Execute *task_function* with *config* as a single Hydra job.

    Resolves the job output directory from the config (job_dir_key, plus an
    optional job_subdir_key), optionally chdirs into it, configures logging,
    saves config snapshots, runs the task inside the job's env overrides,
    and returns a JobReturn capturing the outcome. The previous HydraConfig
    and working directory are always restored on exit.
    """
    _check_hydra_context(hydra_context)
    callbacks = hydra_context.callbacks

    old_cwd = os.getcwd()
    orig_hydra_cfg = HydraConfig.instance().cfg

    # init Hydra config for config evaluation
    HydraConfig.instance().set_config(config)

    output_dir = str(OmegaConf.select(config, job_dir_key))
    if job_subdir_key is not None:
        # evaluate job_subdir_key lazily.
        # this is running on the client side in sweep and contains things such as job:id which
        # are only available there.
        subdir = str(OmegaConf.select(config, job_subdir_key))
        output_dir = os.path.join(output_dir, subdir)

    # Record the absolute output dir in the (normally read-only) runtime node.
    with read_write(config.hydra.runtime):
        with open_dict(config.hydra.runtime):
            config.hydra.runtime.output_dir = os.path.abspath(output_dir)

    # update Hydra config
    HydraConfig.instance().set_config(config)
    _chdir = None
    try:
        ret = JobReturn()
        # The task sees the config without the hydra section.
        task_cfg = copy.deepcopy(config)
        with read_write(task_cfg):
            with open_dict(task_cfg):
                del task_cfg["hydra"]

        ret.cfg = task_cfg
        hydra_cfg = copy.deepcopy(HydraConfig.instance().cfg)
        assert isinstance(hydra_cfg, DictConfig)
        ret.hydra_cfg = hydra_cfg
        overrides = OmegaConf.to_container(config.hydra.overrides.task)
        assert isinstance(overrides, list)
        ret.overrides = overrides
        # handle output directories here
        Path(str(output_dir)).mkdir(parents=True, exist_ok=True)

        _chdir = hydra_cfg.hydra.job.chdir

        if _chdir is None:
            # Unset: default is False from Hydra 1.2 on; otherwise warn and
            # keep the legacy chdir-into-output-dir behavior.
            if version.base_at_least("1.2"):
                _chdir = False

        if _chdir is None:
            url = "https://hydra.cc/docs/1.2/upgrades/1.1_to_1.2/changes_to_job_working_dir/"
            deprecation_warning(
                message=dedent(
                    f"""\
                Future Hydra versions will no longer change working directory at job runtime by default.
                See {url} for more information."""
                ),
                stacklevel=2,
            )
            _chdir = True

        if _chdir:
            os.chdir(output_dir)
            ret.working_dir = output_dir
        else:
            ret.working_dir = os.getcwd()

        if configure_logging:
            configure_log(config.hydra.job_logging, config.hydra.verbose)

        if config.hydra.output_subdir is not None:
            # Snapshot task/hydra/override configs under the output subdir.
            hydra_output = Path(config.hydra.runtime.output_dir) / Path(
                config.hydra.output_subdir
            )
            _save_config(task_cfg, "config.yaml", hydra_output)
            _save_config(hydra_cfg, "hydra.yaml", hydra_output)
            _save_config(config.hydra.overrides.task, "overrides.yaml", hydra_output)

        with env_override(hydra_cfg.hydra.job.env_set):
            callbacks.on_job_start(config=config, task_function=task_function)
            try:
                # A failing task is captured, not propagated; JobReturn
                # re-raises when return_value is accessed.
                ret.return_value = task_function(task_cfg)
                ret.status = JobStatus.COMPLETED
            except Exception as e:
                ret.return_value = e
                ret.status = JobStatus.FAILED

        ret.task_name = JobRuntime.instance().get("name")

        _flush_loggers()

        callbacks.on_job_end(config=config, job_return=ret)

        return ret
    finally:
        # Always restore the caller's HydraConfig and working directory.
        HydraConfig.instance().cfg = orig_hydra_cfg
        if _chdir:
            os.chdir(old_cwd)
def get_valid_filename(s: str) -> str:
    """Sanitize *s* for use as a filename.

    Strips surrounding whitespace, turns inner spaces into underscores, and
    drops every character that is not alphanumeric, ``-``, ``_`` or ``.``.
    """
    cleaned = str(s).strip().replace(" ", "_")
    return re.sub(r"(?u)[^-\w.]", "", cleaned)
def setup_globals() -> None:
    """Register Hydra's standard OmegaConf resolvers.

    Resolvers: ``now`` (formatted timestamp, cached per run), ``hydra``
    (select a path inside the hydra config) and ``python_version``
    (major/minor/micro of the running interpreter).
    Please add documentation when you add a new resolver.
    """

    def _now(pattern):
        return datetime.now().strftime(pattern)

    def _hydra_select(path):
        return OmegaConf.select(cast(DictConfig, HydraConfig.get()), path)

    OmegaConf.register_new_resolver("now", _now, use_cache=True, replace=True)
    OmegaConf.register_new_resolver("hydra", _hydra_select, replace=True)

    major, minor, micro = sys.version_info[:3]
    levels = {
        "major": f"{major}",
        "minor": f"{major}.{minor}",
        "micro": f"{major}.{minor}.{micro}",
    }
    OmegaConf.register_new_resolver(
        "python_version", lambda level="minor": levels.get(level), replace=True
    )
class JobStatus(Enum):
    """Outcome of a single Hydra job (recorded by ``run_job``)."""

    UNKNOWN = 0  # not yet executed / outcome not recorded
    COMPLETED = 1  # task function returned normally
    FAILED = 2  # task function raised; the exception is stored in JobReturn
@dataclass
class JobReturn:
    """Result bundle produced by ``run_job`` for one job execution."""

    overrides: Optional[Sequence[str]] = None  # task overrides used for the run
    cfg: Optional[DictConfig] = None  # task config (hydra section removed)
    hydra_cfg: Optional[DictConfig] = None  # snapshot of the hydra config
    working_dir: Optional[str] = None  # directory the job ran in
    task_name: Optional[str] = None  # name from JobRuntime
    status: JobStatus = JobStatus.UNKNOWN
    _return_value: Any = None  # task result, or the captured exception

    @property
    def return_value(self) -> Any:
        """Task function's return value; re-raises the stored exception if
        the job FAILED. Asserts the job has actually run."""
        assert self.status != JobStatus.UNKNOWN, "return_value not yet available"
        if self.status == JobStatus.COMPLETED:
            return self._return_value
        else:
            sys.stderr.write(
                f"Error executing job with overrides: {self.overrides}" + os.linesep
            )
            raise self._return_value

    @return_value.setter
    def return_value(self, value: Any) -> None:
        self._return_value = value
class JobRuntime(metaclass=Singleton):
    """Process-wide key/value store for job runtime info (e.g. job name)."""

    def __init__(self) -> None:
        self.conf: DictConfig = OmegaConf.create()
        # Placeholder until the launcher sets the real job name.
        self.set("name", "UNKNOWN_NAME")

    def get(self, key: str) -> Any:
        """Return the value stored at *key*; raise KeyError when unset."""
        ret = OmegaConf.select(self.conf, key)
        if ret is None:
            raise KeyError(f"Key not found in {type(self).__name__}: {key}")
        return ret

    def set(self, key: str, value: Any) -> None:
        """Store *value* under *key* (logged at debug level)."""
        log.debug(f"Setting {type(self).__name__}:{key}={value}")
        self.conf[key] = value
def validate_config_path(config_path: Optional[str]) -> None:
    """Reject a config_path that names a yaml file instead of a directory.

    The config *name* must be given via config_name; config_path is a
    directory. Raises ValueError for ``.yaml``/``.yml`` paths.
    """
    if config_path is None:
        return
    _, extension = splitext(config_path)
    if extension not in (".yaml", ".yml"):
        return
    msg = dedent(
        """\
    Using config_path to specify the config name is not supported, specify the config name via config_name.
    See https://hydra.cc/docs/1.2/upgrades/0.11_to_1.0/config_path_changes
    """
    )
    raise ValueError(msg)
@contextmanager
def env_override(env: Dict[str, str]) -> Any:
    """Temporarily apply *env* on top of os.environ.

    On exit, every touched variable is restored to its previous value, and
    variables that did not exist before are removed again.
    """
    previous = {name: os.getenv(name) for name in env}
    os.environ.update(env)
    try:
        yield
    finally:
        for name, old_value in previous.items():
            if old_value is None:
                del os.environ[name]
            else:
                os.environ[name] = old_value
def _flush_loggers() -> None:
# Python logging does not have an official API to flush all loggers.
# This will have to do.
for h_weak_ref in logging._handlerList: # type: ignore
try:
h_weak_ref().flush()
except Exception:
# ignore exceptions thrown during flushing
pass
|
3b7a1e88faa3b878e7902369fc09589fb3619449
|
974d04d2ea27b1bba1c01015a98112d2afb78fe5
|
/tools/final_ut_parallel_rule.py
|
e0fc86c19a8cc46e9452a75dca7d9697b20b0db4
|
[
"Apache-2.0"
] |
permissive
|
PaddlePaddle/Paddle
|
b3d2583119082c8e4b74331dacc4d39ed4d7cff0
|
22a11a60e0e3d10a3cf610077a3d9942a6f964cb
|
refs/heads/develop
| 2023-08-17T21:27:30.568889
| 2023-08-17T12:38:22
| 2023-08-17T12:38:22
| 65,711,522
| 20,414
| 5,891
|
Apache-2.0
| 2023-09-14T19:20:51
| 2016-08-15T06:59:08
|
C++
|
UTF-8
|
Python
| false
| false
| 6,008
|
py
|
final_ut_parallel_rule.py
|
# Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import os
import sys
def classify_cases_by_mem(rootPath):
    """classify cases by mem

    Split the unit tests into parallel execution batches based on the peak
    GPU memory recorded per test (``build/ut_mem_map.json``) and on how the
    tests are grouped by required card count
    (``build/classify_case_by_cardNum.txt``).  Batch files are written under
    ``/pre_test``: each line is a ctest regex filter of the form
    ``^job$|^case_a$|^case_b$|...`` whose summed memory fits the budget.

    :param rootPath: repository root containing the ``build`` directory.
    """
    case_filename = '%s/build/classify_case_by_cardNum.txt' % rootPath
    case_exec_100 = [
        'test_conv_eltwiseadd_bn_fuse_pass',
        'test_trt_convert_pool2d',
        'test_fc_fuse_pass',
        'test_trt_convert_depthwise_conv2d',
        'test_quant2_int8_resnet50_mkldnn',
        'test_conv_elementwise_add_act_fuse_pass',
        'test_trt_convert_conv2d',
        'test_paddle_save_load',
        'test_logical_op',
        'test_nearest_interp_op',
        'test_pool2d_op',
        'test_conv3d_transpose_op',
        'test_lstmp_op',
        'test_cross_entropy2_op',
        'test_sgd_op',
        'test_imperative_ptq',
        'test_model',
        'test_custom_relu_op_setup',
        'test_dropout_op',
        'test_concat_op',
    ]  # "weakest link" bucketing: cases that take 70s-100s
    case_exec_200 = [
        'test_post_training_quantization_mnist',
        'test_trt_dynamic_shape_ernie_fp16_ser_deser',
        'test_trt_dynamic_shape_ernie',
        'test_layer_norm_op',
        'trt_quant_int8_yolov3_r50_test',
        'test_gru_op',
        'test_post_training_quantization_while',
        'test_mkldnn_log_softmax_op',
        'test_mkldnn_matmulv2_op',
        'test_mkldnn_shape_op',
        'interceptor_pipeline_short_path_test',
        'interceptor_pipeline_long_path_test',
        'test_cpuonly_spawn',
    ]  # "weakest link" bucketing: cases that take 110s-200s, or time out easily
    case_always_timeout = [
        'test_quant2_int8_resnet50_channelwise_mkldnn',
        'test_parallel_dygraph_unused_variables_gloo',
        'test_seq2seq',
        'test_pool3d_op',
        'test_trilinear_interp_op',
        'test_trilinear_interp_v2_op',
        'test_dropout_op',
        'test_parallel_dygraph_sync_batch_norm',
        'test_conv3d_op',
        'test_quant2_int8_resnet50_range_mkldnn',
    ]  # always timeout
    f = open(case_filename)
    lines = f.readlines()
    all_tests_by_card = {}
    # Parse the three "<card type>: ^job$|^case$|..." lines into plain
    # test-name lists keyed by card type.
    for line in lines:
        if line.startswith('single_card_tests:'):
            all_tests_by_card['single_card_tests'] = []
            line = line.split('single_card_tests: ^job$|')[1].split('|')
            for case in line:
                case = case.replace('^', '').replace('$', '').strip()
                all_tests_by_card['single_card_tests'].append(case)
        elif line.startswith('multiple_card_tests:'):
            all_tests_by_card['multiple_card_tests'] = []
            line = line.split('multiple_card_tests: ^job$|')[1].split('|')
            for case in line:
                case = case.replace('^', '').replace('$', '').strip()
                all_tests_by_card['multiple_card_tests'].append(case)
        elif line.startswith('exclusive_card_tests:'):
            all_tests_by_card['exclusive_card_tests'] = []
            # NOTE(review): unlike the two branches above, the separator here
            # omits the trailing '|', so the first element after split('|') is
            # an empty string that gets appended as a case -- confirm intended.
            line = line.split('exclusive_card_tests: ^job$')[1].split('|')
            for case in line:
                case = case.replace('^', '').replace('$', '').strip()
                all_tests_by_card['exclusive_card_tests'].append(case)
    if not os.path.exists("/pre_test"):
        os.mkdir("/pre_test")
    with open("/pre_test/classify_case_by_cardNum.json", "w") as f:
        json.dump(all_tests_by_card, f)
    with open("/pre_test/ut_mem_map.json", 'r') as load_f:
        # maps test name -> {"mem_nvidia": <peak GPU memory>, ...}
        new_lastest_mem = json.load(load_f)
    no_parallel_case = '^job$'
    for cardType in all_tests_by_card:
        case_mem_0 = '^job$'  # filter for tests with zero recorded GPU memory
        case_mem_1 = {}       # test name -> nonzero peak GPU memory
        for case in all_tests_by_card[cardType]:
            # Long-running cases are batched separately below; timeout-prone
            # cases are excluded from parallel batches entirely.
            if case in case_exec_100 or case in case_exec_200:
                continue
            if case in case_always_timeout:
                no_parallel_case = no_parallel_case + '|^' + case + '$'
                continue
            if case not in new_lastest_mem:
                continue
            # mem = 0
            if new_lastest_mem[case]["mem_nvidia"] == 0:
                case_mem_0 = case_mem_0 + '|^' + case + '$'
            # mem != 0
            else:
                case_mem_1[case] = new_lastest_mem[case]["mem_nvidia"]
        with open('/pre_test/%s_mem0' % cardType, 'w') as f:
            f.write(case_mem_0)
            f.close()
        # Greedy first-fit after sorting by memory: pack tests into a batch
        # until the budget is exceeded, then start a new batch line.
        case_mem_1_sort = sorted(case_mem_1.items(), key=lambda x: x[1])
        case_mem_1_line = '^job$'
        mem_1_sum = 0
        with open('/pre_test/%s' % cardType, 'w') as f_not_0:
            for index in case_mem_1_sort:
                # 14 * 1024 * 2: per-batch GPU memory budget (presumably MB,
                # i.e. ~28 GB) -- TODO confirm units against ut_mem_map.json
                if mem_1_sum < 14 * 1024 * 2:
                    mem_1_sum += index[1]
                    case_mem_1_line = case_mem_1_line + '|^' + index[0] + '$'
                else:
                    f_not_0.write(case_mem_1_line + '\n')
                    case_mem_1_line = '^job$|^' + index[0] + '$'
                    mem_1_sum = index[1]
            f_not_0.write(case_mem_1_line + '\n')
            # The long-running buckets get one dedicated batch line each,
            # only for the single-card group.
            if cardType == 'single_card_tests':
                for cases in [case_exec_100, case_exec_200]:
                    case_mem_1_line = '^job$'
                    for case in cases:
                        case_mem_1_line = case_mem_1_line + '|^' + case + '$'
                    f_not_0.write(case_mem_1_line + '\n')
            f_not_0.close()
    os.system('cp %s/build/nightly_case /pre_test/' % rootPath)


if __name__ == '__main__':
    rootPath = sys.argv[1]
    classify_cases_by_mem(rootPath)
|
b4a2209c78959c6515736b5cde8d1ffa0bb6a3fe
|
77c4f4dd27b8d7497e66a7a5a87ad7ea83f2c4be
|
/python/pyarrow/interchange/dataframe.py
|
59ba765c175ad471274a99bf857c8880a072e0b8
|
[
"Apache-2.0",
"MIT",
"BSD-3-Clause",
"BSD-2-Clause",
"ZPL-2.1",
"BSL-1.0",
"LicenseRef-scancode-public-domain",
"NTP",
"OpenSSL",
"CC-BY-4.0",
"LLVM-exception",
"Python-2.0",
"CC0-1.0",
"LicenseRef-scancode-protobuf",
"JSON",
"Zlib",
"CC-BY-3.0",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
apache/arrow
|
0714bfbf6fd491e1f4ed4acf838845ce4b94ec3e
|
59954225d4615f9b3bd7a3c266fb68761794229a
|
refs/heads/main
| 2023-08-24T09:04:22.253199
| 2023-08-24T07:21:51
| 2023-08-24T07:21:51
| 51,905,353
| 12,955
| 3,585
|
Apache-2.0
| 2023-09-14T20:45:56
| 2016-02-17T08:00:23
|
C++
|
UTF-8
|
Python
| false
| false
| 8,405
|
py
|
dataframe.py
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import (
Any,
Iterable,
Optional,
Sequence,
)
import pyarrow as pa
from pyarrow.interchange.column import _PyArrowColumn
class _PyArrowDataFrame:
    """
    A data frame class, with only the methods required by the interchange
    protocol defined.
    A "data frame" represents an ordered collection of named columns.
    A column's "name" must be a unique string.
    Columns may be accessed by name or by position.
    This could be a public data frame class, or an object with the methods and
    attributes defined on this DataFrame class could be returned from the
    ``__dataframe__`` method of a public data frame class in a library adhering
    to the dataframe interchange protocol specification.
    """

    def __init__(
        self, df: pa.Table | pa.RecordBatch,
        nan_as_null: bool = False,
        allow_copy: bool = True
    ) -> None:
        """
        Constructor - an instance of this (private) class is returned from
        `pa.Table.__dataframe__` or `pa.RecordBatch.__dataframe__`.
        """
        self._df = df
        # ``nan_as_null`` is a keyword intended for the consumer to tell the
        # producer to overwrite null values in the data with ``NaN`` (or
        # ``NaT``).
        if nan_as_null is True:
            # Unsupported: reject explicitly rather than silently ignore.
            raise RuntimeError(
                "nan_as_null=True currently has no effect, "
                "use the default nan_as_null=False"
            )
        self._nan_as_null = nan_as_null
        self._allow_copy = allow_copy

    def __dataframe__(
        self, nan_as_null: bool = False, allow_copy: bool = True
    ) -> _PyArrowDataFrame:
        """
        Construct a new exchange object, potentially changing the parameters.
        ``nan_as_null`` is a keyword intended for the consumer to tell the
        producer to overwrite null values in the data with ``NaN``.
        It is intended for cases where the consumer does not support the bit
        mask or byte mask that is the producer's native representation.
        ``allow_copy`` is a keyword that defines whether or not the library is
        allowed to make a copy of the data. For example, copying data would be
        necessary if a library supports strided buffers, given that this
        protocol specifies contiguous buffers.
        """
        return _PyArrowDataFrame(self._df, nan_as_null, allow_copy)

    @property
    def metadata(self) -> dict[str, Any]:
        """
        The metadata for the data frame, as a dictionary with string keys. The
        contents of `metadata` may be anything, they are meant for a library
        to store information that it needs to, e.g., roundtrip losslessly or
        for two implementations to share data that is not (yet) part of the
        interchange protocol specification. For avoiding collisions with other
        entries, please add name the keys with the name of the library
        followed by a period and the desired name, e.g, ``pandas.indexcol``.
        """
        # The metadata for the data frame, as a dictionary with string keys.
        # Add schema metadata here (pandas metadata or custom metadata)
        if self._df.schema.metadata:
            # Schema metadata is stored as bytes; decode and namespace the
            # keys under "pyarrow." per the protocol's collision guidance.
            schema_metadata = {"pyarrow." + k.decode('utf8'): v.decode('utf8')
                               for k, v in self._df.schema.metadata.items()}
            return schema_metadata
        else:
            return {}

    def num_columns(self) -> int:
        """
        Return the number of columns in the DataFrame.
        """
        return self._df.num_columns

    def num_rows(self) -> int:
        """
        Return the number of rows in the DataFrame, if available.
        """
        return self._df.num_rows

    def num_chunks(self) -> int:
        """
        Return the number of chunks the DataFrame consists of.
        """
        if isinstance(self._df, pa.RecordBatch):
            # A RecordBatch is by definition a single contiguous chunk.
            return 1
        else:
            # pyarrow.Table can have columns with different number
            # of chunks so we take the number of chunks that
            # .to_batches() returns as it takes the min chunk size
            # of all the columns (to_batches is a zero copy method)
            batches = self._df.to_batches()
            return len(batches)

    def column_names(self) -> Iterable[str]:
        """
        Return an iterator yielding the column names.
        """
        return self._df.schema.names

    def get_column(self, i: int) -> _PyArrowColumn:
        """
        Return the column at the indicated position.
        """
        return _PyArrowColumn(self._df.column(i),
                              allow_copy=self._allow_copy)

    def get_column_by_name(self, name: str) -> _PyArrowColumn:
        """
        Return the column whose name is the indicated name.
        """
        return _PyArrowColumn(self._df.column(name),
                              allow_copy=self._allow_copy)

    def get_columns(self) -> Iterable[_PyArrowColumn]:
        """
        Return an iterator yielding the columns.
        """
        return [
            _PyArrowColumn(col, allow_copy=self._allow_copy)
            for col in self._df.columns
        ]

    def select_columns(self, indices: Sequence[int]) -> _PyArrowDataFrame:
        """
        Create a new DataFrame by selecting a subset of columns by index.
        """
        return _PyArrowDataFrame(
            self._df.select(list(indices)), self._nan_as_null, self._allow_copy
        )

    def select_columns_by_name(
        self, names: Sequence[str]
    ) -> _PyArrowDataFrame:
        """
        Create a new DataFrame by selecting a subset of columns by name.
        """
        return _PyArrowDataFrame(
            self._df.select(list(names)), self._nan_as_null, self._allow_copy
        )

    def get_chunks(
        self, n_chunks: Optional[int] = None
    ) -> Iterable[_PyArrowDataFrame]:
        """
        Return an iterator yielding the chunks.
        By default (None), yields the chunks that the data is stored as by the
        producer. If given, ``n_chunks`` must be a multiple of
        ``self.num_chunks()``, meaning the producer must subdivide each chunk
        before yielding it.
        Note that the producer must ensure that all columns are chunked the
        same way.
        """
        # Subdivide chunks
        if n_chunks and n_chunks > 1:
            # Ceiling division so n_chunks slices cover all rows.
            chunk_size = self.num_rows() // n_chunks
            if self.num_rows() % n_chunks != 0:
                chunk_size += 1
            if isinstance(self._df, pa.Table):
                batches = self._df.to_batches(max_chunksize=chunk_size)
            else:
                batches = []
                for start in range(0, chunk_size * n_chunks, chunk_size):
                    batches.append(self._df.slice(start, chunk_size))
            # In case when the size of the chunk is such that the resulting
            # list is one less chunk then n_chunks -> append an empty chunk
            if len(batches) == n_chunks - 1:
                batches.append(pa.record_batch([[]], schema=self._df.schema))
        # yields the chunks that the data is stored as
        else:
            if isinstance(self._df, pa.Table):
                batches = self._df.to_batches()
            else:
                batches = [self._df]
        # Create an iterator of RecordBatches
        iterator = [_PyArrowDataFrame(batch,
                                      self._nan_as_null,
                                      self._allow_copy)
                    for batch in batches]
        return iterator
|
bc1933e848a27d33b278bcc3c4e4379404162c80
|
6b6d42eadf53e90b08ce564fb188a9a4b126ef12
|
/utils/update_copyright.py
|
9bc54cd7bb6b7c30434f8f9d1b9777e39de7ad06
|
[
"Apache-2.0",
"LLVM-exception",
"NCSA"
] |
permissive
|
AdaCore/libadalang
|
f97b95d1672cb1e5083c49ee632c6f9c787d36c2
|
50d658afa70ccbf46b8f7d9d43a21d45d56b206c
|
refs/heads/master
| 2023-09-01T18:34:26.976692
| 2023-08-25T15:53:43
| 2023-08-25T15:53:43
| 47,627,172
| 158
| 49
|
Apache-2.0
| 2022-12-14T10:29:45
| 2015-12-08T14:28:22
|
Ada
|
UTF-8
|
Python
| false
| false
| 4,074
|
py
|
update_copyright.py
|
#!/usr/bin/env python3
import argparse
import datetime
import glob
import re
import sys
from typing import Match
class Updater:
    """Namespace for copyright updating code."""

    copyright_re_list = [
        re.compile(pattern)
        for pattern in [
            # For Ada/C source code
            r"(?P<copyright>Copyright \(C\) (?P<years>[^\n]*), AdaCore)",
            # For Sphinx's conf.py
            r"(?P<copyright>copyright = u'(?P<years>[^']*), AdaCore')",
        ]
    ]
    """
    List of regular expressions that match the copyright notices to update.
    """

    single_year_re = re.compile(r"(\d{4})")
    year_range_re = re.compile(r"(\d{4})-(\d{4})")

    files = [
        "ada/copyright.py",
        "extensions/src/*.*",
        "langkit/langkit/support/*.*",
        "langkit/langkit/adasat/src/*.*",
        "langkit/dev_guide/conf.py",
        "user_manual/conf.py",
    ]
    """List of glob patterns for the files to update."""

    def __init__(self, year: int):
        """
        :param year: New year to include in copyright notices.
        """
        self.year = year

    def do_repl(self, m: Match[str]) -> str:
        """
        Update a copyright notice.

        :param m: Match object for a regular expression in
            ``Updater.copyright_re_list``.
        :return: The updated copyright notice, with the year span widened to
            include ``self.year``.
        :raise ValueError: If the existing year range is inverted
            (e.g. "2021-2015").
        """
        year_first: int
        year_last: int

        # Detect either a year range or a single year in the current copyright
        # notice.
        years_str = m.group("years")
        m2 = self.year_range_re.search(years_str)
        if m2:
            year_first = int(m2.group(1))
            year_last = int(m2.group(2))
        else:
            m2 = self.single_year_re.search(years_str)
            assert m2
            year_first = year_last = int(m2.group(1))

        # Create the new year range
        if year_first > year_last:
            # Bug fix: this message used to be a plain string literal
            # ("{m2.group(0)}" was printed verbatim); make it an f-string so
            # the offending range is actually interpolated.
            raise ValueError(f"invalid year range: {m2.group(0)}")
        if self.year < year_first:
            year_first = self.year
        if year_last < self.year:
            year_last = self.year

        # Format it: collapse equal bounds to a single year
        years_str = (
            str(year_first)
            if year_first == year_last else
            f"{year_first}-{year_last}"
        )

        # Splice the new years back between the unchanged prefix and suffix
        # of the matched notice.
        return "".join([
            m.string[m.start("copyright"):m.start("years")],
            years_str,
            m.string[m.end("years"):m.end("copyright")],
        ])

    def patch_file(self, filename: str) -> None:
        """
        Update copyright notices in the source file designated by ``filename``.
        """
        print("Patching", filename)
        with open(filename, "r") as f:
            contents = f.read()
        for copyright_re in self.copyright_re_list:
            contents = re.sub(copyright_re, self.do_repl, contents)
        with open(filename, "w") as f:
            f.write(contents)

    def patch_all_files(self) -> None:
        """
        Update copyright notices for all files designated by ``Updater.files``.
        """
        for pattern in self.files:
            for f in glob.glob(pattern):
                self.patch_file(f)
# Command-line interface: --year defaults to the current year; positional
# file arguments are optional (see main() for the fallback behavior).
args_parser = argparse.ArgumentParser(
    description="Update copyright notices to include current year"
)
args_parser.add_argument(
    "--year", type=int, default=datetime.date.today().year,
    help="Year to include in copyright notices (default: current year)"
)
args_parser.add_argument(
    "files", nargs="*",
    help="Files to update. If none passed, look for usual suspects in the"
    " Libadalang/Langkit/AdaSAT repositories."
)
def main(args: argparse.Namespace) -> int:
    """Entry point: patch the requested files, or the default set.

    :return: Process exit code (always 0 on success).
    """
    updater = Updater(args.year)
    # Explicitly listed files win; otherwise fall back to the built-in
    # Libadalang/Langkit/AdaSAT glob patterns.
    if not args.files:
        updater.patch_all_files()
    else:
        for filename in args.files:
            updater.patch_file(filename)
    return 0


if __name__ == "__main__":
    sys.exit(main(args_parser.parse_args()))
|
6a1dec88cee29405b26fe4963f593ef900ea9293
|
96dcea595e7c16cec07b3f649afd65f3660a0bad
|
/homeassistant/components/yamaha/__init__.py
|
92a34517ec6eaed77214cb96e9fee02c8b563680
|
[
"Apache-2.0"
] |
permissive
|
home-assistant/core
|
3455eac2e9d925c92d30178643b1aaccf3a6484f
|
80caeafcb5b6e2f9da192d0ea6dd1a5b8244b743
|
refs/heads/dev
| 2023-08-31T15:41:06.299469
| 2023-08-31T14:50:53
| 2023-08-31T14:50:53
| 12,888,993
| 35,501
| 20,617
|
Apache-2.0
| 2023-09-14T21:50:15
| 2013-09-17T07:29:48
|
Python
|
UTF-8
|
Python
| false
| false
| 28
|
py
|
__init__.py
|
"""The yamaha component."""
|
4a40b6842907fb5a3d46dd74733f21a597c1cf60
|
d6f6d1d1aac892f7555c8ae436430f8da415b455
|
/chempy/util/tests/test_rendering.py
|
4542a41e76ae6c4bc9095021df2b77d70124a371
|
[
"BSD-2-Clause",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
bjodah/chempy
|
9884e1475cb62ec9c5ae3ecc0833efe7e36dbbab
|
1ef1bf6751884b57dc13dc420e1f5c634e954375
|
refs/heads/master
| 2023-08-10T07:47:32.227130
| 2023-07-16T09:59:09
| 2023-07-16T10:13:17
| 36,242,853
| 481
| 92
|
BSD-2-Clause
| 2023-07-16T09:57:03
| 2015-05-25T16:49:18
|
Python
|
UTF-8
|
Python
| false
| false
| 726
|
py
|
test_rendering.py
|
import math
from chempy import Reaction
from chempy.units import allclose, default_units as u
from ..testing import requires
from ..rendering import eval_template
from ..parsing import get_parsing_context
from chempy.units import units_library
@requires(units_library)
def test_eval_template():
    """eval_template must substitute ``${...}`` expressions using the given
    keyword args; the rendered string must evaluate in chempy's parsing
    context (here, 2*pi*(1/pi) m**2 == 2 m**2)."""
    rendered = eval_template("${2*pi*arg*m**2}", arg=1 / math.pi)
    val = eval(rendered, get_parsing_context())
    assert allclose(val, 2 * u.m ** 2)
@requires(units_library)
def test_eval_template__Reaction():
    """A templated rate constant (6*pi*(1/pi) == 6.0 per M per s) must parse
    to the same Reaction parameter as the literal string."""
    rendered = eval_template("2 OH -> H2O2; ${6*pi*arg}/M/s", arg=1 / math.pi)
    assert allclose(
        Reaction.from_string(rendered).param,
        Reaction.from_string("2 OH -> H2O2; 6.0/M/s").param,
    )
|
b8e6240b72d93b294a791be2777914dbabfaffd2
|
037a5d4c286d1c373f1771382e0baa057524cef5
|
/tests/backend/test_common.py
|
7df59cef56c05dfe234283506377606bb7fdfae8
|
[
"Apache-2.0"
] |
permissive
|
fizyr/keras-retinanet
|
4e26631f6a643ae55a4229c416be03007a39f75e
|
7ac91dfbbacce77d6d9633fc09e16cd0ee71fd5e
|
refs/heads/main
| 2023-07-28T21:40:32.400051
| 2023-03-16T14:51:24
| 2023-03-16T14:51:24
| 100,249,425
| 4,696
| 2,335
|
Apache-2.0
| 2023-09-06T07:32:39
| 2017-08-14T09:14:29
|
Python
|
UTF-8
|
Python
| false
| false
| 3,822
|
py
|
test_common.py
|
"""
Copyright 2017-2018 Fizyr (https://fizyr.com)
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import numpy as np
from tensorflow import keras
import keras_retinanet.backend
def test_bbox_transform_inv():
    """Apply regression deltas to anchor boxes and compare against
    hand-computed (x1, y1, x2, y2) expectations.

    The first row (all-zero deltas) must return the box unchanged; the
    remaining rows exercise each coordinate. The expected values imply the
    deltas are scaled relative to box width/height -- presumably with the
    keras-retinanet default std factor; confirm against bbox_transform_inv.
    """
    boxes = np.array([[
        [100, 100, 200, 200],
        [100, 100, 300, 300],
        [100, 100, 200, 300],
        [100, 100, 300, 200],
        [80, 120, 200, 200],
        [80, 120, 300, 300],
        [80, 120, 200, 300],
        [80, 120, 300, 200],
    ]])
    boxes = keras.backend.variable(boxes)
    deltas = np.array([[
        [0   , 0  , 0   , 0   ],
        [0   , 0.1, 0   , 0   ],
        [-0.3, 0  , 0   , 0   ],
        [0.2 , 0.2, 0   , 0   ],
        [0   , 0  , 0.1 , 0   ],
        [0   , 0  , 0   , -0.3],
        [0   , 0  , 0.2 , 0.2 ],
        [0.1 , 0.2, -0.3, 0.4 ],
    ]])
    deltas = keras.backend.variable(deltas)
    expected = np.array([[
        [100  , 100  , 200   , 200  ],
        [100  , 104  , 300   , 300  ],
        [ 94  , 100  , 200   , 300  ],
        [108  , 104  , 300   , 200  ],
        [ 80  , 120  , 202.4 , 200  ],
        [ 80  , 120  , 300   , 289.2],
        [ 80  , 120  , 204.8 , 307.2],
        [ 84.4, 123.2, 286.8 , 206.4]
    ]])
    result = keras_retinanet.backend.bbox_transform_inv(boxes, deltas)
    result = keras.backend.eval(result)
    # decimal=2: expected values are hand-rounded to two decimals
    np.testing.assert_array_almost_equal(result, expected, decimal=2)
def test_shift():
    """Tile the base anchors over a 2x3 feature map with stride 8.

    Each grid cell (row, col) centers every base anchor at
    (col*stride + stride/2, row*stride + stride/2), so the expected list is
    the 5 base anchors offset by the cell center, cell by cell in row-major
    order.
    """
    shape = (2, 3)
    stride = 8
    # Base anchors, centered on the origin, as (x1, y1, x2, y2).
    anchors = np.array([
        [-8, -8, 8, 8],
        [-16, -16, 16, 16],
        [-12, -12, 12, 12],
        [-12, -16, 12, 16],
        [-16, -12, 16, 12]
    ], dtype=keras.backend.floatx())
    expected = [
        # anchors for (0, 0)
        [4 - 8, 4 - 8, 4 + 8, 4 + 8],
        [4 - 16, 4 - 16, 4 + 16, 4 + 16],
        [4 - 12, 4 - 12, 4 + 12, 4 + 12],
        [4 - 12, 4 - 16, 4 + 12, 4 + 16],
        [4 - 16, 4 - 12, 4 + 16, 4 + 12],
        # anchors for (0, 1)
        [12 - 8, 4 - 8, 12 + 8, 4 + 8],
        [12 - 16, 4 - 16, 12 + 16, 4 + 16],
        [12 - 12, 4 - 12, 12 + 12, 4 + 12],
        [12 - 12, 4 - 16, 12 + 12, 4 + 16],
        [12 - 16, 4 - 12, 12 + 16, 4 + 12],
        # anchors for (0, 2)
        [20 - 8, 4 - 8, 20 + 8, 4 + 8],
        [20 - 16, 4 - 16, 20 + 16, 4 + 16],
        [20 - 12, 4 - 12, 20 + 12, 4 + 12],
        [20 - 12, 4 - 16, 20 + 12, 4 + 16],
        [20 - 16, 4 - 12, 20 + 16, 4 + 12],
        # anchors for (1, 0)
        [4 - 8, 12 - 8, 4 + 8, 12 + 8],
        [4 - 16, 12 - 16, 4 + 16, 12 + 16],
        [4 - 12, 12 - 12, 4 + 12, 12 + 12],
        [4 - 12, 12 - 16, 4 + 12, 12 + 16],
        [4 - 16, 12 - 12, 4 + 16, 12 + 12],
        # anchors for (1, 1)
        [12 - 8, 12 - 8, 12 + 8, 12 + 8],
        [12 - 16, 12 - 16, 12 + 16, 12 + 16],
        [12 - 12, 12 - 12, 12 + 12, 12 + 12],
        [12 - 12, 12 - 16, 12 + 12, 12 + 16],
        [12 - 16, 12 - 12, 12 + 16, 12 + 12],
        # anchors for (1, 2)
        [20 - 8, 12 - 8, 20 + 8, 12 + 8],
        [20 - 16, 12 - 16, 20 + 16, 12 + 16],
        [20 - 12, 12 - 12, 20 + 12, 12 + 12],
        [20 - 12, 12 - 16, 20 + 12, 12 + 16],
        [20 - 16, 12 - 12, 20 + 16, 12 + 12],
    ]
    result = keras_retinanet.backend.shift(shape, stride, anchors)
    result = keras.backend.eval(result)
    np.testing.assert_array_equal(result, expected)
|
b0cd2e3915790e0d25b8a3cbcf71a94534f98693
|
010279e2ba272d09e9d2c4e903722e5faba2cf7a
|
/contrib/python/scipy/py3/scipy/spatial/distance.py
|
1941e467b62828cfe4ffc09a34888d549c663cf0
|
[
"Apache-2.0",
"Python-2.0",
"LicenseRef-scancode-unknown-license-reference",
"MIT",
"Qhull",
"BSD-3-Clause",
"BSL-1.0",
"BSD-2-Clause"
] |
permissive
|
catboost/catboost
|
854c1a1f439a96f1ae6b48e16644be20aa04dba2
|
f5042e35b945aded77b23470ead62d7eacefde92
|
refs/heads/master
| 2023-09-01T12:14:14.174108
| 2023-09-01T10:01:01
| 2023-09-01T10:22:12
| 97,556,265
| 8,012
| 1,425
|
Apache-2.0
| 2023-09-11T03:32:32
| 2017-07-18T05:29:04
|
Python
|
UTF-8
|
Python
| false
| false
| 90,588
|
py
|
distance.py
|
"""
Distance computations (:mod:`scipy.spatial.distance`)
=====================================================
.. sectionauthor:: Damian Eads
Function reference
------------------
Distance matrix computation from a collection of raw observation vectors
stored in a rectangular array.
.. autosummary::
:toctree: generated/
pdist -- pairwise distances between observation vectors.
cdist -- distances between two collections of observation vectors
squareform -- convert distance matrix to a condensed one and vice versa
directed_hausdorff -- directed Hausdorff distance between arrays
Predicates for checking the validity of distance matrices, both
condensed and redundant. Also contained in this module are functions
for computing the number of observations in a distance matrix.
.. autosummary::
:toctree: generated/
is_valid_dm -- checks for a valid distance matrix
is_valid_y -- checks for a valid condensed distance matrix
num_obs_dm -- # of observations in a distance matrix
num_obs_y -- # of observations in a condensed distance matrix
Distance functions between two numeric vectors ``u`` and ``v``. Computing
distances over a large collection of vectors is inefficient for these
functions. Use ``pdist`` for this purpose.
.. autosummary::
:toctree: generated/
braycurtis -- the Bray-Curtis distance.
canberra -- the Canberra distance.
chebyshev -- the Chebyshev distance.
cityblock -- the Manhattan distance.
correlation -- the Correlation distance.
cosine -- the Cosine distance.
euclidean -- the Euclidean distance.
jensenshannon -- the Jensen-Shannon distance.
mahalanobis -- the Mahalanobis distance.
minkowski -- the Minkowski distance.
seuclidean -- the normalized Euclidean distance.
sqeuclidean -- the squared Euclidean distance.
wminkowski -- (deprecated) alias of `minkowski`.
Distance functions between two boolean vectors (representing sets) ``u`` and
``v``. As in the case of numerical vectors, ``pdist`` is more efficient for
computing the distances between all pairs.
.. autosummary::
:toctree: generated/
dice -- the Dice dissimilarity.
hamming -- the Hamming distance.
jaccard -- the Jaccard distance.
kulsinski -- the Kulsinski distance.
rogerstanimoto -- the Rogers-Tanimoto dissimilarity.
russellrao -- the Russell-Rao dissimilarity.
sokalmichener -- the Sokal-Michener dissimilarity.
sokalsneath -- the Sokal-Sneath dissimilarity.
yule -- the Yule dissimilarity.
:func:`hamming` also operates over discrete numerical vectors.
"""
# Copyright (C) Damian Eads, 2007-2008. New BSD License.
__all__ = [
'braycurtis',
'canberra',
'cdist',
'chebyshev',
'cityblock',
'correlation',
'cosine',
'dice',
'directed_hausdorff',
'euclidean',
'hamming',
'is_valid_dm',
'is_valid_y',
'jaccard',
'jensenshannon',
'kulsinski',
'mahalanobis',
'matching',
'minkowski',
'num_obs_dm',
'num_obs_y',
'pdist',
'rogerstanimoto',
'russellrao',
'seuclidean',
'sokalmichener',
'sokalsneath',
'sqeuclidean',
'squareform',
'wminkowski',
'yule'
]
import warnings
import numpy as np
import dataclasses
from typing import List, Optional, Set, Callable
from functools import partial
from scipy._lib._util import _asarray_validated
from scipy._lib.deprecation import _deprecated
from . import _distance_wrap
from . import _hausdorff
from ..linalg import norm
from ..special import rel_entr
from . import _distance_pybind
def _copy_array_if_base_present(a):
"""Copy the array if its base points to a parent array."""
if a.base is not None:
return a.copy()
return a
def _correlation_cdist_wrap(XA, XB, dm, **kwargs):
    """Fill ``dm`` in place with correlation distances between the rows of
    ``XA`` and ``XB``.

    Correlation distance equals cosine distance of row-mean-centered data,
    so center both inputs and defer to the C cosine kernel.
    """
    XA = XA - XA.mean(axis=1, keepdims=True)
    XB = XB - XB.mean(axis=1, keepdims=True)
    _distance_wrap.cdist_cosine_double_wrap(XA, XB, dm, **kwargs)
def _correlation_pdist_wrap(X, dm, **kwargs):
    """Fill ``dm`` in place with pairwise correlation distances between the
    rows of ``X``, by centering each row and deferring to the C cosine
    kernel (correlation distance == cosine distance of centered data)."""
    X2 = X - X.mean(axis=1, keepdims=True)
    _distance_wrap.pdist_cosine_double_wrap(X2, dm, **kwargs)
def _convert_to_type(X, out_type):
    """Return ``X`` as a C-contiguous ndarray with dtype ``out_type``."""
    return np.ascontiguousarray(X, dtype=out_type)
def _nbool_correspond_all(u, v, w=None):
if u.dtype == v.dtype == bool and w is None:
not_u = ~u
not_v = ~v
nff = (not_u & not_v).sum()
nft = (not_u & v).sum()
ntf = (u & not_v).sum()
ntt = (u & v).sum()
else:
dtype = np.find_common_type([int], [u.dtype, v.dtype])
u = u.astype(dtype)
v = v.astype(dtype)
not_u = 1.0 - u
not_v = 1.0 - v
if w is not None:
not_u = w * not_u
u = w * u
nff = (not_u * not_v).sum()
nft = (not_u * v).sum()
ntf = (u * not_v).sum()
ntt = (u * v).sum()
return (nff, nft, ntf, ntt)
def _nbool_correspond_ft_tf(u, v, w=None):
if u.dtype == v.dtype == bool and w is None:
not_u = ~u
not_v = ~v
nft = (not_u & v).sum()
ntf = (u & not_v).sum()
else:
dtype = np.find_common_type([int], [u.dtype, v.dtype])
u = u.astype(dtype)
v = v.astype(dtype)
not_u = 1.0 - u
not_v = 1.0 - v
if w is not None:
not_u = w * not_u
u = w * u
nft = (not_u * v).sum()
ntf = (u * not_v).sum()
return (nft, ntf)
def _validate_cdist_input(XA, XB, mA, mB, n, metric_info, **kwargs):
    """Coerce ``XA``/``XB`` to a dtype supported by the metric and run the
    metric-specific kwargs validator.

    Returns ``(XA, XB, typ, kwargs)`` where ``typ`` is the chosen dtype.
    """
    # get supported types
    types = metric_info.types
    # choose best type: keep the input dtype if the metric supports it,
    # otherwise fall back to the metric's preferred (first) type
    typ = types[types.index(XA.dtype)] if XA.dtype in types else types[0]
    # validate data
    XA = _convert_to_type(XA, out_type=typ)
    XB = _convert_to_type(XB, out_type=typ)
    # validate kwargs through the metric-specific validator, if any
    _validate_kwargs = metric_info.validator
    if _validate_kwargs:
        kwargs = _validate_kwargs((XA, XB), mA + mB, n, **kwargs)
    return XA, XB, typ, kwargs
def _validate_weight_with_size(X, m, n, **kwargs):
    """Validate an optional ``w`` kwarg: it must be a 1-D weight vector of
    length ``n`` (the observation dimensionality).  Returns ``kwargs`` with
    ``w`` replaced by the validated weights, or unchanged if absent."""
    w = kwargs.pop('w', None)
    if w is None:
        return kwargs

    if w.ndim != 1 or w.shape[0] != n:
        raise ValueError("Weights must have same size as input vector. "
                         f"{w.shape[0]} vs. {n}")

    kwargs['w'] = _validate_weights(w)
    return kwargs
def _validate_hamming_kwargs(X, m, n, **kwargs):
    """Validate kwargs for the hamming metric: ``w`` defaults to unit
    weights of length ``n`` and must match the vector size."""
    w = kwargs.get('w', np.ones((n,), dtype='double'))

    if w.ndim != 1 or w.shape[0] != n:
        raise ValueError("Weights must have same size as input vector. %d vs. %d" % (w.shape[0], n))

    kwargs['w'] = _validate_weights(w)
    return kwargs
def _validate_mahalanobis_kwargs(X, m, n, **kwargs):
    """Validate kwargs for the mahalanobis metric.

    If ``VI`` (the inverse covariance matrix) is not supplied, estimate it
    from the data -- which requires more observations (``m``) than
    dimensions (``n``), otherwise the covariance matrix is singular.
    """
    VI = kwargs.pop('VI', None)
    if VI is None:
        if m <= n:
            # There are fewer observations than the dimension of
            # the observations.
            raise ValueError("The number of observations (%d) is too "
                             "small; the covariance matrix is "
                             "singular. For observations with %d "
                             "dimensions, at least %d observations "
                             "are required." % (m, n, n + 1))
        if isinstance(X, tuple):
            # cdist passes (XA, XB); estimate the covariance jointly.
            X = np.vstack(X)
        CV = np.atleast_2d(np.cov(X.astype(np.double, copy=False).T))
        VI = np.linalg.inv(CV).T.copy()
    kwargs["VI"] = _convert_to_double(VI)
    return kwargs
def _validate_minkowski_kwargs(X, m, n, **kwargs):
    """Validate kwargs for the minkowski metric: optional weights ``w``
    (checked against the vector size) and order ``p`` (default 2, must be
    at least 1)."""
    kwargs = _validate_weight_with_size(X, m, n, **kwargs)
    if 'p' not in kwargs:
        kwargs['p'] = 2.
    else:
        if kwargs['p'] < 1:
            raise ValueError("p must be at least 1")

    return kwargs
def _validate_pdist_input(X, m, n, metric_info, **kwargs):
    """Coerce ``X`` to a dtype supported by the metric and run the
    metric-specific kwargs validator.

    Returns ``(X, typ, kwargs)`` where ``typ`` is the chosen dtype.
    """
    # get supported types
    types = metric_info.types
    # choose best type: keep the input dtype if the metric supports it,
    # otherwise fall back to the metric's preferred (first) type
    typ = types[types.index(X.dtype)] if X.dtype in types else types[0]
    # validate data
    X = _convert_to_type(X, out_type=typ)

    # validate kwargs through the metric-specific validator, if any
    _validate_kwargs = metric_info.validator
    if _validate_kwargs:
        kwargs = _validate_kwargs(X, m, n, **kwargs)
    return X, typ, kwargs
def _validate_seuclidean_kwargs(X, m, n, **kwargs):
    """Validate kwargs for the seuclidean metric.

    If the variance vector ``V`` is absent, compute the per-dimension sample
    variance (ddof=1) from the data; otherwise check it is 1-D of length
    ``n``.
    """
    V = kwargs.pop('V', None)
    if V is None:
        if isinstance(X, tuple):
            # cdist passes (XA, XB); estimate the variances jointly.
            X = np.vstack(X)
        V = np.var(X.astype(np.double, copy=False), axis=0, ddof=1)
    else:
        V = np.asarray(V, order='c')
        if len(V.shape) != 1:
            raise ValueError('Variance vector V must '
                             'be one-dimensional.')
        if V.shape[0] != n:
            raise ValueError('Variance vector V must be of the same '
                             'dimension as the vectors on which the distances '
                             'are computed.')
    kwargs['V'] = _convert_to_double(V)
    return kwargs
def _validate_vector(u, dtype=None):
# XXX Is order='c' really necessary?
u = np.asarray(u, dtype=dtype, order='c')
if u.ndim == 1:
return u
# Ensure values such as u=1 and u=[1] still return 1-D arrays.
u = np.atleast_1d(u.squeeze())
if u.ndim > 1:
raise ValueError("Input vector should be 1-D.")
warnings.warn(
"scipy.spatial.distance metrics ignoring length-1 dimensions is "
"deprecated in SciPy 1.7 and will raise an error in SciPy 1.9.",
DeprecationWarning)
return u
def _validate_weights(w, dtype=np.double):
    """Coerce ``w`` to a 1-D array of ``dtype`` and reject negative entries."""
    w = _validate_vector(w, dtype=dtype)
    if np.any(w < 0):
        raise ValueError("Input weights should be all non-negative")
    return w
@_deprecated(
    msg="'wminkowski' metric is deprecated and will be removed in"
        " SciPy 1.8.0, use 'minkowski' instead.")
def _validate_wminkowski_kwargs(X, m, n, **kwargs):
    """Validate kwargs for the deprecated wminkowski metric: a weight vector
    ``w`` is mandatory, and ``p`` defaults to 2."""
    w = kwargs.pop('w', None)
    if w is None:
        raise ValueError('weighted minkowski requires a weight '
                         'vector `w` to be given.')
    kwargs['w'] = _validate_weights(w)
    if 'p' not in kwargs:
        kwargs['p'] = 2.
    return kwargs
def directed_hausdorff(u, v, seed=0):
    """
    Compute the directed Hausdorff distance between two N-D arrays.

    Distances between pairs are calculated using a Euclidean metric.
    Uses the early-break and random-sampling algorithm of Taha and Hanbury
    (IEEE TPAMI 37, 2015): worst case is ``O(m * o)`` like brute force, but
    the average runtime is close to O(m).

    Parameters
    ----------
    u : (M,N) array_like
        Input array.
    v : (O,N) array_like
        Input array.
    seed : int or None
        Local `numpy.random.RandomState` seed. Default is 0, a random
        shuffling of u and v that guarantees reproducibility.

    Returns
    -------
    d : double
        The directed Hausdorff distance between arrays `u` and `v`.
    index_1 : int
        index of point contributing to Hausdorff pair in `u`
    index_2 : int
        index of point contributing to Hausdorff pair in `v`

    Raises
    ------
    ValueError
        An exception is thrown if `u` and `v` do not have
        the same number of columns.

    See Also
    --------
    scipy.spatial.procrustes : Another similarity test for two data sets

    Examples
    --------
    >>> from scipy.spatial.distance import directed_hausdorff
    >>> u = np.array([(1.0, 0.0), (0.0, 1.0), (-1.0, 0.0), (0.0, -1.0)])
    >>> v = np.array([(2.0, 0.0), (0.0, 2.0), (-2.0, 0.0), (0.0, -4.0)])
    >>> directed_hausdorff(u, v)[0]
    2.23606797749979
    >>> max(directed_hausdorff(u, v)[0], directed_hausdorff(v, u)[0])
    3.0
    """
    u = np.asarray(u, dtype=np.float64, order='c')
    v = np.asarray(v, dtype=np.float64, order='c')
    if u.shape[1] != v.shape[1]:
        raise ValueError('u and v need to have the same '
                         'number of columns')
    # The shuffling and early-break search live in the compiled
    # _hausdorff extension; it returns the (d, index_1, index_2) tuple.
    return _hausdorff.directed_hausdorff(u, v, seed)
def minkowski(u, v, p=2, w=None):
    """
    Compute the Minkowski distance between two 1-D arrays.

    The Minkowski distance of order `p` between `u` and `v` is

    .. math::

       {\\|u-v\\|}_p = (\\sum{|u_i - v_i|^p})^{1/p}.

    When a weight vector `w` is given, each term :math:`|u_i - v_i|^p`
    is multiplied by :math:`w_i` before summation.

    Parameters
    ----------
    u : (N,) array_like
        Input array.
    v : (N,) array_like
        Input array.
    p : scalar
        The order of the norm of the difference; must be at least 1.
    w : (N,) array_like, optional
        The weights for each value in `u` and `v`. Default is None,
        which gives each value a weight of 1.0

    Returns
    -------
    minkowski : double
        The Minkowski distance between vectors `u` and `v`.

    Examples
    --------
    >>> from scipy.spatial import distance
    >>> distance.minkowski([1, 0, 0], [0, 1, 0], 1)
    2.0
    >>> distance.minkowski([1, 0, 0], [0, 1, 0], 2)
    1.4142135623730951
    """
    u = _validate_vector(u)
    v = _validate_vector(v)
    if p < 1:
        raise ValueError("p must be at least 1")
    diff = u - v
    if w is not None:
        w = _validate_weights(w)
        # Fold the weights into the difference vector: scaling component i
        # by w_i**(1/p) makes ||scaled||_p equal the weighted distance.
        if p == 1:
            scale = w
        elif p == 2:
            # sqrt is faster and more precise than a generic power
            scale = np.sqrt(w)
        elif p == np.inf:
            # for the max-norm a weight only acts as an on/off mask
            scale = (w != 0)
        else:
            scale = np.power(w, 1 / p)
        diff = scale * diff
    return norm(diff, ord=p)
def wminkowski(u, v, p, w):
    """
    Compute the weighted Minkowski distance between two 1-D arrays.

    The weighted Minkowski distance between `u` and `v` is defined as

    .. math::

       \\left(\\sum{(|w_i (u_i - v_i)|^p)}\\right)^{1/p}.

    Parameters
    ----------
    u : (N,) array_like
        Input array.
    v : (N,) array_like
        Input array.
    p : scalar
        The order of the norm of the difference.
    w : (N,) array_like
        The weight vector.

    Returns
    -------
    wminkowski : double
        The weighted Minkowski distance between vectors `u` and `v`.

    Notes
    -----
    `wminkowski` is deprecated and will be removed in SciPy 1.8.0.
    Use `minkowski` with the ``w`` argument instead.
    """
    # Emit the deprecation warning before any validation work so callers
    # see it even when the inputs are invalid.
    warnings.warn(
        message="scipy.distance.wminkowski is deprecated and will be removed "
                "in SciPy 1.8.0, use scipy.distance.minkowski instead.",
        category=DeprecationWarning)
    validated_w = _validate_weights(w)
    # minkowski() multiplies |u_i - v_i|**p by w_i, so passing w**p here
    # reproduces this function's |w_i (u_i - v_i)|**p convention.
    return minkowski(u, v, p=p, w=validated_w ** p)
def euclidean(u, v, w=None):
    """
    Computes the Euclidean distance between two 1-D arrays.

    The Euclidean distance between 1-D arrays `u` and `v` is defined as

    .. math::

       {||u-v||}_2

       \\left(\\sum{(w_i |(u_i - v_i)|^2)}\\right)^{1/2}

    Parameters
    ----------
    u : (N,) array_like
        Input array.
    v : (N,) array_like
        Input array.
    w : (N,) array_like, optional
        The weights for each value in `u` and `v`. Default is None,
        which gives each value a weight of 1.0

    Returns
    -------
    euclidean : double
        The Euclidean distance between vectors `u` and `v`.

    Examples
    --------
    >>> from scipy.spatial import distance
    >>> distance.euclidean([1, 0, 0], [0, 1, 0])
    1.4142135623730951
    >>> distance.euclidean([1, 1, 0], [0, 1, 0])
    1.0
    """
    # The Euclidean distance is the Minkowski distance of order 2.
    return minkowski(u, v, p=2, w=w)
def sqeuclidean(u, v, w=None):
    """
    Compute the squared Euclidean distance between two 1-D arrays.

    The squared Euclidean distance between `u` and `v` is defined as

    .. math::

       {||u-v||}_2^2

       \\left(\\sum{(w_i |(u_i - v_i)|^2)}\\right)

    Parameters
    ----------
    u : (N,) array_like
        Input array.
    v : (N,) array_like
        Input array.
    w : (N,) array_like, optional
        The weights for each value in `u` and `v`. Default is None,
        which gives each value a weight of 1.0

    Returns
    -------
    sqeuclidean : double
        The squared Euclidean distance between vectors `u` and `v`.

    Examples
    --------
    >>> from scipy.spatial import distance
    >>> distance.sqeuclidean([1, 0, 0], [0, 1, 0])
    2.0
    >>> distance.sqeuclidean([1, 1, 0], [0, 1, 0])
    1.0
    """
    # Keep inexact (float/complex) input dtypes as-is; coerce everything
    # else (ints, bools, plain lists) to float64 for numeric stability.
    u_dtype = (None if hasattr(u, "dtype") and np.issubdtype(u.dtype, np.inexact)
               else np.float64)
    v_dtype = (None if hasattr(v, "dtype") and np.issubdtype(v.dtype, np.inexact)
               else np.float64)
    u = _validate_vector(u, dtype=u_dtype)
    v = _validate_vector(v, dtype=v_dtype)
    diff = u - v
    if w is None:
        weighted = diff
    else:
        # apply the weights to only one factor of the dot product
        weighted = _validate_weights(w) * diff
    return np.dot(diff, weighted)
def correlation(u, v, w=None, centered=True):
    """
    Compute the correlation distance between two 1-D arrays.

    The correlation distance between `u` and `v` is defined as

    .. math::

        1 - \\frac{(u - \\bar{u}) \\cdot (v - \\bar{v})}
                  {{||(u - \\bar{u})||}_2 {||(v - \\bar{v})||}_2}

    where :math:`\\bar{u}` is the mean of the elements of `u`
    and :math:`x \\cdot y` is the dot product of :math:`x` and :math:`y`.

    Parameters
    ----------
    u : (N,) array_like
        Input array.
    v : (N,) array_like
        Input array.
    w : (N,) array_like, optional
        The weights for each value in `u` and `v`. Default is None,
        which gives each value a weight of 1.0
    centered : bool, optional
        If True, `u` and `v` will be centered. Default is True.

    Returns
    -------
    correlation : double
        The correlation distance between 1-D array `u` and `v`.
    """
    u = _validate_vector(u)
    v = _validate_vector(v)
    if w is not None:
        w = _validate_weights(w)
    if centered:
        # Subtract the (weighted) means so the result is a true
        # correlation rather than an uncentered cosine similarity.
        u = u - np.average(u, weights=w)
        v = v - np.average(v, weights=w)
    uv = np.average(u * v, weights=w)
    uu = np.average(np.square(u), weights=w)
    vv = np.average(np.square(v), weights=w)
    dist = 1.0 - uv / np.sqrt(uu * vv)
    # Clamp tiny negative values caused by floating-point rounding.
    return np.abs(dist)
def cosine(u, v, w=None):
    """
    Compute the Cosine distance between 1-D arrays.

    The Cosine distance between `u` and `v` is defined as

    .. math::

        1 - \\frac{u \\cdot v}
                  {||u||_2 ||v||_2}.

    where :math:`u \\cdot v` is the dot product of :math:`u` and
    :math:`v`.

    Parameters
    ----------
    u : (N,) array_like
        Input array.
    v : (N,) array_like
        Input array.
    w : (N,) array_like, optional
        The weights for each value in `u` and `v`. Default is None,
        which gives each value a weight of 1.0

    Returns
    -------
    cosine : double
        The Cosine distance between vectors `u` and `v`.

    Examples
    --------
    >>> from scipy.spatial import distance
    >>> distance.cosine([1, 0, 0], [0, 1, 0])
    1.0
    >>> distance.cosine([100, 0, 0], [0, 1, 0])
    1.0
    >>> distance.cosine([1, 1, 0], [0, 1, 0])
    0.29289321881345254
    """
    # Cosine distance is uncentered ("reflective") correlation.
    dist = correlation(u, v, w=w, centered=False)
    # Clamp to the valid range [0, 2] to absorb rounding overshoot.
    if dist < 0:
        return 0
    if dist > 2.0:
        return 2.0
    return dist
def hamming(u, v, w=None):
    """
    Compute the Hamming distance between two 1-D arrays.

    The Hamming distance between 1-D arrays `u` and `v` is simply the
    proportion of disagreeing components in `u` and `v`. If `u` and `v`
    are boolean vectors, the Hamming distance is

    .. math::

       \\frac{c_{01} + c_{10}}{n}

    where :math:`c_{ij}` is the number of occurrences of
    :math:`\\mathtt{u[k]} = i` and :math:`\\mathtt{v[k]} = j` for
    :math:`k < n`.

    Parameters
    ----------
    u : (N,) array_like
        Input array.
    v : (N,) array_like
        Input array.
    w : (N,) array_like, optional
        The weights for each value in `u` and `v`. Default is None,
        which gives each value a weight of 1.0

    Returns
    -------
    hamming : double
        The Hamming distance between vectors `u` and `v`.

    Examples
    --------
    >>> from scipy.spatial import distance
    >>> distance.hamming([1, 0, 0], [0, 1, 0])
    0.66666666666666663
    >>> distance.hamming([1, 0, 0], [1, 1, 0])
    0.33333333333333331
    """
    u = _validate_vector(u)
    v = _validate_vector(v)
    if u.shape != v.shape:
        raise ValueError('The 1d arrays must have equal lengths.')
    mismatches = u != v
    if w is None:
        # unweighted: plain fraction of differing positions
        return np.average(mismatches)
    w = _validate_weights(w)
    return np.average(mismatches, weights=w)
def jaccard(u, v, w=None):
    """
    Compute the Jaccard-Needham dissimilarity between two boolean 1-D arrays.

    The Jaccard-Needham dissimilarity between 1-D boolean arrays `u`
    and `v` is defined as

    .. math::

       \\frac{c_{TF} + c_{FT}}
            {c_{TT} + c_{FT} + c_{TF}}

    where :math:`c_{ij}` is the number of occurrences of
    :math:`\\mathtt{u[k]} = i` and :math:`\\mathtt{v[k]} = j` for
    :math:`k < n`.

    Parameters
    ----------
    u : (N,) array_like, bool
        Input array.
    v : (N,) array_like, bool
        Input array.
    w : (N,) array_like, optional
        The weights for each value in `u` and `v`. Default is None,
        which gives each value a weight of 1.0

    Returns
    -------
    jaccard : double
        The Jaccard distance between vectors `u` and `v`.

    Notes
    -----
    When both `u` and `v` lead to a `0/0` division, i.e. there is no
    overlap between the items in the vectors, the returned distance is
    0 (see [2]_ for a discussion of this convention).

    References
    ----------
    .. [1] https://en.wikipedia.org/wiki/Jaccard_index
    .. [2] S. Kosub, "A note on the triangle inequality for the Jaccard
       distance", 2016, :arxiv:`1612.02696`

    Examples
    --------
    >>> from scipy.spatial import distance
    >>> distance.jaccard([1, 0, 0], [0, 1, 0])
    1.0
    >>> distance.jaccard([1, 0, 0], [1, 1, 0])
    0.5
    """
    u = _validate_vector(u)
    v = _validate_vector(v)
    # positions where at least one vector is nonzero
    any_nonzero = (u != 0) | (v != 0)
    # ... of those, the positions where the two vectors disagree
    mismatch = (u != v) & any_nonzero
    if w is not None:
        w = _validate_weights(w)
        any_nonzero = w * any_nonzero
        mismatch = w * mismatch
    numer = np.double(mismatch.sum())
    denom = np.double(any_nonzero.sum())
    # By convention a 0/0 situation (no nonzero entries) gives distance 0.
    return (numer / denom) if denom != 0 else 0
def kulsinski(u, v, w=None):
    """
    Compute the Kulsinski dissimilarity between two boolean 1-D arrays.

    The Kulsinski dissimilarity between two boolean 1-D arrays `u` and
    `v` is defined as

    .. math::

         \\frac{c_{TF} + c_{FT} - c_{TT} + n}
              {c_{FT} + c_{TF} + n}

    where :math:`c_{ij}` is the number of occurrences of
    :math:`\\mathtt{u[k]} = i` and :math:`\\mathtt{v[k]} = j` for
    :math:`k < n`.

    Parameters
    ----------
    u : (N,) array_like, bool
        Input array.
    v : (N,) array_like, bool
        Input array.
    w : (N,) array_like, optional
        The weights for each value in `u` and `v`. Default is None,
        which gives each value a weight of 1.0

    Returns
    -------
    kulsinski : double
        The Kulsinski distance between vectors `u` and `v`.

    Examples
    --------
    >>> from scipy.spatial import distance
    >>> distance.kulsinski([1, 0, 0], [0, 1, 0])
    1.0
    >>> distance.kulsinski([1, 0, 0], [1, 1, 0])
    0.75
    """
    u = _validate_vector(u)
    v = _validate_vector(v)
    if w is None:
        # unweighted: n is the vector length
        n = float(len(u))
    else:
        w = _validate_weights(w)
        n = w.sum()
    nff, nft, ntf, ntt = _nbool_correspond_all(u, v, w=w)
    disagreements = ntf + nft
    return (disagreements - ntt + n) / (disagreements + n)
def seuclidean(u, v, V):
    """
    Return the standardized Euclidean distance between two 1-D arrays.

    The standardized Euclidean distance between `u` and `v` is the
    Euclidean distance computed after dividing each squared component
    difference by the corresponding variance in `V`.

    Parameters
    ----------
    u : (N,) array_like
        Input array.
    v : (N,) array_like
        Input array.
    V : (N,) array_like
        `V` is an 1-D array of component variances. It is usually computed
        among a larger collection vectors.

    Returns
    -------
    seuclidean : double
        The standardized Euclidean distance between vectors `u` and `v`.

    Examples
    --------
    >>> from scipy.spatial import distance
    >>> distance.seuclidean([1, 0, 0], [0, 1, 0], [0.1, 0.1, 0.1])
    4.4721359549995796
    """
    u = _validate_vector(u)
    v = _validate_vector(v)
    V = _validate_vector(V, dtype=np.float64)
    if not (V.shape[0] == u.shape[0] == v.shape[0]):
        raise TypeError('V must be a 1-D array of the same dimension '
                        'as u and v.')
    # Standardizing by variances is a weighted Euclidean distance with
    # weights 1/V.
    return euclidean(u, v, w=1/V)
def cityblock(u, v, w=None):
    """
    Compute the City Block (Manhattan) distance.

    Computes the Manhattan distance between two 1-D arrays `u` and `v`,
    which is defined as

    .. math::

       \\sum_i {\\left| u_i - v_i \\right|}.

    Parameters
    ----------
    u : (N,) array_like
        Input array.
    v : (N,) array_like
        Input array.
    w : (N,) array_like, optional
        The weights for each value in `u` and `v`. Default is None,
        which gives each value a weight of 1.0

    Returns
    -------
    cityblock : double
        The City Block (Manhattan) distance between vectors `u` and `v`.

    Examples
    --------
    >>> from scipy.spatial import distance
    >>> distance.cityblock([1, 0, 0], [0, 1, 0])
    2
    >>> distance.cityblock([1, 0, 0], [0, 2, 0])
    3
    """
    u = _validate_vector(u)
    v = _validate_vector(v)
    abs_diff = abs(u - v)
    if w is None:
        return abs_diff.sum()
    w = _validate_weights(w)
    return (w * abs_diff).sum()
def mahalanobis(u, v, VI):
    """
    Compute the Mahalanobis distance between two 1-D arrays.

    The Mahalanobis distance between 1-D arrays `u` and `v` is defined as

    .. math::

       \\sqrt{ (u-v) V^{-1} (u-v)^T }

    where ``V`` is the covariance matrix. Note that the argument `VI`
    is the inverse of ``V``.

    Parameters
    ----------
    u : (N,) array_like
        Input array.
    v : (N,) array_like
        Input array.
    VI : array_like
        The inverse of the covariance matrix.

    Returns
    -------
    mahalanobis : double
        The Mahalanobis distance between vectors `u` and `v`.

    Examples
    --------
    >>> from scipy.spatial import distance
    >>> iv = [[1, 0.5, 0.5], [0.5, 1, 0.5], [0.5, 0.5, 1]]
    >>> distance.mahalanobis([1, 0, 0], [0, 1, 0], iv)
    1.0
    """
    u = _validate_vector(u)
    v = _validate_vector(v)
    VI = np.atleast_2d(VI)
    diff = u - v
    # quadratic form (u-v) VI (u-v)^T
    return np.sqrt(diff @ VI @ diff)
def chebyshev(u, v, w=None):
    """
    Compute the Chebyshev distance.

    Computes the Chebyshev distance between two 1-D arrays `u` and `v`,
    which is defined as

    .. math::

       \\max_i {|u_i-v_i|}.

    Parameters
    ----------
    u : (N,) array_like
        Input vector.
    v : (N,) array_like
        Input vector.
    w : (N,) array_like, optional
        Weights for each value in `u` and `v`. Because the Chebyshev
        distance is a maximum rather than a sum, nonzero weight
        magnitudes do not scale the result; however, components whose
        weight is exactly zero are excluded from the maximum.

    Returns
    -------
    chebyshev : double
        The Chebyshev distance between vectors `u` and `v`.

    Examples
    --------
    >>> from scipy.spatial import distance
    >>> distance.chebyshev([1, 0, 0], [0, 1, 0])
    1
    >>> distance.chebyshev([1, 1, 0], [0, 1, 0])
    1
    """
    u = _validate_vector(u)
    v = _validate_vector(v)
    if w is not None:
        w = _validate_weights(w)
        has_weight = w > 0
        # Drop zero-weight components so they cannot contribute to the
        # maximum; skip the fancy indexing when every weight is positive.
        if has_weight.sum() < w.size:
            u = u[has_weight]
            v = v[has_weight]
    return max(abs(u - v))
def braycurtis(u, v, w=None):
    """
    Compute the Bray-Curtis distance between two 1-D arrays.

    Bray-Curtis distance is defined as

    .. math::

       \\sum{|u_i-v_i|} / \\sum{|u_i+v_i|}

    The Bray-Curtis distance is in the range [0, 1] if all coordinates
    are positive, and is undefined if the inputs are of length zero.

    Parameters
    ----------
    u : (N,) array_like
        Input array.
    v : (N,) array_like
        Input array.
    w : (N,) array_like, optional
        The weights for each value in `u` and `v`. Default is None,
        which gives each value a weight of 1.0

    Returns
    -------
    braycurtis : double
        The Bray-Curtis distance between 1-D arrays `u` and `v`.

    Examples
    --------
    >>> from scipy.spatial import distance
    >>> distance.braycurtis([1, 0, 0], [0, 1, 0])
    1.0
    >>> distance.braycurtis([1, 1, 0], [0, 1, 0])
    0.33333333333333331
    """
    u = _validate_vector(u)
    # float64 ensures true division in the final ratio
    v = _validate_vector(v, dtype=np.float64)
    abs_diff = abs(u - v)
    abs_sum = abs(u + v)
    if w is not None:
        w = _validate_weights(w)
        abs_diff = w * abs_diff
        abs_sum = w * abs_sum
    return abs_diff.sum() / abs_sum.sum()
def canberra(u, v, w=None):
    """
    Compute the Canberra distance between two 1-D arrays.

    The Canberra distance is defined as

    .. math::

         d(u,v) = \\sum_i \\frac{|u_i-v_i|}
                              {|u_i|+|v_i|}.

    Parameters
    ----------
    u : (N,) array_like
        Input array.
    v : (N,) array_like
        Input array.
    w : (N,) array_like, optional
        The weights for each value in `u` and `v`. Default is None,
        which gives each value a weight of 1.0

    Returns
    -------
    canberra : double
        The Canberra distance between vectors `u` and `v`.

    Notes
    -----
    When `u[i]` and `v[i]` are 0 for given i, then the fraction 0/0 = 0
    is used in the calculation.

    Examples
    --------
    >>> from scipy.spatial import distance
    >>> distance.canberra([1, 0, 0], [0, 1, 0])
    2.0
    >>> distance.canberra([1, 1, 0], [0, 1, 0])
    1.0
    """
    u = _validate_vector(u)
    v = _validate_vector(v, dtype=np.float64)
    if w is not None:
        w = _validate_weights(w)
    # 0/0 terms become NaN here; nansum below treats them as 0,
    # implementing the 0/0 = 0 convention documented above.
    with np.errstate(invalid='ignore'):
        ratios = abs(u - v) / (abs(u) + abs(v))
        if w is not None:
            ratios = w * ratios
        total = np.nansum(ratios)
    return total
def jensenshannon(p, q, base=None, *, axis=0, keepdims=False):
    """
    Compute the Jensen-Shannon distance (metric) between
    two probability arrays. This is the square root
    of the Jensen-Shannon divergence.

    The Jensen-Shannon distance between two probability
    vectors `p` and `q` is defined as

    .. math::

       \\sqrt{\\frac{D(p \\parallel m) + D(q \\parallel m)}{2}}

    where :math:`m` is the pointwise mean of :math:`p` and :math:`q`
    and :math:`D` is the Kullback-Leibler divergence.

    This routine will normalize `p` and `q` if they don't sum to 1.0.

    Parameters
    ----------
    p : (N,) array_like
        left probability vector
    q : (N,) array_like
        right probability vector
    base : double, optional
        the base of the logarithm used to compute the output
        if not given, then the routine uses the default base of
        scipy.stats.entropy.
    axis : int, optional
        Axis along which the Jensen-Shannon distances are computed. The
        default is 0.

        .. versionadded:: 1.7.0
    keepdims : bool, optional
        If this is set to `True`, the reduced axes are left in the
        result as dimensions with size one. Default is False.

        .. versionadded:: 1.7.0

    Returns
    -------
    js : double or ndarray
        The Jensen-Shannon distances between `p` and `q` along the
        `axis`.

    Notes
    -----
    .. versionadded:: 1.2.0

    Examples
    --------
    >>> from scipy.spatial import distance
    >>> distance.jensenshannon([1.0, 0.0, 0.0], [0.0, 1.0, 0.0], 2.0)
    1.0
    >>> distance.jensenshannon([1.0, 0.0], [0.5, 0.5])
    0.46450140402245893
    >>> distance.jensenshannon([1.0, 0.0, 0.0], [1.0, 0.0, 0.0])
    0.0
    """
    p = np.asarray(p)
    q = np.asarray(q)
    # normalize both inputs to probability vectors along `axis`
    p = p / np.sum(p, axis=axis, keepdims=True)
    q = q / np.sum(q, axis=axis, keepdims=True)
    midpoint = (p + q) / 2.0
    # sum of the two KL divergences against the pointwise mean
    divergence = (np.sum(rel_entr(p, midpoint), axis=axis, keepdims=keepdims)
                  + np.sum(rel_entr(q, midpoint), axis=axis, keepdims=keepdims))
    if base is not None:
        # rel_entr works in nats; rescale to the requested log base
        divergence /= np.log(base)
    return np.sqrt(divergence / 2.0)
def yule(u, v, w=None):
    """
    Compute the Yule dissimilarity between two boolean 1-D arrays.

    The Yule dissimilarity is defined as

    .. math::

         \\frac{R}{c_{TT} * c_{FF} + \\frac{R}{2}}

    where :math:`c_{ij}` is the number of occurrences of
    :math:`\\mathtt{u[k]} = i` and :math:`\\mathtt{v[k]} = j` for
    :math:`k < n` and :math:`R = 2.0 * c_{TF} * c_{FT}`.

    Parameters
    ----------
    u : (N,) array_like, bool
        Input array.
    v : (N,) array_like, bool
        Input array.
    w : (N,) array_like, optional
        The weights for each value in `u` and `v`. Default is None,
        which gives each value a weight of 1.0

    Returns
    -------
    yule : double
        The Yule dissimilarity between vectors `u` and `v`.

    Examples
    --------
    >>> from scipy.spatial import distance
    >>> distance.yule([1, 0, 0], [0, 1, 0])
    2.0
    >>> distance.yule([1, 1, 0], [0, 1, 0])
    0.0
    """
    u = _validate_vector(u)
    v = _validate_vector(v)
    if w is not None:
        w = _validate_weights(w)
    nff, nft, ntf, ntt = _nbool_correspond_all(u, v, w=w)
    half_R = ntf * nft
    # With no opposite-valued pairs R is 0 and the dissimilarity is 0
    # (this also avoids a possible 0/0 in the general formula).
    return 0.0 if half_R == 0 else float(2.0 * half_R / (ntt * nff + half_R))
# Deprecated alias kept only for backward compatibility; new code should
# call `hamming` directly.
@np.deprecate(message="spatial.distance.matching is deprecated in scipy 1.0.0; "
                      "use spatial.distance.hamming instead.")
def matching(u, v, w=None):
    """
    Compute the Hamming distance between two boolean 1-D arrays.

    This is a deprecated synonym for :func:`hamming`; parameters and
    return value are identical to those of :func:`hamming`.
    """
    return hamming(u, v, w=w)
def dice(u, v, w=None):
    """
    Compute the Dice dissimilarity between two boolean 1-D arrays.

    The Dice dissimilarity between `u` and `v` is

    .. math::

         \\frac{c_{TF} + c_{FT}}
              {2c_{TT} + c_{FT} + c_{TF}}

    where :math:`c_{ij}` is the number of occurrences of
    :math:`\\mathtt{u[k]} = i` and :math:`\\mathtt{v[k]} = j` for
    :math:`k < n`.

    Parameters
    ----------
    u : (N,) array_like, bool
        Input 1-D array.
    v : (N,) array_like, bool
        Input 1-D array.
    w : (N,) array_like, optional
        The weights for each value in `u` and `v`. Default is None,
        which gives each value a weight of 1.0

    Returns
    -------
    dice : double
        The Dice dissimilarity between 1-D arrays `u` and `v`.

    Examples
    --------
    >>> from scipy.spatial import distance
    >>> distance.dice([1, 0, 0], [0, 1, 0])
    1.0
    >>> distance.dice([1, 0, 0], [1, 1, 0])
    0.3333333333333333
    """
    u = _validate_vector(u)
    v = _validate_vector(v)
    if w is not None:
        w = _validate_weights(w)
    if u.dtype == v.dtype == bool and w is None:
        # fast path: boolean AND counts the TT agreements directly
        ntt = (u & v).sum()
    else:
        # promote both inputs to a common at-least-int dtype so the
        # products below do not overflow or stay boolean
        dtype = np.find_common_type([int], [u.dtype, v.dtype])
        u = u.astype(dtype)
        v = v.astype(dtype)
        ntt = (u * v).sum() if w is None else (u * v * w).sum()
    nft, ntf = _nbool_correspond_ft_tf(u, v, w=w)
    return float((ntf + nft) / np.array(2.0 * ntt + ntf + nft))
def rogerstanimoto(u, v, w=None):
    """
    Compute the Rogers-Tanimoto dissimilarity between two boolean 1-D arrays.

    The Rogers-Tanimoto dissimilarity between two boolean 1-D arrays
    `u` and `v` is defined as

    .. math::
       \\frac{R}
            {c_{TT} + c_{FF} + R}

    where :math:`c_{ij}` is the number of occurrences of
    :math:`\\mathtt{u[k]} = i` and :math:`\\mathtt{v[k]} = j` for
    :math:`k < n` and :math:`R = 2(c_{TF} + c_{FT})`.

    Parameters
    ----------
    u : (N,) array_like, bool
        Input array.
    v : (N,) array_like, bool
        Input array.
    w : (N,) array_like, optional
        The weights for each value in `u` and `v`. Default is None,
        which gives each value a weight of 1.0

    Returns
    -------
    rogerstanimoto : double
        The Rogers-Tanimoto dissimilarity between vectors
        `u` and `v`.

    Examples
    --------
    >>> from scipy.spatial import distance
    >>> distance.rogerstanimoto([1, 0, 0], [0, 1, 0])
    0.8
    >>> distance.rogerstanimoto([1, 0, 0], [1, 1, 0])
    0.5
    """
    u = _validate_vector(u)
    v = _validate_vector(v)
    if w is not None:
        w = _validate_weights(w)
    nff, nft, ntf, ntt = _nbool_correspond_all(u, v, w=w)
    R = 2.0 * (ntf + nft)
    return float(R) / float(ntt + nff + R)
def russellrao(u, v, w=None):
    """
    Compute the Russell-Rao dissimilarity between two boolean 1-D arrays.

    The Russell-Rao dissimilarity between two boolean 1-D arrays `u`
    and `v` is defined as

    .. math::

      \\frac{n - c_{TT}}
           {n}

    where :math:`c_{ij}` is the number of occurrences of
    :math:`\\mathtt{u[k]} = i` and :math:`\\mathtt{v[k]} = j` for
    :math:`k < n`.

    Parameters
    ----------
    u : (N,) array_like, bool
        Input array.
    v : (N,) array_like, bool
        Input array.
    w : (N,) array_like, optional
        The weights for each value in `u` and `v`. Default is None,
        which gives each value a weight of 1.0

    Returns
    -------
    russellrao : double
        The Russell-Rao dissimilarity between vectors `u` and `v`.

    Examples
    --------
    >>> from scipy.spatial import distance
    >>> distance.russellrao([1, 0, 0], [0, 1, 0])
    1.0
    >>> distance.russellrao([1, 0, 0], [1, 1, 0])
    0.6666666666666666
    """
    u = _validate_vector(u)
    v = _validate_vector(v)
    if w is None:
        n = float(len(u))
        # boolean AND is cheaper than multiplication when both inputs
        # are boolean arrays
        ntt = (u & v).sum() if u.dtype == v.dtype == bool else (u * v).sum()
    else:
        w = _validate_weights(w)
        n = w.sum()
        ntt = (u * v * w).sum()
    return float(n - ntt) / n
def sokalmichener(u, v, w=None):
    """
    Compute the Sokal-Michener dissimilarity between two boolean 1-D arrays.

    The Sokal-Michener dissimilarity between boolean 1-D arrays `u` and
    `v` is defined as

    .. math::

       \\frac{R}
            {S + R}

    where :math:`c_{ij}` is the number of occurrences of
    :math:`\\mathtt{u[k]} = i` and :math:`\\mathtt{v[k]} = j` for
    :math:`k < n`, :math:`R = 2 * (c_{TF} + c_{FT})` and
    :math:`S = c_{FF} + c_{TT}`.

    Parameters
    ----------
    u : (N,) array_like, bool
        Input array.
    v : (N,) array_like, bool
        Input array.
    w : (N,) array_like, optional
        The weights for each value in `u` and `v`. Default is None,
        which gives each value a weight of 1.0

    Returns
    -------
    sokalmichener : double
        The Sokal-Michener dissimilarity between vectors `u` and `v`.

    Examples
    --------
    >>> from scipy.spatial import distance
    >>> distance.sokalmichener([1, 0, 0], [0, 1, 0])
    0.8
    >>> distance.sokalmichener([1, 0, 0], [1, 1, 0])
    0.5
    """
    u = _validate_vector(u)
    v = _validate_vector(v)
    if w is not None:
        w = _validate_weights(w)
    nff, nft, ntf, ntt = _nbool_correspond_all(u, v, w=w)
    R = 2.0 * (ntf + nft)
    S = ntt + nff
    return float(R) / float(S + R)
def sokalsneath(u, v, w=None):
    """
    Compute the Sokal-Sneath dissimilarity between two boolean 1-D arrays.

    The Sokal-Sneath dissimilarity between `u` and `v` is

    .. math::

       \\frac{R}
            {c_{TT} + R}

    where :math:`c_{ij}` is the number of occurrences of
    :math:`\\mathtt{u[k]} = i` and :math:`\\mathtt{v[k]} = j` for
    :math:`k < n` and :math:`R = 2(c_{TF} + c_{FT})`.

    Parameters
    ----------
    u : (N,) array_like, bool
        Input array.
    v : (N,) array_like, bool
        Input array.
    w : (N,) array_like, optional
        The weights for each value in `u` and `v`. Default is None,
        which gives each value a weight of 1.0

    Returns
    -------
    sokalsneath : double
        The Sokal-Sneath dissimilarity between vectors `u` and `v`.

    Raises
    ------
    ValueError
        If both vectors are entirely false (the dissimilarity is
        undefined in that case).

    Examples
    --------
    >>> from scipy.spatial import distance
    >>> distance.sokalsneath([1, 0, 0], [0, 1, 0])
    1.0
    >>> distance.sokalsneath([1, 0, 0], [1, 1, 0])
    0.66666666666666663
    """
    u = _validate_vector(u)
    v = _validate_vector(v)
    if u.dtype == v.dtype == bool and w is None:
        # fast path: boolean AND counts the TT agreements directly
        ntt = (u & v).sum()
    elif w is None:
        ntt = (u * v).sum()
    else:
        w = _validate_weights(w)
        ntt = (u * v * w).sum()
    nft, ntf = _nbool_correspond_ft_tf(u, v, w=w)
    R = 2.0 * (ntf + nft)
    denom = np.array(ntt + R)
    if not denom.any():
        raise ValueError('Sokal-Sneath dissimilarity is not defined for '
                         'vectors that are entirely false.')
    return float(R) / denom
# Converters used by the metric dispatch machinery to coerce input arrays
# to the dtypes expected by the compiled kernels.
_convert_to_double = partial(_convert_to_type, out_type=np.double)
_convert_to_bool = partial(_convert_to_type, out_type=bool)

# adding python-only wrappers to _distance_wrap module, so that the
# correlation metrics can be dispatched through the same getattr-based
# lookup as the compiled kernels
_distance_wrap.pdist_correlation_double_wrap = _correlation_pdist_wrap
_distance_wrap.cdist_correlation_double_wrap = _correlation_cdist_wrap
@dataclasses.dataclass(frozen=True)
class CDistMetricWrapper:
    """Callable adapter dispatching ``cdist`` for one named metric.

    Routes to the compiled C kernel when possible and falls back to the
    pure-Python implementation when a weight vector is supplied.
    """
    metric_name: str

    def __call__(self, XA, XB, *, out=None, **kwargs):
        XA = np.ascontiguousarray(XA)
        XB = np.ascontiguousarray(XB)
        rows_a, num_cols = XA.shape
        rows_b = XB.shape[0]
        info = _METRICS[self.metric_name]
        XA, XB, typ, kwargs = _validate_cdist_input(
            XA, XB, rows_a, rows_b, num_cols, info, **kwargs)
        weights = kwargs.pop('w', None)
        if weights is not None:
            # the compiled kernels do not accept weights; use the
            # Python fallback with the scalar distance function
            return _cdist_callable(
                XA, XB, metric=info.dist_func, out=out, w=weights, **kwargs)
        dm = _prepare_out_argument(out, np.double, (rows_a, rows_b))
        kernel = getattr(_distance_wrap,
                         f'cdist_{self.metric_name}_{typ}_wrap')
        kernel(XA, XB, dm, **kwargs)
        return dm
@dataclasses.dataclass(frozen=True)
class CDistWeightedMetricWrapper:
    """Callable adapter for ``cdist`` metrics that have a separate
    weighted compiled kernel, selected when ``w`` is passed."""
    metric_name: str
    weighted_metric: str

    def __call__(self, XA, XB, *, out=None, **kwargs):
        XA = np.ascontiguousarray(XA)
        XB = np.ascontiguousarray(XB)
        rows_a, num_cols = XA.shape
        rows_b = XB.shape[0]
        XA, XB, typ, kwargs = _validate_cdist_input(
            XA, XB, rows_a, rows_b, num_cols,
            _METRICS[self.metric_name], **kwargs)
        dm = _prepare_out_argument(out, np.double, (rows_a, rows_b))
        weights = kwargs.pop('w', None)
        if weights is None:
            kernel_name = self.metric_name
        else:
            # switch to the weighted variant of the compiled kernel
            kernel_name = self.weighted_metric
            kwargs['w'] = weights
        kernel = getattr(_distance_wrap, f'cdist_{kernel_name}_{typ}_wrap')
        kernel(XA, XB, dm, **kwargs)
        return dm
@dataclasses.dataclass(frozen=True)
class PDistMetricWrapper:
    """Callable adapter dispatching ``pdist`` for one named metric.

    Routes to the compiled C kernel when possible and falls back to the
    pure-Python implementation when a weight vector is supplied.
    """
    metric_name: str

    def __call__(self, X, *, out=None, **kwargs):
        X = np.ascontiguousarray(X)
        num_rows, num_cols = X.shape
        info = _METRICS[self.metric_name]
        X, typ, kwargs = _validate_pdist_input(
            X, num_rows, num_cols, info, **kwargs)
        weights = kwargs.pop('w', None)
        if weights is not None:
            # the compiled kernels do not accept weights; use the
            # Python fallback with the scalar distance function
            return _pdist_callable(
                X, metric=info.dist_func, out=out, w=weights, **kwargs)
        # condensed output: one entry per unordered pair of rows
        n_pairs = (num_rows * (num_rows - 1)) // 2
        dm = _prepare_out_argument(out, np.double, (n_pairs,))
        kernel = getattr(_distance_wrap,
                         f'pdist_{self.metric_name}_{typ}_wrap')
        kernel(X, dm, **kwargs)
        return dm
@dataclasses.dataclass(frozen=True)
class PDistWeightedMetricWrapper:
    """Callable adapter for ``pdist`` metrics that have a separate
    weighted compiled kernel, selected when ``w`` is passed."""
    metric_name: str
    weighted_metric: str

    def __call__(self, X, *, out=None, **kwargs):
        X = np.ascontiguousarray(X)
        num_rows, num_cols = X.shape
        X, typ, kwargs = _validate_pdist_input(
            X, num_rows, num_cols, _METRICS[self.metric_name], **kwargs)
        # condensed output: one entry per unordered pair of rows
        n_pairs = (num_rows * (num_rows - 1)) // 2
        dm = _prepare_out_argument(out, np.double, (n_pairs,))
        weights = kwargs.pop('w', None)
        if weights is None:
            kernel_name = self.metric_name
        else:
            # switch to the weighted variant of the compiled kernel
            kernel_name = self.weighted_metric
            kwargs['w'] = weights
        kernel = getattr(_distance_wrap, f'pdist_{kernel_name}_{typ}_wrap')
        kernel(X, dm, **kwargs)
        return dm
@dataclasses.dataclass(frozen=True)
class MetricInfo:
    """Static description of one distance metric — its names, kernels,
    and validation hook — used to build the metric registry."""
    # Name of python distance function
    canonical_name: str
    # All aliases, including canonical_name
    aka: Set[str]
    # unvectorized distance function
    dist_func: Callable
    # Optimized cdist function
    cdist_func: Callable
    # Optimized pdist function
    pdist_func: Callable
    # function that checks kwargs and computes default values:
    # f(X, m, n, **kwargs)
    validator: Optional[Callable] = None
    # list of supported types:
    # X (pdist) and XA (cdist) are used to choose the type. if there is no
    # match the first type is used. Default double
    types: List[str] = dataclasses.field(default_factory=lambda: ['double'])
    # true if out array must be C-contiguous
    requires_contiguous_out: bool = True
# Registry of implemented metrics. Each MetricInfo ties a canonical name and
# its aliases to the pure-Python reference implementation (dist_func) and the
# optimized cdist/pdist implementations (either a _distance_pybind function
# or one of the wrapper dataclasses above).
_METRIC_INFOS = [
    MetricInfo(
        canonical_name='braycurtis',
        aka={'braycurtis'},
        dist_func=braycurtis,
        cdist_func=_distance_pybind.cdist_braycurtis,
        pdist_func=_distance_pybind.pdist_braycurtis,
    ),
    MetricInfo(
        canonical_name='canberra',
        aka={'canberra'},
        dist_func=canberra,
        cdist_func=_distance_pybind.cdist_canberra,
        pdist_func=_distance_pybind.pdist_canberra,
    ),
    MetricInfo(
        canonical_name='chebyshev',
        aka={'chebychev', 'chebyshev', 'cheby', 'cheb', 'ch'},
        dist_func=chebyshev,
        cdist_func=_distance_pybind.cdist_chebyshev,
        pdist_func=_distance_pybind.pdist_chebyshev,
    ),
    MetricInfo(
        canonical_name='cityblock',
        aka={'cityblock', 'cblock', 'cb', 'c'},
        dist_func=cityblock,
        cdist_func=_distance_pybind.cdist_cityblock,
        pdist_func=_distance_pybind.pdist_cityblock,
    ),
    MetricInfo(
        canonical_name='correlation',
        aka={'correlation', 'co'},
        dist_func=correlation,
        cdist_func=CDistMetricWrapper('correlation'),
        pdist_func=PDistMetricWrapper('correlation'),
    ),
    MetricInfo(
        canonical_name='cosine',
        aka={'cosine', 'cos'},
        dist_func=cosine,
        cdist_func=CDistMetricWrapper('cosine'),
        pdist_func=PDistMetricWrapper('cosine'),
    ),
    MetricInfo(
        canonical_name='dice',
        aka={'dice'},
        types=['bool'],
        dist_func=dice,
        cdist_func=CDistMetricWrapper('dice'),
        pdist_func=PDistMetricWrapper('dice'),
    ),
    MetricInfo(
        canonical_name='euclidean',
        aka={'euclidean', 'euclid', 'eu', 'e'},
        dist_func=euclidean,
        cdist_func=_distance_pybind.cdist_euclidean,
        pdist_func=_distance_pybind.pdist_euclidean,
    ),
    # 'matching' is documented as a synonym for 'hamming' (see the pdist
    # and cdist docstrings below).
    MetricInfo(
        canonical_name='hamming',
        aka={'matching', 'hamming', 'hamm', 'ha', 'h'},
        types=['double', 'bool'],
        validator=_validate_hamming_kwargs,
        dist_func=hamming,
        cdist_func=CDistWeightedMetricWrapper('hamming', 'hamming'),
        pdist_func=PDistWeightedMetricWrapper('hamming', 'hamming'),
    ),
    MetricInfo(
        canonical_name='jaccard',
        aka={'jaccard', 'jacc', 'ja', 'j'},
        types=['double', 'bool'],
        dist_func=jaccard,
        cdist_func=CDistMetricWrapper('jaccard'),
        pdist_func=PDistMetricWrapper('jaccard'),
    ),
    MetricInfo(
        canonical_name='jensenshannon',
        aka={'jensenshannon', 'js'},
        dist_func=jensenshannon,
        cdist_func=CDistMetricWrapper('jensenshannon'),
        pdist_func=PDistMetricWrapper('jensenshannon'),
    ),
    MetricInfo(
        canonical_name='kulsinski',
        aka={'kulsinski'},
        types=['bool'],
        dist_func=kulsinski,
        cdist_func=CDistMetricWrapper('kulsinski'),
        pdist_func=PDistMetricWrapper('kulsinski'),
    ),
    MetricInfo(
        canonical_name='mahalanobis',
        aka={'mahalanobis', 'mahal', 'mah'},
        validator=_validate_mahalanobis_kwargs,
        dist_func=mahalanobis,
        cdist_func=CDistMetricWrapper('mahalanobis'),
        pdist_func=PDistMetricWrapper('mahalanobis'),
    ),
    MetricInfo(
        canonical_name='minkowski',
        aka={'minkowski', 'mi', 'm', 'pnorm'},
        validator=_validate_minkowski_kwargs,
        dist_func=minkowski,
        cdist_func=_distance_pybind.cdist_minkowski,
        pdist_func=_distance_pybind.pdist_minkowski,
    ),
    MetricInfo(
        canonical_name='rogerstanimoto',
        aka={'rogerstanimoto'},
        types=['bool'],
        dist_func=rogerstanimoto,
        cdist_func=CDistMetricWrapper('rogerstanimoto'),
        pdist_func=PDistMetricWrapper('rogerstanimoto'),
    ),
    MetricInfo(
        canonical_name='russellrao',
        aka={'russellrao'},
        types=['bool'],
        dist_func=russellrao,
        cdist_func=CDistMetricWrapper('russellrao'),
        pdist_func=PDistMetricWrapper('russellrao'),
    ),
    MetricInfo(
        canonical_name='seuclidean',
        aka={'seuclidean', 'se', 's'},
        validator=_validate_seuclidean_kwargs,
        dist_func=seuclidean,
        cdist_func=CDistMetricWrapper('seuclidean'),
        pdist_func=PDistMetricWrapper('seuclidean'),
    ),
    MetricInfo(
        canonical_name='sokalmichener',
        aka={'sokalmichener'},
        types=['bool'],
        dist_func=sokalmichener,
        cdist_func=CDistMetricWrapper('sokalmichener'),
        pdist_func=PDistMetricWrapper('sokalmichener'),
    ),
    MetricInfo(
        canonical_name='sokalsneath',
        aka={'sokalsneath'},
        types=['bool'],
        dist_func=sokalsneath,
        cdist_func=CDistMetricWrapper('sokalsneath'),
        pdist_func=PDistMetricWrapper('sokalsneath'),
    ),
    MetricInfo(
        canonical_name='sqeuclidean',
        aka={'sqeuclidean', 'sqe', 'sqeuclid'},
        dist_func=sqeuclidean,
        cdist_func=_distance_pybind.cdist_sqeuclidean,
        pdist_func=_distance_pybind.pdist_sqeuclidean,
    ),
    # 'wminkowski' is deprecated and slated for removal in SciPy 1.8.0
    # (see the pdist/cdist docstrings below); use 'minkowski' instead.
    MetricInfo(
        canonical_name='wminkowski',
        aka={'wminkowski', 'wmi', 'wm', 'wpnorm'},
        validator=_validate_wminkowski_kwargs,
        dist_func=wminkowski,
        cdist_func=CDistWeightedMetricWrapper(
            'wminkowski', 'old_weighted_minkowski'),
        pdist_func=PDistWeightedMetricWrapper(
            'wminkowski', 'old_weighted_minkowski'),
    ),
    MetricInfo(
        canonical_name='yule',
        aka={'yule'},
        types=['bool'],
        dist_func=yule,
        cdist_func=CDistMetricWrapper('yule'),
        pdist_func=PDistMetricWrapper('yule'),
    ),
]
# Lookup tables derived from the registry above.
# canonical name -> MetricInfo
_METRICS = {info.canonical_name: info for info in _METRIC_INFOS}
# any alias (including the canonical name) -> MetricInfo
_METRIC_ALIAS = {alias: info
                 for info in _METRIC_INFOS
                 for alias in info.aka}
# canonical names, in registration order
_METRICS_NAMES = list(_METRICS)
# 'test_<name>' -> MetricInfo, for the slow cross-check implementations
_TEST_METRICS = {'test_' + info.canonical_name: info for info in _METRIC_INFOS}
def pdist(X, metric='euclidean', *, out=None, **kwargs):
    """
    Pairwise distances between observations in n-dimensional space.

    Parameters
    ----------
    X : array_like
        An m by n array of m original observations in an
        n-dimensional space.
    metric : str or function, optional
        The distance metric to use.  As a string, one of 'braycurtis',
        'canberra', 'chebyshev', 'cityblock', 'correlation', 'cosine',
        'dice', 'euclidean', 'hamming', 'jaccard', 'jensenshannon',
        'kulsinski', 'mahalanobis', 'matching', 'minkowski',
        'rogerstanimoto', 'russellrao', 'seuclidean', 'sokalmichener',
        'sokalsneath', 'sqeuclidean', 'yule' (or a registered alias of
        one of these).  As a callable, a function of two 1-D vectors
        returning their distance.
    **kwargs : dict, optional
        Extra arguments to `metric`: refer to each metric documentation
        for a list of all possible arguments.  Commonly used:

        p : scalar
            The p-norm to apply for Minkowski, weighted and unweighted.
            Default: 2.
        w : ndarray
            The weight vector for metrics that support weights
            (e.g., Minkowski).
        V : ndarray
            The variance vector for standardized Euclidean.
            Default: var(X, axis=0, ddof=1)
        VI : ndarray
            The inverse of the covariance matrix for Mahalanobis.
            Default: inv(cov(X.T)).T
    out : ndarray, optional
        If not None, the condensed distance matrix Y is stored in this
        array.

    Returns
    -------
    Y : ndarray
        A condensed distance matrix.  For each :math:`i` and :math:`j`
        with :math:`i < j < m`, where m is the number of original
        observations, the metric ``dist(u=X[i], v=X[j])`` is computed
        and stored in entry ``m * i + j - ((i + 2) * (i + 1)) // 2``.

    See Also
    --------
    squareform : converts between condensed distance matrices and
                 square distance matrices.

    Notes
    -----
    See ``squareform`` for how to calculate the index of an entry, or to
    convert the condensed distance matrix to a redundant square matrix.

    Avoid passing one of this module's own distance functions as the
    callable ``metric`` (e.g. ``pdist(X, sokalsneath)``): the Python
    function would then be called :math:`{n \\choose 2}` times.  Passing
    the metric name (``pdist(X, 'sokalsneath')``) dispatches to the
    optimized C implementation instead.
    """
    # Internal convention: a metric string of the form 'test_<name>' computes
    # the distances with the succinct, verifiable, but slow Python
    # implementation of <name>, for cross-checking the optimized versions.
    X = _asarray_validated(X, sparse_ok=False, objects_ok=True, mask_ok=True,
                           check_finite=False)
    if X.ndim != 2:
        raise ValueError('A 2-dimensional array must be passed.')
    m, n = X.shape

    if isinstance(metric, str):
        lowered = metric.lower()
        metric_info = _METRIC_ALIAS.get(lowered)
        if metric_info is not None:
            # Known metric: delegate to its optimized pdist implementation.
            return metric_info.pdist_func(X, out=out, **kwargs)
        if lowered.startswith("test_"):
            metric_info = _TEST_METRICS.get(lowered)
            if metric_info is None:
                raise ValueError(f'Unknown "Test" Distance Metric: {lowered[5:]}')
            X, typ, kwargs = _validate_pdist_input(
                X, m, n, metric_info, **kwargs)
            return _pdist_callable(
                X, metric=metric_info.dist_func, out=out, **kwargs)
        raise ValueError('Unknown Distance Metric: %s' % lowered)

    if callable(metric):
        # If the callable happens to be one of our own metric functions,
        # validate and convert the input and kwargs exactly as the named
        # version would, then fall back to the generic Python loop.
        mstr = getattr(metric, '__name__', 'UnknownCustomMetric')
        metric_info = _METRIC_ALIAS.get(mstr)
        if metric_info is not None:
            X, typ, kwargs = _validate_pdist_input(
                X, m, n, metric_info, **kwargs)
        return _pdist_callable(X, metric=metric, out=out, **kwargs)

    raise TypeError('2nd argument metric must be a string identifier '
                    'or a function.')
def squareform(X, force="no", checks=True):
    """
    Convert a vector-form distance vector to a square-form distance
    matrix, and vice-versa.

    Parameters
    ----------
    X : array_like
        Either a condensed or redundant distance matrix.
    force : str, optional
        As with MATLAB(TM), if force is equal to ``'tovector'`` or
        ``'tomatrix'``, the input will be treated as a distance matrix or
        distance vector respectively.
    checks : bool, optional
        If set to False, no checks will be made for matrix
        symmetry nor zero diagonals. This is useful if it is known that
        ``X - X.T`` is small and ``diag(X)`` is close to zero.
        These values are ignored any way so they do not disrupt the
        squareform transformation.

    Returns
    -------
    Y : ndarray
        If a condensed distance matrix is passed, a redundant one is
        returned, or if a redundant one is passed, a condensed distance
        matrix is returned.

    Notes
    -----
    1. ``v = squareform(X)``

       Given a square n-by-n symmetric distance matrix ``X``,
       ``v = squareform(X)`` returns a ``n * (n-1) / 2``
       (i.e. binomial coefficient n choose 2) sized vector `v`
       where :math:`v[{n \\choose 2} - {n-i \\choose 2} + (j-i-1)]`
       is the distance between distinct points ``i`` and ``j``.
       If ``X`` is non-square or asymmetric, an error is raised.

    2. ``X = squareform(v)``

       Given a ``n * (n-1) / 2`` sized vector ``v``
       for some integer ``n >= 1`` encoding distances as described,
       ``X = squareform(v)`` returns a n-by-n distance matrix ``X``.
       The ``X[i, j]`` and ``X[j, i]`` values are set to
       :math:`v[{n \\choose 2} - {n-i \\choose 2} + (j-i-1)]`
       and all diagonal elements are zero.

    In SciPy 0.19.0, ``squareform`` stopped casting all input types to
    float64, and started returning arrays of the same dtype as the input.
    """
    X = np.ascontiguousarray(X)

    s = X.shape

    # Honour an explicit direction request before dispatching on ndim.
    if force.lower() == 'tomatrix':
        if len(s) != 1:
            raise ValueError("Forcing 'tomatrix' but input X is not a "
                             "distance vector.")
    elif force.lower() == 'tovector':
        if len(s) != 2:
            raise ValueError("Forcing 'tovector' but input X is not a "
                             "distance matrix.")

    # X = squareform(v): vector -> matrix
    if len(s) == 1:
        # An empty condensed vector encodes a single observation.
        if s[0] == 0:
            return np.zeros((1, 1), dtype=X.dtype)

        # The number of observations d must satisfy d * (d - 1) / 2 == s[0].
        # Recover d from the square root, then verify exactly that the
        # vector length is indeed a binomial coefficient.
        d = int(np.ceil(np.sqrt(s[0] * 2)))
        if d * (d - 1) != s[0] * 2:
            raise ValueError('Incompatible vector size. It must be a binomial '
                             'coefficient n choose 2 for some integer n >= 2.')

        # Allocate memory for the distance matrix.
        M = np.zeros((d, d), dtype=X.dtype)

        # The C code does not support strided input; copy if X is a view
        # into a larger array.
        X = _copy_array_if_base_present(X)

        # Fill in the values of the distance matrix.
        _distance_wrap.to_squareform_from_vector_wrap(M, X)

        # Return the distance matrix.
        return M
    elif len(s) == 2:
        if s[0] != s[1]:
            raise ValueError('The matrix argument must be square.')
        if checks:
            is_valid_dm(X, throw=True, name='X')

        # One-side of the dimensions is set here.
        d = s[0]

        if d <= 1:
            return np.array([], dtype=X.dtype)

        # Create a vector.
        v = np.zeros((d * (d - 1)) // 2, dtype=X.dtype)

        # The C code does not support strided input; copy if X is a view
        # into a larger array.
        X = _copy_array_if_base_present(X)

        # Convert the matrix to a condensed vector.
        _distance_wrap.to_vector_from_squareform_wrap(X, v)
        return v
    else:
        raise ValueError(('The first argument must be one or two dimensional '
                          'array. A %d-dimensional array is not '
                          'permitted') % len(s))
def is_valid_dm(D, tol=0.0, throw=False, name="D", warning=False):
    """
    Return True if input array is a valid distance matrix.

    Distance matrices must be 2-dimensional numpy arrays.
    They must have a zero-diagonal, and they must be symmetric.

    Parameters
    ----------
    D : array_like
        The candidate object to test for validity.
    tol : float, optional
        The distance matrix should be symmetric. `tol` is the maximum
        difference between entries ``ij`` and ``ji`` for the distance
        metric to be considered symmetric.
    throw : bool, optional
        An exception is thrown if the distance matrix passed is not valid.
    name : str, optional
        The name of the variable to checked. This is useful if
        throw is set to True so the offending variable can be identified
        in the exception message when an exception is thrown.
    warning : bool, optional
        Instead of throwing an exception, a warning message is
        raised.

    Returns
    -------
    valid : bool
        True if the variable `D` passed is a valid distance matrix.

    Notes
    -----
    Small numerical differences in `D` and `D.T` and non-zeroness of
    the diagonal are ignored if they are within the tolerance specified
    by `tol`.
    """
    D = np.asarray(D, order='c')
    valid = True
    try:
        s = D.shape
        if len(D.shape) != 2:
            if name:
                raise ValueError(('Distance matrix \'%s\' must have shape=2 '
                                  '(i.e. be two-dimensional).') % name)
            else:
                raise ValueError('Distance matrix must have shape=2 (i.e. '
                                 'be two-dimensional).')
        if tol == 0.0:
            # Exact checks: perfectly symmetric, all-zero diagonal.
            if not (D == D.T).all():
                if name:
                    raise ValueError(('Distance matrix \'%s\' must be '
                                      'symmetric.') % name)
                else:
                    raise ValueError('Distance matrix must be symmetric.')
            if not (D[range(0, s[0]), range(0, s[0])] == 0).all():
                if name:
                    raise ValueError(('Distance matrix \'%s\' diagonal must '
                                      'be zero.') % name)
                else:
                    raise ValueError('Distance matrix diagonal must be zero.')
        else:
            # Tolerant checks: symmetry and diagonal within `tol`.
            if not (D - D.T <= tol).all():
                if name:
                    raise ValueError(('Distance matrix \'%s\' must be '
                                      'symmetric within tolerance %5.5f.')
                                     % (name, tol))
                else:
                    raise ValueError('Distance matrix must be symmetric within'
                                     ' tolerance %5.5f.' % tol)
            if not (D[range(0, s[0]), range(0, s[0])] <= tol).all():
                if name:
                    raise ValueError(('Distance matrix \'%s\' diagonal must be'
                                      ' close to zero within tolerance %5.5f.')
                                     % (name, tol))
                else:
                    # Bug fix: the previous template kept the '%s' name
                    # placeholder but supplied only `tol`, so the %-format
                    # itself raised TypeError instead of this ValueError.
                    raise ValueError(('Distance matrix diagonal must be'
                                      ' close to zero within tolerance %5.5f.')
                                     % tol)
    except Exception as e:
        if throw:
            raise
        if warning:
            warnings.warn(str(e))
        valid = False
    return valid
def is_valid_y(y, warning=False, throw=False, name=None):
    """
    Return True if the input array is a valid condensed distance matrix.

    Condensed distance matrices must be 1-dimensional numpy arrays.
    Their length must be a binomial coefficient :math:`{n \\choose 2}`
    for some positive integer n.

    Parameters
    ----------
    y : array_like
        The condensed distance matrix.
    warning : bool, optional
        Invokes a warning if the variable passed is not a valid
        condensed distance matrix. The warning message explains why
        the distance matrix is not valid. `name` is used when
        referencing the offending variable.
    throw : bool, optional
        Throws an exception if the variable passed is not a valid
        condensed distance matrix.
    name : str, optional
        Used when referencing the offending variable in the
        warning or exception message.

    Returns
    -------
    valid : bool
        True if the variable `y` passed is a valid condensed distance
        matrix.
    """
    y = np.asarray(y, order='c')
    valid = True
    try:
        if len(y.shape) != 1:
            if name:
                raise ValueError(('Condensed distance matrix \'%s\' must '
                                  'have shape=1 (i.e. be one-dimensional).')
                                 % name)
            else:
                raise ValueError('Condensed distance matrix must have shape=1 '
                                 '(i.e. be one-dimensional).')
        n = y.shape[0]
        # Recover the number of observations d from the vector length and
        # verify the length is exactly a binomial coefficient d choose 2.
        d = int(np.ceil(np.sqrt(n * 2)))
        if (d * (d - 1) / 2) != n:
            if name:
                # Bug fix: the two literals previously concatenated to
                # 'i.e.there' (missing space); restore 'i.e. there' to match
                # the message in the no-name branch below.
                raise ValueError(('Length n of condensed distance matrix '
                                  '\'%s\' must be a binomial coefficient, '
                                  'i.e. there must be a k such that '
                                  '(k \\choose 2)=n)!') % name)
            else:
                raise ValueError('Length n of condensed distance matrix must '
                                 'be a binomial coefficient, i.e. there must '
                                 'be a k such that (k \\choose 2)=n)!')
    except Exception as e:
        if throw:
            raise
        if warning:
            warnings.warn(str(e))
        valid = False
    return valid
def num_obs_dm(d):
    """
    Return the number of original observations that correspond to a
    square, redundant distance matrix.

    Parameters
    ----------
    d : array_like
        The target distance matrix.

    Returns
    -------
    num_obs_dm : int
        The number of observations in the redundant distance matrix.
    """
    d = np.asarray(d, order='c')
    # Only the shape matters here; tol=inf accepts any values while still
    # requiring a square 2-D matrix.
    is_valid_dm(d, tol=np.inf, throw=True, name='d')
    return len(d)
def num_obs_y(Y):
    """
    Return the number of original observations that correspond to a
    condensed distance matrix.

    Parameters
    ----------
    Y : array_like
        Condensed distance matrix.

    Returns
    -------
    n : int
        The number of observations in the condensed distance matrix `Y`.
    """
    Y = np.asarray(Y, order='c')
    is_valid_y(Y, throw=True, name='Y')
    n_pairs = len(Y)
    if n_pairs == 0:
        raise ValueError("The number of observations cannot be determined on "
                         "an empty distance matrix.")
    # Invert n_pairs == n_obs * (n_obs - 1) / 2 and verify exactly.
    n_obs = int(np.ceil(np.sqrt(2 * n_pairs)))
    if n_obs * (n_obs - 1) / 2 != n_pairs:
        raise ValueError("Invalid condensed distance matrix passed. Must be "
                         "some k where k=(n choose 2) for some n >= 2.")
    return n_obs
def _prepare_out_argument(out, dtype, expected_shape):
if out is None:
return np.empty(expected_shape, dtype=dtype)
if out.shape != expected_shape:
raise ValueError("Output array has incorrect shape.")
if not out.flags.c_contiguous:
raise ValueError("Output array must be C-contiguous.")
if out.dtype != np.double:
raise ValueError("Output array must be double type.")
return out
def _pdist_callable(X, *, out, metric, **kwargs):
    """Evaluate `metric` on every distinct pair of rows of `X`, writing the
    results into a condensed distance vector (allocated unless `out` is
    given).
    """
    m = X.shape[0]
    dm = _prepare_out_argument(out, np.double, ((m * (m - 1)) // 2,))
    # Fill the condensed vector in the canonical order: all pairs (i, j)
    # with i < j, j varying fastest.
    next_slot = 0
    for i in range(m - 1):
        u = X[i]
        for j in range(i + 1, m):
            dm[next_slot] = metric(u, X[j], **kwargs)
            next_slot += 1
    return dm
def _cdist_callable(XA, XB, *, out, metric, **kwargs):
    """Evaluate `metric` on every (row of XA, row of XB) pair, writing the
    results into an (mA, mB) distance matrix (allocated unless `out` is
    given).
    """
    shape = (XA.shape[0], XB.shape[0])
    dm = _prepare_out_argument(out, np.double, shape)
    for i, u in enumerate(XA):
        for j, v in enumerate(XB):
            dm[i, j] = metric(u, v, **kwargs)
    return dm
def cdist(XA, XB, metric='euclidean', *, out=None, **kwargs):
"""
Compute distance between each pair of the two collections of inputs.
See Notes for common calling conventions.
Parameters
----------
XA : array_like
An :math:`m_A` by :math:`n` array of :math:`m_A`
original observations in an :math:`n`-dimensional space.
Inputs are converted to float type.
XB : array_like
An :math:`m_B` by :math:`n` array of :math:`m_B`
original observations in an :math:`n`-dimensional space.
Inputs are converted to float type.
metric : str or callable, optional
The distance metric to use. If a string, the distance function can be
'braycurtis', 'canberra', 'chebyshev', 'cityblock', 'correlation',
'cosine', 'dice', 'euclidean', 'hamming', 'jaccard', 'jensenshannon',
'kulsinski', 'mahalanobis', 'matching', 'minkowski', 'rogerstanimoto',
'russellrao', 'seuclidean', 'sokalmichener', 'sokalsneath',
'sqeuclidean', 'wminkowski', 'yule'.
**kwargs : dict, optional
Extra arguments to `metric`: refer to each metric documentation for a
list of all possible arguments.
Some possible arguments:
p : scalar
The p-norm to apply for Minkowski, weighted and unweighted.
Default: 2.
w : array_like
The weight vector for metrics that support weights (e.g., Minkowski).
V : array_like
The variance vector for standardized Euclidean.
Default: var(vstack([XA, XB]), axis=0, ddof=1)
VI : array_like
The inverse of the covariance matrix for Mahalanobis.
Default: inv(cov(vstack([XA, XB].T))).T
out : ndarray
The output array
If not None, the distance matrix Y is stored in this array.
Returns
-------
Y : ndarray
A :math:`m_A` by :math:`m_B` distance matrix is returned.
For each :math:`i` and :math:`j`, the metric
``dist(u=XA[i], v=XB[j])`` is computed and stored in the
:math:`ij` th entry.
Raises
------
ValueError
An exception is thrown if `XA` and `XB` do not have
the same number of columns.
Notes
-----
The following are common calling conventions:
1. ``Y = cdist(XA, XB, 'euclidean')``
Computes the distance between :math:`m` points using
Euclidean distance (2-norm) as the distance metric between the
points. The points are arranged as :math:`m`
:math:`n`-dimensional row vectors in the matrix X.
2. ``Y = cdist(XA, XB, 'minkowski', p=2.)``
Computes the distances using the Minkowski distance
:math:`||u-v||_p` (:math:`p`-norm) where :math:`p \\geq 1`.
3. ``Y = cdist(XA, XB, 'cityblock')``
Computes the city block or Manhattan distance between the
points.
4. ``Y = cdist(XA, XB, 'seuclidean', V=None)``
Computes the standardized Euclidean distance. The standardized
Euclidean distance between two n-vectors ``u`` and ``v`` is
.. math::
\\sqrt{\\sum {(u_i-v_i)^2 / V[x_i]}}.
V is the variance vector; V[i] is the variance computed over all
the i'th components of the points. If not passed, it is
automatically computed.
5. ``Y = cdist(XA, XB, 'sqeuclidean')``
Computes the squared Euclidean distance :math:`||u-v||_2^2` between
the vectors.
6. ``Y = cdist(XA, XB, 'cosine')``
Computes the cosine distance between vectors u and v,
.. math::
1 - \\frac{u \\cdot v}
{{||u||}_2 {||v||}_2}
where :math:`||*||_2` is the 2-norm of its argument ``*``, and
:math:`u \\cdot v` is the dot product of :math:`u` and :math:`v`.
7. ``Y = cdist(XA, XB, 'correlation')``
Computes the correlation distance between vectors u and v. This is
.. math::
1 - \\frac{(u - \\bar{u}) \\cdot (v - \\bar{v})}
{{||(u - \\bar{u})||}_2 {||(v - \\bar{v})||}_2}
where :math:`\\bar{v}` is the mean of the elements of vector v,
and :math:`x \\cdot y` is the dot product of :math:`x` and :math:`y`.
8. ``Y = cdist(XA, XB, 'hamming')``
Computes the normalized Hamming distance, or the proportion of
those vector elements between two n-vectors ``u`` and ``v``
which disagree. To save memory, the matrix ``X`` can be of type
boolean.
9. ``Y = cdist(XA, XB, 'jaccard')``
Computes the Jaccard distance between the points. Given two
vectors, ``u`` and ``v``, the Jaccard distance is the
proportion of those elements ``u[i]`` and ``v[i]`` that
disagree where at least one of them is non-zero.
10. ``Y = cdist(XA, XB, 'jensenshannon')``
Computes the Jensen-Shannon distance between two probability arrays.
Given two probability vectors, :math:`p` and :math:`q`, the
Jensen-Shannon distance is
.. math::
\\sqrt{\\frac{D(p \\parallel m) + D(q \\parallel m)}{2}}
where :math:`m` is the pointwise mean of :math:`p` and :math:`q`
and :math:`D` is the Kullback-Leibler divergence.
11. ``Y = cdist(XA, XB, 'chebyshev')``
Computes the Chebyshev distance between the points. The
Chebyshev distance between two n-vectors ``u`` and ``v`` is the
maximum norm-1 distance between their respective elements. More
precisely, the distance is given by
.. math::
d(u,v) = \\max_i {|u_i-v_i|}.
12. ``Y = cdist(XA, XB, 'canberra')``
Computes the Canberra distance between the points. The
Canberra distance between two points ``u`` and ``v`` is
.. math::
d(u,v) = \\sum_i \\frac{|u_i-v_i|}
{|u_i|+|v_i|}.
13. ``Y = cdist(XA, XB, 'braycurtis')``
Computes the Bray-Curtis distance between the points. The
Bray-Curtis distance between two points ``u`` and ``v`` is
.. math::
d(u,v) = \\frac{\\sum_i (|u_i-v_i|)}
{\\sum_i (|u_i+v_i|)}
14. ``Y = cdist(XA, XB, 'mahalanobis', VI=None)``
Computes the Mahalanobis distance between the points. The
Mahalanobis distance between two points ``u`` and ``v`` is
:math:`\\sqrt{(u-v)(1/V)(u-v)^T}` where :math:`(1/V)` (the ``VI``
variable) is the inverse covariance. If ``VI`` is not None,
``VI`` will be used as the inverse covariance matrix.
15. ``Y = cdist(XA, XB, 'yule')``
Computes the Yule distance between the boolean
vectors. (see `yule` function documentation)
16. ``Y = cdist(XA, XB, 'matching')``
Synonym for 'hamming'.
17. ``Y = cdist(XA, XB, 'dice')``
Computes the Dice distance between the boolean vectors. (see
`dice` function documentation)
18. ``Y = cdist(XA, XB, 'kulsinski')``
Computes the Kulsinski distance between the boolean
vectors. (see `kulsinski` function documentation)
19. ``Y = cdist(XA, XB, 'rogerstanimoto')``
Computes the Rogers-Tanimoto distance between the boolean
vectors. (see `rogerstanimoto` function documentation)
20. ``Y = cdist(XA, XB, 'russellrao')``
Computes the Russell-Rao distance between the boolean
vectors. (see `russellrao` function documentation)
21. ``Y = cdist(XA, XB, 'sokalmichener')``
Computes the Sokal-Michener distance between the boolean
vectors. (see `sokalmichener` function documentation)
22. ``Y = cdist(XA, XB, 'sokalsneath')``
Computes the Sokal-Sneath distance between the vectors. (see
`sokalsneath` function documentation)
23. ``Y = cdist(XA, XB, 'wminkowski', p=2., w=w)``
Computes the weighted Minkowski distance between the
vectors. (see `wminkowski` function documentation)
'wminkowski' is deprecated and will be removed in SciPy 1.8.0.
Use 'minkowski' instead.
24. ``Y = cdist(XA, XB, f)``
Computes the distance between all pairs of vectors in X
using the user supplied 2-arity function f. For example,
Euclidean distance between the vectors could be computed
as follows::
dm = cdist(XA, XB, lambda u, v: np.sqrt(((u-v)**2).sum()))
Note that you should avoid passing a reference to one of
the distance functions defined in this library. For example,::
dm = cdist(XA, XB, sokalsneath)
would calculate the pair-wise distances between the vectors in
X using the Python function `sokalsneath`. This would result in
sokalsneath being called :math:`{n \\choose 2}` times, which
is inefficient. Instead, the optimized C version is more
efficient, and we call it using the following syntax::
dm = cdist(XA, XB, 'sokalsneath')
Examples
--------
Find the Euclidean distances between four 2-D coordinates:
>>> from scipy.spatial import distance
>>> coords = [(35.0456, -85.2672),
... (35.1174, -89.9711),
... (35.9728, -83.9422),
... (36.1667, -86.7833)]
>>> distance.cdist(coords, coords, 'euclidean')
array([[ 0. , 4.7044, 1.6172, 1.8856],
[ 4.7044, 0. , 6.0893, 3.3561],
[ 1.6172, 6.0893, 0. , 2.8477],
[ 1.8856, 3.3561, 2.8477, 0. ]])
Find the Manhattan distance from a 3-D point to the corners of the unit
cube:
>>> a = np.array([[0, 0, 0],
... [0, 0, 1],
... [0, 1, 0],
... [0, 1, 1],
... [1, 0, 0],
... [1, 0, 1],
... [1, 1, 0],
... [1, 1, 1]])
>>> b = np.array([[ 0.1, 0.2, 0.4]])
>>> distance.cdist(a, b, 'cityblock')
array([[ 0.7],
[ 0.9],
[ 1.3],
[ 1.5],
[ 1.5],
[ 1.7],
[ 2.1],
[ 2.3]])
"""
# You can also call this as:
# Y = cdist(XA, XB, 'test_abc')
# where 'abc' is the metric being tested. This computes the distance
# between all pairs of vectors in XA and XB using the distance metric 'abc'
# but with a more succinct, verifiable, but less efficient implementation.
XA = np.asarray(XA)
XB = np.asarray(XB)
s = XA.shape
sB = XB.shape
if len(s) != 2:
raise ValueError('XA must be a 2-dimensional array.')
if len(sB) != 2:
raise ValueError('XB must be a 2-dimensional array.')
if s[1] != sB[1]:
raise ValueError('XA and XB must have the same number of columns '
'(i.e. feature dimension.)')
mA = s[0]
mB = sB[0]
n = s[1]
if callable(metric):
mstr = getattr(metric, '__name__', 'Unknown')
metric_info = _METRIC_ALIAS.get(mstr, None)
if metric_info is not None:
XA, XB, typ, kwargs = _validate_cdist_input(
XA, XB, mA, mB, n, metric_info, **kwargs)
return _cdist_callable(XA, XB, metric=metric, out=out, **kwargs)
elif isinstance(metric, str):
mstr = metric.lower()
metric_info = _METRIC_ALIAS.get(mstr, None)
if metric_info is not None:
cdist_fn = metric_info.cdist_func
return cdist_fn(XA, XB, out=out, **kwargs)
elif mstr.startswith("test_"):
metric_info = _TEST_METRICS.get(mstr, None)
if metric_info is None:
raise ValueError(f'Unknown "Test" Distance Metric: {mstr[5:]}')
XA, XB, typ, kwargs = _validate_cdist_input(
XA, XB, mA, mB, n, metric_info, **kwargs)
return _cdist_callable(
XA, XB, metric=metric_info.dist_func, out=out, **kwargs)
else:
raise ValueError('Unknown Distance Metric: %s' % mstr)
else:
raise TypeError('2nd argument metric must be a string identifier '
'or a function.')
|
9ed386d3715fcd61a423f7b0321ed453a5d2cb85
|
515e45025082ffbfda960635e31f99c4ca1aa7d8
|
/unix_build.py
|
83f916f26a2fe91c546f71ec2a6efc01e837a296
|
[
"Apache-2.0"
] |
permissive
|
kovidgoyal/html5-parser
|
62a3e626cba563076c7503fafb2fd83c506c61dd
|
ef7d4af932293fa04c3ac78a77b7fb2f0ac2f26d
|
refs/heads/master
| 2023-05-30T09:44:52.629086
| 2023-04-12T05:07:46
| 2023-04-12T05:07:46
| 93,229,662
| 714
| 42
|
Apache-2.0
| 2021-07-26T13:23:04
| 2017-06-03T06:56:36
|
C
|
UTF-8
|
Python
| false
| false
| 10,933
|
py
|
unix_build.py
|
#!/usr/bin/env python
# vim:fileencoding=utf-8
# License: Apache 2.0 Copyright: 2017, Kovid Goyal <kovid at kovidgoyal.net>
from __future__ import (absolute_import, division, print_function, unicode_literals)
import argparse
import errno
import glob
import os
import re
import shlex
import shutil
import subprocess
import sys
import sysconfig
from collections import namedtuple
from copy import deepcopy
from itertools import chain
try:
import configparser
except ImportError:
import ConfigParser as configparser
# Locations derived from this script's own path: objects are built into
# build/custom and the importable package is assembled in build/html5_parser.
self_path = os.path.abspath(__file__)
base = os.path.dirname(self_path)
build_dir = os.path.join(base, 'build', 'custom')
freeze_dir = os.path.join(base, 'build', 'html5_parser')
# Platform detection flags.
_plat = sys.platform.lower()
isosx = 'darwin' in _plat
iswindows = hasattr(sys, 'getwindowsversion')
is_ci = os.environ.get('CI') == 'true'
# Compiler/flag bundle threaded through all the build helpers below.
Env = namedtuple('Env', 'cc cflags ldflags linker debug cc_name cc_ver')
PKGCONFIG = os.environ.get('PKGCONFIG_EXE', 'pkg-config')
# The package version is read from setup.cfg so it is defined in one place.
cfg = configparser.ConfigParser()
cfg.read(os.path.join(base, 'setup.cfg'))
version = namedtuple('Version', 'major minor patch')(
    *map(int, cfg.get('metadata', 'version').split('.')))
def safe_makedirs(path):
    """Create *path* (with parents), ignoring "already exists" errors."""
    try:
        os.makedirs(path)
    except EnvironmentError as err:
        if err.errno == errno.EEXIST:
            return  # directory already present -- nothing to do
        raise
def add_python_flags(env, return_libs=False):
    """Append the flags needed to compile/link against the running Python.

    Mutates ``env.cflags`` and ``env.ldflags`` in place.  Returns the list
    of linker flags when *return_libs* is true, otherwise returns *env*.
    """
    # Header search paths for Python.h.
    env.cflags.extend('-I' + sysconfig.get_path(x) for x in 'include platinclude'.split())
    libs = []
    libs += sysconfig.get_config_var('LIBS').split()
    libs += sysconfig.get_config_var('SYSLIBS').split()
    fw = sysconfig.get_config_var('PYTHONFRAMEWORK')
    if fw:
        # macOS framework build: locate the framework directory by scanning
        # the paths sysconfig reports for a '<name>.framework' component.
        for var in 'data include stdlib'.split():
            val = sysconfig.get_path(var)
            if val and '/{}.framework'.format(fw) in val:
                fdir = val[:val.index('/{}.framework'.format(fw))]
                if os.path.isdir(os.path.join(fdir, '{}.framework'.format(fw))):
                    framework_dir = fdir
                    break
        else:
            raise SystemExit('Failed to find Python framework')
        libs.append(os.path.join(framework_dir, sysconfig.get_config_var('LDLIBRARY')))
    else:
        # Regular (non-framework) build: link libpythonX.Y directly.
        libs += ['-L' + sysconfig.get_config_var('LIBDIR')]
        libs += ['-lpython' + sysconfig.get_config_var('VERSION') + getattr(sys, 'abiflags', '')]
        libs += sysconfig.get_config_var('LINKFORSHARED').split()
    env.ldflags.extend(libs)
    return libs if return_libs else env
def pkg_config(pkg, *args):
    """Run pkg-config for *pkg* with *args*; return its output as tokens."""
    cmd = [PKGCONFIG, pkg] + list(args)
    try:
        raw = subprocess.check_output(cmd).decode('utf-8')
    except EnvironmentError as err:
        if err.errno == errno.ENOENT:
            # pkg-config binary itself is missing.
            raise SystemExit('pkg-config is required to build html5-parser')
        raise
    return list(filter(None, map(str, shlex.split(raw))))
def env_var(which, default='', split=os.pathsep):
    """Read environment variable *which* (falling back to *default*).

    When *split* is truthy the value is split on it and empty pieces are
    dropped; otherwise the raw string is returned.
    """
    raw = str(os.environ.get(which, default))
    if split:
        return [piece for piece in raw.split(split) if piece]
    return raw
def include_dirs():
    """Directories holding libxml2 headers (env override wins over pkg-config)."""
    if 'LIBXML_INCLUDE_DIRS' in os.environ:
        return env_var('LIBXML_INCLUDE_DIRS')
    return [flag[2:] for flag in pkg_config('libxml-2.0', '--cflags-only-I')]
def libraries():
    """Library names to link against (without the -l prefix)."""
    if iswindows:
        return env_var('LIBXML_LIBS', 'libxml2')
    if 'LIBXML_LIBS' in os.environ:
        return env_var('LIBXML_LIBS')
    return [flag[2:] for flag in pkg_config('libxml-2.0', '--libs-only-l')]
def library_dirs():
    """Directories to search for libxml2 libraries at link time."""
    if 'LIBXML_LIB_DIRS' in os.environ:
        return env_var('LIBXML_LIB_DIRS')
    return [flag[2:] for flag in pkg_config('libxml-2.0', '--libs-only-L')]
def cc_version():
    """Return (compiler, (major, minor), 'gcc'|'clang') for $CC (default gcc).

    Works by dumping the compiler's predefined macros and inspecting
    __clang__ / __GNUC__ / __GNUC_MINOR__.
    """
    cc = os.environ.get('CC', 'gcc')
    raw = subprocess.check_output([cc, '-dM', '-E', '-'], stdin=open(os.devnull, 'rb'))
    is_clang = re.search(br'^#define __clang__ 1', raw, flags=re.M) is not None
    major = int(re.search(br'#define __GNUC__ (\d+)', raw, flags=re.M).group(1))
    minor = int(re.search(br'#define __GNUC_MINOR__ (\d+)', raw, flags=re.M).group(1))
    return cc, (major, minor), 'clang' if is_clang else 'gcc'
def get_sanitize_args(cc, ccver):
    """Return the set of sanitizer flags usable with compiler *cc*/*ccver*."""
    flags = set()
    if cc == 'gcc' and ccver < (4, 8):
        return flags  # gcc this old has no usable sanitizers
    flags.update(('-fno-omit-frame-pointer', '-fsanitize=address'))
    # clang on macOS does not support -fsanitize=undefined
    if (cc == 'gcc' and ccver >= (5, 0)) or (cc == 'clang' and not isosx):
        flags.add('-fsanitize=undefined')
    # if cc == 'gcc' or (cc == 'clang' and ccver >= (4, 2)):
    #     flags.add('-fno-sanitize-recover=all')
    return flags
def init_env(debug=False, sanitize=False, native_optimizations=False, add_python=True):
    """Assemble the Env (compiler + cflags + ldflags) for a build.

    *debug*/*sanitize* switch to -ggdb and enable sanitizers;
    *native_optimizations* adds -march=native (only for plain release
    builds); *add_python* additionally pulls in Python's own flags.
    OVERRIDE_CFLAGS / OVERRIDE_LDFLAGS environment variables replace the
    computed base flags; CFLAGS / LDFLAGS are appended afterwards.
    """
    # -march=native makes no sense for debug/sanitize builds.
    native_optimizations = (native_optimizations and not sanitize and not debug)
    cc, ccver, cc_name = cc_version()
    stack_protector = '-fstack-protector'
    if ccver >= (4, 9) and cc_name == 'gcc':
        stack_protector += '-strong'
    missing_braces = ''
    if ccver < (5, 2) and cc_name == 'gcc':
        # Older gcc emits spurious missing-braces warnings.
        missing_braces = '-Wno-missing-braces'
    optimize = '-ggdb' if debug or sanitize else '-O3'
    sanitize_args = get_sanitize_args(cc_name, ccver) if sanitize else set()
    cflags = os.environ.get(
        'OVERRIDE_CFLAGS', (
            '-Wextra -Wno-missing-field-initializers -Wall -std=c99 -fvisibility=hidden'
            ' -pedantic-errors -Werror {} {} -D{}DEBUG -fwrapv {} {} -pipe {}').format(
                optimize, ' '.join(sanitize_args), ('' if debug else 'N'), stack_protector,
                missing_braces, '-march=native' if native_optimizations else ''))
    libxml_cflags = pkg_config('libxml-2.0', '--cflags')
    cflags = shlex.split(cflags) + libxml_cflags + shlex.split(sysconfig.get_config_var('CCSHARED'))
    ldflags = os.environ.get(
        'OVERRIDE_LDFLAGS', '-Wall -shared ' + ' '.join(sanitize_args) + ('' if debug else ' -O3'))
    libxml_ldflags = pkg_config('libxml-2.0', '--libs')
    ldflags = shlex.split(ldflags) + libxml_ldflags
    # User-supplied extras are appended so they can override the defaults.
    cflags += shlex.split(os.environ.get('CFLAGS', ''))
    ldflags += shlex.split(os.environ.get('LDFLAGS', ''))
    cflags.append('-pthread')
    # Bake the package version into the binary.
    cflags.extend((
        '-DMAJOR=' + str(version.major),
        '-DMINOR=' + str(version.minor),
        '-DPATCH=' + str(version.patch),
    ))
    ans = Env(cc, cflags, ldflags, cc, debug, cc_name, ccver)
    return add_python_flags(ans) if add_python else ans
def run_tool(cmd):
    """Echo *cmd* and run it; abort with its exit status if it fails."""
    if hasattr(cmd, 'lower'):
        # Accept a single shell-style string as well as an argv list.
        cmd = shlex.split(cmd)
    print(' '.join(cmd))
    status = subprocess.Popen(cmd).wait()
    if status != 0:
        raise SystemExit(status)
def newer(dest, *sources):
    """True if *dest* is missing or not strictly newer than all *sources*.

    This build script itself is implicitly treated as a source, so editing
    unix_build.py forces a rebuild.
    """
    try:
        dest_mtime = os.path.getmtime(dest)
    except EnvironmentError:
        return True  # a missing destination always needs building
    return any(os.path.getmtime(src) >= dest_mtime
               for src in chain(sources, (self_path,)))
def find_c_files(src_dir):
    """Return (sources, headers) found directly inside *src_dir*.

    '*-check.c' files are excluded.  Sources are ordered newest-first so the
    most recently edited file compiles (and therefore fails) first.
    """
    sources, headers = [], []
    for name in sorted(os.listdir(src_dir)):
        full = os.path.join(src_dir, name)
        ext = os.path.splitext(name)[1]
        if ext == '.h':
            headers.append(full)
        elif ext == '.c' and not name.endswith('-check.c'):
            sources.append(full)
    sources.sort(key=os.path.getmtime, reverse=True)
    return tuple(sources), tuple(headers)
def build_obj(src, env, headers):
    """Compile *src* into build_dir if out of date; return the object path."""
    suffix = '-debug' if env.debug else ''
    stem = os.path.basename(src).rpartition('.')[0]
    obj = os.path.join(build_dir, stem + suffix + '.o')
    if newer(obj, src, *headers):
        flags = list(env.cflags)
        if src.endswith('char_ref.c'):
            # generated table file: silence unused-constant noise
            flags.append('-Wno-unused-const-variable')
        run_tool([env.cc] + flags + ['-c', src] + ['-o', obj])
    return obj
# Debug/test executables live alongside the objects; on CI the test runner
# is placed next to the Python interpreter so it ends up on PATH.
TEST_EXE = os.path.join(build_dir, 'test')
MEMLEAK_EXE = os.path.join(build_dir, 'mem-leak-check')
if is_ci:
    TEST_EXE = os.path.join(os.path.dirname(os.path.abspath(sys.executable)), 'test-html5-parser')
# Source directories compiled into the extension, and the module suffix.
SRC_DIRS = 'src gumbo'.split()
MOD_EXT = '.so'
def link(objects, env):
    """Link *objects* into the html_parser extension module; return its path."""
    dest = os.path.join(build_dir, 'html_parser' + MOD_EXT)
    if newer(dest, *objects):
        run_tool([env.linker] + objects + ['-o', dest] + env.ldflags)
    return dest
def build(args, build_leak_check=False):
    """Compile all C sources (debug + sanitize), link the extension module,
    build the test executable, and copy results into freeze_dir.

    When *build_leak_check* is true, the mem-leak-check binary is built too.
    """
    debug_objects = []
    debug_env = init_env(debug=True, sanitize=True)
    for sdir in SRC_DIRS:
        sources, headers = find_c_files(sdir)
        if sdir == 'src':
            # src files also depend on the public gumbo header.
            headers += ('gumbo/gumbo.h', )
        debug_objects.extend(build_obj(c, debug_env, headers) for c in sources)
    link(debug_objects, debug_env)
    # Linker flags needed to embed Python into the stand-alone binaries.
    ldflags = add_python_flags(deepcopy(debug_env), return_libs=True)
    if newer(TEST_EXE, *debug_objects):
        cmd = ([debug_env.cc] + debug_env.cflags + ['test.c'] + ['-o', TEST_EXE] + ldflags)
        run_tool(cmd)
    if build_leak_check and newer(MEMLEAK_EXE, 'mem-leak-check.c', *debug_objects):
        cmd = ([debug_env.cc] + debug_env.cflags + ['mem-leak-check.c'] + [
            '-o', MEMLEAK_EXE] + debug_objects + debug_env.ldflags)
        # The leak checker is a plain executable, not a shared module.
        cmd = [x for x in cmd if x not in {'-fPIC', '-pthread', '-shared'}]
        run_tool(cmd)
    # Assemble the importable package: compiled modules + pure-python files.
    for mod in glob.glob(os.path.join(build_dir, '*' + MOD_EXT)):
        shutil.copy2(mod, freeze_dir)
    for mod in glob.glob(os.path.join('src', 'html5_parser', '*.py')):
        shutil.copy2(mod, freeze_dir)
# Default argument handed to the test-runner executable.
# NOTE(review): not referenced anywhere in this file's visible code --
# confirm whether it is still used before removing.
TEST_COMMAND = ['run_tests.py']
def add_python_path(env, path):
    """Prepend the absolutized *path* to PYTHONPATH in mapping *env*.

    Returns *env* (mutated in place) for convenience.
    """
    existing = env.get('PYTHONPATH', '')
    parts = [p for p in (os.path.abspath(path), existing) if p]
    env['PYTHONPATH'] = os.pathsep.join(parts)
    return env
def option_parser():
    """Build the command-line parser for this build script.

    Supported actions: build, test (the default), try, leak.  Any further
    positional arguments are collected into ``rest`` and forwarded to the
    chosen action.
    """
    p = argparse.ArgumentParser()
    p.add_argument(
        'action',
        nargs='?',
        default='test',
        choices='build test try leak'.split(),
        # Fix: the help text used to claim the default was 'build', but
        # default= above has always been 'test'.
        help='Action to perform (default is test)')
    p.add_argument('rest', nargs='*')
    return p
def main():
    """Entry point: dispatch on the requested action (build/test/try/leak)."""
    args = option_parser().parse_args()
    os.chdir(base)
    safe_makedirs(build_dir), safe_makedirs(freeze_dir)
    if args.action == 'build':
        build(args)
    elif args.action == 'test':
        build(args)
        # Disable ASAN's exit-time leak checking: the interpreter itself
        # holds allocations at exit that would otherwise fail the run.
        os.environ['ASAN_OPTIONS'] = 'leak_check_at_exit=0'
        add_python_path(os.environ, os.path.dirname(freeze_dir))
        print('\nrunning tests...')
        # Replace this process with the debug test runner.
        os.execlp(TEST_EXE, TEST_EXE, 'run_tests.py', *args.rest)
    elif args.action == 'try':
        build(args)
        os.environ['ASAN_OPTIONS'] = 'leak_check_at_exit=0'
        add_python_path(os.environ, os.path.dirname(freeze_dir))
        # Run an arbitrary snippet (args.rest[0]) with html5_parser imported.
        os.execlp(
            TEST_EXE, TEST_EXE, '-c', 'from html5_parser import *; ' + args.rest[0], *args.rest[1:])
    elif args.action == 'leak':
        build(args, build_leak_check=True)
        os.environ['MEMLEAK_EXE'] = os.path.abspath(MEMLEAK_EXE)
        os.environ['ASAN_OPTIONS'] = 'leak_check_at_exit=0'
        add_python_path(os.environ, os.path.dirname(freeze_dir))
        os.execlp(TEST_EXE, TEST_EXE, 'run_tests.py')


if __name__ == '__main__':
    main()
|
2913efe52848982b21ccc09f0debb6339ceccb5d
|
aa37406dba4d5ce557bf0777fe29db145d237d10
|
/tests/serialization/test_model_utils.py
|
c188bb2a26bac3d0a72aa307d1ef754d5c1abfa8
|
[
"MIT"
] |
permissive
|
marcosschroh/dataclasses-avroschema
|
ef7c4963bf28203cb7e5c068162726fe3df53167
|
d0097dfff09881a95a9d0ef48f58ac73f826fe36
|
refs/heads/master
| 2023-08-18T18:16:45.284134
| 2023-08-15T09:11:56
| 2023-08-15T09:11:56
| 205,188,270
| 173
| 50
|
MIT
| 2023-09-13T14:37:42
| 2019-08-29T14:58:17
|
Python
|
UTF-8
|
Python
| false
| false
| 2,375
|
py
|
test_model_utils.py
|
import enum
import json
import typing
from dataclasses import dataclass
import pytest
from dataclasses_avroschema import AvroModel
from tests.serialization.test_serialization import CLASSES_DATA_BINARY
@pytest.mark.parametrize("klass, data, avro_binary, avro_json, instance_json, python_dict", CLASSES_DATA_BINARY)
def test_to_dict_to_json(klass, data, avro_binary, avro_json, instance_json, python_dict):
    # Round-trip each fixture model through to_dict/to_json and compare
    # against the expected plain-python and JSON representations.
    obj = klass(**data)
    assert obj.to_dict() == python_dict
    assert obj.to_json() == json.dumps(instance_json)
def test_dacite_config():
    """Without strict_unions_match, a Union field deserializes to the first
    matching member of the Union (here Car), not necessarily the original."""
    @dataclass
    class Car(AvroModel):
        total: int

    @dataclass
    class Bus(AvroModel):
        driver: str
        total: int

    @dataclass
    class Trip(AvroModel):
        transport: typing.Union[Car, Bus]

    payload = {"driver": "Marcos", "total": 10}
    bus = Bus.parse_obj(data=payload)
    raw = Trip(transport=bus).serialize()

    # It matches a Car object because it is the first element in the `Union`
    # In order to match the proper element use `strict_unions_match` (check next test)
    assert Trip.deserialize(raw, create_instance=False) == {"transport": {"total": 10}}

    round_tripped = Trip.deserialize(raw)
    assert round_tripped.transport == Car(total=10)
def test_custom_dacite_config():
    """Meta.dacite_config entries are honoured: strict_unions_match resolves
    the Union to the exact original member, and a user-supplied empty `cast`
    list must not break the default tuple casting."""
    class Color(str, enum.Enum):
        BLUE = "BLUE"
        RED = "RED"

    @dataclass
    class Car(AvroModel):
        total: int

    @dataclass
    class Bus(AvroModel):
        driver: str
        total: int
        color: Color
        routes: typing.Tuple[str]

    @dataclass
    class Trip(AvroModel):
        transport: typing.Union[Car, Bus]

        class Meta:
            dacite_config = {
                "strict_unions_match": True,
                "strict": True,
                "cast": [],  # this should not override the default cast behavior
            }

    payload = {"driver": "Marcos", "total": 10, "color": Color.RED, "routes": ["route 53", "routes 51"]}
    bus = Bus.parse_obj(data=payload)
    raw = Trip(transport=bus).serialize()

    assert Trip.deserialize(raw, create_instance=False) == {
        "transport": {"color": Color.RED, "driver": "Marcos", "routes": ("route 53", "routes 51"), "total": 10}
    }

    restored = Trip.deserialize(raw)
    assert restored.transport == bus
|
6a83cabf7bdcf48fc985c749f322540ad15b1056
|
a158b5b0cc491912ad0166fd891efd5abb951f51
|
/examples/python/qfssample.py
|
8a4cfd0294051b49a3c0466fb2918bb976e35caf
|
[
"Apache-2.0"
] |
permissive
|
quantcast/qfs
|
467651a3af7e77e779f199429d74dde67cee8c10
|
0d9dab4e51b27dde5869dd948e26b62e769e7d95
|
refs/heads/master
| 2023-08-04T12:47:56.591858
| 2023-05-06T02:42:34
| 2023-05-06T02:42:34
| 5,447,814
| 372
| 136
|
Apache-2.0
| 2023-05-06T01:22:44
| 2012-08-17T03:59:37
|
C++
|
UTF-8
|
Python
| false
| false
| 5,732
|
py
|
qfssample.py
|
#
# $Id$
#
# Author: Thilee Subramaniam
#
# Copyright 2012,2016 Quantcast Corporation. All rights reserved.
#
# This file is part of Kosmos File System (KFS).
#
# Licensed under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied. See the License for the specific language governing
# permissions and limitations under the License.
#
# NOTE: The python support for QFS is EXPERIMENTAL at this stage. The
# python extension module has not been tested on large scale
# deployments yet. Please exercise caution while using the
# python module.
"""
This simple test tries to create some files and directories, and write some
data at specific offsets in the created files. Then it tries to ensure that
the created paths are valid, and that the file contents are as expected.
To run this script,
- Prepare qfs.so as described in the file 'doc/ClientDeveloperDoc'
- Ensure that the QFS metaserver and chunkserver are running.
- Ensure that the metaserver host/port matches the contents of argv[1].
- Ensure that the PYTHONPATH and LD_LIBRARY_PATH are set accordingly.
eg: PYTHONPATH=${PYTHONPATH}:~/code/qfs/build/lib/lib64/python \
LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:~/code/qfs/build/lib \
python ./qfssample.py qfssample.cfg
"""
import os
import sys
import time
import errno
import qfs
def ParseConfig(config):
    """Parse the QFS client config file *config*; return (host, port).

    Bug fix: the original ignored its *config* argument and always read the
    hard-coded file 'qfssample.cfg'; it also leaked the file handle.
    Blank lines and '#' comment lines are skipped.  Exits only when BOTH
    metaServer.name and metaServer.port are missing (original behavior).
    """
    host = ''
    port = -1
    with open(config) as fh:
        for line in fh:
            if line.startswith("#") or len(line.strip()) == 0:
                continue
            s = line.strip()
            key = s.split('=')[0].strip()
            if key == 'metaServer.name':
                host = s.split('=')[1].strip()
            elif key == 'metaServer.port':
                port = int(s.split('=')[1].strip())
    if (host, port) == ('', -1):
        sys.exit('Failed to parse config file')
    return (host, port)
def main():
    # Smoke test for the QFS python bindings: create dirs/files, write at
    # scattered offsets, re-open, then verify listings and file contents.
    # NOTE: Python 2 syntax (print statements, `except IOError, err`).
    if len(sys.argv) < 2:
        sys.exit('Usage: %s config_file' % sys.argv[0])
    client = None
    server = ParseConfig(sys.argv[1])
    try:
        client = qfs.client(server)
    except:
        print "Unable to start the QFS client."
        print "Make sure that the meta- and chunkservers are running."
        sys.exit(1)
    # Test fixtures: one replicated file under dir1 and one at the base.
    testBaseDir = "qfssample_base"
    testDirs = ("dir1", "dir2")
    testFile1 = "dir1/file1"
    testFile2 = "file2"
    file1Content = "Cu populo nusquam alienum vim, graece latine prodesset ex qui, quo ea lucilius intellegat."
    # Sparse writes: offset -> data.  The 1MB+ offset exercises chunk
    # boundaries (QFS chunks are large; exact size not visible here).
    file2ContentA = { 0 : "are ", # at offset 0
                      40 : "you ", # at offset 40
                      1030 : "always ",
                      1048580 : "wrong?" }
    file2ContentB = { 500 : "really " }
    client.cd("/")
    try: # just in case we didn't cleanup last time
        client.rmdirs(testBaseDir)
    except IOError, err:
        pass
    client.mkdir(testBaseDir)
    client.cd(testBaseDir)
    for td in testDirs:
        client.mkdir(td)
    time.sleep(1)
    print "Created directories."
    client.cd("/" + testBaseDir)
    # Second argument to create() is the replication factor.
    f1 = client.create(testFile1, 2)
    f2 = client.create(testFile2, 3)
    f1.write(file1Content)
    for offset, content in file2ContentA.items():
        f2.seek(offset)
        f2.write(content)
    print "Created files."
    f1.sync()
    f1.close()
    f2.sync()
    f2.close()
    time.sleep(1)
    print "Closed files (first time)."
    # Re-open and apply the second batch of writes to file2.
    f1 = client.open(testFile1, 'r')
    f2 = client.open(testFile2, 'w')
    print "Opened files."
    for offset, content in file2ContentB.items():
        f2.seek(offset)
        f2.write(content)
    f1.sync()
    f1.close()
    f2.sync()
    f2.close()
    time.sleep(1)
    print "Closed files (second time)."
    # Verify if everything is fine.
    client.cd("/")
    expected = ("dir1", "dir2", "file2")
    for node in client.readdir(testBaseDir):
        print node
        if node in (".", ".."):
            continue
        if node not in expected:
            sys.exit("%s is not in expected list %r" % (node, expected))
    # NOTE(review): ("file1") is a plain string, not a 1-tuple, so the
    # membership test below is a substring check -- likely intended
    # ("file1",).  It happens to pass for the exact name "file1".
    expected = ("file1")
    for node in client.readdir(testBaseDir + "/dir1"):
        print node
        if node in (".", ".."):
            continue
        if node not in expected:
            sys.exit("%s is not in expected list %r" % (node, expected))
    print "Created paths are in order."
    filePath1 = testBaseDir + "/" + testFile1
    filePath2 = testBaseDir + "/" + testFile2
    print "Stat for %s is %r" % (filePath1, client.stat(filePath1))
    print "Stat for %s is %r" % (filePath2, client.stat(filePath2))
    # Verify file1: prefix, a mid-file word, and the resulting position.
    f1 = client.open(filePath1, 'r')
    out = f1.read(2)
    if (out != "Cu"):
        # NOTE(review): sys.exit takes ONE argument; the trailing ', out'
        # here (and in the similar calls below) makes it a 2-arg call --
        # should be '% out' string formatting.
        sys.exit("Error: Expected 'Cu', got '%s'.", out)
    f1.seek(31)
    out = f1.read(6)
    if (out != "graece"):
        sys.exit("Error: Expected 'graece', got '%s'.", out)
    pos = f1.tell()
    if pos != 37:
        sys.exit("Error: Expected 'pos = 37', got 'pos = %d'.", pos)
    f1.close()
    print "File1 contents are in order"
    # Verify file2: "always " was written at 1030, so 1032..1034 is "way";
    # "wrong?" at 1048580 means bytes 1048580.. hold "wrong".
    f2 = client.open(filePath2, 'r')
    f2.seek(1032)
    out = f2.read(3)
    if (out != "way"):
        sys.exit("Error: Expected 'way', got '%s'.", out)
    f2.seek(1048578)
    out = f2.read(7)
    if out[2:] != "wrong":
        sys.exit("Error: Expected '..wrong', got '%r'.", out)
    f2.close()
    print "File2 contents are in order"
    # Cleanup.
    client.rmdirs(testBaseDir)
if __name__ == '__main__':
    main()
|
0694e475a7c83cfdd932a150ff747f58a5de4a7b
|
4506d81df5ae98078e5cbe79f613514ad12b1c83
|
/nipype/pipeline/plugins/lsf.py
|
dbd7f7d8107d99cadd4f468532cf94a87a6e04c3
|
[
"BSD-3-Clause",
"Apache-2.0"
] |
permissive
|
nipy/nipype
|
d52eba1b98fda68e24d006ac0d5701fc8a531b9c
|
03a236320fa229299d637ff9af97865a6ae76aca
|
refs/heads/master
| 2023-08-28T10:36:07.020541
| 2023-08-25T13:40:09
| 2023-08-25T13:40:09
| 791,477
| 692
| 569
|
NOASSERTION
| 2023-09-11T06:04:51
| 2010-07-22T17:06:49
|
Python
|
UTF-8
|
Python
| false
| false
| 4,798
|
py
|
lsf.py
|
"""Parallel workflow execution via LSF
"""
import os
import re
from time import sleep
from ... import logging
from ...interfaces.base import CommandLine
from .base import SGELikeBatchManagerBase, logger
# Interface-level logger shared by nipype interfaces; its level is raised
# temporarily around noisy CommandLine invocations below.
iflogger = logging.getLogger("nipype.interface")
class LSFPlugin(SGELikeBatchManagerBase):
    """Execute using LSF Cluster Submission

    The plugin_args input to run can be used to control the LSF execution.
    Currently supported options are:

    - template : template to use for batch job submission
    - bsub_args : arguments to be prepended to the job execution script in the
      bsub call
    """

    def __init__(self, **kwargs):
        # Default batch-script header handed to the base class.
        template = """
#$ -S /bin/sh
"""
        # Defaults; each may be overridden via plugin_args below.
        self._retry_timeout = 2
        self._max_tries = 2
        self._bsub_args = ""
        if "plugin_args" in kwargs and kwargs["plugin_args"]:
            if "retry_timeout" in kwargs["plugin_args"]:
                self._retry_timeout = kwargs["plugin_args"]["retry_timeout"]
            if "max_tries" in kwargs["plugin_args"]:
                self._max_tries = kwargs["plugin_args"]["max_tries"]
            if "bsub_args" in kwargs["plugin_args"]:
                self._bsub_args = kwargs["plugin_args"]["bsub_args"]
        super().__init__(template, **kwargs)

    def _is_pending(self, taskid):
        """LSF lists a status of 'PEND' when a job has been submitted but is
        waiting to be picked up, and 'RUN' when it is actively being processed.
        But _is_pending should return True until a job has finished and is
        ready to be checked for completeness. So return True if status is
        either 'PEND' or 'RUN'"""
        cmd = CommandLine("bjobs", resource_monitor=False, terminal_output="allatonce")
        cmd.inputs.args = "%d" % taskid
        # check lsf task
        # Silence interface logging while polling bjobs.
        oldlevel = iflogger.level
        iflogger.setLevel(logging.getLevelName("CRITICAL"))
        result = cmd.run(ignore_exception=True)
        iflogger.setLevel(oldlevel)
        # logger.debug(result.runtime.stdout)
        # Finished jobs report DONE (success) or EXIT (failure); anything
        # else (PEND/RUN/...) means still pending.
        if "DONE" in result.runtime.stdout or "EXIT" in result.runtime.stdout:
            return False
        else:
            return True

    def _submit_batchtask(self, scriptfile, node):
        """Submit *scriptfile* for *node* via bsub; return the LSF task id.

        Retries up to self._max_tries times, sleeping self._retry_timeout
        seconds between attempts.
        """
        cmd = CommandLine(
            "bsub",
            environ=dict(os.environ),
            resource_monitor=False,
            terminal_output="allatonce",
        )
        # Plugin-level bsub args, optionally overridden or extended per-node.
        bsubargs = ""
        if self._bsub_args:
            bsubargs = self._bsub_args
        if "bsub_args" in node.plugin_args:
            if "overwrite" in node.plugin_args and node.plugin_args["overwrite"]:
                bsubargs = node.plugin_args["bsub_args"]
            else:
                bsubargs += " " + node.plugin_args["bsub_args"]
        # Ensure stdout/stderr land somewhere (both default to the same log).
        if "-o" not in bsubargs:  # -o outfile
            bsubargs = "{} -o {}".format(bsubargs, scriptfile + ".log")
        if "-e" not in bsubargs:
            # -e error file
            bsubargs = "{} -e {}".format(bsubargs, scriptfile + ".log")
        # Job name: LOGNAME[.hierarchy].id, reversed so the node id leads.
        if node._hierarchy:
            jobname = ".".join((dict(os.environ)["LOGNAME"], node._hierarchy, node._id))
        else:
            jobname = ".".join((dict(os.environ)["LOGNAME"], node._id))
        jobnameitems = jobname.split(".")
        jobnameitems.reverse()
        jobname = ".".join(jobnameitems)
        cmd.inputs.args = "{} -J {} sh {}".format(
            bsubargs,
            jobname,
            scriptfile,
        )  # -J job_name_spec
        logger.debug("bsub " + cmd.inputs.args)
        oldlevel = iflogger.level
        iflogger.setLevel(logging.getLevelName("CRITICAL"))
        tries = 0
        while True:
            try:
                result = cmd.run()
            except Exception as e:
                if tries < self._max_tries:
                    tries += 1
                    sleep(self._retry_timeout)  # sleep 2 seconds and try again.
                else:
                    # Restore logging before surfacing the failure.
                    iflogger.setLevel(oldlevel)
                    raise RuntimeError(
                        "\n".join(
                            (
                                "Could not submit lsf task for node %s" % node._id,
                                str(e),
                            )
                        )
                    )
            else:
                break
        iflogger.setLevel(oldlevel)
        # retrieve lsf taskid
        # bsub prints e.g. "Job <12345> is submitted ..."; parse the number.
        match = re.search(r"<(\d*)>", result.runtime.stdout)
        if match:
            taskid = int(match.groups()[0])
        else:
            raise OSError(
                "Can't parse submission job output id: %s" % result.runtime.stdout
            )
        self._pending[taskid] = node.output_dir()
        logger.debug("submitted lsf task: %d for node %s" % (taskid, node._id))
        return taskid
|
25d2e2c8644732ff4d68807ea3aa346778710e00
|
2c092a0bc0d04821d61fbbda5f75fc04812ead62
|
/demo.py
|
76043417de7cca74fec16f3ae13fe4c65e3a4e7e
|
[
"MIT"
] |
permissive
|
coffeehat/BIT-srun-login-script
|
9867c24030062fd7d75cb3bea65bfbd7f1756a6a
|
f9b1f18f14e1d9d36690e532ff36e49f2565c4bd
|
refs/heads/master
| 2022-09-14T16:11:41.243543
| 2022-09-13T12:27:19
| 2022-09-13T12:27:19
| 252,195,553
| 135
| 47
|
MIT
| 2022-09-13T12:27:20
| 2020-04-01T14:17:26
|
Python
|
UTF-8
|
Python
| false
| false
| 151
|
py
|
demo.py
|
from BitSrunLogin.LoginManager import LoginManager
# Minimal usage example: authenticate against the BIT srun campus-network
# gateway.  Replace the placeholder strings with real credentials first.
lm = LoginManager()
lm.login(
    username = "Your srun account",
    password = "Your password"
)
|
7fb3437e3e881c9b5adb6b2ccea566da55e1211a
|
5a52ccea88f90dd4f1acc2819997fce0dd5ffb7d
|
/alipay/aop/api/response/AlipayPcreditLoanLoanUnclearQueryResponse.py
|
e7dbe4ae0f833bc1773472f62146b67c0207148d
|
[
"Apache-2.0"
] |
permissive
|
alipay/alipay-sdk-python-all
|
8bd20882852ffeb70a6e929038bf88ff1d1eff1c
|
1fad300587c9e7e099747305ba9077d4cd7afde9
|
refs/heads/master
| 2023-08-27T21:35:01.778771
| 2023-08-23T07:12:26
| 2023-08-23T07:12:26
| 133,338,689
| 247
| 70
|
Apache-2.0
| 2023-04-25T04:54:02
| 2018-05-14T09:40:54
|
Python
|
UTF-8
|
Python
| false
| false
| 1,463
|
py
|
AlipayPcreditLoanLoanUnclearQueryResponse.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.response.AlipayResponse import AlipayResponse
from alipay.aop.api.domain.LoanApplyBudgetVO import LoanApplyBudgetVO
class AlipayPcreditLoanLoanUnclearQueryResponse(AlipayResponse):
    """Response model for the pcredit "unclear loans" query: a list of
    LoanApplyBudgetVO items plus a total count."""

    def __init__(self):
        super(AlipayPcreditLoanLoanUnclearQueryResponse, self).__init__()
        self._loan_apply_budget_list = None
        self._total = None

    @property
    def loan_apply_budget_list(self):
        return self._loan_apply_budget_list

    @loan_apply_budget_list.setter
    def loan_apply_budget_list(self, value):
        # Non-list values are ignored (attribute left unchanged), matching
        # the SDK-wide setter convention.
        if not isinstance(value, list):
            return
        # Pass through ready-made VO instances; coerce plain dicts.
        self._loan_apply_budget_list = [
            item if isinstance(item, LoanApplyBudgetVO)
            else LoanApplyBudgetVO.from_alipay_dict(item)
            for item in value
        ]

    @property
    def total(self):
        return self._total

    @total.setter
    def total(self, value):
        self._total = value

    def parse_response_content(self, response_content):
        response = super(AlipayPcreditLoanLoanUnclearQueryResponse, self).parse_response_content(response_content)
        if 'loan_apply_budget_list' in response:
            self.loan_apply_budget_list = response['loan_apply_budget_list']
        if 'total' in response:
            self.total = response['total']
|
6f19d6c1588057bcb992dc5cde5134aab17c20e7
|
45e376ae66b78b17788b1d3575b334b2cb1d0b1c
|
/checkov/terraform/checks/resource/aws/ECSServicePublicIP.py
|
2d4326dc4f7f418fb5b6e3dc31dba580b423f892
|
[
"Apache-2.0"
] |
permissive
|
bridgecrewio/checkov
|
aeb8febed2ed90e61d5755f8f9d80b125362644d
|
e64cbd27ffb6f09c2c9f081b45b7a821a3aa1a4d
|
refs/heads/main
| 2023-08-31T06:57:21.990147
| 2023-08-30T23:01:47
| 2023-08-30T23:01:47
| 224,386,599
| 5,929
| 1,056
|
Apache-2.0
| 2023-09-14T20:10:23
| 2019-11-27T08:55:14
|
Python
|
UTF-8
|
Python
| false
| false
| 1,345
|
py
|
ECSServicePublicIP.py
|
from checkov.common.models.enums import CheckCategories
from checkov.terraform.checks.resource.base_resource_negative_value_check import BaseResourceNegativeValueCheck
from typing import Any, List
class ECSServicePublicIP(BaseResourceNegativeValueCheck):
    def __init__(self):
        """
        NIST.800-53.r5 AC-21, NIST.800-53.r5 AC-3, NIST.800-53.r5 AC-3(7), NIST.800-53.r5 AC-4, NIST.800-53.r5 AC-4(21),
        NIST.800-53.r5 AC-6, NIST.800-53.r5 SC-7, NIST.800-53.r5 SC-7(11), NIST.800-53.r5 SC-7(16),
        NIST.800-53.r5 SC-7(20), NIST.800-53.r5 SC-7(21), NIST.800-53.r5 SC-7(3), NIST.800-53.r5 SC-7(4),
        NIST.800-53.r5 SC-7(9)
        ECS services should not have public IP addresses assigned to them automatically
        """
        super().__init__(
            name="Ensure ECS services do not have public IP addresses assigned to them automatically",
            id="CKV_AWS_333",
            categories=[CheckCategories.LOGGING],
            supported_resources=["aws_ecs_service"],
        )

    def get_inspected_key(self) -> str:
        # Config path evaluated by the base negative-value check.
        return "network_configuration/[0]/assign_public_ip"

    def get_forbidden_values(self) -> List[Any]:
        # assign_public_ip = true is the flagged configuration.
        return [True]


check = ECSServicePublicIP()
|
e5120fb34e3c4cfe77f99c98a39d4db824bb4dc2
|
e75a40843a8738b84bd529a549c45776d09e70d9
|
/samples/server/petstore/python-blueplanet/app/openapi_server/__main__.py
|
333f12d0795f0b41a18a397171d83271e0a1c15f
|
[
"Apache-2.0"
] |
permissive
|
OpenAPITools/openapi-generator
|
3478dbf8e8319977269e2e84e0bf9960233146e3
|
8c2de11ac2f268836ac9bf0906b8bb6b4013c92d
|
refs/heads/master
| 2023-09-02T11:26:28.189499
| 2023-09-02T02:21:04
| 2023-09-02T02:21:04
| 133,134,007
| 17,729
| 6,577
|
Apache-2.0
| 2023-09-14T19:45:32
| 2018-05-12T09:57:56
|
Java
|
UTF-8
|
Python
| false
| false
| 341
|
py
|
__main__.py
|
#!/usr/bin/env python3
import connexion
from openapi_server import encoder
def main():
    """Create the connexion application, register the petstore API
    specification, and serve it on port 8080."""
    application = connexion.App(__name__, specification_dir='./swagger/')
    application.app.json_encoder = encoder.JSONEncoder
    application.add_api('swagger.yaml', arguments={'title': 'OpenAPI Petstore'})
    application.run(port=8080)


if __name__ == '__main__':
    main()
|
019ada4bdef2286d7c51932f91b1c845b9520b3a
|
a38bf459ae380f67e0de22f7106a8df4385a7076
|
/rules_python_gapic/pytest.py
|
72095851636e9f25a9b35957994d7d3d944c2816
|
[
"Apache-2.0"
] |
permissive
|
googleapis/gapic-generator-python
|
73ce9d52f6f5bb2652d49b237b24263d6637b1da
|
4eee26181e8db9fb5144eef5a76f178c1594e48a
|
refs/heads/main
| 2023-09-04T11:12:14.728757
| 2023-09-02T10:34:44
| 2023-09-02T10:34:44
| 129,809,857
| 116
| 65
|
Apache-2.0
| 2023-09-12T18:57:01
| 2018-04-16T21:47:04
|
Python
|
UTF-8
|
Python
| false
| false
| 612
|
py
|
pytest.py
|
import sys
import pytest
import os
if __name__ == "__main__":
    # The generated file name will be of the form `<module_name>_pytest.py`.
    # The generated gapic will be in a directory `<module_name>_srcjar.py``.
    # Extract the `<module_name>`` from this file, and use it to determine the
    # directory of the generated gapic.
    # Only run `pytest` on the `tests` directory.
    module_name = os.path.abspath(__file__).replace("_pytest.py", "")
    src_directory = f"{module_name}_srcjar.py"
    # propagate pytest's exit status to the calling process
    sys.exit(
        pytest.main(["--disable-pytest-warnings", "--quiet", f"{src_directory}/tests"])
    )
|
2acb1fb9f8560536f92efb9cef46bdffb9a601d9
|
7e6f0efd6f4733d09e61b4c6658455e6727cd48f
|
/seed/models/ubid_models.py
|
c89f7084f385a7f39a8c6038edb3c1301daec5d6
|
[
"BSD-2-Clause"
] |
permissive
|
SEED-platform/seed
|
0e4a6a2fa93f4c2528d0c295163a91f836a4253d
|
680b6a2b45f3c568d779d8ac86553a0b08c384c8
|
refs/heads/develop
| 2023-09-01T10:46:25.502697
| 2023-08-30T18:44:21
| 2023-08-30T18:44:21
| 25,450,714
| 108
| 75
|
NOASSERTION
| 2023-09-13T22:18:47
| 2014-10-20T04:26:53
|
Python
|
UTF-8
|
Python
| false
| false
| 2,166
|
py
|
ubid_models.py
|
# !/usr/bin/env python
# encoding: utf-8
"""
SEED Platform (TM), Copyright (c) Alliance for Sustainable Energy, LLC, and other contributors.
See also https://github.com/seed-platform/seed/main/LICENSE.md
"""
from django.db import models
from django.db.models import Q
from django.db.models.signals import post_save, pre_delete
from django.dispatch import receiver
from seed.models import PropertyState, TaxLotState
from seed.utils.ubid import decode_unique_ids
class UbidModel(models.Model):
    """A UBID string attached to either a PropertyState or a TaxLotState.

    `preferred` marks the UBID that is mirrored onto the state's own `ubid`
    field; the mirroring itself is done by the signal handlers in this module.
    """
    ubid = models.CharField(max_length=255, null=False, blank=False)
    # exactly one of the two FKs is expected to be set per row (both are
    # nullable; the partial unique constraints below assume this split)
    property = models.ForeignKey(PropertyState, on_delete=models.CASCADE, null=True)
    taxlot = models.ForeignKey(TaxLotState, on_delete=models.CASCADE, null=True)
    preferred = models.BooleanField(default=False)
    class Meta:
        # Two partial indexes to handle uniqueness with null values
        constraints = [
            models.UniqueConstraint(
                fields=['ubid', 'property_id'],
                name='unique_ubid_for_property',
                condition=Q(taxlot_id__isnull=True)
            ),
            models.UniqueConstraint(
                fields=['ubid', 'taxlot_id'],
                name='unique_ubid_for_taxlot',
                condition=Q(property_id__isnull=True)
            ),
        ]
@receiver(post_save, sender=UbidModel)
def post_save_ubid_model(sender, **kwargs):
    """
    Update state.ubid for the preferred UBID
    """
    ubid_model: UbidModel = kwargs.get('instance')
    # whichever FK is populated is the owning state
    state = ubid_model.property or ubid_model.taxlot
    if ubid_model.preferred and state.ubid != ubid_model.ubid:
        # mirror the newly preferred ubid onto the state and re-derive decoded ids
        state.ubid = ubid_model.ubid
        state.save()
        decode_unique_ids(state)
    elif not ubid_model.preferred and state.ubid == ubid_model.ubid:
        # this ubid was demoted from preferred; clear the stale copy on the state
        state.ubid = None
        state.save()
@receiver(pre_delete, sender=UbidModel)
def pre_delete_ubid_model(sender, **kwargs):
    """
    If a preferred ubid is deleted, remove the state.ubid
    """
    ubid_model: UbidModel = kwargs.get('instance')
    if ubid_model.preferred:
        # the owning state's mirrored ubid would dangle; clear it
        state = ubid_model.property or ubid_model.taxlot
        state.ubid = None
        state.save()
|
da585b0085bca15afb6bba8ff851cd62a7f20f60
|
d05ff6dda43729011b7d469b0a2bc02ed66b6342
|
/frappe/custom/doctype/doctype_layout/patches/convert_web_forms_to_doctype_layout.py
|
7d22ee3c7dc139855d1d234aa7384894768f18de
|
[
"MIT"
] |
permissive
|
frappe/frappe
|
520c14bed3810c3360629a81dcc33f0ebe21ac4d
|
dd8f314bf4a8a4739eebbfac741abc533ac58bc1
|
refs/heads/develop
| 2023-08-30T19:29:10.406706
| 2023-08-30T11:20:40
| 2023-08-30T11:20:40
| 1,864,194
| 5,955
| 3,735
|
MIT
| 2023-09-14T16:08:04
| 2011-06-08T08:14:16
|
Python
|
UTF-8
|
Python
| false
| false
| 476
|
py
|
convert_web_forms_to_doctype_layout.py
|
import frappe
def execute():
    """Migration patch: create a DocType Layout for every existing Web Form,
    copying its doctype, title, route and field list."""
    for web_form_name in frappe.get_all("Web Form", pluck="name"):
        web_form = frappe.get_doc("Web Form", web_form_name)
        doctype_layout = frappe.get_doc(
            dict(
                doctype="DocType Layout",
                document_type=web_form.doc_type,
                name=web_form.title,
                route=web_form.route,
                # only fields with a fieldname are carried over
                fields=[
                    dict(fieldname=d.fieldname, label=d.label) for d in web_form.web_form_fields if d.fieldname
                ],
            )
        ).insert()
        print(doctype_layout.name)
|
1c3057fbc9f767500952cb84da18598cd4082341
|
f0a2768eee431c20ea9f4c2c589fdb7ea878f62f
|
/pmfx.py
|
bfe385823f9abbc5c58a8977a1812ef7bda20ab3
|
[
"MIT"
] |
permissive
|
polymonster/pmfx-shader
|
5c7f58f8325e7ce1d8c41bc76d9d261b23b01921
|
86e04a766885f3ac9946af0b14c9e500d2671a7d
|
refs/heads/master
| 2023-07-07T08:30:21.884207
| 2023-07-02T10:50:40
| 2023-07-02T10:50:40
| 195,820,234
| 337
| 13
|
MIT
| 2023-04-19T10:09:16
| 2019-07-08T13:43:01
|
Python
|
UTF-8
|
Python
| false
| false
| 119,986
|
py
|
pmfx.py
|
import os
import sys
import json
import jsn
import re
import math
import subprocess
import platform
import copy
import threading
import cgu
import hashlib
import pmfx_pipeline
# print error with colour
def print_error(msg):
    """Print *msg* to stdout in red (ANSI escape codes), flushing immediately."""
    red, reset = '\033[91m', '\033[0m'
    print("{}{}{}".format(red, msg, reset), flush=True)
# print warning with colour
def print_warning(msg):
    """Print *msg* to stdout in yellow (ANSI escape codes), flushing immediately."""
    yellow, reset = '\033[93m', '\033[0m'
    print("{}{}{}".format(yellow, msg, reset), flush=True)
# print ok with colour
def print_ok(msg):
    """Print *msg* to stdout in green (ANSI escape codes), flushing immediately."""
    green, reset = '\033[92m', '\033[0m'
    print("{}{}{}".format(green, msg, reset), flush=True)
# paths and info for current build environment
class BuildInfo:
    """Mutable bag of paths/options for the current build environment.

    NOTE(review): list-valued class attributes (inputs, extensions, ...) are
    shared between instances unless reassigned per instance.
    """
    shader_platform = "" # hlsl, glsl, metal, spir-v, pssl
    shader_sub_platform = "" # gles
    shader_version = "0" # 4_0, 5_0 (hlsl), 330, 420 (glsl), 1.1, 2.0 (metal)
    metal_sdk = "" # macosx, iphoneos, appletvos
    metal_min_os = "" # iOS (9.0 - 13.0), macOS (10.11 - 10.15)
    debug = False # generate shader with debug info
    inputs = [] # array of input files or directories
    extensions = [] # array of shader extension currently for glsl/gles
    nvn_extensions = [] # array of shader extensions for nvn/glsl
    root_dir = "" # cwd dir to run from
    build_config = "" # json contents of build_config.json
    pmfx_dir = "" # location of pmfx
    tools_dir = "" # location of pmtech/tools
    output_dir = "" # dir to build shader binaries
    struct_dir = "" # dir to output the shader structs
    crate_dir = "" # dir to output the shader structs (rust crate)
    temp_dir = "" # dir to put temp shaders
    this_file = "" # the file u are reading
    macros_file = "" # pmfx.h
    platform_macros_file = "" # glsl.h, hlsl.h, metal.h
    macros_source = "" # source code inside _shader_macros.h
    error_code = 0 # non-zero if any shaders failed to build
    nvn_exe = "" # optional executable path for nvn
    cmdline_string = "" # stores the full cmdline passed
    num_threads = 4 # number of threadsto distribute work over
    v_flip = False # glsl only (flip-y coord in vertex shader for consistency with other platforms)
    args = "" # anything passed after -args is concatonated into a string and forwarded to other executables (fxc, glsl validator etc)
    force = False # force compilation even if dependecies are up-to-date
# info and contents of a .pmfx file
class PmfxInfo:
    """Info and contents of a single .pmfx file."""
    includes = "" # list of included files
    json = "" # json object containing techniques
    json_text = "" # json as text to reload mutable dictionary
    source = "" # source code of the entire shader +includes
# info of pmfx technique permutation which is a combination of vs, ps or cs
class TechniquePermutationInfo:
    """One technique permutation: a combination of vs, ps or cs plus its metadata.

    NOTE(review): list-valued class attributes are shared between instances
    unless reassigned per instance.
    """
    pmfx_name = "" # name of the .pmfx shader containing technique
    technique_name = "" # name of technique
    technique = "" # technique / permutation json
    permutation = "" # permutation options
    shader_version = "0" # shader version to compile with
    source = "" # conditioned source code for permute
    id = "" # permutation id
    cbuffers = [] # list of cbuffers source code
    functions = [] # list of functions source code
    textures = [] # technique / permutation textures
    shader = [] # list of shaders, vs, ps or cs
    resource_decl = [] # list of shader resources (textures / buffers)
    threads = [] # number of compute threads, x, y, z
    error_code = 0 # return value from compilation
    error_list = [] # list of errors / warnings from compilation
    output_list = [] # list of output from compilation
# info about a single vs, ps, or cs
class SingleShaderInfo:
    """Decomposed source for one vs, ps or cs entry point."""
    shader_type = "" # ie. vs (vertex), ps (pixel), cs (compute)
    main_func_name = "" # entry point ie. vs_main
    functions_source = "" # source code of all used functions
    main_func_source = "" # source code of main function
    input_struct_name = "" # name of input to shader ie. vs_input
    instance_input_struct_name = "" # name of instance input to vertex shader
    output_struct_name = "" # name of output from shader ie. vs_output
    input_decl = "" # struct decl of input struct
    instance_input_decl = "" # struct decl of instance input struct
    output_decl = "" # struct decl of shader output
    struct_decls = "" # decls of all generic structs
    resource_decl = [] # decl of only used resources by shader
    cbuffers = [] # array of cbuffer decls used by shader
    sv_semantics = [] # array of tuple [(semantic, variable name), ..]
    duplicate = False
# used for eval to allow undefined variables
class Reflector(object):
    """Mapping stand-in whose every subscript yields 0, so eval() can treat
    undefined identifiers as present-but-falsy."""
    def __getitem__(self, name):
        # any key resolves to zero
        return 0
# parse command line args passed in
def parse_args():
    """Populate the module-level BuildInfo (_info) from sys.argv.

    Mutates _info in place; exits the process for -help or when a required
    argument is missing.
    """
    global _info
    # set defaults
    _info.compiled = True
    _info.cbuffer_offset = 4
    _info.texture_offset = 32
    _info.stage_in = 1
    _info.v_flip = False
    _info.debug = False
    _info.args = ""
    if len(sys.argv) == 1:
        display_help()
    for arg in sys.argv:
        _info.cmdline_string += arg + " "
    for i in range(1, len(sys.argv)):
        # NOTE(review): these first four use substring matching, so a value
        # argument that happens to contain e.g. "-help" would also trigger.
        if "-help" in sys.argv[i]:
            display_help()
        if "-root_dir" in sys.argv[i]:
            os.chdir(sys.argv[i + 1])
        if "-shader_platform" in sys.argv[i]:
            _info.shader_platform = sys.argv[i + 1]
        if "-shader_version" in sys.argv[i]:
            _info.shader_version = sys.argv[i + 1]
        if sys.argv[i] == "-i":
            j = i + 1
            while j < len(sys.argv) and sys.argv[j][0] != '-':
                _info.inputs.append(sys.argv[j])
                j = j + 1
            # NOTE(review): rebinding the for-loop variable does not skip the
            # consumed values; they are harmlessly re-tested as flags above.
            i = j
        elif sys.argv[i] == "-o":
            _info.output_dir = sys.argv[i + 1]
        elif sys.argv[i] == "-h":
            _info.struct_dir = sys.argv[i + 1]
        elif sys.argv[i] == "-rs":
            _info.crate_dir = sys.argv[i + 1]
        elif sys.argv[i] == "-t":
            _info.temp_dir = sys.argv[i + 1]
        elif sys.argv[i] == "-f":
            _info.force = True
        elif sys.argv[i] == "-source":
            _info.compiled = False
        elif sys.argv[i] == "-cbuffer_offset":
            _info.cbuffer_offset = sys.argv[i + 1]
        elif sys.argv[i] == "-texture_offset":
            # fix: previously clobbered cbuffer_offset instead of texture_offset
            _info.texture_offset = sys.argv[i + 1]
        elif sys.argv[i] == "-stage_in":
            _info.stage_in = sys.argv[i + 1]
        elif sys.argv[i] == "-v_flip":
            _info.v_flip = True
        elif sys.argv[i] == "-d":
            # fix: -d is documented to generate a debuggable shader; it
            # previously just re-assigned the default False
            _info.debug = True
        elif sys.argv[i] == "-metal_min_os":
            _info.metal_min_os = sys.argv[i+1]
        elif sys.argv[i] == "-metal_sdk":
            _info.metal_sdk = sys.argv[i+1]
        elif sys.argv[i] == "-nvn_exe":
            _info.nvn_exe = sys.argv[i+1]
        elif sys.argv[i] == "-num_threads":
            _info.num_threads = int(sys.argv[i+1])
        elif sys.argv[i] == "-extensions":
            j = i + 1
            while j < len(sys.argv) and sys.argv[j][0] != '-':
                _info.extensions.append(sys.argv[j])
                j = j + 1
            i = j
        elif sys.argv[i] == "-nvn_extensions":
            j = i + 1
            while j < len(sys.argv) and sys.argv[j][0] != '-':
                _info.nvn_extensions.append(sys.argv[j])
                j = j + 1
            i = j
        elif sys.argv[i] == "-args":
            # everything after -args is forwarded verbatim to the platform compiler
            j = i + 1
            _info.args = ""
            while j < len(sys.argv):
                _info.args += sys.argv[j] + " "
                j = j + 1
            i = j
    required = [
        "-shader_platform",
        "-i",
        "-o",
        "-t"
    ]
    if _info.shader_platform == "nvn":
        required.append("-nvn_exe")
    missing = False
    for r in required:
        if r not in sys.argv:
            print_error("error: missing required argument {}".format(r))
            missing = True
    if missing:
        print("exit")
        sys.exit(1)
# display help for args
def display_help():
    """Print command-line usage to stdout, flush, and exit the process with 0."""
    print("commandline arguments:")
    print(" -v1 compile using pmfx version 1 (legacy) will use v2 otherwise")
    print(" -num_threads 4 (default) <supply threadpool size>")
    print(" -shader_platform <hlsl, glsl, gles, spirv, metal, pssl, nvn>")
    print(" -shader_version (optional) <shader version unless overridden in technique>")
    print(" hlsl: 3_0, 4_0 (default), 5_0, 6_0 [-v2]")
    print(" glsl: 200, 330 (default), 420, 450")
    print(" gles: 100, 300, 310, 350")
    print(" spirv: 420 (default), 450")
    print(" metal: 2.0 (default)")
    print(" nvn: (glsl)")
    print(" -metal_sdk [metal only] <iphoneos, macosx, appletvos>")
    print(" -metal_min_os (optional) [metal only] <9.0 - 13.0 (ios), 10.11 - 10.15 (macos)>")
    print(" -nvn_exe [nvn only] <path to execulatble that can compile glsl to nvn glslc>")
    print(" -extensions (optional) [glsl/gles only] <list of glsl extension strings separated by spaces>")
    print(" -nvn_extensions (optional) [nvn only] <list of nvn glsl extension strings separated by spaces>")
    print(" -i <list of input files or directories separated by spaces>")
    print(" -o <output dir for shaders>")
    print(" -t <output dir for temp files>")
    print(" -h (optional) <output dir header file with shader structs>")
    print(" -d (optional) generate debuggable shader")
    print(" -f (optional) force build / compile even if dependencies are up-to-date")
    print(" -rs (optional) <output dir for rust crate with shader structs> [-v2]")
    print(" -root_dir (optional) <directory> sets working directory here")
    print(" -source (optional) (generates platform source into -o no compilation)")
    print(" -stage_in <0, 1> (optional) [metal only] (default 1) ")
    print(" uses stage_in for metal vertex buffers, 0 uses raw buffers")
    print(" -cbuffer_offset (optional) [metal only] (default 4) ")
    print(" specifies an offset applied to cbuffer locations to avoid collisions with vertex buffers")
    print(" -texture_offset (optional) [vulkan only] (default 32) ")
    print(" specifies an offset applied to texture locations to avoid collisions with buffers")
    print(" -v_flip (optional) [glsl only] (inserts glsl uniform to conditionally flip verts in the y axis)")
    print(" -args (optional) anything passed after this will be forward to the platform specific compiler")
    print(" for example for fxc.exe /Zpr or dxc.exe -Zpr etc.. check the compiler help for options")
    sys.stdout.flush()
    sys.exit(0)
# duplicated from pmtech/tools/scripts/util
def get_platform_name():
    """Return the host platform id: "win64", "osx" or "linux"."""
    if os.name != "posix":
        return "win64"
    # posix: distinguish linux from mac
    return "linux" if platform.system() == "Linux" else "osx"
# gets shader sub platform name, gles (glsl) spirv (glsl)
def shader_sub_platform():
    """Return the effective shader platform: the sub platform (gles, spirv)
    when one is set on _info, otherwise the main shader platform."""
    sub_platforms = ["gles", "spirv"]
    if _info.shader_sub_platform in sub_platforms:
        return _info.shader_sub_platform
    return _info.shader_platform
# get extension for windows
def get_platform_exe():
    """Return the executable suffix for the host platform (".exe" on windows, else "")."""
    if get_platform_name() == "win64":
        return ".exe"
    return ""
def sanitize_file_path(path):
    """Normalise *path* separators to os.sep and restore '@' drive markers
    (see unstrict_json_safe_filename) back to ':'."""
    for sep in ("/", "\\"):
        path = path.replace(sep, os.sep)
    return path.replace("@", ":")
# duplicated from pmtech/tools/scripts/dependencies
def unstrict_json_safe_filename(file):
    """Make *file* safe for embedding in unstrict json: forward slashes only
    and ':' encoded as '@'."""
    return file.replace("\\", "/").replace(":", "@")
def create_dependency(file):
    """Return a dependency record {"name", "timestamp"} for *file*, using its
    sanitized path and current mtime."""
    file = sanitize_file_path(file)
    modified_time = os.path.getmtime(file)
    return {"name": file, "timestamp": float(modified_time)}
# wrap a string in quotes
def in_quotes(string):
    """Return *string* wrapped in double quotes."""
    return '"{}"'.format(string)
# convert signed to unsigned integer in a c like manner for comparisons
def us(v):
    """Map the -1 "not found" sentinel to sys.maxsize so str.find results
    compare like unsigned ints; every other value passes through."""
    return sys.maxsize if v == -1 else v
# calls subprocess, waits and gets output errors
def call_wait_subprocess(cmdline):
    """Run *cmdline* in a shell, wait for completion, and return
    (exit_code, stderr_lines, stdout_lines) with blank lines and known
    compiler banner lines stripped."""
    # stdout lines starting with any of these are dropped (compiler boilerplate)
    exclude_output = [
        "Microsoft (R)",
        "Copyright (C)",
        "compilation object save succeeded;"
    ]
    # NOTE(review): shell=True with a raw string; callers must pass trusted cmdlines
    p = subprocess.Popen(cmdline, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    error_code = p.wait()
    output, err = p.communicate()
    err_str = err.decode('utf-8')
    err_str = err_str.strip(" ")
    err_list = err_str.split("\n")
    out_str = output.decode('utf-8')
    out_str = out_str.strip(" ")
    out_list = out_str.split("\n")
    clean_err = []
    for e in err_list:
        if len(e) > 0:
            clean_err.append(e.strip())
    clean_out = []
    for o in out_list:
        o = o.strip()
        exclude = False
        for ex in exclude_output:
            if o.startswith(ex):
                exclude = True
                break
        if len(o) > 0 and not exclude:
            clean_out.append(o)
    return error_code, clean_err, clean_out
# recursively merge members of 2 json objects
def member_wise_merge(j1, j2):
    """Recursively fold the keys of *j2* into *j1* (mutates j1): missing keys
    are copied, dict-valued collisions merge recursively, anything else keeps
    j1's value. Returns j1."""
    for key, value in j2.items():
        if key not in j1:
            j1[key] = value
        elif type(j1[key]) is dict:
            j1[key] = member_wise_merge(j1[key], value)
    return j1
# remove comments, taken from stub_format.py ()
# remove C-style comments ("//" line and "/* */" block) from source text
def remove_comments(file_data):
    """Strip // line comments and /* */ block comments from *file_data*.

    Scans each line left-to-right, which fixes two defects in the previous
    implementation: a block comment that opens and closes on the same line no
    longer swallows the rest of the line, and a "//" appearing after "/*" on
    the same line is treated as commented text rather than a line comment.
    Lines wholly inside a multi-line block comment are dropped entirely (no
    newline emitted), matching the previous multi-line behaviour; every other
    input line is emitted with a trailing newline.
    """
    conditioned = ""
    inside_block = False
    for line in file_data.split("\n"):
        began_inside = inside_block
        out = ""
        pos = 0
        end = len(line)
        while pos < end:
            if inside_block:
                close = line.find("*/", pos)
                if close == -1:
                    pos = end
                else:
                    inside_block = False
                    pos = close + 2
            else:
                cpos = line.find("//", pos)
                mcpos = line.find("/*", pos)
                if cpos != -1 and (mcpos == -1 or cpos < mcpos):
                    # line comment wins when it appears first
                    out += line[pos:cpos]
                    pos = end
                elif mcpos != -1:
                    out += line[pos:mcpos]
                    inside_block = True
                    pos = mcpos + 2
                else:
                    out += line[pos:]
                    pos = end
        if began_inside and inside_block and out == "":
            # line swallowed whole by a multi-line block comment
            continue
        conditioned += out + "\n"
    return conditioned
# tidy shader source with consistent spaces, remove tabs and comments to make subsequent operations easier
def sanitize_shader_source(shader_source):
    """Tidy shader source for later parsing: tabs become spaces, runs of spaces
    collapse to one, comments are stripped."""
    # replace tabs with spaces
    shader_source = shader_source.replace("\t", " ")
    # replace all spaces with single space
    shader_source = re.sub(' +', ' ', shader_source)
    # remove comments
    shader_source = remove_comments(shader_source)
    return shader_source
# parse and split into an array, from a list of textures or cbuffers etc
def parse_and_split_block(code_block):
    """Return the whitespace-split tokens found between the first '{' and the
    first '};' of *code_block*, with ';' ':' '(' ')' ',' treated as spaces."""
    begin = code_block.find("{") + 1
    finish = code_block.find("};")
    body = code_block[begin:finish]
    for punct in (";", ":", "(", ")", ","):
        body = body.replace(punct, " ")
    body = re.sub(' +', ' ', body)
    return body.split()
# find the end of a body text enclosed in brackets
def enclose_brackets(text):
    """Return the index one past the '}' matching the first '{' in *text*
    (brace-depth scan)."""
    pos = text.find("{")
    depth = 1
    limit = len(text)
    while depth > 0 and pos < limit:
        pos += 1
        ch = text[pos:pos + 1]
        if ch == "{":
            depth += 1
        elif ch == "}":
            depth -= 1
    return pos + 1
# replace all "input" and "output" tokens to "_input" and "_ouput" to avoid glsl keywords
# todo: this should be replaced with "replace_token"
def replace_io_tokens(text):
    """Rename "input"/"output" identifiers to "_input"/"_output" (they are
    reserved words in glsl), but only when delimited so that substrings such
    as "inputs" or member accesses are left alone.

    NOTE(review): output is re-joined with single spaces, so original
    whitespace/newlines are not preserved exactly.
    """
    token_io = ["input", "output"]
    token_io_replace = ["_input", "_output"]
    token_post_delimiters = ['.', ';', ' ', '(', ')', ',', '-', '+', '*', '/']
    token_pre_delimiters = [' ', '\t', '\n', '(', ')', ',', '-', '+', '*', '/']
    split = text.split(' ')
    split_replace = []
    for token in split:
        for i in range(0, len(token_io)):
            if token_io[i] in token:
                last_char = len(token_io[i])
                first_char = token.find(token_io[i])
                t = token[first_char:first_char+last_char+1]
                l = len(t)
                # preceding character must be a delimiter (or start of token)
                if first_char > 0 and token[first_char-1] not in token_pre_delimiters:
                    continue
                if l > last_char:
                    # following character must also be a delimiter
                    c = t[last_char]
                    if c in token_post_delimiters:
                        token = token.replace(token_io[i], token_io_replace[i])
                        continue
                elif l == last_char:
                    # token ends exactly at the keyword
                    token = token.replace(token_io[i], token_io_replace[i])
                    continue
        split_replace.append(token)
    replaced_text = ""
    for token in split_replace:
        replaced_text += token + " "
    return replaced_text
# get info filename for dependency checking
def get_resource_info_filename(filename, build_dir):
    """Return (info_json_path, base_filename, dir_path) for a shader file,
    where info.json lives under _info.output_dir/<stem>/.

    NOTE(review): the build_dir parameter is unused; the path is always built
    from _info.output_dir.
    """
    global _info
    base_filename = os.path.basename(filename)
    dir_path = os.path.dirname(filename)
    info_filename = os.path.join(_info.output_dir, os.path.splitext(base_filename)[0], "info.json")
    return info_filename, base_filename, dir_path
# check file time stamps and build times to determine if rebuild needs to happen
# returns true if the file does not need re-building, false if a file/dependency is out of date or input has changed
def check_dependencies(filename, included_files):
    """Return True when the previously written info.json shows the shader and
    all its dependencies (this script, macro headers, includes) are up-to-date
    and the command line is unchanged; False forces a rebuild."""
    global _info
    # look for .json file
    file_list = list()
    file_list.append(sanitize_file_path(os.path.join(_info.root_dir, filename)))
    file_list.append(sanitize_file_path(_info.this_file))
    file_list.append(sanitize_file_path(_info.macros_file))
    file_list.append(sanitize_file_path(_info.platform_macros_file))
    info_filename, base_filename, dir_path = get_resource_info_filename(filename, _info.output_dir)
    for f in included_files:
        file_list.append(sanitize_file_path(os.path.join(_info.root_dir, f)))
    if os.path.exists(info_filename) and os.path.getsize(info_filename) > 0:
        info_file = open(info_filename, "r")
        info = json.loads(info_file.read())
        # rebuild whenever the invocation differs from the recorded one
        if "cmdline" not in info or _info.cmdline_string != info["cmdline"]:
            return False
        for prev_built_with_file in info["files"]:
            sanitized_name = sanitize_file_path(prev_built_with_file["name"])
            if sanitized_name in file_list:
                if not os.path.exists(sanitized_name):
                    return False
                if prev_built_with_file["timestamp"] < os.path.getmtime(sanitized_name):
                    info_file.close()
                    print(os.path.basename(sanitized_name) + ": out-of-date", flush=True)
                    return False
            else:
                # recorded dependency no longer part of the current file set
                print(sanitized_name + ": out-of-date", flush=True)
                return False
        # a previous failed build always retries
        if "failures" in info.keys():
            if len(info["failures"]) > 0:
                return False
        info_file.close()
    else:
        return False
    return True
# find generic structs
def find_structs(shader_text, special_structs):
    """Return the source of every "struct ... };" in *shader_text* whose body
    does not mention any name in *special_structs* (uses the module's
    find_token for whole-word matching)."""
    struct_list = []
    start = 0
    while start != -1:
        op = start
        start = find_token("struct", shader_text[start:])
        if start == -1:
            break
        # find_token returned an offset relative to the slice; rebase it
        start = op + start
        end = shader_text.find("};", start)
        if end != -1:
            end += 2
        found_struct = shader_text[start:end]
        valid = True
        for ss in special_structs:
            if ss in found_struct:
                valid = False
        if valid:
            struct_list.append(shader_text[start:end] + "\n")
        start = end
    return struct_list
def find_c_structs(shader_text):
    """Return structs suitable for export to C headers (skips the shader
    stage-interface structs except vs_input/vs_instance_input)."""
    special_structs = ["vs_output", "ps_input", "ps_output"]
    return find_structs(shader_text, special_structs)
def find_struct_declarations(shader_text):
    """Return only the generic structs, skipping all shader stage-interface
    structs (vs/ps inputs and outputs)."""
    special_structs = ["vs_input", "vs_output", "ps_input", "ps_output", "vs_instance_input"]
    return find_structs(shader_text, special_structs)
# find shader resources
def find_shader_resources(shader_text):
    """Return the body of the 'declare_texture_samplers' (legacy name) or
    'shader_resources' block with tabs stripped, or just "\\n" when neither
    block exists."""
    anchor = shader_text.find("declare_texture_samplers")
    if anchor == -1:
        anchor = shader_text.find("shader_resources")
        if anchor == -1:
            return "\n"
    begin = shader_text.find("{", anchor) + 1
    finish = shader_text.find("};", begin)
    body = shader_text[begin:finish] + "\n"
    return body.replace("\t", "") + "\n"
# find struct in shader source
def find_struct(shader_text, decl):
    """Return the full "… };" body of the struct whose declaration starts with
    *decl* and is followed by a delimiter (avoids prefix matches such as
    "struct vs_input_ex" when looking for "struct vs_input"); "" if absent."""
    delimiters = [" ", "\n", "{"]
    start = 0
    while True:
        start = shader_text.find(decl, start)
        if start == -1:
            return ""
        for d in delimiters:
            if shader_text[start+len(decl)] == d:
                end = shader_text.find("};", start)
                end += 2
                if start != -1 and end != -1:
                    return shader_text[start:end] + "\n\n"
                else:
                    return ""
        # prefix match only; keep searching past it
        start += len(decl)
# find cbuffers in source
def find_constant_buffers(shader_text):
    """Return the source of every "cbuffer ... };" block in *shader_text*
    (whole-word matched via the module's find_token)."""
    cbuffer_list = []
    start = 0
    while start != -1:
        pos = find_token("cbuffer", shader_text[start:])
        if pos == -1:
            break
        # pos is relative to the slice; rebase
        start += pos
        end = shader_text.find("};", start)
        if end != -1:
            end += 2
        cbuffer_list.append(shader_text[start:end] + "\n")
        start = end
    return cbuffer_list
# find function source
def find_function(shader_text, decl):
    """Return the full source (signature + balanced-brace body) of the first
    function whose text starts with *decl*, or "" when not found."""
    start = shader_text.find(decl)
    if start == -1:
        return ""
    body_pos = shader_text.find("{", start)
    # walk the brace stack to find the matching closing brace
    bracket_stack = ["{"]
    text_len = len(shader_text)
    while len(bracket_stack) > 0 and body_pos < text_len:
        body_pos += 1
        character = shader_text[body_pos:body_pos+1]
        if character == "{":
            bracket_stack.insert(0, "{")
        if character == "}" and bracket_stack[0] == "{":
            bracket_stack.pop(0)
    body_pos += 1
    return shader_text[start:body_pos] + "\n\n"
# find functions in source
def find_functions(shader_text):
    """Heuristically collect the source of every function definition in
    *shader_text*: locates '(' tokens whose '{' opens before the next ';'
    (i.e. definitions, not prototypes), then extends backwards over the name
    and return type and forwards over the balanced-brace body."""
    deliminator_list = [";", "\n"]
    function_list = []
    start = 0
    while 1:
        start = shader_text.find("(", start)
        if start == -1:
            break
        # make sure the { opens before any other deliminator
        deliminator_pos = shader_text.find(";", start)
        body_pos = shader_text.find("{", start)
        if deliminator_pos < body_pos:
            start = deliminator_pos
            continue
        # find the function name and return type
        function_name = shader_text.rfind(" ", 0, start)
        name_str = shader_text[function_name:start]
        # skip pmfx conditional blocks, which also look like "name("
        if name_str.find("if:") != -1:
            start = deliminator_pos
            continue
        function_return_type = 0
        for delim in deliminator_list:
            decl_start = shader_text.rfind(delim, 0, function_name)
            if decl_start != -1:
                function_return_type = decl_start
        bracket_stack = ["{"]
        text_len = len(shader_text)
        while len(bracket_stack) > 0 and body_pos < text_len:
            body_pos += 1
            character = shader_text[body_pos:body_pos+1]
            if character == "{":
                bracket_stack.insert(0, "{")
            if character == "}" and bracket_stack[0] == "{":
                bracket_stack.pop(0)
        body_pos += 1
        function_list.append(shader_text[function_return_type:body_pos] + "\n\n")
        start = body_pos
    return function_list
# find #include statements
def find_includes(file_text, root):
    """Return the paths of #include "..." statements in *file_text*, resolved
    against *root*; the module-level added_includes list de-duplicates across
    the whole recursive scan."""
    global added_includes
    include_list = []
    start = 0
    while 1:
        start = file_text.find("#include", start)
        if start == -1:
            break
        start = file_text.find("\"", start) + 1
        end = file_text.find("\"", start)
        if start == -1 or end == -1:
            break
        include_name = file_text[start:end]
        include_path = os.path.join(root, include_name)
        include_path = sanitize_file_path(include_path)
        # only report each include once per shader set
        if include_path not in added_includes:
            include_list.append(include_path)
            added_includes.append(include_path)
    return include_list
# recursively search for #includes
def add_files_recursive(filename, root):
    """Read *filename* (resolved against *root* if needed), sanitize it, and
    recursively prepend the source of every #include. Returns
    (combined_source, all_included_paths)."""
    file_path = filename
    if not os.path.exists(filename):
        file_path = os.path.join(root, filename)
    included_file = open(file_path, "r")
    shader_source = included_file.read()
    included_file.close()
    shader_source = sanitize_shader_source(shader_source)
    sub_root = os.path.dirname(file_path)
    include_list = find_includes(shader_source, sub_root)
    # reversed so earlier includes end up first in the concatenated source
    for include_file in reversed(include_list):
        included_source, sub_includes = add_files_recursive(include_file, sub_root)
        shader_source = included_source + "\n" + shader_source
        include_list = include_list + sub_includes
    return shader_source, include_list
# gather include files and
def create_shader_set(filename, root):
    """Gather *filename* plus all its includes into one source string, create
    the per-shader output directory, and return (source, included_files)."""
    global _info
    global added_includes
    # reset the de-dup list used by find_includes for this shader set
    added_includes = []
    shader_file_text, included_files = add_files_recursive(filename, root)
    shader_base_name = os.path.basename(filename)
    shader_set_dir = os.path.splitext(shader_base_name)[0]
    shader_set_build_dir = os.path.join(_info.output_dir, shader_set_dir)
    if not os.path.exists(shader_set_build_dir):
        os.makedirs(shader_set_build_dir)
    return shader_file_text, included_files
# gets constants only for this current permutation
def get_permutation_conditionals(pmfx_json, permutation):
    """Return a copy of *pmfx_json* whose "constants" have any
    "permutation(...)" conditional groups evaluated against *permutation*
    ((name, value) pairs): matching groups are merged in, the conditional keys
    themselves are removed."""
    block = pmfx_json.copy()
    if "constants" in block:
        # find conditionals
        conditionals = []
        cblock = block["constants"]
        for key in cblock.keys():
            if key.find("permutation(") != -1:
                conditionals.append((key, cblock[key]))
        # check conditionals valid
        for c in conditionals:
            # remove conditional permutation
            del block["constants"][c[0]]
            full_condition = c[0].replace("permutation", "")
            full_condition = full_condition.replace("&&", "and")
            full_condition = full_condition.replace("||", "or")
            gv = dict()
            for v in permutation:
                gv[str(v[0])] = v[1]
            try:
                # evaluated against the permutation defines as globals
                if eval(full_condition, gv):
                    block["constants"] = member_wise_merge(block["constants"], c[1])
            except NameError:
                # condition references a define this permutation doesn't have
                pass
    return block
# get list of technique / permutation specific
def generate_technique_texture_variables(_tp):
    """Return [(type, name, unit)] for the technique's texture_samplers.

    NOTE(review): returns None (not []) when the technique has no
    texture_samplers key -- callers must tolerate both; confirm before
    changing.
    """
    technique_textures = []
    if "texture_samplers" not in _tp.technique.keys():
        return
    textures = _tp.technique["texture_samplers"]
    for t in textures.keys():
        technique_textures.append((textures[t]["type"], t, textures[t]["unit"]))
    return technique_textures
# generate cbuffer meta data, c structs for access in code
def generate_technique_constant_buffers(pmfx_json, _tp):
    """Lay out the technique's (and inherited) constants into the material_data
    cbuffer: sorts members largest-first, records per-constant offsets in the
    technique json, pads to 16-byte alignment, and returns
    (technique_json, c_struct_source, cbuffer_source)."""
    offset = 0
    # (type name, size in float elements)
    constant_info = [["", 0], ["float", 1], ["float2", 2], ["float3", 3], ["float4", 4], ["float4x4", 16]]
    technique_constants = [_tp.technique]
    technique_json = _tp.technique
    # find inherited constants
    if "inherit_constants" in _tp.technique.keys():
        for inherit in _tp.technique["inherit_constants"]:
            inherit_conditionals = get_permutation_conditionals(pmfx_json[inherit], _tp.permutation)
            technique_constants.append(inherit_conditionals)
    # find all constants
    shader_constant = []
    shader_struct = []
    pmfx_constants = dict()
    for tc in technique_constants:
        if "constants" in tc.keys():
            # sort constants largest element count first (insertion sort)
            sorted_constants = []
            for const in tc["constants"]:
                for ci in constant_info:
                    if ci[0] == tc["constants"][const]["type"]:
                        cc = [const, ci[1]]
                        pos = 0
                        for sc in sorted_constants:
                            if cc[1] > sc[1]:
                                sorted_constants.insert(pos, cc)
                                break
                            pos += 1
                        if pos >= len(sorted_constants):
                            sorted_constants.append(cc)
            for const in sorted_constants:
                const_name = const[0]
                const_elems = const[1]
                pmfx_constants[const_name] = tc["constants"][const_name]
                pmfx_constants[const_name]["offset"] = offset
                pmfx_constants[const_name]["num_elements"] = const_elems
                shader_constant.append("    " + tc["constants"][const_name]["type"] + " " + "m_" + const_name + ";\n")
                shader_struct.append("    " + tc["constants"][const_name]["type"] + " " + "m_" + const_name + ";\n")
                offset += const_elems
    if offset == 0:
        return _tp.technique, "", ""
    # we must pad to 16 bytes alignment
    pre_pad_offset = offset
    diff = offset / 4
    next = math.ceil(diff)
    pad = (next - diff) * 4
    if pad != 0:
        shader_constant.append("    " + constant_info[int(pad)][0] + " " + "m_padding" + ";\n")
        shader_struct.append("    " + constant_info[int(pad)][0] + " " + "m_padding" + ";\n")
        offset += pad
    cb_str = "cbuffer material_data : register(b7)\n"
    cb_str += "{\n"
    for sc in shader_constant:
        cb_str += sc
    cb_str += "};\n"
    # append permutation string to shader c struct
    skips = [
        _info.shader_platform.upper(),
        _info.shader_sub_platform.upper()
    ]
    permutation_name = ""
    if int(_tp.id) != 0:
        for p in _tp.permutation:
            # platform / capability defines don't contribute to the name
            if p[0] in skips or p[0] in caps_list():
                continue
            if p[1] == 1:
                permutation_name += "_" + p[0].lower()
            if p[1] > 1:
                permutation_name += "_" + p[0].lower() + p[1]
    c_struct = "struct " + _tp.technique_name + permutation_name + "\n"
    c_struct += "{\n"
    for ss in shader_struct:
        c_struct += ss
    c_struct += "};\n\n"
    technique_json["constants"] = pmfx_constants
    technique_json["constants_used_bytes"] = int(pre_pad_offset * 4)
    technique_json["constants_size_bytes"] = int(offset * 4)
    # final layout must be a whole number of float4 registers
    assert int(offset * 4) % 16 == 0
    return technique_json, c_struct, cb_str
# removes un-used input structures which may be empty if they have been defined out by permutation.
def strip_empty_inputs(input, main):
    """If the struct decl *input* has no members (everything permuted out),
    return ("", main-with-that-argument-removed); otherwise return both
    unchanged."""
    conditioned = input.replace("\n", "").replace(";", "").replace(";", "").replace("}", "").replace("{", "")
    tokens = conditioned.split(" ")
    for t in tokens:
        if t == "":
            tokens.remove(t)
    # just "struct <name>" left means the body was empty
    if len(tokens) == 2:
        # input is empty so remove from vs_main args
        input = ""
        name = tokens[1]
        pos = main.find(name)
        # us() maps find's -1 to maxsize so min/max pick the real delimiter
        prev_delim = max(us(main[:pos].rfind(",")), us(main[:pos].rfind("(")))
        next_delim = pos + min(us(main[pos:].find(",")), us(main[pos:].find(")")))
        main = main.replace(main[prev_delim:next_delim], " ")
    return input, main
# gets system value semantics (SV_InstanceID) and stores them in a tuple, for platform specific code gen later.
def get_sv_sematics(main):
    """Return [(semantic, type, name)] for each supported system-value
    semantic (SV_InstanceID, SV_VertexID) appearing in the argument list of
    the entry point source *main*."""
    arg_list = main[main.find("(") + 1:main.find(")")].split(',')
    found = []
    for semantic in ["SV_InstanceID", "SV_VertexID"]:
        for arg in arg_list:
            if arg.find(semantic) == -1:
                continue
            # "type name : SV_x" -> ["type", "name", ..]
            tokens = arg.replace(":", " ").strip().split(" ")
            found.append((semantic, tokens[0].strip(), tokens[1].strip()))
    return found
# evaluate permutation / technique defines in if: blocks and remove unused branches
def evaluate_conditional_blocks(source, permutation):
    """Resolve pmfx "if:(...) { } else if:(...) { } else: { }" blocks against
    *permutation* ((define, value) pairs): the body of the first true branch
    replaces the whole construct, untaken branches are removed. Conditions are
    rewritten from C syntax to python and eval'd with the defines as globals;
    undefined names default to 0."""
    if not permutation:
        return source
    pos = 0
    case_accepted = False
    while True:
        # locate the next branch keyword; the earliest of if / else if / else wins
        else_pos = source.find("else:", pos)
        else_if_pos = source.find("else if:", pos)
        pos = source.find("if:", pos)
        else_case = False
        first_case = True
        if us(else_if_pos) < us(pos):
            pos = else_if_pos
            first_case = False
        if us(else_pos) < us(pos):
            pos = else_pos
            else_case = True
            first_case = False
        if first_case:
            # a fresh if-chain resets the accepted flag
            case_accepted = False
        if pos == -1:
            break
        if not else_case:
            conditions_start = source.find("(", pos)
            body_start = source.find("{", conditions_start) + 1
            conditions = source[conditions_start:body_start - 1]
            conditions = conditions.replace('\n', '')
            conditions = conditions.replace("&&", " and ")
            conditions = conditions.replace("||", " or ")
            conditions = conditions.replace("!", " not ")
        else:
            body_start = source.find("{", pos) + 1
            conditions = "True"
        gv = dict()
        for v in permutation:
            gv[str(v[0])] = v[1]
        lv = dict()
        conditional_block = ""
        # find the matching closing brace of this branch body
        i = body_start
        stack_size = 1
        while True:
            if source[i] == "{":
                stack_size += 1
            if source[i] == "}":
                stack_size -= 1
            if stack_size == 0:
                break
            i += 1
        if not case_accepted:
            while True:
                try:
                    if eval(conditions, gv, lv):
                        conditional_block = source[body_start:i]
                        case_accepted = True
                        break
                    else:
                        break
                except NameError as e:
                    # define the missing name as 0 and retry the eval
                    defname = re.search("name '([^\']*)' is not defined", str(e)).group(1)
                    lv[defname] = 0
                    conditional_block = ""
        else:
            # an earlier branch of this chain already matched
            conditional_block = ""
        source = source.replace(source[pos:i+1], conditional_block)
        pos += len(conditional_block)
    return source
# recursively generate all possible permutations from inputs
def permute(define_list, permute_list, output_permutations):
    # base case: every define has a chosen value, snapshot the current selection
    if not define_list:
        output_permutations.append(list(permute_list))
        return output_permutations
    # choose each value of the last define in turn and recurse over the rest;
    # define_list and permute_list are restored to their input state on return
    define = define_list.pop()
    for value in define[1]:
        permute_list.append((define[0], value))
        output_permutations = permute(define_list, permute_list, output_permutations)
        permute_list.pop()
    define_list.append(define)
    return output_permutations
# generate numerical id for permutation
def generate_permutation_id(define_list, permutation):
    pid = 0
    for setting in permutation:
        for define in define_list:
            # match each chosen setting to its define by name; a value of 0 or an
            # exponent of -1 (non-permutation define) contributes nothing
            if setting[0] != define[0] or setting[1] <= 0:
                continue
            exponent = define[2]
            if exponent < 0:
                continue
            # multi-value options occupy a run of bits starting at their exponent
            if setting[1] > 1:
                exponent = setting[1] + exponent - 1
            pid += pow(2, exponent)
    return pid
# return shader version as float for consistent comparisons, version will be a string
def shader_version_float(platform, version):
    # metal ("2.2") and glsl/gles/spirv ("330", "450") version strings parse directly
    if platform in ("metal", "glsl", "spirv", "gles"):
        return float(version)
    # hlsl shader models are underscore separated ("5_0" -> 5.0)
    if platform == "hlsl":
        return float(version.replace("_", "."))
    assert 0
# just list of all the caps
def caps_list():
    all_caps = [
        "PMFX_TEXTURE_CUBE_ARRAY",
        "PMFX_COMPUTE_SHADER"
    ]
    return all_caps
# based on shader platform and version, some features may or may not be available
def defines_from_caps(define_list):
    # NOTE(review): the 'define_list' argument is rebound to a fresh list below, so
    # anything the caller appended before this call is discarded from the returned
    # list - confirm this is intentional (see the call site in generate_permutations)
    global _info
    # platform, feature version
    lookup = {
        "metal": [
            ["PMFX_TEXTURE_CUBE_ARRAY", 0.0],
            ["PMFX_COMPUTE_SHADER", 0.0]
        ],
        "glsl": [
            ["PMFX_TEXTURE_CUBE_ARRAY", 400.0],
            ["PMFX_COMPUTE_SHADER", 450.0]
        ],
        "gles": [
            ["PMFX_TEXTURE_CUBE_ARRAY", 310.0],
            ["PMFX_COMPUTE_SHADER", 310.0]
        ],
        "spirv": [
            ["PMFX_TEXTURE_CUBE_ARRAY", 400.0],
            ["PMFX_COMPUTE_SHADER", 450.0]
        ],
        "hlsl": [
            ["PMFX_TEXTURE_CUBE_ARRAY", 4.0],
            ["PMFX_COMPUTE_SHADER", 5.0]
        ]
    }
    # check platform exists
    platform = shader_sub_platform()
    if platform not in lookup.keys():
        return []
    # add features
    # each cap whose minimum version is met becomes a (name, values, exponent)
    # define triplet; exponent -1 excludes it from the permutation id
    version = shader_version_float(platform, _info.shader_version)
    define_list = []
    for cap in lookup[platform]:
        if version >= cap[1]:
            define_list.append((cap[0], [1], -1))
    return define_list
# generate permutation list from technique json
def generate_permutations(technique, technique_json):
    global _info
    output_permutations = []
    define_list = []
    permutation_options = dict()
    permutation_option_mask = 0
    define_string = ""
    # platform defines use exponent -1 so they never contribute to permutation ids
    define_list.append((_info.shader_platform.upper(), [1], -1))
    define_list.append((_info.shader_sub_platform.upper(), [1], -1))
    # NOTE(review): defines_from_caps rebinds its argument internally, so the two
    # platform defines appended above are discarded here - confirm intended
    define_list = defines_from_caps(define_list)
    if "permutations" in technique_json:
        # (name, values, exponent) triplets drive the permute() expansion below
        for p in technique_json["permutations"].keys():
            pp = technique_json["permutations"][p]
            define_list.append((p, pp[1], pp[0]))
        if "defines" in technique_json.keys():
            for d in technique_json["defines"]:
                define_list.append((d, [1], -1))
        output_permutations = permute(define_list, [], [])
        # build ui metadata (checkbox for on/off values, int input otherwise) and a
        # #define string exposing each option's bit mask to the shader source
        for key in technique_json["permutations"]:
            tp = technique_json["permutations"][key]
            ptype = "checkbox"
            if len(tp[1]) > 2:
                ptype = "input_int"
            permutation_options[key] = {"val": pow(2, tp[0]), "type": ptype}
            mask = pow(2, tp[0])
            permutation_option_mask += mask
            define_string += "#define " + technique.upper() + "_" + key + " " + str(mask) + "\n"
        define_string += "\n"
    # generate default permutation, inherit / get permutation constants
    tp = list(output_permutations)
    if len(tp) == 0:
        # no permutations declared: emit a single default permutation made of the
        # plain defines, or a placeholder when there are none
        default_permute = []
        if "defines" in technique_json.keys():
            for d in technique_json["defines"]:
                default_permute.append((d, 1))
        else:
            default_permute = [("SINGLE_PERMUTATION", 1)]
        tp.append(default_permute)
    return tp, permutation_options, permutation_option_mask, define_list, define_string
# look for inherit member and inherit another pmfx technique
def inherit_technique(technique, pmfx_json):
    # nothing to do unless the technique names a parent that exists in this pmfx
    if "inherit" not in technique.keys():
        return technique
    parent_name = technique["inherit"]
    if parent_name in pmfx_json.keys():
        technique = member_wise_merge(technique, pmfx_json[parent_name])
    return technique
# parse pmfx file to find the json block pmfx: { }
def find_pmfx_json(shader_file_text, create_default=True):
    marker = shader_file_text.find("pmfx:")
    if marker != -1:
        # explicit pmfx block: parse the json and strip it out of the shader source
        json_start = shader_file_text.find("{", marker)
        block_len = enclose_brackets(shader_file_text[marker:])
        pmfx_json = jsn.loads(shader_file_text[json_start:block_len + json_start])
        remaining_text = shader_file_text[:marker] + shader_file_text[marker + block_len:].strip()
        return pmfx_json, remaining_text
    if create_default:
        # shader can have no pmfx, provided it supplies vs_main and ps_main
        if find_function(shader_file_text, "vs_main") and find_function(shader_file_text, "ps_main"):
            return {"default": {"vs": "vs_main", "ps": "ps_main"}}, shader_file_text
    return None, None
# strips array [] from a resource access
def strip_array_access(resource):
    # keep everything before the first '[' (no-op when there is no array suffix)
    return resource.split("[", 1)[0]
# checks for a raw access type
def get_raw_access_type(resource):
    # maps each raw access kind to the index of the decl arg holding its name;
    # checked in declaration order (substring match, so _rw variants also hit)
    name_arg_index = {
        "structured_buffer": 1,
        "atomic_counter": 0,
        "cbuffer_table": 0
    }
    for access_type, name_index in name_arg_index.items():
        if access_type in resource:
            return access_type, name_index
    return None, None
# find only used shader resources
def find_used_resources(shader_source, resource_decl):
    # nothing declared -> nothing to filter (returns None; callers treat it as empty)
    if not resource_decl:
        return
    # find resource uses
    # scan the source for every resource access call and collect the accessed names
    uses = ["sample_texture", "read_texture", "write_texture", "sample_depth", "texture_sample"]
    resource_uses = []
    pos = 0
    while True:
        # cgu.find_first returns (position, token) of the earliest match, sys.maxsize when none remain
        access, tok = cgu.find_first(shader_source, uses, pos)
        if access == sys.maxsize:
            break
        start = shader_source.find("(", access)
        use = shader_source[access:start]
        end = shader_source.find(";", access)
        # us() presumably maps -1 to a large value so malformed statements are skipped - defined elsewhere
        if us(access) < us(start) < us(end):
            args = shader_source[start+1:end-1].split(",")
            if len(args) > 0:
                # every resource access should have the resource as first arg
                name = strip_array_access(args[0].strip(" "))
                if name not in resource_uses:
                    resource_uses.append(name)
                if use == "texture_sample":
                    # texture sample also has 'sampler'
                    sampler_name = strip_array_access(args[1].strip(" "))
                    if sampler_name not in resource_uses:
                        resource_uses.append(sampler_name)
        pos = end
    # keep only declarations whose resource name appeared in an access above
    used_resource_decl = ""
    resource_list = resource_decl.split(";")
    for resource in resource_list:
        resource = resource.strip()
        start = resource.find("(") + 1
        end = resource.find(")") - 1
        args = resource[start:end].split(",")
        name_positions = [0, 2]  # 0 = single sample texture, 2 = msaa texture
        # texture or msaa texture sampled with sample_texture...
        for p in name_positions:
            if len(args) > p:
                name = args[p].strip(" ")
                if name in resource_uses:
                    used_resource_decl = used_resource_decl.strip(" ")
                    used_resource_decl += resource + ";\n"
        # structured buffer / cbuffer / atomic counter with [] operator access
        # ('type' shadows the builtin here)
        type, name_index = get_raw_access_type(resource)
        if type:
            if len(args) >= name_index:
                name = args[name_index].strip(" ")
                if shader_source.find(name + "[") != -1:
                    used_resource_decl = used_resource_decl.strip(" ")
                    used_resource_decl += resource + ";\n"
    return used_resource_decl
# find only used cbuffers
def find_used_cbuffers(shader_source, cbuffers):
    # crude tokenisation: replace every delimiter with a space and split, so the
    # membership tests below only match whole identifiers
    non_tokens = ["(", ")", "{", "}", ".", ",", "+", "-", "=", "*", "/", "&", "|", "~", "\n", "<", ">", "[", "]", ";"]
    token_source = shader_source
    for nt in non_tokens:
        token_source = token_source.replace(nt, " ")
    token_list = token_source.split(" ")
    used_cbuffers = []
    for cbuf in cbuffers:
        # parse_and_split_block yields alternating type/name entries; member names
        # are the odd indices - presumably, TODO confirm against its definition
        member_list = parse_and_split_block(cbuf)
        for i in range(1, len(member_list), 2):
            member = member_list[i].strip()
            array = member.find("[")
            if array != -1:
                if array == 0:
                    # bare array-subscript fragment, nothing to look up
                    # (fix: removed a no-op 'i += 1' here - reassigning the loop
                    # variable of a python for-range loop has no effect)
                    continue
                else:
                    # strip the array suffix so 'lights[4]' matches token 'lights'
                    member = member[:array]
            if member in token_list:
                # a cbuffer counts as used as soon as any member name appears
                used_cbuffers.append(cbuf)
                break
    return used_cbuffers
# find only used functions from a given entry point
def find_used_functions(entry_func, function_list):
    # worklist search: appending to the list we are iterating makes this
    # transitive - newly discovered functions are themselves scanned for calls
    worklist = [entry_func]
    found_names = []
    for caller_src in worklist:
        for candidate in function_list:
            if candidate == caller_src:
                continue
            # function name is the second whitespace token, up to its '('
            token = candidate.split(" ")[1]
            fname = token[0:token.find("(")]
            if caller_src.find(fname + "(") == -1:
                continue
            if fname in found_names:
                continue
            worklist.append(candidate)
            found_names.append(fname)
    # emit used functions in original declaration order (entry point excluded,
    # it is handled separately by the caller)
    ordered = []
    for candidate in function_list:
        token = candidate.split(" ")[1]
        fname = token[0:token.find("(")]
        if fname in found_names:
            ordered.append(candidate)
    combined = ""
    for func_src in ordered:
        combined += func_src + "\n\n"
    return combined
# generate a vs, ps or cs from _tp (technique permutation data)
def generate_single_shader(main_func, _tp):
    _si = SingleShaderInfo()
    _si.main_func_name = main_func
    # find main func
    main = ""
    for func in _tp.functions:
        pos = func.find(main_func)
        if pos != -1:
            # require '<space>name(' so a substring of a longer identifier is not matched
            if func[pos+len(main_func)] == "(" and func[pos-1] == " ":
                main = func
    if main == "":
        print_error("error: could not find main function " + main_func)
        return None
    # find used functions,
    _si.functions_source = find_used_functions(main, _tp.functions)
    # find inputs / outputs
    _si.instance_input_struct_name = None
    # the return type of the entry point names the output struct
    _si.output_struct_name = main[0:main.find(" ")].strip()
    input_signature = main[main.find("(")+1:main.find(")")].split(" ")
    for i in range(0, len(input_signature)):
        input_signature[i] = input_signature[i].replace(",", "")
        # conventional parameter names identify the io structs; the struct type
        # name is the preceding signature token
        if input_signature[i] == "_input" or input_signature[i] == "input":
            _si.input_struct_name = input_signature[i-1]
        elif input_signature[i] == "_instance_input" or input_signature[i] == "instance_input":
            _si.instance_input_struct_name = input_signature[i-1]
    # find source decl for inputs / outputs
    if _si.instance_input_struct_name:
        _si.instance_input_decl = find_struct(_tp.source, "struct " + _si.instance_input_struct_name)
    _si.input_decl = find_struct(_tp.source, "struct " + _si.input_struct_name)
    _si.output_decl = find_struct(_tp.source, "struct " + _si.output_struct_name)
    # remove empty inputs which have no members due to permutation conditionals
    _si.input_decl, main = strip_empty_inputs(_si.input_decl, main)
    # get sv sematics to insert gl / metal specific equivalent
    _si.sv_semantics = get_sv_sematics(main)
    # condition main function with stripped inputs
    if _si.instance_input_struct_name:
        _si.instance_input_decl, main = strip_empty_inputs(_si.instance_input_decl, main)
        if _si.instance_input_decl == "":
            _si.instance_input_struct_name = None
    _si.main_func_source = main
    # find only used textures by this shader
    full_source = _si.functions_source + main
    _si.resource_decl = find_used_resources(full_source, _tp.resource_decl)
    _si.cbuffers = find_used_cbuffers(full_source, _tp.cbuffers)
    _si.threads = _tp.threads
    return _si
# format source with indents
def format_source(source, indent_size):
    # one indent level per currently open '{'; a line beginning with '}' is
    # printed at the level it closes
    depth = 0
    out_lines = []
    for raw_line in source.split("\n"):
        line = raw_line.strip(" ")
        if not line:
            continue
        effective_depth = depth
        if line[0] == "{":
            depth += 1
        elif line[0] == "}":
            depth -= 1
            effective_depth = depth
        out_lines.append(" " * (effective_depth * indent_size) + line)
    if not out_lines:
        return ""
    return "\n".join(out_lines) + "\n"
# hashes a shader to find identical shaders which have different permutation options
def shader_hash(_shader):
    # the hash covers everything that affects the compiled output: io decls,
    # resources, function bodies and cbuffers
    parts = [
        _shader.input_decl,
        _shader.instance_input_decl,
        _shader.output_decl,
        _shader.resource_decl,
        _shader.functions_source,
        _shader.main_func_source
    ]
    parts.extend(_shader.cbuffers)
    digest_input = "".join(parts)
    return hashlib.md5(digest_input.encode('utf-8')).hexdigest()
# hlsl source.. pssl is similar
def _hlsl_source(_info, pmfx_name, _tp, _shader):
    # concatenate in dependency order: macros, structs, cbuffers, io decls,
    # resources, helper functions, then the entry point
    src = _info.macros_source
    src += _tp.struct_decls
    for cbuffer in _shader.cbuffers:
        src += cbuffer
    src += _shader.input_decl
    src += _shader.instance_input_decl
    src += _shader.output_decl
    src += _shader.resource_decl
    src += _shader.functions_source
    if _shader.shader_type == "cs":
        # compute entry points need an explicit thread group size attribute
        dims = ", ".join(str(_tp.threads[axis]) for axis in range(0, 3))
        src += "[numthreads(" + dims + ")]"
    src += _shader.main_func_source
    return format_source(src, 4)
# compile pssl
def compile_pssl(_info, pmfx_name, _tp, _shader):
    # requires the sony orbis sdk toolchain to be installed and on PATH
    orbis_sdk = os.getenv("SCE_ORBIS_SDK_DIR")
    if not orbis_sdk:
        print_error("error: you must have orbis sdk installed, "
                    "'SCE_ORBIS_SDK_DIR' environment variable is set and is added to your PATH.")
        sys.exit(1)
    # pssl is close enough to hlsl to start from the generated hlsl source
    shader_source = _hlsl_source(_info, pmfx_name, _tp, _shader)
    # apply syntax changes
    # whole-token swaps of hlsl system-value semantics for their pssl equivalents
    token_swaps = {
        "cbuffer": "ConstantBuffer",
        "SV_POSITION": "S_POSITION",
        "SV_POSITION0": "S_POSITION",
        "SV_Target": "S_TARGET_OUTPUT",
        "SV_Target0": "S_TARGET_OUTPUT0",
        "SV_Target1": "S_TARGET_OUTPUT1",
        "SV_Target2": "S_TARGET_OUTPUT2",
        "SV_Target3": "S_TARGET_OUTPUT3",
        "SV_Target4": "S_TARGET_OUTPUT4",
        "SV_Target5": "S_TARGET_OUTPUT5",
        "SV_Target6": "S_TARGET_OUTPUT6",
        "SV_Target7": "S_TARGET_OUTPUT7",
        "SV_Depth": "S_DEPTH_OUTPUT",
        "SV_InstanceID": "S_INSTANCE_ID",
        "SV_VertexID": "S_VERTEX_ID"
    }
    for token in token_swaps:
        shader_source = replace_token(token, token_swaps[token], shader_source)
    extension = {
        "vs": ".vs",
        "ps": ".ps",
        "cs": ".cs"
    }
    profile = {
        "vs": "sce_vs_vs_orbis",
        "ps": "sce_ps_orbis",
        "cs": "sce_cs_orbis"
    }
    temp_path = os.path.join(_info.temp_dir, pmfx_name)
    output_path = os.path.join(_info.output_dir, pmfx_name)
    os.makedirs(temp_path, exist_ok=True)
    os.makedirs(output_path, exist_ok=True)
    # write the conditioned source to temp, compile it to <name>.<stage>c
    temp_file_and_path = sanitize_file_path(os.path.join(temp_path, _tp.name + extension[_shader.shader_type]))
    output_file_and_path = os.path.join(output_path, _tp.name + extension[_shader.shader_type] + "c")
    temp_shader_source = open(temp_file_and_path, "w")
    temp_shader_source.write(shader_source)
    temp_shader_source.close()
    cmdline = "orbis-wave-psslc" + " -profile " + profile[_shader.shader_type] + \
              " -entry " + _shader.main_func_name + " " + temp_file_and_path + " -o " + output_file_and_path + " "
    cmdline += _info.args
    # errors are recorded on the technique permutation rather than raised
    error_code, error_list, output_list = call_wait_subprocess(cmdline)
    if error_code != 0:
        _tp.error_code = error_code
        _tp.error_list = error_list
        _tp.output_list = output_list
# compile hlsl shader model 4
def compile_hlsl(_info, pmfx_name, _tp, _shader):
    shader_source = _hlsl_source(_info, pmfx_name, _tp, _shader)
    # fxc compiler shipped with the tools
    exe = os.path.join(_info.tools_dir, "bin", "fxc", "fxc")
    # default sm 4
    if _tp.shader_version == "0":
        _tp.shader_version = "4_0"
    sm = str(_tp.shader_version)
    shader_model = {
        "vs": "vs_" + sm,
        "ps": "ps_" + sm,
        "cs": "cs_" + sm
    }
    extension = {
        "vs": ".vs",
        "ps": ".ps",
        "cs": ".cs"
    }
    temp_path = os.path.join(_info.temp_dir, pmfx_name)
    output_path = os.path.join(_info.output_dir, pmfx_name)
    os.makedirs(temp_path, exist_ok=True)
    os.makedirs(output_path, exist_ok=True)
    # generated source in temp, compiled bytecode as <name>.<stage>c in output
    temp_file_and_path = os.path.join(temp_path, _tp.name + extension[_shader.shader_type])
    output_file_and_path = os.path.join(output_path, _tp.name + extension[_shader.shader_type] + "c")
    temp_shader_source = open(temp_file_and_path, "w")
    temp_shader_source.write(shader_source)
    temp_shader_source.close()
    cmdline = exe + " "
    cmdline += "/T " + shader_model[_shader.shader_type] + " "
    cmdline += "/E " + _shader.main_func_name + " "
    if _info.debug:
        # assembly listing, no optimisation, debug info
        cmdline += "/Fc /Od /Zi" + " "
    cmdline += "/Fo " + output_file_and_path + " " + temp_file_and_path + " "
    cmdline += _info.args
    compiled = _info.compiled
    if not compiled:
        # source-only mode: write the generated hlsl as the "compiled" output
        temp_shader_source = open(output_file_and_path, "w")
        temp_shader_source.write(shader_source)
        temp_shader_source.close()
        return 0
    else:
        error_code, error_list, output_list = call_wait_subprocess(cmdline)
        if error_code != 0:
            _tp.error_code = error_code
            _tp.error_list = error_list
            _tp.output_list = output_list
        # NOTE(review): this path returns None while the uncompiled path returns 0 -
        # callers appear to rely on _tp.error_code rather than the return value, verify
# parse shader inputs annd output source into a list of elements and semantics
def parse_io_struct(source):
    if len(source) == 0:
        return [], []
    # members live between the struct's braces as '<type name> : <semantic>;'
    body_open = source.find("{")
    body_close = source.find("}")
    elements = []
    semantics = []
    cursor = body_open + 1
    semi = 0
    while semi < body_close:
        semi = source.find(";", cursor)
        if semi <= 0:
            break
        colon = source.find(":", cursor)
        elements.append(source[cursor:colon].strip())
        semantics.append(source[colon+1:semi].strip())
        cursor = semi + 1
    # the last input will always be "};" pop it out
    elements.pop()
    semantics.pop()
    return elements, semantics
# generate a global struct to access input structures in a hlsl like manner
def generate_global_io_struct(io_elements, decl):
    # global input struct for hlsl compatibility to access like input.value
    members = "".join(element + ";\n" for element in io_elements)
    return decl + "\n{\n" + members + "};\n" + "\n"
# assign vs or ps inputs to the global struct
def generate_input_assignment(io_elements, decl, local_var, suffix):
    lines = ["//assign " + decl + " struct from glsl inputs"]
    lines.append(decl + " " + local_var + ";")
    for element in io_elements:
        member = element.split()[1]
        # the vertex position is routed through gl_Position, never round-tripped
        if member == "position" and "vs_output" in decl:
            continue
        lines.append(local_var + "." + member + " = " + member + suffix + ";")
    return "\n".join(lines) + "\n"
# assign vs or ps outputs from the global struct to the output locations
def generate_output_assignment(_info, io_elements, local_var, suffix, gles2=False):
    assign_source = "\n//assign glsl global outputs from structs\n"
    for element in io_elements:
        member = element.split()[1]
        if member == "position":
            assign_source += "gl_Position = " + local_var + "." + member + ";\n"
            # optional uniform-driven viewport y flip, plus the fixed clip-space
            # flip required for vulkan (spirv)
            if _info.v_flip:
                assign_source += "gl_Position.y *= v_flip;\n"
            if _info.shader_sub_platform == "spirv":
                assign_source += "gl_Position.y *= -1.0;\n"
            continue
        if gles2 and suffix == "_ps_output":
            # gles2 has no user declared fragment outputs
            assign_source += "gl_FragColor" + " = " + local_var + "." + member + ";\n"
            continue
        assign_source += member + suffix + " = " + local_var + "." + member + ";\n"
    return assign_source
# generates a texture declaration from a texture list
def generate_texture_decl(texture_list):
    # None or empty list -> no declarations
    if not texture_list:
        return ""
    # each alias is (macro, name, unit) -> "macro( name, unit );"
    decl_lines = [str(alias[0]) + "( " + str(alias[1]) + ", " + str(alias[2]) + " );" for alias in texture_list]
    return "\n".join(decl_lines) + "\n"
# insert glsl location if we need it
def insert_layout_location(loc):
    # only vulkan (spirv) and nvn consume explicit location qualifiers here
    if _info.shader_sub_platform in ("spirv", "nvn"):
        return "layout(location = " + str(loc) + ") "
    return ""
# gets structured buffers from resource decls (type, name, binding)
def get_structured_buffers(shader):
    declarations = shader.resource_decl.split(";")
    buffers = []
    for declaration in declarations:
        declaration = declaration.strip()
        if not declaration:
            continue
        if declaration.find("structured_buffer") == -1:
            continue
        # take the parenthesised arg list and clean each arg of brackets / whitespace
        raw_args = declaration[declaration.find("("):].split(",")
        cleaned = [a.strip().strip("(").strip(")").strip() for a in raw_args]
        buffers.append(cleaned)
    return buffers
# extracts the texture types into dictionary from resource decl to replace sample calls
def texture_types_from_resource_decl(resource_decl):
    tex_dict = dict()
    resource_list = resource_decl.split(";")
    for resource in resource_list:
        # parenthesised macro args, e.g. texture_2d( name, unit )
        start = resource.find("(") + 1
        end = resource.find(")") - 1
        args = resource[start:end].split(",")
        name_positions = [0, 2]  # 0 = single sample texture, 2 = msaa texture
        # texture or msaa texture sampled with sample_texture...
        name = ""
        for p in name_positions:
            if len(args) > p:
                # later positions win, so msaa decls keep the real name (arg 2)
                name = args[p].strip(" ")
                # the macro name before '(' is the texture type
                tex_type = resource[:start-1]
        if len(name) > 0:
            tex_dict[name] = tex_type.strip()
    return tex_dict
# locates pmfx sample_texture calls and replaces with non-polymorphic function calls
def replace_texture_samples(shader, texture_types_dict):
    sampler_tokens = ["sample_texture", "sample_texture_level", "sample_texture_grad", "sample_texture_array"]
    pos = 0
    while True:
        # earliest whole-token occurrence of any sampler call; sys.maxsize when exhausted
        sample, tok = cgu.find_first_token(shader, sampler_tokens, pos)
        if sample == sys.maxsize:
            break
        # the first call argument names the texture being sampled
        name_start = sample + shader[sample:].find("(") + 1
        name_end = name_start + shader[name_start:].find(",")
        name_str = shader[name_start:name_end].strip()
        if name_str in texture_types_dict:
            # suffix the call with the concrete dimension derived from the decl,
            # e.g. sample_texture on a texture_2d -> sample_texture_2d
            tex_type = texture_types_dict[name_str]
            tex_type = tex_type.replace("texture_", "")
            tex_type = tex_type.replace("_array", "")
            insert = shader[:sample+len(tok)] + "_" + tex_type
            insert += shader[sample+len(tok):]
            shader = insert
        # continue scanning after this call's closing paren
        end = shader[sample:].find(")")
        pos = sample+end+1
    return shader
# generates gles 2 compatible uniforms packed into glUniform4fv
def generate_uniform_pack(cbuffer_name, cbuffer_body):
    # number of float4 slots each packable type occupies
    float4_slots = {
        "float4": 1,
        "float4x4": 4
    }
    result = dict()
    body = cbuffer_body.strip("{").strip("};").strip()
    buffer_name = cbuffer_name.strip()
    member_pairs = []
    total_slots = 0
    # first pass: collect (type, name) pairs and count the array size
    for raw_member in body.split(";"):
        raw_member = raw_member.strip()
        if not raw_member:
            continue
        tokens = raw_member.split(" ")
        member_type = tokens[0]
        member_name = tokens[1]
        member_pairs.append((member_type, member_name))
        if member_type not in float4_slots.keys():
            print("cannot pack type into float4 array: " + member_type)
            exit(1)
        total_slots += float4_slots[member_type]
    result["decl"] = "uniform float4 " + buffer_name + "[" + str(total_slots) + "];\n"
    # second pass: emit assignments that unpack each member from the array
    slot = 0
    assign = ""
    for member_type, member_name in member_pairs:
        if member_type == "float4x4":
            # matrices are rebuilt row by row from four consecutive float4 slots
            assign += member_type + " " + member_name + ";\n"
            for row in range(0, 4):
                assign += member_name + "[" + str(row) + "] = " + buffer_name + "[" + str(slot + row) + "];\n"
        else:
            assign += member_type + " " + member_name + " = " + buffer_name + "[" + str(slot) + "];\n"
        slot += float4_slots[member_type]
    result["assign"] = assign
    return result
# unpacks a uniform pack into variables of the correct type, this is relying on the optimiser to rip out the reduant assigns
def insert_uniform_unpack_assignment(functions_source, uniform_pack):
    pos = 0
    inserted_source = ""
    while True:
        # next function body: first '{' from pos, matched to its closing brace by
        # enclose_brackets (defined elsewhere)
        bp = functions_source[pos:].find("{")
        if bp == -1:
            break
        bp = pos + bp
        ep = enclose_brackets(functions_source[bp:])
        if ep == -1:
            break
        ep = bp + ep
        pos = ep + 1
        # splice the unpack assignments in directly after the opening brace
        # NOTE(review): the first slice below starts at index 0 on every iteration,
        # so with more than one top-level function the text preceding the current
        # body gets duplicated, and any tail after 'ep' is dropped - verify callers
        # only ever pass single-function sources through here
        inserted_source += functions_source[:bp+1]
        inserted_source += "\n" + uniform_pack["assign"] + "\n"
        inserted_source += functions_source[bp+1:ep]
    return inserted_source
# replace token pasting in structured buffer definitions, since gles does not support it by default
def replace_token_pasting(shader):
    # declaration macros that name their backing buffer via '##' token pasting
    tokens = ["structured_buffer", "structured_buffer_rw", "atomic_counter"]
    # (fix: removed an unused 'pos' local left over from an earlier scanning approach)
    new_shader = ""
    decls = shader.split(";")
    for decl in decls:
        if decl.strip() == "":
            continue
        contains_token = False
        for token in tokens:
            if token in decl:
                contains_token = True
        if not contains_token:
            # unrelated declarations pass through untouched (re-terminated)
            new_shader += decl.strip() + ";\n"
            continue
        # append an explicit '<name>_buffer' arg so the macro no longer needs '##'
        decl_start = decl.find("(") + 1
        decl_end = decl.find(")")
        decl_str = decl[decl_start:decl_end].strip()
        decl_params = decl_str.split(",")
        name_param = decl_params[1].strip()
        if decl.find("atomic_counter") != -1:
            # atomic counters take the name as their first arg
            name_param = decl_params[0].strip()
        new_decl_str = decl_str + ", " + name_param + "_buffer"
        new_decl_str = decl.replace(decl_str, new_decl_str).strip()
        # replace atomic uint with uint as a uint in a gles ssbo is atomic by default
        new_decl_str = new_decl_str.replace("atomic_uint", "uint")
        new_shader += new_decl_str + ";\n"
    return new_shader
# compile glsl
def compile_glsl(_info, pmfx_name, _tp, _shader):
    # parse inputs and outputs into semantics
    inputs, input_semantics = parse_io_struct(_shader.input_decl)
    outputs, output_semantics = parse_io_struct(_shader.output_decl)
    instance_inputs, instance_input_semantics = parse_io_struct(_shader.instance_input_decl)
    # default 330
    if _tp.shader_version == "0":
        _tp.shader_version = "330"
    # some capabilities
    # binding points for samples and uniform buffers are only supported 420 onwards..
    binding_points = int(_tp.shader_version) >= 420
    texture_cube_array = int(_tp.shader_version) >= 400
    texture_arrays = True
    attribute_stage_in = False
    varying_in = False
    gl_frag_color = False
    explicit_texture_sampling = False
    use_uniform_pack = False
    uniform_pack = None
    if _info.shader_sub_platform == "gles":
        # gles2 has no io blocks, user fragment outputs or implicit sampler
        # overloads, and uniforms must be packed into float4 arrays
        if shader_version_float("gles", _tp.shader_version) <= 200:
            attribute_stage_in = True
            varying_in = True
            gl_frag_color = True
            explicit_texture_sampling = True
            use_uniform_pack = True
            uniform_pack = dict()
            uniform_pack["decl"] = ""
            uniform_pack["assign"] = ""
        if shader_version_float("gles", _tp.shader_version) >= 320:
            binding_points = True
    # uniform buffers
    # convert hlsl style 'cbuffer name : register(bN)' into glsl std140 uniform blocks
    uniform_buffers = ""
    for cbuf in _shader.cbuffers:
        name_start = cbuf.find(" ")
        name_end = cbuf.find(":")
        if name_end == -1:
            continue
        if binding_points:
            # lift the register number out for an explicit glsl binding point
            reg_start = cbuf.find("register(") + len("register(")
            reg_end = reg_start + cbuf[reg_start:].find(")")
            reg = cbuf[reg_start:reg_end]
            reg = reg.replace("b", " ")
            uniform_buf = "layout (binding=" + reg + ",std140) uniform"
        else:
            uniform_buf = "layout (std140) uniform"
        body_start = cbuf.find("{")
        body_end = cbuf.find("};") + 2
        cbuffer_body = cbuf[body_start:body_end]
        cbuffer_name = cbuf[name_start:name_end]
        if not use_uniform_pack:
            uniform_buf += cbuf[name_start:name_end]
            uniform_buf += "\n"
            uniform_buf += cbuf[body_start:body_end] + "\n"
            uniform_buffers += uniform_buf + "\n"
        else:
            # gles2: flatten the cbuffer into a packed float4 array uniform instead
            uniform_pack_cbuf = generate_uniform_pack(cbuffer_name, cbuffer_body)
            uniform_pack["decl"] += uniform_pack_cbuf["decl"]
            uniform_pack["assign"] += uniform_pack_cbuf["assign"]
            uniform_buffers += uniform_pack_cbuf["decl"]
    # header and macros
    shader_source = ""
    if _info.shader_sub_platform == "gles":
        if shader_version_float("gles", _tp.shader_version) >= 300:
            shader_source += "#version " + _tp.shader_version + " es\n"
            # extensions
            for ext in _info.extensions:
                shader_source += "#extension " + ext + " : require\n"
                shader_source += "#define PMFX_" + ext + " 1\n"
            shader_source += "#define GLES3\n"
        else:
            shader_source += "#define GLES2\n"
        shader_source += "#define GLSL\n"
        shader_source += "#define GLES\n"
        if texture_arrays:
            shader_source += "#define PMFX_TEXTURE_ARRAYS\n"
        if binding_points:
            shader_source += "#define PMFX_BINDING_POINTS\n"
        if shader_version_float("gles", _tp.shader_version) >= 320:
            shader_source += "#define PMFX_GLES_COMPUTE\n"
    else:
        shader_source += "#version " + _tp.shader_version + " core\n"
        # extensions
        for ext in _info.extensions:
            shader_source += "#extension " + ext + " : require\n"
            shader_source += "#define PMFX_" + ext + " 1\n"
        for ext in _info.nvn_extensions:
            shader_source += "#extension " + ext + " : enable\n"
            shader_source += "#define PMFX_" + ext + " 1\n"
        shader_source += "#define GLSL\n"
        if binding_points:
            shader_source += "#define PMFX_BINDING_POINTS\n"
        if texture_cube_array:
            shader_source += "#define PMFX_TEXTURE_CUBE_ARRAY\n"
        if texture_arrays:
            shader_source += "#define PMFX_TEXTURE_ARRAYS\n"
    # texture offset is to avoid collisions on descriptor set slots in vulkan
    if _info.shader_sub_platform == "spirv":
        shader_source += "#define PMFX_TEXTURE_OFFSET " + str(_info.texture_offset) + "\n"
    else:
        shader_source += "#define PMFX_TEXTURE_OFFSET 0\n"
    # identification comment: pmfx file, technique, stage and permutation id
    shader_source += "//" + pmfx_name + " " + _tp.name + " " + _shader.shader_type + " " + str(_tp.id) + "\n"
    shader_source += _info.macros_source
    # input structs
    # spirv skips fragment input location 0 (position) - see the condition below
    skip_0 = _info.shader_sub_platform == "spirv"
    index_counter = 0
    for input in inputs:
        if _shader.shader_type == "vs":
            if attribute_stage_in:
                shader_source += "attribute " + input + "_vs_input;\n"
            else:
                shader_source += "layout(location = " + str(index_counter) + ") in " + input + "_vs_input;\n"
        elif _shader.shader_type == "ps":
            if index_counter != 0 or not skip_0:
                if varying_in:
                    shader_source += "varying " + input + "_vs_output;\n"
                else:
                    shader_source += insert_layout_location(index_counter)
                    shader_source += "in " + input + "_vs_output;\n"
        index_counter += 1
    for instance_input in instance_inputs:
        # NOTE(review): on spirv/nvn this emits a layout qualifier twice for the
        # same attribute (insert_layout_location plus the literal below) - verify
        shader_source += insert_layout_location(index_counter)
        shader_source += "layout(location = " + str(index_counter) + ") in " + instance_input + "_instance_input;\n"
        index_counter += 1
    # outputs structs
    index_counter = 0
    if _shader.shader_type == "vs":
        for output in outputs:
            # position is written through gl_Position, not a user varying
            if output.split()[1] != "position":
                if varying_in:
                    shader_source += "varying " + output + "_" + _shader.shader_type + "_output;\n"
                else:
                    shader_source += insert_layout_location(index_counter)
                    shader_source += "out " + output + "_" + _shader.shader_type + "_output;\n"
            index_counter += 1
    elif _shader.shader_type == "ps":
        for p in range(0, len(outputs)):
            if "SV_Depth" in output_semantics[p]:
                continue
            else:
                if not gl_frag_color:
                    # SV_TargetN selects the fragment output location explicitly
                    output_index = output_semantics[p].replace("SV_Target", "")
                    if output_index != "":
                        shader_source += "layout(location = " + output_index + ") "
                    else:
                        shader_source += insert_layout_location(0)
                    shader_source += "out " + outputs[p] + "_ps_output;\n"
    # insert vflip uniform for correcting texture and viewport y coords
    if _info.v_flip:
        shader_source += "uniform float v_flip;\n"
    # global structs for access to inputs or outputs from any function in vs or ps
    if _shader.shader_type != "cs":
        shader_source += generate_global_io_struct(inputs, "struct " + _shader.input_struct_name)
        if _shader.instance_input_struct_name:
            if len(instance_inputs) > 0:
                shader_source += generate_global_io_struct(instance_inputs, "struct " + _shader.instance_input_struct_name)
        if len(outputs) > 0:
            shader_source += generate_global_io_struct(outputs, "struct " + _shader.output_struct_name)
    # convert sample_texture to sample_texture_2d etc
    if explicit_texture_sampling:
        texture_types = texture_types_from_resource_decl(_shader.resource_decl)
        _shader.functions_source = replace_texture_samples(_shader.functions_source, texture_types)
        _shader.main_func_source = replace_texture_samples(_shader.main_func_source, texture_types)
    if uniform_pack:
        _shader.functions_source = insert_uniform_unpack_assignment(_shader.functions_source, uniform_pack)
        _shader.main_func_source = insert_uniform_unpack_assignment(_shader.main_func_source, uniform_pack)
    resource_decl = _shader.resource_decl
    if _info.shader_sub_platform == "gles":
        resource_decl = replace_token_pasting(resource_decl)
    shader_source += _tp.struct_decls
    shader_source += uniform_buffers
    shader_source += resource_decl
    shader_source += _shader.functions_source
    # strip the hlsl-style entry point down to its body; it is re-wrapped in a
    # glsl void main() below
    glsl_main = _shader.main_func_source
    skip_function_start = glsl_main.find("{") + 1
    if _shader.shader_type != "cs":
        # this doesnt handle multiple return statements very well
        skip_function_end = glsl_main.rfind("return")
        glsl_main = glsl_main[skip_function_start:skip_function_end].strip()
    else:
        # cs shaders do not return, so we need to strip off the '}'
        glsl_main = glsl_main[skip_function_start:].strip()
        glsl_main = glsl_main.strip("}")
    input_name = {
        "vs": "_vs_input",
        "ps": "_vs_output",
        "cs": "_cs_input"
    }
    output_name = {
        "vs": "_vs_output",
        "ps": "_ps_output",
        "cs": "_cs_output"
    }
    if _shader.shader_type == "cs":
        # compute: declare the workgroup size and expose the global invocation id
        shader_source += "layout("
        shader_source += "local_size_x = " + str(_tp.threads[0]) + ", "
        shader_source += "local_size_y = " + str(_tp.threads[1]) + ", "
        shader_source += "local_size_z = " + str(_tp.threads[2])
        shader_source += ") in;\n"
        shader_source += "void main()\n{\n"
        shader_source += "ivec3 gid = ivec3(gl_GlobalInvocationID);\n"
        shader_source += glsl_main
        shader_source += "\n}\n"
    else:
        # vs and ps need to assign in / out attributes to structs
        pre_assign = generate_input_assignment(inputs, _shader.input_struct_name, "_input", input_name[_shader.shader_type])
        if _shader.instance_input_struct_name:
            if len(instance_inputs) > 0:
                pre_assign += generate_input_assignment(instance_inputs,
                                                        _shader.instance_input_struct_name, "instance_input", "_instance_input")
        post_assign = generate_output_assignment(_info, outputs, "_output", output_name[_shader.shader_type], gl_frag_color)
        shader_source += "void main()\n{\n"
        shader_source += "\n" + pre_assign + "\n"
        shader_source += glsl_main
        shader_source += "\n" + post_assign + "\n"
        shader_source += "}\n"
    # condition source
    shader_source = replace_io_tokens(shader_source)
    shader_source = format_source(shader_source, 4)
    # replace sv_semantic tokens
    # sv[2] is the variable name bound to the semantic in the entry signature
    for sv in _shader.sv_semantics:
        if sv[0] == "SV_InstanceID":
            shader_source = replace_token(sv[2], "gl_InstanceID", shader_source)
        elif sv[0] == "SV_VertexID":
            shader_source = replace_token(sv[2], "gl_VertexID", shader_source)
    extension = {
        "vs": ".vsc",
        "ps": ".psc",
        "cs": ".csc"
    }
    temp_extension = {
        "vs": ".vert",
        "ps": ".frag",
        "cs": ".comp"
    }
    temp_path = os.path.join(_info.temp_dir, pmfx_name)
    output_path = os.path.join(_info.output_dir, pmfx_name)
    os.makedirs(temp_path, exist_ok=True)
    os.makedirs(output_path, exist_ok=True)
    # write the conditioned glsl to temp for validation / compilation
    temp_file_and_path = os.path.join(temp_path, _tp.name + temp_extension[_shader.shader_type])
    temp_shader_source = open(temp_file_and_path, "w")
    temp_shader_source.write(shader_source)
    temp_shader_source.close()
    output_path = os.path.join(_info.output_dir, pmfx_name)
    os.makedirs(output_path, exist_ok=True)
    output_file_and_path = os.path.join(output_path, _tp.name + extension[_shader.shader_type])
    if _info.shader_sub_platform == "nvn":
        # nintendo switch: compile with the sdk's shader compiler
        nvn_sdk = os.getenv("NINTENDO_SDK_ROOT")
        if not nvn_sdk:
            print_error("error: you must have nintendo switch sdk installed, "
                        "'NINTENDO_SDK_ROOT' environment variable is set and is added to your PATH.")
            sys.exit(1)
        exe = os.path.normpath(_info.nvn_exe)
        nvn_type = {
            "vs": "-stage vertex",
            "ps": "-stage fragment",
            "cs": "-stage compute"
        }
        cmd = "-input " + sanitize_file_path(temp_file_and_path) + " "
        cmd += nvn_type[_shader.shader_type] + " " + sanitize_file_path(temp_file_and_path) + " "
        cmd += "-output " + sanitize_file_path(output_file_and_path) + " "
        cmd += _info.args
        error_code, error_list, output_list = call_wait_subprocess(exe + " " + cmd)
        _tp.error_code = error_code
        _tp.error_list = error_list
        _tp.output_list = output_list
    else:
        exe = os.path.join(_info.tools_dir, "bin", "glsl", get_platform_name(), "validator" + get_platform_exe())
        if _info.shader_sub_platform == "spirv":
            # spirv actually compiles (-V) to a binary output; plain glsl only validates
            exe += " -V "
            exe += " -o " + output_file_and_path
        error_code, error_list, output_list = call_wait_subprocess(exe + " " + temp_file_and_path)
        _tp.error_code = error_code
        _tp.error_list = error_list
        _tp.output_list = output_list
        if _info.shader_sub_platform != "spirv":
            # copy glsl shader to data
            shader_file = open(output_file_and_path, "w")
            shader_file.write(shader_source)
            shader_file.close()
    return error_code
# we need to convert ubytes 255 to float 1.0
def convert_ubyte_to_float(semantic):
    # COLOR semantics are ubyte packed and need 1/255 normalisation.
    # fix: the previous truthiness test on str.find() only matched semantics that
    # *started* with "COLOR" (find() returning 0 is falsy) and treated 'not found'
    # (-1, truthy) as the skip case - make the containment check explicit.
    return semantic.find("COLOR") != -1
# gets metal packed types from hlsl semantic, all types are float except COLOR: uchar, BLENDINDICES uchar
def get_metal_packed_decl(stage_in, input, semantic):
    """Build a metal member declaration from an hlsl input declaration.

    COLOR / BLENDINDICES semantics become uchar (keeping the vector width of
    the declared type); all other semantics keep their declared type. Outside
    of [[stage_in]] layouts the type is prefixed with "packed_".
    """
    tokens = input.split(" ")
    declared_type = tokens[0]
    prefix = "" if stage_in else "packed_"
    if semantic.find("COLOR") != -1 or semantic.find("BLENDINDICES") != -1:
        # ubyte-packed members: keep vector width if the type name ends in 2/3/4
        base = "uchar"
        width = declared_type[-1]
        if width in ["2", "3", "4"]:
            base += width
    else:
        base = declared_type
    remainder = "".join(" " + t for t in tokens[1:])
    return prefix + base + remainder
# finds token in source code
def find_token(token, string):
    """Return the index of the first whole-word occurrence of token in string,
    or -1 when no delimiter-bounded occurrence exists.

    A match counts only when the characters on both sides of the occurrence
    are delimiters (start of string counts on the left; end of string counts
    on the right). Occurrences embedded in longer identifiers are skipped by
    recursing on the remainder of the string.

    Fix: the original indexed `string[ep]` without a bounds check, raising
    IndexError whenever a multi-character token ended exactly at the end of
    the string; end-of-string is now treated as a right delimiter.
    """
    delimiters = [
        "(", ")", "{", "}", ".", ",", "+", "-", "=", "*", "/",
        "&", "|", "~", "\n", "\t", "<", ">", "[", "]", ";", " "
    ]
    fp = string.find(token)
    if fp != -1:
        left = False
        right = False
        # check left
        if fp > 0:
            for d in delimiters:
                if string[fp-1] == d:
                    left = True
                    break
        else:
            left = True
        # check right
        ep = fp + len(token)
        if fp < ep-1:
            if ep >= len(string):
                # token runs to the very end of the string: treat as delimited
                # (previously this indexed past the end and raised IndexError)
                right = True
            else:
                for d in delimiters:
                    if string[ep] == d:
                        right = True
                        break
        else:
            right = True
        if left and right:
            return fp
        # try again in the remainder of the string
        tt = find_token(token, string[fp+len(token):])
        if tt == -1:
            return -1
        return fp+len(token) + tt
    return -1
# replace all occurences of token in source code
def replace_token(token, replace, string):
    """Replace every whole-word occurrence of token in string with replace.

    Matching is delegated to find_token, so only delimiter-bounded occurrences
    are substituted; substrings embedded in longer identifiers are left alone.
    The scan restarts from the beginning after each substitution, so `replace`
    must not itself contain a whole-word `token` or the loop never terminates.

    Cleanup: removed an unused local `iter` (which also shadowed the builtin)
    and a redundant trailing `pass`.
    """
    while True:
        pos = find_token(token, string)
        if pos == -1:
            break
        string = string[:pos] + replace + string[pos+len(token):]
    return string
# metal main functions require textures and buffers to be passed in as args, and do not support global decls
def metal_functions(functions, cbuffers, textures):
    """Rewrite non-main hlsl-style functions so their signatures explicitly
    receive the cbuffer members and textures they reference (metal has no
    globals). Returns (rewritten_function_source, func_sig_additions) where
    func_sig_additions maps function name -> list of extra argument names that
    must also be appended at every call site.
    """
    # split each cbuffer body into alternating [type, name, type, name, ...]
    cbuf_members_list = []
    for c in cbuffers:
        cbuf_members = parse_and_split_block(c)
        cbuf_members_list.append(cbuf_members)
    # turn each texture macro decl into (name, "<type>_arg(name)") pairs
    texture_list = textures.split(";")
    texture_args = []
    for t in texture_list:
        cpos = t.find(",")
        if cpos == -1:
            continue
        spos = t.find("(")
        macro_args = t[spos + 1:].split(",")
        tex_type = t[:spos] + "_arg"
        name_pos = 0
        # texture_2dms macro has (data_type, fragments, name, unit): name is arg 2
        if t.find("texture_2dms") != -1:
            name_pos = 2
        name = macro_args[name_pos].strip()
        texture_args.append((name, tex_type + "(" + name + ")"))
    fl = find_functions(functions)
    final_funcs = ""
    func_sig_additions = dict()
    for f in fl:
        # bp/ep bound the parameter list; fb is the ")" plus function body;
        # fn is the function name (between return type and "(")
        bp = f.find("(")
        ep = f.find(")")
        fb = f[ep:]
        fn = f.find(" ")
        fn = f[fn+1:bp]
        sig = f[:bp+1]
        count = 0
        # insert cbuf members
        for c in cbuf_members_list:
            for i in range(0, len(c), 2):
                ap = c[i+1].find("[")
                member = c[i+1]
                if ap != -1:
                    # array member: strip the [..] suffix and pass by pointer
                    member = member[:ap]
                if find_token(member, fb) != -1:
                    if count > 0:
                        sig += ",\n"
                    if fn in func_sig_additions.keys():
                        func_sig_additions[fn].append(member)
                    else:
                        func_sig_additions[fn] = [member]
                    ref_type = "& "
                    if ap != -1:
                        ref_type = "* "
                    sig += "constant " + c[i] + ref_type + member
                    count += 1
        # insert texture members
        for t in texture_args:
            if find_token(t[0], fb) != -1:
                if count > 0:
                    sig += ",\n"
                sig += t[1]
                count += 1
                # call sites must also pass the paired sampler
                if fn in func_sig_additions.keys():
                    func_sig_additions[fn].append(t[0])
                    func_sig_additions[fn].append("sampler_" + t[0])
                else:
                    func_sig_additions[fn] = [t[0]]
                    func_sig_additions[fn].append("sampler_" + t[0])
        if bp != -1 and ep != -1:
            # re-emit the original parameters, converting hlsl in/out/inout
            # qualifiers to metal address-space references
            args = f[bp+1:ep]
            arg_list = args.split(",")
            for arg in arg_list:
                if count > 0:
                    sig += ",\n"
                count += 1
                address_space = "thread"
                toks = arg.split(" ")
                if '' in toks:
                    toks.remove('')
                if '\n' in toks:
                    toks.remove('\n')
                ref = False
                for t in toks:
                    if t == "out" or t == "inout":
                        ref = True
                    if t == "in":
                        address_space = "constant"
                        ref = True
                if not ref:
                    sig += arg
                else:
                    # NOTE(review): assumes toks == [qualifier, type, name] here
                    array = toks[2].find("[")
                    if array == -1:
                        sig += address_space + " " + toks[1] + "& " + toks[2]
                    else:
                        sig += address_space + " " + toks[1] + "* " + toks[2][:array]
        # find used cbuf memb
        func = sig + fb
        final_funcs += func
    return final_funcs, func_sig_additions
# cascade through and pass textures and buffers to function calls in metal source code
def insert_function_sig_additions(function_body, function_sig_additions):
    """Append the extra arguments recorded by metal_functions after each call
    site of the corresponding function inside function_body.

    Each whole-word occurrence of a function name is assumed to be followed
    immediately by '('; the listed argument names are inserted right after it,
    each followed by ", ".
    """
    for fn_name, extra_args in function_sig_additions.items():
        search_from = 0
        while True:
            hit = find_token(fn_name, function_body[search_from:])
            if hit == -1:
                break
            # position just after the '(' that follows the function name
            insert_at = search_from + hit + len(fn_name) + 1
            injected = "".join(arg + ", " for arg in extra_args)
            function_body = function_body[:insert_at] + injected + function_body[insert_at:]
            # resume scanning from the character before the insertion point
            search_from = insert_at - 1
    return function_body
# compile shader for apple metal
def compile_metal(_info, pmfx_name, _tp, _shader):
    """Generate metal shading language source for a single shader stage and
    (optionally) compile it with xcrun metal / metallib.

    Assembles the .metal translation unit as one big string: cbuffer structs,
    packed vertex input structs, io structs, rewritten helper functions, then
    the stage entry point with all resources passed as arguments. When
    _info.compiled is false the generated source is written out as-is;
    otherwise it is compiled to .air and archived into a metallib. Compile
    errors are recorded on _tp (error_code / error_list / output_list).
    """
    # parse inputs and outputs into semantics
    inputs, input_semantics = parse_io_struct(_shader.input_decl)
    outputs, output_semantics = parse_io_struct(_shader.output_decl)
    instance_inputs, instance_input_semantics = parse_io_struct(_shader.instance_input_decl)
    shader_source = "#include <metal_stdlib>\n"
    shader_source += "using namespace metal;\n"
    shader_source += "#define BUF_OFFSET " + str(_info.cbuffer_offset) + "\n"
    shader_source += _info.macros_source
    # struct decls
    shader_source += _tp.struct_decls
    stream_out = False
    if "stream_out" in _tp.technique.keys():
        if _tp.technique["stream_out"]:
            stream_out = True
    # cbuffer decls: each hlsl "cbuffer name : register(bN) { ... };" becomes
    # "struct c_name { ... };" and (name, N) is remembered for binding later
    metal_cbuffers = []
    for cbuf in _shader.cbuffers:
        name_start = cbuf.find(" ")
        name_end = cbuf.find(":")
        body_start = cbuf.find("{")
        body_end = cbuf.find("};") + 2
        register_start = cbuf.find("(") + 1
        register_end = cbuf.find(")")
        name = cbuf[name_start:name_end].strip()
        reg = cbuf[register_start:register_end]
        reg = reg.replace('b', '')
        metal_cbuffers.append((name, reg))
        shader_source += "struct c_" + name + "\n"
        shader_source += cbuf[body_start:body_end]
        shader_source += "\n"
    # packed inputs
    vs_stage_in = _info.stage_in
    attrib_index = 0
    if _shader.shader_type == "vs":
        if vs_stage_in:
            # stage_in path: vertex + instance attributes share one packed
            # struct, each member tagged with [[attribute(n)]]
            if len(inputs) > 0:
                shader_source += "struct packed_" + _shader.input_struct_name + "\n{\n"
                for i in range(0, len(inputs)):
                    shader_source += get_metal_packed_decl(vs_stage_in, inputs[i], input_semantics[i])
                    shader_source += " [[attribute(" + str(attrib_index) + ")]]"
                    shader_source += ";\n"
                    attrib_index += 1
                if _shader.instance_input_struct_name:
                    for i in range(0, len(instance_inputs)):
                        shader_source += get_metal_packed_decl(vs_stage_in, instance_inputs[i], instance_input_semantics[i])
                        shader_source += " [[attribute(" + str(attrib_index) + ")]]"
                        shader_source += ";\n"
                        attrib_index += 1
                shader_source += "};\n"
        else:
            # manual-fetch path: separate packed structs indexed by vid / iid
            if len(inputs) > 0:
                shader_source += "struct packed_" + _shader.input_struct_name + "\n{\n"
                for i in range(0, len(inputs)):
                    shader_source += get_metal_packed_decl(vs_stage_in, inputs[i], input_semantics[i])
                    shader_source += ";\n"
                    attrib_index += 1
                shader_source += "};\n"
            if _shader.instance_input_struct_name:
                if len(instance_inputs) > 0:
                    shader_source += "struct packed_" + _shader.instance_input_struct_name + "\n{\n"
                    for i in range(0, len(instance_inputs)):
                        shader_source += get_metal_packed_decl(vs_stage_in, instance_inputs[i], instance_input_semantics[i])
                        shader_source += ";\n"
                        attrib_index += 1
                    shader_source += "};\n"
    # inputs
    if len(inputs) > 0:
        shader_source += "struct " + _shader.input_struct_name + "\n{\n"
        for i in range(0, len(inputs)):
            shader_source += inputs[i] + ";\n"
        shader_source += "};\n"
    if _shader.instance_input_struct_name:
        if len(instance_inputs) > 0:
            shader_source += "struct " + _shader.instance_input_struct_name + "\n{\n"
            for i in range(0, len(instance_inputs)):
                shader_source += instance_inputs[i] + ";\n"
            shader_source += "};\n"
    # outputs: translate hlsl system-value semantics to metal attributes
    if len(outputs) > 0:
        shader_source += "struct " + _shader.output_struct_name + "\n{\n"
        for i in range(0, len(outputs)):
            shader_source += outputs[i]
            if output_semantics[i].find("SV_POSITION") != -1:
                shader_source += " [[position]]"
            # mrt
            sv_pos = output_semantics[i].find("SV_Target")
            if sv_pos != -1:
                channel_pos = sv_pos + len("SV_Target")
                if channel_pos < len(output_semantics[i]):
                    shader_source += " [[color(" + output_semantics[i][channel_pos] + ")]]"
                else:
                    shader_source += " [[color(0)]]"
            sv_pos = output_semantics[i].find("SV_Depth")
            if sv_pos != -1:
                shader_source += " [[depth(any)]]"
            shader_source += ";\n"
        shader_source += "};\n"
    main_type = {
        "vs": "vertex",
        "ps": "fragment",
        "cs": "kernel"
    }
    # functions
    function_source, function_sig_additions = metal_functions(_shader.functions_source,
                                                              _shader.cbuffers, _shader.resource_decl)
    shader_source += function_source
    # main decl
    stream_out_name = _shader.output_struct_name
    if stream_out:
        # stream-out shaders write into a buffer instead of returning a value
        _shader.output_struct_name = "void"
    # sv sematics
    vertex_id_var = "vid"
    instance_id_var = "iid"
    for sv in _shader.sv_semantics:
        if sv[0] == "SV_InstanceID":
            instance_id_var = sv[2]
        elif sv[0] == "SV_VertexID":
            vertex_id_var = sv[2]
    shader_source += main_type[_shader.shader_type] + " "
    shader_source += _shader.output_struct_name + " " + _shader.shader_type + "_main" + "("
    if _shader.shader_type == "vs":
        shader_source += "\n uint " + vertex_id_var + " [[vertex_id]]"
        shader_source += "\n, uint " + instance_id_var + " [[instance_id]]"
    if _shader.shader_type == "vs" and not vs_stage_in:
        shader_source += "\n, const device packed_" + _shader.input_struct_name + "* vertices" + "[[buffer(0)]]"
        if _shader.instance_input_struct_name:
            if len(instance_inputs) > 0:
                shader_source += "\n, const device packed_" + _shader.instance_input_struct_name + "* instances" + "[[buffer(1)]]"
    elif _shader.shader_type == "vs":
        shader_source += "\n, packed_" + _shader.input_struct_name + " in_vertex [[stage_in]]"
    elif _shader.shader_type == "ps":
        shader_source += _shader.input_struct_name + " input [[stage_in]]"
    elif _shader.shader_type == "cs":
        shader_source += "uint3 gid[[thread_position_in_grid]]"
    # vertex stream out
    if stream_out:
        shader_source += "\n, device " + stream_out_name + "* stream_out_vertices" + "[[buffer(7)]]"
    # pass in textures and buffers
    invalid = ["", "\n"]
    texture_list = _shader.resource_decl.split(";")
    for texture in texture_list:
        if texture not in invalid:
            shader_source += "\n, " + texture.strip("\n")
    cbuffer_offset = _info.cbuffer_offset
    # pass in cbuffers.. cbuffers start at cbuffer_offset reserving space for (cbuffer_offset-1) vertex buffers..
    for cbuf in metal_cbuffers:
        regi = int(cbuf[1]) + cbuffer_offset
        shader_source += "\n, " + "constant " "c_" + cbuf[0] + " &" + cbuf[0] + " [[buffer(" + str(regi) + ")]]"
    shader_source += ")\n{\n"
    vertex_array_index = "(vertices[" + vertex_id_var + "]."
    instance_array_index = "(instances[" + instance_id_var + "]."
    if vs_stage_in:
        vertex_array_index = "(in_vertex."
        instance_array_index = "(in_vertex."
    # create function prologue for main and insert assignment to unpack vertex
    from_ubyte = "0.00392156862"  # 1.0 / 255.0
    if _shader.shader_type == "vs":
        shader_source += _shader.input_struct_name + " input;\n"
        v_inputs = [(inputs, input_semantics, "input.", vertex_array_index)]
        if _shader.instance_input_struct_name:
            if len(instance_inputs) > 0:
                shader_source += _shader.instance_input_struct_name + " instance_input;\n"
                v_inputs.append((instance_inputs, instance_input_semantics, "instance_input.", instance_array_index))
        for vi in v_inputs:
            for i in range(0, len(vi[0])):
                split_input = vi[0][i].split(" ")
                input_name = split_input[1]
                input_unpack_type = split_input[0]
                shader_source += vi[2] + input_name + " = "
                shader_source += input_unpack_type
                shader_source += vi[3] + input_name
                # convert ubyte to float
                if convert_ubyte_to_float(vi[1][i]):
                    # NOTE(review): no trailing "\n" here unlike the else
                    # branch - presumably format_source normalises this
                    shader_source += ") * " + from_ubyte + ";"
                else:
                    shader_source += ");\n"
    used_code = function_source + " " + _shader.main_func_source
    # create a function prologue for cbuffer assignment
    for c in range(0, len(_shader.cbuffers)):
        cbuf_members = parse_and_split_block(_shader.cbuffers[c])
        for i in range(0, len(cbuf_members), 2):
            ref_type = "& "
            point = ""
            decl = cbuf_members[i + 1]
            assign = decl
            array_pos = cbuf_members[i + 1].find("[")
            if array_pos != -1:
                # array members: alias a pointer to the first element
                decl = decl[:array_pos]
                ref_type = "* "
                assign = decl + "[0]"
                point = "&"
            # check for use
            if find_token(decl, used_code) == -1:
                continue
            shader_source += "constant " + cbuf_members[i] + ref_type + decl
            shader_source += " = " + point + metal_cbuffers[c][0] + "." + assign
            shader_source += ";\n"
    main_func_body = _shader.main_func_source.find("{") + 1
    main_body_source = _shader.main_func_source[main_func_body:]
    main_body_source = insert_function_sig_additions(main_body_source, function_sig_additions)
    shader_source += main_body_source
    shader_source = format_source(shader_source, 4)
    if stream_out:
        shader_source = shader_source.replace("return output;", "stream_out_vertices[vid] = output;")
    temp_path = os.path.join(_info.temp_dir, pmfx_name)
    output_path = os.path.join(_info.output_dir, pmfx_name)
    os.makedirs(temp_path, exist_ok=True)
    os.makedirs(output_path, exist_ok=True)
    extension = {
        "vs": "_vs.metal",
        "ps": "_ps.metal",
        "cs": "_cs.metal"
    }
    intermdiate_extension = {
        "vs": "_vs.air",
        "ps": "_ps.air",
        "cs": "_cs.air"
    }
    output_extension = {
        "vs": ".vsc",
        "ps": ".psc",
        "cs": ".csc"
    }
    temp_file_and_path = os.path.join(temp_path, _tp.name + extension[_shader.shader_type])
    output_file_and_path = os.path.join(output_path, _tp.name + output_extension[_shader.shader_type])
    compiled = _info.compiled
    if not compiled:
        # source-only output: ship the generated .metal text as the "compiled" file
        temp_shader_source = open(output_file_and_path, "w")
        temp_shader_source.write(shader_source)
        temp_shader_source.close()
        return 0
    else:
        # default to metal 2.0, but allow cmdline override
        metal_version = "2.0"
        if _tp.shader_version != "0":
            metal_version = _tp.shader_version
        # selection of sdk, macos, ios, tvos
        metal_sdk = "macosx"
        if _info.metal_sdk != "":
            metal_sdk = _info.metal_sdk
        # insert some defaults fo version min based on os
        metal_min_os = ""
        if metal_sdk == "macosx":
            metal_min_os = "10.11"
            if _info.metal_min_os != "":
                metal_min_os = _info.metal_min_os
            metal_min_os = "-mmacosx-version-min=" + metal_min_os
        elif metal_sdk == "iphoneos":
            metal_min_os = "9.0"
            if _info.metal_min_os != "":
                metal_min_os = _info.metal_min_os
            metal_min_os = "-mios-version-min=" + metal_min_os
        elif metal_sdk == "appletvos":
            metal_min_os = "13.0"
            if _info.metal_min_os != "":
                metal_min_os = _info.metal_min_os
            metal_min_os = "-mtvos-version-min=" + metal_min_os
        # finally set metal -std.
        if metal_sdk == "iphoneos" or metal_sdk == "appletvos":
            metal_version = "-std=ios-metal" + metal_version
        else:
            metal_version = "-std=macos-metal" + metal_version
        temp_shader_source = open(temp_file_and_path, "w")
        temp_shader_source.write(shader_source)
        temp_shader_source.close()
        intermediate_file_and_path = temp_file_and_path.replace(extension[_shader.shader_type], intermdiate_extension[_shader.shader_type])
        # compile .air
        cmdline = "xcrun -sdk " + metal_sdk + " metal " + metal_min_os + " " + metal_version + " -c "
        cmdline += temp_file_and_path + " "
        cmdline += "-o " + intermediate_file_and_path
        error_code, error_list, output_list = call_wait_subprocess(cmdline)
        if error_code == 0:
            # archive the .air into a metallib
            cmdline = "xcrun -sdk " + metal_sdk + " metallib "
            cmdline += intermediate_file_and_path + " "
            cmdline += "-o " + output_file_and_path + " "
            cmdline += _info.args
            error_code, error_list_2, output_list_2 = call_wait_subprocess(cmdline)
            error_list.extend(error_list_2)
            output_list.extend(output_list_2)
        if error_code != 0:
            _tp.error_code = error_code
            _tp.error_list = error_list
            _tp.output_list = output_list
# generate a shader info file with an array of technique permutation descriptions and dependency timestamps
def generate_shader_info(filename, included_files, techniques):
    """Write the <name>.json build-info file for a compiled pmfx file and
    return its contents as a dict.

    The file records the build command line, technique/permutation metadata,
    failures, and a timestamped dependency entry for every file that affects
    the validity of the compiled shaders. Note: mutates included_files by
    prepending the pmfx file itself.
    """
    info_filename, base_filename, dir_path = get_resource_info_filename(filename, _info.output_dir)
    # special files which affect the validity of compiled shaders
    deps = [
        create_dependency(_info.this_file),
        create_dependency(_info.macros_file),
        create_dependency(_info.platform_macros_file)
    ]
    included_files.insert(0, os.path.join(dir_path, base_filename))
    for ifile in included_files:
        deps.append(create_dependency(os.path.join(_info.root_dir, ifile)))
    shader_info = {
        "cmdline": _info.cmdline_string,
        "files": deps,
        "techniques": techniques["techniques"],
        "failures": techniques["failures"]
    }
    with open(info_filename, 'wb+') as output_info:
        output_info.write(json.dumps(shader_info, indent=4).encode('UTF-8'))
    return shader_info
# generate json description of vs inputs and outputs
def generate_input_info(inputs):
    """Build a list of attribute descriptors (name, semantic, sizes, offset)
    from an hlsl io struct declaration.

    parse_and_split_block yields flat triples of (type, name, semantic);
    element sizes come from the semantic table below (COLOR / BLENDINDICES
    are 1 byte per element, everything else 4).
    """
    semantic_info = [
        ["SV_POSITION", "4"],
        ["POSITION", "4"],
        ["TEXCOORD", "4"],
        ["NORMAL", "4"],
        ["TANGENT", "4"],
        ["BITANGENT", "4"],
        ["BLENDWEIGHTS", "4"],
        ["COLOR", "1"],
        ["BLENDINDICES", "1"]
    ]
    type_info = ["int", "uint", "float", "double"]
    input_desc = []
    inputs_split = parse_and_split_block(inputs)
    offset = int(0)
    for i in range(0, len(inputs_split), 3):
        num_elements = 1
        element_size = 1
        # derive vector width from the type name, e.g. "float4" -> 4
        for type in type_info:  # NOTE(review): `type` shadows the builtin
            if inputs_split[i].find(type) != -1:
                str_num = inputs_split[i].replace(type, "")
                if str_num != "":
                    num_elements = int(str_num)
        # NOTE(review): semantic_id / semantic_index are only assigned when a
        # known semantic matches; an unknown semantic on the first element
        # would raise NameError, and on later elements silently reuses the
        # previous iteration's values - confirm all semantics are in the table
        for sem in semantic_info:
            if inputs_split[i+2].find(sem[0]) != -1:
                semantic_id = semantic_info.index(sem)
                semantic_name = sem[0]
                semantic_index = inputs_split[i+2].replace(semantic_name, "")
                if semantic_index == "":
                    semantic_index = "0"
                element_size = sem[1]
                break
        size = int(element_size) * int(num_elements)
        input_attribute = {
            "name": inputs_split[i+1],
            "semantic_index": int(semantic_index),
            "semantic_id": int(semantic_id),
            "size": int(size),
            "element_size": int(element_size),
            "num_elements": int(num_elements),
            "offset": int(offset),
        }
        input_desc.append(input_attribute)
        offset += size
    return input_desc
# generate metadata for the technique with info about textures, cbuffers, inputs, outputs, binding points and more
def generate_technique_permutation_info(_tp):
    """Populate _tp.technique with reflection metadata (resource bindings,
    cbuffers, vertex io layouts, output filenames, permutation info) and
    return it. This dict becomes one entry of the generated .json info file.
    """
    _tp.technique["name"] = _tp.technique_name
    # textures
    shader_resources_split = parse_and_split_block(_tp.resource_decl)
    i = 0
    _tp.technique["texture_sampler_bindings"] = []
    _tp.technique["structured_buffers"] = []
    _tp.technique["descriptor_tables"] = []
    _tp.technique["samplers"] = []
    # walk the flat token list; `offset` is adjusted per resource kind so that
    # i = offset+3 always lands on the next resource's type token
    while i < len(shader_resources_split):
        offset = i
        res_type = shader_resources_split[i+0]
        # structured buffers
        if res_type.find("structured_buffer") != -1:
            offset = i+1
            buffer_desc = {
                "type": shader_resources_split[i+1],
                "name": shader_resources_split[i+2],
                "location": shader_resources_split[i+3]
            }
            _tp.technique["structured_buffers"].append(buffer_desc)
        elif res_type.find("_table") != -1:
            # bindless descriptor tables carry 5 extra tokens
            table_desc = {
                "name": shader_resources_split[offset+1],
                "data_type": shader_resources_split[offset+2],
                "dimension": shader_resources_split[offset+3],
                "type": res_type,
                "unit": int(shader_resources_split[offset+4]),
                "space": int(shader_resources_split[offset+5])
            }
            _tp.technique["descriptor_tables"].append(table_desc)
            offset = i+3
        elif res_type.find("sampler_state") != -1:
            sampler_desc = {
                "name": shader_resources_split[offset+1],
                "unit": int(shader_resources_split[offset+2])
            }
            _tp.technique["samplers"].append(sampler_desc)
        else:
            # textures
            if res_type == "texture_2dms":
                # multisample textures declare (data_type, fragments, name, unit)
                data_type = shader_resources_split[i+1]
                fragments = shader_resources_split[i+2]
                offset = i+2
            else:
                data_type = "float4"
                fragments = 1
            sampler_desc = {
                "name": shader_resources_split[offset+1],
                "data_type": data_type,
                "fragments": fragments,
                "type": res_type,
                "unit": int(shader_resources_split[offset+2])
            }
            _tp.technique["texture_sampler_bindings"].append(sampler_desc)
        i = offset+3
    # cbuffers: extract name, register slot and optional register space from
    # "cbuffer name : register(bN[, spaceM])"
    _tp.technique["cbuffers"] = []
    for buffer in _tp.cbuffers:
        pos = buffer.find("{")
        if pos == -1:
            continue
        buffer_decl = buffer[0:pos-1]
        buffer_decl_split = buffer_decl.split(":")
        buffer_name = buffer_decl_split[0].split()[1]
        buffer_loc_start = buffer_decl_split[1].find("(") + 1
        buffer_loc_end = buffer_decl_split[1].find(")", buffer_loc_start)
        buffer_reg = buffer_decl_split[1][buffer_loc_start:buffer_loc_end]
        buffer_reg = buffer_reg.strip('b')
        space = -1
        cpos = buffer_reg.find(",")
        if cpos != -1:
            space = buffer_reg[cpos+1:].strip().strip('space')
            buffer_reg = buffer_reg[:cpos]
        buffer_desc = {"name": buffer_name, "location": int(buffer_reg), "space": int(space)}
        _tp.technique["cbuffers"].append(buffer_desc)
    # io structs from vs.. vs input, instance input, vs output (ps input)
    _tp.technique["vs_inputs"] = generate_input_info(_tp.shader[0].input_decl)
    _tp.technique["instance_inputs"] = generate_input_info(_tp.shader[0].instance_input_decl)
    _tp.technique["vs_outputs"] = generate_input_info(_tp.shader[0].output_decl)
    # vs and ps files
    if "vs" in _tp.filenames.keys():
        _tp.technique["vs_file"] = _tp.filenames["vs"] + ".vsc"
    if "ps" in _tp.filenames.keys():
        _tp.technique["ps_file"] = _tp.filenames["ps"] + ".psc"
    if "cs" in _tp.filenames.keys():
        _tp.technique["cs_file"] = _tp.filenames["cs"] + ".csc"
    # permutation
    _tp.technique["permutations"] = _tp.permutation_options
    _tp.technique["permutation_id"] = _tp.id
    _tp.technique["permutation_option_mask"] = _tp.mask
    return _tp.technique
# compiles single shader using platform specific compiler or validator, _tp is technique / permutation info
def compile_single_shader(_tp):
    """Compile every non-duplicate shader stage of one technique permutation
    using the backend selected by _info.shader_platform.
    """
    backends = {
        "hlsl": compile_hlsl,
        "pssl": compile_pssl,
        "glsl": compile_glsl,
        "metal": compile_metal
    }
    for stage in _tp.shader:
        # duplicate stages reuse another permutation's compiled output
        if stage.duplicate:
            continue
        backend = backends.get(_info.shader_platform)
        if backend:
            backend(_info, _tp.pmfx_name, _tp, stage)
        else:
            print_error("error: invalid shader platform " + _info.shader_platform)
# parse a pmfx file which is a collection of techniques and permutations, made up of vs, ps, cs combinations
def parse_pmfx(file, root):
    """Process one .pmfx file: expand permutations for every technique,
    compile each stage on a worker thread, then write the .json build-info
    file and (optionally) a C++ header mirroring structs and cbuffers.

    :param file: pmfx filename (relative to root)
    :param root: directory containing the file
    """
    global _info
    # new pmfx info
    _pmfx = PmfxInfo()
    file_and_path = os.path.join(root, file)
    shader_file_text, included_files = create_shader_set(file_and_path, root)
    _pmfx.json, _pmfx.source = find_pmfx_json(shader_file_text)
    # _pmfx.source = shader_file_text
    _pmfx.json_text = json.dumps(_pmfx.json)
    # pmfx file may be an include or library module containing only functions
    if not _pmfx.json:
        return
    # check dependencies
    force = False
    up_to_date = check_dependencies(file_and_path, included_files)
    if up_to_date and not force:
        print(file + ": up-to-date", flush=True)
        return
    print(file, flush=True)
    c_code = ""
    pmfx_name = os.path.basename(file).replace(".pmfx", "")
    pmfx_output_info = dict()
    pmfx_output_info["techniques"] = []
    # add cbuffers and structs as c structs
    c_code += "namespace " + pmfx_name + "\n{\n"
    global_cbuffers = find_constant_buffers(_pmfx.source)
    # structs
    global_structs = find_struct_declarations(_pmfx.source)
    for s in global_structs:
        c_code += s
    # cbuffers
    for buf in global_cbuffers:
        decl = buf[:buf.find(":")].split(" ")
        c_code += "\nstruct " + decl[1] + "\n"
        body = buf.find("{")
        c_code += buf[body:]
    # for techniques in pmfx
    success = True  # NOTE(review): never read again - appears to be unused
    compile_jobs = []
    for technique in _pmfx.json:
        # re-load from json_text so per-permutation mutation starts clean
        pmfx_json = json.loads(_pmfx.json_text)
        technique_json = pmfx_json[technique].copy()
        technique_json = inherit_technique(technique_json, pmfx_json)
        technique_permutations, permutation_options, mask, define_list, c_defines = generate_permutations(technique, technique_json)
        c_code += c_defines
        # for permutations in technique
        for permutation in technique_permutations:
            pmfx_json = json.loads(_pmfx.json_text)
            _tp = TechniquePermutationInfo()
            _tp.pmfx_name = pmfx_name
            _tp.shader = []
            _tp.cbuffers = []
            # gather technique permutation info
            _tp.id = generate_permutation_id(define_list, permutation)
            _tp.permutation = permutation
            _tp.technique_name = technique
            _tp.technique = inherit_technique(pmfx_json[technique], pmfx_json)
            _tp.mask = mask
            _tp.permutation_options = permutation_options
            valid = True
            _tp.shader_version = _info.shader_version
            if "supported_platforms" in _tp.technique:
                p = shader_sub_platform()
                sp = _tp.technique["supported_platforms"]
                if p not in sp:
                    print_warning("warning: " + _tp.technique_name + " not supported on " + p)
                    valid = False
                else:
                    sv = sp[p]
                    if "all" in sv:
                        pass
                    elif _tp.shader_version not in sv:
                        valid = False
                        print_warning("warning: " + _tp.technique_name + " not supported on " +
                                      p + " " + _info.shader_version +
                                      ", forcing to version " + sv[0])
                        # force shader version to specified
                        # NOTE(review): valid stays False here, so the forced
                        # version is never actually compiled - confirm intent
                        _tp.shader_version = sv[0]
            if not valid:
                continue
            # permutation id 0 is the unpermuted technique and keeps its name
            if _tp.id != 0:
                _tp.name = _tp.technique_name + "__" + str(_tp.id) + "__"
            else:
                _tp.name = _tp.technique_name
            # strip condition permutations from source
            permutation.append((_info.shader_platform.upper(), 1))
            permutation.append((shader_sub_platform().upper(), 1))
            _tp.source = evaluate_conditional_blocks(_pmfx.source, permutation)
            # get permutation constants..
            _tp.technique = get_permutation_conditionals(_tp.technique, _tp.permutation)
            # global cbuffers
            _tp.cbuffers = find_constant_buffers(_pmfx.source)
            # technique, permutation specific constants
            _tp.technique, c_struct, tp_cbuffer = generate_technique_constant_buffers(pmfx_json, _tp)
            c_code += c_struct
            # add technique / permutation specific cbuffer to the list
            _tp.cbuffers.append(tp_cbuffer)
            # technique, permutation specific textures..
            _tp.textures = generate_technique_texture_variables(_tp)
            _tp.resource_decl = find_shader_resources(_tp.source)
            # add technique textures
            if _tp.textures:
                _tp.resource_decl += generate_texture_decl(_tp.textures)
            # find functions
            _tp.functions = find_functions(_tp.source)
            # find structs
            struct_list = find_struct_declarations(_tp.source)
            _tp.struct_decls = ""
            for struct in struct_list:
                _tp.struct_decls += struct + "\n"
            # number of threads for cs
            if "threads" in pmfx_json[technique]:
                threads = pmfx_json[technique]["threads"]
                _tp.threads = [1, 1, 1]
                for i in range(0, len(threads)):
                    _tp.threads[i] = threads[i]
            # generate single shader data
            shader_types = ["vs", "ps", "cs"]
            for s in shader_types:
                if s in _tp.technique.keys():
                    single_shader = generate_single_shader(_tp.technique[s], _tp)
                    if single_shader:
                        single_shader.shader_type = s
                    if single_shader:
                        _tp.shader.append(single_shader)
            compile_jobs.append(copy.copy(_tp))
    # find duplicated / redundant permutation combinations
    unique = dict()
    for j in compile_jobs:
        j.filenames = dict()
        for s in j.shader:
            hash = shader_hash(s)
            # NOTE(review): membership tests `hash` but keys are stored as
            # str(hash) - equivalent only if shader_hash returns str; verify
            if hash not in unique:
                s.duplicate = False
                unique[str(hash)] = j.name
                j.filenames[s.shader_type] = j.name
            else:
                s.duplicate = True
                j.filenames[s.shader_type] = unique[str(hash)]
    # compile each job on its own thread
    threads = []
    for j in compile_jobs:
        x = threading.Thread(target=compile_single_shader, args=(j,))
        threads.append(x)
        x.start()
    # wait for threads
    for t in threads:
        t.join()
    # report per-job results and collect reflection metadata
    pmfx_output_info["failures"] = dict()
    for i in range(0, len(compile_jobs)):
        c = compile_jobs[i]
        str_id = ""
        if c.id != 0:
            str_id = "__" + str(c.id) + "__"
        output_name = c.pmfx_name + "::" + c.technique_name + str_id
        if c.error_code == 0:
            print(output_name, flush=True)
        else:
            print_error(output_name + " failed to compile")
            pmfx_output_info["failures"][c.pmfx_name] = True
            _info.error_code = 1
            for out in c.output_list:
                print(out, flush=True)
            for err in c.error_list:
                print_error(" " + err)
        pmfx_output_info["techniques"].append(generate_technique_permutation_info(compile_jobs[i]))
    # write a shader info file with timestamp for dependencies
    generate_shader_info(file_and_path, included_files, pmfx_output_info)
    # write out a c header for accessing materials in code
    if len(_info.struct_dir) > 0:
        if c_code != "":
            c_code += "}\n"
            fmt = ""
            lines = c_code.split("\n")
            if len(lines) > 3:
                # simple brace-based re-indenter for the generated header
                indents = 0
                for l in lines:
                    if l == "":
                        continue
                    if l.find("}") != -1:
                        indents -= 1
                    for i in range(0, indents):
                        fmt += " "
                    fmt += l.strip() + "\n"
                    if l.find("{") != -1:
                        indents += 1
                h_filename = file.replace(".pmfx", ".h")
                h_filename = os.path.basename(h_filename)
                if not os.path.exists(_info.struct_dir):
                    os.mkdir(_info.struct_dir)
                h_filename = os.path.join(_info.struct_dir, h_filename)
                h_file = open(h_filename, "w+")
                h_file.write(fmt)
                h_file.close()
# handles some hardcoded cases of platform varitions
def configure_sub_platforms():
    """Remap sub-platform aliases onto their base compiler platform.

    spirv / gles / nvn all compile through the glsl backend; the original
    alias is kept in _info.shader_sub_platform, and spirv additionally pins
    the shader version to 450.
    """
    global _info
    platform = _info.shader_platform
    if platform == "spirv":
        _info.shader_platform = "glsl"
        _info.shader_version = "450"
        _info.shader_sub_platform = "spirv"
    elif platform in ("gles", "nvn"):
        _info.shader_platform = "glsl"
        _info.shader_sub_platform = platform
# get global info which contains the parsed args and other info
def get_info():
    """Return the module-wide BuildInfo instance (parsed args, paths, state)."""
    return _info
# sets the global info
def set_info(info):
    """Install *info* as the module-wide BuildInfo instance."""
    global _info
    _info = info
# main function to avoid shadowing
def main(parse_function, version):
    """Entry point for the v1 pmfx command line.

    Builds the global BuildInfo (_info), parses command-line args, loads the
    global and platform macro headers, then runs parse_function on every
    .pmfx file found under the input paths. Exits the process with
    _info.error_code so CI can detect failed shader compilation.

    :param parse_function: callable(file, root) applied to each .pmfx file
    :param version: version string printed in the banner

    Fixes: removed a duplicated unconditional re-assignment of
    _info.pmfx_dir (it recomputed the same value in both branches), and the
    macro files are now opened with context managers so handles are closed
    even if reading raises.
    """
    print("--------------------------------------------------------------------------------", flush=True)
    print("pmfx shader (v{}) -------------------------------------------------------------".format(version), flush=True)
    print("--------------------------------------------------------------------------------", flush=True)
    global _info
    _info = BuildInfo()
    _info.error_code = 0
    # get dirs for build output
    _info.root_dir = os.getcwd()
    _info.this_file = os.path.realpath(__file__)
    parse_args()
    configure_sub_platforms()
    # configure for running via script or via exe
    if getattr(sys, 'frozen', False):
        # exe location with bin/ and platform/ same dir
        _info.pmfx_dir = os.path.dirname(sys.executable)
        _info.this_file = sys.executable
    else:
        # script location with bin/ and platform/ in same dir
        _info.pmfx_dir = os.path.dirname(_info.this_file)
    _info.macros_file = os.path.join(_info.pmfx_dir, "platform", "pmfx.h")
    _info.platform_macros_file = os.path.join(_info.pmfx_dir, "platform", _info.shader_platform + ".h")
    _info.tools_dir = _info.pmfx_dir
    # global shader macros for glsl, hlsl and metal portability
    # (platform-specific header first, then the shared pmfx.h)
    with open(_info.platform_macros_file) as mf:
        _info.macros_source = mf.read()
    with open(_info.macros_file) as mf:
        _info.macros_source += mf.read()
    source_list = _info.inputs
    for source in source_list:
        if os.path.isdir(source):
            for root, dirs, files in os.walk(source):
                for file in files:
                    if file.endswith(".pmfx"):
                        try:
                            parse_function(file, root)
                        except Exception as e:
                            print_error("error: while processing {}".format(os.path.join(root, file)))
                            raise e
        else:
            parse_function(source, "")
    # error code for ci
    sys.exit(_info.error_code)
# builds self into an exe
def build_executable():
    """Package pmfx.py into a single-file executable with PyInstaller and
    bundle the shader headers and per-platform compiler binaries, then zip
    the resulting dist directory. Run from the pmfx source directory;
    requires PyInstaller to be installed.
    """
    # dist dir based on platform
    platform = get_platform_name()
    pyinstaller = {
        "win64": "pyinstaller",
        "osx": "python3 -m PyInstaller",
        "linux": "python3 -m PyInstaller"
    }
    # requires pyinstaller
    p = subprocess.Popen(
        "{} pmfx.py -i NONE --onefile --distpath dist/{} --workpath dist/build/{}".format(pyinstaller[platform], platform, platform), shell=True)
    p.wait()
    # copy relevant files
    import shutil
    # shader source
    shutil.copytree("platform", "dist/{}/platform".format(platform), dirs_exist_ok=True)
    # platform binaries
    if platform == "win64":
        shutil.copytree("bin/fxc", "dist/win64/bin/fxc", dirs_exist_ok=True)
        shutil.copytree("bin/dxc", "dist/win64/bin/dxc", dirs_exist_ok=True)
        shutil.copytree("bin/glsl/win64", "dist/win64/bin/glsl/win64", dirs_exist_ok=True)
    elif platform == "osx":
        shutil.copytree("bin/glsl/osx", "dist/osx/bin/glsl/osx", dirs_exist_ok=True)
    elif platform == "linux":
        shutil.copytree("bin/glsl/linux", "dist/linux/bin/glsl/linux", dirs_exist_ok=True)
    # zip
    exe_names = {
        "win64": "Windows-x64",
        "osx": "macOS-x64",
        "linux": "Linux-x64"
    }
    shutil.make_archive("dist/" + exe_names[platform], 'zip', "dist/{}".format(platform))
# entry
if __name__ == "__main__":
    # "-v1" selects the legacy single-file pmfx v1 pipeline implemented in
    # this module; otherwise defer to pmfx_pipeline.main() (presumably
    # imported at the top of the file - not visible here, confirm).
    if "-v1" in sys.argv:
        main(parse_pmfx, "1.1")
    else:
        pmfx_pipeline.main()
|
67da92a11b52b4049efd7adfc5176768bac04bbc
|
578db86c51d44ebddd0dc7b1738985b3dc69eb74
|
/corehq/apps/data_interfaces/migrations/0024_add_automaticupdaterule_upstream_id.py
|
0fa2884a93e7ef175eef35202828db4534842fa9
|
[
"BSD-3-Clause"
] |
permissive
|
dimagi/commcare-hq
|
a43c7dd32b5f89c89fd5aa1b1359ab7301f4ff6b
|
e7391ddae1af1dbf118211ecb52c83fc508aa656
|
refs/heads/master
| 2023-08-16T22:38:27.853437
| 2023-08-16T19:07:19
| 2023-08-16T19:07:19
| 247,278
| 499
| 203
|
BSD-3-Clause
| 2023-09-14T19:03:24
| 2009-07-09T17:00:07
|
Python
|
UTF-8
|
Python
| false
| false
| 422
|
py
|
0024_add_automaticupdaterule_upstream_id.py
|
# Generated by Django 2.2.24 on 2021-10-28 20:20
from django.db import migrations, models
class Migration(migrations.Migration):
    # Adds a nullable 32-char `upstream_id` field to AutomaticUpdateRule —
    # presumably an identifier linking a rule to its upstream source; verify
    # against the model definition.

    dependencies = [
        ('data_interfaces', '0023_auto_20210914_1726'),
    ]

    operations = [
        migrations.AddField(
            model_name='automaticupdaterule',
            name='upstream_id',
            field=models.CharField(max_length=32, null=True),
        ),
    ]
|
69d077fbc6e0e7849080e3fc3ed7398ad3d8180d
|
ec9fd70a945d9ad6c7018ed96ca71a65c76bb471
|
/onshape_to_robot/pure_sketch.py
|
1e485400a36cab93beb755e8b639e9f86d4e6895
|
[
"MIT"
] |
permissive
|
Rhoban/onshape-to-robot
|
0c252e37c09988d878bdaed2ffd42e460bd48411
|
f4716a23046bada1c90e3e1acd81686a6d1f2593
|
refs/heads/master
| 2023-08-17T23:22:54.592494
| 2023-03-14T15:52:19
| 2023-03-14T15:52:19
| 176,774,675
| 175
| 40
|
MIT
| 2023-09-05T23:31:58
| 2019-03-20T16:30:53
|
Python
|
UTF-8
|
Python
| false
| false
| 5,226
|
py
|
pure_sketch.py
|
import numpy as np
import math
import commentjson as json
import os
import sys, os
from colorama import Fore, Back, Style
def main():
    """CLI: approximate an Onshape part with pure shapes for OpenSCAD.

    Usage: ``onshape-to-robot-pure-shape {STL file} [prefix=PureShapes]``

    Loads the ``.part`` metadata next to the given STL, queries the Onshape
    API for the part's sketches, and converts every sketch whose name starts
    with ``prefix`` into extruded OpenSCAD primitives (circles become
    cylinders, point quadruplets become boxes). The generated ``.scad`` file
    is written next to the STL and opened with ``openscad``.
    """
    if len(sys.argv) < 2:
        print('Usage: onshape-to-robot-pure-shape {STL file} [prefix=PureShapes]')
    else:
        fileName = sys.argv[1]
        robotDir = os.path.dirname(fileName)
        configFile = os.path.join(robotDir, 'config.json')
        prefix = 'PureShapes'
        if len(sys.argv) > 2:
            prefix = sys.argv[2]
        from .onshape_api.client import Client
        client = Client(logging=False, creds=configFile)
        # Derive the sibling metadata (.part) and output (.scad) file names.
        parts = fileName.split('.')
        parts[-1] = 'part'
        partFileName = '.'.join(parts)
        parts[-1] = 'scad'
        scadFileName = '.'.join(parts)
        with open(partFileName, 'r', encoding="utf-8") as stream:
            part = json.load(stream)
        result = client.get_sketches(part['documentId'], part['documentMicroversion'], part['elementId'], part['configuration'])
        # Show the raw STL as a transparent reference (% modifier); scale by
        # 1000 because sketch coordinates are in meters while SCAD uses mm.
        scad = "% scale(1000) import(\""+os.path.basename(fileName)+"\");\n"
        # Keep only sketches named "<prefix> <thickness-in-mm>": the extrusion
        # thickness is encoded in the sketch name itself.
        sketchDatas = []
        for sketch in result['sketches']:
            if sketch['sketch'].startswith(prefix):
                parts = sketch['sketch'].split(' ')
                if len(parts) >= 2:
                    sketch['thickness'] = float(parts[1])
                else:
                    print(Fore.RED + "ERROR: The sketch name should contain extrusion size (e.g \"PureShapes 5.3\")"
                        + Style.RESET_ALL)
                    # Bug fix: exit with a non-zero status to signal failure
                    # (previously exit(0), which reported success on error).
                    sys.exit(1)
                sketchDatas.append(sketch)
        if len(sketchDatas):
            print(Fore.GREEN + "* Found "+str(len(sketchDatas))+" PureShapes sketches" + Style.RESET_ALL)
            for sketchData in sketchDatas:
                # Retrieving sketch transform matrix (flat row-major 4x4);
                # translation column converted from meters to mm.
                m = sketchData['transformMatrix']
                mm = [m[0:4], m[4:8], m[8:12], m[12:16]]
                mm[0][3] *= 1000
                mm[1][3] *= 1000
                mm[2][3] *= 1000
                scad += "\n"
                scad += "// Sketch "+sketchData['sketch']+"\n"
                scad += 'multmatrix('+str(mm)+') {'+"\n"
                scad += "thickness = %f;\n" % sketchData['thickness']
                scad += "translate([0, 0, -thickness]) {\n"
                boxes = {}
                def boxSet(id, pointName, point):
                    # Collect the four corners (A/B top, C/D bottom) of box `id`.
                    if id not in boxes:
                        boxes[id] = {}
                    boxes[id][pointName] = point
                for entry in sketchData['geomEntities']:
                    if entry['entityType'] == 'circle':
                        center = entry['center']
                        scad += " translate([%f, %f, 0]) {\n" % (center[0]*1000, center[1]*1000)
                        scad += " cylinder(r=%f,h=thickness);\n" % (entry['radius']*1000)
                        scad += " }\n"
                    if entry['entityType'] == 'point':
                        # Point ids look like "<boxId>.<top|bottom>.<start|end>".
                        parts = entry['id'].split('.')
                        if len(parts) == 3:
                            if parts[1] == 'top' and parts[2] == 'start':
                                boxSet(parts[0], 'A', entry['point'])
                            if parts[1] == 'top' and parts[2] == 'end':
                                boxSet(parts[0], 'B', entry['point'])
                            if parts[1] == 'bottom' and parts[2] == 'start':
                                boxSet(parts[0], 'C', entry['point'])
                            if parts[1] == 'bottom' and parts[2] == 'end':
                                boxSet(parts[0], 'D', entry['point'])
                for id in boxes:
                    if len(boxes[id]) == 4:
                        A, B = np.array(boxes[id]['A']), np.array(boxes[id]['B'])
                        C, D = np.array(boxes[id]['C']), np.array(boxes[id]['D'])
                        AB = B-A
                        # Making sure that the orientation of the square is correct
                        AB90 = np.array([-AB[1], AB[0]])
                        side = AB90.dot(C-A)
                        width = np.linalg.norm(B-A)
                        height = np.linalg.norm(B-D)
                        if side < 0:
                            A, B, C, D = C, D, A, B
                            AB = B-A
                        alpha = np.rad2deg(math.atan2(AB[1], AB[0]))
                        scad += " translate([%f, %f, 0]) {\n" % (A[0]*1000, A[1]*1000)
                        scad += ' rotate([0, 0, '+str(alpha)+']) {'+"\n"
                        scad += " cube([%f, %f, thickness]);\n" % (width*1000, height*1000)
                        scad += " }\n"
                        scad += " }\n"
                scad += "}\n"
                scad += "}\n"
            with open(scadFileName, 'w', encoding="utf-8") as stream:
                stream.write(scad)
            directory = os.path.dirname(fileName)
            os.system('cd '+directory+'; openscad '+os.path.basename(scadFileName))
        else:
            # NOTE(review): this error path still returns with status 0;
            # consider sys.exit(1) here too — left unchanged to avoid altering
            # behavior when main() is invoked as a library function.
            print(Fore.RED + "ERROR: Can't find pure shape sketch in this part" + Style.RESET_ALL)
# Script entry point: run the pure-shape conversion CLI.
if __name__ == "__main__":
    main()
|
d5a1b26332b4e407c23b85752687862f336342f1
|
b40d1a26ea04a19ec0da7bf55db84b7ee36cc898
|
/leetcode.com/python/997_Find_the_Town_Judge.py
|
3cd5b17877d5a22cf9490fd0f158f17d219aa041
|
[
"MIT"
] |
permissive
|
partho-maple/coding-interview-gym
|
5e8af7d404c28d4b9b52e5cffc540fd51d8025cf
|
20ae1a048eddbc9a32c819cf61258e2b57572f05
|
refs/heads/master
| 2022-09-11T16:36:01.702626
| 2022-03-14T08:39:47
| 2022-03-14T08:39:47
| 69,802,909
| 862
| 438
|
MIT
| 2022-08-18T06:42:46
| 2016-10-02T14:51:31
|
Python
|
UTF-8
|
Python
| false
| false
| 1,089
|
py
|
997_Find_the_Town_Judge.py
|
from collections import defaultdict
class Solution(object):
    def findJudge(self, N, trust):
        """
        :type N: int
        :type trust: List[List[int]]
        :rtype: int
        """
        # Adjacency list: person -> list of people that person trusts.
        trusts = defaultdict(list)
        for person in range(1, N + 1):
            trusts[person] = []
        for truster, trusted in trust:
            trusts[truster].append(trusted)

        # Condition 1: the town judge trusts nobody, so anyone with an
        # empty adjacency list is a candidate.
        candidates = [p for p in range(1, N + 1) if not trusts[p]]

        # Condition 2: there must be exactly one candidate; otherwise no
        # single person can be trusted by everybody else.
        if len(candidates) != 1:
            return -1
        judge = candidates[0]

        # Condition 3: every other person must trust the candidate.
        for person in range(1, N + 1):
            if person != judge and judge not in set(trusts[person]):
                return -1
        return judge
"""
outDegree/edgeList is 0
"""
|
4a820dbffde3844cddf0b7c8e917e2cace72ab0d
|
7f59e2c4e771c19378e9839406c220d3985e7efe
|
/public-engines/sms-spam-engine/marvin_sms_spam_engine/data_handler/training_preparator.py
|
8dba2e9ccef67cb6c00ba2bb7d6738169fca7c3e
|
[
"Apache-2.0"
] |
permissive
|
apache/incubator-marvin
|
c6ff32d50eb01ccd84266587d79f562a9e371496
|
58fdccf2e677041a13966ddbdd96d484edf3b474
|
refs/heads/develop
| 2023-08-30T12:46:56.973102
| 2022-11-18T15:27:52
| 2022-11-18T15:27:52
| 148,087,939
| 112
| 77
|
Apache-2.0
| 2023-03-07T05:45:59
| 2018-09-10T02:27:54
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 1,154
|
py
|
training_preparator.py
|
#!/usr/bin/env python
# coding=utf-8
"""TrainingPreparator engine action.
Use this module to add the project main code.
"""
from .._compatibility import six
from .._logging import get_logger
from marvin_python_toolbox.engine_base import EngineBaseDataHandler
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.model_selection import train_test_split
__all__ = ['TrainingPreparator']
logger = get_logger('training_preparator')
class TrainingPreparator(EngineBaseDataHandler):
    """Split the initial dataset and fit a bag-of-words vectorizer.

    Produces ``self.marvin_dataset`` with vectorized train/test text,
    the corresponding labels, and the fitted CountVectorizer.
    """

    def __init__(self, **kwargs):
        super(TrainingPreparator, self).__init__(**kwargs)

    def execute(self, params, **kwargs):
        texts = self.marvin_initial_dataset["text"]
        labels = self.marvin_initial_dataset["label"]

        # Hold out a test split as configured by the caller.
        X_train, X_test, y_train, y_test = train_test_split(
            texts, labels,
            test_size=params["test_size"], random_state=params["random_state"])

        # Fit the vocabulary on the training split only, then vectorize both.
        vectorizer = CountVectorizer()
        vectorizer.fit(X_train)

        self.marvin_dataset = {
            "X_train": vectorizer.transform(X_train),
            "X_test": vectorizer.transform(X_test),
            "y_train": y_train,
            "y_test": y_test,
            "vect": vectorizer,
        }
|
d6fb126e558b6cdbb5c0f23c47424daaff24ee43
|
0c3c8d83fd375c54d228cc9ee490765b92b41084
|
/raccoon_src/utils/help_utils.py
|
e097e2049f5d70fa3879ff3e3da3c3aa5a311f45
|
[
"MIT"
] |
permissive
|
evyatarmeged/Raccoon
|
4a082ebe25490e4ab5ed07191621048d06e0609e
|
9cf6c1129221aa51280f5705106660d23b2f1b92
|
refs/heads/master
| 2022-10-18T01:21:34.548060
| 2022-01-10T12:33:00
| 2022-01-10T12:33:00
| 133,257,682
| 2,911
| 429
|
MIT
| 2022-09-28T09:23:00
| 2018-05-13T17:05:21
|
Python
|
UTF-8
|
Python
| false
| false
| 6,620
|
py
|
help_utils.py
|
import os
import distutils.spawn
from platform import system
from collections import Counter
from subprocess import PIPE, check_call, CalledProcessError
from requests.exceptions import ConnectionError
from raccoon_src.utils.exceptions import RaccoonException, ScannerException, RequestHandlerException
from raccoon_src.utils.request_handler import RequestHandler
class HelpUtilities:
    """Assorted validation and helper classmethods used across Raccoon."""

    # Base output directory; set by create_output_directory().
    PATH = ""

    @classmethod
    def validate_target_is_up(cls, host):
        """Verify the target answers a ping, falling back to an HTTP(S)
        probe in case ICMP is filtered.

        :raises RaccoonException: if neither ping nor the web server responds.
        """
        cmd = "ping -c 1 {}".format(host.target)
        try:
            check_call(cmd.split(), stdout=PIPE, stderr=PIPE)
            return
        except CalledProcessError:
            # Maybe ICMP is blocked. Try web server
            try:
                if host.port == 443 or host.port == 80:
                    url = "{}://{}".format(host.protocol, host.target)
                else:
                    url = "{}://{}:{}".format(host.protocol, host.target, host.port)
                rh = RequestHandler()
                rh.send("GET", url=url, timeout=15)
                return
            except (ConnectionError, RequestHandlerException):
                raise RaccoonException("Target {} seems to be down (no response to ping or from a web server"
                                       " at port {}).\nRun with --skip-health-check to ignore hosts"
                                       " considered as down.".format(host, host.port))

    @classmethod
    def parse_cookie_arg(cls, cookie_arg):
        """Parse a comma-separated ``key:value`` cookie string into a dict.

        :raises RaccoonException: on malformed input.
        """
        try:
            cookies = {}
            for c in cookie_arg.split(','):
                c = c.split(":")
                cookies[c[0]] = c[1]
            return cookies
        except (IndexError, TypeError):
            raise RaccoonException("Cookie parsing error occurred, probably due to invalid cookie format.\n"
                                   "Cookie format should be comma separated key:value pairs. Use --help "
                                   "for more info.")

    @classmethod
    def validate_wordlist_args(cls, proxy_list, wordlist, subdomain_list):
        """Ensure each supplied list argument points at an existing file."""
        if proxy_list and not os.path.isfile(proxy_list):
            raise FileNotFoundError("Not a valid file path, {}".format(proxy_list))

        if wordlist and not os.path.isfile(wordlist):
            raise FileNotFoundError("Not a valid file path, {}".format(wordlist))

        if subdomain_list and not os.path.isfile(subdomain_list):
            # Bug fix: this message previously reported the wordlist path
            # instead of the offending subdomain list path.
            raise FileNotFoundError("Not a valid file path, {}".format(subdomain_list))

    @classmethod
    def validate_port_range(cls, port_range):
        """Validate port range for Nmap scan"""
        ports = port_range.split("-")
        # Exactly two non-empty parts, upper bound within the valid port space.
        # NOTE(review): non-numeric input raises ValueError rather than
        # ScannerException — confirm whether callers expect that.
        if len(ports) == 2 and all(ports) and int(ports[-1]) <= 65535:
            return True
        raise ScannerException("Invalid port range {}".format(port_range))

    @classmethod
    def validate_proxy_args(cls, *args):
        """No more than 1 of the following can be specified: tor_routing, proxy, proxy_list"""
        supplied_proxies = Counter((not arg for arg in (*args,))).get(False)
        if not supplied_proxies:
            return
        elif supplied_proxies > 1:
            raise RaccoonException("Must specify only one of the following:\n"
                                   "--tor-routing, --proxy-list, --proxy")

    @classmethod
    def determine_verbosity(cls, quiet):
        """Map the --quiet flag to a logging level name."""
        if quiet:
            return "CRITICAL"
        else:
            return "INFO"

    # NOTE(review): distutils is deprecated (removed in Python 3.12);
    # shutil.which is the drop-in replacement for these three lookups.
    @classmethod
    def find_nmap_executable(cls):
        return distutils.spawn.find_executable("nmap")

    @classmethod
    def find_openssl_executable(cls):
        return distutils.spawn.find_executable("openssl")

    @classmethod
    def find_mac_gtimeout_executable(cls):
        """To add macOS support, the coreutils package needs to be installed using homebrew"""
        return distutils.spawn.find_executable("gtimeout")

    @classmethod
    def validate_executables(cls):
        """Verify required external tools exist on this system.

        :raises RaccoonException: when nmap/openssl (or gtimeout on macOS)
            are missing.
        """
        if not (cls.find_nmap_executable() and cls.find_openssl_executable()):
            raise RaccoonException("Could not find Nmap or OpenSSL "
                                   "installed. Please install them and run Raccoon again.")
        if system() == "Darwin":
            if not cls.find_mac_gtimeout_executable():
                raise RaccoonException("To support Raccoon with macOS 'gtimeout' must be installed.\n"
                                       "gtimeout can be installed by running 'brew install coreutils'")
        return

    @classmethod
    def create_output_directory(cls, outdir):
        """Tries to create base output directory"""
        cls.PATH = outdir
        try:
            os.mkdir(outdir)
        except FileExistsError:
            pass

    @classmethod
    def get_output_path(cls, module_path):
        """Join a module-relative path onto the configured output directory."""
        return "{}/{}".format(cls.PATH, module_path)

    @classmethod
    def confirm_traffic_routs_through_tor(cls):
        """Check via check.torproject.org that traffic goes through Tor.

        :raises RaccoonException: when Tor is unreachable or not in use.
        """
        rh = RequestHandler()
        try:
            page = rh.send("GET", url="https://check.torproject.org")
            if "Congratulations. This browser is configured to use Tor." in page.text:
                return
            elif "Sorry. You are not using Tor" in page.text:
                raise RaccoonException("Traffic does not seem to be routed through Tor.\nExiting")
        except RequestHandlerException:
            raise RaccoonException("Tor service seems to be down - not able to connect to 127.0.0.1:9050.\nExiting")

    @classmethod
    def query_dns_dumpster(cls, host):
        """Fetch the DNS Dumpster map image for the target domain.

        Performs the CSRF dance (GET for the token, POST the query) and
        returns the response for the generated map PNG.
        """
        # Start DNS Dumpster session for the token
        request_handler = RequestHandler()
        dnsdumpster_session = request_handler.get_new_session()
        url = "https://dnsdumpster.com"
        if host.naked:
            target = host.naked
        else:
            target = host.target
        payload = {
            "targetip": target,
            "csrfmiddlewaretoken": None
        }
        try:
            dnsdumpster_session.get(url, timeout=10)
            jar = dnsdumpster_session.cookies
            for c in jar:
                if not c.__dict__.get("name") == "csrftoken":
                    continue
                payload["csrfmiddlewaretoken"] = c.__dict__.get("value")
                break
            dnsdumpster_session.post(url, data=payload, headers={"Referer": "https://dnsdumpster.com/"})
            return dnsdumpster_session.get("https://dnsdumpster.com/static/map/{}.png".format(target))
        except ConnectionError:
            raise RaccoonException

    @classmethod
    def extract_hosts_from_cidr(cls):
        # Not implemented yet.
        pass

    @classmethod
    def extract_hosts_from_range(cls):
        # Not implemented yet.
        pass
|
0f14e5c256e7e0134adb14989c4d2fe727b63472
|
279f415dd1e06c594c6c87deda57e201c73c4542
|
/espnet2/asr/frontend/fused.py
|
34f3315fa71c9d81bd6954431a86644b02718305
|
[
"Apache-2.0"
] |
permissive
|
espnet/espnet
|
f7ba47271c1a6b1ed606dbbfb04a7f14220bb585
|
bcd20948db7846ee523443ef9fd78c7a1248c95e
|
refs/heads/master
| 2023-08-28T23:43:34.238336
| 2023-08-23T02:51:39
| 2023-08-23T02:51:39
| 114,054,873
| 7,242
| 2,244
|
Apache-2.0
| 2023-09-14T08:01:11
| 2017-12-13T00:45:11
|
Python
|
UTF-8
|
Python
| false
| false
| 5,752
|
py
|
fused.py
|
from typing import Tuple
import numpy as np
import torch
from typeguard import check_argument_types
from espnet2.asr.frontend.abs_frontend import AbsFrontend
from espnet2.asr.frontend.default import DefaultFrontend
from espnet2.asr.frontend.s3prl import S3prlFrontend
class FusedFrontends(AbsFrontend):
    """Fuse several acoustic frontends into one.

    Each configured sub-frontend ("default" STFT/fbank or "s3prl"
    pretrained model) produces its own features; these are linearly
    projected, reshaped to a common frame rate (derived from the GCD of
    the hop lengths), truncated to the shortest stream, and concatenated
    along the feature axis, so the fused output size is
    ``len(frontends) * proj_dim``.
    """

    def __init__(
        self, frontends=None, align_method="linear_projection", proj_dim=100, fs=16000
    ):
        # `frontends` is a list of dicts, each carrying a "frontend_type"
        # key plus the per-frontend options read below.
        assert check_argument_types()
        super().__init__()
        self.align_method = (
            align_method  # fusing method : linear_projection only for now
        )
        self.proj_dim = proj_dim  # dim of the projection done on each frontend
        self.frontends = []  # list of the frontends to combine
        for i, frontend in enumerate(frontends):
            frontend_type = frontend["frontend_type"]
            if frontend_type == "default":
                # STFT + log-mel filterbank frontend; missing options fall
                # back to the defaults written here.
                n_mels, fs, n_fft, win_length, hop_length = (
                    frontend.get("n_mels", 80),
                    fs,
                    frontend.get("n_fft", 512),
                    frontend.get("win_length"),
                    frontend.get("hop_length", 128),
                )
                window, center, normalized, onesided = (
                    frontend.get("window", "hann"),
                    frontend.get("center", True),
                    frontend.get("normalized", False),
                    frontend.get("onesided", True),
                )
                fmin, fmax, htk, apply_stft = (
                    frontend.get("fmin", None),
                    frontend.get("fmax", None),
                    frontend.get("htk", False),
                    frontend.get("apply_stft", True),
                )
                self.frontends.append(
                    DefaultFrontend(
                        n_mels=n_mels,
                        n_fft=n_fft,
                        fs=fs,
                        win_length=win_length,
                        hop_length=hop_length,
                        window=window,
                        center=center,
                        normalized=normalized,
                        onesided=onesided,
                        fmin=fmin,
                        fmax=fmax,
                        htk=htk,
                        apply_stft=apply_stft,
                    )
                )
            elif frontend_type == "s3prl":
                # Pretrained self-supervised (S3PRL) frontend.
                frontend_conf, download_dir, multilayer_feature = (
                    frontend.get("frontend_conf"),
                    frontend.get("download_dir"),
                    frontend.get("multilayer_feature"),
                )
                self.frontends.append(
                    S3prlFrontend(
                        fs=fs,
                        frontend_conf=frontend_conf,
                        download_dir=download_dir,
                        multilayer_feature=multilayer_feature,
                    )
                )
            else:
                raise NotImplementedError  # frontends are only default or s3prl
        self.frontends = torch.nn.ModuleList(self.frontends)
        # Common frame rate: each frontend is upsampled by the integer
        # factor of its hop length relative to the GCD of all hop lengths.
        self.gcd = np.gcd.reduce([frontend.hop_length for frontend in self.frontends])
        self.factors = [frontend.hop_length // self.gcd for frontend in self.frontends]
        if torch.cuda.is_available():
            dev = "cuda"
        else:
            dev = "cpu"
        if self.align_method == "linear_projection":
            # One Linear per frontend; its output dim is factor * proj_dim so
            # that reshaping yields proj_dim features per upsampled frame.
            self.projection_layers = [
                torch.nn.Linear(
                    in_features=frontend.output_size(),
                    out_features=self.factors[i] * self.proj_dim,
                )
                for i, frontend in enumerate(self.frontends)
            ]
            self.projection_layers = torch.nn.ModuleList(self.projection_layers)
            self.projection_layers = self.projection_layers.to(torch.device(dev))

    def output_size(self) -> int:
        # Fused feature dimension: proj_dim per sub-frontend, concatenated.
        return len(self.frontends) * self.proj_dim

    def forward(
        self, input: torch.Tensor, input_lengths: torch.Tensor
    ) -> Tuple[torch.Tensor, torch.Tensor]:
        """Compute and fuse the features of all sub-frontends.

        ``input`` is the raw waveform batch and ``input_lengths`` its
        per-item lengths (assumed (batch, samples) / (batch,) — TODO
        confirm against AbsFrontend's contract). Returns the concatenated
        features and their common lengths.
        """
        # step 0 : get all frontends features
        # NOTE(review): features are computed under no_grad, so the
        # sub-frontends are not fine-tuned through this module.
        self.feats = []
        for frontend in self.frontends:
            with torch.no_grad():
                input_feats, feats_lens = frontend.forward(input, input_lengths)
            self.feats.append([input_feats, feats_lens])
        if (
            self.align_method == "linear_projection"
        ):  # TODO(Dan): to add other align methods
            # first step : projections
            self.feats_proj = []
            for i, frontend in enumerate(self.frontends):
                input_feats = self.feats[i][0]
                self.feats_proj.append(self.projection_layers[i](input_feats))
            # 2nd step : reshape each stream to the common (GCD) frame rate
            self.feats_reshaped = []
            for i, frontend in enumerate(self.frontends):
                input_feats_proj = self.feats_proj[i]
                bs, nf, dim = input_feats_proj.shape
                input_feats_reshaped = torch.reshape(
                    input_feats_proj, (bs, nf * self.factors[i], dim // self.factors[i])
                )
                self.feats_reshaped.append(input_feats_reshaped)
            # 3rd step : drop the few last frames so all streams align
            m = min([x.shape[1] for x in self.feats_reshaped])
            self.feats_final = [x[:, :m, :] for x in self.feats_reshaped]
            input_feats = torch.cat(
                self.feats_final, dim=-1
            )  # change the input size of the preencoder : proj_dim * n_frontends
            feats_lens = torch.ones_like(self.feats[0][1]) * (m)
        else:
            raise NotImplementedError
        return input_feats, feats_lens
|
32dab7469bcd6431520e24ffd9da9442bacd2f8c
|
a5a99f646e371b45974a6fb6ccc06b0a674818f2
|
/DQM/L1TMonitorClient/python/L1TStage2CaloLayer2DEClient_cfi.py
|
c4f32762c73def8e859f7d0fb4e98c22b01d9216
|
[
"Apache-2.0"
] |
permissive
|
cms-sw/cmssw
|
4ecd2c1105d59c66d385551230542c6615b9ab58
|
19c178740257eb48367778593da55dcad08b7a4f
|
refs/heads/master
| 2023-08-23T21:57:42.491143
| 2023-08-22T20:22:40
| 2023-08-22T20:22:40
| 10,969,551
| 1,006
| 3,696
|
Apache-2.0
| 2023-09-14T19:14:28
| 2013-06-26T14:09:07
|
C++
|
UTF-8
|
Python
| false
| false
| 480
|
py
|
L1TStage2CaloLayer2DEClient_cfi.py
|
import FWCore.ParameterSet.Config as cms
from DQMServices.Core.DQMEDHarvester import DQMEDHarvester
# DQM harvester comparing Stage-2 calorimeter Layer-2 trigger quantities
# between data and emulator — presumably producing ratio plots in the
# monitorDir folder from the two input folders below; verify against the
# L1TStage2CaloLayer2DEClient C++ implementation.
l1tStage2CaloLayer2DEClient = DQMEDHarvester("L1TStage2CaloLayer2DEClient",
monitorDir = cms.untracked.string('L1TEMU/L1TStage2CaloLayer2/L1TStage2CaloLayer2DERatio'),
inputDataDir = cms.untracked.string('L1T/L1TStage2CaloLayer2'),
inputEmulDir = cms.untracked.string('L1TEMU/L1TStage2CaloLayer2/L1TStage2CaloLayer2EMU')
)
|
9812629e91305439c42ce59262ebf3d9cfa25a8c
|
a3d6556180e74af7b555f8d47d3fea55b94bcbda
|
/tools/cr/cr/actions/builder.py
|
ee237bb2dff206d30caba8fd0f2a1bda3fdd25c3
|
[
"BSD-3-Clause"
] |
permissive
|
chromium/chromium
|
aaa9eda10115b50b0616d2f1aed5ef35d1d779d6
|
a401d6cf4f7bf0e2d2e964c512ebb923c3d8832c
|
refs/heads/main
| 2023-08-24T00:35:12.585945
| 2023-08-23T22:01:11
| 2023-08-23T22:01:11
| 120,360,765
| 17,408
| 7,102
|
BSD-3-Clause
| 2023-09-10T23:44:27
| 2018-02-05T20:55:32
| null |
UTF-8
|
Python
| false
| false
| 2,513
|
py
|
builder.py
|
# Copyright 2013 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A module for the Builder base class."""
import difflib
import cr
class Builder(cr.Action, cr.Plugin.Type):
  """Base class for implementing builders.

  Builder implementations must override the Build and Clean methods at a
  minimum to build a target and clean up back to a pristine state respectively.
  They can also override Rebuild if they are able to handle it in a more
  efficient way that a Clean Build sequence.

  They should override the GetTargets method to return the set of valid targets
  the build system knows about, and override IsTarget if they can implement it
  more efficiently than checking from presents in the result of GetTargets.
  """

  # Command-line flag / environment variable used to select the active builder.
  SELECTOR_ARG = '--builder'
  SELECTOR = 'CR_BUILDER'
  SELECTOR_HELP = 'Sets the builder to use to update dependencies.'

  @cr.Plugin.activemethod
  def Build(self, targets, arguments):
    """Build the given targets with the given extra arguments."""
    raise NotImplementedError('Must be overridden.')

  @cr.Plugin.activemethod
  def Clean(self, targets, arguments):
    """Clean temporary files built by a target."""
    raise NotImplementedError('Must be overridden.')

  @cr.Plugin.activemethod
  def Rebuild(self, targets, arguments):
    """Make a target build even if it is up to date.

    Default implementation is to do a Clean and Build sequence.
    Do not call the base version if you implement a more efficient one.
    """
    self.Clean(targets, [])
    self.Build(targets, arguments)

  @cr.Plugin.activemethod
  def GetTargets(self):
    """Gets the full set of targets supported by this builder.

    Used in automatic target name transformations, and also in offering the
    user choices.
    """
    return []

  @cr.Plugin.activemethod
  def IsTarget(self, target_name):
    """Check if a target name is on the builder knows about."""
    return target_name in self.GetTargets()

  @cr.Plugin.activemethod
  def GuessTargets(self, target_name):
    """Returns a list of closest matching targets for a named target."""
    # Up to 10 fuzzy matches with a generous 0.4 similarity cutoff.
    return difflib.get_close_matches(target_name, self.GetTargets(), 10, 0.4)
class SkipBuilder(Builder):
  """The "skip" version of a Builder, causes the build step to be skipped."""

  @property
  def priority(self):
    # Rank below every real builder so this one is only picked as a fallback
    # or by explicit selection.
    return super(SkipBuilder, self).priority - 1

  def Build(self, targets, arguments):
    # Intentionally a no-op: nothing is built.
    pass

  def Clean(self, targets, arguments):
    # Intentionally a no-op: nothing to clean.
    pass

  def IsTarget(self, target_name):
    # Accept any target name, since the build step is skipped anyway.
    return True
|
ff4f64ead6e9f5d3fa68200a4e192af2f0b115e1
|
bbd69601912a3361d788efd03a47f9d4e3bac09e
|
/wx/lib/pubsub/core/topicobj.py
|
edff958484db6d00f7bfdad87fa7f198e52e0029
|
[
"BSD-2-Clause"
] |
permissive
|
wxWidgets/Phoenix
|
56929484460a0399a8f1d9582bc77c20aa14748d
|
a1184286703cf24c4b88e5bc14cf2979c1b1ea00
|
refs/heads/master
| 2023-09-01T07:10:17.437093
| 2023-08-31T05:38:01
| 2023-08-31T05:38:01
| 5,078,061
| 2,268
| 677
| null | 2023-09-09T17:06:59
| 2012-07-17T06:22:25
|
Python
|
UTF-8
|
Python
| false
| false
| 19,013
|
py
|
topicobj.py
|
"""
Provide the Topic class.
:copyright: Copyright since 2006 by Oliver Schoenborn, all rights reserved.
:license: BSD, see LICENSE_BSD_Simple.txt for details.
"""
from weakref import ref as weakref
from .listener import (
Listener,
ListenerValidator,
)
from .topicutils import (
ALL_TOPICS,
stringize,
tupleize,
validateName,
smartDedent,
)
from .topicexc import (
TopicDefnError,
TopicNameError,
ExcHandlerError,
)
from .publishermixin import PublisherMixin
from .topicargspec import (
ArgsInfo,
ArgSpecGiven,
topicArgsFromCallable,
SenderMissingReqdMsgDataError,
SenderUnknownMsgDataError,
MessageDataSpecError,
)
from .. import py2and3
class Topic(PublisherMixin):
"""
Represent topics in pubsub. Contains information about a topic,
including topic's message data specification (MDS), the list of
subscribed listeners, docstring for the topic. It allows Python-like
access to subtopics (e.g. A.B is subtopic B of topic A).
"""
def __init__(self, treeConfig, nameTuple, description,
msgArgsInfo, parent=None):
"""Create a topic. Should only be called by TopicManager via its
getOrCreateTopic() method (which gets called in several places
in pubsub, such as sendMessage, subscribe, and newTopic).
:param treeConfig: topic tree configuration settings
:param nameTuple: topic name, in tuple format (no dots)
:param description: "docstring" for topic
:param ArgsInfo msgArgsInfo: object that defines MDS for topic
:param parent: parent of topic
:raises ValueError: invalid topic name
"""
if parent is None:
if nameTuple != (ALL_TOPICS,):
msg = 'Only one topic, named %s, can be root of topic tree'
raise ValueError(msg % 'pub.ALL_TOPICS')
else:
validateName(nameTuple)
self.__tupleName = nameTuple
self.__handlingUncaughtListenerExc = False
self._treeConfig = treeConfig
PublisherMixin.__init__(self)
self.__validator = None
# Registered listeners were originally kept in a Python list; however
# a few methods require lookup of the Listener for the given callable,
# which is an O(n) operation. A set() could have been more suitable but
# there is no way of retrieving an element from a set without iterating
# over the set, again an O(n) operation. A dict() is ok too. Because
# Listener.__eq__(callable) returns true if the Listener instance wraps
# the given callable, and because Listener.__hash__ produces the hash
# value of the wrapped callable, calling dict[callable] on a
# dict(Listener -> Listener) mapping will be O(1) in most cases:
# the dict will take the callables hash, find the list of Listeners that
# have that hash, and then iterate over that inner list to find the
# Listener instance which satisfies Listener == callable, and will return
# the Listener.
self.__listeners = dict()
# specification:
self.__description = None
self.setDescription(description)
self.__msgArgs = msgArgsInfo
if msgArgsInfo.isComplete():
self.__finalize()
else:
assert not self._treeConfig.raiseOnTopicUnspecified
# now that we know the args are fine, we can link to parent
self.__parentTopic = None
self.__subTopics = {}
if parent is None:
assert self.hasMDS()
else:
self.__parentTopic = weakref(parent)
assert self.__msgArgs.parentAI() is parent._getListenerSpec()
parent.__adoptSubtopic( self )
def setDescription(self, desc):
"""Set the 'docstring' of topic"""
self.__description = desc
def getDescription(self):
"""Return the 'docstring' of topic"""
if self.__description is None:
return None
return smartDedent(self.__description)
def setMsgArgSpec(self, argsDocs, required=()):
"""Specify the message data for topic messages.
:param argsDocs: a dictionary of keyword names (message data name) and data 'docstring'; cannot be None
:param required: a list of those keyword names, appearing in argsDocs,
which are required (all others are assumed optional)
Can only be called if this info has not been already set at construction
or in a previous call.
:raise RuntimeError: if MDS already set at construction or previous call."""
assert self.__parentTopic is not None # for root of tree, this method never called!
if argsDocs is None:
raise ValueError('Cannot set listener spec to None')
if self.__msgArgs is None or not self.__msgArgs.isComplete():
try:
specGiven = ArgSpecGiven(argsDocs, required)
self.__msgArgs = ArgsInfo(self.__tupleName, specGiven,
self.__parentTopic()._getListenerSpec())
except MessageDataSpecError:
# discard the lower part of the stack trace
exc = py2and3.getexcobj()
raise exc
self.__finalize()
else:
raise RuntimeError('Not allowed to call this: msg spec already set!')
def getArgs(self):
"""Returns a pair (reqdArgs, optArgs) where reqdArgs is tuple
of names of required message arguments, optArgs is tuple
of names for optional arguments. If topic args not specified
yet, returns (None, None)."""
sendable = self.__msgArgs.isComplete()
assert sendable == self.hasMDS()
if sendable:
return (self.__msgArgs.allRequired ,
self.__msgArgs.allOptional)
return None, None
def getArgDescriptions(self):
"""Get a map of keyword names to docstrings: documents each MDS element. """
return self.__msgArgs.getArgsDocs()
def setArgDescriptions(self, **docs):
"""Set the docstring for each MDS datum."""
self.__msgArgs.setArgsDocs(docs)
def hasMDS(self):
"""Return true if this topic has a message data specification (MDS)."""
return self.__validator is not None
def filterMsgArgs(self, msgKwargs, check=False):
"""Get the MDS docstrings for each of the specified kwargs."""
filteredArgs = self.__msgArgs.filterArgs(msgKwargs)
# if no check of args yet, do it now:
if check:
self.__msgArgs.check(filteredArgs)
return filteredArgs
def isAll(self):
"""Returns true if this topic is the 'all topics' topic. All root
topics behave as though they are child of that topic. """
return self.__tupleName == (ALL_TOPICS,)
def isRoot(self):
"""Returns true if this is a "root" topic, false otherwise. A
root topic is a topic whose name contains no dots and which
has pub.ALL_TOPICS as parent."""
parent = self.getParent()
if parent:
return parent.isAll()
assert self.isAll()
return False
def getName(self):
"""Return dotted form of full topic name"""
return stringize(self.__tupleName)
def getNameTuple(self):
"""Return tuple form of full topic name"""
return self.__tupleName
def getNodeName(self):
"""Return the last part of the topic name (has no dots)"""
name = self.__tupleName[-1]
return name
def getParent(self):
"""Get Topic object that is parent of self (i.e. self is a subtopic
of parent). Return none if self is the "all topics" topic."""
if self.__parentTopic is None:
return None
return self.__parentTopic()
def hasSubtopic(self, name=None):
"""Return true only if name is a subtopic of self. If name not
specified, return true only if self has at least one subtopic."""
if name is None:
return len(self.__subTopics) > 0
return name in self.__subTopics
def getSubtopic(self, relName):
"""Get the specified subtopic object. The relName can be a valid
subtopic name, a dotted-name string, or a tuple. """
if not relName:
raise ValueError("getSubtopic() arg can't be empty")
topicTuple = tupleize(relName)
assert topicTuple
topicObj = self
for topicName in topicTuple:
child = topicObj.__subTopics.get(topicName)
if child is None:
msg = 'Topic "%s" doesn\'t have "%s" as subtopic' % (topicObj.getName(), topicName)
raise TopicNameError(relName, msg)
topicObj = child
return topicObj
def getSubtopics(self):
"""Get a list of Topic instances that are subtopics of self."""
return py2and3.values(self.__subTopics)
def getNumListeners(self):
"""Return number of listeners currently subscribed to topic. This is
different from number of listeners that will get notified since more
general topics up the topic tree may have listeners."""
return len(self.__listeners)
def hasListener(self, listener):
"""Return true if listener is subscribed to this topic."""
return listener in self.__listeners
def hasListeners(self):
"""Return true if there are any listeners subscribed to
this topic, false otherwise."""
return bool(self.__listeners)
def getListeners(self):
"""Get a copy of list of listeners subscribed to this topic. Safe to iterate over while listeners
get un/subscribed from this topics (such as while sending a message)."""
return py2and3.keys(self.__listeners)
def getListenersIter(self):
"""Get an iterator over listeners subscribed to this topic. Do not use if listeners can be
un/subscribed while iterating. """
return py2and3.iterkeys(self.__listeners)
def validate(self, listener):
"""Checks whether listener could be subscribed to this topic:
if yes, just returns; if not, raises ListenerMismatchError.
Note that method raises TopicDefnError if self not
hasMDS()."""
if not self.hasMDS():
raise TopicDefnError(self.__tupleName)
return self.__validator.validate(listener)
def isValid(self, listener):
"""Return True only if listener could be subscribed to this topic,
otherwise returns False. Note that method raises TopicDefnError
if self not hasMDS()."""
if not self.hasMDS():
raise TopicDefnError(self.__tupleName)
return self.__validator.isValid(listener)
def subscribe(self, listener):
"""Subscribe listener to this topic. Returns a pair
(pub.Listener, success). The success is true only if listener
was not already subscribed and is now subscribed. """
if listener in self.__listeners:
assert self.hasMDS()
subdLisnr, newSub = self.__listeners[listener], False
else:
if self.__validator is None:
args, reqd = topicArgsFromCallable(listener)
self.setMsgArgSpec(args, reqd)
argsInfo = self.__validator.validate(listener)
weakListener = Listener(
listener, argsInfo, onDead=self.__onDeadListener)
self.__listeners[weakListener] = weakListener
subdLisnr, newSub = weakListener, True
# notify of subscription
self._treeConfig.notificationMgr.notifySubscribe(subdLisnr, self, newSub)
return subdLisnr, newSub
def unsubscribe(self, listener):
    """Remove *listener* from this topic's subscribers. Returns the
    pub.Listener wrapper that was removed, or None if the listener was
    not subscribed to this topic. On an actual removal, every registered
    notification handler receives ``notifyUnsubscribe(listener, self)``
    (see pub.addNotificationHandler)."""
    removed = self.__listeners.pop(listener, None)
    if removed is None:
        return None
    removed._unlinkFromTopic_()
    assert listener == removed.getCallable()
    # let notification handlers know about the removal
    self._treeConfig.notificationMgr.notifyUnsubscribe(removed, self)
    return removed
def unsubscribeAllListeners(self, filter=None):
    """Unsubscribe listeners from this topic. With no *filter*, every
    listener is removed; otherwise *filter* is called with each Listener
    and only those for which it returns true are removed. Returns the
    list of Listener objects that were unsubscribed."""
    if filter is None:
        for lisnr in self.__listeners:
            lisnr._unlinkFromTopic_()
        removed = py2and3.keys(self.__listeners)
        self.__listeners = {}
    else:
        removed = []
        # iterate over a key snapshot so entries can be deleted safely
        for lisnr in py2and3.keys(self.__listeners):
            if not filter(lisnr):
                continue
            removed.append(lisnr)
            lisnr._unlinkFromTopic_()
            del self.__listeners[lisnr]
    # tell the notification handlers about every listener actually removed
    notifyUnsub = self._treeConfig.notificationMgr.notifyUnsubscribe
    for lisnr in removed:
        notifyUnsub(lisnr, self)
    return removed
#############################################################
#
# Implementation
#
#############################################################
def _getListenerSpec(self):
    """Only to be called by pubsub package"""
    # Exposes the topic's message-argument spec to package internals
    # without making it part of the public API.
    return self.__msgArgs
def _publish(self, data):
    """This sends message to listeners of parent topics as well.
    If an exception is raised in a listener, the publish is
    aborted, except if there is a handler (see
    pub.setListenerExcHandler)."""
    self._treeConfig.notificationMgr.notifySend('pre', self)
    # send to ourself
    iterState = self._mix_prePublish(data)
    self.__sendMessage(data, self, iterState)
    # send up the chain
    topicObj = self.getParent()
    while topicObj is not None:
        if topicObj.hasListeners():
            # iterState threads messaging-protocol state from one topic
            # level to the next (provided by the _mix_* mixin methods)
            iterState = self._mix_prePublish(data, topicObj, iterState)
            self.__sendMessage(data, topicObj, iterState)
        # done for this topic, continue up branch to parent towards root
        topicObj = topicObj.getParent()
    self._treeConfig.notificationMgr.notifySend('post', self)
def __sendMessage(self, data, topicObj, iterState):
    # Deliver the message data to every listener of topicObj.
    # now send message data to each listener for current topic;
    # use list of listeners rather than iterator, so that if listeners added/removed during
    # send loop, no runtime exception:
    for listener in topicObj.getListeners():
        try:
            self._treeConfig.notificationMgr.notifySend('in', topicObj, pubListener=listener)
            self._mix_callListener(listener, data, iterState)
        except Exception:
            # if exception handling is on, handle, otherwise re-raise;
            # the flag also prevents re-entering the handler if a listener
            # called from inside the handler raises again
            handler = self._treeConfig.listenerExcHandler
            if handler is None or self.__handlingUncaughtListenerExc:
                raise
            # try handling the exception so we can continue the send:
            try:
                self.__handlingUncaughtListenerExc = True
                handler( listener.name(), topicObj )
                self.__handlingUncaughtListenerExc = False
            except Exception:
                # the exception handler itself failed: wrap and re-raise;
                # py2and3.getexcobj() fetches the active exception portably
                exc = py2and3.getexcobj()
                self.__handlingUncaughtListenerExc = False
                raise ExcHandlerError(listener.name(), topicObj, exc)
def __finalize(self):
    """Finalize the topic specification, which currently means
    creating the listener validator for this topic. This allows
    calls to subscribe() to validate that listener adheres to
    topic's message data specification (MDS)."""
    # spec must be complete, and this must be the first finalization
    assert self.__msgArgs.isComplete()
    assert not self.hasMDS()
    # must make sure can adopt a validator
    required = self.__msgArgs.allRequired
    optional = self.__msgArgs.allOptional
    # list(optional) gives the validator its own copy -- presumably so
    # later changes to allOptional cannot leak in (confirm in ListenerValidator)
    self.__validator = ListenerValidator(required, list(optional) )
    # no listener can have subscribed before the MDS existed
    assert not self.__listeners
def _undefineSelf_(self, topicsMap):
    """Remove this topic and its entire subtopic branch from the topic
    tree; called by the topic manager when deleting a topic."""
    parentRef = self.__parentTopic
    if parentRef is not None:
        # detach from parent first (parentRef is a weak reference -> call it)
        parentRef().__abandonSubtopic(self.__tupleName[-1])
    self.__undefineBranch(topicsMap)
def __undefineBranch(self, topicsMap):
    """Unsubscribe all our listeners, remove all subtopics from self,
    then detach from parent. Parent is not notified, because method
    assumes it has been called by parent"""
    #print 'Remove %s listeners (%s)' % (self.getName(), self.getNumListeners())
    self.unsubscribeAllListeners()
    self.__parentTopic = None
    # recursively tear down every subtopic before clearing our own registry
    for subName, subObj in py2and3.iteritems(self.__subTopics):
        assert isinstance(subObj, Topic)
        #print 'Unlinking %s from parent' % subObj.getName()
        subObj.__undefineBranch(topicsMap)
    self.__subTopics = {}
    # finally remove ourselves from the manager's name -> topic map
    del topicsMap[self.getName()]
def __adoptSubtopic(self, topicObj):
    """Record topicObj as a child (sub) topic of self; topicObj must
    already regard self as its parent."""
    assert topicObj.__parentTopic() is self
    self.__subTopics[topicObj.getNodeName()] = topicObj
def __abandonSubtopic(self, name):
    """Drop the subtopic called *name* from self; the subtopic keeps
    existing but becomes an orphan (no parent)."""
    orphan = self.__subTopics.pop(name)
    assert orphan.__parentTopic() is self
def __onDeadListener(self, weakListener):
    """Callback invoked when a subscribed listener's underlying callable
    has died (been garbage collected): drop it from our registry and
    notify the registered notification handlers."""
    dead = self.__listeners.pop(weakListener)
    self._treeConfig.notificationMgr.notifyDeadListener(dead, self)
def __str__(self):
    """Render as 'topic.name(numListeners)'."""
    return "{}({})".format(self.getName(), self.getNumListeners())
|
b7dc47a9284ed816a8c2977d6669ccaf027c72ee
|
aeef2494b283012ed619870c4275e7d015f4017a
|
/sdk/python/pulumi_gcp/dataplex/task.py
|
ba437f62e334a333a6f88977fd9a5c3eaea6d77f
|
[
"BSD-3-Clause",
"MPL-2.0",
"Apache-2.0"
] |
permissive
|
pulumi/pulumi-gcp
|
d4fd3f80c3df5290edaf33eb5eafe34e6699d0ff
|
7deea0a50a4ee5ab7bd722a83eca01707e298f85
|
refs/heads/master
| 2023-08-31T07:12:45.921522
| 2023-08-31T06:16:27
| 2023-08-31T06:16:27
| 97,485,806
| 160
| 63
|
Apache-2.0
| 2023-09-14T19:49:36
| 2017-07-17T14:28:37
|
Java
|
UTF-8
|
Python
| false
| false
| 40,110
|
py
|
task.py
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['TaskArgs', 'Task']
@pulumi.input_type
class TaskArgs:
    # NOTE: generated by the Pulumi Terraform Bridge (tfgen) from the GCP
    # provider schema -- fix issues upstream and regenerate rather than
    # editing this class by hand.
    def __init__(__self__, *,
                 execution_spec: pulumi.Input['TaskExecutionSpecArgs'],
                 trigger_spec: pulumi.Input['TaskTriggerSpecArgs'],
                 description: Optional[pulumi.Input[str]] = None,
                 display_name: Optional[pulumi.Input[str]] = None,
                 labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 lake: Optional[pulumi.Input[str]] = None,
                 location: Optional[pulumi.Input[str]] = None,
                 notebook: Optional[pulumi.Input['TaskNotebookArgs']] = None,
                 project: Optional[pulumi.Input[str]] = None,
                 spark: Optional[pulumi.Input['TaskSparkArgs']] = None,
                 task_id: Optional[pulumi.Input[str]] = None):
        """
        The set of arguments for constructing a Task resource.
        :param pulumi.Input['TaskExecutionSpecArgs'] execution_spec: Configuration for the cluster
               Structure is documented below.
        :param pulumi.Input['TaskTriggerSpecArgs'] trigger_spec: Configuration for the cluster
               Structure is documented below.
        :param pulumi.Input[str] description: User-provided description of the task.
        :param pulumi.Input[str] display_name: User friendly display name.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] labels: User-defined labels for the task.
        :param pulumi.Input[str] lake: The lake in which the task will be created in.
        :param pulumi.Input[str] location: The location in which the task will be created in.
        :param pulumi.Input['TaskNotebookArgs'] notebook: A service with manual scaling runs continuously, allowing you to perform complex initialization and rely on the state of its memory over time.
               Structure is documented below.
               (Required)
               Path to input notebook. This can be the Cloud Storage URI of the notebook file or the path to a Notebook Content. The execution args are accessible as environment variables (TASK_key=value).
        :param pulumi.Input[str] project: The project in which jobs are run. By default, the project containing the Lake is used. If a project is provided, the ExecutionSpec.service_account must belong to this project.
               If it is not provided, the provider project is used.
        :param pulumi.Input['TaskSparkArgs'] spark: A service with manual scaling runs continuously, allowing you to perform complex initialization and rely on the state of its memory over time.
               Structure is documented below.
        :param pulumi.Input[str] task_id: The task Id of the task.
        """
        pulumi.set(__self__, "execution_spec", execution_spec)
        pulumi.set(__self__, "trigger_spec", trigger_spec)
        # Optional arguments are only recorded when explicitly provided, so
        # unset values stay absent from the resource's input state.
        if description is not None:
            pulumi.set(__self__, "description", description)
        if display_name is not None:
            pulumi.set(__self__, "display_name", display_name)
        if labels is not None:
            pulumi.set(__self__, "labels", labels)
        if lake is not None:
            pulumi.set(__self__, "lake", lake)
        if location is not None:
            pulumi.set(__self__, "location", location)
        if notebook is not None:
            pulumi.set(__self__, "notebook", notebook)
        if project is not None:
            pulumi.set(__self__, "project", project)
        if spark is not None:
            pulumi.set(__self__, "spark", spark)
        if task_id is not None:
            pulumi.set(__self__, "task_id", task_id)

    @property
    @pulumi.getter(name="executionSpec")
    def execution_spec(self) -> pulumi.Input['TaskExecutionSpecArgs']:
        """
        Configuration for the cluster
        Structure is documented below.
        """
        return pulumi.get(self, "execution_spec")

    @execution_spec.setter
    def execution_spec(self, value: pulumi.Input['TaskExecutionSpecArgs']):
        pulumi.set(self, "execution_spec", value)

    @property
    @pulumi.getter(name="triggerSpec")
    def trigger_spec(self) -> pulumi.Input['TaskTriggerSpecArgs']:
        """
        Configuration for the cluster
        Structure is documented below.
        """
        return pulumi.get(self, "trigger_spec")

    @trigger_spec.setter
    def trigger_spec(self, value: pulumi.Input['TaskTriggerSpecArgs']):
        pulumi.set(self, "trigger_spec", value)

    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """
        User-provided description of the task.
        """
        return pulumi.get(self, "description")

    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)

    @property
    @pulumi.getter(name="displayName")
    def display_name(self) -> Optional[pulumi.Input[str]]:
        """
        User friendly display name.
        """
        return pulumi.get(self, "display_name")

    @display_name.setter
    def display_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "display_name", value)

    @property
    @pulumi.getter
    def labels(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
        """
        User-defined labels for the task.
        """
        return pulumi.get(self, "labels")

    @labels.setter
    def labels(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
        pulumi.set(self, "labels", value)

    @property
    @pulumi.getter
    def lake(self) -> Optional[pulumi.Input[str]]:
        """
        The lake in which the task will be created in.
        """
        return pulumi.get(self, "lake")

    @lake.setter
    def lake(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "lake", value)

    @property
    @pulumi.getter
    def location(self) -> Optional[pulumi.Input[str]]:
        """
        The location in which the task will be created in.
        """
        return pulumi.get(self, "location")

    @location.setter
    def location(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "location", value)

    @property
    @pulumi.getter
    def notebook(self) -> Optional[pulumi.Input['TaskNotebookArgs']]:
        """
        A service with manual scaling runs continuously, allowing you to perform complex initialization and rely on the state of its memory over time.
        Structure is documented below.
        (Required)
        Path to input notebook. This can be the Cloud Storage URI of the notebook file or the path to a Notebook Content. The execution args are accessible as environment variables (TASK_key=value).
        """
        return pulumi.get(self, "notebook")

    @notebook.setter
    def notebook(self, value: Optional[pulumi.Input['TaskNotebookArgs']]):
        pulumi.set(self, "notebook", value)

    @property
    @pulumi.getter
    def project(self) -> Optional[pulumi.Input[str]]:
        """
        The project in which jobs are run. By default, the project containing the Lake is used. If a project is provided, the ExecutionSpec.service_account must belong to this project.
        If it is not provided, the provider project is used.
        """
        return pulumi.get(self, "project")

    @project.setter
    def project(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "project", value)

    @property
    @pulumi.getter
    def spark(self) -> Optional[pulumi.Input['TaskSparkArgs']]:
        """
        A service with manual scaling runs continuously, allowing you to perform complex initialization and rely on the state of its memory over time.
        Structure is documented below.
        """
        return pulumi.get(self, "spark")

    @spark.setter
    def spark(self, value: Optional[pulumi.Input['TaskSparkArgs']]):
        pulumi.set(self, "spark", value)

    @property
    @pulumi.getter(name="taskId")
    def task_id(self) -> Optional[pulumi.Input[str]]:
        """
        The task Id of the task.
        """
        return pulumi.get(self, "task_id")

    @task_id.setter
    def task_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "task_id", value)
@pulumi.input_type
class _TaskState:
    # NOTE: generated by the Pulumi Terraform Bridge (tfgen); holds ALL
    # resource properties (inputs and provider outputs) for Task.get()
    # lookups -- regenerate rather than editing by hand.
    def __init__(__self__, *,
                 create_time: Optional[pulumi.Input[str]] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 display_name: Optional[pulumi.Input[str]] = None,
                 execution_spec: Optional[pulumi.Input['TaskExecutionSpecArgs']] = None,
                 execution_statuses: Optional[pulumi.Input[Sequence[pulumi.Input['TaskExecutionStatusArgs']]]] = None,
                 labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 lake: Optional[pulumi.Input[str]] = None,
                 location: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 notebook: Optional[pulumi.Input['TaskNotebookArgs']] = None,
                 project: Optional[pulumi.Input[str]] = None,
                 spark: Optional[pulumi.Input['TaskSparkArgs']] = None,
                 state: Optional[pulumi.Input[str]] = None,
                 task_id: Optional[pulumi.Input[str]] = None,
                 trigger_spec: Optional[pulumi.Input['TaskTriggerSpecArgs']] = None,
                 uid: Optional[pulumi.Input[str]] = None,
                 update_time: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering Task resources.
        :param pulumi.Input[str] create_time: The time when the task was created.
        :param pulumi.Input[str] description: User-provided description of the task.
        :param pulumi.Input[str] display_name: User friendly display name.
        :param pulumi.Input['TaskExecutionSpecArgs'] execution_spec: Configuration for the cluster
               Structure is documented below.
        :param pulumi.Input[Sequence[pulumi.Input['TaskExecutionStatusArgs']]] execution_statuses: Configuration for the cluster
               Structure is documented below.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] labels: User-defined labels for the task.
        :param pulumi.Input[str] lake: The lake in which the task will be created in.
        :param pulumi.Input[str] location: The location in which the task will be created in.
        :param pulumi.Input[str] name: (Output)
               The relative resource name of the job, of the form: projects/{project_number}/locations/{locationId}/lakes/{lakeId}/tasks/{taskId}/jobs/{jobId}.
        :param pulumi.Input['TaskNotebookArgs'] notebook: A service with manual scaling runs continuously, allowing you to perform complex initialization and rely on the state of its memory over time.
               Structure is documented below.
               (Required)
               Path to input notebook. This can be the Cloud Storage URI of the notebook file or the path to a Notebook Content. The execution args are accessible as environment variables (TASK_key=value).
        :param pulumi.Input[str] project: The project in which jobs are run. By default, the project containing the Lake is used. If a project is provided, the ExecutionSpec.service_account must belong to this project.
               If it is not provided, the provider project is used.
        :param pulumi.Input['TaskSparkArgs'] spark: A service with manual scaling runs continuously, allowing you to perform complex initialization and rely on the state of its memory over time.
               Structure is documented below.
        :param pulumi.Input[str] state: (Output)
               Execution state for the job.
        :param pulumi.Input[str] task_id: The task Id of the task.
        :param pulumi.Input['TaskTriggerSpecArgs'] trigger_spec: Configuration for the cluster
               Structure is documented below.
        :param pulumi.Input[str] uid: (Output)
               System generated globally unique ID for the job.
        :param pulumi.Input[str] update_time: (Output)
               Last update time of the status.
        """
        # Every property is optional here: state lookups may carry any subset;
        # only provided values are recorded.
        if create_time is not None:
            pulumi.set(__self__, "create_time", create_time)
        if description is not None:
            pulumi.set(__self__, "description", description)
        if display_name is not None:
            pulumi.set(__self__, "display_name", display_name)
        if execution_spec is not None:
            pulumi.set(__self__, "execution_spec", execution_spec)
        if execution_statuses is not None:
            pulumi.set(__self__, "execution_statuses", execution_statuses)
        if labels is not None:
            pulumi.set(__self__, "labels", labels)
        if lake is not None:
            pulumi.set(__self__, "lake", lake)
        if location is not None:
            pulumi.set(__self__, "location", location)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if notebook is not None:
            pulumi.set(__self__, "notebook", notebook)
        if project is not None:
            pulumi.set(__self__, "project", project)
        if spark is not None:
            pulumi.set(__self__, "spark", spark)
        if state is not None:
            pulumi.set(__self__, "state", state)
        if task_id is not None:
            pulumi.set(__self__, "task_id", task_id)
        if trigger_spec is not None:
            pulumi.set(__self__, "trigger_spec", trigger_spec)
        if uid is not None:
            pulumi.set(__self__, "uid", uid)
        if update_time is not None:
            pulumi.set(__self__, "update_time", update_time)

    @property
    @pulumi.getter(name="createTime")
    def create_time(self) -> Optional[pulumi.Input[str]]:
        """
        The time when the task was created.
        """
        return pulumi.get(self, "create_time")

    @create_time.setter
    def create_time(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "create_time", value)

    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """
        User-provided description of the task.
        """
        return pulumi.get(self, "description")

    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)

    @property
    @pulumi.getter(name="displayName")
    def display_name(self) -> Optional[pulumi.Input[str]]:
        """
        User friendly display name.
        """
        return pulumi.get(self, "display_name")

    @display_name.setter
    def display_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "display_name", value)

    @property
    @pulumi.getter(name="executionSpec")
    def execution_spec(self) -> Optional[pulumi.Input['TaskExecutionSpecArgs']]:
        """
        Configuration for the cluster
        Structure is documented below.
        """
        return pulumi.get(self, "execution_spec")

    @execution_spec.setter
    def execution_spec(self, value: Optional[pulumi.Input['TaskExecutionSpecArgs']]):
        pulumi.set(self, "execution_spec", value)

    @property
    @pulumi.getter(name="executionStatuses")
    def execution_statuses(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['TaskExecutionStatusArgs']]]]:
        """
        Configuration for the cluster
        Structure is documented below.
        """
        return pulumi.get(self, "execution_statuses")

    @execution_statuses.setter
    def execution_statuses(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['TaskExecutionStatusArgs']]]]):
        pulumi.set(self, "execution_statuses", value)

    @property
    @pulumi.getter
    def labels(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
        """
        User-defined labels for the task.
        """
        return pulumi.get(self, "labels")

    @labels.setter
    def labels(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
        pulumi.set(self, "labels", value)

    @property
    @pulumi.getter
    def lake(self) -> Optional[pulumi.Input[str]]:
        """
        The lake in which the task will be created in.
        """
        return pulumi.get(self, "lake")

    @lake.setter
    def lake(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "lake", value)

    @property
    @pulumi.getter
    def location(self) -> Optional[pulumi.Input[str]]:
        """
        The location in which the task will be created in.
        """
        return pulumi.get(self, "location")

    @location.setter
    def location(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "location", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        (Output)
        The relative resource name of the job, of the form: projects/{project_number}/locations/{locationId}/lakes/{lakeId}/tasks/{taskId}/jobs/{jobId}.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def notebook(self) -> Optional[pulumi.Input['TaskNotebookArgs']]:
        """
        A service with manual scaling runs continuously, allowing you to perform complex initialization and rely on the state of its memory over time.
        Structure is documented below.
        (Required)
        Path to input notebook. This can be the Cloud Storage URI of the notebook file or the path to a Notebook Content. The execution args are accessible as environment variables (TASK_key=value).
        """
        return pulumi.get(self, "notebook")

    @notebook.setter
    def notebook(self, value: Optional[pulumi.Input['TaskNotebookArgs']]):
        pulumi.set(self, "notebook", value)

    @property
    @pulumi.getter
    def project(self) -> Optional[pulumi.Input[str]]:
        """
        The project in which jobs are run. By default, the project containing the Lake is used. If a project is provided, the ExecutionSpec.service_account must belong to this project.
        If it is not provided, the provider project is used.
        """
        return pulumi.get(self, "project")

    @project.setter
    def project(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "project", value)

    @property
    @pulumi.getter
    def spark(self) -> Optional[pulumi.Input['TaskSparkArgs']]:
        """
        A service with manual scaling runs continuously, allowing you to perform complex initialization and rely on the state of its memory over time.
        Structure is documented below.
        """
        return pulumi.get(self, "spark")

    @spark.setter
    def spark(self, value: Optional[pulumi.Input['TaskSparkArgs']]):
        pulumi.set(self, "spark", value)

    @property
    @pulumi.getter
    def state(self) -> Optional[pulumi.Input[str]]:
        """
        (Output)
        Execution state for the job.
        """
        return pulumi.get(self, "state")

    @state.setter
    def state(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "state", value)

    @property
    @pulumi.getter(name="taskId")
    def task_id(self) -> Optional[pulumi.Input[str]]:
        """
        The task Id of the task.
        """
        return pulumi.get(self, "task_id")

    @task_id.setter
    def task_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "task_id", value)

    @property
    @pulumi.getter(name="triggerSpec")
    def trigger_spec(self) -> Optional[pulumi.Input['TaskTriggerSpecArgs']]:
        """
        Configuration for the cluster
        Structure is documented below.
        """
        return pulumi.get(self, "trigger_spec")

    @trigger_spec.setter
    def trigger_spec(self, value: Optional[pulumi.Input['TaskTriggerSpecArgs']]):
        pulumi.set(self, "trigger_spec", value)

    @property
    @pulumi.getter
    def uid(self) -> Optional[pulumi.Input[str]]:
        """
        (Output)
        System generated globally unique ID for the job.
        """
        return pulumi.get(self, "uid")

    @uid.setter
    def uid(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "uid", value)

    @property
    @pulumi.getter(name="updateTime")
    def update_time(self) -> Optional[pulumi.Input[str]]:
        """
        (Output)
        Last update time of the status.
        """
        return pulumi.get(self, "update_time")

    @update_time.setter
    def update_time(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "update_time", value)
class Task(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
description: Optional[pulumi.Input[str]] = None,
display_name: Optional[pulumi.Input[str]] = None,
execution_spec: Optional[pulumi.Input[pulumi.InputType['TaskExecutionSpecArgs']]] = None,
labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
lake: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
notebook: Optional[pulumi.Input[pulumi.InputType['TaskNotebookArgs']]] = None,
project: Optional[pulumi.Input[str]] = None,
spark: Optional[pulumi.Input[pulumi.InputType['TaskSparkArgs']]] = None,
task_id: Optional[pulumi.Input[str]] = None,
trigger_spec: Optional[pulumi.Input[pulumi.InputType['TaskTriggerSpecArgs']]] = None,
__props__=None):
"""
A Dataplex task represents the work that you want Dataplex to do on a schedule. It encapsulates code, parameters, and the schedule.
To get more information about Task, see:
* [API documentation](https://cloud.google.com/dataplex/docs/reference/rest/v1/projects.locations.lakes.tasks)
* How-to Guides
* [Official Documentation](https://cloud.google.com/dataplex/docs)
## Example Usage
## Import
Task can be imported using any of these accepted formats
```sh
$ pulumi import gcp:dataplex/task:Task default projects/{{project}}/locations/{{location}}/lakes/{{lake}}/tasks/{{task_id}}
```
```sh
$ pulumi import gcp:dataplex/task:Task default {{project}}/{{location}}/{{lake}}/{{task_id}}
```
```sh
$ pulumi import gcp:dataplex/task:Task default {{location}}/{{lake}}/{{task_id}}
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] description: User-provided description of the task.
:param pulumi.Input[str] display_name: User friendly display name.
:param pulumi.Input[pulumi.InputType['TaskExecutionSpecArgs']] execution_spec: Configuration for the cluster
Structure is documented below.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] labels: User-defined labels for the task.
:param pulumi.Input[str] lake: The lake in which the task will be created in.
:param pulumi.Input[str] location: The location in which the task will be created in.
:param pulumi.Input[pulumi.InputType['TaskNotebookArgs']] notebook: A service with manual scaling runs continuously, allowing you to perform complex initialization and rely on the state of its memory over time.
Structure is documented below.
(Required)
Path to input notebook. This can be the Cloud Storage URI of the notebook file or the path to a Notebook Content. The execution args are accessible as environment variables (TASK_key=value).
:param pulumi.Input[str] project: The project in which jobs are run. By default, the project containing the Lake is used. If a project is provided, the ExecutionSpec.service_account must belong to this project.
If it is not provided, the provider project is used.
:param pulumi.Input[pulumi.InputType['TaskSparkArgs']] spark: A service with manual scaling runs continuously, allowing you to perform complex initialization and rely on the state of its memory over time.
Structure is documented below.
:param pulumi.Input[str] task_id: The task Id of the task.
:param pulumi.Input[pulumi.InputType['TaskTriggerSpecArgs']] trigger_spec: Configuration for the cluster
Structure is documented below.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: TaskArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
A Dataplex task represents the work that you want Dataplex to do on a schedule. It encapsulates code, parameters, and the schedule.
To get more information about Task, see:
* [API documentation](https://cloud.google.com/dataplex/docs/reference/rest/v1/projects.locations.lakes.tasks)
* How-to Guides
* [Official Documentation](https://cloud.google.com/dataplex/docs)
## Example Usage
## Import
Task can be imported using any of these accepted formats
```sh
$ pulumi import gcp:dataplex/task:Task default projects/{{project}}/locations/{{location}}/lakes/{{lake}}/tasks/{{task_id}}
```
```sh
$ pulumi import gcp:dataplex/task:Task default {{project}}/{{location}}/{{lake}}/{{task_id}}
```
```sh
$ pulumi import gcp:dataplex/task:Task default {{location}}/{{lake}}/{{task_id}}
```
:param str resource_name: The name of the resource.
:param TaskArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(TaskArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
description: Optional[pulumi.Input[str]] = None,
display_name: Optional[pulumi.Input[str]] = None,
execution_spec: Optional[pulumi.Input[pulumi.InputType['TaskExecutionSpecArgs']]] = None,
labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
lake: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
notebook: Optional[pulumi.Input[pulumi.InputType['TaskNotebookArgs']]] = None,
project: Optional[pulumi.Input[str]] = None,
spark: Optional[pulumi.Input[pulumi.InputType['TaskSparkArgs']]] = None,
task_id: Optional[pulumi.Input[str]] = None,
trigger_spec: Optional[pulumi.Input[pulumi.InputType['TaskTriggerSpecArgs']]] = None,
__props__=None):
opts = pulumi.ResourceOptions.merge(_utilities.get_resource_opts_defaults(), opts)
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = TaskArgs.__new__(TaskArgs)
__props__.__dict__["description"] = description
__props__.__dict__["display_name"] = display_name
if execution_spec is None and not opts.urn:
raise TypeError("Missing required property 'execution_spec'")
__props__.__dict__["execution_spec"] = execution_spec
__props__.__dict__["labels"] = labels
__props__.__dict__["lake"] = lake
__props__.__dict__["location"] = location
__props__.__dict__["notebook"] = notebook
__props__.__dict__["project"] = project
__props__.__dict__["spark"] = spark
__props__.__dict__["task_id"] = task_id
if trigger_spec is None and not opts.urn:
raise TypeError("Missing required property 'trigger_spec'")
__props__.__dict__["trigger_spec"] = trigger_spec
__props__.__dict__["create_time"] = None
__props__.__dict__["execution_statuses"] = None
__props__.__dict__["name"] = None
__props__.__dict__["state"] = None
__props__.__dict__["uid"] = None
__props__.__dict__["update_time"] = None
super(Task, __self__).__init__(
'gcp:dataplex/task:Task',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
create_time: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
display_name: Optional[pulumi.Input[str]] = None,
execution_spec: Optional[pulumi.Input[pulumi.InputType['TaskExecutionSpecArgs']]] = None,
execution_statuses: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['TaskExecutionStatusArgs']]]]] = None,
labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
lake: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
notebook: Optional[pulumi.Input[pulumi.InputType['TaskNotebookArgs']]] = None,
project: Optional[pulumi.Input[str]] = None,
spark: Optional[pulumi.Input[pulumi.InputType['TaskSparkArgs']]] = None,
state: Optional[pulumi.Input[str]] = None,
task_id: Optional[pulumi.Input[str]] = None,
trigger_spec: Optional[pulumi.Input[pulumi.InputType['TaskTriggerSpecArgs']]] = None,
uid: Optional[pulumi.Input[str]] = None,
update_time: Optional[pulumi.Input[str]] = None) -> 'Task':
"""
Get an existing Task resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] create_time: The time when the task was created.
:param pulumi.Input[str] description: User-provided description of the task.
:param pulumi.Input[str] display_name: User friendly display name.
:param pulumi.Input[pulumi.InputType['TaskExecutionSpecArgs']] execution_spec: Configuration for the cluster
Structure is documented below.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['TaskExecutionStatusArgs']]]] execution_statuses: Configuration for the cluster
Structure is documented below.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] labels: User-defined labels for the task.
:param pulumi.Input[str] lake: The lake in which the task will be created in.
:param pulumi.Input[str] location: The location in which the task will be created in.
:param pulumi.Input[str] name: (Output)
The relative resource name of the job, of the form: projects/{project_number}/locations/{locationId}/lakes/{lakeId}/tasks/{taskId}/jobs/{jobId}.
:param pulumi.Input[pulumi.InputType['TaskNotebookArgs']] notebook: A service with manual scaling runs continuously, allowing you to perform complex initialization and rely on the state of its memory over time.
Structure is documented below.
(Required)
Path to input notebook. This can be the Cloud Storage URI of the notebook file or the path to a Notebook Content. The execution args are accessible as environment variables (TASK_key=value).
:param pulumi.Input[str] project: The project in which jobs are run. By default, the project containing the Lake is used. If a project is provided, the ExecutionSpec.service_account must belong to this project.
If it is not provided, the provider project is used.
:param pulumi.Input[pulumi.InputType['TaskSparkArgs']] spark: A service with manual scaling runs continuously, allowing you to perform complex initialization and rely on the state of its memory over time.
Structure is documented below.
:param pulumi.Input[str] state: (Output)
Execution state for the job.
:param pulumi.Input[str] task_id: The task Id of the task.
:param pulumi.Input[pulumi.InputType['TaskTriggerSpecArgs']] trigger_spec: Configuration for the cluster
Structure is documented below.
:param pulumi.Input[str] uid: (Output)
System generated globally unique ID for the job.
:param pulumi.Input[str] update_time: (Output)
Last update time of the status.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _TaskState.__new__(_TaskState)
__props__.__dict__["create_time"] = create_time
__props__.__dict__["description"] = description
__props__.__dict__["display_name"] = display_name
__props__.__dict__["execution_spec"] = execution_spec
__props__.__dict__["execution_statuses"] = execution_statuses
__props__.__dict__["labels"] = labels
__props__.__dict__["lake"] = lake
__props__.__dict__["location"] = location
__props__.__dict__["name"] = name
__props__.__dict__["notebook"] = notebook
__props__.__dict__["project"] = project
__props__.__dict__["spark"] = spark
__props__.__dict__["state"] = state
__props__.__dict__["task_id"] = task_id
__props__.__dict__["trigger_spec"] = trigger_spec
__props__.__dict__["uid"] = uid
__props__.__dict__["update_time"] = update_time
return Task(resource_name, opts=opts, __props__=__props__)
    @property
    @pulumi.getter(name="createTime")
    def create_time(self) -> pulumi.Output[str]:
        """
        The time when the task was created.
        """
        # Read-only provider-managed output, resolved via pulumi.get.
        return pulumi.get(self, "create_time")
    @property
    @pulumi.getter
    def description(self) -> pulumi.Output[Optional[str]]:
        """
        User-provided description of the task.
        """
        # Read-only provider-managed output, resolved via pulumi.get.
        return pulumi.get(self, "description")
    @property
    @pulumi.getter(name="displayName")
    def display_name(self) -> pulumi.Output[Optional[str]]:
        """
        User friendly display name.
        """
        # Read-only provider-managed output, resolved via pulumi.get.
        return pulumi.get(self, "display_name")
    @property
    @pulumi.getter(name="executionSpec")
    def execution_spec(self) -> pulumi.Output['outputs.TaskExecutionSpec']:
        """
        Configuration for the cluster
        Structure is documented below.
        """
        # NOTE(review): upstream docstring says "cluster"; presumably this is
        # the task's execution spec — confirm against provider docs.
        return pulumi.get(self, "execution_spec")
    @property
    @pulumi.getter(name="executionStatuses")
    def execution_statuses(self) -> pulumi.Output[Sequence['outputs.TaskExecutionStatus']]:
        """
        Configuration for the cluster
        Structure is documented below.
        """
        # NOTE(review): upstream docstring says "cluster"; presumably these are
        # per-run execution statuses — confirm against provider docs.
        return pulumi.get(self, "execution_statuses")
    @property
    @pulumi.getter
    def labels(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
        """
        User-defined labels for the task.
        """
        # Read-only provider-managed output, resolved via pulumi.get.
        return pulumi.get(self, "labels")
    @property
    @pulumi.getter
    def lake(self) -> pulumi.Output[Optional[str]]:
        """
        The lake in which the task will be created in.
        """
        # Read-only provider-managed output, resolved via pulumi.get.
        return pulumi.get(self, "lake")
    @property
    @pulumi.getter
    def location(self) -> pulumi.Output[Optional[str]]:
        """
        The location in which the task will be created in.
        """
        # Read-only provider-managed output, resolved via pulumi.get.
        return pulumi.get(self, "location")
    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        (Output)
        The relative resource name of the job, of the form: projects/{project_number}/locations/{locationId}/lakes/{lakeId}/tasks/{taskId}/jobs/{jobId}.
        """
        # Read-only provider-managed output, resolved via pulumi.get.
        return pulumi.get(self, "name")
    @property
    @pulumi.getter
    def notebook(self) -> pulumi.Output[Optional['outputs.TaskNotebook']]:
        """
        A service with manual scaling runs continuously, allowing you to perform complex initialization and rely on the state of its memory over time.
        Structure is documented below.
        (Required)
        Path to input notebook. This can be the Cloud Storage URI of the notebook file or the path to a Notebook Content. The execution args are accessible as environment variables (TASK_key=value).
        """
        # NOTE(review): docstring mixes two descriptions (service scaling and
        # notebook path) — likely an upstream doc-generation artifact; confirm.
        return pulumi.get(self, "notebook")
    @property
    @pulumi.getter
    def project(self) -> pulumi.Output[str]:
        """
        The project in which jobs are run. By default, the project containing the Lake is used. If a project is provided, the ExecutionSpec.service_account must belong to this project.
        If it is not provided, the provider project is used.
        """
        # Read-only provider-managed output, resolved via pulumi.get.
        return pulumi.get(self, "project")
    @property
    @pulumi.getter
    def spark(self) -> pulumi.Output[Optional['outputs.TaskSpark']]:
        """
        A service with manual scaling runs continuously, allowing you to perform complex initialization and rely on the state of its memory over time.
        Structure is documented below.
        """
        # NOTE(review): upstream docstring looks misattributed; presumably this
        # holds the Spark task configuration — confirm against provider docs.
        return pulumi.get(self, "spark")
    @property
    @pulumi.getter
    def state(self) -> pulumi.Output[str]:
        """
        (Output)
        Execution state for the job.
        """
        # Read-only provider-managed output, resolved via pulumi.get.
        return pulumi.get(self, "state")
    @property
    @pulumi.getter(name="taskId")
    def task_id(self) -> pulumi.Output[Optional[str]]:
        """
        The task Id of the task.
        """
        # Read-only provider-managed output, resolved via pulumi.get.
        return pulumi.get(self, "task_id")
    @property
    @pulumi.getter(name="triggerSpec")
    def trigger_spec(self) -> pulumi.Output['outputs.TaskTriggerSpec']:
        """
        Configuration for the cluster
        Structure is documented below.
        """
        # NOTE(review): upstream docstring says "cluster"; presumably this is
        # the task's trigger spec — confirm against provider docs.
        return pulumi.get(self, "trigger_spec")
    @property
    @pulumi.getter
    def uid(self) -> pulumi.Output[str]:
        """
        (Output)
        System generated globally unique ID for the job.
        """
        # Read-only provider-managed output, resolved via pulumi.get.
        return pulumi.get(self, "uid")
    @property
    @pulumi.getter(name="updateTime")
    def update_time(self) -> pulumi.Output[str]:
        """
        (Output)
        Last update time of the status.
        """
        # Read-only provider-managed output, resolved via pulumi.get.
        return pulumi.get(self, "update_time")
|
4b46d6907eadffac66865dc94a46df35c33fa214
|
620323fc090cebaf7aca456ff3f7fbbe1e210394
|
/socket__tcp__examples/http__post__contact_ng_server.py
|
90fa93a9e96fdeda3a104a89322f8d65953f5cbe
|
[
"CC-BY-4.0"
] |
permissive
|
gil9red/SimplePyScripts
|
bd2733372728bf9b9f00570e90316fa12116516b
|
773c2c9724edd8827a1dbd91694d780e03fcb05a
|
refs/heads/master
| 2023-08-31T04:26:09.120173
| 2023-08-30T17:22:59
| 2023-08-30T17:22:59
| 22,650,442
| 157
| 46
| null | 2023-09-08T17:51:33
| 2014-08-05T16:19:52
|
Python
|
UTF-8
|
Python
| false
| false
| 1,185
|
py
|
http__post__contact_ng_server.py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
__author__ = "ipetrash"
import socket
INT_SOFT_ID = "<INT_SOFT_ID>"
POINT_CODE = "<POINT_CODE>"

# NG Server
HOST = "10.7.8.31"
PORT = 12000

post_data = f"""
<?xml version="1.0"?>
<REQUEST OBJECT_CLASS="TAbonentObject" ACTION="GET_CHANGES" VERSION="0" TYPE_VERSION="I" PACK="ZLIB"
INT_SOFT_ID="{INT_SOFT_ID}"
POINT_CODE="{POINT_CODE}"
SignOut="No"
ExpectSigned="No"
/>
"""

# Body bytes that actually follow the blank header line.
# BUG FIX: the old code advertised Content-Length = len(post_data) but
# transmitted two extra "\n" characters around the body, so the declared
# length never matched the bytes on the wire (servers may truncate or stall).
# Content-Length is measured on the *encoded* bytes, per HTTP semantics.
body_bytes = post_data.encode("utf-8")

http_request = (
    "POST / HTTP/1.1\r\n"
    f"Host: {HOST}:{PORT}\r\n"
    "Accept-Encoding: gzip, deflate\r\n"
    "User-Agent: iHuman\r\n"
    "Connection: keep-alive\r\n"
    "Accept: */*\r\n"
    f"Content-Length: {len(body_bytes)}\r\n"
    "\r\n"  # end of headers
).encode("utf-8") + body_bytes

print(repr(http_request))

# socket.socket() is a context manager, so the fd is closed even on error.
with socket.socket() as sock:
    sock.connect((HOST, PORT))
    # sendall() loops until every byte is written; plain send() may short-write.
    sock.sendall(http_request)
    print(f"Socket name: {sock.getsockname()}")

    print("\nResponse:")
    while True:
        data = sock.recv(1024)
        if not data:
            break
        print(len(data), data)
|
188e426e9e2873268a960759426c465f190a16f5
|
3a3e715407bff57f7811356ddbb58d097d3ce6ab
|
/tests/integration/conftest.py
|
8f0d89170eb462329b988d9d91b9ee25f2666377
|
[
"MIT",
"Apache-2.0"
] |
permissive
|
smartsheet-platform/smartsheet-python-sdk
|
cc72585df2290ce89adc236f2c38e04717b75269
|
e32cef3c7faeba30a4cec37f9ac15b5a1d72cc83
|
refs/heads/master
| 2023-01-12T19:22:31.806414
| 2022-12-06T16:35:04
| 2022-12-06T16:35:04
| 28,953,329
| 137
| 94
|
Apache-2.0
| 2023-01-19T12:35:26
| 2015-01-08T06:59:27
|
Python
|
UTF-8
|
Python
| false
| false
| 5,275
|
py
|
conftest.py
|
import pytest
import smartsheet
from datetime import datetime
from dateutil.tz import *
import json
import os
import six
@pytest.fixture(scope="module")
def smart_setup(request):
    """Module-scoped integration fixture against a live Smartsheet account.

    Builds a timestamped test folder containing two sheets, resolves the
    fixture users from the SMARTSHEET_FIXTURE_USERS environment variable and
    ensures an 'exec' group exists, then registers a finalizer that deletes
    everything created here.  Returns a dict of all created objects.
    """
    # set up a test session folder with basic starting points
    smart = smartsheet.Smartsheet(max_retry_time=60)
    now = datetime.now(tzlocal()).strftime("%Y-%m-%d %H:%M:%S")
    # SMARTSHEET_FIXTURE_USERS is a JSON map of nickname -> {'id': ...};
    # without it the whole test run is aborted up front.
    users = os.environ.get('SMARTSHEET_FIXTURE_USERS', None)
    if users is None:
        pytest.exit('Environment not setup correctly...aborting')
    users = json.loads(users)
    fixusers = {}
    for nick,info in six.iteritems(users):
        profile = smart.Users.get_user(info['id'])
        assert isinstance(profile, smart.models.UserProfile)
        fixusers[nick] = profile
    # Reuse an existing 'exec' group when present; create it otherwise.
    action = smart.Groups.list_groups(include_all=True)
    assert isinstance(action, smart.models.IndexResult)
    grps = action.result
    groups = {}
    need_exec = True
    for gp in grps:
        groups[gp.name] = gp
        if gp.name == 'exec':
            need_exec = False
    if need_exec:
        group = smart.models.Group({
            'name': 'exec',
            'members': [
                smart.models.GroupMember({
                    'email': fixusers['moe'].email
                }),
                smart.models.GroupMember({
                    'email': fixusers['admin'].email
                })
            ]
        })
        action = smart.Groups.create_group(group)
        assert action.message == 'SUCCESS'
    # test run base folders
    folder_name = 'pytest ' + now
    action = smart.Home.create_folder(folder_name)
    assert action.message == 'SUCCESS'
    test_folder = action.result
    # add a sheet to mess around with
    sheet = smart.models.Sheet({
        'name': 'pytest_fixture_sheet ' + now,
        'columns': [{
            'title': 'The First Column',
            'primary': True,
            'type': 'TEXT_NUMBER'
        }, {
            'title': 'Favorite',
            'type': 'CHECKBOX',
            'symbol': 'STAR'
        }, {
            'title': 'Disposable',
            'type': 'TEXT_NUMBER'
        }]
    })
    action = smart.Folders.create_sheet_in_folder(test_folder.id, sheet)
    assert action.message == 'SUCCESS'
    sheet = action.result
    # get primary column id
    # (loop leaves `col` bound to the first primary column after break)
    for idx, col in enumerate(sheet.columns):
        if col.primary:
            break
    sheet_primary_col = col
    # add a row
    action = sheet.add_rows([smart.models.Row({
        'to_top': True,
        'cells': [{
            'column_id': sheet_primary_col.id,
            'value': 'The first column of the first row.'
        }]
    })])
    assert action.message == 'SUCCESS'
    # Re-fetch so the fixture exposes server-side state (row ids, etc.).
    sheet = smart.Sheets.get_sheet(sheet.id)
    assert isinstance(sheet, smart.models.Sheet)
    # Second, single-column sheet pre-populated with four brand rows.
    sheet_b = smart.models.Sheet({
        'name': 'pytest_fixture_sheetB ' + now,
        'columns': [{
            'title': 'Brand',
            'primary': True,
            'type': 'TEXT_NUMBER'
        }]
    })
    action = smart.Folders.create_sheet_in_folder(test_folder.id, sheet_b)
    assert action.message == 'SUCCESS'
    sheet_b = action.result
    for idx, col in enumerate(sheet_b.columns):
        if col.primary:
            break
    sheet_b_primary_col = col
    action = sheet_b.add_rows([
        smart.models.Row({
            'to_top': True,
            'cells': [{
                'column_id': sheet_b_primary_col.id,
                'value': 'Nike'
            }]
        }),
        smart.models.Row({
            'to_top': True,
            'cells': [{
                'column_id': sheet_b_primary_col.id,
                'value': 'Google'
            }]
        }),
        smart.models.Row({
            'to_top': True,
            'cells': [{
                'column_id': sheet_b_primary_col.id,
                'value': 'Adidas'
            }]
        }),
        smart.models.Row({
            'to_top': True,
            'cells': [{
                'column_id': sheet_b_primary_col.id,
                'value': 'Keen'
            }]
        })])
    assert action.message == 'SUCCESS'
    sheet_b = smart.Sheets.get_sheet(sheet_b.id)
    assert isinstance(sheet_b, smart.models.Sheet)
    # Everything a test might need, handed over as one dict.
    fixture = {
        'smart': smart,
        'folder': test_folder,
        'sheet': sheet,
        'sheet_primary_col': sheet_primary_col,
        'sheet_b': sheet_b,
        'sheet_b_primary_col': sheet_b_primary_col,
        'now': now,
        'users': fixusers,
        'groups': groups
    }
    def smart_teardown():
        # Delete in reverse dependency order: sheets first, then folders.
        action = fixture['smart'].Sheets.delete_sheet(fixture['sheet'].id)
        assert action.message == 'SUCCESS'
        print("deleted fixture sheet")
        action = fixture['smart'].Sheets.delete_sheet(fixture['sheet_b'].id)
        assert action.message == 'SUCCESS'
        print("deleted fixture sheet_b")
        action = fixture['smart'].Folders.delete_folder(fixture['folder'].id)
        assert action.message == 'SUCCESS'
        print("deleted fixture folder")
        # 'folder_b' may be added to the fixture by individual tests.
        if 'folder_b' in fixture:
            action = fixture['smart'].Folders.delete_folder(fixture['folder_b'].id)
            assert action.message == 'SUCCESS'
            print("deleted fixture folder_b")
    request.addfinalizer(smart_teardown)
    return fixture
|
235c9b6c7f435e4e89f1c19d6b9e6c175c824849
|
09c87fe780df6d1f9eb33799ed516a0bbd7ab1e3
|
/Research/wx doco/ImageViewer4.py
|
ae138a23a84ff39376e565ccfc968cfa960a846f
|
[] |
no_license
|
abulka/pynsource
|
8ad412b85dc1acaeb83d7d34af8cc033c6baba91
|
979436525c57fdaeaa832e960985e0406e123587
|
refs/heads/master
| 2023-04-13T12:58:02.911318
| 2023-04-11T09:56:32
| 2023-04-11T09:56:32
| 32,249,425
| 271
| 46
| null | 2022-10-10T04:36:57
| 2015-03-15T07:21:43
|
Python
|
UTF-8
|
Python
| false
| false
| 527
|
py
|
ImageViewer4.py
|
#!/usr/bin/python
import wx
class TestApp(wx.App):
    """Minimal wx demo: shows a frame and blits a PNG onto it on every paint."""

    def OnInit(self):
        # Called by wx.App during construction; must return True to continue.
        self.MainFrame = wx.Frame(None, -1, "Test Frame")
        self.MainFrame.SetBackgroundColour(wx.WHITE)
        # Bitmap is loaded once up front; path is relative to the CWD.
        self.BMP = wx.Bitmap("../../outyuml.png", wx.BITMAP_TYPE_PNG)
        self.MainFrame.Bind(wx.EVT_PAINT, self.OnPaint)
        self.MainFrame.Show()
        return True

    def OnPaint(self, Event):
        # Repaint handler: draw the cached bitmap at the frame origin.
        DC = wx.PaintDC(self.MainFrame)
        DC.DrawBitmap(self.BMP, 0, 0)
        Event.Skip()
# Instantiate the app and enter the event loop.
# NOTE(review): runs at import time — a __main__ guard would be safer; confirm
# no caller imports this module.
App = TestApp(1)
App.MainLoop()
|
644b4b19b9c6c2543e81f8972870fb6b7f11e951
|
f4095ef092092399102bb21d1198e324f10f53ed
|
/pontoon/base/errors.py
|
566810a1ae49411fee2c19a665056bb8a2002fa7
|
[
"BSD-3-Clause"
] |
permissive
|
mozilla/pontoon
|
2c53227570099ca666467d4e3d78e929bf456c9c
|
0c4f74e15b1e442a9cee9b1cd636214b24f5352b
|
refs/heads/master
| 2023-09-06T04:15:41.009180
| 2023-09-01T14:23:51
| 2023-09-01T14:23:51
| 1,385,890
| 1,367
| 713
|
BSD-3-Clause
| 2023-09-13T18:04:41
| 2011-02-19T11:25:51
|
Python
|
UTF-8
|
Python
| false
| false
| 481
|
py
|
errors.py
|
import logging
from django.conf import settings
from raygun4py import raygunprovider
log = logging.getLogger(__name__)
def send_exception(exception, exc_info=None):
    """Report *exception* to Raygun when an API key is configured.

    Falls back to logging the exception locally when
    ``settings.RAYGUN4PY_CONFIG['api_key']`` is empty/falsy.
    """
    api_key = settings.RAYGUN4PY_CONFIG["api_key"]
    if not api_key:
        # No provider configured: keep the error visible in the local log.
        log.error(exception, exc_info=exc_info)
        return
    sender = raygunprovider.RaygunSender(api_key)
    sender.send_exception(exception, exc_info=exc_info)
|
ffd8216ae4c71ae073f2a50692f5df0007d4e4db
|
0e36ab6fe38ff8a95b0773e37d86e43476ae9d18
|
/src/1258.synonymous-sentences.1191/solution.py
|
752942e4f79d0ea9fb4c967006101d191d07d5cd
|
[
"MIT"
] |
permissive
|
jiangshanmeta/meta
|
368a7920ade4568d5c51ef2669d87e7c18b862f8
|
2cc5c27e897bb227a6b50bafd1f14771b9592717
|
refs/heads/master
| 2023-08-16T14:39:48.310006
| 2023-08-06T08:57:53
| 2023-08-06T08:57:53
| 115,569,337
| 266
| 59
|
MIT
| 2023-08-06T08:57:54
| 2017-12-28T00:47:17
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 1,961
|
py
|
solution.py
|
# union-find + backTracking
class Solution:
    def generateSentences(self, synonyms, text):
        """Return every sentence obtainable from *text* by replacing words
        with their synonyms, in lexicographic order.

        Synonymy is transitive, so the pairs are merged with a union-find
        structure before expansion.
        """
        parent = {}

        def find(word):
            # Iterative root lookup with path halving.
            while parent[word] != word:
                parent[word] = parent[parent[word]]
                word = parent[word]
            return word

        def union(a, b):
            parent.setdefault(a, a)
            parent.setdefault(b, b)
            root_a, root_b = find(a), find(b)
            if root_a != root_b:
                parent[root_b] = root_a

        for first, second in synonyms:
            union(first, second)

        # Map every known word to the sorted member list of its class; the
        # list object is shared by all members of a class.
        clusters = {}
        for word in parent:
            clusters.setdefault(find(word), []).append(word)
        relatives = {}
        for members in clusters.values():
            members.sort()
            for word in members:
                relatives[word] = members

        result = []
        self.backTracking(text.split(' '), relatives, 0, [], result)
        return result

    def backTracking(self, sentense, relatives, index, sequence, result):
        """Depth-first expansion of per-word choices; finished sentences are
        appended to *result*."""
        if index == len(sentense):
            result.append(" ".join(sequence))
            return
        word = sentense[index]
        # Words without synonyms expand only to themselves.
        for choice in relatives.get(word, [word]):
            sequence.append(choice)
            self.backTracking(sentense, relatives, index + 1, sequence, result)
            sequence.pop()
|
2823c846535c3205a13a53f00f90551426e4e57a
|
6958f617af0c5a76304ceb1006c77bc70ca0e195
|
/benchmarks/microbenchmarks/_items.py
|
3d3a7cbf1b672ffe42964dd250ea2df27b5f44c2
|
[
"Apache-2.0"
] |
permissive
|
taichi-dev/taichi
|
3fae315a494f1c97392d5b931c939abbbfba1bdc
|
b30b511f55e3d0ebff765ee048d0aaa4ba9e7667
|
refs/heads/master
| 2023-09-02T13:28:18.208792
| 2023-08-23T23:22:43
| 2023-08-23T23:22:43
| 74,660,642
| 17,231
| 1,841
|
Apache-2.0
| 2023-09-14T11:29:32
| 2016-11-24T10:00:05
|
C++
|
UTF-8
|
Python
| false
| false
| 2,905
|
py
|
_items.py
|
from microbenchmarks._utils import size2tag
import taichi as ti
class BenchmarkItem:
    """Named registry of benchmark implementations keyed by tag string."""

    name = "item"

    def __init__(self):
        # Mapping of tag -> implementation object.
        self._items = {}

    def get(self):
        """Return the underlying tag -> implementation mapping."""
        return self._items

    def get_tags(self):
        """Return all registered tags as a list (insertion order)."""
        return list(self._items)

    def impl(self, tag: str):
        """Look up the implementation registered under *tag*."""
        return self._items[tag]

    def remove(self, tags: list):
        """Delete every tag in *tags*; raises KeyError for unknown tags."""
        for unwanted in tags:
            del self._items[unwanted]

    def update(self, adict: dict):
        """Merge *adict* into the registry, overwriting duplicate tags."""
        self._items.update(adict)
class DataType(BenchmarkItem):
    """Benchmark axis over Taichi scalar dtypes (i32/i64/f32/f64)."""

    name = "dtype"
    # Tags dropped by remove_integer(); note is_integer() recognises a wider
    # set that also includes the unsigned types.
    integer_list = ["i32", "i64"]

    def __init__(self):
        self._items = {
            str(ti.i32): ti.i32,
            str(ti.i64): ti.i64,
            str(ti.f32): ti.f32,
            str(ti.f64): ti.f64,
        }

    def remove_integer(self):
        """Drop the signed-integer dtype tags registered in __init__."""
        self.remove(self.integer_list)

    @staticmethod
    def is_integer(dtype: str) -> bool:
        """Return True if *dtype* names an integer type (signed or unsigned)."""
        # Idiom fix: direct membership test instead of `True if ... else False`.
        return dtype in ("i32", "u32", "i64", "u64")
class DataSize(BenchmarkItem):
    """Benchmark axis over buffer sizes: 16KB, 256KB, 4MB, 64MB."""

    name = "dsize"

    def __init__(self):
        # 4**e KiB for e in {2, 4, 6, 8} (kibibytes: KiB = 1024 bytes).
        byte_counts = [(4 ** exponent) * 1024 for exponent in range(2, 10, 2)]
        self._items = {size2tag(nbytes): nbytes for nbytes in byte_counts}
class Container(BenchmarkItem):
    """Benchmark axis over Taichi data containers (dense field vs ndarray)."""

    name = "container"

    def __init__(self):
        # Values are the Taichi factory callables, keyed by tag.
        self._items = {"field": ti.field, "ndarray": ti.ndarray}
class MathOps(BenchmarkItem):
    """Benchmark axis over Taichi elementwise math functions."""

    name = "math_op"

    # reference: https://docs.taichi-lang.org/docs/operator
    def __init__(self):
        self._items = {
            # Trigonometric
            "sin": ti.sin,
            "cos": ti.cos,
            "tan": ti.tan,
            "asin": ti.asin,
            "acos": ti.acos,
            "tanh": ti.tanh,
            # Other arithmetic
            "sqrt": ti.sqrt,
            "rsqrt": ti.rsqrt,  # A fast version for `1 / ti.sqrt(x)`.
            "exp": ti.exp,
            "log": ti.log,
            "round": ti.round,
            "floor": ti.floor,
            "ceil": ti.ceil,
            "abs": ti.abs,
        }
class AtomicOps(BenchmarkItem):
    """Benchmark axis over Taichi atomic read-modify-write operations."""

    name = "atomic_op"

    # Bitwise atomics are only defined for integer operand types.
    _LOGICAL_OPS = ("atomic_and", "atomic_or", "atomic_xor")

    def __init__(self):
        self._items = {
            "atomic_add": ti.atomic_add,
            "atomic_sub": ti.atomic_sub,
            "atomic_and": ti.atomic_and,
            "atomic_or": ti.atomic_or,
            "atomic_xor": ti.atomic_xor,
            "atomic_max": ti.atomic_max,
            "atomic_min": ti.atomic_min,
        }

    @staticmethod
    def is_logical_op(op: str) -> bool:
        """Return True for the bitwise (integer-only) atomics."""
        # Idiom fix: membership test replaces `True if ... else False` and the
        # per-call list literal.
        return op in AtomicOps._LOGICAL_OPS

    @staticmethod
    def is_supported_type(op: str, dtype: str) -> bool:
        """Return False only for a bitwise atomic paired with a non-integer dtype."""
        return not (AtomicOps.is_logical_op(op) and not DataType.is_integer(dtype))
|
5bd04afe1899f8fc8b37591010745be474766cf9
|
cca493aeb14bb9e56e5c90eed176366896e675ac
|
/ColorPalette/__main__.py
|
c359e1f35be9a102008282002b0a1be717982461
|
[
"MIT"
] |
permissive
|
rodartha/ColorPalette
|
8e4819f192a25f4fdad7315722b805cf3568e25e
|
6fdad5c065106ea4e79e6d8269608da0021604fd
|
refs/heads/master
| 2023-07-21T13:48:33.388819
| 2020-08-26T18:43:09
| 2020-08-26T18:43:09
| 217,322,625
| 310
| 38
|
MIT
| 2023-07-06T21:48:01
| 2019-10-24T14:43:10
|
Python
|
UTF-8
|
Python
| false
| false
| 5,892
|
py
|
__main__.py
|
"""
Colorpalette python package.
Colin Page <cwpage@umich.edu>
Copyright 2019.
"""
import os
from PIL import Image, ImageFont, ImageDraw
import matplotlib.pyplot as plt
from scipy import cluster
import pandas as pd
import math
import colorsys
import click
def get_color_pallete(input_file, output_file, num_colors, display_color=False):
    """Extract a *num_colors* palette from *input_file* via k-means and save a
    horizontal swatch strip to *output_file*.

    When *display_color* is True, each swatch is labelled with its hex code.
    NOTE(review): the pixel unpack assumes 3 channels per pixel; an image with
    an alpha channel would break the `r, g, b = pixel` line — confirm callers.
    """
    img = plt.imread(input_file)
    # Flatten the pixel grid into three per-channel lists.
    red, green, blue = [], [], []
    for line in img:
        for pixel in line:
            r, g, b = pixel
            red.append(r)
            green.append(g)
            blue.append(b)
    df = pd.DataFrame({
        'red': red,
        'green': green,
        'blue': blue
    })
    # k-means on whitened (unit-variance) channels so no channel dominates.
    df['standardized_red'] = cluster.vq.whiten(df['red'])
    df['standardized_green'] = cluster.vq.whiten(df['green'])
    df['standardized_blue'] = cluster.vq.whiten(df['blue'])
    color_pallete, distortion = cluster.vq.kmeans(df[['standardized_red', 'standardized_green', 'standardized_blue']], num_colors)
    colors = []
    red_std, green_std, blue_std = df[['red', 'green', 'blue']].std()
    # Undo the whitening to recover channel values for each cluster centre.
    for color in color_pallete:
        scaled_red, scaled_green, scaled_blue = color
        colors.append((
            math.ceil(scaled_red * red_std) ,
            math.ceil(scaled_green * green_std) ,
            math.ceil(scaled_blue * blue_std)
        ))
    # Perceptual sort so adjacent swatches look related.
    colors.sort(key=lambda x: step(x[0], x[1], x[2], 8))
    # FIXME: need a smart way to resize fonts based on picture size
    font_size = 11
    font = ImageFont.truetype("Roboto-Medium.ttf", font_size)
    sample_text = '#F8F8F7'
    proper_font_size = False
    pil_img = Image.open(input_file)
    pil_width, pil_height = pil_img.size
    # Palette strip height: a smaller fraction for portrait images.
    height = 0
    if pil_height > pil_width:
        height = math.floor(pil_height / 6)
    else:
        height = math.floor(pil_height / 4)
    pallete = Image.new('RGB', (pil_width, height), (255, 255, 255))
    # Divide the strip into num_colors swatches separated by small gutters.
    single_img_space = math.floor(pil_width / num_colors)
    single_img_offset = math.floor(single_img_space / 14)
    total_offset = single_img_offset * (num_colors + 1)
    single_img_width = math.floor((pil_width - total_offset) / num_colors)
    single_img_space = single_img_width + single_img_offset
    final_img_width = (single_img_width + (pil_width - (single_img_space * num_colors))) - single_img_offset
    # Grow/shrink the font until the sample hex label just fits one swatch.
    # NOTE(review): may oscillate for very narrow swatches (< ~20px) — confirm.
    while not proper_font_size:
        if get_text_width(font, sample_text) > single_img_width and font_size > 1:
            font_size -= 1
            font = ImageFont.truetype("Roboto-Medium.ttf", font_size)
        elif get_text_width(font, sample_text) < single_img_width - 20:
            font_size += 1
            font = ImageFont.truetype("Roboto-Medium.ttf", font_size)
        else:
            proper_font_size = True
    # Paste each swatch; the first and last get special offsets/widths so the
    # strip spans the full image width.
    x_offset = 0
    for i in range(len(colors)):
        if i == len(colors) - 1:
            new_img = Image.new('RGB', (final_img_width, height), colors[i])
            pallete.paste(new_img, (x_offset, 0))
            if display_color:
                draw = ImageDraw.Draw(pallete)
                draw.text((x_offset, height - 20 - get_text_height(font, sample_text)), get_hex_color(colors[i]), (255, 255, 255), font=font)
        elif i == 0:
            new_img = Image.new('RGB', (single_img_width, height), colors[i])
            pallete.paste(new_img, (single_img_offset, 0))
            if display_color:
                draw = ImageDraw.Draw(pallete)
                draw.text((single_img_offset, height - 20 - get_text_height(font, sample_text)), get_hex_color(colors[i]), (255, 255, 255), font=font)
            x_offset += single_img_space + single_img_offset
        else:
            new_img = Image.new('RGB', (single_img_width, height), colors[i])
            pallete.paste(new_img, (x_offset, 0))
            if display_color:
                draw = ImageDraw.Draw(pallete)
                draw.text((x_offset, height - 20 - get_text_height(font, sample_text)), get_hex_color(colors[i]), (255, 255, 255), font=font)
            x_offset += single_img_space
    pallete.save(output_file)
def append_color_pallete(original_image, color_pallete, output_file):
    """Stack the palette strip underneath the original image and save it."""
    base = Image.open(original_image)
    base_width, base_height = base.size
    strip = Image.open(color_pallete)
    strip_height = strip.size[1]

    # Portrait images get a tighter gap between photo and palette.
    if base_height > base_width:
        gap = math.ceil(base_height / 30)
    else:
        gap = math.ceil(base_height / 20)

    canvas_size = (base_width, base_height + strip_height + gap * 2)
    canvas = Image.new('RGB', canvas_size, (255, 255, 255))
    canvas.paste(base, (0, 0))
    canvas.paste(strip, (0, base_height + gap))
    canvas.save(output_file)
def create_pallete(filename, num_colors, display_color=False):
    """Generate '<name>_palette' and '<name>_with_palette' images next to
    *filename*.

    Args:
        filename: path ('/'-separated) to a .jpg or .png image.
        num_colors: number of palette swatches to extract.
        display_color: overlay hex codes on the swatches when True.

    Raises:
        ValueError: if the file extension is not 'jpg' or 'png'.
    """
    file_path = filename.split('/')
    file_prefix = ''
    file_split = ''
    # Split the path into directory prefix and final component.
    for i in range(len(file_path)):
        if i != len(file_path) - 1:
            file_prefix = file_prefix + file_path[i] + '/'
        else:
            file_split = file_path[i]
    file_split = file_split.split('.')
    if file_split[1] != 'jpg' and file_split[1] != 'png':
        # BUG FIX: the original `raise("...")` raised a TypeError because a
        # str is not an exception; raise a proper ValueError instead.
        raise ValueError("The file must be a jpg or png")
    output_palette = file_prefix + file_split[0] + '_palette.' + file_split[1]
    output_combined = file_prefix + file_split[0] + '_with_palette.' + file_split[1]
    get_color_pallete(filename, output_palette, num_colors, display_color)
    append_color_pallete(filename, output_palette, output_combined)
def step(r, g, b, repititions=1):
    """Sort key for perceptual colour ordering: (hue band, luminosity, value band).

    Odd hue bands flip the luminosity/value direction so neighbouring bands
    zig-zag, which reduces visible banding when sorting a palette.
    (Removed the unused locals `lum2` and `s` that the original computed.)
    """
    lum = math.sqrt(0.241 * r + 0.691 * g + 0.068 * b)
    h, _, v = colorsys.rgb_to_hsv(r, g, b)
    h2 = int(h * repititions)
    v2 = int(v * repititions)
    if h2 % 2 == 1:
        v2 = repititions - v2
        lum = repititions - lum
    return (h2, lum, v2)
def get_hex_color(color):
    """Format an (r, g, b) tuple as a lowercase '#rrggbb' string."""
    r, g, b = color
    return f"#{r:02x}{g:02x}{b:02x}"
def get_text_width(font, text):
    """Sum of per-character advance widths of *text* under *font*."""
    return sum(font.getsize(ch)[0] for ch in text)
def get_text_height(font, text):
    """Tallest per-character height of *text* under *font*.

    Raises ValueError for an empty string (same as the original max([])).
    """
    return max(font.getsize(ch)[1] for ch in text)
@click.command()
@click.argument('image_file')
@click.argument('num_colors')
@click.option('--text', '-t', default=False, is_flag=True, help='')
def main(image_file, num_colors, text):
    """CLI entry point: build palette images next to IMAGE_FILE.

    NUM_COLORS is parsed as an int; --text/-t overlays hex codes on swatches.
    """
    try:
        create_pallete(image_file, int(num_colors), text)
    except Exception as e:
        # Broad catch is deliberate for a CLI: print the error instead of a
        # traceback.
        print(e)
if __name__ == '__main__':
    # pylint: disable=no-value-for-parameter
    # Click injects the arguments from sys.argv at call time.
    main()
|
602d183f7d69dfc0c9bde6150458034c0aaf24fe
|
f305f84ea6f721c2391300f0a60e21d2ce14f2a5
|
/6_tree/重构json/297.二叉树的序列化与反序列化-json.py
|
b2422c2ca7d98c09400edbbb51aeac8ff5cccd45
|
[] |
no_license
|
981377660LMT/algorithm-study
|
f2ada3e6959338ae1bc21934a84f7314a8ecff82
|
7e79e26bb8f641868561b186e34c1127ed63c9e0
|
refs/heads/master
| 2023-09-01T18:26:16.525579
| 2023-09-01T12:21:58
| 2023-09-01T12:21:58
| 385,861,235
| 225
| 24
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 765
|
py
|
297.二叉树的序列化与反序列化-json.py
|
import json
from typing import Optional
class TreeNode:
    """Binary-tree node holding a value and optional children."""

    def __init__(self, x: int, left: Optional['TreeNode'] = None,
                 right: Optional['TreeNode'] = None):
        # BUG FIX: Codec.deserialize constructs nodes as
        # TreeNode(val, left, right), but the original constructor accepted
        # only `x` and always raised TypeError there.  The new optional
        # parameters default to None, so the one-argument form still works.
        self.val = x
        self.left: Optional['TreeNode'] = left
        self.right: Optional['TreeNode'] = right
class Codec:
    """Serialise/deserialise a binary tree through nested-dict JSON."""

    def serialize(self, root: TreeNode) -> str:
        """Encode the tree rooted at *root*; an absent subtree becomes JSON null."""
        def dfs(root: Optional['TreeNode']):
            if not root:
                return
            return {'val': root.val, 'left': dfs(root.left), 'right': dfs(root.right)}
        return json.dumps(dfs(root))

    def deserialize(self, data: str) -> Optional['TreeNode']:
        """Rebuild a tree from the JSON produced by serialize().

        NOTE(review): builds nodes as TreeNode(val, left, right) — requires a
        TreeNode constructor that accepts children; confirm the class above
        supports the three-argument form.
        """
        def dfs(obj):
            if obj is None:
                return
            return TreeNode(obj['val'], dfs(obj['left']), dfs(obj['right']))
        return dfs(json.loads(data))
|
da0c125a8f3418fe3687483050095b381274233e
|
d4412fbe37540e2c4cbe59ed6503d3661ccb7d9c
|
/tests/test_auto_parallel/test_tensor_shard/test_metainfo/test_embedding_metainfo.py
|
e3f76a95c4a5f4d7832b5916e4172276d2c43a41
|
[
"BSD-3-Clause",
"LicenseRef-scancode-warranty-disclaimer",
"Apache-2.0",
"BSD-2-Clause",
"MIT"
] |
permissive
|
hpcaitech/ColossalAI
|
a082ed08a3807b53c49d1f86835b9808590d9042
|
c7b60f75470f067d1342705708810a660eabd684
|
refs/heads/main
| 2023-09-01T04:13:13.834565
| 2023-08-30T15:07:21
| 2023-08-30T15:07:21
| 422,274,596
| 32,044
| 4,084
|
Apache-2.0
| 2023-09-14T15:19:54
| 2021-10-28T16:19:44
|
Python
|
UTF-8
|
Python
| false
| false
| 2,383
|
py
|
test_embedding_metainfo.py
|
import pytest
import torch
from colossalai.auto_parallel.tensor_shard.sharding_strategy import OperationData, OperationDataType
from colossalai.testing.utils import clear_cache_before_run
from tests.test_auto_parallel.test_tensor_shard.test_metainfo.utils import print_results
if torch.__version__ >= '1.12.0':
from colossalai.auto_parallel.meta_profiler import meta_register
@pytest.mark.skipif(torch.__version__ < '1.12.0', reason="need pytorch 1.12.0 or higher for aten level operations")
@clear_cache_before_run()
def test_embedding_meta_info():
    """Compare the registered Embedding meta-profiler estimates against the
    memory actually allocated by a real CUDA forward/backward pass."""
    meta_func = meta_register.get(torch.nn.Embedding)

    # construct meta tensors (GPT-2-like vocab of 50257, hidden size 1024)
    input_tensor = torch.randint(0, 50256, (8, 1024), device="meta")
    weight_tensor = torch.rand(50257, 1024, device="meta")
    output_tensor = torch.rand(8, 1024, 1024, device="meta")

    # construct operation data
    input_data = OperationData(name="input", type=OperationDataType.ARG, data=input_tensor)
    weight_data = OperationData(name="weight", type=OperationDataType.PARAM, data=weight_tensor)
    output_data = OperationData(name="output", type=OperationDataType.OUTPUT, data=output_tensor)

    # construct args and kwargs
    args = [input_data, weight_data, output_data]
    kwargs = {'inplace': False}

    # estimated results from the meta profiler
    compute_cost, memory_cost, fwd_in, fwd_buffer, fwd_out = meta_func(*args, **kwargs)

    # actual results measured on the GPU
    input_real_tensor = torch.randint(0, 50256, (8, 1024), device="cuda")

    embedding_module = torch.nn.Embedding(50257, 1024).cuda()

    # fwd: measure allocation delta and peak relative to a fresh baseline
    torch.cuda.reset_peak_memory_stats()
    mem_stamp0 = torch.cuda.memory_allocated()
    output_real_tensor = embedding_module(input_real_tensor)
    fwd_allocated = torch.cuda.memory_allocated() - mem_stamp0
    fwd_peak = torch.cuda.max_memory_allocated() - mem_stamp0

    # bwd: same measurement around the backward pass
    upstream_grad = torch.rand_like(output_real_tensor)
    torch.cuda.reset_peak_memory_stats()
    mem_stamp0 = torch.cuda.memory_allocated()
    torch.autograd.backward(output_real_tensor, upstream_grad)
    bwd_allocated = torch.cuda.memory_allocated() - mem_stamp0
    bwd_peak = torch.cuda.max_memory_allocated() - mem_stamp0

    print_results([input_real_tensor], [output_real_tensor], compute_cost, memory_cost, fwd_allocated, fwd_peak,
                  bwd_allocated, bwd_peak)
if __name__ == '__main__':
    # Allow running this test directly (outside pytest) for quick checks.
    test_embedding_meta_info()
|
14e175f20fa543a9069822980835947d0307ffb9
|
fdbfbcf4d6a0ef6f3c1b600e7b8037eed0f03f9e
|
/examples/hydroelastic/python_nonconvex_mesh/drop_pepper.py
|
9ad715b905980fa129ea18abce95f8037716f98e
|
[
"BSD-3-Clause"
] |
permissive
|
RobotLocomotion/drake
|
4529c397f8424145623dd70665531b5e246749a0
|
3905758e8e99b0f2332461b1cb630907245e0572
|
refs/heads/master
| 2023-08-30T21:45:12.782437
| 2023-08-30T15:59:07
| 2023-08-30T15:59:07
| 16,256,144
| 2,904
| 1,270
|
NOASSERTION
| 2023-09-14T20:51:30
| 2014-01-26T16:11:05
|
C++
|
UTF-8
|
Python
| false
| false
| 5,394
|
py
|
drop_pepper.py
|
"""
This is an example of using hydroelastic contact model through pydrake with
non-convex meshes. It reads SDFormat files of:
- a non-convex mesh of a yellow bell pepper with compliant-hydroelastic
properties,
- a non-convex mesh of a bowl with rigid-hydroelastic properties,
- and a table top (anchored to the World) represented as a box primitive with
compliant-hydroelastic properties.
"""
import argparse
import numpy as np
from pydrake.math import RigidTransform
from pydrake.multibody.parsing import Parser
from pydrake.multibody.plant import AddMultibodyPlant
from pydrake.multibody.plant import MultibodyPlantConfig
from pydrake.systems.analysis import ApplySimulatorConfig
from pydrake.systems.analysis import Simulator
from pydrake.systems.analysis import SimulatorConfig
from pydrake.systems.analysis import PrintSimulatorStatistics
from pydrake.systems.framework import DiagramBuilder
from pydrake.visualization import AddDefaultVisualization
def make_pepper_bowl_table(contact_model, time_step):
    """Build the pepper / bowl / table scene as a Drake diagram.

    Args:
        contact_model: Contact model name ('point', 'hydroelastic', or
            'hydroelastic_with_fallback').
        time_step: Fixed discrete time step (seconds) for the plant.

    Returns:
        A (diagram, plant) tuple; the table is welded to World, while the
        pepper and bowl remain free bodies.
    """
    builder = DiagramBuilder()
    plant_config = MultibodyPlantConfig(
        time_step=time_step,
        contact_model=contact_model,
        contact_surface_representation="polygon",
        discrete_contact_solver="sap")
    plant, _ = AddMultibodyPlant(plant_config, builder)

    parser = Parser(plant)
    # The pepper and bowl are free bodies; only their model instances for the
    # table are needed later (for the weld), so the first two returns are
    # discarded.
    for free_body_sdf in ("pepper.sdf", "bowl.sdf"):
        parser.AddModels(
            url="package://drake/examples/hydroelastic/python_nonconvex_mesh/"
            + free_body_sdf)
    (table,) = parser.AddModels(
        url="package://drake/examples/hydroelastic/python_nonconvex_mesh/"
            "table.sdf")

    # The table box is 2 cm thick; lowering it 1 cm puts its top surface
    # exactly on World's X-Y plane.
    X_WTable = RigidTransform(np.array([0, 0, -0.01]))
    plant.WeldFrames(
        frame_on_parent_F=plant.world_frame(),
        frame_on_child_M=plant.GetFrameByName("table", table),
        X_FM=X_WTable)
    plant.Finalize()

    AddDefaultVisualization(builder=builder)
    return builder.Build(), plant
def simulate_diagram(diagram, plant,
                     pepper_position, pepper_wz,
                     bowl_position,
                     simulation_time, target_realtime_rate):
    """Set the initial state of the free bodies and run the simulation.

    Args:
        diagram: Diagram built by make_pepper_bowl_table().
        plant: The MultibodyPlant inside that diagram.
        pepper_position: (x, y, z) initial position of the pepper in World.
        pepper_wz: Initial angular velocity of the pepper about z, in rad/s.
        bowl_position: (x, y, z) initial position of the bowl in World.
        simulation_time: Duration to simulate, in seconds.
        target_realtime_rate: Target realtime rate (0 runs as fast as able).
    """
    simulator = Simulator(diagram)
    ApplySimulatorConfig(
        SimulatorConfig(target_realtime_rate=target_realtime_rate,
                        publish_every_time_step=True),
        simulator)

    # Each free body's generalized position is an identity quaternion
    # (w, x, y, z) followed by its World translation: pepper first, then bowl.
    identity_quaternion = [1, 0, 0, 0]
    initial_positions = np.array(
        identity_quaternion + list(pepper_position)
        + identity_quaternion + list(bowl_position))
    # Only the pepper spins (about z); all other velocities start at zero.
    initial_velocities = np.concatenate(([0, 0, pepper_wz], np.zeros(9)))
    plant.SetPositionsAndVelocities(
        diagram.GetSubsystemContext(plant, simulator.get_context()),
        np.concatenate((initial_positions, initial_velocities)))

    simulator.get_mutable_context().SetTime(0)
    simulator.Initialize()
    simulator.AdvanceTo(boundary_time=simulation_time)
    PrintSimulatorStatistics(simulator)
if __name__ == "__main__":
    # Command-line entry point: parse options, build the scene, simulate.
    cli = argparse.ArgumentParser(description=__doc__)
    cli.add_argument(
        "--simulation_time", type=float, default=2,
        help="Desired duration of the simulation in seconds. "
             "Default %(default)s.")
    cli.add_argument(
        "--contact_model", type=str, default="hydroelastic_with_fallback",
        help="Contact model. Options are: 'point', 'hydroelastic', "
             "'hydroelastic_with_fallback'. Default %(default)s.")
    cli.add_argument(
        "--time_step", type=float, default=0.01,
        help="The fixed time step period (in seconds) of discrete updates "
             "for the multibody plant modeled as a discrete system. "
             "Strictly positive. Default %(default)s.")
    cli.add_argument(
        "--pepper_position", nargs=3, metavar=('x', 'y', 'z'),
        default=[0, -0.15, 0.10],
        help="Pepper's initial position of the bottom of the pepper: "
             "x, y, z (in meters) in World frame. Default %(default)s.")
    cli.add_argument(
        "--pepper_wz", type=float, default=150,
        help="Pepper's initial angular velocity in the z-axis in rad/s. "
             "Default %(default)s.")
    cli.add_argument(
        "--bowl_position", nargs=3, metavar=('x', 'y', 'z'),
        default=[0, -0.07, 0.061],
        help="Bowl's initial position of its center: "
             "x, y, z (in meters) in World frame. Default %(default)s.")
    cli.add_argument(
        "--target_realtime_rate", type=float, default=1.0,
        help="Target realtime rate. Set to 0 to run as fast as it can. "
             "Default %(default)s.")
    options = cli.parse_args()

    diagram, plant = make_pepper_bowl_table(options.contact_model,
                                            options.time_step)
    simulate_diagram(diagram, plant,
                     np.array(options.pepper_position),
                     options.pepper_wz,
                     np.array(options.bowl_position),
                     options.simulation_time,
                     options.target_realtime_rate)
|
a139583627320d8a6356629132ebce27f0164a1b
|
8a33857e67de2bcdd6ab67dd69fa21debb3810fe
|
/webserver/endpoints/annotations.py
|
7ea5a34e729990a2afc9b09810160fdb97f235c6
|
[
"MIT"
] |
permissive
|
sacdallago/bio_embeddings
|
78c5f11e852c46bb98c78633f529f53457639c7a
|
efb9801f0de9b9d51d19b741088763a7d2d0c3a2
|
refs/heads/develop
| 2023-05-10T18:49:39.640162
| 2022-08-04T07:43:37
| 2022-08-04T07:43:37
| 180,608,802
| 383
| 60
|
MIT
| 2023-02-16T01:41:53
| 2019-04-10T15:17:42
|
HTML
|
UTF-8
|
Python
| false
| false
| 8,897
|
py
|
annotations.py
|
from itertools import groupby
from flask import request, abort
from flask_restx import Resource
from webserver.endpoints import api
from webserver.endpoints.request_models import sequence_post_parameters_annotations, sequence_get_parameters_annotations, \
residue_landscape_post_parameters
from webserver.endpoints.task_interface import get_features
from webserver.endpoints.task_interface import get_residue_landscape
from webserver.endpoints.utils import check_valid_sequence, get_queues
from webserver.utilities.parsers import (
Source, Evidence, annotations_to_protvista_converter,
SecondaryStructure, Disorder, BindingResidues, MembraneResidues
)
ns = api.namespace("annotations", description="Get annotations on the fly.")
def _filter_ontology(annotations, ontology):
if annotations.get(ontology):
first_k = next(goannotations for identifier, goannotations in groupby(annotations[ontology], lambda x: x["identifier"]))
annotations[ontology] = list(first_k)
def _get_annotations_from_params(params):
    """Predict annotations for one protein sequence and shape the response.

    Reads from *params* (query-string dict for GET, JSON body for POST):
      - 'sequence' (required): amino-acid sequence, at most 2000 residues.
      - 'model': embedder name; defaults to 'prottrans_t5_xl_u50'.
      - 'format': 'protvista-predictprotein', 'legacy' (default),
        'go-predictprotein', or 'full'.
      - 'only_closest_k': when True (default), keep only the first
        identifier group of each GO ontology prediction.

    Aborts with HTTP 400 on invalid input or an unknown format.
    """
    sequence = params.get('sequence')
    # Reject missing, over-long, or non-amino-acid sequences up front.
    if not sequence or len(sequence) > 2000 or not check_valid_sequence(sequence):
        return abort(400, "Sequence is too long or contains invalid characters.")
    model_name = params.get('model', 'prottrans_t5_xl_u50')
    annotations = get_features(model_name, sequence)
    # Residue-landscape predictions only exist for the T5 model and only when
    # the dedicated worker queue is available.
    if model_name == 'prottrans_t5_xl_u50' and 'prott5_residue_landscape_annotations' in get_queues():
        residue_landscape_output = get_residue_landscape(model_name=model_name, sequence=sequence)
        # merge the output of the residue landscape into the feature dict
        # add the meta information
        for key in residue_landscape_output['meta']:
            annotations['meta'][key] = residue_landscape_output['meta'][key]
        residue_landscape_output.pop('meta', None)
        # add all the remaining information
        for key in residue_landscape_output:
            annotations[key] = residue_landscape_output[key]
    annotations['sequence'] = sequence
    # NOTE(review): 'format' shadows the builtin of the same name (local only).
    format = params.get('format', 'legacy')
    only_closest_k = params.get('only_closest_k', True)
    # NOTE(review): GET query parameters arrive as strings, so '== True' only
    # matches the default value, never a client-supplied "true"/"false" string
    # — confirm whether string values should be honored here.
    if only_closest_k == True:
        _filter_ontology(annotations, "predictedBPO")
        _filter_ontology(annotations, "predictedCCO")
        _filter_ontology(annotations, "predictedMFO")
    if format == "protvista-predictprotein":
        # Wrap each per-residue feature string in ProtVista track objects,
        # all attributed to this service + model as the evidence source.
        source = Source(
            url=request.url,
            id="sync",
            name=f"bio_embeddings using {model_name}"
        )
        evidence = Evidence(
            source=source,
        )
        protvista_features = dict()
        protvista_features['sequence'] = sequence
        protvista_features['features'] = list()
        if annotations.get('predictedDSSP8'):
            protvista_features['features'].extend(
                annotations_to_protvista_converter(
                    features_string=annotations['predictedDSSP8'],
                    evidences=[evidence],
                    type=f"SECONDARY_STRUCTURE_8_STATES_({model_name})", feature_enum=SecondaryStructure
                )
            )
        if annotations.get('predictedDSSP3'):
            protvista_features['features'].extend(
                annotations_to_protvista_converter(
                    features_string=annotations['predictedDSSP3'],
                    evidences=[evidence],
                    type=f"SECONDARY_STRUCTURE_3_STATES_({model_name})",
                    feature_enum=SecondaryStructure
                )
            )
        if annotations.get('predictedDisorder'):
            protvista_features['features'].extend(
                annotations_to_protvista_converter(
                    features_string=annotations['predictedDisorder'],
                    evidences=[evidence],
                    type=f"DISORDER_({model_name})",
                    feature_enum=Disorder
                )
            )
        if annotations.get('predictedBindingMetal'):
            protvista_features['features'].extend(
                annotations_to_protvista_converter(
                    features_string=annotations['predictedBindingMetal'],
                    evidences=[evidence],
                    type=f"BINDING_METAL_({model_name})",
                    feature_enum=BindingResidues
                )
            )
        if annotations.get('predictedBindingNucleicAcids'):
            protvista_features['features'].extend(
                annotations_to_protvista_converter(
                    features_string=annotations['predictedBindingNucleicAcids'],
                    evidences=[evidence],
                    type=f"BINDING_NUCLEIC_ACIDS_({model_name})",
                    feature_enum=BindingResidues
                )
            )
        if annotations.get('predictedBindingSmallMolecules'):
            protvista_features['features'].extend(
                annotations_to_protvista_converter(
                    features_string=annotations['predictedBindingSmallMolecules'],
                    evidences=[evidence],
                    type=f"BINDING_SMALL_MOLECULES_({model_name})",
                    feature_enum=BindingResidues
                )
            )
        if annotations.get('predictedTransmembrane'):
            protvista_features['features'].extend(
                annotations_to_protvista_converter(
                    features_string=annotations['predictedTransmembrane'],
                    evidences=[evidence],
                    type=f"TRANSMEMBRANE_({model_name})",
                    feature_enum=MembraneResidues
                )
            )
        return protvista_features
    elif format == "legacy":
        # Collapse GO predictions to {GO_Term: max reliability index}.
        predictedCCO = {}
        predictedBPO = {}
        predictedMFO = {}
        for prediction in annotations['predictedCCO']:
            predictedCCO[prediction['GO_Term']] = max(predictedCCO.get(prediction['GO_Term'], -1), prediction['RI'])
        for prediction in annotations['predictedBPO']:
            predictedBPO[prediction['GO_Term']] = max(predictedBPO.get(prediction['GO_Term'], -1), prediction['RI'])
        for prediction in annotations['predictedMFO']:
            predictedMFO[prediction['GO_Term']] = max(predictedMFO.get(prediction['GO_Term'], -1), prediction['RI'])
        annotations['predictedCCO'] = predictedCCO
        annotations['predictedBPO'] = predictedBPO
        annotations['predictedMFO'] = predictedMFO
        return annotations
    elif format == "go-predictprotein":
        # Re-shape GO predictions into the PredictProtein ontology schema;
        # RI in [0, 1] is scaled to an integer percentage score.
        mapping_function = lambda x: {
            "gotermid": x['GO_Term'],
            "gotermname": x['GO_Name'],
            "gotermscore": round(x['RI'] * 100)
        }
        predictedCCO = {
            "ontology": "Cellular Component Ontology",
            "goTermWithScore": list(map(mapping_function, annotations['predictedCCO']))
        }
        predictedBPO = {
            "ontology": "Biological Process Ontology",
            "goTermWithScore": list(map(mapping_function, annotations['predictedBPO']))
        }
        predictedMFO = {
            "ontology": "Molecular Function Ontology",
            "goTermWithScore": list(map(mapping_function, annotations['predictedMFO']))
        }
        return [predictedBPO, predictedCCO, predictedMFO]
    elif format == "full":
        # Raw feature dict, unmodified beyond the optional filtering above.
        return annotations
    else:
        abort(400, f"Wrong format passed: {format}")
@ns.route('')
class Annotations(Resource):
    """REST resource that computes annotations for a single sequence."""

    @api.expect(sequence_get_parameters_annotations, validate=True)
    @api.response(200, "Annotations in specified format")
    @api.response(400, "Invalid input. See return message for details.")
    @api.response(505, "Server error")
    def get(self):
        # GET variant: parameters come from the query string.
        return _get_annotations_from_params(request.args)

    @api.expect(sequence_post_parameters_annotations, validate=True)
    @api.response(200, "Annotations in specified format")
    @api.response(400, "Invalid input. See return message for details.")
    @api.response(505, "Server error")
    def post(self):
        # POST variant: parameters come from the JSON body.
        return _get_annotations_from_params(request.json)
@ns.route('/residue/landscape')
class residue_landscape(Resource):
    """REST resource returning the residue landscape for one sequence."""

    @api.expect(residue_landscape_post_parameters, validate=True)
    @api.response(200, "Returns an hdf5 file with one dataset called `sequence` "
                       "containing the embedding_buffer of the supplied sequence.")
    @api.response(400, "Invalid input. See return message for details.")
    @api.response(505, "Server error")
    def post(self):
        seq = params_sequence = request.json.get('sequence')
        # Same validation rules as the annotations endpoint.
        if not seq or len(seq) > 2000 or not check_valid_sequence(seq):
            return abort(400, "Sequence is too long or contains invalid characters.")
        return get_residue_landscape(model_name='prottrans_t5_xl_u50',
                                     sequence=seq)
|
53a40395ca2ac795b2ea3872ff2e54d29c976598
|
c1b77c0b1630c2e319e7ba7782a744f4ac867f7d
|
/gpytorch/functions/_log_normal_cdf.py
|
8662b7db0a1cf59d8571862d10a71e1eb8e6341a
|
[
"MIT",
"Python-2.0"
] |
permissive
|
cornellius-gp/gpytorch
|
6b9ab969b2888fa7f27f236a1b20041f00cc0253
|
5e93d2c04ac0634a7aeea9fd964be529bb250888
|
refs/heads/master
| 2023-08-31T21:13:02.741585
| 2023-08-25T19:24:53
| 2023-08-25T19:24:53
| 93,868,719
| 3,182
| 578
|
MIT
| 2023-09-13T01:06:00
| 2017-06-09T14:48:20
|
Python
|
UTF-8
|
Python
| false
| false
| 3,778
|
py
|
_log_normal_cdf.py
|
#!/usr/bin/env python3
import math
import torch
from torch.autograd import Function
from torch.distributions import Normal
class LogNormalCDF(Function):
    """Autograd function computing log(Phi(z)) elementwise, where Phi is the
    standard normal CDF.

    The forward pass splits the input into three regimes — near zero, very
    negative (z < -1), and ordinary — using series / rational approximations
    in the first two to avoid the catastrophic underflow of log(cdf(z)) for
    very negative z.
    """

    @staticmethod
    def forward(ctx, z):
        # Coefficients for the near-zero series expansion.
        # NOTE(review): the provenance of these constants is not shown here
        # (presumably a published erf/erfc approximation) — verify against the
        # original reference before editing any of them.
        c = torch.tensor(
            [
                0.00048204,
                -0.00142906,
                0.0013200243174,
                0.0009461589032,
                -0.0045563339802,
                0.00556964649138,
                0.00125993961762116,
                -0.01621575378835404,
                0.02629651521057465,
                -0.001829764677455021,
                2 * (1 - math.pi / 3),
                (4 - math.pi) / 3,
                1,
                1,
            ],
            dtype=z.dtype,
            device=z.device,
        )
        # Numerator coefficients of the rational approximation used for z < -1.
        r = torch.tensor(
            [
                1.2753666447299659525,
                5.019049726784267463450,
                6.1602098531096305441,
                7.409740605964741794425,
                2.9788656263939928886,
            ],
            dtype=z.dtype,
            device=z.device,
        )
        # Denominator coefficients of the same rational approximation.
        q = torch.tensor(
            [
                2.260528520767326969592,
                9.3960340162350541504,
                12.048951927855129036034,
                17.081440747466004316,
                9.608965327192787870698,
                3.3690752069827527677,
            ],
            dtype=z.dtype,
            device=z.device,
        )
        log_phi_z = torch.zeros_like(z)
        # Three cases to handle: An entry of z is near zero, an entry of z is small, or an entry of z neither of these.
        z_near_zero = z.pow(2).lt(0.04)  # |z| < 0.2
        z_is_small = z.lt(-1)
        z_is_ordinary = ~(z_near_zero | z_is_small)
        # Case 1: Entries of z that are near zero — Horner-style evaluation of
        # the series in -z / sqrt(2*pi), folded back through -2*f - log(2).
        if z_near_zero.sum() > 0:
            log_phi_first = -z.masked_select(z_near_zero).div_(math.sqrt(2 * math.pi))
            f = 0
            for c_i in c.tolist():
                f = log_phi_first.mul(c_i + f)
            log_phi_z.masked_scatter_(z_near_zero, f.mul_(-2).sub_(math.log(2)))
        # Case 2: Entries of z that are very small (z < -1) — rational
        # approximation evaluated Horner-style in -z / sqrt(2).
        if z_is_small.sum() > 0:
            z_where_z_is_small = z.masked_select(z_is_small)
            # 0.5641895... is 1/sqrt(pi), the leading erfc coefficient.
            numerator = torch.tensor(0.5641895835477550741, dtype=z.dtype, device=z.device)
            numerator = numerator.expand_as(z_where_z_is_small)
            denominator = torch.tensor(1.0, dtype=z.dtype, device=z.device)
            denominator = denominator.expand_as(z_where_z_is_small)
            for r_i in r:
                numerator = -z_where_z_is_small.mul(numerator.div(math.sqrt(2))) + r_i
            for q_i in q:
                denominator = -z_where_z_is_small.mul(denominator.div(math.sqrt(2))) + q_i
            e = numerator.div(denominator)
            log_phi_z.masked_scatter_(z_is_small, torch.log(e / 2) - z_where_z_is_small.pow(2).div_(2))
            # NOTE: stored as plain ctx attributes (not save_for_backward);
            # they only exist when some entry satisfies z < -1, which matches
            # the guard in backward() that reads them.
            ctx.denominator = denominator
            ctx.numerator = numerator
        # Case 3: ordinary entries — direct log of the exact normal CDF.
        log_phi_z.masked_scatter_(z_is_ordinary, torch.log(Normal(0.0, 1.0).cdf(z.masked_select(z_is_ordinary))))
        ctx.save_for_backward(z, log_phi_z)
        return log_phi_z

    @staticmethod
    def backward(ctx, grad_output):
        """Gradient of log(Phi(z)): phi(z) / Phi(z), computed per regime."""
        z, log_phi_z = ctx.saved_tensors
        log_phi_z_grad = torch.zeros_like(z)
        # Same partition as forward: the rational-approximation pieces are
        # reused for z < -1; everything else uses exp(-z^2/2 - log_phi + log(1/2)).
        z_is_small = z.lt(-1)
        z_is_not_small = ~z_is_small
        if z_is_small.sum() > 0:
            # ctx.numerator / ctx.denominator were set in forward under the
            # identical z < -1 guard.
            log_phi_z_grad[z_is_small] = torch.abs(ctx.denominator.div(ctx.numerator)).mul(math.sqrt(2 / math.pi))
        exp = z[z_is_not_small].pow(2).div(-2).sub(log_phi_z[z_is_not_small]).add(math.log(0.5))
        log_phi_z_grad[z_is_not_small] = torch.exp(exp).mul(math.sqrt(2 / math.pi))
        # Chain rule: scale by the incoming gradient.
        return log_phi_z_grad.mul(grad_output)
|
c7816e3f35de86a0f7dcd897c491a6d062368f37
|
f3806d9fb54773908cd9704121a543b114470aca
|
/angr/engines/soot/expressions/cast.py
|
2b2752aee019cb04b5dfcd2a94a4c4896b3d4831
|
[
"BSD-2-Clause"
] |
permissive
|
angr/angr
|
8ae95fceca51b0a001de56477d984dd01193ac1d
|
37e8ca1c3308ec601ad1d7c6bc8081ff38a7cffd
|
refs/heads/master
| 2023-08-17T03:15:21.007865
| 2023-08-15T18:44:57
| 2023-08-15T18:44:57
| 40,328,394
| 7,184
| 1,306
|
BSD-2-Clause
| 2023-09-14T20:14:23
| 2015-08-06T21:46:55
|
Python
|
UTF-8
|
Python
| false
| false
| 753
|
py
|
cast.py
|
import logging
from archinfo import ArchSoot
from .base import SimSootExpr
from ..values.thisref import SimSootValue_ThisRef
l = logging.getLogger("angr.engines.soot.expressions.cast")
class SimSootExpr_Cast(SimSootExpr):
    """Evaluates a Soot cast expression against the current state."""

    def _execute(self):
        # Resolve the operand and read its current value from memory.
        source = self._translate_value(self.expr.value)
        raw_value = self.state.memory.load(source)
        target_type = self.expr.cast_type
        if target_type in ArchSoot.primitive_types:
            # Primitive casts (e.g. int -> byte) go through the JavaVM SimOS.
            self.expr = self.state.project.simos.cast_primitive(
                self.state, raw_value, to_type=target_type)
        else:
            # Reference casts keep the allocation but retag its type.
            self.expr = SimSootValue_ThisRef(
                heap_alloc_id=raw_value.heap_alloc_id, type_=target_type)
|
71dc65ca3382d84b385da54a3cfbb3cb934d01b6
|
80f94bea418d7956df1ba19d4d6a1d7715a94ade
|
/lib/galaxy/version.py
|
40152f5d6ac1ac1649af4fc9fbcc7190376ac4c7
|
[
"CC-BY-2.5",
"MIT",
"CC-BY-3.0",
"AFL-3.0"
] |
permissive
|
galaxyproject/galaxy
|
5748409eb6693b1611f289d164f85e20c3237495
|
b9ae7a16ba0465995e880ae9701b7e87226b9bab
|
refs/heads/dev
| 2023-08-28T22:35:51.248138
| 2023-08-26T08:02:33
| 2023-08-26T08:02:33
| 31,211,061
| 1,277
| 1,137
|
NOASSERTION
| 2023-09-14T19:39:01
| 2015-02-23T14:18:06
|
Python
|
UTF-8
|
Python
| false
| false
| 119
|
py
|
version.py
|
# Galaxy release version, assembled from a major part and an optional
# minor/dev suffix.
VERSION_MAJOR = "23.2"
VERSION_MINOR = "dev0"
VERSION = f"{VERSION_MAJOR}.{VERSION_MINOR}" if VERSION_MINOR else VERSION_MAJOR
|
b4f6fc1b241fa076a5568672002c3a4f30539991
|
99199db3f78a344e72b281c71c690518ae07375a
|
/octavia/controller/worker/v2/flows/amphora_flows.py
|
54df1ff8f919c73a2e2cc977214e06c8fdd21c2a
|
[
"Apache-2.0"
] |
permissive
|
openstack/octavia
|
3faf2afe2ade5bd3978bb3a0558d2eeefc648ba2
|
0426285a41464a5015494584f109eed35a0d44db
|
refs/heads/master
| 2023-09-01T20:12:48.272344
| 2023-08-31T17:24:04
| 2023-08-31T17:24:04
| 21,018,188
| 147
| 180
|
Apache-2.0
| 2021-03-30T12:34:30
| 2014-06-19T22:47:19
|
Python
|
UTF-8
|
Python
| false
| false
| 28,343
|
py
|
amphora_flows.py
|
# Copyright 2015 Hewlett-Packard Development Company, L.P.
# Copyright 2020 Red Hat, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
from oslo_config import cfg
from oslo_log import log as logging
from taskflow.patterns import linear_flow
from taskflow.patterns import unordered_flow
from octavia.common import constants
from octavia.common import utils
from octavia.controller.worker.v2.tasks import amphora_driver_tasks
from octavia.controller.worker.v2.tasks import cert_task
from octavia.controller.worker.v2.tasks import compute_tasks
from octavia.controller.worker.v2.tasks import database_tasks
from octavia.controller.worker.v2.tasks import lifecycle_tasks
from octavia.controller.worker.v2.tasks import network_tasks
from octavia.controller.worker.v2.tasks import retry_tasks
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
class AmphoraFlows(object):
    def get_create_amphora_flow(self):
        """Creates a flow to create an amphora.

        Builds a standalone (spare) amphora: DB record, certificate, nova
        boot with retried wait, agent connectivity wait, finalize, and mark
        ready.  Task ordering is the flow's semantics.

        :returns: The flow for creating the amphora
        """
        create_amphora_flow = linear_flow.Flow(constants.CREATE_AMPHORA_FLOW)
        # Create the DB record first so a revert can mark it ERROR.
        create_amphora_flow.add(database_tasks.CreateAmphoraInDB(
            provides=constants.AMPHORA_ID))
        create_amphora_flow.add(lifecycle_tasks.AmphoraIDToErrorOnRevertTask(
            requires=constants.AMPHORA_ID))
        # Per-amphora TLS material, stored before boot so the compute create
        # can embed it.
        create_amphora_flow.add(cert_task.GenerateServerPEMTask(
            provides=constants.SERVER_PEM))
        create_amphora_flow.add(
            database_tasks.UpdateAmphoraDBCertExpiration(
                requires=(constants.AMPHORA_ID, constants.SERVER_PEM)))
        create_amphora_flow.add(compute_tasks.CertComputeCreate(
            requires=(constants.AMPHORA_ID, constants.SERVER_PEM,
                      constants.SERVER_GROUP_ID,
                      constants.BUILD_TYPE_PRIORITY, constants.FLAVOR),
            provides=constants.COMPUTE_ID))
        create_amphora_flow.add(database_tasks.MarkAmphoraBootingInDB(
            requires=(constants.AMPHORA_ID, constants.COMPUTE_ID)))
        # Retry the compute wait: nova may take a while to report ACTIVE.
        # NOTE: 'retry_subflow' is deliberately reused below for a second,
        # distinct retry subflow.
        retry_subflow = linear_flow.Flow(
            constants.COMPUTE_CREATE_RETRY_SUBFLOW,
            retry=compute_tasks.ComputeRetry())
        retry_subflow.add(
            compute_tasks.ComputeWait(
                requires=(constants.COMPUTE_ID, constants.AMPHORA_ID),
                provides=constants.COMPUTE_OBJ))
        create_amphora_flow.add(retry_subflow)
        create_amphora_flow.add(database_tasks.UpdateAmphoraInfo(
            requires=(constants.AMPHORA_ID, constants.COMPUTE_OBJ),
            provides=constants.AMPHORA))
        # Second retry subflow: wait until the amphora agent is reachable.
        retry_subflow = linear_flow.Flow(
            constants.CREATE_AMPHORA_RETRY_SUBFLOW,
            retry=amphora_driver_tasks.AmpRetry())
        retry_subflow.add(
            amphora_driver_tasks.AmphoraComputeConnectivityWait(
                requires=constants.AMPHORA,
                inject={'raise_retry_exception': True}))
        create_amphora_flow.add(retry_subflow)
        # Refresh the amphora object from the DB before final configuration.
        create_amphora_flow.add(database_tasks.ReloadAmphora(
            requires=constants.AMPHORA,
            provides=constants.AMPHORA))
        create_amphora_flow.add(amphora_driver_tasks.AmphoraFinalize(
            requires=constants.AMPHORA))
        create_amphora_flow.add(database_tasks.MarkAmphoraReadyInDB(
            requires=constants.AMPHORA))
        return create_amphora_flow
    def get_amphora_for_lb_subflow(self, prefix, role):
        """Create a new amphora for lb.

        Like get_create_amphora_flow, but the amphora is bound to a load
        balancer and marked with a topology role at the end.  All task names
        are prefixed with *prefix* so the subflow can appear multiple times
        in one parent flow without name collisions.

        :param prefix: Unique name prefix for this subflow's tasks.
        :param role: One of constants.ROLE_MASTER / ROLE_BACKUP /
                     ROLE_STANDALONE.
        :returns: The subflow creating and allocating the amphora.
        """
        sf_name = prefix + '-' + constants.CREATE_AMP_FOR_LB_SUBFLOW
        create_amp_for_lb_subflow = linear_flow.Flow(sf_name)
        create_amp_for_lb_subflow.add(database_tasks.CreateAmphoraInDB(
            name=sf_name + '-' + constants.CREATE_AMPHORA_INDB,
            requires=constants.LOADBALANCER_ID,
            provides=constants.AMPHORA_ID))
        # Certificate must exist before the nova boot embeds it.
        create_amp_for_lb_subflow.add(cert_task.GenerateServerPEMTask(
            name=sf_name + '-' + constants.GENERATE_SERVER_PEM,
            provides=constants.SERVER_PEM))
        create_amp_for_lb_subflow.add(
            database_tasks.UpdateAmphoraDBCertExpiration(
                name=sf_name + '-' + constants.UPDATE_CERT_EXPIRATION,
                requires=(constants.AMPHORA_ID, constants.SERVER_PEM)))
        create_amp_for_lb_subflow.add(compute_tasks.CertComputeCreate(
            name=sf_name + '-' + constants.CERT_COMPUTE_CREATE,
            requires=(constants.AMPHORA_ID, constants.SERVER_PEM,
                      constants.BUILD_TYPE_PRIORITY,
                      constants.SERVER_GROUP_ID,
                      constants.FLAVOR, constants.AVAILABILITY_ZONE),
            provides=constants.COMPUTE_ID))
        create_amp_for_lb_subflow.add(database_tasks.UpdateAmphoraComputeId(
            name=sf_name + '-' + constants.UPDATE_AMPHORA_COMPUTEID,
            requires=(constants.AMPHORA_ID, constants.COMPUTE_ID)))
        create_amp_for_lb_subflow.add(database_tasks.MarkAmphoraBootingInDB(
            name=sf_name + '-' + constants.MARK_AMPHORA_BOOTING_INDB,
            requires=(constants.AMPHORA_ID, constants.COMPUTE_ID)))
        # Retried wait for nova, then for the amphora agent (helpers below).
        create_amp_for_lb_subflow.add(self._retry_compute_wait_flow(sf_name))
        create_amp_for_lb_subflow.add(database_tasks.UpdateAmphoraInfo(
            name=sf_name + '-' + constants.UPDATE_AMPHORA_INFO,
            requires=(constants.AMPHORA_ID, constants.COMPUTE_OBJ),
            provides=constants.AMPHORA))
        create_amp_for_lb_subflow.add(self._retry_flow(sf_name))
        create_amp_for_lb_subflow.add(amphora_driver_tasks.AmphoraFinalize(
            name=sf_name + '-' + constants.AMPHORA_FINALIZE,
            requires=constants.AMPHORA))
        create_amp_for_lb_subflow.add(
            database_tasks.MarkAmphoraAllocatedInDB(
                name=sf_name + '-' + constants.MARK_AMPHORA_ALLOCATED_INDB,
                requires=(constants.AMPHORA, constants.LOADBALANCER_ID)))
        # Record the amphora's role in the load balancer topology.
        if role == constants.ROLE_MASTER:
            create_amp_for_lb_subflow.add(database_tasks.MarkAmphoraMasterInDB(
                name=sf_name + '-' + constants.MARK_AMP_MASTER_INDB,
                requires=constants.AMPHORA))
        elif role == constants.ROLE_BACKUP:
            create_amp_for_lb_subflow.add(database_tasks.MarkAmphoraBackupInDB(
                name=sf_name + '-' + constants.MARK_AMP_BACKUP_INDB,
                requires=constants.AMPHORA))
        elif role == constants.ROLE_STANDALONE:
            create_amp_for_lb_subflow.add(
                database_tasks.MarkAmphoraStandAloneInDB(
                    name=sf_name + '-' + constants.MARK_AMP_STANDALONE_INDB,
                    requires=constants.AMPHORA))
        return create_amp_for_lb_subflow
def _retry_compute_wait_flow(self, sf_name):
retry_task = sf_name + '-' + constants.COMPUTE_WAIT
retry_subflow = linear_flow.Flow(
sf_name + '-' + constants.COMPUTE_CREATE_RETRY_SUBFLOW,
retry=compute_tasks.ComputeRetry())
retry_subflow.add(
compute_tasks.ComputeWait(
name=retry_task,
requires=(constants.COMPUTE_ID, constants.AMPHORA_ID),
provides=constants.COMPUTE_OBJ))
return retry_subflow
def _retry_flow(self, sf_name):
retry_task = sf_name + '-' + constants.AMP_COMPUTE_CONNECTIVITY_WAIT
retry_subflow = linear_flow.Flow(
sf_name + '-' + constants.CREATE_AMPHORA_RETRY_SUBFLOW,
retry=amphora_driver_tasks.AmpRetry())
retry_subflow.add(
amphora_driver_tasks.AmphoraComputeConnectivityWait(
name=retry_task, requires=constants.AMPHORA,
inject={'raise_retry_exception': True}))
return retry_subflow
    def get_delete_amphora_flow(
            self, amphora,
            retry_attempts=CONF.controller_worker.amphora_delete_retries,
            retry_interval=(
                CONF.controller_worker.amphora_delete_retry_interval)):
        """Creates a subflow to delete an amphora and it's port.

        This flow is idempotent and safe to retry.

        NOTE(review): the retry defaults are read from CONF when this module
        is imported, not per call — confirm that is the intended behavior.

        :param amphora: An amphora dict object.
        :param retry_attempts: The number of times the flow is retried.
        :param retry_interval: The time to wait, in seconds, between retries.
        :returns: The subflow for deleting the amphora.
        :raises AmphoraNotFound: The referenced Amphora was not found.
        """
        amphora_id = amphora[constants.ID]
        # The whole flow runs under a sleeping retry controller so transient
        # compute/network failures re-run the (idempotent) delete steps.
        delete_amphora_flow = linear_flow.Flow(
            name=constants.DELETE_AMPHORA_FLOW + '-' + amphora_id,
            retry=retry_tasks.SleepingRetryTimesController(
                name='retry-' + constants.DELETE_AMPHORA_FLOW + '-' +
                amphora_id,
                attempts=retry_attempts, interval=retry_interval))
        delete_amphora_flow.add(lifecycle_tasks.AmphoraToErrorOnRevertTask(
            name=constants.AMPHORA_TO_ERROR_ON_REVERT + '-' + amphora_id,
            inject={constants.AMPHORA: amphora}))
        delete_amphora_flow.add(
            database_tasks.MarkAmphoraPendingDeleteInDB(
                name=constants.MARK_AMPHORA_PENDING_DELETE + '-' + amphora_id,
                inject={constants.AMPHORA: amphora}))
        # Suppress health-manager failovers while the instance disappears.
        delete_amphora_flow.add(database_tasks.MarkAmphoraHealthBusy(
            name=constants.MARK_AMPHORA_HEALTH_BUSY + '-' + amphora_id,
            inject={constants.AMPHORA: amphora}))
        # PASSIVE_FAILURE: a failed nova delete logs instead of failing the
        # flow, keeping the delete best-effort.
        delete_amphora_flow.add(compute_tasks.ComputeDelete(
            name=constants.DELETE_AMPHORA + '-' + amphora_id,
            inject={constants.AMPHORA: amphora,
                    constants.PASSIVE_FAILURE: True}))
        delete_amphora_flow.add(database_tasks.DisableAmphoraHealthMonitoring(
            name=constants.DISABLE_AMP_HEALTH_MONITORING + '-' + amphora_id,
            inject={constants.AMPHORA: amphora}))
        delete_amphora_flow.add(database_tasks.MarkAmphoraDeletedInDB(
            name=constants.MARK_AMPHORA_DELETED + '-' + amphora_id,
            inject={constants.AMPHORA: amphora}))
        # The VRRP (VIP base) port is owned by Octavia, so delete it too.
        if amphora.get(constants.VRRP_PORT_ID):
            delete_amphora_flow.add(network_tasks.DeletePort(
                name=(constants.DELETE_PORT + '-' + str(amphora_id) + '-' +
                      str(amphora[constants.VRRP_PORT_ID])),
                inject={constants.PORT_ID: amphora[constants.VRRP_PORT_ID],
                        constants.PASSIVE_FAILURE: True}))
        # TODO(johnsom) What about cleaning up any member ports?
        # maybe we should get the list of attached ports prior to delete
        # and call delete on them here. Fix this as part of
        # https://storyboard.openstack.org/#!/story/2007077
        return delete_amphora_flow
def get_vrrp_subflow(self, prefix, timeout_dict=None,
create_vrrp_group=True):
sf_name = prefix + '-' + constants.GET_VRRP_SUBFLOW
vrrp_subflow = linear_flow.Flow(sf_name)
# Optimization for failover flow. No reason to call this
# when configuring the secondary amphora.
if create_vrrp_group:
vrrp_subflow.add(database_tasks.CreateVRRPGroupForLB(
name=sf_name + '-' + constants.CREATE_VRRP_GROUP_FOR_LB,
requires=constants.LOADBALANCER_ID))
vrrp_subflow.add(network_tasks.GetAmphoraeNetworkConfigs(
name=sf_name + '-' + constants.GET_AMP_NETWORK_CONFIG,
requires=constants.LOADBALANCER_ID,
provides=constants.AMPHORAE_NETWORK_CONFIG))
# VRRP update needs to be run on all amphora to update
# their peer configurations. So parallelize this with an
# unordered subflow.
update_amps_subflow = unordered_flow.Flow('VRRP-update-subflow')
# We have three tasks to run in order, per amphora
amp_0_subflow = linear_flow.Flow('VRRP-amp-0-update-subflow')
amp_0_subflow.add(amphora_driver_tasks.AmphoraIndexUpdateVRRPInterface(
name=sf_name + '-0-' + constants.AMP_UPDATE_VRRP_INTF,
requires=constants.AMPHORAE,
inject={constants.AMPHORA_INDEX: 0,
constants.TIMEOUT_DICT: timeout_dict},
provides=constants.AMP_VRRP_INT))
amp_0_subflow.add(amphora_driver_tasks.AmphoraIndexVRRPUpdate(
name=sf_name + '-0-' + constants.AMP_VRRP_UPDATE,
requires=(constants.LOADBALANCER_ID,
constants.AMPHORAE_NETWORK_CONFIG, constants.AMPHORAE,
constants.AMP_VRRP_INT),
inject={constants.AMPHORA_INDEX: 0,
constants.TIMEOUT_DICT: timeout_dict}))
amp_0_subflow.add(amphora_driver_tasks.AmphoraIndexVRRPStart(
name=sf_name + '-0-' + constants.AMP_VRRP_START,
requires=constants.AMPHORAE,
inject={constants.AMPHORA_INDEX: 0,
constants.TIMEOUT_DICT: timeout_dict}))
amp_1_subflow = linear_flow.Flow('VRRP-amp-1-update-subflow')
amp_1_subflow.add(amphora_driver_tasks.AmphoraIndexUpdateVRRPInterface(
name=sf_name + '-1-' + constants.AMP_UPDATE_VRRP_INTF,
requires=constants.AMPHORAE,
inject={constants.AMPHORA_INDEX: 1,
constants.TIMEOUT_DICT: timeout_dict},
provides=constants.AMP_VRRP_INT))
amp_1_subflow.add(amphora_driver_tasks.AmphoraIndexVRRPUpdate(
name=sf_name + '-1-' + constants.AMP_VRRP_UPDATE,
requires=(constants.LOADBALANCER_ID,
constants.AMPHORAE_NETWORK_CONFIG, constants.AMPHORAE,
constants.AMP_VRRP_INT),
inject={constants.AMPHORA_INDEX: 1,
constants.TIMEOUT_DICT: timeout_dict}))
amp_1_subflow.add(amphora_driver_tasks.AmphoraIndexVRRPStart(
name=sf_name + '-1-' + constants.AMP_VRRP_START,
requires=constants.AMPHORAE,
inject={constants.AMPHORA_INDEX: 1,
constants.TIMEOUT_DICT: timeout_dict}))
update_amps_subflow.add(amp_0_subflow)
update_amps_subflow.add(amp_1_subflow)
vrrp_subflow.add(update_amps_subflow)
return vrrp_subflow
def cert_rotate_amphora_flow(self):
"""Implement rotation for amphora's cert.
1. Create a new certificate
2. Upload the cert to amphora
3. update the newly created certificate info to amphora
4. update the cert_busy flag to be false after rotation
:returns: The flow for updating an amphora
"""
rotated_amphora_flow = linear_flow.Flow(
constants.CERT_ROTATE_AMPHORA_FLOW)
rotated_amphora_flow.add(lifecycle_tasks.AmphoraToErrorOnRevertTask(
requires=constants.AMPHORA))
# create a new certificate, the returned value is the newly created
# certificate
rotated_amphora_flow.add(cert_task.GenerateServerPEMTask(
provides=constants.SERVER_PEM))
# update it in amphora task
rotated_amphora_flow.add(amphora_driver_tasks.AmphoraCertUpload(
requires=(constants.AMPHORA, constants.SERVER_PEM)))
# update the newly created certificate info to amphora
rotated_amphora_flow.add(database_tasks.UpdateAmphoraDBCertExpiration(
requires=(constants.AMPHORA_ID, constants.SERVER_PEM)))
# update the cert_busy flag to be false after rotation
rotated_amphora_flow.add(database_tasks.UpdateAmphoraCertBusyToFalse(
requires=constants.AMPHORA_ID))
return rotated_amphora_flow
def update_amphora_config_flow(self):
"""Creates a flow to update the amphora agent configuration.
:returns: The flow for updating an amphora
"""
update_amphora_flow = linear_flow.Flow(
constants.UPDATE_AMPHORA_CONFIG_FLOW)
update_amphora_flow.add(lifecycle_tasks.AmphoraToErrorOnRevertTask(
requires=constants.AMPHORA))
update_amphora_flow.add(amphora_driver_tasks.AmphoraConfigUpdate(
requires=(constants.AMPHORA, constants.FLAVOR)))
return update_amphora_flow
    def get_amphora_for_lb_failover_subflow(
            self, prefix, role=constants.ROLE_STANDALONE,
            failed_amp_vrrp_port_id=None, is_vrrp_ipv6=False):
        """Creates a new amphora that will be used in a failover flow.

        :requires: loadbalancer_id, flavor, vip, vip_sg_id, loadbalancer
        :provides: amphora_id, amphora
        :param prefix: The flow name prefix to use on the flow and tasks.
        :param role: The role this amphora will have in the topology.
        :param failed_amp_vrrp_port_id: The base port ID of the failed amp.
        :param is_vrrp_ipv6: True if the base port IP is IPv6.
        :return: A Taskflow sub-flow that will create the amphora.
        """
        sf_name = prefix + '-' + constants.CREATE_AMP_FOR_FAILOVER_SUBFLOW
        amp_for_failover_flow = linear_flow.Flow(sf_name)
        # Try to allocate or boot an amphora instance (unconfigured)
        amp_for_failover_flow.add(self.get_amphora_for_lb_subflow(
            prefix=prefix + '-' + constants.FAILOVER_LOADBALANCER_FLOW,
            role=role))
        # Create the VIP base (aka VRRP) port for the amphora.
        amp_for_failover_flow.add(network_tasks.CreateVIPBasePort(
            name=prefix + '-' + constants.CREATE_VIP_BASE_PORT,
            requires=(constants.VIP, constants.VIP_SG_ID,
                      constants.AMPHORA_ID,
                      constants.ADDITIONAL_VIPS),
            provides=constants.BASE_PORT))
        # Attach the VIP base (aka VRRP) port to the amphora.
        # rebind maps the generic PORT requirement onto BASE_PORT above.
        amp_for_failover_flow.add(compute_tasks.AttachPort(
            name=prefix + '-' + constants.ATTACH_PORT,
            requires=(constants.AMPHORA, constants.PORT),
            rebind={constants.PORT: constants.BASE_PORT}))
        # Update the amphora database record with the VIP base port info.
        amp_for_failover_flow.add(database_tasks.UpdateAmpFailoverDetails(
            name=prefix + '-' + constants.UPDATE_AMP_FAILOVER_DETAILS,
            requires=(constants.AMPHORA, constants.VIP, constants.BASE_PORT)))
        # Update the amphora networking for the plugged VIP port
        amp_for_failover_flow.add(network_tasks.GetAmphoraNetworkConfigsByID(
            name=prefix + '-' + constants.GET_AMPHORA_NETWORK_CONFIGS_BY_ID,
            requires=(constants.LOADBALANCER_ID, constants.AMPHORA_ID),
            provides=constants.AMPHORAE_NETWORK_CONFIG))
        # Disable the base (vrrp) port on the failed amphora
        # This prevents a DAD failure when bringing up the new amphora.
        # Keepalived will handle this for act/stdby.
        # Only done for IPv6 standalone topologies (DAD is an IPv6 mechanism).
        if (role == constants.ROLE_STANDALONE and failed_amp_vrrp_port_id and
                is_vrrp_ipv6):
            amp_for_failover_flow.add(network_tasks.AdminDownPort(
                name=prefix + '-' + constants.ADMIN_DOWN_PORT,
                inject={constants.PORT_ID: failed_amp_vrrp_port_id}))
        amp_for_failover_flow.add(amphora_driver_tasks.AmphoraPostVIPPlug(
            name=prefix + '-' + constants.AMPHORA_POST_VIP_PLUG,
            requires=(constants.AMPHORA, constants.LOADBALANCER,
                      constants.AMPHORAE_NETWORK_CONFIG)))
        # Plug member ports: compute which member networks the new amphora
        # is missing, plug them, then notify the amphora agent.
        amp_for_failover_flow.add(network_tasks.CalculateAmphoraDelta(
            name=prefix + '-' + constants.CALCULATE_AMPHORA_DELTA,
            requires=(constants.LOADBALANCER, constants.AMPHORA,
                      constants.AVAILABILITY_ZONE),
            provides=constants.DELTA))
        amp_for_failover_flow.add(network_tasks.HandleNetworkDelta(
            name=prefix + '-' + constants.HANDLE_NETWORK_DELTA,
            requires=(constants.AMPHORA, constants.DELTA),
            provides=constants.UPDATED_PORTS))
        amp_for_failover_flow.add(amphora_driver_tasks.AmphoraePostNetworkPlug(
            name=prefix + '-' + constants.AMPHORAE_POST_NETWORK_PLUG,
            requires=(constants.LOADBALANCER, constants.UPDATED_PORTS)))
        return amp_for_failover_flow
    def get_failover_amphora_flow(self, failed_amphora, lb_amp_count):
        """Get a Taskflow flow to failover an amphora.

        1. Build a replacement amphora.
        2. Delete the old amphora.
        3. Update the amphorae listener configurations.
        4. Update the VRRP configurations if needed.

        :param failed_amphora: The amphora dict to failover.
        :param lb_amp_count: The number of amphora on this load balancer.
        :returns: The flow that will provide the failover.
        """
        failover_amp_flow = linear_flow.Flow(
            constants.FAILOVER_AMPHORA_FLOW)
        # Revert LB to provisioning_status ERROR if this flow goes wrong
        failover_amp_flow.add(lifecycle_tasks.LoadBalancerToErrorOnRevertTask(
            requires=constants.LOADBALANCER))
        # Revert amphora to status ERROR if this flow goes wrong
        failover_amp_flow.add(lifecycle_tasks.AmphoraToErrorOnRevertTask(
            requires=constants.AMPHORA,
            inject={constants.AMPHORA: failed_amphora}))
        # Derive a human-readable role string for the log line below.
        if failed_amphora[constants.ROLE] in (constants.ROLE_MASTER,
                                              constants.ROLE_BACKUP):
            amp_role = 'master_or_backup'
        elif failed_amphora[constants.ROLE] == constants.ROLE_STANDALONE:
            amp_role = 'standalone'
        else:
            amp_role = 'undefined'
        LOG.info("Performing failover for amphora: %s",
                 {"id": failed_amphora[constants.ID],
                  "load_balancer_id": failed_amphora.get(
                      constants.LOAD_BALANCER_ID),
                  "lb_network_ip": failed_amphora.get(constants.LB_NETWORK_IP),
                  "compute_id": failed_amphora.get(constants.COMPUTE_ID),
                  "role": amp_role})
        # Take the failed amphora out of rotation: mark it for deletion and
        # pause health monitoring so it is not failed over again mid-flow.
        failover_amp_flow.add(database_tasks.MarkAmphoraPendingDeleteInDB(
            requires=constants.AMPHORA,
            inject={constants.AMPHORA: failed_amphora}))
        failover_amp_flow.add(database_tasks.MarkAmphoraHealthBusy(
            requires=constants.AMPHORA,
            inject={constants.AMPHORA: failed_amphora}))
        failover_amp_flow.add(network_tasks.GetVIPSecurityGroupID(
            requires=constants.LOADBALANCER_ID,
            provides=constants.VIP_SG_ID))
        is_vrrp_ipv6 = False
        # Only build a replacement if the amphora is allocated to an LB.
        if failed_amphora.get(constants.LOAD_BALANCER_ID):
            if failed_amphora.get(constants.VRRP_IP):
                is_vrrp_ipv6 = utils.is_ipv6(failed_amphora[constants.VRRP_IP])
            # Get a replacement amphora and plug all of the networking.
            #
            # Do this early as the compute services have been observed to be
            # unreliable. The community decided the chance that deleting first
            # would open resources for an instance is less likely than the
            # compute service failing to boot an instance for other reasons.
            # TODO(johnsom) Move this back out to run for spares after
            #               delete amphora API is available.
            failover_amp_flow.add(self.get_amphora_for_lb_failover_subflow(
                prefix=constants.FAILOVER_LOADBALANCER_FLOW,
                role=failed_amphora[constants.ROLE],
                failed_amp_vrrp_port_id=failed_amphora.get(
                    constants.VRRP_PORT_ID),
                is_vrrp_ipv6=is_vrrp_ipv6))
        # Delete the failed amphora (with retries, compute can be flaky).
        failover_amp_flow.add(
            self.get_delete_amphora_flow(
                failed_amphora,
                retry_attempts=CONF.controller_worker.amphora_delete_retries,
                retry_interval=(
                    CONF.controller_worker.amphora_delete_retry_interval)))
        failover_amp_flow.add(
            database_tasks.DisableAmphoraHealthMonitoring(
                requires=constants.AMPHORA,
                inject={constants.AMPHORA: failed_amphora}))
        if not failed_amphora.get(constants.LOAD_BALANCER_ID):
            # This is an unallocated amphora (bogus), we are done.
            return failover_amp_flow
        failover_amp_flow.add(database_tasks.GetLoadBalancer(
            requires=constants.LOADBALANCER_ID,
            inject={constants.LOADBALANCER_ID:
                    failed_amphora[constants.LOAD_BALANCER_ID]},
            provides=constants.LOADBALANCER))
        failover_amp_flow.add(database_tasks.GetAmphoraeFromLoadbalancer(
            name=constants.GET_AMPHORAE_FROM_LB,
            requires=constants.LOADBALANCER_ID,
            inject={constants.LOADBALANCER_ID:
                    failed_amphora[constants.LOAD_BALANCER_ID]},
            provides=constants.AMPHORAE))
        # Setup timeouts for our requests to the amphorae
        timeout_dict = {
            constants.CONN_MAX_RETRIES:
                CONF.haproxy_amphora.active_connection_max_retries,
            constants.CONN_RETRY_INTERVAL:
                CONF.haproxy_amphora.active_connection_retry_interval}
        # Listeners update needs to be run on all amphora to update
        # their peer configurations. So parallelize this with an
        # unordered subflow.
        update_amps_subflow = unordered_flow.Flow(
            constants.UPDATE_AMPS_SUBFLOW)
        for amp_index in range(0, lb_amp_count):
            update_amps_subflow.add(
                amphora_driver_tasks.AmphoraIndexListenerUpdate(
                    name=str(amp_index) + '-' + constants.AMP_LISTENER_UPDATE,
                    requires=(constants.LOADBALANCER, constants.AMPHORAE),
                    inject={constants.AMPHORA_INDEX: amp_index,
                            constants.TIMEOUT_DICT: timeout_dict}))
        failover_amp_flow.add(update_amps_subflow)
        # Configure and enable keepalived in the amphora
        # (two amphorae implies an active/standby topology).
        if lb_amp_count == 2:
            failover_amp_flow.add(
                self.get_vrrp_subflow(constants.GET_VRRP_SUBFLOW,
                                      timeout_dict, create_vrrp_group=False))
        # Reload the listener. This needs to be done here because
        # it will create the required haproxy check scripts for
        # the VRRP deployed above.
        # A "U" or newer amphora-agent will remove the need for this
        # task here.
        # TODO(johnsom) Remove this in the "W" cycle
        reload_listener_subflow = unordered_flow.Flow(
            constants.AMPHORA_LISTENER_RELOAD_SUBFLOW)
        for amp_index in range(0, lb_amp_count):
            reload_listener_subflow.add(
                amphora_driver_tasks.AmphoraIndexListenersReload(
                    name=(str(amp_index) + '-' +
                          constants.AMPHORA_RELOAD_LISTENER),
                    requires=(constants.LOADBALANCER, constants.AMPHORAE),
                    inject={constants.AMPHORA_INDEX: amp_index,
                            constants.TIMEOUT_DICT: timeout_dict}))
        failover_amp_flow.add(reload_listener_subflow)
        # Remove any extraneous ports
        # Note: Nova sometimes fails to delete ports attached to an instance.
        #       For example, if you create an LB with a listener, then
        #       'openstack server delete' the amphora, you will see the vrrp
        #       port attached to that instance will remain after the instance
        #       is deleted.
        # TODO(johnsom) Fix this as part of
        #               https://storyboard.openstack.org/#!/story/2007077
        # Mark LB ACTIVE
        failover_amp_flow.add(
            database_tasks.MarkLBActiveInDB(mark_subobjects=True,
                                            requires=constants.LOADBALANCER))
        return failover_amp_flow
|
2131e8c36d58b791548150dcd22193222e1f90a2
|
aee26a4c731a84481a499679c3d4cef9ec954aed
|
/tacker/api/vnflcm/v1/sync_resource.py
|
f9219bfef4c7b1d11b93049eb8d29097a81d87c7
|
[
"Apache-2.0"
] |
permissive
|
openstack/tacker
|
6976cbee3afadfd9390849b56da2837feb93e912
|
9c7918f0b501cdeaffae40f585b76fc92b8e196e
|
refs/heads/master
| 2023-09-04T01:22:43.106241
| 2023-08-31T00:06:42
| 2023-08-31T00:42:20
| 21,259,951
| 125
| 172
|
Apache-2.0
| 2021-05-09T06:13:08
| 2014-06-27T01:11:56
|
Python
|
UTF-8
|
Python
| false
| false
| 5,064
|
py
|
sync_resource.py
|
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from oslo_log import log as logging
from tacker.common import csar_utils
from tacker.common import exceptions
from tacker.common import utils
from tacker.conductor.conductorrpc import vnf_pkgm_rpc
from tacker.glance_store import store as glance_store
from tacker import objects
from tacker.objects import fields
import tacker.vnfm.nfvo_client as nfvo_client
import time
import webob
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
class SyncVnfPackage:
    """Pull a VNF package from an external NFVO and register it locally."""
    # Shared RPC client used to hand package-content processing off to the
    # conductor.
    vnf_package_rpc_api = vnf_pkgm_rpc.VNFPackageRPCAPI()
    @classmethod
    def create_package(cls, context, vnf_package_info):
        """Create a vnf_package row, download its CSAR and return the VNFD.

        :param context: request context (tenant taken from ``project_id``)
        :param vnf_package_info: package attributes in camelCase, as
            returned by the external NFVO
        :returns: the matching VnfPackageVnfd object, or None if it never
            appeared within the configured retries
        :raises webob.exc.HTTPNotFound: download endpoint not configured
        :raises webob.exc.HTTPInternalServerError: any other failure
        """
        # The NFVO returns camelCase keys; local objects use snake_case.
        vnf_package_info = utils.convert_camelcase_to_snakecase(
            vnf_package_info)
        try:
            vnf_package = cls.__create_vnf_package(context, vnf_package_info)
        except Exception as exc:
            raise webob.exc.HTTPInternalServerError(
                explanation=exc)
        try:
            artifact_paths = cls._get_artifact_paths(vnf_package_info)
            vnf_package_binary = \
                nfvo_client.VnfPackageRequest.download_vnf_packages(
                    vnf_package.id, artifact_paths)
        except nfvo_client.UndefinedExternalSettingException as exc:
            raise webob.exc.HTTPNotFound(explanation=exc)
        # NOTE(review): the tuple below also catches bare Exception, so the
        # FaliedDownloadContentException member is redundant -- every
        # remaining download error maps to a 500.
        except (nfvo_client.FaliedDownloadContentException, Exception) as exc:
            raise webob.exc.HTTPInternalServerError(
                explanation=exc)
        try:
            # Persist the CSAR binary in the glance store, then let the
            # conductor unpack and validate it asynchronously over RPC.
            (location, size, _, multihash, _) = glance_store.store_csar(
                context, vnf_package.id, vnf_package_binary)
            cls.__update_vnf_package(vnf_package, location, size, multihash)
            cls.vnf_package_rpc_api.upload_vnf_package_content(
                context, vnf_package)
            # Wait (with retries) for the conductor to create the VNFD row.
            vnf_package_vnfd = cls._get_vnf_package_vnfd(
                context, vnf_package_info.get('vnfd_id'))
        except Exception as exc:
            raise webob.exc.HTTPInternalServerError(
                explanation=exc)
        return vnf_package_vnfd
    @classmethod
    def _get_artifact_paths(cls, vnf_package_info):
        """Collect artifact_path values from additional_artifacts, if any."""
        additional_artifacts = vnf_package_info.get('additional_artifacts')
        if additional_artifacts is None:
            return None
        return [artifact.get('artifact_path')
                for artifact in additional_artifacts
                if 'artifact_path' in artifact]
    @classmethod
    def __store_csar(cls, context, id, body):
        # NOTE(review): not referenced anywhere in this class; looks like
        # dead code shadowing glance_store.store_csar -- confirm before use.
        (location, size, checksum, multihash,
         loc_meta) = glance_store.store_csar(context, id, body)
        return location, size, checksum, multihash, loc_meta
    @classmethod
    def __load_csar(cls, context, vnf_package):
        # NOTE(review): also unreferenced within this class -- confirm.
        location = vnf_package.location_glance_store
        zip_path = glance_store.load_csar(vnf_package.id, location)
        vnf_data, flavours = csar_utils.load_csar_data(
            context.elevated(), vnf_package.id, zip_path)
        return vnf_data, flavours
    @classmethod
    def __create_vnf_package(cls, context, vnf_package_info):
        """Register the package row (CREATED/DISABLED/NOT_IN_USE)."""
        vnf_package = objects.VnfPackage(
            context=context,
            id=vnf_package_info.get('id'),
            onboarding_state=fields.PackageOnboardingStateType.CREATED,
            operational_state=fields.PackageOperationalStateType.DISABLED,
            usage_state=fields.PackageUsageStateType.NOT_IN_USE,
            tenant_id=context.project_id
        )
        vnf_package.create()
        return vnf_package
    @classmethod
    def __update_vnf_package(cls, vnf_package, location, size, multihash):
        """Record the stored CSAR's location, size and hash on the row."""
        vnf_package.algorithm = CONF.vnf_package.hashing_algorithm
        vnf_package.location_glance_store = location
        vnf_package.hash = multihash
        vnf_package.size = size
        vnf_package.save()
    @classmethod
    def _get_vnf_package_vnfd(cls, context, vnfd_id):
        """Fetch the VNFD row, retrying while the conductor creates it.

        Returns None when the row never appears within retry_num attempts.
        """
        for num in range(CONF.vnf_lcm.retry_num):
            try:
                vnfd = objects.VnfPackageVnfd.get_by_id(
                    context,
                    vnfd_id)
                return vnfd
            except exceptions.VnfPackageVnfdNotFound:
                # The row is created asynchronously; back off and retry.
                LOG.debug("retry_wait %s" %
                          CONF.vnf_lcm.retry_wait)
                time.sleep(CONF.vnf_lcm.retry_wait)
        return None
|
42c5a61d98a9f27253d15a2169acf84bb0ec4094
|
3c6b36eb1f4f9760c52903f6d0ec4a501f948c90
|
/osp/fields/models/field_index.py
|
814a1e567b9999a7b15b90965a046764c1d9ccc5
|
[
"Apache-2.0"
] |
permissive
|
davidmcclure/open-syllabus-project
|
38444249af845013e3f281a7a713dca83159c56e
|
078cfd4c5a257fbfb0901d43bfbc6350824eed4e
|
refs/heads/master
| 2021-06-30T21:47:07.636558
| 2021-06-27T15:15:35
| 2021-06-27T15:15:35
| 50,152,020
| 220
| 14
|
Apache-2.0
| 2021-06-27T15:11:15
| 2016-01-22T02:29:57
|
Python
|
UTF-8
|
Python
| false
| false
| 1,409
|
py
|
field_index.py
|
from osp.common import config
from osp.common.mixins.elasticsearch import Elasticsearch
from osp.common.utils import query_bar
from osp.fields.models import Field
from clint.textui import progress
class Field_Index(Elasticsearch):

    """Elasticsearch index over the academic-field table."""

    es_index = 'field'

    es_mapping = {
        '_id': {
            'index': 'not_analyzed',
            'store': True,
        },
        'properties': {
            'name': {
                'type': 'string'
            },
        }
    }

    @classmethod
    def es_stream_docs(cls):
        """
        Index fields.

        Yields:
            dict: The next document.
        """
        for field in query_bar(Field.select()):
            yield {'_id': field.id, 'name': field.name}

    @classmethod
    def materialize_facets(cls, counts):
        """
        Materialize facet counts.

        Returns:
            dict: {label, value, count}
        """
        # Batch-fetch the field documents for all facet ids at once.
        doc_ids = [pair[0] for pair in counts]
        response = config.es.mget(
            index=cls.es_index,
            doc_type=cls.es_index,
            body={'ids': doc_ids},
        )
        # Pair each document back up with its count, in request order.
        return [
            {
                'label': doc['_source']['name'],
                'value': int(doc['_id']),
                'count': counts[i][1],
            }
            for i, doc in enumerate(response['docs'])
        ]
|
fa6d26001faf15b30cbb7a8203fa2eccd1381ee5
|
d8aabbc108b074817cb05eba4acff68d4f5c2d6c
|
/test/zmq/zmq_watcher_server.py
|
5a9d698027571e2231d1b5dbf0430526507e97c2
|
[
"MIT",
"BSD-2-Clause",
"LGPL-2.1-or-later",
"BSD-3-Clause",
"LicenseRef-scancode-free-unknown",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
microsoft/tensorwatch
|
e5e868795bd1536f9f2e3cb56b34a97a82e6704e
|
f59730dc7a8735232ef417685800652372c3b5dd
|
refs/heads/master
| 2023-06-29T21:52:27.900779
| 2023-06-12T18:21:59
| 2023-06-12T18:21:59
| 186,783,422
| 3,626
| 394
|
MIT
| 2023-08-30T06:59:14
| 2019-05-15T08:29:34
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 256
|
py
|
zmq_watcher_server.py
|
from tensorwatch.watcher import Watcher
import time
from tensorwatch import utils
utils.set_debug_verbosity(10)
def main():
    """Observe an incrementing counter once per second, 5000 times.

    Each iteration publishes the loop index as ``x`` through the watcher so
    attached TensorWatch clients can stream the value.
    """
    watcher = Watcher()
    for i in range(5000):
        watcher.observe(x=i)
        time.sleep(1)


# Guard the entry point so importing this module does not start the loop.
if __name__ == '__main__':
    main()
|
4e7cb3696feec22022155ded3ab3f54433682725
|
abe6c00f9790df7e6ef20dc02d0b1b225b5020cb
|
/tests/agent/test_agent_run_cancellation.py
|
f13345e6ee77c4e1e360d41939f922819c8a490a
|
[
"Apache-2.0"
] |
permissive
|
PrefectHQ/prefect
|
000e6c5f7df80f76a181f0a30f8661c96417c8bd
|
2c50d2b64c811c364cbc5faa2b5c80a742572090
|
refs/heads/main
| 2023-09-05T20:25:42.965208
| 2023-09-05T18:58:06
| 2023-09-05T18:58:06
| 139,199,684
| 12,917
| 1,539
|
Apache-2.0
| 2023-09-14T20:25:45
| 2018-06-29T21:59:26
|
Python
|
UTF-8
|
Python
| false
| false
| 23,389
|
py
|
test_agent_run_cancellation.py
|
from typing import Generator
from unittest.mock import call
import anyio
import pytest
from prefect.agent import PrefectAgent
from prefect.blocks.core import Block
from prefect.client.orchestration import PrefectClient
from prefect.exceptions import InfrastructureNotAvailable, InfrastructureNotFound
from prefect.infrastructure.base import Infrastructure
from prefect.server.database.orm_models import ORMDeployment
from prefect.server.schemas.core import Deployment
from prefect.states import (
Cancelled,
Cancelling,
Completed,
Pending,
Running,
Scheduled,
StateType,
)
from prefect.testing.utilities import AsyncMock
from prefect.utilities.dispatch import get_registry_for_type
def legacy_named_cancelling_state(**kwargs):
    """Build a Cancelled state carrying the legacy "Cancelling" name."""
    state = Cancelled(name="Cancelling", **kwargs)
    return state
async def _create_test_deployment_from_orm(
    prefect_client: PrefectClient, orm_deployment: ORMDeployment, **kwargs
) -> Deployment:
    """Clone an ORM deployment via the API, overriding fields with kwargs."""
    api_deployment = Deployment.from_orm(orm_deployment)
    updated_deployment = api_deployment.copy(update=kwargs)
    deployment_id = await prefect_client.create_deployment(
        **updated_deployment.dict(
            # Server-managed fields must not be sent on create.
            exclude=api_deployment._reset_fields().union(
                {
                    "is_schedule_active",
                    "created_by",
                    "updated_by",
                    "work_queue_id",
                }
            )
        )
    )
    # Carry the server-assigned id back onto the returned schema object.
    updated_deployment.id = deployment_id
    return updated_deployment
# Test cancellation is called for the correct flow runs -------------------------------
@pytest.mark.parametrize(
    "cancelling_constructor", [legacy_named_cancelling_state, Cancelling]
)
async def test_agent_cancel_run_called_for_cancelling_run(
    prefect_client: PrefectClient,
    deployment: ORMDeployment,
    cancelling_constructor,
):
    """A run in a Cancelling (or legacy named-Cancelling) state is dispatched
    to `cancel_run` exactly once."""
    flow_run = await prefect_client.create_flow_run_from_deployment(
        deployment.id,
        state=cancelling_constructor(),
    )
    async with PrefectAgent(
        work_queues=[deployment.work_queue_name],
        work_pool_name=flow_run.work_pool_name,
        prefetch_seconds=10,
    ) as agent:
        # Mock cancel_run so we verify dispatch only, not the kill itself.
        agent.cancel_run = AsyncMock()
        await agent.check_for_cancelled_flow_runs()
    agent.cancel_run.assert_awaited_once_with(flow_run)
@pytest.mark.parametrize(
    "state",
    [
        # Name not "Cancelling"
        Cancelled(),
        # Name "Cancelling" but type not "Cancelled"
        Completed(name="Cancelling"),
        # Type not Cancelled
        Scheduled(),
        Pending(),
        Running(),
    ],
)
async def test_agent_cancel_run_not_called_for_other_states(
    prefect_client: PrefectClient, deployment: ORMDeployment, state
):
    """Runs in any non-Cancelling state must never trigger cancellation."""
    await prefect_client.create_flow_run_from_deployment(
        deployment.id,
        state=state,
    )
    async with PrefectAgent(
        work_queues=[deployment.work_queue_name], prefetch_seconds=10
    ) as agent:
        agent.cancel_run = AsyncMock()
        await agent.check_for_cancelled_flow_runs()
    agent.cancel_run.assert_not_called()
@pytest.mark.parametrize(
    "cancelling_constructor", [legacy_named_cancelling_state, Cancelling]
)
async def test_agent_cancel_run_called_for_cancelling_run_with_multiple_work_queues(
    prefect_client: PrefectClient,
    deployment: ORMDeployment,
    cancelling_constructor,
):
    """An agent polling several queues still cancels a run found in one."""
    # Move the deployment onto one of the agent's two queues.
    deployment.work_queue_name = "foo"
    await prefect_client.update_deployment(deployment)
    flow_run = await prefect_client.create_flow_run_from_deployment(
        deployment.id,
        state=cancelling_constructor(),
    )
    async with PrefectAgent(work_queues=["foo", "bar"], prefetch_seconds=10) as agent:
        agent.cancel_run = AsyncMock()
        await agent.check_for_cancelled_flow_runs()
    agent.cancel_run.assert_awaited_once_with(flow_run)
@pytest.mark.parametrize(
    "cancelling_constructor", [legacy_named_cancelling_state, Cancelling]
)
async def test_agent_cancel_run_called_for_each_cancelling_run_in_multiple_work_queues(
    prefect_client: PrefectClient,
    deployment: ORMDeployment,
    cancelling_constructor,
):
    """One cancelling run on each of two queues yields two cancel calls."""
    deployment_foo = await _create_test_deployment_from_orm(
        prefect_client, deployment, work_queue_name="foo"
    )
    deployment_bar = await _create_test_deployment_from_orm(
        prefect_client, deployment, work_queue_name="bar"
    )
    flow_run_foo = await prefect_client.create_flow_run_from_deployment(
        deployment_foo.id,
        state=cancelling_constructor(),
    )
    flow_run_bar = await prefect_client.create_flow_run_from_deployment(
        deployment_bar.id,
        state=cancelling_constructor(),
    )
    async with PrefectAgent(work_queues=["foo", "bar"], prefetch_seconds=10) as agent:
        agent.cancel_run = AsyncMock()
        await agent.check_for_cancelled_flow_runs()
    # Cancellation order across queues is unspecified.
    agent.cancel_run.assert_has_awaits(
        [call(flow_run_foo), call(flow_run_bar)], any_order=True
    )
@pytest.mark.parametrize(
    "cancelling_constructor", [legacy_named_cancelling_state, Cancelling]
)
async def test_agent_cancel_run_called_for_each_cancelling_run_in_a_work_queue(
    prefect_client: PrefectClient, deployment: ORMDeployment, cancelling_constructor
):
    """Every cancelling run on a single queue gets its own cancel call."""
    deployment_foo = await _create_test_deployment_from_orm(
        prefect_client, deployment, work_queue_name="foo"
    )
    flow_run_1 = await prefect_client.create_flow_run_from_deployment(
        deployment_foo.id,
        state=cancelling_constructor(),
    )
    flow_run_2 = await prefect_client.create_flow_run_from_deployment(
        deployment_foo.id,
        state=cancelling_constructor(),
    )
    flow_run_3 = await prefect_client.create_flow_run_from_deployment(
        deployment_foo.id,
        state=cancelling_constructor(),
    )
    async with PrefectAgent(work_queues=["foo"], prefetch_seconds=10) as agent:
        agent.cancel_run = AsyncMock()
        await agent.check_for_cancelled_flow_runs()
    # All three runs must be cancelled; order is unspecified.
    agent.cancel_run.assert_has_awaits(
        [call(flow_run_1), call(flow_run_2), call(flow_run_3)], any_order=True
    )
@pytest.mark.parametrize(
    "cancelling_constructor", [legacy_named_cancelling_state, Cancelling]
)
async def test_agent_cancel_run_not_called_for_other_work_queues(
    prefect_client: PrefectClient, deployment, cancelling_constructor
):
    """An agent watching a different queue must not cancel the run."""
    await prefect_client.create_flow_run_from_deployment(
        deployment.id,
        state=cancelling_constructor(),
    )
    async with PrefectAgent(
        work_queues=[f"not-{deployment.work_queue_name}"], prefetch_seconds=10
    ) as agent:
        agent.cancel_run = AsyncMock()
        await agent.check_for_cancelled_flow_runs()
    agent.cancel_run.assert_not_called()
# Test enforcement of cancellation ----------------------------------------------------
@pytest.fixture
def mock_infrastructure_kill(monkeypatch) -> Generator[AsyncMock, None, None]:
    """
    Mocks all subtype implementations of `Infrastructure.kill`.

    A single shared AsyncMock is installed on every Infrastructure subclass
    so a test can assert kill awaits regardless of which subtype is used.
    """
    mock = AsyncMock()
    # Patch all infrastructure types
    types = get_registry_for_type(Block)
    for t in types.values():
        if not issubclass(t, Infrastructure):
            continue
        # raising=False: some subtypes may not define kill themselves.
        monkeypatch.setattr(t, "kill", mock, raising=False)
    yield mock
@pytest.mark.parametrize(
    "cancelling_constructor", [legacy_named_cancelling_state, Cancelling]
)
async def test_agent_cancel_run_kills_run_with_infrastructure_pid(
    prefect_client: PrefectClient,
    deployment: ORMDeployment,
    mock_infrastructure_kill: AsyncMock,
    cancelling_constructor,
):
    """With an infrastructure pid attached, cancellation calls kill(pid)."""
    flow_run = await prefect_client.create_flow_run_from_deployment(
        deployment.id,
        state=cancelling_constructor(),
    )
    await prefect_client.update_flow_run(flow_run.id, infrastructure_pid="test")
    async with PrefectAgent(
        work_queues=[deployment.work_queue_name],
        work_pool_name=flow_run.work_pool_name,
        prefetch_seconds=10,
    ) as agent:
        await agent.check_for_cancelled_flow_runs()
    mock_infrastructure_kill.assert_awaited_once_with("test")
@pytest.mark.parametrize(
    "cancelling_constructor", [legacy_named_cancelling_state, Cancelling]
)
async def test_agent_cancel_run_with_missing_infrastructure_pid(
    prefect_client: PrefectClient,
    deployment: ORMDeployment,
    mock_infrastructure_kill: AsyncMock,
    caplog,
    cancelling_constructor,
):
    """Without a pid, the run is marked Cancelled but no kill is attempted,
    and the gap is surfaced in logs and the state message."""
    flow_run = await prefect_client.create_flow_run_from_deployment(
        deployment.id,
        state=cancelling_constructor(),
    )
    async with PrefectAgent(
        work_queues=[deployment.work_queue_name],
        work_pool_name=flow_run.work_pool_name,
        prefetch_seconds=10,
    ) as agent:
        await agent.check_for_cancelled_flow_runs()
    mock_infrastructure_kill.assert_not_awaited()
    # State name updated to prevent further attempts
    post_flow_run = await prefect_client.read_flow_run(flow_run.id)
    assert post_flow_run.state.name == "Cancelled"
    # Information broadcasted to user in logs and state message
    assert (
        "does not have an infrastructure pid attached. Cancellation cannot be"
        " guaranteed."
        in caplog.text
    )
    assert "missing infrastructure tracking information" in post_flow_run.state.message
@pytest.mark.usefixtures("mock_infrastructure_kill")
@pytest.mark.parametrize(
    "cancelling_constructor", [legacy_named_cancelling_state, Cancelling]
)
async def test_agent_cancel_run_updates_state_type(
    prefect_client: PrefectClient,
    deployment: ORMDeployment,
    cancelling_constructor,
):
    """A successful cancellation moves the run's state type to CANCELLED."""
    flow_run = await prefect_client.create_flow_run_from_deployment(
        deployment.id,
        state=cancelling_constructor(),
    )
    await prefect_client.update_flow_run(flow_run.id, infrastructure_pid="test")
    async with PrefectAgent(
        work_queues=[deployment.work_queue_name],
        work_pool_name=flow_run.work_pool_name,
        prefetch_seconds=10,
    ) as agent:
        await agent.check_for_cancelled_flow_runs()
    post_flow_run = await prefect_client.read_flow_run(flow_run.id)
    assert post_flow_run.state.type == StateType.CANCELLED
@pytest.mark.usefixtures("mock_infrastructure_kill")
@pytest.mark.parametrize(
    "cancelling_constructor", [legacy_named_cancelling_state, Cancelling]
)
async def test_agent_cancel_run_preserves_other_state_properties(
    prefect_client: PrefectClient,
    deployment: ORMDeployment,
    cancelling_constructor,
):
    """Cancellation only changes type/name/timestamp/id; everything else on
    the state (e.g. the message) is preserved."""
    expected_changed_fields = {"type", "name", "timestamp", "id"}
    flow_run = await prefect_client.create_flow_run_from_deployment(
        deployment.id,
        state=cancelling_constructor(message="test"),
    )
    await prefect_client.update_flow_run(flow_run.id, infrastructure_pid="test")
    async with PrefectAgent(
        work_queues=[deployment.work_queue_name], prefetch_seconds=10
    ) as agent:
        await agent.check_for_cancelled_flow_runs()
    post_flow_run = await prefect_client.read_flow_run(flow_run.id)
    # Compare the before/after states with the expected-to-change fields
    # excluded on both sides.
    assert post_flow_run.state.dict(
        exclude=expected_changed_fields
    ) == flow_run.state.dict(exclude=expected_changed_fields)
@pytest.mark.parametrize(
    "cancelling_constructor", [legacy_named_cancelling_state, Cancelling]
)
async def test_agent_cancel_run_with_infrastructure_not_available_during_kill(
    prefect_client: PrefectClient,
    deployment: ORMDeployment,
    mock_infrastructure_kill: AsyncMock,
    caplog,
    cancelling_constructor,
):
    """When kill raises InfrastructureNotAvailable, this agent stops trying
    but leaves the run Cancelling so another agent may attempt it."""
    flow_run = await prefect_client.create_flow_run_from_deployment(
        deployment.id,
        state=cancelling_constructor(),
    )
    await prefect_client.update_flow_run(flow_run.id, infrastructure_pid="test")
    mock_infrastructure_kill.side_effect = InfrastructureNotAvailable("Test!")
    async with PrefectAgent(
        work_queues=[deployment.work_queue_name],
        work_pool_name=flow_run.work_pool_name,
        prefetch_seconds=10,
    ) as agent:
        await agent.check_for_cancelled_flow_runs()
        # Perform a second call to check that it is tracked locally that this agent
        # should not try again
        await agent.check_for_cancelled_flow_runs()
    # Only awaited once
    mock_infrastructure_kill.assert_awaited_once_with("test")
    # State name not updated; other agents may attempt the kill
    post_flow_run = await prefect_client.read_flow_run(flow_run.id)
    assert post_flow_run.state.name == "Cancelling"
    # Exception message is included with note on agent action
    assert "Test! Flow run cannot be cancelled by this agent." in caplog.text
    # State message is not changed
    assert post_flow_run.state.message is None
@pytest.mark.parametrize(
    "cancelling_constructor", [legacy_named_cancelling_state, Cancelling]
)
async def test_agent_cancel_run_with_infrastructure_not_found_during_kill(
    prefect_client: PrefectClient,
    deployment: ORMDeployment,
    mock_infrastructure_kill: AsyncMock,
    caplog,
    cancelling_constructor,
):
    """When kill raises InfrastructureNotFound, the run is marked Cancelled
    (the infrastructure is already gone) and no further kills happen."""
    flow_run = await prefect_client.create_flow_run_from_deployment(
        deployment.id,
        state=cancelling_constructor(),
    )
    await prefect_client.update_flow_run(flow_run.id, infrastructure_pid="test")
    mock_infrastructure_kill.side_effect = InfrastructureNotFound("Test!")
    async with PrefectAgent(
        work_queues=[deployment.work_queue_name],
        work_pool_name=flow_run.work_pool_name,
        prefetch_seconds=10,
    ) as agent:
        await agent.check_for_cancelled_flow_runs()
        # Perform a second call to check that another cancellation attempt is not made
        await agent.check_for_cancelled_flow_runs()
    # Only awaited once
    mock_infrastructure_kill.assert_awaited_once_with("test")
    # State name updated to prevent further attempts
    post_flow_run = await prefect_client.read_flow_run(flow_run.id)
    assert post_flow_run.state.name == "Cancelled"
    # Exception message is included with note on agent action
    assert "Test! Marking flow run as cancelled." in caplog.text
    # No need for state message update
    assert post_flow_run.state.message is None
@pytest.mark.parametrize(
    "cancelling_constructor", [legacy_named_cancelling_state, Cancelling]
)
@pytest.mark.flaky(max_runs=3)
async def test_agent_cancel_run_with_unknown_error_during_kill(
    prefect_client: PrefectClient,
    deployment: ORMDeployment,
    mock_infrastructure_kill: AsyncMock,
    caplog,
    cancelling_constructor,
):
    """An unexpected kill error is logged with a traceback and the run stays
    Cancelling so the kill is retried on the next check."""
    flow_run = await prefect_client.create_flow_run_from_deployment(
        deployment.id,
        state=cancelling_constructor(),
    )
    await prefect_client.update_flow_run(flow_run.id, infrastructure_pid="test")
    mock_infrastructure_kill.side_effect = ValueError("Oh no!")
    async with PrefectAgent(
        work_queues=[deployment.work_queue_name],
        work_pool_name=flow_run.work_pool_name,
        prefetch_seconds=10,
    ) as agent:
        await agent.check_for_cancelled_flow_runs()
        # Give the first attempt time to settle before retrying.
        await anyio.sleep(0.75)
        await agent.check_for_cancelled_flow_runs()
    # Multiple attempts should be made
    mock_infrastructure_kill.assert_has_awaits([call("test"), call("test")])
    # State name not updated
    post_flow_run = await prefect_client.read_flow_run(flow_run.id)
    assert post_flow_run.state.name == "Cancelling"
    assert (
        "Encountered exception while killing infrastructure for flow run" in caplog.text
    )
    assert "ValueError: Oh no!" in caplog.text
    assert "Traceback" in caplog.text
@pytest.mark.parametrize(
    "cancelling_constructor", [legacy_named_cancelling_state, Cancelling]
)
async def test_agent_cancel_run_without_infrastructure_support_for_kill(
    prefect_client: PrefectClient,
    deployment: ORMDeployment,
    caplog,
    monkeypatch,
    cancelling_constructor,
):
    """If no infrastructure type implements kill, the run stays Cancelling
    and the limitation is logged."""
    # Patch all infrastructure types: remove kill everywhere.
    types = get_registry_for_type(Block)
    for t in types.values():
        if not issubclass(t, Infrastructure):
            continue
        monkeypatch.delattr(t, "kill", raising=False)
    flow_run = await prefect_client.create_flow_run_from_deployment(
        deployment.id,
        state=cancelling_constructor(),
    )
    await prefect_client.update_flow_run(flow_run.id, infrastructure_pid="test")
    async with PrefectAgent(
        work_queues=[deployment.work_queue_name],
        work_pool_name=flow_run.work_pool_name,
        prefetch_seconds=10,
    ) as agent:
        await agent.check_for_cancelled_flow_runs()
    # State name not updated; another agent may have a code version that supports
    # killing this flow run
    post_flow_run = await prefect_client.read_flow_run(flow_run.id)
    assert post_flow_run.state.name == "Cancelling"
    assert (
        "infrastructure 'process' does not support killing created infrastructure."
        in caplog.text
    )
    assert "Cancellation cannot be guaranteed." in caplog.text
@pytest.mark.parametrize(
    "cancelling_constructor", [legacy_named_cancelling_state, Cancelling]
)
async def test_agent_started_in_work_pool_without_work_queue_puts_flow_run_into_cancelled_state(
    prefect_client: PrefectClient,
    deployment: ORMDeployment,
    caplog,
    cancelling_constructor,
    work_pool,
):
    """An agent configured with only the run's work pool (no queue) still
    finds and cancels the run."""
    flow_run = await prefect_client.create_flow_run_from_deployment(
        deployment.id,
        state=cancelling_constructor(),
    )
    # agent with work pool but no work queue
    async with PrefectAgent(
        work_pool_name=work_pool.name, prefetch_seconds=10
    ) as agent:
        await agent.check_for_cancelled_flow_runs()
    # make sure it is actually cancelled
    assert "Found 1 flow runs awaiting cancellation" in caplog.text
    post_flow_run = await prefect_client.read_flow_run(flow_run.id)
    assert post_flow_run.state.name == "Cancelled"
@pytest.mark.parametrize(
    "cancelling_constructor", [legacy_named_cancelling_state, Cancelling]
)
async def test_agent_started_in_different_work_pool_without_work_queue_does_not_cancel_flow_run(
    prefect_client: PrefectClient,
    deployment: ORMDeployment,
    caplog,
    cancelling_constructor,
):
    """An agent watching an unrelated pool must not touch this run."""
    run = await prefect_client.create_flow_run_from_deployment(
        deployment.id,
        state=cancelling_constructor(),
    )
    # Agent is bound to a different pool and no explicit work queue.
    async with PrefectAgent(
        work_pool_name="another-work-pool", prefetch_seconds=10
    ) as foreign_agent:
        await foreign_agent.check_for_cancelled_flow_runs()
    assert "Found 1 flow runs awaiting cancellation" not in caplog.text
    # The run should still be awaiting cancellation by some other agent.
    refreshed_run = await prefect_client.read_flow_run(run.id)
    assert refreshed_run.state.name == "Cancelling"
@pytest.mark.parametrize(
    "cancelling_constructor", [legacy_named_cancelling_state, Cancelling]
)
async def test_agent_started_in_different_work_pool_with_same_work_queue_name_does_not_cancel_flow_run(
    prefect_client: PrefectClient,
    deployment: ORMDeployment,
    caplog,
    cancelling_constructor,
):
    """A matching queue name in the wrong pool is not a match: no cancellation."""
    run = await prefect_client.create_flow_run_from_deployment(
        deployment.id,
        state=cancelling_constructor(),
    )
    assert run.work_pool_name == "test-work-pool"
    # Same queue name, but scoped to a different pool.
    async with PrefectAgent(
        work_pool_name="another-work-pool",
        work_queues=[run.work_queue_name],
        prefetch_seconds=10,
    ) as foreign_agent:
        await foreign_agent.check_for_cancelled_flow_runs()
    assert "Found 1 flow runs awaiting cancellation" not in caplog.text
    refreshed_run = await prefect_client.read_flow_run(run.id)
    assert refreshed_run.state.name == "Cancelling"
@pytest.mark.parametrize(
    "cancelling_constructor", [legacy_named_cancelling_state, Cancelling]
)
async def test_agent_started_in_same_work_pool_with_same_work_queue_name_cancels_flow_run(
    prefect_client: PrefectClient,
    deployment: ORMDeployment,
    caplog,
    cancelling_constructor,
):
    """Matching both pool and queue name, the agent cancels the run."""
    run = await prefect_client.create_flow_run_from_deployment(
        deployment.id,
        state=cancelling_constructor(),
    )
    assert run.work_pool_name == "test-work-pool"
    async with PrefectAgent(
        work_pool_name=run.work_pool_name,
        work_queues=[run.work_queue_name],
        prefetch_seconds=10,
    ) as matching_agent:
        await matching_agent.check_for_cancelled_flow_runs()
    # The run must actually reach the Cancelled state.
    assert "Found 1 flow runs awaiting cancellation" in caplog.text
    refreshed_run = await prefect_client.read_flow_run(run.id)
    assert refreshed_run.state.name == "Cancelled"
@pytest.mark.parametrize(
    "cancelling_constructor", [legacy_named_cancelling_state, Cancelling]
)
async def test_agent_started_in_same_work_pool_with_different_work_queue_name_does_not_cancel_flow_run(
    prefect_client: PrefectClient,
    deployment: ORMDeployment,
    caplog,
    cancelling_constructor,
):
    """Right pool but wrong queue: the agent must leave the run alone."""
    run = await prefect_client.create_flow_run_from_deployment(
        deployment.id,
        state=cancelling_constructor(),
    )
    assert run.work_queue_name == "wq-1"
    # Agent watches a sibling queue in the same pool.
    async with PrefectAgent(
        work_pool_name=run.work_pool_name,
        work_queues=["wq-2"],
        prefetch_seconds=10,
    ) as sibling_queue_agent:
        await sibling_queue_agent.check_for_cancelled_flow_runs()
    assert "Found 1 flow runs awaiting cancellation" not in caplog.text
    refreshed_run = await prefect_client.read_flow_run(run.id)
    assert refreshed_run.state.name == "Cancelling"
@pytest.mark.parametrize(
    "cancelling_constructor", [legacy_named_cancelling_state, Cancelling]
)
async def test_agent_started_without_work_pool_does_not_cancel_flow_run_in_nondefault_work_pool(
    prefect_client: PrefectClient,
    deployment: ORMDeployment,
    caplog,
    cancelling_constructor,
):
    """A pool-less agent only sees the default pool, not this run's pool."""
    run = await prefect_client.create_flow_run_from_deployment(
        deployment.id,
        state=cancelling_constructor(),
    )
    assert run.work_queue_name == "wq-1"
    assert run.work_pool_name == "test-work-pool"
    # Queue name matches, but the agent was started without any pool.
    async with PrefectAgent(
        work_queues=["wq-1"],
        prefetch_seconds=10,
    ) as poolless_agent:
        assert poolless_agent.work_pool_name is None
        await poolless_agent.check_for_cancelled_flow_runs()
    assert "Found 1 flow runs awaiting cancellation" not in caplog.text
    refreshed_run = await prefect_client.read_flow_run(run.id)
    assert refreshed_run.state.name == "Cancelling"
@pytest.mark.parametrize(
    "cancelling_constructor", [legacy_named_cancelling_state, Cancelling]
)
async def test_agent_started_with_nondefault_work_pool_does_not_cancel_flow_run_in_default_work_pool(
    prefect_client: PrefectClient,
    deployment_in_default_work_pool: ORMDeployment,
    caplog,
    cancelling_constructor,
):
    """An agent pinned to a non-default pool ignores default-pool runs."""
    run = await prefect_client.create_flow_run_from_deployment(
        deployment_in_default_work_pool.id,
        state=cancelling_constructor(),
    )
    assert run.work_queue_name == "wq-1"
    assert run.work_pool_name == "default-agent-pool"
    # Same queue name, but the agent is bound to a different pool.
    async with PrefectAgent(
        work_pool_name="test-work-pool",
        work_queues=["wq-1"],
        prefetch_seconds=10,
    ) as nondefault_pool_agent:
        await nondefault_pool_agent.check_for_cancelled_flow_runs()
    assert "Found 1 flow runs awaiting cancellation" not in caplog.text
    refreshed_run = await prefect_client.read_flow_run(run.id)
    assert refreshed_run.state.name == "Cancelling"
|
e4e37dead42be51dd807ff9adbd60a5f678a22b7
|
444a9480bce2035565332d4d4654244c0b5cd47b
|
/research/cv/centernet/src/__init__.py
|
6fd650c3f7d374ff3f74402512b8283c24e3b86c
|
[
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference",
"LicenseRef-scancode-proprietary-license"
] |
permissive
|
mindspore-ai/models
|
7ede9c6454e77e995e674628204e1c6e76bd7b27
|
eab643f51336dbf7d711f02d27e6516e5affee59
|
refs/heads/master
| 2023-07-20T01:49:34.614616
| 2023-07-17T11:43:18
| 2023-07-17T11:43:18
| 417,393,380
| 301
| 92
|
Apache-2.0
| 2023-05-17T11:22:28
| 2021-10-15T06:38:37
|
Python
|
UTF-8
|
Python
| false
| false
| 1,493
|
py
|
__init__.py
|
# Copyright 2020-2022 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""CenterNet Init."""
from .centernet_pose import GatherMultiPoseFeatureCell, CenterNetMultiPoseLossCell, \
CenterNetWithLossScaleCell, CenterNetMultiPoseEval, CenterNetWithoutLossScaleCell
from .dataset import COCOHP
from .visual import visual_allimages, visual_image
from .decode import MultiPoseDecode
from .post_process import convert_eval_format, to_float, resize_detection, post_process, merge_outputs
from .post_process_onnx import post_process_onnx
# Public API of the package: every name re-exported from the submodules above.
__all__ = [
    "GatherMultiPoseFeatureCell", "CenterNetMultiPoseLossCell", "CenterNetWithLossScaleCell", \
    "CenterNetMultiPoseEval", "CenterNetWithoutLossScaleCell", "COCOHP", "visual_allimages", \
    "visual_image", "MultiPoseDecode", "convert_eval_format", "to_float", "resize_detection", \
    "post_process", "merge_outputs", "post_process_onnx"
]
|
8b3f787fb9f2cf34dfcdf8d29f9badb1366101c2
|
76f23cc69dc10c44bc7cf00b78e37db04c7a9c45
|
/datalad/support/third/loris_token_generator.py
|
3118f5b8611103fa0991a54a76ea4dca71b50e37
|
[
"BSD-3-Clause",
"MIT"
] |
permissive
|
datalad/datalad
|
2d9c247344d340325ba84e7ab674ac320e57f30c
|
40332b5ad25bf8744f7399f6c3575f7d28f71384
|
refs/heads/maint
| 2023-09-04T11:03:02.264714
| 2023-08-10T15:56:19
| 2023-08-10T15:56:19
| 14,052,034
| 453
| 134
|
NOASSERTION
| 2023-09-14T19:10:18
| 2013-11-01T19:40:08
|
Python
|
UTF-8
|
Python
| false
| false
| 1,457
|
py
|
loris_token_generator.py
|
# emacs: -*- mode: python; py-indent-offset: 4; tab-width: 4; indent-tabs-mode: nil -*-
# ex: set sts=4 ts=4 sw=4 et:
# ## ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
#
# See COPYING file distributed along with the datalad package for the
# copyright and license terms.
#
# ## ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### #
import sys
import json
from urllib.request import (
Request,
urlopen,
)
from urllib.error import HTTPError
from datalad.support.exceptions import (
AccessDeniedError,
)
from datalad.utils import ensure_unicode
class LORISTokenGenerator(object):
    """
    Generate a LORIS API token by making a request to the
    LORIS login API endpoint with the given username
    and password.

    url is the complete URL of the $LORIS/api/$VERSION/login
    endpoint.
    """
    def __init__(self, url=None):
        # `assert` statements are stripped when Python runs with -O, so the
        # original `assert(url is not None)` offered no guarantee; validate
        # explicitly and raise a meaningful error instead.
        if url is None:
            raise ValueError("LORISTokenGenerator requires a login endpoint URL")
        self.url = url

    def generate_token(self, user=None, password=None):
        """POST the credentials to the login endpoint and return the token.

        Raises AccessDeniedError (chained to the underlying HTTPError) when
        the server rejects the credentials.
        """
        data = {'username': user, 'password' : password}
        encoded_data = json.dumps(data).encode('utf-8')
        request = Request(self.url, encoded_data)
        try:
            response = urlopen(request)
        except HTTPError as exc:
            # Chain the cause so the HTTP status/traceback is preserved.
            raise AccessDeniedError("Could not authenticate into LORIS") from exc
        str_response = ensure_unicode(response.read())
        data = json.loads(str_response)
        return data["token"]
|
c167ec964d7da124917bae7fe6b83946dbe9f330
|
083312af3c596aad1a5123fb66bf73ca9907a928
|
/sqlglot/dataframe/sql/readwriter.py
|
080448622bf2bc651864682b4d5f5d8a34efcadf
|
[
"MIT"
] |
permissive
|
tobymao/sqlglot
|
8f7a6da463c765d39f75390cddafbd9a0b3076a9
|
32d8e5423a7d7e1b56805fd0020b4aac3ce15d84
|
refs/heads/main
| 2023-09-01T05:18:17.148809
| 2023-09-01T01:53:06
| 2023-09-01T01:53:17
| 347,277,349
| 3,617
| 365
|
MIT
| 2023-09-14T21:43:44
| 2021-03-13T05:01:56
|
Python
|
UTF-8
|
Python
| false
| false
| 3,456
|
py
|
readwriter.py
|
from __future__ import annotations
import typing as t
import sqlglot
from sqlglot import expressions as exp
from sqlglot.helper import object_to_dict
if t.TYPE_CHECKING:
from sqlglot.dataframe.sql.dataframe import DataFrame
from sqlglot.dataframe.sql.session import SparkSession
class DataFrameReader:
    """Reads tables into DataFrames, mirroring PySpark's ``DataFrameReader``."""

    def __init__(self, spark: SparkSession):
        self.spark = spark

    def table(self, tableName: str) -> DataFrame:
        """Return a DataFrame that selects every known column of ``tableName``."""
        from sqlglot.dataframe.sql.dataframe import DataFrame
        from sqlglot.dataframe.sql.session import SparkSession

        dialect = SparkSession().dialect
        # Register the table with the global schema so column lookups resolve.
        sqlglot.schema.add_table(tableName, dialect=dialect)

        source_table = exp.to_table(tableName, dialect=dialect).transform(
            dialect.normalize_identifier
        )
        known_columns = sqlglot.schema.column_names(tableName, dialect=dialect)
        select_expression = exp.Select().from_(source_table).select(*known_columns)
        return DataFrame(self.spark, select_expression)
class DataFrameWriter:
    """Builds SQL output expressions (INSERT / CREATE TABLE) for a DataFrame.

    Mirrors PySpark's ``DataFrameWriter`` API: every configuration method
    returns a new writer produced by ``copy`` rather than mutating in place.
    """

    def __init__(
        self,
        df: DataFrame,
        spark: t.Optional[SparkSession] = None,
        mode: t.Optional[str] = None,
        by_name: bool = False,
    ):
        self._df = df
        self._spark = spark or df.spark
        self._mode = mode
        self._by_name = by_name

    def copy(self, **kwargs) -> DataFrameWriter:
        """Return a new writer with ``kwargs`` overriding the current state.

        ``object_to_dict`` yields the private attribute names (``_df``,
        ``_mode``, ...); the leading underscore is stripped so each key
        matches the corresponding ``__init__`` parameter name.
        """
        return DataFrameWriter(
            **{
                k[1:] if k.startswith("_") else k: v
                for k, v in object_to_dict(self, **kwargs).items()
            }
        )

    def sql(self, **kwargs) -> t.List[str]:
        # Delegates SQL generation to the wrapped DataFrame.
        return self._df.sql(**kwargs)

    def mode(self, saveMode: t.Optional[str]) -> DataFrameWriter:
        """Set the save mode (e.g. "append", "ignore", "overwrite")."""
        return self.copy(_mode=saveMode)

    @property
    def byName(self):
        # Fluent flag: subsequent inserts match columns by name, not position.
        return self.copy(by_name=True)

    def insertInto(self, tableName: str, overwrite: t.Optional[bool] = None) -> DataFrameWriter:
        """Wrap the DataFrame's output in an INSERT INTO ``tableName``."""
        from sqlglot.dataframe.sql.session import SparkSession

        output_expression_container = exp.Insert(
            **{
                "this": exp.to_table(tableName),
                "overwrite": overwrite,
            }
        )
        df = self._df.copy(output_expression_container=output_expression_container)
        if self._by_name:
            # Reorder the projection to match the target table's visible columns.
            columns = sqlglot.schema.column_names(
                tableName, only_visible=True, dialect=SparkSession().dialect
            )
            df = df._convert_leaf_to_cte().select(*columns)
        return self.copy(_df=df)

    def saveAsTable(self, name: str, format: t.Optional[str] = None, mode: t.Optional[str] = None):
        """Create (or append to) table ``name`` according to the save mode.

        Raises:
            NotImplementedError: if ``format`` is given.
        """
        if format is not None:
            raise NotImplementedError("Providing Format in the save as table is not supported")
        # NOTE: str(self._mode) turns an unset mode (None) into the string
        # "None", which matches none of the branches below, so the default is
        # a plain CREATE TABLE (no IF NOT EXISTS / OR REPLACE).
        exists, replace, mode = None, None, mode or str(self._mode)
        if mode == "append":
            return self.insertInto(name)
        if mode == "ignore":
            exists = True
        if mode == "overwrite":
            replace = True
        output_expression_container = exp.Create(
            this=exp.to_table(name),
            kind="TABLE",
            exists=exists,
            replace=replace,
        )
        return self.copy(_df=self._df.copy(output_expression_container=output_expression_container))
|
4d423d7f4dfaa991fa3cca441dee3f44c6acdda0
|
8cca481c8dd508012aa794e2f9a07e11c3706a87
|
/presidio-analyzer/setup.py
|
5329a91d8bacc5c366e3d4ae03f3d5b2d236d008
|
[
"Apache-2.0",
"LicenseRef-scancode-public-domain",
"BSD-2-Clause",
"BSD-3-Clause",
"Unlicense",
"LicenseRef-scancode-proprietary-license",
"LicenseRef-scancode-warranty-disclaimer",
"CNRI-Python",
"MIT",
"LicenseRef-scancode-secret-labs-2011",
"LicenseRef-scancode-generic-cla"
] |
permissive
|
microsoft/presidio
|
174472891e241e292982eee26a666d71ca263d42
|
3effc1467b8714714d5112ef7b627889507ea83d
|
refs/heads/main
| 2023-08-15T20:14:00.962803
| 2023-08-14T19:13:49
| 2023-08-14T19:13:49
| 132,129,752
| 2,092
| 412
|
MIT
| 2023-09-13T18:17:58
| 2018-05-04T11:08:58
|
Python
|
UTF-8
|
Python
| false
| false
| 1,784
|
py
|
setup.py
|
"""Setup.py for Presidio Analyzer."""
import os.path
from os import path
import setuptools
__version__ = ""
this_directory = path.abspath(path.dirname(__file__))
parent_directory = os.path.abspath(os.path.join(this_directory, os.pardir))
with open(path.join(this_directory, "README.MD"), encoding="utf-8") as f:
long_description = f.read()
try:
with open(os.path.join(parent_directory, "VERSION")) as version_file:
__version__ = version_file.read().strip()
except Exception:
__version__ = os.environ.get("PRESIDIO_VERSION", "0.0.1-alpha")
setuptools.setup(
name="presidio_analyzer",
version=__version__,
description="Presidio analyzer package",
url="https://github.com/Microsoft/presidio",
packages=[
"presidio_analyzer",
"presidio_analyzer.predefined_recognizers",
"presidio_analyzer.nlp_engine",
"presidio_analyzer.recognizer_registry",
"presidio_analyzer.context_aware_enhancers",
],
trusted_host=["pypi.org"],
tests_require=["pytest", "flake8>=3.7.9"],
install_requires=[
"spacy>=3.4.4",
"regex",
"tldextract",
"pyyaml",
"phonenumbers>=8.12",
],
extras_require={
'transformers': ['torch', 'transformers'],
},
include_package_data=True,
license="MIT",
classifiers=[
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
],
long_description=long_description,
long_description_content_type="text/markdown",
)
|
6d7f4f5593501f0f613269fa01021a349ad01e8b
|
b60686a2e351a756f249e0d9faab8fe154a08f11
|
/examples/shouldi/shouldi/python/safety.py
|
37b73cb45fdcc4c775cb6a261846a74e1813b98d
|
[
"MIT",
"LicenseRef-scancode-generic-export-compliance",
"Apache-2.0",
"Python-2.0"
] |
permissive
|
intel/dffml
|
86483b47229b9b62c9f8dfef51491aa02563347e
|
7d381bf67a72fe1ecb1012393d5726085564cb0e
|
refs/heads/main
| 2023-08-28T00:35:04.219193
| 2023-06-06T18:29:16
| 2023-06-06T18:29:16
| 149,512,216
| 237
| 204
|
MIT
| 2023-05-05T15:39:35
| 2018-09-19T21:06:34
|
Python
|
UTF-8
|
Python
| false
| false
| 575
|
py
|
safety.py
|
import sys
import json
import asyncio
from dffml import op
@op
async def safety_check(package: str, version: str) -> int:
    """Run ``safety check`` against a single pinned requirement.

    Feeds "<package>==<version>" to safety on stdin and returns the number
    of entries in its JSON vulnerability report.
    """
    requirement = f"{package}=={version}"

    process = await asyncio.create_subprocess_exec(
        sys.executable,
        "-m",
        "safety",
        "check",
        "--stdin",
        "--json",
        stdin=asyncio.subprocess.PIPE,
        stdout=asyncio.subprocess.PIPE,
        stderr=asyncio.subprocess.PIPE,
    )

    # safety reads the requirement list from stdin; stderr is drained but unused.
    report_json, _ = await process.communicate(requirement.encode() + b"\n")

    return len(json.loads(report_json))
|
36abe75ad5a86997edf03349de8b126c30c14f11
|
c739a77733f60c12eff919e0cd9e4937cb063450
|
/husky_control/launch/teleop_joy.launch.py
|
964bd71cdb3ac6493e9c428460357611ecdbdd2f
|
[] |
no_license
|
husky/husky
|
b3661befb7ae88087eb16ee77af8ce699cbf48a8
|
63981e8df2023d3f2c437409f2b8b68588a33abe
|
refs/heads/foxy-devel
| 2023-08-16T19:58:31.541727
| 2023-04-18T23:55:58
| 2023-04-18T23:55:58
| 32,733,110
| 436
| 460
| null | 2023-08-03T03:10:14
| 2015-03-23T13:14:57
|
C++
|
UTF-8
|
Python
| false
| false
| 1,078
|
py
|
teleop_joy.launch.py
|
from launch import LaunchContext, LaunchDescription
from launch.substitutions import EnvironmentVariable, PathJoinSubstitution
from launch_ros.actions import Node
from launch_ros.substitutions import FindPackageShare
def generate_launch_description():
    """Launch joy_node and teleop_twist_joy using the joystick-specific config."""
    context = LaunchContext()
    # CPR_JOY_TYPE selects which teleop_<type>.yaml configuration is loaded.
    joy_type = EnvironmentVariable('CPR_JOY_TYPE', default_value='logitech')
    joy_config_path = PathJoinSubstitution(
        [FindPackageShare('husky_control'), 'config', ('teleop_' + joy_type.perform(context) + '.yaml')]
    )

    joystick_driver = Node(
        namespace='joy_teleop',
        package='joy',
        executable='joy_node',
        output='screen',
        name='joy_node',
        parameters=[joy_config_path]
    )

    twist_publisher = Node(
        namespace='joy_teleop',
        package='teleop_twist_joy',
        executable='teleop_node',
        output='screen',
        name='teleop_twist_joy_node',
        parameters=[joy_config_path]
    )

    description = LaunchDescription()
    description.add_action(joystick_driver)
    description.add_action(twist_publisher)
    return description
|
f5bdfb63bf6ab7830b3ec3f021ee3d7e72de420c
|
e84637da2fbbd9cda34fac10a1024c8a413d1690
|
/radian/latex/latex_symbols.py
|
5c44ea0bf2996b2374a6a9f93995e85f266ac7e7
|
[
"MIT"
] |
permissive
|
randy3k/radian
|
5162bb6625ea2305d7439ba4942b13762e70fdd9
|
fb643c4c4dad33aa674eb09407c0caa6e41c2d42
|
refs/heads/master
| 2023-08-27T01:16:36.647660
| 2023-08-26T01:04:05
| 2023-08-26T01:04:05
| 86,867,223
| 1,495
| 80
|
MIT
| 2023-08-26T01:04:06
| 2017-03-31T23:13:43
|
Python
|
UTF-8
|
Python
| false
| false
| 64,127
|
py
|
latex_symbols.py
|
# This Python file uses the following encoding: utf-8
from __future__ import unicode_literals
latex_symbols = [
("\\1/8", u"⅛"),
("\\bscra", u"𝓪"),
("\\guilsinglright", u"›"),
("\\blacktriangleright", u"▶"),
("\\bisansc", u"𝙘"),
("\\^4", u"⁴"),
("\\Re", u"ℜ"),
("\\pitchfork", u"⋔"),
("\\bisanskappa", u"𝞳"),
("\\bbz", u"𝕫"),
("\\blockqtrshaded", u"░"),
("\\urcorner", u"⌝"),
("\\frakY", u"𝔜"),
("\\^2", u"²"),
("\\pi", u"π"),
("\\nless", u"≮"),
("\\sqsubseteq", u"⊑"),
("\\Updownarrow", u"⇕"),
("\\leftarrowbsimilar", u"⭋"),
("\\bbF", u"𝔽"),
("\\nrightarrow", u"↛"),
("\\bsansPi", u"𝝥"),
("\\sansseven", u"𝟩"),
("\\Theta", u"Θ"),
("\\rightmoon", u"☽"),
("\\bscrV", u"𝓥"),
("\\ttc", u"𝚌"),
("\\upsilon", u"υ"),
("\\bfrakq", u"𝖖"),
("\\copyright", u"©"),
("\\npreccurlyeq", u"⋠"),
("\\bfrakL", u"𝕷"),
("\\fltns", u"⏥"),
("\\bbN", u"ℕ"),
("\\smile", u"⌣"),
("\\bisansX", u"𝙓"),
("\\0/3", u"↉"),
("\\backsimeq", u"⋍"),
("\\bitau", u"𝝉"),
("\\bisansD", u"𝘿"),
("\\hvlig", u"ƕ"),
("\\lq", u"‘"),
("\\mapsfrom", u"↤"),
("\\blockrighthalf", u"▐"),
("\\perp", u"⟂"),
("\\biS", u"𝑺"),
("\\bscrB", u"𝓑"),
("\\ttn", u"𝚗"),
("\\bitheta", u"𝜽"),
("\\timesbar", u"⨱"),
("\\bik", u"𝒌"),
("\\bsanskappa", u"𝝹"),
("\\llcorner", u"⌞"),
("\\bigtimes", u"⨉"),
("\\circleddash", u"⊝"),
("\\bsansI", u"𝗜"),
("\\leftharpoondown", u"↽"),
("\\alpha", u"α"),
("\\between", u"≬"),
("\\^l", u"ˡ"),
("\\frown", u"⌢"),
("\\RoundImplies", u"⥰"),
("\\blockthreeqtrshaded", u"▓"),
("\\bsansthree", u"𝟯"),
("\\precneqq", u"⪵"),
("\\urblacktriangle", u"◥"),
("\\succeqq", u"⪴"),
("\\nsupseteq", u"⊉"),
("\\bfvarkappa", u"𝛞"),
("\\iota", u"ι"),
("\\overbrace", u"⏞"),
("\\danger", u"☡"),
("\\fraki", u"𝔦"),
("\\rightharpoondown", u"⇁"),
("\\tilde", u"̃"),
("\\upNu", u"Ν"),
("\\RRightarrow", u"⭆"),
("\\sansg", u"𝗀"),
("\\bisansh", u"𝙝"),
("\\itimath", u"𝚤"),
("\\bisansMu", u"𝞛"),
("\\isansZ", u"𝘡"),
("\\rightleftarrows", u"⇄"),
("\\impliedby", u"⟸"),
("\\succapprox", u"⪸"),
("\\Rsh", u"↱"),
("\\sumint", u"⨋"),
("\\bsansvarrho", u"𝞎"),
("\\pointint", u"⨕"),
("\\fdiagovnearrow", u"⤯"),
("\\plussubtwo", u"⨧"),
("\\original", u"⊶"),
("\\nvtwoheadleftarrow", u"⬴"),
("\\bfQ", u"𝐐"),
("\\biw", u"𝒘"),
("\\bsanspartial", u"𝞉"),
("\\bfo", u"𝐨"),
("\\nBumpeq", u"≎̸"),
("\\bisanssigma", u"𝞼"),
("\\frakD", u"𝔇"),
("\\nleftrightarrow", u"↮"),
("\\clockoint", u"⨏"),
("\\scrs", u"𝓈"),
("\\Im", u"ℑ"),
("\\bsansK", u"𝗞"),
("\\bisansvarrho", u"𝟈"),
("\\whtvertoval", u"⬯"),
("\\rarrx", u"⥇"),
("\\smallin", u"∊"),
("\\underleftarrow", u"⃮"),
("\\itx", u"𝑥"),
("\\measangledltosw", u"⦯"),
("\\eqqsim", u"⩳"),
("\\bij", u"𝒋"),
("\\ttW", u"𝚆"),
("\\leo", u"♌"),
("\\bfrakV", u"𝖁"),
("\\bfrakR", u"𝕽"),
("\\smallblacktriangleright", u"▸"),
("\\DownArrowBar", u"⤓"),
("\\surd", u"√"),
("\\leftwhitearrow", u"⇦"),
("\\bsansChi", u"𝝬"),
("\\ge", u"≥"),
("\\rttrnr", u"ɻ"),
("\\bbk", u"𝕜"),
("\\twoheaddownarrow", u"↡"),
("\\ointctrclockwise", u"∳"),
("\\squareulquad", u"◰"),
("\\amalg", u"⨿"),
("\\bagmember", u"⋿"),
("\\fraku", u"𝔲"),
("\\ElOr", u"⩖"),
("\\bfvarTheta", u"𝚹"),
("\\biKappa", u"𝜥"),
("\\turnangle", u"⦢"),
("\\Otimes", u"⨷"),
("\\wideutilde", u"̰"),
("\\isansp", u"𝘱"),
("\\trianglerightblack", u"◮"),
("\\bfr", u"𝐫"),
("\\frakM", u"𝔐"),
("\\frakS", u"𝔖"),
("\\uparrow", u"↑"),
("\\nvleftarrowtail", u"⬹"),
("\\frakG", u"𝔊"),
("\\_4", u"₄"),
("\\measangledrtose", u"⦮"),
("\\biXi", u"𝜩"),
("\\bisansvarpi", u"𝟉"),
("\\doubleplus", u"⧺"),
("\\plussim", u"⨦"),
("\\rvboxline", u"⎹"),
("\\bfnu", u"𝛎"),
("\\Game", u"⅁"),
("\\sterling", u"£"),
("\\bscrs", u"𝓼"),
("\\_x", u"ₓ"),
("\\sanseight", u"𝟪"),
("\\NestedGreaterGreater", u"⪢"),
("\\pentagon", u"⬠"),
("\\supmult", u"⫂"),
("\\bfu", u"𝐮"),
("\\sansLturned", u"⅂"),
("\\frakU", u"𝔘"),
("\\bumpeqq", u"⪮"),
("\\nVDash", u"⊯"),
("\\leftarrowtriangle", u"⇽"),
("\\itgamma", u"𝛾"),
("\\nvRightarrow", u"⤃"),
("\\lnsim", u"⋦"),
("\\downharpoonsleftright", u"⥥"),
("\\yen", u"¥"),
("\\bbB", u"𝔹"),
("\\isanss", u"𝘴"),
("\\theta", u"θ"),
("\\gnapprox", u"⪊"),
("\\itjmath", u"𝚥"),
("\\twoheaduparrowcircle", u"⥉"),
("\\bfZ", u"𝐙"),
("\\smallblacktriangleleft", u"◂"),
("\\bftau", u"𝛕"),
("\\male", u"♂"),
("\\LeftUpVectorBar", u"⥘"),
("\\NotLeftTriangleBar", u"⧏̸"),
("\\nRightarrow", u"⇏"),
("\\1/", u"⅟"),
("\\bfrakm", u"𝖒"),
("\\bigslopedvee", u"⩗"),
("\\blocklowhalf", u"▄"),
("\\veedoublebar", u"⩣"),
("\\forks", u"⫝̸"),
("\\Alpha", u"Α"),
("\\backepsilon", u"϶"),
("\\nsucccurlyeq", u"⋡"),
("\\scrc", u"𝒸"),
("\\bbK", u"𝕂"),
("\\psi", u"ψ"),
("\\biU", u"𝑼"),
("\\ng", u"ŋ"),
("\\eqdef", u"≝"),
("\\gesdotol", u"⪄"),
("\\botsemicircle", u"◡"),
("\\eqqslantless", u"⪛"),
("\\fraks", u"𝔰"),
("\\updownharpoonrightleft", u"⥌"),
("\\bisansj", u"𝙟"),
("\\Sampi", u"Ϡ"),
("\\l", u"ł"),
("\\bisansNu", u"𝞜"),
("\\olessthan", u"⧀"),
("\\star", u"⋆"),
("\\overleftarrow", u"⃖"),
("\\bsanseight", u"𝟴"),
("\\Downarrow", u"⇓"),
("\\lvertneqq", u"≨︀"),
("\\bfS", u"𝐒"),
("\\isansF", u"𝘍"),
("\\^J", u"ᴶ"),
("\\Longmapsto", u"⟾"),
("\\S", u"§"),
("\\gesdot", u"⪀"),
("\\bsansz", u"𝘇"),
("\\rtld", u"ɖ"),
("\\itrho", u"𝜌"),
("\\NotLessLess", u"≪̸"),
("\\backppprime", u"‷"),
("\\leftdotarrow", u"⬸"),
("\\omega", u"ω"),
("\\itNu", u"𝛮"),
("\\fisheye", u"◉"),
("\\NotSquareSubset", u"⊏̸"),
("\\bsansseven", u"𝟳"),
("\\boxcircle", u"⧇"),
("\\sbbrg", u"̪"),
("\\isansu", u"𝘶"),
("\\sansy", u"𝗒"),
("\\visiblespace", u"␣"),
("\\glE", u"⪒"),
("\\squarebotblack", u"⬓"),
("\\Bumpeq", u"≎"),
("\\gtreqless", u"⋛"),
("\\daleth", u"ℸ"),
("\\dottimes", u"⨰"),
("\\twocaps", u"⩋"),
("\\csub", u"⫏"),
("\\bscrA", u"𝓐"),
("\\recorder", u"⌕"),
("\\cupvee", u"⩅"),
("\\bsansTheta", u"𝝝"),
("\\biB", u"𝑩"),
("\\frakN", u"𝔑"),
("\\isansc", u"𝘤"),
("\\bbR", u"ℝ"),
("\\bbD", u"𝔻"),
("\\Elroang", u"⦆"),
("\\forksnot", u"⫝"),
("\\asteraccent", u"⃰"),
("\\leftharpoonupdash", u"⥪"),
("\\bfG", u"𝐆"),
("\\bsanstheta", u"𝝷"),
("\\^D", u"ᴰ"),
("\\bsansupsilon", u"𝞄"),
("\\Angle", u"⦜"),
("\\shuffle", u"⧢"),
("\\wedgemidvert", u"⩚"),
("\\dicev", u"⚄"),
("\\ReverseUpEquilibrium", u"⥯"),
("\\2/5", u"⅖"),
("\\_3", u"₃"),
("\\quotedblleft", u"“"),
("\\itC", u"𝐶"),
("\\bisansH", u"𝙃"),
("\\bisansL", u"𝙇"),
("\\ttK", u"𝙺"),
("\\scrk", u"𝓀"),
("\\bsansW", u"𝗪"),
("\\_phi", u"ᵩ"),
("\\clomeg", u"ɷ"),
("\\^)", u"⁾"),
("\\rightleftharpoons", u"⇌"),
("\\varisins", u"⋳"),
("\\blacksmiley", u"☻"),
("\\ddfnc", u"⦙"),
("\\bfgamma", u"𝛄"),
("\\bsansUpsilon", u"𝝪"),
("\\isansP", u"𝘗"),
("\\scrg", u"ℊ"),
("\\ttB", u"𝙱"),
("\\bigwhitestar", u"☆"),
("\\bigblacktriangleup", u"▲"),
("\\isanse", u"𝘦"),
("\\circlevertfill", u"◍"),
("\\rais", u"˔"),
("\\frakk", u"𝔨"),
("\\nVtwoheadleftarrow", u"⬵"),
("\\ttI", u"𝙸"),
("\\checkmark", u"✓"),
("\\bbh", u"𝕙"),
("\\itA", u"𝐴"),
("\\bfrakn", u"𝖓"),
("\\frakA", u"𝔄"),
("\\rl", u"ɼ"),
("\\sansone", u"𝟣"),
("\\leftarrowplus", u"⥆"),
("\\bisansXi", u"𝞝"),
("\\bbt", u"𝕥"),
("\\nsubseteqq", u"⫅̸"),
("\\mars", u"♂"),
("\\ngtr", u"≯"),
("\\bfrho", u"𝛒"),
("\\sansZ", u"𝖹"),
("\\hksearow", u"⤥"),
("\\acidfree", u"♾"),
("\\bbiD", u"ⅅ"),
("\\bisansB", u"𝘽"),
("\\lesssim", u"≲"),
("\\parallelogramblack", u"▰"),
("\\isansl", u"𝘭"),
("\\angles", u"⦞"),
("\\scrn", u"𝓃"),
("\\isansd", u"𝘥"),
("\\boxquestion", u"⍰"),
("\\Sqcap", u"⩎"),
("\\obar", u"⌽"),
("\\bisansg", u"𝙜"),
("\\^W", u"ᵂ"),
("\\bbeight", u"𝟠"),
("\\Colon", u"∷"),
("\\ltphi", u"ɸ"),
("\\frakO", u"𝔒"),
("\\3/4", u"¾"),
("\\bsansomega", u"𝞈"),
("\\sagittarius", u"♐"),
("\\prurel", u"⊰"),
("\\biN", u"𝑵"),
("\\rvbull", u"◘"),
("\\bsansk", u"𝗸"),
("\\sansv", u"𝗏"),
("\\dot", u"̇"),
("\\Omega", u"Ω"),
("\\ttG", u"𝙶"),
("\\upoldKoppa", u"Ϙ"),
("\\verts", u"ˈ"),
("\\perthousand", u"‰"),
("\\bfK", u"𝐊"),
("\\looparrowright", u"↬"),
("\\scrY", u"𝒴"),
("\\scrt", u"𝓉"),
("\\Vert", u"‖"),
("\\isansy", u"𝘺"),
("\\bisansR", u"𝙍"),
("\\bsansDelta", u"𝝙"),
("\\fdiagovrdiag", u"⤬"),
("\\itchi", u"𝜒"),
("\\ngeqslant", u"⩾̸"),
("\\^1", u"¹"),
("\\bivarTheta", u"𝜭"),
("\\itl", u"𝑙"),
("\\bfrakJ", u"𝕵"),
("\\ocommatopright", u"̕"),
("\\bfLambda", u"𝚲"),
("\\_k", u"ₖ"),
("\\biNu", u"𝜨"),
("\\perspcorrespond", u"⩞"),
("\\twoheadrightarrow", u"↠"),
("\\plustrif", u"⨨"),
("\\biV", u"𝑽"),
("\\pluseqq", u"⩲"),
("\\beta", u"β"),
("\\blkhorzoval", u"⬬"),
("\\bsanslambda", u"𝝺"),
("\\bfpartial", u"𝛛"),
("\\c", u"̧"),
("\\blackpointerleft", u"◄"),
("\\twonotes", u"♫"),
("\\mdblkdiamond", u"⬥"),
("\\ttnine", u"𝟿"),
("\\upwhitearrow", u"⇧"),
("\\bsansvarpi", u"𝞏"),
("\\itOmega", u"𝛺"),
("\\approxnotequal", u"≆"),
("\\bft", u"𝐭"),
("\\isansx", u"𝘹"),
("\\sqrt", u"√"),
("\\plusdot", u"⨥"),
("\\bigodot", u"⨀"),
("\\subsetneqq", u"⫋"),
("\\bsimilarleftarrow", u"⭁"),
("\\nvtwoheadrightarrowtail", u"⤗"),
("\\varTheta", u"ϴ"),
("\\bisansY", u"𝙔"),
("\\bbQ", u"ℚ"),
("\\neg", u"¬"),
("\\bscrx", u"𝔁"),
("\\bfxi", u"𝛏"),
("\\barwedge", u"⊼"),
("\\itmu", u"𝜇"),
("\\^m", u"ᵐ"),
("\\biLambda", u"𝜦"),
("\\sansL", u"𝖫"),
("\\ltcc", u"⪦"),
("\\medblackstar", u"⭑"),
("\\itU", u"𝑈"),
("\\Rightarrow", u"⇒"),
("\\bfkappa", u"𝛋"),
("\\leftbkarrow", u"⤌"),
("\\nvtwoheadleftarrowtail", u"⬼"),
("\\bsansvarkappa", u"𝞌"),
("\\scra", u"𝒶"),
("\\Cap", u"⋒"),
("\\itp", u"𝑝"),
("\\bsanss", u"𝘀"),
("\\^I", u"ᴵ"),
("\\aleph", u"ℵ"),
("\\bsansnabla", u"𝝯"),
("\\frakb", u"𝔟"),
("\\xi", u"ξ"),
("\\lessapprox", u"⪅"),
("\\bfz", u"𝐳"),
("\\ddddot", u"⃜"),
("\\bisansnu", u"𝞶"),
("\\_j", u"ⱼ"),
("\\longrightarrow", u"⟶"),
("\\bbW", u"𝕎"),
("\\to", u"→"),
("\\itPsi", u"𝛹"),
("\\varcarriagereturn", u"⏎"),
("\\dottedsquare", u"⬚"),
("\\tti", u"𝚒"),
("\\^o", u"ᵒ"),
("\\lmoustache", u"⎰"),
("\\^h", u"ʰ"),
("\\upoldkoppa", u"ϙ"),
("\\Equiv", u"≣"),
("\\pm", u"±"),
("\\scrN", u"𝒩"),
("\\ttr", u"𝚛"),
("\\prec", u"≺"),
("\\disjquant", u"⨈"),
("\\capwedge", u"⩄"),
("\\bfXi", u"𝚵"),
("\\accurrent", u"⏦"),
("\\draftingarrow", u"➛"),
("\\bbGamma", u"ℾ"),
("\\measuredangleleft", u"⦛"),
("\\DH", u"Ð"),
("\\circleonleftarrow", u"⬰"),
("\\bfJ", u"𝐉"),
("\\egsdot", u"⪘"),
("\\bsansS", u"𝗦"),
("\\ttU", u"𝚄"),
("\\bbv", u"𝕧"),
("\\bfraks", u"𝖘"),
("\\bsansLambda", u"𝝠"),
("\\itc", u"𝑐"),
("\\openbracketright", u"⟧"),
("\\vec", u"⃗"),
("\\bivartheta", u"𝝑"),
("\\ttC", u"𝙲"),
("\\bisansmu", u"𝞵"),
("\\downharpoonleft", u"⇃"),
("\\phi", u"ϕ"),
("\\sansw", u"𝗐"),
("\\bscrn", u"𝓷"),
("\\lozenge", u"◊"),
("\\circledparallel", u"⦷"),
("\\bfrake", u"𝖊"),
("\\L", u"Ł"),
("\\ni", u"∋"),
("\\nvleftrightarrow", u"⇹"),
("\\itF", u"𝐹"),
("\\ito", u"𝑜"),
("\\itvarkappa", u"𝜘"),
("\\vardiamondsuit", u"♦"),
("\\isansA", u"𝘈"),
("\\itUpsilon", u"𝛶"),
("\\itt", u"𝑡"),
("\\ttH", u"𝙷"),
("\\leftrightharpoonsdown", u"⥧"),
("\\invv", u"ʌ"),
("\\subseteqq", u"⫅"),
("\\profsurf", u"⌓"),
("\\_v", u"ᵥ"),
("\\blockfull", u"█"),
("\\bialpha", u"𝜶"),
("\\widebridgeabove", u"⃩"),
("\\sphericalangleup", u"⦡"),
("\\bfb", u"𝐛"),
("\\bisansTau", u"𝞣"),
("\\bsansAlpha", u"𝝖"),
("\\frakq", u"𝔮"),
("\\heartsuit", u"♡"),
("\\bsansvarphi", u"𝞍"),
("\\bsansH", u"𝗛"),
("\\circledstar", u"✪"),
("\\subset", u"⊂"),
("\\bsanstau", u"𝞃"),
("\\venus", u"♀"),
("\\supsetplus", u"⫀"),
("\\bsansP", u"𝗣"),
("\\thinspace", u" "),
("\\fallingdotseq", u"≒"),
("\\bisansr", u"𝙧"),
("\\partialmeetcontraction", u"⪣"),
("\\bivarepsilon", u"𝝐"),
("\\bbp", u"𝕡"),
("\\NotNestedLessLess", u"⪡̸"),
("\\bfL", u"𝐋"),
("\\sqlozenge", u"⌑"),
("\\squarecrossfill", u"▩"),
("\\frakC", u"ℭ"),
("\\approx", u"≈"),
("\\bscrO", u"𝓞"),
("\\ttfour", u"𝟺"),
("\\gtrless", u"≷"),
("\\ntriangleleft", u"⋪"),
("\\barvee", u"⊽"),
("\\invwhitelowerhalfcircle", u"◛"),
("\\partial", u"∂"),
("\\pluto", u"♇"),
("\\wedgedoublebar", u"⩠"),
("\\twoheadleftdbkarrow", u"⬷"),
("\\eqqslantgtr", u"⪜"),
("\\turnednot", u"⌙"),
("\\wedgedot", u"⟑"),
("\\kappa", u"κ"),
("\\similarleftarrow", u"⭉"),
("\\bsansu", u"𝘂"),
("\\rtlz", u"ʐ"),
("\\mdsmwhtsquare", u"◽"),
("\\frakx", u"𝔵"),
("\\lescc", u"⪨"),
("\\sanso", u"𝗈"),
("\\whthorzoval", u"⬭"),
("\\isansY", u"𝘠"),
("\\blocklefthalf", u"▌"),
("\\bisansU", u"𝙐"),
("\\bisansvarepsilon", u"𝟄"),
("\\sansq", u"𝗊"),
("\\vardoublebarwedge", u"⌆"),
("\\Vvdash", u"⊪"),
("\\invwhiteupperhalfcircle", u"◚"),
("\\hspace", u" "),
("\\sansE", u"𝖤"),
("\\rightdotarrow", u"⤑"),
("\\leqslant", u"⩽"),
("\\questeq", u"≟"),
("\\trnr", u"ɹ"),
("\\wp", u"℘"),
("\\bfomicron", u"𝛐"),
("\\frake", u"𝔢"),
("\\bsanschi", u"𝞆"),
("\\wedgeonwedge", u"⩕"),
("\\bisansgamma", u"𝞬"),
("\\ttO", u"𝙾"),
("\\fullouterjoin", u"⟗"),
("\\bscrD", u"𝓓"),
("\\isansj", u"𝘫"),
("\\bfrakS", u"𝕾"),
("\\lmrk", u"ː"),
("\\nHuparrow", u"⇞"),
("\\gamma", u"γ"),
("\\NotGreaterGreater", u"≫̸"),
("\\simgE", u"⪠"),
("\\bsansB", u"𝗕"),
("\\bsansX", u"𝗫"),
("\\bflambda", u"𝛌"),
("\\varstar", u"✶"),
("\\Doteq", u"≑"),
("\\ttD", u"𝙳"),
("\\LLeftarrow", u"⭅"),
("\\trnmlr", u"ɰ"),
("\\bbn", u"𝕟"),
("\\nsubset", u"⊄"),
("\\Equal", u"⩵"),
("\\varhexagon", u"⬡"),
("\\sansr", u"𝗋"),
("\\bsansMu", u"𝝡"),
("\\1/6", u"⅙"),
("\\bkarow", u"⤍"),
("\\bisanso", u"𝙤"),
("\\ttz", u"𝚣"),
("\\bfrakw", u"𝖜"),
("\\bscrq", u"𝓺"),
("\\intcup", u"⨚"),
("\\P", u"¶"),
("\\lneq", u"⪇"),
("\\bfrakF", u"𝕱"),
("\\^e", u"ᵉ"),
("\\napprox", u"≉"),
("\\sansA", u"𝖠"),
("\\ttT", u"𝚃"),
("\\^delta", u"ᵟ"),
("\\varheartsuit", u"♥"),
("\\ltquest", u"⩻"),
("\\bigstar", u"★"),
("\\bisansk", u"𝙠"),
("\\bsansGamma", u"𝝘"),
("\\lesseqgtr", u"⋚"),
("\\smeparsl", u"⧤"),
("\\smblkdiamond", u"⬩"),
("\\tdcol", u"⫶"),
("\\ngeq", u"≱"),
("\\varnothing", u"∅"),
("\\bisansx", u"𝙭"),
("\\rppolint", u"⨒"),
("\\supseteq", u"⊇"),
("\\cirfr", u"◑"),
("\\bisansG", u"𝙂"),
("\\eta", u"η"),
("\\bisansa", u"𝙖"),
("\\bsansU", u"𝗨"),
("\\varrho", u"ϱ"),
("\\itXi", u"𝛯"),
("\\neptune", u"♆"),
("\\^gamma", u"ᵞ"),
("\\lgblkcircle", u"⬤"),
("\\Vdash", u"⊩"),
("\\dots", u"…"),
("\\^G", u"ᴳ"),
("\\longleftrightarrow", u"⟷"),
("\\scrH", u"ℋ"),
("\\bipsi", u"𝝍"),
("\\Join", u"⨝"),
("\\gemini", u"♊"),
("\\isins", u"⋴"),
("\\bivarsigma", u"𝝇"),
("\\bff", u"𝐟"),
("\\bsansIota", u"𝝞"),
("\\_1", u"₁"),
("\\nsupset", u"⊅"),
("\\gescc", u"⪩"),
("\\supsetneqq", u"⫌"),
("\\threedangle", u"⟀"),
("\\sansB", u"𝖡"),
("\\lesdotor", u"⪃"),
("\\dotequiv", u"⩧"),
("\\bfvarrho", u"𝛠"),
("\\_o", u"ₒ"),
("\\bfrakG", u"𝕲"),
("\\biTau", u"𝜯"),
("\\bigtriangleup", u"△"),
("\\diagup", u"╱"),
("\\underbar", u"̲"),
("\\isansi", u"𝘪"),
("\\ttj", u"𝚓"),
("\\itL", u"𝐿"),
("\\rightwavearrow", u"↝"),
("\\nexists", u"∄"),
("\\nVleftarrowtail", u"⬺"),
("\\bimu", u"𝝁"),
("\\image", u"⊷"),
("\\underbrace", u"⏟"),
("\\circleurquadblack", u"◔"),
("\\ita", u"𝑎"),
("\\biEpsilon", u"𝜠"),
("\\bsansZeta", u"𝝛"),
("\\supdsub", u"⫘"),
("\\boxplus", u"⊞"),
("\\sansfour", u"𝟦"),
("\\^x", u"ˣ"),
("\\smashtimes", u"⨳"),
("\\Zeta", u"Ζ"),
("\\nisd", u"⋺"),
("\\biR", u"𝑹"),
("\\DDownarrow", u"⟱"),
("\\oplusrhrim", u"⨮"),
("\\biChi", u"𝜲"),
("\\ultriangle", u"◸"),
("\\bigamma", u"𝜸"),
("\\bsansG", u"𝗚"),
("\\OE", u"Œ"),
("\\circledcirc", u"⊚"),
("\\capricornus", u"♑"),
("\\diameter", u"⌀"),
("\\underbracket", u"⎵"),
("\\subsim", u"⫇"),
("\\bfN", u"𝐍"),
("\\house", u"⌂"),
("\\bisansJ", u"𝙅"),
("\\bsanstwo", u"𝟮"),
("\\bfpsi", u"𝛙"),
("\\towa", u"⤪"),
("\\itM", u"𝑀"),
("\\bsansv", u"𝘃"),
("\\schwa", u"ə"),
("\\in", u"∈"),
("\\bfe", u"𝐞"),
("\\bscrH", u"𝓗"),
("\\bfrakk", u"𝖐"),
("\\^0", u"⁰"),
("\\enclosecircle", u"⃝"),
("\\bscrQ", u"𝓠"),
("\\modtwosum", u"⨊"),
("\\chi", u"χ"),
("\\biiota", u"𝜾"),
("\\^alpha", u"ᵅ"),
("\\measanglerutone", u"⦨"),
("\\itD", u"𝐷"),
("\\sansLmirrored", u"⅃"),
("\\nvrightarrow", u"⇸"),
("\\biD", u"𝑫"),
("\\pes", u"₧"),
("\\bsansR", u"𝗥"),
("\\mapsdown", u"↧"),
("\\bsanspsi", u"𝞇"),
("\\imath", u"ı"),
("\\bfa", u"𝐚"),
("\\sanstwo", u"𝟤"),
("\\squareurquad", u"◳"),
("\\bfrakU", u"𝖀"),
("\\biZeta", u"𝜡"),
("\\bisanst", u"𝙩"),
("\\smwhtlozenge", u"⬫"),
("\\^V", u"ⱽ"),
("\\frakL", u"𝔏"),
("\\bbzero", u"𝟘"),
("\\rightthreearrows", u"⇶"),
("\\bsansxi", u"𝝽"),
("\\ttzero", u"𝟶"),
("\\scrU", u"𝒰"),
("\\bsansb", u"𝗯"),
("\\bfiota", u"𝛊"),
("\\bfrakg", u"𝖌"),
("\\QED", u"∎"),
("\\^b", u"ᵇ"),
("\\bisansl", u"𝙡"),
("\\boxbar", u"◫"),
("\\bbfive", u"𝟝"),
("\\Ldsh", u"↲"),
("\\bisansalpha", u"𝞪"),
("\\angdnr", u"⦟"),
("\\scrm", u"𝓂"),
("\\its", u"𝑠"),
("\\Xi", u"Ξ"),
("\\sansH", u"𝖧"),
("\\RightUpVectorBar", u"⥔"),
("\\veebar", u"⊻"),
("\\nsuccsim", u"≿̸"),
("\\itX", u"𝑋"),
("\\bsansN", u"𝗡"),
("\\bisansy", u"𝙮"),
("\\tto", u"𝚘"),
("\\tts", u"𝚜"),
("\\verymuchless", u"⋘"),
("\\bsanspi", u"𝝿"),
("\\frakr", u"𝔯"),
("\\leftdasharrow", u"⇠"),
("\\bfrakQ", u"𝕼"),
("\\rrbracket", u"⟧"),
("\\triangletimes", u"⨻"),
("\\dicei", u"⚀"),
("\\closedvarcup", u"⩌"),
("\\bbH", u"ℍ"),
("\\squarenwsefill", u"▧"),
("\\_gamma", u"ᵧ"),
("\\triangleq", u"≜"),
("\\lrtriangle", u"◿"),
("\\bfc", u"𝐜"),
("\\ogreaterthan", u"⧁"),
("\\congdot", u"⩭"),
("\\Beta", u"Β"),
("\\minusrdots", u"⨬"),
("\\bscrf", u"𝓯"),
("\\bisansSigma", u"𝞢"),
("\\ast", u"∗"),
("\\bigsqcup", u"⨆"),
("\\bsansq", u"𝗾"),
("\\bfbeta", u"𝛃"),
("\\bsansF", u"𝗙"),
("\\eqqplus", u"⩱"),
("\\bisansp", u"𝙥"),
("\\enclosesquare", u"⃞"),
("\\barleftarrow", u"⇤"),
("\\bscrr", u"𝓻"),
("\\isansN", u"𝘕"),
("\\bsansOmicron", u"𝝤"),
("\\ttsix", u"𝟼"),
("\\itLambda", u"𝛬"),
("\\nequiv", u"≢"),
("\\equivDD", u"⩸"),
("\\lat", u"⪫"),
("\\isansS", u"𝘚"),
("\\ttb", u"𝚋"),
("\\ncong", u"≇"),
("\\bbthree", u"𝟛"),
("\\^theta", u"ᶿ"),
("\\biM", u"𝑴"),
("\\Succ", u"⪼"),
("\\_schwa", u"ₔ"),
("\\Finv", u"Ⅎ"),
("\\ttf", u"𝚏"),
("\\bsansEta", u"𝝜"),
("\\_0", u"₀"),
("\\dddot", u"⃛"),
("\\scri", u"𝒾"),
("\\implies", u"⟹"),
("\\bfg", u"𝐠"),
("\\bfeta", u"𝛈"),
("\\itw", u"𝑤"),
("\\dotminus", u"∸"),
("\\bscrN", u"𝓝"),
("\\oint", u"∮"),
("\\bsanst", u"𝘁"),
("\\circlearrowleft", u"↺"),
("\\bscrE", u"𝓔"),
("\\blackinwhitediamond", u"◈"),
("\\diamondleftblack", u"⬖"),
("\\nHdownarrow", u"⇟"),
("\\bbJ", u"𝕁"),
("\\diamondsuit", u"♢"),
("\\frakg", u"𝔤"),
("\\isansO", u"𝘖"),
("\\bsansL", u"𝗟"),
("\\bsansnu", u"𝝼"),
("\\nLeftarrow", u"⇍"),
("\\bie", u"𝒆"),
("\\smalltriangleleft", u"◃"),
("\\rightleftharpoonsdown", u"⥩"),
("\\acute", u"́"),
("\\llbracket", u"⟦"),
("\\UUparrow", u"⟰"),
("\\Nearrow", u"⇗"),
("\\biu", u"𝒖"),
("\\bsansl", u"𝗹"),
("\\bigtriangledown", u"▽"),
("\\bfphi", u"𝛟"),
("\\Longleftarrow", u"⟸"),
("\\nsucc", u"⊁"),
("\\square", u"□"),
("\\succ", u"≻"),
("\\circledrightdot", u"⚆"),
("\\bfd", u"𝐝"),
("\\sansh", u"𝗁"),
("\\bbgamma", u"ℽ"),
("\\isansv", u"𝘷"),
("\\biomicron", u"𝝄"),
("\\bisansIota", u"𝞘"),
("\\bbT", u"𝕋"),
("\\scrC", u"𝒞"),
("\\pscrv", u"ʋ"),
("\\bsansdelta", u"𝝳"),
("\\neovnwarrow", u"⤱"),
("\\isanso", u"𝘰"),
("\\twoheadmapsto", u"⤅"),
("\\langle", u"⟨"),
("\\DownRightVectorBar", u"⥗"),
("\\Longmapsfrom", u"⟽"),
("\\Yup", u"⅄"),
("\\scrZ", u"𝒵"),
("\\itvarrho", u"𝜚"),
("\\clubsuit", u"♣"),
("\\elsdot", u"⪗"),
("\\Stigma", u"Ϛ"),
("\\biEta", u"𝜢"),
("\\xor", u"⊻"),
("\\rightangle", u"∟"),
("\\backsim", u"∽"),
("\\5/8", u"⅝"),
("\\minhat", u"⩟"),
("\\isansJ", u"𝘑"),
("\\bfmu", u"𝛍"),
("\\bsansC", u"𝗖"),
("\\downdownarrows", u"⇊"),
("\\measeq", u"≞"),
("\\^f", u"ᶠ"),
("\\lowint", u"⨜"),
("\\emptyset", u"∅"),
("\\sansM", u"𝖬"),
("\\varphi", u"φ"),
("\\bsansp", u"𝗽"),
("\\blacklozenge", u"⧫"),
("\\Tau", u"Τ"),
("\\itAlpha", u"𝛢"),
("\\itvarphi", u"𝜙"),
("\\bisansn", u"𝙣"),
("\\looparrowleft", u"↫"),
("\\isansV", u"𝘝"),
("\\nVtwoheadrightarrow", u"⤁"),
("\\ttp", u"𝚙"),
("\\beth", u"ℶ"),
("\\isansX", u"𝘟"),
("\\itj", u"𝑗"),
("\\sansj", u"𝗃"),
("\\nsim", u"≁"),
("\\ocirc", u"̊"),
("\\div", u"÷"),
("\\sansJ", u"𝖩"),
("\\bfrakt", u"𝖙"),
("\\itpi", u"𝜋"),
("\\sansG", u"𝖦"),
("\\longmapsfrom", u"⟻"),
("\\_-", u"₋"),
("\\bfsigma", u"𝛔"),
("\\squarehvfill", u"▦"),
("\\bfv", u"𝐯"),
("\\leftrightharpoonupdown", u"⥊"),
("\\turnk", u"ʞ"),
("\\bigcupdot", u"⨃"),
("\\And", u"⩓"),
("\\itE", u"𝐸"),
("\\bisansTheta", u"𝞗"),
("\\bbsum", u"⅀"),
("\\iiint", u"∭"),
("\\threeunderdot", u"⃨"),
("\\frakF", u"𝔉"),
("\\lvboxline", u"⎸"),
("\\bscrC", u"𝓒"),
("\\cancer", u"♋"),
("\\midbarwedge", u"⩜"),
("\\sqcup", u"⊔"),
("\\lrblacktriangle", u"◢"),
("\\Longrightarrow", u"⟹"),
("\\bikappa", u"𝜿"),
("\\subsetneq", u"⊊"),
("\\itBeta", u"𝛣"),
("\\ovhook", u"̉"),
("\\equalleftarrow", u"⭀"),
("\\bscrg", u"𝓰"),
("\\enclosetriangle", u"⃤"),
("\\dagger", u"†"),
("\\supsetdot", u"⪾"),
("\\frakf", u"𝔣"),
("\\scrI", u"ℐ"),
("\\rightouterjoin", u"⟖"),
("\\bfrakZ", u"𝖅"),
("\\twoheadmapsfrom", u"⬶"),
("\\bbf", u"𝕗"),
("\\itP", u"𝑃"),
("\\bsansalpha", u"𝝰"),
("\\bisansE", u"𝙀"),
("\\binu", u"𝝂"),
("\\itz", u"𝑧"),
("\\^g", u"ᵍ"),
("\\Sqcup", u"⩏"),
("\\biq", u"𝒒"),
("\\scrO", u"𝒪"),
("\\bfrakI", u"𝕴"),
("\\isansa", u"𝘢"),
("\\bfOmicron", u"𝚶"),
("\\leftwavearrow", u"↜"),
("\\notlessgreater", u"≸"),
("\\rightrightarrows", u"⇉"),
("\\DownRightTeeVector", u"⥟"),
("\\supsetapprox", u"⫊"),
("\\ttP", u"𝙿"),
("\\allequal", u"≌"),
("\\bfV", u"𝐕"),
("\\del", u"∇"),
("\\blackpointerright", u"►"),
("\\3/5", u"⅗"),
("\\bbC", u"ℂ"),
("\\female", u"♀"),
("\\cdotp", u"·"),
("\\bfvarphi", u"𝛗"),
("\\bsansc", u"𝗰"),
("\\bfnabla", u"𝛁"),
("\\^T", u"ᵀ"),
("\\itOmicron", u"𝛰"),
("\\capdot", u"⩀"),
("\\biY", u"𝒀"),
("\\italpha", u"𝛼"),
("\\ntrianglerighteq", u"⋭"),
("\\notbackslash", u"⍀"),
("\\nni", u"∌"),
("\\blacktriangle", u"▴"),
("\\mdblkcircle", u"⚫"),
("\\saturn", u"♄"),
("\\DownLeftRightVector", u"⥐"),
("\\ordmasculine", u"º"),
("\\curlyeqsucc", u"⋟"),
("\\bsansBeta", u"𝝗"),
("\\DownLeftTeeVector", u"⥞"),
("\\rdiagovfdiag", u"⤫"),
("\\mapsto", u"↦"),
("\\veemidvert", u"⩛"),
("\\^R", u"ᴿ"),
("\\maltese", u"✠"),
("\\rightarrowdiamond", u"⤞"),
("\\bfsix", u"𝟔"),
("\\leftouterjoin", u"⟕"),
("\\hslash", u"ℏ"),
("\\bisanszeta", u"𝞯"),
("\\bbid", u"ⅆ"),
("\\nVleftarrow", u"⇺"),
("\\circleonrightarrow", u"⇴"),
("\\bfraki", u"𝖎"),
("\\ttY", u"𝚈"),
("\\blockhalfshaded", u"▒"),
("\\brokenbar", u"¦"),
("\\blacksquare", u"■"),
("\\mdlgblkdiamond", u"◆"),
("\\circlellquad", u"◵"),
("\\upuparrows", u"⇈"),
("\\taurus", u"♉"),
("\\planck", u"ℎ"),
("\\bisansi", u"𝙞"),
("\\frakW", u"𝔚"),
("\\bbd", u"𝕕"),
("\\bsansRho", u"𝝦"),
("\\bfq", u"𝐪"),
("\\vDash", u"⊨"),
("\\conjquant", u"⨇"),
("\\4/5", u"⅘"),
("\\biPi", u"𝜫"),
("\\varclubsuit", u"♧"),
("\\bscrX", u"𝓧"),
("\\sim", u"∼"),
("\\bisanspi", u"𝞹"),
("\\^8", u"⁸"),
("\\RuleDelayed", u"⧴"),
("\\^p", u"ᵖ"),
("\\scrJ", u"𝒥"),
("\\sum", u"∑"),
("\\bfepsilon", u"𝛆"),
("\\rightarrowbsimilar", u"⭌"),
("\\aquarius", u"♒"),
("\\sansS", u"𝖲"),
("\\ggg", u"⋙"),
("\\uranus", u"♅"),
("\\biepsilon", u"𝜺"),
("\\isinvb", u"⋸"),
("\\rightthreetimes", u"⋌"),
("\\oturnedcomma", u"̒"),
("\\bscrR", u"𝓡"),
("\\O", u"Ø"),
("\\bfvarepsilon", u"𝛜"),
("\\nbumpeq", u"≏̸"),
("\\dashv", u"⊣"),
("\\bbie", u"ⅇ"),
("\\curlywedge", u"⋏"),
("\\tth", u"𝚑"),
("\\itTau", u"𝛵"),
("\\mdlgwhtdiamond", u"◇"),
("\\itk", u"𝑘"),
("\\biZ", u"𝒁"),
("\\biGamma", u"𝜞"),
("\\bsansKappa", u"𝝟"),
("\\underleftharpoondown", u"⃭"),
("\\gg", u"≫"),
("\\circleulquad", u"◴"),
("\\ngtrsim", u"≵"),
("\\_s", u"ₛ"),
("\\smwhitestar", u"⭒"),
("\\bfrakB", u"𝕭"),
("\\glj", u"⪤"),
("\\sqsupset", u"⊐"),
("\\frakt", u"𝔱"),
("\\nprec", u"⊀"),
("\\_n", u"ₙ"),
("\\diamond", u"⋄"),
("\\Lap", u"⧊"),
("\\otimesrhrim", u"⨵"),
("\\leftrightarrows", u"⇆"),
("\\LeftUpTeeVector", u"⥠"),
("\\itphi", u"𝜑"),
("\\hexagon", u"⎔"),
("\\biSigma", u"𝜮"),
("\\eighthnote", u"♪"),
("\\risingdotseq", u"≓"),
("\\RightUpTeeVector", u"⥜"),
("\\bbrktbrk", u"⎶"),
("\\^(", u"⁽"),
("\\1/2", u"½"),
("\\bfEpsilon", u"𝚬"),
("\\iint", u"∬"),
("\\nleqslant", u"⩽̸"),
("\\leftrightarrowtriangle", u"⇿"),
("\\squarelrquad", u"◲"),
("\\measanglerdtose", u"⦪"),
("\\eparsl", u"⧣"),
("\\nprecsim", u"≾̸"),
("\\btimes", u"⨲"),
("\\bia", u"𝒂"),
("\\bisansLambda", u"𝞚"),
("\\oe", u"œ"),
("\\forall", u"∀"),
("\\bbl", u"𝕝"),
("\\ttu", u"𝚞"),
("\\bisansDelta", u"𝞓"),
("\\bfDigamma", u"𝟊"),
("\\rightarrowtriangle", u"⇾"),
("\\bbw", u"𝕨"),
("\\leftarrowx", u"⬾"),
("\\bba", u"𝕒"),
("\\supset", u"⊃"),
("\\supsim", u"⫈"),
("\\bfrakP", u"𝕻"),
("\\ordfeminine", u"ª"),
("\\equiv", u"≡"),
("\\sharp", u"♯"),
("\\bsansY", u"𝗬"),
("\\sbrhr", u"˒"),
("\\_2", u"₂"),
("\\bbo", u"𝕠"),
("\\epsilon", u"ϵ"),
("\\Nwarrow", u"⇖"),
("\\bfMu", u"𝚳"),
("\\bsansmu", u"𝝻"),
("\\itlambda", u"𝜆"),
("\\isansE", u"𝘌"),
("\\ae", u"æ"),
("\\nrleg", u"ƞ"),
("\\infty", u"∞"),
("\\dualmap", u"⧟"),
("\\_=", u"₌"),
("\\eqgtr", u"⋝"),
("\\bigotimes", u"⨂"),
("\\bsansn", u"𝗻"),
("\\nvleftarrow", u"⇷"),
("\\Swarrow", u"⇙"),
("\\vrecto", u"▯"),
("\\isinE", u"⋹"),
("\\leftharpoonaccent", u"⃐"),
("\\bbb", u"𝕓"),
("\\inversewhitecircle", u"◙"),
("\\commaminus", u"⨩"),
("\\bisansf", u"𝙛"),
("\\whitearrowupfrombar", u"⇪"),
("\\bisansChi", u"𝞦"),
("\\btdl", u"ɬ"),
("\\vrectangleblack", u"▮"),
("\\bsansO", u"𝗢"),
("\\scrQ", u"𝒬"),
("\\eqslantgtr", u"⪖"),
("\\strike", u"̶"),
("\\smblksquare", u"▪"),
("\\scpolint", u"⨓"),
("\\sansK", u"𝖪"),
("\\lrcorner", u"⌟"),
("\\bsansV", u"𝗩"),
("\\birho", u"𝝆"),
("\\nwarrow", u"↖"),
("\\bfzero", u"𝟎"),
("\\^M", u"ᴹ"),
("\\_p", u"ₚ"),
("\\simlE", u"⪟"),
("\\_8", u"₈"),
("\\scry", u"𝓎"),
("\\bfvartheta", u"𝛝"),
("\\leqq", u"≦"),
("\\lfloor", u"⌊"),
("\\cirfb", u"◒"),
("\\bit", u"𝒕"),
("\\bscrk", u"𝓴"),
("\\urtriangle", u"◹"),
("\\rightsquigarrow", u"⇝"),
("\\leftarrow", u"←"),
("\\sphericalangle", u"∢"),
("\\revemptyset", u"⦰"),
("\\nVtwoheadleftarrowtail", u"⬽"),
("\\biP", u"𝑷"),
("\\rLarr", u"⥄"),
("\\boxtimes", u"⊠"),
("\\ttm", u"𝚖"),
("\\_i", u"ᵢ"),
("\\dottedcircle", u"◌"),
("\\bisansvarsigma", u"𝞻"),
("\\hkswarow", u"⤦"),
("\\bfl", u"𝐥"),
("\\gtquest", u"⩼"),
("\\bisansq", u"𝙦"),
("\\frakK", u"𝔎"),
("\\^r", u"ʳ"),
("\\wedgeodot", u"⩑"),
("\\isansC", u"𝘊"),
("\\interleave", u"⫴"),
("\\^B", u"ᴮ"),
("\\doublebarvee", u"⩢"),
("\\circledwhitebullet", u"⦾"),
("\\complement", u"∁"),
("\\biL", u"𝑳"),
("\\itEta", u"𝛨"),
("\\ttw", u"𝚠"),
("\\bfzeta", u"𝛇"),
("\\isansG", u"𝘎"),
("\\simless", u"⪝"),
("\\biOmega", u"𝜴"),
("\\sqsubsetneq", u"⋤"),
("\\Ddownarrow", u"⤋"),
("\\sansx", u"𝗑"),
("\\bih", u"𝒉"),
("\\measanglelutonw", u"⦩"),
("\\_rho", u"ᵨ"),
("\\neqsim", u"≂̸"),
("\\smwhtsquare", u"▫"),
("\\itIota", u"𝛪"),
("\\leftarrowapprox", u"⭊"),
("\\upMu", u"Μ"),
("\\nVrightarrow", u"⇻"),
("\\bscrU", u"𝓤"),
("\\Koppa", u"Ϟ"),
("\\itnu", u"𝜈"),
("\\isansq", u"𝘲"),
("\\minusfdots", u"⨫"),
("\\gggnest", u"⫸"),
("\\angle", u"∠"),
("\\bfF", u"𝐅"),
("\\diceiv", u"⚃"),
("\\scrT", u"𝒯"),
("\\itSigma", u"𝛴"),
("\\Eta", u"Η"),
("\\ll", u"≪"),
("\\underrightarrow", u"⃯"),
("\\vartriangle", u"▵"),
("\\bfrakv", u"𝖛"),
("\\Leftarrow", u"⇐"),
("\\lrtriangleeq", u"⧡"),
("\\asymp", u"≍"),
("\\defas", u"⧋"),
("\\varointclockwise", u"∲"),
("\\bscrL", u"𝓛"),
("\\times", u"×"),
("\\wr", u"≀"),
("\\twoheadrightarrowtail", u"⤖"),
("\\bsansiota", u"𝝸"),
("\\llblacktriangle", u"◣"),
("\\_u", u"ᵤ"),
("\\ddotseq", u"⩷"),
("\\lltriangle", u"◺"),
("\\ss", u"ß"),
("\\bii", u"𝒊"),
("\\upOmicron", u"Ο"),
("\\boxupcaret", u"⍓"),
("\\suphsub", u"⫗"),
("\\approxeqq", u"⩰"),
("\\intcap", u"⨙"),
("\\^k", u"ᵏ"),
("\\openbracketleft", u"⟦"),
("\\circleurquad", u"◷"),
("\\gtcc", u"⪧"),
("\\droang", u"̚"),
("\\simgtr", u"⪞"),
("\\isansK", u"𝘒"),
("\\bfGamma", u"𝚪"),
("\\leftarrowbackapprox", u"⭂"),
("\\barrightarrowdiamond", u"⤠"),
("\\triangleleft", u"◁"),
("\\nvdash", u"⊬"),
("\\biv", u"𝒗"),
("\\bfTheta", u"𝚯"),
("\\bscrS", u"𝓢"),
("\\bisansm", u"𝙢"),
("\\dotsminusdots", u"∺"),
("\\ttg", u"𝚐"),
("\\oiiint", u"∰"),
("\\scrV", u"𝒱"),
("\\questiondown", u"¿"),
("\\forkv", u"⫙"),
("\\updownarrowbar", u"↨"),
("\\upepsilon", u"ε"),
("\\itN", u"𝑁"),
("\\pertenthousand", u"‱"),
("\\precnsim", u"⋨"),
("\\profline", u"⌒"),
("\\bfdigamma", u"𝟋"),
("\\itupsilon", u"𝜐"),
("\\reapos", u"‛"),
("\\ttA", u"𝙰"),
("\\sansfive", u"𝟧"),
("\\succneq", u"⪲"),
("\\UpEquilibrium", u"⥮"),
("\\ttk", u"𝚔"),
("\\biC", u"𝑪"),
("\\scrE", u"ℰ"),
("\\bfrakd", u"𝖉"),
("\\underleftrightarrow", u"͍"),
("\\ttQ", u"𝚀"),
("\\aries", u"♈"),
("\\intercal", u"⊺"),
("\\bbs", u"𝕤"),
("\\Zbar", u"Ƶ"),
("\\emptysetoarrl", u"⦴"),
("\\itMu", u"𝛭"),
("\\ntrianglelefteq", u"⋬"),
("\\bigvee", u"⋁"),
("\\minus", u"−"),
("\\nleftarrow", u"↚"),
("\\scrL", u"ℒ"),
("\\bfp", u"𝐩"),
("\\itvarepsilon", u"𝜖"),
("\\mho", u"℧"),
("\\bigwedge", u"⋀"),
("\\benzenr", u"⏣"),
("\\submult", u"⫁"),
("\\sanss", u"𝗌"),
("\\sinewave", u"∿"),
("\\bbii", u"ⅈ"),
("\\leftrightarrowcircle", u"⥈"),
("\\diamondbotblack", u"⬙"),
("\\fourthroot", u"∜"),
("\\curlyvee", u"⋎"),
("\\eth", u"ð"),
("\\itK", u"𝐾"),
("\\bisansF", u"𝙁"),
("\\bsolhsub", u"⟈"),
("\\bisansQ", u"𝙌"),
("\\lsime", u"⪍"),
("\\lesseqqgtr", u"⪋"),
("\\dshfnc", u"┆"),
("\\blackcircledrightdot", u"⚈"),
("\\bbu", u"𝕦"),
("\\bsansXi", u"𝝣"),
("\\bsansf", u"𝗳"),
("\\bsansj", u"𝗷"),
("\\itI", u"𝐼"),
("\\scorpio", u"♏"),
("\\upharpoonright", u"↾"),
("\\circlelrquad", u"◶"),
("\\twocups", u"⩊"),
("\\ttx", u"𝚡"),
("\\bisanspartial", u"𝟃"),
("\\lessdot", u"⋖"),
("\\subsetplus", u"⪿"),
("\\bfP", u"𝐏"),
("\\sansm", u"𝗆"),
("\\blackcircledtwodots", u"⚉"),
("\\bfk", u"𝐤"),
("\\bsansvartheta", u"𝞋"),
("\\rsolbar", u"⧷"),
("\\sqfr", u"◨"),
("\\increment", u"∆"),
("\\sansa", u"𝖺"),
("\\bftwo", u"𝟐"),
("\\ttseven", u"𝟽"),
("\\isansI", u"𝘐"),
("\\leftrightharpoonsup", u"⥦"),
("\\fraky", u"𝔶"),
("\\nVtwoheadrightarrowtail", u"⤘"),
("\\ttX", u"𝚇"),
("\\bisanstau", u"𝞽"),
("\\otimeshat", u"⨶"),
("\\1/5", u"⅕"),
("\\ularc", u"◜"),
("\\bsansNu", u"𝝢"),
("\\_6", u"₆"),
("\\coprod", u"∐"),
("\\sansF", u"𝖥"),
("\\aa", u"å"),
("\\1/9", u"⅑"),
("\\dblarrowupdown", u"⇅"),
("\\isanst", u"𝘵"),
("\\^epsilon", u"ᵋ"),
("\\cupdot", u"⊍"),
("\\Lsh", u"↰"),
("\\itnabla", u"𝛻"),
("\\trianglelefteq", u"⊴"),
("\\scurel", u"⊱"),
("\\emptysetoarr", u"⦳"),
("\\degree", u"°"),
("\\xrat", u"℞"),
("\\dicevi", u"⚅"),
("\\triangleminus", u"⨺"),
("\\ddot", u"̈"),
("\\topbot", u"⌶"),
("\\updownharpoonleftright", u"⥍"),
("\\blacklefthalfcircle", u"◖"),
("\\hexagonblack", u"⬣"),
("\\fraka", u"𝔞"),
("\\bbone", u"𝟙"),
("\\isansR", u"𝘙"),
("\\ddagger", u"‡"),
("\\scrA", u"𝒜"),
("\\bscrc", u"𝓬"),
("\\itvarpi", u"𝜛"),
("\\bigcap", u"⋂"),
("\\itW", u"𝑊"),
("\\bibeta", u"𝜷"),
("\\gesles", u"⪔"),
("\\circeq", u"≗"),
("\\Phi", u"Φ"),
("\\guilsinglleft", u"‹"),
("\\bisansA", u"𝘼"),
("\\AE", u"Æ"),
("\\check", u"̌"),
("\\hlmrk", u"ˑ"),
("\\intx", u"⨘"),
("\\bfNu", u"𝚴"),
("\\parallelogram", u"▱"),
("\\Upsilon", u"Υ"),
("\\bsansD", u"𝗗"),
("\\arceq", u"≘"),
("\\LeftUpDownVector", u"⥑"),
("\\Dashv", u"⫤"),
("\\sansb", u"𝖻"),
("\\overbracket", u"⎴"),
("\\sanse", u"𝖾"),
("\\bfchi", u"𝛘"),
("\\ltimes", u"⋉"),
("\\bbseven", u"𝟟"),
("\\leftrightarrow", u"↔"),
("\\biI", u"𝑰"),
("\\^P", u"ᴾ"),
("\\rightarrowbackapprox", u"⭈"),
("\\varspadesuit", u"♤"),
("\\bfSigma", u"𝚺"),
("\\Mapsto", u"⤇"),
("\\bbg", u"𝕘"),
("\\bivarpi", u"𝝕"),
("\\Prec", u"⪻"),
("\\mid", u"∣"),
("\\subsetapprox", u"⫉"),
("\\varisinobar", u"⋶"),
("\\sansT", u"𝖳"),
("\\bfU", u"𝐔"),
("\\bivarrho", u"𝝔"),
("\\tricolon", u"⁝"),
("\\hookleftarrow", u"↩"),
("\\sqfnw", u"┙"),
("\\LeftTriangleBar", u"⧏"),
("\\isansD", u"𝘋"),
("\\bisansK", u"𝙆"),
("\\^=", u"⁼"),
("\\bfDelta", u"𝚫"),
("\\swarrow", u"↙"),
("\\wideangleup", u"⦧"),
("\\bar", u"̄"),
("\\csube", u"⫑"),
("\\bfrako", u"𝖔"),
("\\rtlt", u"ʈ"),
("\\natural", u"♮"),
("\\mp", u"∓"),
("\\niobar", u"⋾"),
("\\invnot", u"⌐"),
("\\biK", u"𝑲"),
("\\biQ", u"𝑸"),
("\\subseteq", u"⊆"),
("\\squareneswfill", u"▨"),
("\\isansT", u"𝘛"),
("\\postalmark", u"〒"),
("\\NG", u"Ŋ"),
("\\LeftRightVector", u"⥎"),
("\\frakc", u"𝔠"),
("\\biG", u"𝑮"),
("\\bfrakW", u"𝖂"),
("\\bfrakN", u"𝕹"),
("\\ittheta", u"𝜃"),
("\\gtreqqless", u"⪌"),
("\\isansr", u"𝘳"),
("\\bisansT", u"𝙏"),
("\\Longleftrightarrow", u"⟺"),
("\\tildelow", u"˜"),
("\\vdots", u"⋮"),
("\\dashrightharpoondown", u"⥭"),
("\\^H", u"ᴴ"),
("\\bigbot", u"⟘"),
("\\gimel", u"ℷ"),
("\\iiiint", u"⨌"),
("\\bip", u"𝒑"),
("\\varbarwedge", u"⌅"),
("\\twoheadleftarrowtail", u"⬻"),
("\\ttq", u"𝚚"),
("\\smte", u"⪬"),
("\\bisansRho", u"𝞠"),
("\\bisansepsilon", u"𝞮"),
("\\whitepointerright", u"▻"),
("\\nvLeftrightarrow", u"⤄"),
("\\TH", u"Þ"),
("\\mapsup", u"↥"),
("\\revangleubar", u"⦥"),
("\\scrz", u"𝓏"),
("\\clwintegral", u"∱"),
("\\sansthree", u"𝟥"),
("\\_9", u"₉"),
("\\circledbullet", u"⦿"),
("\\ttN", u"𝙽"),
("\\bisansv", u"𝙫"),
("\\Lleftarrow", u"⇚"),
("\\bfdelta", u"𝛅"),
("\\hat", u"̂"),
("\\nearrow", u"↗"),
("\\vartheta", u"ϑ"),
("\\bscrP", u"𝓟"),
("\\varnis", u"⋻"),
("\\Iota", u"Ι"),
("\\varlrtriangle", u"⊿"),
("\\bfy", u"𝐲"),
("\\backpprime", u"‶"),
("\\th", u"þ"),
("\\frakX", u"𝔛"),
("\\itS", u"𝑆"),
("\\bscrZ", u"𝓩"),
("\\kernelcontraction", u"∻"),
("\\ntriangleright", u"⋫"),
("\\frakT", u"𝔗"),
("\\Searrow", u"⇘"),
("\\tte", u"𝚎"),
("\\bfrakz", u"𝖟"),
("\\breve", u"̆"),
("\\angleubar", u"⦤"),
("\\hatapprox", u"⩯"),
("\\bisansEta", u"𝞖"),
("\\itvartheta", u"𝜗"),
("\\trnt", u"ʇ"),
("\\closedvarcupsmashprod", u"⩐"),
("\\succeq", u"⪰"),
("\\isansU", u"𝘜"),
("\\enspace", u" "),
("\\itq", u"𝑞"),
("\\nwovnearrow", u"⤲"),
("\\isansQ", u"𝘘"),
("\\bsansfive", u"𝟱"),
("\\bigcirc", u"○"),
("\\suphsol", u"⟉"),
("\\plushat", u"⨣"),
("\\bisansZ", u"𝙕"),
("\\sigma", u"σ"),
("\\itvarTheta", u"𝛳"),
("\\leftharpoonup", u"↼"),
("\\bisansOmicron", u"𝞞"),
("\\^L", u"ᴸ"),
("\\^w", u"ʷ"),
("\\int", u"∫"),
("\\curlyeqprec", u"⋞"),
("\\barleftarrowrightarrowbar", u"↹"),
("\\rightwhitearrow", u"⇨"),
("\\rightarrowplus", u"⥅"),
("\\bbA", u"𝔸"),
("\\sansY", u"𝖸"),
("\\bsansrho", u"𝞀"),
("\\sqrtbottom", u"⎷"),
("\\bscrv", u"𝓿"),
("\\nVrightarrowtail", u"⤕"),
("\\neovsearrow", u"⤮"),
("\\bscrW", u"𝓦"),
("\\Leftrightarrow", u"⇔"),
("\\rightharpoonsupdown", u"⥤"),
("\\lceil", u"⌈"),
("\\UpArrowBar", u"⤒"),
("\\bfAlpha", u"𝚨"),
("\\bfi", u"𝐢"),
("\\bfrakH", u"𝕳"),
("\\ne", u"≠"),
("\\varsubsetneqq", u"⊊︀"),
("\\bfs", u"𝐬"),
("\\bullet", u"•"),
("\\bfrakf", u"𝖋"),
("\\^+", u"⁺"),
("\\itpsi", u"𝜓"),
("\\lgE", u"⪑"),
("\\bffive", u"𝟓"),
("\\trnh", u"ɥ"),
("\\boxbslash", u"⧅"),
("\\equalparallel", u"⋕"),
("\\cirfnint", u"⨐"),
("\\biz", u"𝒛"),
("\\subedot", u"⫃"),
("\\bbi", u"𝕚"),
("\\itRho", u"𝛲"),
("\\nLeftrightarrow", u"⇎"),
("\\itepsilon", u"𝜀"),
("\\itomega", u"𝜔"),
("\\dashleftharpoondown", u"⥫"),
("\\hrectangle", u"▭"),
("\\bbM", u"𝕄"),
("\\ttd", u"𝚍"),
("\\bsansM", u"𝗠"),
("\\bfX", u"𝐗"),
("\\bisansvarkappa", u"𝟆"),
("\\itkappa", u"𝜅"),
("\\precnapprox", u"⪹"),
("\\scrj", u"𝒿"),
("\\nsqsupseteq", u"⋣"),
("\\precapprox", u"⪷"),
("\\pentagonblack", u"⬟"),
("\\curvearrowright", u"↷"),
("\\bfrakA", u"𝕬"),
("\\sqspne", u"⋥"),
("\\bfm", u"𝐦"),
("\\bisansW", u"𝙒"),
("\\top", u"⊤"),
("\\sansi", u"𝗂"),
("\\bbZ", u"ℤ"),
("\\itiota", u"𝜄"),
("\\blacktriangledown", u"▾"),
("\\squoval", u"▢"),
("\\rasp", u"ʼ"),
("\\downharpoonright", u"⇂"),
("\\bsanssix", u"𝟲"),
("\\sqsubset", u"⊏"),
("\\squarellblack", u"⬕"),
("\\underrightharpoondown", u"⃬"),
("\\succsim", u"≿"),
("\\dashV", u"⫣"),
("\\itzeta", u"𝜁"),
("\\emdash", u"—"),
("\\biA", u"𝑨"),
("\\itPhi", u"𝛷"),
("\\biJ", u"𝑱"),
("\\bisansOmega", u"𝞨"),
("\\scrM", u"ℳ"),
("\\bin", u"𝒏"),
("\\delta", u"δ"),
("\\rfloor", u"⌋"),
("\\eqslantless", u"⪕"),
("\\twoheaduparrow", u"↟"),
("\\bsansJ", u"𝗝"),
("\\bisansM", u"𝙈"),
("\\^phi", u"ᵠ"),
("\\bisansd", u"𝙙"),
("\\^i", u"ⁱ"),
("\\eqcolon", u"≕"),
("\\bfvarsigma", u"𝛓"),
("\\bfrakM", u"𝕸"),
("\\nsubseteq", u"⊈"),
("\\bisansomega", u"𝟂"),
("\\wedge", u"∧"),
("\\^N", u"ᴺ"),
("\\cdots", u"⋯"),
("\\7/8", u"⅞"),
("\\smblklozenge", u"⬪"),
("\\spadesuit", u"♠"),
("\\bfChi", u"𝚾"),
("\\trapezium", u"⏢"),
("\\bullseye", u"◎"),
("\\scro", u"ℴ"),
("\\tildetrpl", u"≋"),
("\\leftrightsquigarrow", u"↭"),
("\\^t", u"ᵗ"),
("\\dbkarow", u"⤏"),
("\\Sigma", u"Σ"),
("\\longmapsto", u"⟼"),
("\\DownArrowUpArrow", u"⇵"),
("\\intbar", u"⨍"),
("\\nasymp", u"≭"),
("\\npreceq", u"⪯̸"),
("\\isansM", u"𝘔"),
("\\bbnine", u"𝟡"),
("\\itdelta", u"𝛿"),
("\\virgo", u"♍"),
("\\bfPhi", u"𝚽"),
("\\isansL", u"𝘓"),
("\\boxast", u"⧆"),
("\\bbV", u"𝕍"),
("\\sansc", u"𝖼"),
("\\bfUpsilon", u"𝚼"),
("\\csupe", u"⫒"),
("\\drbkarrow", u"⤐"),
("\\bfrakC", u"𝕮"),
("\\bscrl", u"𝓵"),
("\\ell", u"ℓ"),
("\\bfrakY", u"𝖄"),
("\\squaretopblack", u"⬒"),
("\\sansN", u"𝖭"),
("\\isansB", u"𝘉"),
("\\bisansPsi", u"𝞧"),
("\\tau", u"τ"),
("\\Vvert", u"⦀"),
("\\circledS", u"Ⓢ"),
("\\bsanse", u"𝗲"),
("\\bbtwo", u"𝟚"),
("\\boxdot", u"⊡"),
("\\sanszero", u"𝟢"),
("\\twoheadleftarrow", u"↞"),
("\\barovernorthwestarrow", u"↸"),
("\\mdwhtlozenge", u"⬨"),
("\\bsansE", u"𝗘"),
("\\bscrd", u"𝓭"),
("\\Uparrow", u"⇑"),
("\\squarevfill", u"▥"),
("\\bfeight", u"𝟖"),
("\\LeftVectorBar", u"⥒"),
("\\_7", u"₇"),
("\\rightdasharrow", u"⇢"),
("\\itO", u"𝑂"),
("\\sanst", u"𝗍"),
("\\bisansu", u"𝙪"),
("\\isansz", u"𝘻"),
("\\sansO", u"𝖮"),
("\\smwhtcircle", u"◦"),
("\\nolinebreak", u""),
("\\rangle", u"⟩"),
("\\rightarrowgtr", u"⭃"),
("\\libra", u"♎"),
("\\Lambda", u"Λ"),
("\\esh", u"ʃ"),
("\\ttfive", u"𝟻"),
("\\dsol", u"⧶"),
("\\sqsupseteq", u"⊒"),
("\\_r", u"ᵣ"),
("\\tieconcat", u"⁀"),
("\\itGamma", u"𝛤"),
("\\itg", u"𝑔"),
("\\odot", u"⊙"),
("\\supseteqq", u"⫆"),
("\\csup", u"⫐"),
("\\bsansm", u"𝗺"),
("\\bisansS", u"𝙎"),
("\\boxminus", u"⊟"),
("\\Rdsh", u"↳"),
("\\varveebar", u"⩡"),
("\\bfrakx", u"𝖝"),
("\\k", u"̨"),
("\\bfBeta", u"𝚩"),
("\\bilambda", u"𝝀"),
("\\nparallel", u"∦"),
("\\Pi", u"Π"),
("\\^n", u"ⁿ"),
("\\conictaper", u"⌲"),
("\\biBeta", u"𝜝"),
("\\^z", u"ᶻ"),
("\\gtrapprox", u"⪆"),
("\\lessgtr", u"≶"),
("\\scrw", u"𝓌"),
("\\frakE", u"𝔈"),
("\\bix", u"𝒙"),
("\\ttthree", u"𝟹"),
("\\overleftrightarrow", u"⃡"),
("\\mdwhtcircle", u"⚪"),
("\\bisansrho", u"𝞺"),
("\\dotsim", u"⩪"),
("\\bfrakX", u"𝖃"),
("\\coloneq", u"≔"),
("\\trademark", u"™"),
("\\pisces", u"♓"),
("\\bscrY", u"𝓨"),
("\\bbfour", u"𝟜"),
("\\rightarrowsupset", u"⭄"),
("\\itu", u"𝑢"),
("\\preceqq", u"⪳"),
("\\bio", u"𝒐"),
("\\eqcirc", u"≖"),
("\\quarternote", u"♩"),
("\\measangleldtosw", u"⦫"),
("\\RightVectorBar", u"⥓"),
("\\vysmblksquare", u"⬝"),
("\\bscrz", u"𝔃"),
("\\frakJ", u"𝔍"),
("\\lgwhtsquare", u"⬜"),
("\\sansf", u"𝖿"),
("\\bsansvarepsilon", u"𝞊"),
("\\bfnine", u"𝟗"),
("\\upharpoonleft", u"↿"),
("\\sansl", u"𝗅"),
("\\wedgeq", u"≙"),
("\\itChi", u"𝛸"),
("\\^U", u"ᵁ"),
("\\hrectangleblack", u"▬"),
("\\hookrightarrow", u"↪"),
("\\supsetneq", u"⊋"),
("\\nis", u"⋼"),
("\\bisansI", u"𝙄"),
("\\biIota", u"𝜤"),
("\\5/6", u"⅚"),
("\\bbX", u"𝕏"),
("\\bisansdelta", u"𝞭"),
("\\succneqq", u"⪶"),
("\\precneq", u"⪱"),
("\\frakw", u"𝔴"),
("\\diamondleftarrow", u"⤝"),
("\\bbY", u"𝕐"),
("\\bisansbeta", u"𝞫"),
("\\bisansKappa", u"𝞙"),
("\\bfIota", u"𝚰"),
("\\bfTau", u"𝚻"),
("\\rtll", u"ɭ"),
("\\bim", u"𝒎"),
("\\sout", u"̶"),
("\\precsim", u"≾"),
("\\invw", u"ʍ"),
("\\bsansZ", u"𝗭"),
("\\^E", u"ᴱ"),
("\\biphi", u"𝝋"),
("\\upkoppa", u"ϟ"),
("\\trnsa", u"ɒ"),
("\\^d", u"ᵈ"),
("\\Gamma", u"Γ"),
("\\preceq", u"⪯"),
("\\bscrM", u"𝓜"),
("\\bfraka", u"𝖆"),
("\\isansH", u"𝘏"),
("\\tosa", u"⤩"),
("\\otimeslhrim", u"⨴"),
("\\bsansOmega", u"𝝮"),
("\\notin", u"∉"),
("\\inglst", u"ʖ"),
("\\frakl", u"𝔩"),
("\\^u", u"ᵘ"),
("\\ulblacktriangle", u"◤"),
("\\bil", u"𝒍"),
("\\_beta", u"ᵦ"),
("\\sansR", u"𝖱"),
("\\euro", u"€"),
("\\circ", u"∘"),
("\\bisansnabla", u"𝞩"),
("\\prime", u"′"),
("\\biH", u"𝑯"),
("\\itomicron", u"𝜊"),
("\\biTheta", u"𝜣"),
("\\mdwhtsquare", u"◻"),
("\\Angstrom", u"Å"),
("\\isansh", u"𝘩"),
("\\cdot", u"⋅"),
("\\uplus", u"⊎"),
("\\blockuphalf", u"▀"),
("\\leftthreearrows", u"⬱"),
("\\bid", u"𝒅"),
("\\leftdbkarrow", u"⤎"),
("\\itb", u"𝑏"),
("\\rtimes", u"⋊"),
("\\bisansvarTheta", u"𝞡"),
("\\numero", u"№"),
("\\carriagereturn", u"↵"),
("\\gsiml", u"⪐"),
("\\scrK", u"𝒦"),
("\\circledtwodots", u"⚇"),
("\\nmid", u"∤"),
("\\DJ", u"Đ"),
("\\bsanso", u"𝗼"),
("\\scrq", u"𝓆"),
("\\sansnine", u"𝟫"),
("\\trianglecdot", u"◬"),
("\\bfOmega", u"𝛀"),
("\\bfZeta", u"𝚭"),
("\\trny", u"ʎ"),
("\\^3", u"³"),
("\\^j", u"ʲ"),
("\\bsansh", u"𝗵"),
("\\bfrakE", u"𝕰"),
("\\ldots", u"…"),
("\\scrx", u"𝓍"),
("\\DownLeftVectorBar", u"⥖"),
("\\Supset", u"⋑"),
("\\mdblklozenge", u"⬧"),
("\\itvarsigma", u"𝜍"),
("\\barcup", u"⩂"),
("\\bftheta", u"𝛉"),
("\\bif", u"𝒇"),
("\\simrdots", u"⩫"),
("\\pgamma", u"ɣ"),
("\\ttM", u"𝙼"),
("\\midbarvee", u"⩝"),
("\\RightUpDownVector", u"⥏"),
("\\enclosediamond", u"⃟"),
("\\bisansAlpha", u"𝞐"),
("\\^5", u"⁵"),
("\\rightleftharpoonsup", u"⥨"),
("\\ltcir", u"⩹"),
("\\varhexagonlrbonds", u"⌬"),
("\\upharpoonsleftright", u"⥣"),
("\\varpi", u"ϖ"),
("\\scrR", u"ℛ"),
("\\bfH", u"𝐇"),
("\\circledast", u"⊛"),
("\\cap", u"∩"),
("\\bir", u"𝒓"),
("\\bscrh", u"𝓱"),
("\\Kappa", u"Κ"),
("\\vdash", u"⊢"),
("\\bib", u"𝒃"),
("\\smalltriangleright", u"▹"),
("\\because", u"∵"),
("\\barcap", u"⩃"),
("\\^beta", u"ᵝ"),
("\\bigtop", u"⟙"),
("\\elinters", u"⏧"),
("\\frakh", u"𝔥"),
("\\bfvarpi", u"𝛡"),
("\\bipi", u"𝝅"),
("\\_chi", u"ᵪ"),
("\\scrf", u"𝒻"),
("\\Times", u"⨯"),
("\\sqfse", u"◪"),
("\\rightharpoonupdash", u"⥬"),
("\\varniobar", u"⋽"),
("\\^iota", u"ᶥ"),
("\\biguplus", u"⨄"),
("\\nVleftrightarrow", u"⇼"),
("\\^a", u"ᵃ"),
("\\^v", u"ᵛ"),
("\\itr", u"𝑟"),
("\\bisansV", u"𝙑"),
("\\eqsim", u"≂"),
("\\whiteinwhitetriangle", u"⟁"),
("\\pupsil", u"ʊ"),
("\\lrarc", u"◞"),
("\\frakQ", u"𝔔"),
("\\isansg", u"𝘨"),
("\\tona", u"⤧"),
("\\setminus", u"∖"),
("\\nsqsubseteq", u"⋢"),
("\\doublepipe", u"ǂ"),
("\\lesdot", u"⩿"),
("\\isansw", u"𝘸"),
("\\bsansone", u"𝟭"),
("\\scrl", u"𝓁"),
("\\bbO", u"𝕆"),
("\\therefore", u"∴"),
("\\leftarrowtail", u"↢"),
("\\scre", u"ℯ"),
("\\smallni", u"∍"),
("\\rightanglearc", u"⊾"),
("\\measuredangle", u"∡"),
("\\iti", u"𝑖"),
("\\LeftTeeVector", u"⥚"),
("\\bfrakK", u"𝕶"),
("\\bisansvarphi", u"𝟇"),
("\\sansk", u"𝗄"),
("\\blkvertoval", u"⬮"),
("\\scrr", u"𝓇"),
("\\bisansPi", u"𝞟"),
("\\longleftarrow", u"⟵"),
("\\reglst", u"ʕ"),
("\\dj", u"đ"),
("\\downzigzagarrow", u"↯"),
("\\supedot", u"⫄"),
("\\biW", u"𝑾"),
("\\ppprime", u"‴"),
("\\biX", u"𝑿"),
("\\scrd", u"𝒹"),
("\\intprod", u"⨼"),
("\\notgreaterless", u"≹"),
("\\frakn", u"𝔫"),
("\\mdsmblksquare", u"◾"),
("\\bsansg", u"𝗴"),
("\\whitepointerleft", u"◅"),
("\\bfomega", u"𝛚"),
("\\bsansnine", u"𝟵"),
("\\^A", u"ᴬ"),
("\\bisansxi", u"𝞷"),
("\\_5", u"₅"),
("\\scrF", u"ℱ"),
("\\measangleurtone", u"⦬"),
("\\bscrI", u"𝓘"),
("\\3/8", u"⅜"),
("\\biy", u"𝒚"),
("\\bisansz", u"𝙯"),
("\\rtlr", u"ɽ"),
("\\subsub", u"⫕"),
("\\frakz", u"𝔷"),
("\\sansQ", u"𝖰"),
("\\strns", u"⏤"),
("\\gtrsim", u"≳"),
("\\uparrowbarred", u"⤉"),
("\\^Phi", u"ᶲ"),
("\\bidelta", u"𝜹"),
("\\adots", u"⋰"),
("\\downdasharrow", u"⇣"),
("\\rho", u"ρ"),
("\\dh", u"ð"),
("\\bscrK", u"𝓚"),
("\\gla", u"⪥"),
("\\itxi", u"𝜉"),
("\\bfpi", u"𝛑"),
("\\bfthree", u"𝟑"),
("\\mdsmwhtcircle", u"⚬"),
("\\bfEta", u"𝚮"),
("\\eqdot", u"⩦"),
("\\bfrakh", u"𝖍"),
("\\emptysetobar", u"⦱"),
("\\ittau", u"𝜏"),
("\\leftthreetimes", u"⋋"),
("\\bfrakc", u"𝖈"),
("\\jupiter", u"♃"),
("\\tta", u"𝚊"),
("\\_a", u"ₐ"),
("\\biPsi", u"𝜳"),
("\\bsansPsi", u"𝝭"),
("\\bumpeq", u"≏"),
("\\oiint", u"∯"),
("\\bigblacktriangledown", u"▼"),
("\\dotplus", u"∔"),
("\\bbS", u"𝕊"),
("\\opluslhrim", u"⨭"),
("\\searrow", u"↘"),
("\\mdwhtdiamond", u"⬦"),
("\\nvtwoheadrightarrow", u"⤀"),
("\\bfrakj", u"𝖏"),
("\\biDelta", u"𝜟"),
("\\itT", u"𝑇"),
("\\scrh", u"𝒽"),
("\\diamondtopblack", u"⬘"),
("\\diceii", u"⚁"),
("\\ttE", u"𝙴"),
("\\cirfl", u"◐"),
("\\bbj", u"𝕛"),
("\\bfA", u"𝐀"),
("\\bsansa", u"𝗮"),
("\\VDash", u"⊫"),
("\\upomicron", u"ο"),
("\\bscro", u"𝓸"),
("\\bsansT", u"𝗧"),
("\\bisansC", u"𝘾"),
("\\frakV", u"𝔙"),
("\\rsqhook", u"⫎"),
("\\palh", u"̡"),
("\\longleftsquigarrow", u"⬳"),
("\\trnm", u"ɯ"),
("\\^6", u"⁶"),
("\\boxdiag", u"⧄"),
("\\bic", u"𝒄"),
("\\bscry", u"𝔂"),
("\\quotedblright", u"”"),
("\\upsampi", u"ϡ"),
("\\bfrakD", u"𝕯"),
("\\itDelta", u"𝛥"),
("\\itKappa", u"𝛫"),
("\\linefeed", u"↴"),
("\\ttJ", u"𝙹"),
("\\geqqslant", u"⫺"),
("\\varsigma", u"ς"),
("\\bfrakO", u"𝕺"),
("\\bisanseta", u"𝞰"),
("\\dyogh", u"ʤ"),
("\\bsansfour", u"𝟰"),
("\\^y", u"ʸ"),
("\\mdblksquare", u"◼"),
("\\binabla", u"𝜵"),
("\\bisansupsilon", u"𝞾"),
("\\scrB", u"ℬ"),
("\\rtls", u"ʂ"),
("\\sqrint", u"⨖"),
("\\itQ", u"𝑄"),
("\\bfPi", u"𝚷"),
("\\nu", u"ν"),
("\\leftrightharpoons", u"⇋"),
("\\preccurlyeq", u"≼"),
("\\ddots", u"⋱"),
("\\nvrightarrowtail", u"⤔"),
("\\bipartial", u"𝝏"),
("\\flat", u"♭"),
("\\otimes", u"⊗"),
("\\bfE", u"𝐄"),
("\\lnapprox", u"⪉"),
("\\npolint", u"⨔"),
("\\bfM", u"𝐌"),
("\\bscre", u"𝓮"),
("\\sansu", u"𝗎"),
("\\astrosun", u"☉"),
("\\_t", u"ₜ"),
("\\itTheta", u"𝛩"),
("\\bichi", u"𝝌"),
("\\vartriangleleft", u"⊲"),
("\\bisansiota", u"𝞲"),
("\\simplus", u"⨤"),
("\\NotSquareSuperset", u"⊐̸"),
("\\scrS", u"𝒮"),
("\\bsansEpsilon", u"𝝚"),
("\\bisansEpsilon", u"𝞔"),
("\\bsanszeta", u"𝝵"),
("\\ltlmr", u"ɱ"),
("\\Psi", u"Ψ"),
("\\upvarbeta", u"ϐ"),
("\\bisansomicron", u"𝞸"),
("\\squareurblack", u"⬔"),
("\\mdlgblkcircle", u"●"),
("\\scrb", u"𝒷"),
("\\RightDownVectorBar", u"⥕"),
("\\odiv", u"⨸"),
("\\late", u"⪭"),
("\\ominus", u"⊖"),
("\\bscrt", u"𝓽"),
("\\bbm", u"𝕞"),
("\\grave", u"̀"),
("\\odotslashdot", u"⦼"),
("\\scrv", u"𝓋"),
("\\sansD", u"𝖣"),
("\\bbq", u"𝕢"),
("\\rightpentagonblack", u"⭓"),
("\\isinobar", u"⋷"),
("\\bsansepsilon", u"𝝴"),
("\\eqeqeq", u"⩶"),
("\\bfone", u"𝟏"),
("\\neuter", u"⚲"),
("\\lesges", u"⪓"),
("\\bowtie", u"⋈"),
("\\frakH", u"ℌ"),
("\\squareulblack", u"◩"),
("\\bbU", u"𝕌"),
("\\prod", u"∏"),
("\\bfraku", u"𝖚"),
("\\isansn", u"𝘯"),
("\\leftharpoonsupdown", u"⥢"),
("\\biUpsilon", u"𝜰"),
("\\lgblksquare", u"⬛"),
("\\sansn", u"𝗇"),
("\\downwhitearrow", u"⇩"),
("\\big", u"𝒈"),
("\\succcurlyeq", u"≽"),
("\\geqslant", u"⩾"),
("\\^c", u"ᶜ"),
("\\bscrw", u"𝔀"),
("\\awint", u"⨑"),
("\\scrW", u"𝒲"),
("\\LeftDownVectorBar", u"⥙"),
("\\_)", u"₎"),
("\\not", u"̸"),
("\\frako", u"𝔬"),
("\\bisanspsi", u"𝟁"),
("\\bigoplus", u"⨁"),
("\\circledequal", u"⊜"),
("\\veeeq", u"≚"),
("\\rightanglemdot", u"⦝"),
("\\biAlpha", u"𝜜"),
("\\itPi", u"𝛱"),
("\\ohm", u"Ω"),
("\\nsucceq", u"⪰̸"),
("\\obslash", u"⦸"),
("\\bsansd", u"𝗱"),
("\\^K", u"ᴷ"),
("\\H", u"̋"),
("\\bsansvarsigma", u"𝞁"),
("\\bisanschi", u"𝟀"),
("\\2/3", u"⅔"),
("\\squarellquad", u"◱"),
("\\bfR", u"𝐑"),
("\\upstigma", u"ϛ"),
("\\digamma", u"ϝ"),
("\\bsanseta", u"𝝶"),
("\\sansV", u"𝖵"),
("\\bisansPhi", u"𝞥"),
("\\vartriangleright", u"⊳"),
("\\bisansBeta", u"𝞑"),
("\\nsupseteqq", u"⫆̸"),
("\\bfrakr", u"𝖗"),
("\\bisansUpsilon", u"𝞤"),
("\\subsup", u"⫓"),
("\\NestedLessLess", u"⪡"),
("\\bfseven", u"𝟕"),
("\\biT", u"𝑻"),
("\\o", u"ø"),
("\\divideontimes", u"⋇"),
("\\bixi", u"𝝃"),
("\\triangleright", u"▷"),
("\\bfw", u"𝐰"),
("\\bbG", u"𝔾"),
("\\disin", u"⋲"),
("\\gsime", u"⪎"),
("\\NotNestedGreaterGreater", u"⪢̸"),
("\\jmath", u"ȷ"),
("\\lgwhtcircle", u"◯"),
("\\blackinwhitesquare", u"▣"),
("\\bbE", u"𝔼"),
("\\diagdown", u"╲"),
("\\doteq", u"≐"),
("\\bfraky", u"𝖞"),
("\\bsansgamma", u"𝝲"),
("\\ttS", u"𝚂"),
("\\bigslopedwedge", u"⩘"),
("\\supsup", u"⫖"),
("\\upint", u"⨛"),
("\\urarc", u"◝"),
("\\bscrT", u"𝓣"),
("\\bigcup", u"⋃"),
("\\simeq", u"≃"),
("\\vysmblkcircle", u"∙"),
("\\bbL", u"𝕃"),
("\\gvertneqq", u"≩︀"),
("\\bsansr", u"𝗿"),
("\\bfKappa", u"𝚱"),
("\\rightarrowtail", u"↣"),
("\\bisanstheta", u"𝞱"),
("\\diceiii", u"⚂"),
("\\bis", u"𝒔"),
("\\bfrakp", u"𝖕"),
("\\^chi", u"ᵡ"),
("\\multimap", u"⊸"),
("\\triangleleftblack", u"◭"),
("\\Delta", u"Δ"),
("\\varhexagonblack", u"⬢"),
("\\bfT", u"𝐓"),
("\\llarc", u"◟"),
("\\bscri", u"𝓲"),
("\\iff", u"⟺"),
("\\Rho", u"Ρ"),
("\\leqqslant", u"⫹"),
("\\lllnest", u"⫷"),
("\\bfh", u"𝐡"),
("\\backprime", u"‵"),
("\\bfj", u"𝐣"),
("\\isansm", u"𝘮"),
("\\topsemicircle", u"◠"),
("\\itpartial", u"𝜕"),
("\\lambda", u"λ"),
("\\highminus", u"¯"),
("\\LeftDownTeeVector", u"⥡"),
("\\ttt", u"𝚝"),
("\\itR", u"𝑅"),
("\\starequal", u"≛"),
("\\blanksymbol", u"␢"),
("\\nvLeftarrow", u"⤂"),
("\\triangledown", u"▿"),
("\\bisansZeta", u"𝞕"),
("\\circlearrowright", u"↻"),
("\\frakZ", u"ℨ"),
("\\closedvarcap", u"⩍"),
("\\itd", u"𝑑"),
("\\scrp", u"𝓅"),
("\\bfrakT", u"𝕿"),
("\\pbgam", u"ɤ"),
("\\isindot", u"⋵"),
("\\blacktriangleleft", u"◀"),
("\\bscrG", u"𝓖"),
("\\bfrakb", u"𝖇"),
("\\succnsim", u"⋩"),
("\\eqqless", u"⪙"),
("\\bfn", u"𝐧"),
("\\bfrakl", u"𝖑"),
("\\cong", u"≅"),
("\\sansX", u"𝖷"),
("\\bisansb", u"𝙗"),
("\\iteta", u"𝜂"),
("\\varsupsetneq", u"⊋︀"),
("\\bsanssigma", u"𝞂"),
("\\bsansPhi", u"𝝫"),
("\\isansk", u"𝘬"),
("\\pppprime", u"⁗"),
("\\bsansw", u"𝘄"),
("\\bisanss", u"𝙨"),
("\\low", u"˕"),
("\\eqvparsl", u"⧥"),
("\\medwhitestar", u"⭐"),
("\\quad", u" "),
("\\eqqgtr", u"⪚"),
("\\measangleultonw", u"⦭"),
("\\bigsqcap", u"⨅"),
("\\supsub", u"⫔"),
("\\sun", u"☼"),
("\\bfI", u"𝐈"),
("\\isansb", u"𝘣"),
("\\ity", u"𝑦"),
("\\ltln", u"ɲ"),
("\\lazysinv", u"∾"),
("\\RightTriangleBar", u"⧐"),
("\\rh", u"̢"),
("\\asteq", u"⩮"),
("\\Subset", u"⋐"),
("\\itV", u"𝑉"),
("\\vysmwhtsquare", u"⬞"),
("\\bsansbeta", u"𝝱"),
("\\biE", u"𝑬"),
("\\Rlarr", u"⥂"),
("\\leftmoon", u"☾"),
("\\_+", u"₊"),
("\\bisansGamma", u"𝞒"),
("\\bfY", u"𝐘"),
("\\sqcap", u"⊓"),
("\\succnapprox", u"⪺"),
("\\nleq", u"≰"),
("\\bbsix", u"𝟞"),
("\\bfW", u"𝐖"),
("\\biPhi", u"𝜱"),
("\\pprime", u"″"),
("\\bfO", u"𝐎"),
("\\vee", u"∨"),
("\\bivarkappa", u"𝝒"),
("\\bbe", u"𝕖"),
("\\^s", u"ˢ"),
("\\frakv", u"𝔳"),
("\\isansf", u"𝘧"),
("\\ttL", u"𝙻"),
("\\^9", u"⁹"),
("\\approxeq", u"≊"),
("\\RightTeeVector", u"⥛"),
("\\_h", u"ₕ"),
("\\ttR", u"𝚁"),
("\\rightharpoonup", u"⇀"),
("\\dlcorn", u"⎣"),
("\\rightarrowbar", u"⇥"),
("\\hermitconjmatrix", u"⊹"),
("\\notslash", u"⌿"),
("\\rightarrow", u"→"),
("\\bisigma", u"𝝈"),
("\\upand", u"⅋"),
("\\frakB", u"𝔅"),
("\\geqq", u"≧"),
("\\rightpentagon", u"⭔"),
("\\Mapsfrom", u"⤆"),
("\\itB", u"𝐵"),
("\\circletophalfblack", u"◓"),
("\\rmoustache", u"⎱"),
("\\u", u"˘"),
("\\bbpi", u"ℼ"),
("\\intBar", u"⨎"),
("\\Epsilon", u"Ε"),
("\\1/10", u"⅒"),
("\\1/3", u"⅓"),
("\\leftrightharpoondownup", u"⥋"),
("\\rightharpoonaccent", u"⃑"),
("\\itJ", u"𝐽"),
("\\_l", u"ₗ"),
("\\RightDownTeeVector", u"⥝"),
("\\viewdata", u"⌗"),
("\\overbar", u"̅"),
("\\bisansw", u"𝙬"),
("\\mu", u"μ"),
("\\sansI", u"𝖨"),
("\\ttv", u"𝚟"),
("\\diamondleftarrowbar", u"⤟"),
("\\bisansO", u"𝙊"),
("\\zeta", u"ζ"),
("\\1/7", u"⅐"),
("\\diamondrightblack", u"⬗"),
("\\bbPi", u"ℿ"),
("\\bfx", u"𝐱"),
("\\exclamdown", u"¡"),
("\\biRho", u"𝜬"),
("\\itv", u"𝑣"),
("\\gneq", u"⪈"),
("\\itn", u"𝑛"),
("\\curvearrowleft", u"↶"),
("\\nlesssim", u"≴"),
("\\frakp", u"𝔭"),
("\\mercury", u"☿"),
("\\^O", u"ᴼ"),
("\\lpargt", u"⦠"),
("\\le", u"≤"),
("\\bscrF", u"𝓕"),
("\\leftcurvedarrow", u"⬿"),
("\\bscrm", u"𝓶"),
("\\bfD", u"𝐃"),
("\\isansW", u"𝘞"),
("\\^7", u"⁷"),
("\\tttwo", u"𝟸"),
("\\bfupsilon", u"𝛖"),
("\\hermaphrodite", u"⚥"),
("\\candra", u"̐"),
("\\triangleplus", u"⨹"),
("\\ulcorner", u"⌜"),
("\\bbI", u"𝕀"),
("\\hbar", u"ħ"),
("\\itZ", u"𝑍"),
("\\sansP", u"𝖯"),
("\\bffour", u"𝟒"),
("\\tteight", u"𝟾"),
("\\varepsilon", u"ε"),
("\\tty", u"𝚢"),
("\\bsansTau", u"𝝩"),
("\\bisanslambda", u"𝞴"),
("\\yogh", u"ʒ"),
("\\bsansi", u"𝗶"),
("\\glst", u"ʔ"),
("\\intprodr", u"⨽"),
("\\annuity", u"⃧"),
("\\bsimilarrightarrow", u"⭇"),
("\\sanssix", u"𝟨"),
("\\blackrighthalfcircle", u"◗"),
("\\downarrow", u"↓"),
("\\eulermascheroni", u"ℇ"),
("\\minusdot", u"⨪"),
("\\revangle", u"⦣"),
("\\gtrdot", u"⋗"),
("\\circledR", u"®"),
("\\nVdash", u"⊮"),
("\\downarrowbarred", u"⤈"),
("\\veeodot", u"⩒"),
("\\sansC", u"𝖢"),
("\\biupsilon", u"𝝊"),
("\\nsime", u"≄"),
("\\parallel", u"∥"),
("\\squarehfill", u"▤"),
("\\NotRightTriangleBar", u"⧐̸"),
("\\scru", u"𝓊"),
("\\Digamma", u"Ϝ"),
("\\bsansSigma", u"𝝨"),
("\\PropertyLine", u"⅊"),
("\\leftleftarrows", u"⇇"),
("\\nvDash", u"⊭"),
("\\frakm", u"𝔪"),
("\\bfC", u"𝐂"),
("\\verti", u"ˌ"),
("\\Rrightarrow", u"⇛"),
("\\eqless", u"⋜"),
("\\itsigma", u"𝜎"),
("\\bsansphi", u"𝞅"),
("\\toea", u"⤨"),
("\\itf", u"𝑓"),
("\\bbc", u"𝕔"),
("\\frakd", u"𝔡"),
("\\seovnearrow", u"⤭"),
("\\openo", u"ɔ"),
("\\gneqq", u"≩"),
("\\updasharrow", u"⇡"),
("\\bisansP", u"𝙋"),
("\\bsansx", u"𝘅"),
("\\^-", u"⁻"),
("\\dingasterisk", u"✽"),
("\\sansz", u"𝗓"),
("\\bfRho", u"𝚸"),
("\\1/4", u"¼"),
("\\ttZ", u"𝚉"),
("\\oplus", u"⊕"),
("\\rceil", u"⌉"),
("\\lesdoto", u"⪁"),
("\\uminus", u"⩁"),
("\\leftarrowonoplus", u"⬲"),
("\\ttV", u"𝚅"),
("\\bivarphi", u"𝝓"),
("\\itH", u"𝐻"),
("\\updownarrow", u"↕"),
("\\itG", u"𝐺"),
("\\sansW", u"𝖶"),
("\\cup", u"∪"),
("\\upin", u"⟒"),
("\\ringplus", u"⨢"),
("\\lsimg", u"⪏"),
("\\itm", u"𝑚"),
("\\itbeta", u"𝛽"),
("\\Or", u"⩔"),
("\\longrightsquigarrow", u"⟿"),
("\\rdiagovsearrow", u"⤰"),
("\\gtcir", u"⩺"),
("\\_e", u"ₑ"),
("\\lsqhook", u"⫍"),
("\\tesh", u"ʧ"),
("\\bscrp", u"𝓹"),
("\\varkappa", u"ϰ"),
("\\bbP", u"ℙ"),
("\\bbr", u"𝕣"),
("\\cbrt", u"∛"),
("\\trianglerighteq", u"⊵"),
("\\biF", u"𝑭"),
("\\emptysetocirc", u"⦲"),
("\\Coloneq", u"⩴"),
("\\scrG", u"𝒢"),
("\\euler", u"ℯ"),
("\\bscru", u"𝓾"),
("\\sansd", u"𝖽"),
("\\AA", u"Å"),
("\\frakj", u"𝔧"),
("\\bfalpha", u"𝛂"),
("\\biOmicron", u"𝜪"),
("\\bisansphi", u"𝞿"),
("\\ttone", u"𝟷"),
("\\_(", u"₍"),
("\\bfB", u"𝐁"),
("\\exists", u"∃"),
("\\fhr", u"ɾ"),
("\\bscrJ", u"𝓙"),
("\\Uuparrow", u"⤊"),
("\\biMu", u"𝜧"),
("\\bsansomicron", u"𝝾"),
("\\thickspace", u" "),
("\\endash", u"–"),
("\\sansU", u"𝖴"),
("\\bisansN", u"𝙉"),
("\\blackcircleulquadwhite", u"◕"),
("\\ttl", u"𝚕"),
("\\bisansvartheta", u"𝟅"),
("\\bsansQ", u"𝗤"),
("\\bizeta", u"𝜻"),
("\\bsansvarTheta", u"𝝧"),
("\\gesdoto", u"⪂"),
("\\bieta", u"𝜼"),
("\\oslash", u"⊘"),
("\\itZeta", u"𝛧"),
("\\itEpsilon", u"𝛦"),
("\\smt", u"⪪"),
("\\scrP", u"𝒫"),
("\\bscrb", u"𝓫"),
("\\sansp", u"𝗉"),
("\\trna", u"ɐ"),
("\\itY", u"𝑌"),
("\\bisanse", u"𝙚"),
("\\wideangledown", u"⦦"),
("\\bsanszero", u"𝟬"),
("\\bby", u"𝕪"),
("\\sblhr", u"˓"),
("\\simminussim", u"⩬"),
("\\subsetdot", u"⪽"),
("\\turnediota", u"℩"),
("\\bsansA", u"𝗔"),
("\\join", u"⨝"),
("\\bscrj", u"𝓳"),
("\\bot", u"⊥"),
("\\scrD", u"𝒟"),
("\\frakP", u"𝔓"),
("\\gnsim", u"⋧"),
("\\Chi", u"Χ"),
("\\biO", u"𝑶"),
("\\sqfl", u"◧"),
("\\vertoverlay", u"⃒"),
("\\tripleplus", u"⧻"),
("\\nabla", u"∇"),
("\\scrX", u"𝒳"),
("\\_m", u"ₘ"),
("\\models", u"⊧"),
("\\lneqq", u"≨"),
("\\trnrl", u"ɺ"),
("\\Cup", u"⋓"),
("\\propto", u"∝"),
("\\rtln", u"ɳ"),
("\\bbx", u"𝕩"),
("\\bfPsi", u"𝚿"),
("\\ite", u"𝑒"),
("\\biomega", u"𝝎"),
("\\bbij", u"ⅉ"),
("\\ttF", u"𝙵"),
("\\rq", u"’"),
("\\mlcp", u"⫛"),
("\\leftsquigarrow", u"⇜"),
("\\bsansy", u"𝘆")
]
|
5332ece181e5eedc35195482d66c017f84a9a789
|
0be19cd3a3ec44141f03dd523da6fb770b08f569
|
/test/smart_compose/train/test_data_fn.py
|
c63e2330a779344b3cec8ad3744cd275db8af9f2
|
[
"BSD-2-Clause",
"BSD-3-Clause"
] |
permissive
|
linkedin/detext
|
f2cdace77d4b3c6cc88d3992a67dde305e2b8a52
|
671d43c5ffc83cae635174ed15c58d0bc84b76ef
|
refs/heads/master
| 2023-09-01T00:53:57.571516
| 2023-03-02T22:27:06
| 2023-03-02T22:27:06
| 234,432,813
| 1,289
| 151
|
BSD-2-Clause
| 2023-03-02T22:03:34
| 2020-01-16T23:38:58
|
Python
|
UTF-8
|
Python
| false
| false
| 2,675
|
py
|
test_data_fn.py
|
from os.path import join as path_join
import tensorflow as tf
from official.utils.misc import distribution_utils
from smart_compose.train import data_fn
from smart_compose.utils import vocab_utils
from smart_compose.utils.parsing_utils import InputFtrType, iterate_items_with_list_val
from smart_compose.utils.testing.test_case import TestCase
class TestData(TestCase):
"""Unit test for data_fn.py"""
_, vocab_tf_table = vocab_utils.read_tf_vocab(TestCase.vocab_file, '[UNK]')
vocab_table = TestCase.vocab_table_py
CLS = '[CLS]'
PAD = '[PAD]'
SEP = '[SEP]'
PAD_ID = vocab_table[PAD]
SEP_ID = vocab_table[SEP]
CLS_ID = vocab_table[CLS]
target_column_name = 'query'
def testInputFnBuilderTfrecord(self):
""" Tests function input_fn_builder() """
one_device_strategy = distribution_utils.get_distribution_strategy('one_device', num_gpus=0)
for strategy in [None, one_device_strategy]:
self._testInputFnBuilderTfrecord(strategy)
def _testInputFnBuilderTfrecord(self, strategy):
""" Tests function input_fn_builder() for given strategy """
data_dir = path_join(self.data_dir)
# Create a dataset
# Read schema
# Parse and process data in dataset
feature_type_2_name = {
InputFtrType.TARGET_COLUMN_NAME: self.target_column_name,
}
def _input_fn_tfrecord(ctx):
return data_fn.input_fn_tfrecord(input_pattern=data_dir,
batch_size=batch_size,
mode=tf.estimator.ModeKeys.EVAL,
feature_type_2_name=feature_type_2_name,
input_pipeline_context=ctx)
batch_size = 2
if strategy is not None:
dataset = strategy.experimental_distribute_datasets_from_function(_input_fn_tfrecord)
else:
dataset = _input_fn_tfrecord(None)
# Make iterator
for features, label in dataset:
for ftr_type, ftr_name_lst in iterate_items_with_list_val(feature_type_2_name):
if ftr_type in (InputFtrType.TARGET_COLUMN_NAME,):
self.assertLen(ftr_name_lst, 1), f'Length for current ftr type ({ftr_type}) should be 1'
ftr_name = ftr_name_lst[0]
self.assertIn(ftr_name, label)
continue
# Check source and target text shape
self.assertAllEqual(label[self.target_column_name].shape, [batch_size])
break
if __name__ == "__main__":
tf.test.main()
|
375839ea4e9dc109b9f6577aa8c299ab84e690ef
|
9efca95a55cb4df52d895d42f1ec10331516a734
|
/tools/c7n_gcp/tests/test_memstore.py
|
6bbf3663accdc725214b5e9cd0f66be8bb2dcd01
|
[
"Apache-2.0"
] |
permissive
|
cloud-custodian/cloud-custodian
|
519e602abe00c642786441b64cc40857ef5bc9de
|
27563cf4571040f923124e1acb2463f11e372225
|
refs/heads/main
| 2023-09-04T10:54:55.963703
| 2023-09-01T17:40:17
| 2023-09-01T17:40:17
| 52,837,350
| 3,327
| 1,096
|
Apache-2.0
| 2023-09-14T14:03:30
| 2016-03-01T01:11:20
|
Python
|
UTF-8
|
Python
| false
| false
| 872
|
py
|
test_memstore.py
|
# Copyright The Cloud Custodian Authors.
# SPDX-License-Identifier: Apache-2.0
from gcp_common import BaseTest
class RedisInstanceTest(BaseTest):
def test_redis_instance_query(self):
project_id = 'gcp-lab-custodian'
factory = self.replay_flight_data('test_redis_instance_list_query', project_id=project_id)
p = self.load_policy(
{'name': 'redis-instance-query',
'resource': 'gcp.redis'},
session_factory=factory)
resources = p.run()
self.assertEqual(len(resources), 1)
self.assertEqual(resources[0]['name'], 'projects/cloud-custodian/locations/'
'us-central1/instances/instance-test')
assert p.resource_manager.get_urns(resources) == [
"gcp:redis:us-central1:gcp-lab-custodian:instance/instance-test"
]
|
085f0f1f14d621f1d0702f2d6fbe1a3c3eced6ec
|
483424524c70852cc043e0d77bf1b757a61d797a
|
/deepspeed/ops/transformer/inference/op_binding/base.py
|
5a997f95d5cc53e3323180ad7cd70ed9e8210a1b
|
[
"Apache-2.0",
"LicenseRef-scancode-generic-cla"
] |
permissive
|
microsoft/DeepSpeed
|
810f1af320020718d0794f5a97cde6f1d17af122
|
55d9964c59c0c6e23158b5789a5c36c28939a7b0
|
refs/heads/master
| 2023-09-06T07:40:52.145692
| 2023-09-05T23:51:23
| 2023-09-05T23:51:23
| 235,860,204
| 27,557
| 3,347
|
Apache-2.0
| 2023-09-14T21:38:46
| 2020-01-23T18:35:18
|
Python
|
UTF-8
|
Python
| false
| false
| 536
|
py
|
base.py
|
# Copyright (c) Microsoft Corporation.
# SPDX-License-Identifier: Apache-2.0
# DeepSpeed Team
import torch
from ..config import DeepSpeedInferenceConfig
from deepspeed.ops.op_builder import InferenceBuilder
class BaseOp(torch.nn.Module):
inference_module = None
def __init__(self, config: DeepSpeedInferenceConfig):
super(BaseOp, self).__init__()
self.config = config
if BaseOp.inference_module is None:
builder = InferenceBuilder()
BaseOp.inference_module = builder.load()
|
e4b13f88e7c938f35defe1743b678f4da3d6f96d
|
b43e0384ec4bfacec2571a2bb41ce563267db449
|
/jesse/models/SpotExchange.py
|
ce737409528d22fb62de1cae47930b5b9309e22d
|
[
"MIT"
] |
permissive
|
jesse-ai/jesse
|
55b73448b767492a20f8bc56c28306a1a24f8599
|
fadb03b5fcc06f0655c6a5d877435fb872a97c5e
|
refs/heads/master
| 2023-08-24T15:28:52.875208
| 2023-08-24T13:53:31
| 2023-08-24T13:53:31
| 156,847,937
| 5,259
| 722
|
MIT
| 2023-09-10T13:51:26
| 2018-11-09T10:38:44
|
Python
|
UTF-8
|
Python
| false
| false
| 5,762
|
py
|
SpotExchange.py
|
import jesse.helpers as jh
from jesse.enums import sides
from jesse.exceptions import InsufficientBalance
from jesse.models import Order
from jesse.models.Exchange import Exchange
from jesse.enums import order_types
from jesse.utils import sum_floats, subtract_floats
class SpotExchange(Exchange):
def __init__(self, name: str, starting_balance: float, fee_rate: float):
super().__init__(name, starting_balance, fee_rate, 'spot')
self.stop_orders_sum = {}
self.limit_orders_sum = {}
# # # # live-trading only # # # #
self._started_balance = 0
# # # # # # # # # # # # # # # # #
@property
def started_balance(self) -> float:
if jh.is_livetrading():
return self._started_balance
return self.starting_assets[jh.app_currency()]
@property
def wallet_balance(self) -> float:
return self.assets[self.settlement_currency]
@property
def available_margin(self) -> float:
return self.wallet_balance
def on_order_submission(self, order: Order) -> None:
if jh.is_livetrading():
return
if order.side == sides.SELL:
if order.type == order_types.STOP:
self.stop_orders_sum[order.symbol] = sum_floats(self.stop_orders_sum.get(order.symbol, 0), abs(order.qty))
elif order.type == order_types.LIMIT:
self.limit_orders_sum[order.symbol] = sum_floats(self.limit_orders_sum.get(order.symbol, 0), abs(order.qty))
base_asset = jh.base_asset(order.symbol)
# buy order
if order.side == sides.BUY:
# cannot buy if we don't have enough balance (of the settlement currency)
quote_balance = self.assets[self.settlement_currency]
self.assets[self.settlement_currency] = subtract_floats(self.assets[self.settlement_currency], (abs(order.qty) * order.price))
if self.assets[self.settlement_currency] < 0:
raise InsufficientBalance(
f"Not enough balance. Available balance at {self.name} for {self.settlement_currency} is {quote_balance} but you're trying to spend {abs(order.qty * order.price)}"
)
# sell order
else:
# sell order's qty cannot be bigger than the amount of existing base asset
base_balance = self.assets[base_asset]
if order.type == order_types.MARKET:
order_qty = sum_floats(abs(order.qty), self.limit_orders_sum.get(order.symbol, 0))
elif order.type == order_types.STOP:
order_qty = self.stop_orders_sum[order.symbol]
elif order.type == order_types.LIMIT:
order_qty = self.limit_orders_sum[order.symbol]
else:
raise Exception(f"Unknown order type {order.type}")
# validate that the total selling amount is not bigger than the amount of the existing base asset
if order_qty > base_balance:
raise InsufficientBalance(
f"Not enough balance. Available balance at {self.name} for {base_asset} is {base_balance} but you're trying to sell {order_qty}"
)
def on_order_execution(self, order: Order) -> None:
if jh.is_livetrading():
return
if order.side == sides.SELL:
if order.type == order_types.STOP:
self.stop_orders_sum[order.symbol] = subtract_floats(self.stop_orders_sum[order.symbol], abs(order.qty))
elif order.type == order_types.LIMIT:
self.limit_orders_sum[order.symbol] = subtract_floats(self.limit_orders_sum[order.symbol], abs(order.qty))
base_asset = jh.base_asset(order.symbol)
# buy order
if order.side == sides.BUY:
# asset's balance is increased by the amount of the order's qty after fees are deducted
self.assets[base_asset] = sum_floats(self.assets[base_asset], abs(order.qty) * (1 - self.fee_rate))
# sell order
else:
# settlement currency's balance is increased by the amount of the order's qty after fees are deducted
self.assets[self.settlement_currency] = sum_floats(self.assets[self.settlement_currency], (abs(order.qty) * order.price) * (1 - self.fee_rate))
# now reduce base asset's balance by the amount of the order's qty
self.assets[base_asset] = subtract_floats(self.assets[base_asset], abs(order.qty))
def on_order_cancellation(self, order: Order) -> None:
if jh.is_livetrading():
return
if order.side == sides.SELL:
if order.type == order_types.STOP:
self.stop_orders_sum[order.symbol] = subtract_floats(self.stop_orders_sum[order.symbol], abs(order.qty))
elif order.type == order_types.LIMIT:
self.limit_orders_sum[order.symbol] = subtract_floats(self.limit_orders_sum[order.symbol], abs(order.qty))
base_asset = jh.base_asset(order.symbol)
# buy order
if order.side == sides.BUY:
self.assets[self.settlement_currency] = sum_floats(self.assets[self.settlement_currency], abs(order.qty) * order.price)
# sell order
else:
self.assets[base_asset] = sum_floats(self.assets[base_asset], abs(order.qty))
def update_from_stream(self, data: dict) -> None:
"""
Used for updating the exchange from the WS stream (only for live trading)
"""
if not jh.is_livetrading():
raise Exception('This method is only for live trading')
self.assets[self.settlement_currency] = data['balance']
if self._started_balance == 0:
self._started_balance = data['balance']
|
b03ef66be61677f591c6e209f7f1892d02c4db71
|
a5a99f646e371b45974a6fb6ccc06b0a674818f2
|
/RecoTracker/MkFitCore/standalone/plotting/makeBenchmarkPlots.py
|
0fb60aa57b63186c3c13afc03cb2f381d6e4252f
|
[
"Apache-2.0"
] |
permissive
|
cms-sw/cmssw
|
4ecd2c1105d59c66d385551230542c6615b9ab58
|
19c178740257eb48367778593da55dcad08b7a4f
|
refs/heads/master
| 2023-08-23T21:57:42.491143
| 2023-08-22T20:22:40
| 2023-08-22T20:22:40
| 10,969,551
| 1,006
| 3,696
|
Apache-2.0
| 2023-09-14T19:14:28
| 2013-06-26T14:09:07
|
C++
|
UTF-8
|
Python
| false
| false
| 6,509
|
py
|
makeBenchmarkPlots.py
|
import os.path, glob, sys
import ROOT
import array
import math
# N.B.: Consult ./xeon_scripts/benchmark-cmssw-ttbar-fulldet-build.sh for info on nTHs, nVUs, and text file names
def run():
# command line input
arch = sys.argv[1] # SNB, KNL, SKL-SP
sample = sys.argv[2]
build = sys.argv[3] # BH, STD, CE, FV
isVU = sys.argv[4] # 'true' or 'false': if no argument passed, will not do VU plots
isTH = sys.argv[5] # 'true' or 'false': if no argument passed, will not do TH plots
# reopen file for writing
g = ROOT.TFile('benchmark_'+arch+'_'+sample+'.root','update')
# Vectorization data points
vuvals = ['1','2','4','8']
nth = '1'
if arch == 'KNL' or arch == 'SKL-SP' or arch == 'LNX-G' or arch == 'LNX-S':
vuvals.append('16')
vuvals.append('16int')
elif arch == 'SNB' :
vuvals.append('8int')
else :
print arch,'is not a valid architecture! Exiting...'
sys.exit(0)
# call the make plots function
if isVU == 'true' :
makeplots(arch,sample,build,vuvals,nth,'VU')
# Parallelization datapoints
if arch == 'KNL' :
nvu = '16int'
thvals = ['1','2','4','8','16','32','64','96','128','160','192','224','256']
elif arch == 'SNB' :
nvu = '8int'
thvals = ['1','2','4','6','8','12','16','20','24']
elif arch == 'SKL-SP' :
nvu = '16int'
thvals = ['1','2','4','8','16','32','48','64']
elif arch == 'LNX-G' :
nvu = '16int'
thvals = ['1','2','4','8','16','32','48','64']
elif arch == 'LNX-S' :
nvu = '16int'
thvals = ['1','2','4','8','16','32','48','64']
else :
print arch,'is not a valid architecture! Exiting...'
sys.exit(0)
# call the make plots function
if isTH == 'true' :
makeplots(arch,sample,build,thvals,nvu,'TH')
g.Write()
g.Close()
def makeplots(arch,sample,build,vals,nC,text):
# position in logs
if build == 'BH' : pos = 8
elif build == 'STD' : pos = 11
elif build == 'CE' : pos = 14
elif build == 'FV' : pos = 17
else :
print build,'is not a valid test! Exiting...'
sys.exit(0)
# time
print arch,sample,build,text
# define tgraphs vs absolute time and speedup
g_time = ROOT.TGraphErrors(len(vals)-1)
g_speedup = ROOT.TGraphErrors(len(vals)-1)
# make separate plot for intrinsics measurement
if text is 'VU' :
g_time_int = ROOT.TGraphErrors(1)
g_speedup_int = ROOT.TGraphErrors(1)
point = 0
for val in vals :
if val is '16int': xval = 16.0
elif val is '8int' : xval = 8.0
else : xval = float(val)
# array of time values
yvals = array.array('d');
# always skip the first event
firstFound = False
# open the correct log file, store times into temp file
if text is 'VU' : os.system('grep Matriplex log_'+arch+'_'+sample+'_'+build+'_NVU'+val+'_NTH'+nC +'.txt >& log_'+arch+'_'+sample+'_'+build+'_'+text+'.txt')
elif text is 'TH' : os.system('grep Matriplex log_'+arch+'_'+sample+'_'+build+'_NVU'+nC +'_NTH'+val+'.txt >& log_'+arch+'_'+sample+'_'+build+'_'+text+'.txt')
else :
print 'VU or TH are the only options for extra text! Exiting...'
exit
# open temp file, store event times into yvals
with open('log_'+arch+'_'+sample+'_'+build+'_'+text+'.txt') as f :
for line in f :
if 'Matriplex' not in line : continue
if 'Total' in line : continue
if not firstFound :
firstFound = True
continue
lsplit = line.split()
yvals.append(float(lsplit[pos]))
# Compute mean and uncertainty on mean from yvals
sum = 0.;
for yval in range(0,len(yvals)):
sum = sum + yvals[yval]
if len(yvals) > 0 :
mean = sum/len(yvals)
else :
mean = 0
emean = 0.;
for yval in range(0,len(yvals)):
emean = emean + ((yvals[yval] - mean) * (yvals[yval] - mean))
if len(yvals) > 1 :
emean = math.sqrt(emean / (len(yvals) - 1))
emean = emean/math.sqrt(len(yvals))
else :
emean = 0
# Printout value for good measure
print val,mean,'+/-',emean
# store intrinsics val into separate plot
if 'int' not in val :
g_time.SetPoint(point,xval,mean)
g_time.SetPointError(point,0,emean)
point = point+1
else :
g_time_int.SetPoint(0,xval,mean)
g_time_int.SetPointError(0,0,emean)
# always write out the standard plot
g_time.Write('g_'+build+'_'+text+'_time')
# write out separate intrinsics plot
if text is 'VU' :
g_time_int.Write('g_'+build+'_'+text+'_time_int')
# Speedup calculation
xval0 = array.array('d',[0])
yval0 = array.array('d',[0])
yerr0 = array.array('d',[0])
# Get first point to divide by
g_time.GetPoint(0,xval0,yval0)
yerr0.append(g_time.GetErrorY(0))
point = 0
for val in vals :
# set up inputs
xval = array.array('d',[0])
yval = array.array('d',[0])
yerr = array.array('d',[0])
# get standard plots from standard plot
if 'int' not in val :
g_time.GetPoint(point,xval,yval)
yerr.append(g_time.GetErrorY(point))
else :
g_time_int.GetPoint(0,xval,yval)
yerr.append(g_time_int.GetErrorY(0))
speedup = 0.
espeedup = 0.
if yval[0] > 0. and yval0[0] > 0. :
speedup = yval0[0]/yval[0]
espeedup = speedup * math.sqrt(math.pow(yerr0[0]/yval0[0],2) + math.pow(yerr[0]/yval[0],2))
# store in the correct plot
if 'int' not in val :
g_speedup.SetPoint(point,xval[0],speedup)
g_speedup.SetPointError(point,0,espeedup)
point = point+1
else :
g_speedup_int.SetPoint(0,xval[0],speedup)
g_speedup_int.SetPointError(0,0,espeedup)
# always write out the standard plot
g_speedup.Write('g_'+build+'_'+text+'_speedup')
# write out separate intrinsics plot
if text is 'VU' :
g_speedup_int.Write('g_'+build+'_'+text+'_speedup_int')
# all done
return
if __name__ == "__main__":
run()
|
9497567c1d8dff5a2f1af1b2a27184426110fb65
|
fa1ad2e2ac7e376fc7cb3b3a6e1bb88eed3e80be
|
/dts/airbyte/airbyte-integrations/connectors/source-okta/unit_tests/test_streams.py
|
35b8b8fd637535e5b51053ea2c4e6bdb215ef72d
|
[
"MIT",
"Apache-2.0",
"BSD-3-Clause",
"Elastic-2.0"
] |
permissive
|
alldatacenter/alldata
|
7bc7713c9f1d56ad6b8e59ea03206d1073b7e047
|
8d5f9a2d49ab8f9e85ccf058cb02c2fda287afc6
|
refs/heads/master
| 2023-08-05T07:32:25.442740
| 2023-08-03T13:17:24
| 2023-08-03T13:17:24
| 213,321,771
| 774
| 250
|
Apache-2.0
| 2023-09-06T17:35:32
| 2019-10-07T07:36:18
| null |
UTF-8
|
Python
| false
| false
| 21,925
|
py
|
test_streams.py
|
#
# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
#
import datetime
import time
from abc import ABC
from http import HTTPStatus
from unittest.mock import MagicMock
import pytest
import requests
from airbyte_cdk.models import SyncMode
from source_okta.source import (
CustomRoles,
GroupMembers,
GroupRoleAssignments,
Groups,
IncrementalOktaStream,
Logs,
OktaStream,
Permissions,
ResourceSets,
UserRoleAssignments,
Users,
)
@pytest.fixture
def patch_base_class(mocker):
"""
Base patcher for used streams
"""
mocker.patch.object(OktaStream, "path", "v0/example_endpoint")
mocker.patch.object(OktaStream, "primary_key", "test_primary_key")
mocker.patch.object(OktaStream, "__abstractmethods__", set())
mocker.patch.object(IncrementalOktaStream, "path", "v0/example_endpoint")
mocker.patch.object(IncrementalOktaStream, "primary_key", "test_primary_key")
mocker.patch.object(IncrementalOktaStream, "__abstractmethods__", set())
class TestStatusCodes:
@pytest.mark.parametrize(
("http_status", "should_retry"),
[
(HTTPStatus.OK, False),
(HTTPStatus.BAD_REQUEST, False),
(HTTPStatus.TOO_MANY_REQUESTS, True),
(HTTPStatus.INTERNAL_SERVER_ERROR, True),
],
)
def test_should_retry(self, patch_base_class, http_status, should_retry, url_base, start_date):
response_mock = MagicMock()
response_mock.status_code = http_status
stream = OktaStream(url_base=url_base, start_date=start_date)
assert stream.should_retry(response_mock) == should_retry
class TestOktaStream:
def test_okta_stream_request_params(self, patch_base_class, url_base, start_date):
stream = OktaStream(url_base=url_base, start_date=start_date)
inputs = {"stream_slice": None, "stream_state": None, "next_page_token": None}
expected_params = {"limit": 200}
assert stream.request_params(**inputs) == expected_params
def test_okta_stream_parse_response(self, patch_base_class, requests_mock, url_base, api_url, start_date):
stream = OktaStream(url_base=url_base, start_date=start_date)
requests_mock.get(f"{api_url}", json=[{"a": 123}, {"b": "xx"}])
resp = requests.get(f"{api_url}")
inputs = {"response": resp, "stream_state": MagicMock()}
expected_parsed_object = [{"a": 123}, {"b": "xx"}]
assert list(stream.parse_response(**inputs)) == expected_parsed_object
def test_okta_stream_backoff_time(self, patch_base_class, url_base, start_date):
response_mock = requests.Response()
stream = OktaStream(url_base=url_base, start_date=start_date)
expected_backoff_time = None
assert stream.backoff_time(response_mock) == expected_backoff_time
def test_okta_stream_incremental_request_params(self, patch_base_class, url_base, start_date):
stream = IncrementalOktaStream(url_base=url_base, start_date=start_date)
inputs = {"stream_slice": None, "stream_state": None, "next_page_token": None}
expected_params = {"filter": 'None gt "2021-03-21T20:49:13.000Z"', "limit": 200}
assert stream.request_params(**inputs) == expected_params
def test_incremental_okta_stream_parse_response(self, patch_base_class, requests_mock, url_base, api_url, start_date):
stream = IncrementalOktaStream(url_base=url_base, start_date=start_date)
requests_mock.get(f"{api_url}", json=[{"a": 123}, {"b": "xx"}])
resp = requests.get(f"{api_url}")
inputs = {"response": resp, "stream_state": MagicMock()}
expected_parsed_object = [{"a": 123}, {"b": "xx"}]
assert list(stream.parse_response(**inputs)) == expected_parsed_object
def test_incremental_okta_stream_backoff_time(self, patch_base_class, url_base, start_date):
response_mock = MagicMock()
stream = IncrementalOktaStream(url_base=url_base, start_date=start_date)
expected_backoff_time = None
assert stream.backoff_time(response_mock) == expected_backoff_time
def test_okta_stream_incremental_backoff_time_empty(self, patch_base_class, url_base, start_date):
stream = IncrementalOktaStream(url_base=url_base, start_date=start_date)
response = MagicMock(requests.Response)
response.status_code = 200
expected_params = None
inputs = {"response": response}
assert stream.backoff_time(**inputs) == expected_params
def test_okta_stream_incremental_back_off_now(self, patch_base_class, url_base, start_date):
stream = IncrementalOktaStream(url_base=url_base, start_date=start_date)
response = MagicMock(requests.Response)
response.status_code = requests.codes.TOO_MANY_REQUESTS
response.headers = {"x-rate-limit-reset": int(time.time())}
expected_params = (0, 2)
inputs = {"response": response}
get_backoff_time = stream.backoff_time(**inputs)
assert expected_params[0] <= get_backoff_time <= expected_params[1]
def test_okta_stream_incremental_get_updated_state(self, patch_base_class, latest_record_instance, url_base, start_date):
class TestIncrementalOktaStream(IncrementalOktaStream, ABC):
def __init__(self, url_base: str, *args, **kwargs):
super().__init__(url_base, *args, **kwargs)
self._cursor_field = None
@property
def cursor_field(self) -> str:
return self._cursor_field
stream = TestIncrementalOktaStream(url_base=url_base, start_date=start_date)
stream._cursor_field = "lastUpdated"
current_stream_state = {"lastUpdated": "2021-04-21T21:03:55.000Z"}
update_state = stream.get_updated_state(current_stream_state=current_stream_state, latest_record=latest_record_instance)
expected_result = {"lastUpdated": "2022-07-18T07:58:11.000Z"}
assert update_state == expected_result
def test_okta_stream_http_method(self, patch_base_class, url_base, start_date):
stream = OktaStream(url_base=url_base, start_date=start_date)
expected_method = "GET"
assert stream.http_method == expected_method
class TestNextPageToken:
def test_next_page_token(self, patch_base_class, users_instance, url_base, api_url, start_date):
stream = OktaStream(url_base=url_base, start_date=start_date)
response = MagicMock(requests.Response)
response.links = {"next": {"url": f"{api_url}?param1=test_value1¶m2=test_value2"}}
inputs = {"response": response}
expected_token = {"param1": "test_value1", "param2": "test_value2"}
result = stream.next_page_token(**inputs)
assert result == expected_token
def test_next_page_token_empty_params(self, patch_base_class, users_instance, url_base, api_url, start_date):
stream = OktaStream(url_base=url_base, start_date=start_date)
response = MagicMock(requests.Response)
response.links = {"next": {"url": f"{api_url}"}}
inputs = {"response": response}
expected_token = {}
result = stream.next_page_token(**inputs)
assert result == expected_token
def test_next_page_token_link_have_self_and_equal_next(self, patch_base_class, users_instance, url_base, api_url, start_date):
stream = OktaStream(url_base=url_base, start_date=start_date)
response = MagicMock(requests.Response)
response.links = {"next": {"url": f"{api_url}"}, "self": {"url": f"{api_url}"}}
inputs = {"response": response}
expected_token = None
result = stream.next_page_token(**inputs)
assert result == expected_token
class TestStreamUsers:
    """Tests for the Users stream: reading, request params, and response parsing."""

    def test_stream_users(self, requests_mock, patch_base_class, users_instance, url_base, api_url, start_date):
        # An incremental read of /users yields the mocked record unchanged.
        stream = Users(url_base=url_base, start_date=start_date)
        requests_mock.get(f"{api_url}/users", json=[users_instance])
        inputs = {"sync_mode": SyncMode.incremental}
        assert list(stream.read_records(**inputs)) == [users_instance]

    def test_users_request_params_out_of_next_page_token(self, patch_base_class, url_base, user_status_filter, start_date):
        # Without a next-page token only the page limit and the
        # lastUpdated/status filter are sent. NOTE(review): the hard-coded
        # date presumably matches the start_date fixture — confirm.
        stream = Users(url_base=url_base, start_date=start_date)
        inputs = {"stream_slice": None, "stream_state": None, "next_page_token": None}
        expected_params = {"limit": 200, "filter": f'lastUpdated gt "2021-03-21T20:49:13.000Z" and ({user_status_filter})'}
        assert stream.request_params(**inputs) == expected_params

    def test_users_source_request_params_have_next_cursor(self, patch_base_class, url_base, user_status_filter, start_date):
        # A next_page_token cursor is forwarded verbatim as the next_cursor param.
        stream = Users(url_base=url_base, start_date=start_date)
        inputs = {"stream_slice": None, "stream_state": None, "next_page_token": {"next_cursor": "123"}}
        expected_params = {
            "limit": 200,
            "next_cursor": "123",
            "filter": f'lastUpdated gt "2021-03-21T20:49:13.000Z" and ({user_status_filter})',
        }
        assert stream.request_params(**inputs) == expected_params

    def test_users_source_request_params_have_latest_entry(self, patch_base_class, url_base, user_status_filter, start_date):
        # When stream state has a lastUpdated value, it replaces start_date
        # in the filter expression.
        stream = Users(url_base=url_base, start_date=start_date)
        inputs = {"stream_slice": None, "stream_state": {"lastUpdated": "some_date"}, "next_page_token": {"next_cursor": "123"}}
        expected_params = {"limit": 200, "next_cursor": "123", "filter": f'lastUpdated gt "some_date" and ({user_status_filter})'}
        assert stream.request_params(**inputs) == expected_params

    def test_users_source_parse_response(self, requests_mock, patch_base_class, users_instance, url_base, api_url, start_date):
        # parse_response yields the raw JSON records from the response body.
        stream = Users(url_base=url_base, start_date=start_date)
        requests_mock.get(f"{api_url}", json=[users_instance])
        assert list(stream.parse_response(response=requests.get(f"{api_url}"))) == [users_instance]
class TestStreamCustomRoles:
    """Tests for the CustomRoles stream (records are nested under a "roles" key)."""

    def test_custom_roles(self, requests_mock, patch_base_class, custom_role_instance, url_base, api_url, start_date):
        roles_stream = CustomRoles(url_base=url_base, start_date=start_date)
        payload = {"roles": [custom_role_instance]}
        requests_mock.get(f"{api_url}/iam/roles?limit=200", json=payload)
        records = list(roles_stream.read_records(sync_mode=SyncMode.incremental))
        assert records == payload["roles"]

    def test_custom_roles_parse_response(self, requests_mock, patch_base_class, custom_role_instance, url_base, api_url, start_date):
        roles_stream = CustomRoles(url_base=url_base, start_date=start_date)
        requests_mock.get(f"{api_url}", json={"roles": [custom_role_instance]})
        parsed = list(roles_stream.parse_response(response=requests.get(f"{api_url}")))
        assert parsed == [custom_role_instance]
class TestStreamPermissions:
    """Tests for the Permissions sub-stream (sliced per role_id)."""

    def test_permissions(self, requests_mock, patch_base_class, permission_instance, url_base, api_url, start_date):
        # Records come from /iam/roles/{role_id}/permissions, nested under
        # a "permissions" key.
        stream = Permissions(url_base=url_base, start_date=start_date)
        record = {"permissions": [permission_instance]}
        role_id = "test_role_id"
        requests_mock.get(f"{api_url}/iam/roles/{role_id}/permissions", json=record)
        inputs = {"sync_mode": SyncMode.full_refresh, "stream_state": {}, "stream_slice": {"role_id": role_id}}
        assert list(stream.read_records(**inputs)) == record["permissions"]

    def test_permissions_parse_response(self, requests_mock, patch_base_class, permission_instance, url_base, api_url, start_date):
        # parse_response unwraps the "permissions" envelope.
        stream = Permissions(url_base=url_base, start_date=start_date)
        record = {"permissions": [permission_instance]}
        requests_mock.get(f"{api_url}", json=record)
        assert list(stream.parse_response(response=requests.get(f"{api_url}"))) == [permission_instance]
class TestStreamGroups:
    """Tests for the Groups stream."""

    def test_groups(self, requests_mock, patch_base_class, groups_instance, url_base, api_url, start_date):
        groups_stream = Groups(url_base=url_base, start_date=start_date)
        requests_mock.get(f"{api_url}/groups?limit=200", json=[groups_instance])
        records = list(groups_stream.read_records(sync_mode=SyncMode.incremental))
        assert records == [groups_instance]

    def test_groups_parse_response(self, requests_mock, patch_base_class, groups_instance, url_base, api_url, start_date):
        groups_stream = Groups(url_base=url_base, start_date=start_date)
        requests_mock.get(f"{api_url}", json=[groups_instance])
        parsed = list(groups_stream.parse_response(response=requests.get(f"{api_url}")))
        assert parsed == [groups_instance]
class TestStreamGroupMembers:
    """Tests for the GroupMembers sub-stream (sliced per group_id, cursored by user id)."""

    def test_group_members(self, requests_mock, patch_base_class, group_members_instance, url_base, api_url, start_date):
        # Records come from /groups/{group_id}/users for the sliced group.
        stream = GroupMembers(url_base=url_base, start_date=start_date)
        group_id = "test_group_id"
        requests_mock.get(f"{api_url}/groups/{group_id}/users?limit=200", json=[group_members_instance])
        inputs = {"sync_mode": SyncMode.incremental, "stream_state": {}, "stream_slice": {"group_id": group_id}}
        assert list(stream.read_records(**inputs)) == [group_members_instance]

    def test_group_members_parse_response(self, requests_mock, patch_base_class, group_members_instance, url_base, api_url, start_date):
        stream = GroupMembers(url_base=url_base, start_date=start_date)
        requests_mock.get(f"{api_url}", json=[group_members_instance])
        assert list(stream.parse_response(response=requests.get(f"{api_url}"))) == [group_members_instance]

    def test_group_members_request_params_with_latest_entry(self, patch_base_class, group_members_instance, url_base, start_date):
        # State id becomes the "after" cursor; next_page_token cursor is
        # forwarded as next_cursor.
        stream = GroupMembers(url_base=url_base, start_date=start_date)
        inputs = {
            "stream_slice": {"group_id": "some_group"},
            "stream_state": {"id": "some_test_id"},
            "next_page_token": {"next_cursor": "123"},
        }
        assert stream.request_params(**inputs) == {
            "limit": 200,
            "next_cursor": "123",
            "after": "some_test_id",
        }

    def test_group_members_slice_stream(
        self, requests_mock, patch_base_class, group_members_instance, groups_instance, url_base, api_url, start_date
    ):
        # Slices are derived from the parent Groups stream — one per group id.
        stream = GroupMembers(url_base=url_base, start_date=start_date)
        requests_mock.get(f"{api_url}/groups?limit=200", json=[groups_instance])
        assert list(stream.stream_slices()) == [{"group_id": "test_group_id"}]

    def test_group_member_request_get_update_state(self, latest_record_instance, url_base, start_date):
        # With cursor "id", the existing state wins over the latest record here.
        # NOTE(review): only exercises the keep-current-state branch — confirm
        # the advance branch is covered elsewhere.
        stream = GroupMembers(url_base=url_base, start_date=start_date)
        stream._cursor_field = "id"
        current_stream_state = {"id": "test_user_group_id"}
        update_state = stream.get_updated_state(current_stream_state=current_stream_state, latest_record=latest_record_instance)
        assert update_state == {"id": "test_user_group_id"}
class TestStreamGroupRoleAssignment:
    """Tests for the GroupRoleAssignments sub-stream (sliced per group_id)."""

    def test_group_role_assignments(self, requests_mock, patch_base_class, group_role_assignments_instance, url_base, api_url, start_date):
        # Records come from /groups/{group_id}/roles for the sliced group.
        stream = GroupRoleAssignments(url_base=url_base, start_date=start_date)
        group_id = "test_group_id"
        mock_address = f"{api_url}/groups/{group_id}/roles?limit=200"
        requests_mock.get(mock_address, json=[group_role_assignments_instance])
        inputs = {"sync_mode": SyncMode.full_refresh, "stream_state": {}, "stream_slice": {"group_id": group_id}}
        assert list(stream.read_records(**inputs)) == [group_role_assignments_instance]

    def test_group_role_assignments_parse_response(
        self, requests_mock, patch_base_class, group_role_assignments_instance, url_base, api_url, start_date
    ):
        stream = GroupRoleAssignments(url_base=url_base, start_date=start_date)
        requests_mock.get(f"{api_url}", json=[group_role_assignments_instance])
        assert list(stream.parse_response(response=requests.get(f"{api_url}"))) == [group_role_assignments_instance]

    def test_group_role_assignments_slice_stream(
        self, requests_mock, patch_base_class, group_members_instance, groups_instance, url_base, api_url, start_date
    ):
        # Slices are derived from the parent Groups stream — one per group id.
        stream = GroupRoleAssignments(url_base=url_base, start_date=start_date)
        requests_mock.get(f"{api_url}/groups?limit=200", json=[groups_instance])
        assert list(stream.stream_slices()) == [{"group_id": "test_group_id"}]
class TestStreamLogs:
    """Tests for the Logs stream and its since/until request parameters."""

    def test_logs(self, requests_mock, patch_base_class, logs_instance, url_base, api_url, start_date):
        stream = Logs(url_base=url_base, start_date=start_date)
        requests_mock.get(f"{api_url}/logs?limit=200", json=[logs_instance])
        inputs = {"sync_mode": SyncMode.incremental}
        assert list(stream.read_records(**inputs)) == [logs_instance]

    def test_logs_parse_response(self, requests_mock, patch_base_class, logs_instance, url_base, api_url, start_date):
        stream = Logs(url_base=url_base, start_date=start_date)
        requests_mock.get(f"{api_url}/logs?limit=200", json=[logs_instance])
        assert list(stream.parse_response(response=requests.get(f"{api_url}/logs?limit=200"))) == [logs_instance]

    def test_logs_request_params_for_since(self, patch_base_class, logs_instance, url_base, start_date):
        # The "published" state value becomes the "since" query parameter.
        stream = Logs(url_base=url_base, start_date=start_date)
        inputs = {"stream_state": {"published": "2022-07-19T15:54:11.545Z"}, "stream_slice": None}
        assert stream.request_params(**inputs) == {
            "limit": 200,
            "since": "2022-07-19T15:54:11.545Z",
        }

    def test_logs_request_params_for_until(self, patch_base_class, logs_instance, url_base, start_date):
        # NOTE(review): despite the name, a future-dated state still only
        # asserts "since" — confirm whether Logs is meant to emit an "until"
        # param in this case.
        stream = Logs(url_base=url_base, start_date=start_date)
        testing_date = datetime.datetime.utcnow() + datetime.timedelta(days=10)
        inputs = {"stream_state": {"published": testing_date.isoformat()}, "stream_slice": None}
        assert stream.request_params(**inputs) == {"limit": 200, "since": testing_date.isoformat()}
class TestStreamUserRoleAssignment:
    """Tests for the UserRoleAssignments sub-stream (sliced per user_id)."""

    def test_user_role_assignments(self, requests_mock, patch_base_class, user_role_assignments_instance, url_base, api_url, start_date):
        # Records come from /users/{user_id}/roles for the sliced user.
        stream = UserRoleAssignments(url_base=url_base, start_date=start_date)
        user_id = "test_user_id"
        mock_address = f"{api_url}/users/{user_id}/roles?limit=200"
        requests_mock.get(mock_address, json=[user_role_assignments_instance])
        inputs = {"sync_mode": SyncMode.full_refresh, "stream_state": {}, "stream_slice": {"user_id": user_id}}
        assert list(stream.read_records(**inputs)) == [user_role_assignments_instance]

    def test_user_role_assignments_parse_response(
        self, requests_mock, patch_base_class, user_role_assignments_instance, url_base, api_url, start_date
    ):
        stream = UserRoleAssignments(url_base=url_base, start_date=start_date)
        requests_mock.get(f"{api_url}", json=[user_role_assignments_instance])
        assert list(stream.parse_response(response=requests.get(f"{api_url}"))) == [user_role_assignments_instance]

    def test_user_role_assignments_slice_stream(
        self, requests_mock, patch_base_class, group_members_instance, users_instance, url_base, api_url, start_date
    ):
        # Slices are derived from the parent Users stream — one per user id.
        stream = UserRoleAssignments(url_base=url_base, start_date=start_date)
        requests_mock.get(f"{api_url}/users?limit=200", json=[users_instance])
        assert list(stream.stream_slices()) == [{"user_id": "test_user_id"}]
class TestStreamResourceSets:
    """Tests for the ResourceSets stream (cursor pagination via _links.next)."""

    def test_resource_sets(self, requests_mock, patch_base_class, resource_set_instance, url_base, api_url, start_date):
        # Records are nested under a "resource-sets" key.
        stream = ResourceSets(url_base=url_base, start_date=start_date)
        record = {"resource-sets": [resource_set_instance]}
        requests_mock.get(f"{api_url}/iam/resource-sets", json=record)
        inputs = {"sync_mode": SyncMode.incremental}
        assert list(stream.read_records(**inputs)) == record["resource-sets"]

    def test_resource_sets_parse_response(self, requests_mock, patch_base_class, resource_set_instance, url_base, api_url, start_date):
        stream = ResourceSets(url_base=url_base, start_date=start_date)
        record = {"resource-sets": [resource_set_instance]}
        requests_mock.get(f"{api_url}", json=record)
        assert list(stream.parse_response(response=requests.get(f"{api_url}"))) == [resource_set_instance]

    def test_resource_sets_next_page_token(self, requests_mock, patch_base_class, resource_set_instance, url_base, api_url, start_date):
        # The cursor is extracted from the "after" query param of the
        # _links.next.href URL; no next link means no token.
        stream = ResourceSets(url_base=url_base, start_date=start_date)
        cursor = "iam5cursorFybecursor"
        response = MagicMock(requests.Response)
        next_link = f"{url_base}/iam/resource-sets?after={cursor}"
        response.json = MagicMock(return_value={"_links": {"next": {"href": next_link}}, "resource-sets": [resource_set_instance]})
        inputs = {"response": response}
        result = stream.next_page_token(**inputs)
        assert result == {"after": cursor}
        # Same response without a next link stops pagination.
        response.json = MagicMock(return_value={"resource-sets": [resource_set_instance]})
        inputs = {"response": response}
        result = stream.next_page_token(**inputs)
        assert result is None

    def test_resource_sets_request_params(self, requests_mock, patch_base_class, resource_set_instance, url_base, api_url, start_date):
        # The state id is used both as the "after" cursor and in the id filter.
        stream = ResourceSets(url_base=url_base, start_date=start_date)
        cursor = "iam5cursorFybecursor"
        inputs = {"stream_slice": None, "stream_state": {"id": cursor}, "next_page_token": None}
        expected_params = {"limit": 200, "after": "iam5cursorFybecursor", "filter": 'id gt "iam5cursorFybecursor"'}
        assert stream.request_params(**inputs) == expected_params
|
0229c825933aa95325771d8d7b583593e3d9ff60
|
529e713a78e82de2ae5d44cfb8ef209e0894d72a
|
/mandelbrot-set-python/viewport.py
|
33918b40cf1311cd567961d382b315c5973a7033
|
[
"MIT"
] |
permissive
|
realpython/materials
|
cd2f548276be2c82f134ca03eadb1cd279e0f26e
|
d2d62756d3854f54a12a767f2bf9470486c0ceef
|
refs/heads/master
| 2023-09-05T22:12:29.806738
| 2023-08-31T20:56:28
| 2023-08-31T20:56:28
| 132,374,697
| 4,678
| 6,482
|
MIT
| 2023-09-12T22:22:06
| 2018-05-06T20:46:18
|
HTML
|
UTF-8
|
Python
| false
| false
| 1,009
|
py
|
viewport.py
|
# viewport.py
from dataclasses import dataclass
from PIL import Image
@dataclass
class Viewport:
    """Maps the pixels of an image onto a rectangle of the complex plane."""

    image: Image.Image  # target image whose pixels are iterated
    center: complex  # complex number at the center of the viewport
    width: float  # viewport width in complex-plane units

    @property
    def height(self) -> float:
        # Height in complex-plane units, preserving the image's aspect ratio.
        return self.scale * self.image.height

    @property
    def offset(self) -> complex:
        # Complex coordinate of the top-left corner (real decreases leftward,
        # imaginary increases upward).
        return self.center + complex(-self.width, self.height) / 2

    @property
    def scale(self) -> float:
        # Complex-plane units per pixel.
        return self.width / self.image.width

    def __iter__(self):
        # Yield one Pixel wrapper per image pixel, row by row.
        for y in range(self.image.height):
            for x in range(self.image.width):
                yield Pixel(self, x, y)
@dataclass
class Pixel:
    """A single pixel of a Viewport, convertible to its complex coordinate."""

    viewport: Viewport  # owning viewport (provides image, scale, offset)
    x: int  # pixel column
    y: int  # pixel row

    @property
    def color(self):
        # Read the underlying image pixel value.
        return self.viewport.image.getpixel((self.x, self.y))

    @color.setter
    def color(self, value):
        # Write the underlying image pixel value.
        self.viewport.image.putpixel((self.x, self.y), value)

    def __complex__(self) -> complex:
        # y is negated because image rows grow downward while the imaginary
        # axis grows upward.
        return (
            complex(self.x, -self.y) * self.viewport.scale
            + self.viewport.offset
        )
|
445c6231f1d2613e0ae0c4b8247facf78780cdec
|
bee77315d08def61c1155930285211ef3d8d7654
|
/nevergrad/functions/pyomo/test_core.py
|
ad48749c5147f5f48284017905580bfaeac617f2
|
[
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
facebookresearch/nevergrad
|
d2da592c1bf3b7c398392b3d39217a3753a4912c
|
daddb18184bf64ba9082ecc55a56e07429a23103
|
refs/heads/main
| 2023-09-04T10:53:42.903505
| 2023-08-30T17:10:37
| 2023-08-30T17:10:37
| 158,468,845
| 3,526
| 367
|
MIT
| 2023-09-11T13:37:36
| 2018-11-21T00:33:17
|
Python
|
UTF-8
|
Python
| false
| false
| 3,463
|
py
|
test_core.py
|
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import typing as tp
import os
import numpy as np
import pyomo.environ as pyomo
import nevergrad as ng
from . import core
def test_concrete_model_without_constraints() -> None:
    # Minimize (x1 - 0.5)^2 + (x2 - 0.5)^2 over non-negative reals; the
    # optimum is x1 = x2 = 0.5, checked to one decimal place.
    model = pyomo.ConcreteModel()
    model.x = pyomo.Var([1, 2], domain=pyomo.NonNegativeReals)
    model.obj = pyomo.Objective(expr=(model.x[1] - 0.5) ** 2 + (model.x[2] - 0.5) ** 2)
    func = core.Pyomo(model)
    optimizer = ng.optimizers.NGO(parametrization=func.parametrization, budget=100)
    recommendation = optimizer.minimize(func.function)
    np.testing.assert_almost_equal(recommendation.kwargs["x[1]"], 0.5, decimal=1)
    np.testing.assert_almost_equal(recommendation.kwargs["x[2]"], 0.5, decimal=1)
def square(m: tp.Any) -> float:
    """Pyomo objective: sum of squared deviations of each x[i] from 0.5."""
    deviations = ((m.x[index] - 0.5) ** 2 for index in m.x)
    return pyomo.quicksum(deviations)
def test_concrete_model_with_constraints() -> None:
    # With x[0] >= 1 and x[1] >= 0.8, the unconstrained optimum (0.5, 0.5)
    # is infeasible; the solution should land on the constraint boundary.
    model = pyomo.ConcreteModel()
    model.x = pyomo.Var([0, 1], domain=pyomo.Reals)
    model.obj = pyomo.Objective(rule=square)
    model.Constraint1 = pyomo.Constraint(rule=lambda m: m.x[0] >= 1)
    model.Constraint2 = pyomo.Constraint(rule=lambda m: m.x[1] >= 0.8)
    func = core.Pyomo(model)
    optimizer = ng.optimizers.OnePlusOne(parametrization=func.parametrization, budget=100)
    recommendation = optimizer.minimize(func.function)
    np.testing.assert_almost_equal(recommendation.kwargs["x[0]"], 1.0, decimal=1)
    np.testing.assert_almost_equal(recommendation.kwargs["x[1]"], 0.8, decimal=1)
def test_abstract_model_with_constraints() -> None:
    # Abstract model with per-index lower bounds Xmin; each x[i] >= Xmin[i],
    # so the optimum should sit at the Xmin bounds loaded from the data file.
    abs_model = pyomo.AbstractModel()
    abs_model.F = pyomo.Set()
    abs_model.Xmin = pyomo.Param(abs_model.F, within=pyomo.Reals, default=0.0)
    abs_model.x = pyomo.Var(abs_model.F, within=pyomo.Reals)
    abs_model.constraints = pyomo.Constraint(abs_model.F, rule=lambda m, i: m.x[i] >= m.Xmin[i])
    abs_model.obj = pyomo.Objective(rule=square)
    # Load the values of the parameters from external file
    dirname = os.path.dirname(__file__)
    filename = os.path.join(dirname, "test_model_1.dat")
    model = abs_model.create_instance(filename)
    func = core.Pyomo(model)
    # Seed for reproducibility of the stochastic optimizer.
    func.parametrization.random_state.seed(12)
    optimizer = ng.optimizers.OnePlusOne(parametrization=func.parametrization, budget=200)
    recommendation = optimizer.minimize(func.function)
    np.testing.assert_almost_equal(recommendation.kwargs['x["New York"]'], model.Xmin["New York"], decimal=1)
    np.testing.assert_almost_equal(
        recommendation.kwargs['x["Hong Kong"]'], model.Xmin["Hong Kong"], decimal=1
    )
def test_pyomo_set() -> None:
    def square2(m: tp.Any) -> float:
        # Scalar objective: (x - 1)^2.
        return (m.x - 1) ** 2  # type: ignore

    # R = P XOR Q = {1..5} union {11..15}; with the constraint x >= 2 the
    # minimizer of (x - 1)^2 over the feasible domain is x = 2.
    model = pyomo.ConcreteModel()
    model.P = pyomo.Set(initialize=list(range(1, 11)))
    model.Q = pyomo.Set(initialize=list(range(6, 16)))
    model.R = model.P ^ model.Q  # XOR
    model.x = pyomo.Var(domain=model.R)
    model.obj = pyomo.Objective(rule=square2)
    model.constraint1 = pyomo.Constraint(rule=lambda m: m.x >= 2)
    func = core.Pyomo(model)
    # Seed for reproducibility of the stochastic optimizer.
    func.parametrization.random_state.seed(12)
    optimizer = ng.optimizers.OnePlusOne(parametrization=func.parametrization, budget=100)
    recommendation = optimizer.minimize(func.function)
    np.testing.assert_almost_equal(recommendation.kwargs["x"], 2.0, decimal=1)
|
a609c41d0c17724bf99e424ffefa2a07e91fee94
|
57c77e6ca4867fe03f733c4b0778491c7042154b
|
/rlcard/envs/env.py
|
93e23954878d8014835ae2da8313488634875b28
|
[
"MIT"
] |
permissive
|
datamllab/rlcard
|
60754423478a8854c0d3af6e3766c1c6f6ffb855
|
7fc56edebe9a2e39c94f872edd8dbe325c61b806
|
refs/heads/master
| 2023-08-17T06:01:08.615989
| 2023-07-11T22:19:45
| 2023-07-11T22:19:45
| 206,562,316
| 2,447
| 639
|
MIT
| 2023-07-11T22:19:46
| 2019-09-05T12:48:01
|
Python
|
UTF-8
|
Python
| false
| false
| 8,525
|
py
|
env.py
|
from rlcard.utils import *
class Env(object):
    '''
    The base Env class. For all the environments in RLCard,
    we should base on this class and implement as many functions
    as we can.
    '''
    def __init__(self, config):
        ''' Initialize the environment

        Args:
            config (dict): A config dictionary. All the fields are
                optional. Currently, the dictionary includes:
                'seed' (int) - A environment local random seed.
                'allow_step_back' (boolean) - True if allowing
                step_back.
                There can be some game specific configurations, e.g., the
                number of players in the game. These fields should start with
                'game_', e.g., 'game_num_players' which specify the number of
                players in the game. Since these configurations may be game-specific,
                The default settings should be put in the Env class. For example,
                the default game configurations for Blackjack should be in
                'rlcard/envs/blackjack.py'
                TODO: Support more game configurations in the future.
        '''
        # NOTE(review): self.game and self.name are expected to be set by the
        # subclass before super().__init__(config) runs — confirm in children.
        self.allow_step_back = self.game.allow_step_back = config['allow_step_back']
        self.action_recorder = []

        # Game specific configurations
        # Currently only supports blackjack, limit-holdem, no-limit-holdem
        # TODO support game configurations for all the games
        supported_envs = ['blackjack', 'leduc-holdem', 'limit-holdem', 'no-limit-holdem']
        if self.name in supported_envs:
            # Overlay user-supplied values onto the subclass-provided defaults;
            # unknown keys are silently ignored.
            _game_config = self.default_game_config.copy()
            for key in config:
                if key in _game_config:
                    _game_config[key] = config[key]
            self.game.configure(_game_config)

        # Get the number of players/actions in this game
        self.num_players = self.game.get_num_players()
        self.num_actions = self.game.get_num_actions()

        # A counter for the timesteps
        self.timestep = 0

        # Set random seed, default is None
        self.seed(config['seed'])

    def reset(self):
        ''' Start a new game

        Returns:
            (tuple): Tuple containing:

                (numpy.array): The begining state of the game
                (int): The begining player
        '''
        state, player_id = self.game.init_game()
        # Clear the action history recorded for the human interface.
        self.action_recorder = []
        return self._extract_state(state), player_id

    def step(self, action, raw_action=False):
        ''' Step forward

        Args:
            action (int): The action taken by the current player
            raw_action (boolean): True if the action is a raw action

        Returns:
            (tuple): Tuple containing:

                (dict): The next state
                (int): The ID of the next player
        '''
        if not raw_action:
            action = self._decode_action(action)

        self.timestep += 1
        # Record the action for human interface
        self.action_recorder.append((self.get_player_id(), action))
        next_state, player_id = self.game.step(action)

        return self._extract_state(next_state), player_id

    def step_back(self):
        ''' Take one step backward.

        Returns:
            (tuple): Tuple containing:

                (dict): The previous state
                (int): The ID of the previous player

        Note: Error will be raised if step back from the root node.
        '''
        if not self.allow_step_back:
            raise Exception('Step back is off. To use step_back, please set allow_step_back=True in rlcard.make')

        if not self.game.step_back():
            return False

        player_id = self.get_player_id()
        state = self.get_state(player_id)

        return state, player_id

    def set_agents(self, agents):
        '''
        Set the agents that will interact with the environment.
        This function must be called before `run`.

        Args:
            agents (list): List of Agent classes
        '''
        self.agents = agents

    def run(self, is_training=False):
        '''
        Run a complete game, either for evaluation or training RL agent.

        Args:
            is_training (boolean): True if for training purpose.

        Returns:
            (tuple) Tuple containing:

                (list): A list of trajectories generated from the environment.
                (list): A list payoffs. Each entry corresponds to one player.

        Note: The trajectories are 3-dimension list. The first dimension is for different players.
              The second dimension is for different transitions. The third dimension is for the contents of each transiton
        '''
        trajectories = [[] for _ in range(self.num_players)]
        state, player_id = self.reset()

        # Loop to play the game
        trajectories[player_id].append(state)
        while not self.is_over():
            # Agent plays: eval_step for evaluation, step for training.
            if not is_training:
                action, _ = self.agents[player_id].eval_step(state)
            else:
                action = self.agents[player_id].step(state)

            # Environment steps
            next_state, next_player_id = self.step(action, self.agents[player_id].use_raw)
            # Save action
            trajectories[player_id].append(action)

            # Set the state and player
            state = next_state
            player_id = next_player_id

            # Save state (the terminal state is handled separately below).
            if not self.game.is_over():
                trajectories[player_id].append(state)

        # Add a final state to all the players
        for player_id in range(self.num_players):
            state = self.get_state(player_id)
            trajectories[player_id].append(state)

        # Payoffs
        payoffs = self.get_payoffs()

        return trajectories, payoffs

    def is_over(self):
        ''' Check whether the curent game is over

        Returns:
            (boolean): True if current game is over
        '''
        return self.game.is_over()

    def get_player_id(self):
        ''' Get the current player id

        Returns:
            (int): The id of the current player
        '''
        return self.game.get_player_id()

    def get_state(self, player_id):
        ''' Get the state given player id

        Args:
            player_id (int): The player id

        Returns:
            (numpy.array): The observed state of the player
        '''
        return self._extract_state(self.game.get_state(player_id))

    def get_payoffs(self):
        ''' Get the payoffs of players. Must be implemented in the child class.

        Returns:
            (list): A list of payoffs for each player.

        Note: Must be implemented in the child class.
        '''
        raise NotImplementedError

    def get_perfect_information(self):
        ''' Get the perfect information of the current state

        Returns:
            (dict): A dictionary of all the perfect information of the current state
        '''
        raise NotImplementedError

    def get_action_feature(self, action):
        ''' For some environments such as DouDizhu, we can have action features

        Returns:
            (numpy.array): The action features
        '''
        # By default we use one-hot encoding
        # (np comes from the star import of rlcard.utils at the top of the file)
        feature = np.zeros(self.num_actions, dtype=np.int8)
        feature[action] = 1
        return feature

    def seed(self, seed=None):
        ''' Seed the environment-local RNG and propagate it to the game.

        Args:
            seed (int): The seed value, or None for a random seed

        Returns:
            (int): The seed actually used
        '''
        self.np_random, seed = seeding.np_random(seed)
        self.game.np_random = self.np_random
        return seed

    def _extract_state(self, state):
        ''' Extract useful information from state for RL. Must be implemented in the child class.

        Args:
            state (dict): The raw state

        Returns:
            (numpy.array): The extracted state
        '''
        raise NotImplementedError

    def _decode_action(self, action_id):
        ''' Decode Action id to the action in the game.

        Args:
            action_id (int): The id of the action

        Returns:
            (string): The action that will be passed to the game engine.

        Note: Must be implemented in the child class.
        '''
        raise NotImplementedError

    def _get_legal_actions(self):
        ''' Get all legal actions for current state.

        Returns:
            (list): A list of legal actions' id.

        Note: Must be implemented in the child class.
        '''
        raise NotImplementedError
|
7dc67dc72fb23112819c85711096cc159aefad70
|
12fa6ac5fb9472acbe2eb1871d3bfaa893940335
|
/prototype_source/maskedtensor_sparsity.py
|
74024f8e229dbbc75c1313ecf74fd6133cb5a2ba
|
[
"BSD-3-Clause"
] |
permissive
|
pytorch/tutorials
|
e6670dc301fd48edbca5a554df2af44e016712c7
|
32d834139b8627eeacb5fb2862be9f095fcb0b52
|
refs/heads/main
| 2023-08-31T19:27:17.448171
| 2023-08-28T15:45:25
| 2023-08-28T15:45:25
| 69,709,572
| 7,919
| 4,731
|
BSD-3-Clause
| 2023-09-14T17:25:49
| 2016-09-30T23:48:36
|
Python
|
UTF-8
|
Python
| false
| false
| 12,377
|
py
|
maskedtensor_sparsity.py
|
# -*- coding: utf-8 -*-
"""
(Prototype) MaskedTensor Sparsity
=================================
"""
######################################################################
# Before working on this tutorial, please make sure to review our
# `MaskedTensor Overview tutorial <https://pytorch.org/tutorials/prototype/maskedtensor_overview.html>`.
#
# Introduction
# ------------
#
# Sparsity has been an area of rapid growth and importance within PyTorch; if any sparsity terms are confusing below,
# please refer to the `sparsity tutorial <https://pytorch.org/docs/stable/sparse.html>`__ for additional details.
#
# Sparse storage formats have been proven to be powerful in a variety of ways. As a primer, the first use case
# most practitioners think about is when the majority of elements are equal to zero (a high degree of sparsity),
# but even in cases of lower sparsity, certain formats (e.g. BSR) can take advantage of substructures within a matrix.
#
# .. note::
#
# At the moment, MaskedTensor supports COO and CSR tensors with plans to support additional formats
# (such as BSR and CSC) in the future. If you have any requests for additional formats,
# please file a feature request `here <https://github.com/pytorch/pytorch/issues>`__!
#
# Principles
# ----------
#
# When creating a :class:`MaskedTensor` with sparse tensors, there are a few principles that must be observed:
#
# 1. ``data`` and ``mask`` must have the same storage format, whether that's :attr:`torch.strided`, :attr:`torch.sparse_coo`, or :attr:`torch.sparse_csr`
# 2. ``data`` and ``mask`` must have the same size, indicated by :func:`size()`
#
# .. _sparse-coo-tensors:
#
# Sparse COO tensors
# ------------------
#
# In accordance with Principle #1, a sparse COO MaskedTensor is created by passing in two sparse COO tensors,
# which can be initialized by any of its constructors, for example :func:`torch.sparse_coo_tensor`.
#
# As a recap of `sparse COO tensors <https://pytorch.org/docs/stable/sparse.html#sparse-coo-tensors>`__, the COO format
# stands for "coordinate format", where the specified elements are stored as tuples of their indices and the
# corresponding values. That is, the following are provided:
#
# * ``indices``: array of size ``(ndim, nse)`` and dtype ``torch.int64``
# * ``values``: array of size `(nse,)` with any integer or floating point dtype
#
# where ``ndim`` is the dimensionality of the tensor and ``nse`` is the number of specified elements.
#
# For both sparse COO and CSR tensors, you can construct a :class:`MaskedTensor` by doing either:
#
# 1. ``masked_tensor(sparse_tensor_data, sparse_tensor_mask)``
# 2. ``dense_masked_tensor.to_sparse_coo()`` or ``dense_masked_tensor.to_sparse_csr()``
#
# The second method is easier to illustrate so we've shown that below, but for more on the first and the nuances behind
# the approach, please read the :ref:`Sparse COO Appendix <sparse-coo-appendix>`.
#
import torch
from torch.masked import masked_tensor
import warnings
# Disable prototype warnings and such
warnings.filterwarnings(action='ignore', category=UserWarning)
values = torch.tensor([[0, 0, 3], [4, 0, 5]])
mask = torch.tensor([[False, False, True], [False, False, True]])
mt = masked_tensor(values, mask)
sparse_coo_mt = mt.to_sparse_coo()
print("mt:\n", mt)
print("mt (sparse coo):\n", sparse_coo_mt)
print("mt data (sparse coo):\n", sparse_coo_mt.get_data())
######################################################################
# Sparse CSR tensors
# ------------------
#
# Similarly, :class:`MaskedTensor` also supports the
# `CSR (Compressed Sparse Row) <https://pytorch.org/docs/stable/sparse.html#sparse-csr-tensor>`__
# sparse tensor format. Instead of storing the tuples of the indices like sparse COO tensors, sparse CSR tensors
# aim to decrease the memory requirements by storing compressed row indices.
# In particular, a CSR sparse tensor consists of three 1-D tensors:
#
# * ``crow_indices``: array of compressed row indices with size ``(size[0] + 1,)``. ``crow_indices[i]`` holds the
#                     cumulative count of specified elements up to (but not including) row ``i``, so the last element
#                     is the total number of specified elements, and ``crow_indices[i+1] - crow_indices[i]`` gives
#                     the number of specified elements in row ``i``.
# * ``col_indices``: array of size ``(nnz,)``. Indicates the column indices for each value.
# * ``values``: array of size ``(nnz,)``. Contains the values of the CSR tensor.
#
# Of note, both sparse COO and CSR tensors are in a `beta <https://pytorch.org/docs/stable/index.html>`__ state.
#
# By way of example:
#
mt_sparse_csr = mt.to_sparse_csr()
print("mt (sparse csr):\n", mt_sparse_csr)
print("mt data (sparse csr):\n", mt_sparse_csr.get_data())
######################################################################
# Supported Operations
# --------------------
#
# Unary
# ^^^^^
# All `unary operators <https://pytorch.org/docs/master/masked.html#unary-operators>`__ are supported, e.g.:
#
mt.sin()
######################################################################
# Binary
# ^^^^^^
# `Binary operators <https://pytorch.org/docs/master/masked.html#unary-operators>`__ are also supported, but the
# input masks from the two masked tensors must match. For more information on why this decision was made, please
# find our `MaskedTensor: Advanced Semantics tutorial <https://pytorch.org/tutorials/prototype/maskedtensor_advanced_semantics.html>`__.
#
# Please find an example below:
#
i = [[0, 1, 1],
[2, 0, 2]]
v1 = [3, 4, 5]
v2 = [20, 30, 40]
m = torch.tensor([True, False, True])
s1 = torch.sparse_coo_tensor(i, v1, (2, 3))
s2 = torch.sparse_coo_tensor(i, v2, (2, 3))
mask = torch.sparse_coo_tensor(i, m, (2, 3))
mt1 = masked_tensor(s1, mask)
mt2 = masked_tensor(s2, mask)
print("mt1:\n", mt1)
print("mt2:\n", mt2)
######################################################################
#
print("torch.div(mt2, mt1):\n", torch.div(mt2, mt1))
print("torch.mul(mt1, mt2):\n", torch.mul(mt1, mt2))
######################################################################
# Reductions
# ^^^^^^^^^^
# Finally, `reductions <https://pytorch.org/docs/master/masked.html#reductions>`__ are supported:
#
mt
######################################################################
#
print("mt.sum():\n", mt.sum())
print("mt.sum(dim=1):\n", mt.sum(dim=1))
print("mt.amin():\n", mt.amin())
######################################################################
# MaskedTensor Helper Methods
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^
# For convenience, :class:`MaskedTensor` has a number of methods to help convert between the different layouts
# and identify the current layout:
#
# Setup:
#
v = [[3, 0, 0],
[0, 4, 5]]
m = [[True, False, False],
[False, True, True]]
mt = masked_tensor(torch.tensor(v), torch.tensor(m))
mt
######################################################################
# :meth:`MaskedTensor.to_sparse_coo()` / :meth:`MaskedTensor.to_sparse_csr()` / :meth:`MaskedTensor.to_dense()`
# to help convert between the different layouts.
#
mt_sparse_coo = mt.to_sparse_coo()
mt_sparse_csr = mt.to_sparse_csr()
mt_dense = mt_sparse_coo.to_dense()
######################################################################
# :meth:`MaskedTensor.is_sparse()` -- this will check if the :class:`MaskedTensor`'s layout
# matches any of the supported sparse layouts (currently COO and CSR).
#
print("mt_dense.is_sparse: ", mt_dense.is_sparse())
print("mt_sparse_coo.is_sparse: ", mt_sparse_coo.is_sparse())
print("mt_sparse_csr.is_sparse: ", mt_sparse_csr.is_sparse())
######################################################################
# :meth:`MaskedTensor.is_sparse_coo()`
#
print("mt_dense.is_sparse_coo: ", mt_dense.is_sparse_coo())
print("mt_sparse_coo.is_sparse_coo: ", mt_sparse_coo.is_sparse_coo())
print("mt_sparse_csr.is_sparse_coo: ", mt_sparse_csr.is_sparse_coo())
######################################################################
# :meth:`MaskedTensor.is_sparse_csr()`
#
print("mt_dense.is_sparse_csr: ", mt_dense.is_sparse_csr())
print("mt_sparse_coo.is_sparse_csr: ", mt_sparse_coo.is_sparse_csr())
print("mt_sparse_csr.is_sparse_csr: ", mt_sparse_csr.is_sparse_csr())
######################################################################
# Appendix
# --------
#
# .. _sparse-coo-appendix:
#
# Sparse COO Construction
# ^^^^^^^^^^^^^^^^^^^^^^^
#
# Recall in our :ref:`original example <sparse-coo-tensors>`, we created a :class:`MaskedTensor`
# and then converted it to a sparse COO MaskedTensor with :meth:`MaskedTensor.to_sparse_coo`.
#
# Alternatively, we can also construct a sparse COO MaskedTensor directly by passing in two sparse COO tensors:
#
values = torch.tensor([[0, 0, 3], [4, 0, 5]]).to_sparse()
mask = torch.tensor([[False, False, True], [False, False, True]]).to_sparse()
mt = masked_tensor(values, mask)
print("values:\n", values)
print("mask:\n", mask)
print("mt:\n", mt)
######################################################################
# Instead of using :meth:`torch.Tensor.to_sparse`, we can also create the sparse COO tensors directly,
# which brings us to a warning:
#
# .. warning::
#
# When using a function like :meth:`MaskedTensor.to_sparse_coo` (analogous to :meth:`Tensor.to_sparse`),
# if the user does not specify the indices like in the above example,
# then the 0 values will be "unspecified" by default.
#
# Below, we explicitly specify the 0's:
#
i = [[0, 1, 1],
[2, 0, 2]]
v = [3, 4, 5]
m = torch.tensor([True, False, True])
values = torch.sparse_coo_tensor(i, v, (2, 3))
mask = torch.sparse_coo_tensor(i, m, (2, 3))
mt2 = masked_tensor(values, mask)
print("values:\n", values)
print("mask:\n", mask)
print("mt2:\n", mt2)
######################################################################
# Note that ``mt`` and ``mt2`` look identical on the surface, and in the vast majority of operations, will yield the same
# result. But this brings us to a detail on the implementation:
#
# ``data`` and ``mask`` -- only for sparse MaskedTensors -- can have a different number of elements (:func:`nnz`)
# **at creation**, but the indices of ``mask`` must then be a subset of the indices of ``data``. In this case,
# ``data`` will assume the shape of ``mask`` by ``data = data.sparse_mask(mask)``; in other words, any of the elements
# in ``data`` that are not ``True`` in ``mask`` (that is, not specified) will be thrown away.
#
# Therefore, under the hood, the data looks slightly different; ``mt2`` has the "4" value masked out and ``mt``
# is completely without it. Their underlying data has different shapes,
# which would make operations like ``mt + mt2`` invalid.
#
print("mt data:\n", mt.get_data())
print("mt2 data:\n", mt2.get_data())
######################################################################
# .. _sparse-csr-appendix:
#
# Sparse CSR Construction
# ^^^^^^^^^^^^^^^^^^^^^^^
#
# We can also construct a sparse CSR MaskedTensor using sparse CSR tensors,
# and like the example above, this results in a similar treatment under the hood.
#
crow_indices = torch.tensor([0, 2, 4])
col_indices = torch.tensor([0, 1, 0, 1])
values = torch.tensor([1, 2, 3, 4])
mask_values = torch.tensor([True, False, False, True])
csr = torch.sparse_csr_tensor(crow_indices, col_indices, values, dtype=torch.double)
mask = torch.sparse_csr_tensor(crow_indices, col_indices, mask_values, dtype=torch.bool)
mt = masked_tensor(csr, mask)
print("mt:\n", mt)
print("mt data:\n", mt.get_data())
######################################################################
# Conclusion
# ----------
# In this tutorial, we have introduced how to use :class:`MaskedTensor` with sparse COO and CSR formats and
# discussed some of the subtleties under the hood in case users decide to access the underlying data structures
# directly. Sparse storage formats and masked semantics indeed have strong synergies, so much so that they are
# sometimes used as proxies for each other (as we will see in the next tutorial). In the future, we certainly plan
# to invest and continue developing in this direction.
#
# Further Reading
# ---------------
#
# To continue learning more, you can find our
# `Efficiently writing "sparse" semantics for Adagrad with MaskedTensor tutorial <https://pytorch.org/tutorials/prototype/maskedtensor_adagrad.html>`__
# to see an example of how MaskedTensor can simplify existing workflows with native masking semantics.
#
|
23fb7bdbf7e369d5b31fcbf7b84da4e2b26e1ef4
|
6c628b7b72eef4dbcc982803eb18c20a01d50a25
|
/brownie/network/alert.py
|
cfcf6611449a9381bad29fb5d515d52a11b78bf7
|
[
"MIT"
] |
permissive
|
eth-brownie/brownie
|
174c5cb549427f4814fa5a1dc9ede225acc983f8
|
bc7b511583060fdaff1d4b5269aedcc1cb710bc6
|
refs/heads/master
| 2023-09-04T15:53:39.804726
| 2023-06-12T07:27:29
| 2023-06-12T07:27:29
| 155,913,585
| 2,408
| 518
|
MIT
| 2023-09-06T14:20:17
| 2018-11-02T19:39:26
|
Python
|
UTF-8
|
Python
| false
| false
| 4,453
|
py
|
alert.py
|
#!/usr/bin/python3
import time
from threading import Thread
from typing import Callable, Dict, List, Tuple, Union
from brownie.utils import color
__console_dir__ = ["Alert", "new", "show", "stop_all"]
_instances = set()
class Alert:
    """Setup notifications and callbacks based on state changes to the blockchain.

    The alert is immediately active as soon as the class is instantiated."""

    def __init__(
        self,
        fn: Callable,
        args: Union[Tuple, None] = None,
        kwargs: Union[Dict, None] = None,
        delay: float = 2,
        msg: Union[str, None] = None,
        callback: Union[Callable, None] = None,
        repeat: Union[int, bool] = False,
    ) -> None:
        """Creates a new Alert.

        Args:
            fn: Callable to monitor for changes.
            args: Positional args when checking the callable.
            kwargs: Keyword args when checking the callable.
            delay: Frequency to check for changes, in seconds.
            msg: Notification string to display on change.  Rendered via
                ``msg.format(initial_value, new_value)``.
            callback: Callback function to call upon change. It must accept two
                arguments: initial value, new value
            repeat: if False, the alert will terminate after firing once.
                if True, the alert will continue to fire on changes until it
                is terminated via Alert.stop()
                if int, the alert will fire n+1 times before terminating.
        """
        if args is None:
            args = ()
        if kwargs is None:
            kwargs = {}
        if not callable(fn):
            raise TypeError("You can only set an alert on a callable object")
        if isinstance(repeat, int) and repeat < 0:
            raise ValueError("repeat must be True, False or a positive integer")
        self._kill = False
        # take an initial reading so the polling loop has a baseline to compare against
        start_value = fn(*args, **kwargs)
        # daemon thread, so a forgotten alert never blocks interpreter shutdown
        self._thread = Thread(
            target=self._loop,
            daemon=True,
            args=(fn, args, kwargs, start_value, delay, msg, callback, repeat),
        )
        self._thread.start()
        self.start_time = time.time()
        _instances.add(self)

    def _loop(
        self,
        fn: Callable,
        args: Tuple,
        kwargs: Dict,
        start_value: int,
        delay: float,
        msg: str,
        callback: Callable,
        repeat: Union[int, bool, None] = False,
    ) -> None:
        # Background polling loop, run in the daemon thread started by __init__.
        # `repeat is None` is the internal "terminate" sentinel.
        try:
            # poll the kill flag at least every 50 ms so stop() stays
            # responsive even when `delay` is long
            sleep = min(delay, 0.05)
            while repeat is not None:
                next_ = time.time() + delay
                while next_ > time.time() and not self._kill:
                    time.sleep(sleep)
                if self._kill:
                    break
                value = fn(*args, **kwargs)
                if value == start_value:
                    continue
                if msg:
                    fmt_msg = msg.format(start_value, value)
                    print(f"{color('bright red')}ALERT{color}: {fmt_msg}")
                if callback:
                    callback(start_value, value)
                # subsequent firings compare against the most recent value
                start_value = value
                if not repeat:
                    repeat = None
                elif isinstance(repeat, int) and not isinstance(repeat, bool):
                    repeat -= 1
        finally:
            # always deregister: stopped, exhausted, or fn raised
            _instances.discard(self)

    def is_alive(self) -> bool:
        """Checks if the alert is currently active."""
        return self._thread.is_alive()

    def wait(self, timeout: Union[int, float, None] = None) -> None:
        """Waits for the alert to fire.

        Args:
            timeout: Number of seconds to wait. If None, will wait indefinitely."""
        self._thread.join(timeout)

    def stop(self, wait: bool = True) -> None:
        """Stops the alert.

        Args:
            wait: If True, waits for the alert to terminate after stopping it."""
        self._kill = True
        if wait:
            self.wait()
def new(
    fn: Callable,
    args: Tuple = None,
    kwargs: Dict = None,
    delay: float = 0.5,
    msg: str = None,
    callback: Callable = None,
    repeat: bool = False,
) -> "Alert":
    """Convenience wrapper that builds and starts an `Alert` instance."""
    return Alert(
        fn,
        args=args,
        kwargs=kwargs,
        delay=delay,
        msg=msg,
        callback=callback,
        repeat=repeat,
    )
def show() -> List:
    """Return every currently active Alert instance, oldest first."""
    active = list(_instances)
    active.sort(key=lambda alert: alert.start_time)
    return active
def stop_all() -> None:
    """Terminate every currently active Alert instance."""
    # iterate over a snapshot: Alert._loop discards entries as threads exit
    for alert in list(_instances):
        alert.stop()
    _instances.clear()
|
25222ce4cb3a62c4d9835074a32844f6e4656f6d
|
e3cfab409afb5ff9a0b3812bf848be6ca9239cee
|
/pygeodesy/dms.py
|
7562ba9d748ab7dcfd7e751251882adc74f57dd7
|
[
"MIT"
] |
permissive
|
mrJean1/PyGeodesy
|
565266a4f7f6cda5abe98e915bbd868f6cbe1760
|
eba35704b248a7a0388b30f3cea19793921e99b7
|
refs/heads/master
| 2023-08-23T13:58:20.069917
| 2023-08-20T18:50:45
| 2023-08-20T18:50:45
| 68,028,481
| 283
| 66
| null | 2022-04-09T00:40:52
| 2016-09-12T16:49:10
|
Python
|
UTF-8
|
Python
| false
| false
| 44,267
|
py
|
dms.py
|
# -*- coding: utf-8 -*-
u'''Parsers and formatters of angles in degrees, minutes and seconds or radians.
Functions to parse and format bearing, compass, lat- and longitudes in various forms of
degrees, minutes and seconds with or without degrees, minute and second symbols plus a
compass point suffix, including parsing of C{decimal} and C{sexagecimal} degrees.
After I{(C) Chris Veness 2011-2015} published under the same MIT Licence**, see
U{Latitude/Longitude<https://www.Movable-Type.co.UK/scripts/latlong.html>} and
U{Vector-based geodesy<https://www.Movable-Type.co.UK/scripts/latlong-vectors.html>}.
@var F_D: Format degrees as unsigned "deg°" with symbol, plus compass point suffix C{N, S, E} or C{W} (C{str}).
@var F_DM: Format degrees as unsigned "deg°min′" with symbols, plus suffix (C{str}).
@var F_DMS: Format degrees as unsigned "deg°min′sec″" with symbols, plus suffix (C{str}).
@var F_DEG: Format degrees as unsigned "[D]DD" I{without} symbol, plus suffix (C{str}).
@var F_MIN: Format degrees as unsigned "[D]DDMM" I{without} symbols, plus suffix (C{str}).
@var F_SEC: Format degrees as unsigned "[D]DDMMSS" I{without} symbols, plus suffix (C{str}).
@var F_D60: Format degrees as unsigned "[D]DD.MMSS" C{sexagecimal} I{without} symbols, plus suffix (C{str}).
@var F__E: Format degrees as unsigned "%E" I{without} symbols, plus suffix (C{str}).
@var F__F: Format degrees as unsigned "%F" I{without} symbols, plus suffix (C{str}).
@var F__G: Format degrees as unsigned "%G" I{without} symbols, plus suffix (C{str}).
@var F_RAD: Convert degrees to radians and format as unsigned "RR" with symbol, plus suffix (C{str}).
@var F_D_: Format degrees as signed "-/deg°" with symbol, I{without} suffix (C{str}).
@var F_DM_: Format degrees as signed "-/deg°min′" with symbols, I{without} suffix (C{str}).
@var F_DMS_: Format degrees as signed "-/deg°min′sec″" with symbols, I{without} suffix (C{str}).
@var F_DEG_: Format degrees as signed "-/[D]DD" I{without} symbol, I{without} suffix (C{str}).
@var F_MIN_: Format degrees as signed "-/[D]DDMM" I{without} symbols, I{without} suffix (C{str}).
@var F_SEC_: Format degrees as signed "-/[D]DDMMSS" I{without} symbols, I{without} suffix (C{str}).
@var F_D60_: Format degrees as signed "-/[D]DD.MMSS" C{sexagecimal} I{without} symbols, I{without} suffix (C{str}).
@var F__E_: Format degrees as signed "-/%E" I{without} symbols, I{without} suffix (C{str}).
@var F__F_: Format degrees as signed "-/%F" I{without} symbols, I{without} suffix (C{str}).
@var F__G_: Format degrees as signed "-/%G" I{without} symbols, I{without} suffix (C{str}).
@var F_RAD_: Convert degrees to radians and format as signed "-/RR" I{without} symbol, I{without} suffix (C{str}).
@var F_D__: Format degrees as signed "-/+deg°" with symbol, I{without} suffix (C{str}).
@var F_DM__: Format degrees as signed "-/+deg°min′" with symbols, I{without} suffix (C{str}).
@var F_DMS__: Format degrees as signed "-/+deg°min′sec″" with symbols, I{without} suffix (C{str}).
@var F_DEG__: Format degrees as signed "-/+[D]DD" I{without} symbol, I{without} suffix (C{str}).
@var F_MIN__: Format degrees as signed "-/+[D]DDMM" I{without} symbols, without suffix (C{str}).
@var F_SEC__: Format degrees as signed "-/+[D]DDMMSS" I{without} symbols, I{without} suffix (C{str}).
@var F_D60__: Format degrees as signed "-/+[D]DD.MMSS" C{sexagecimal} I{without} symbols, I{without} suffix (C{str}).
@var F__E__: Format degrees as signed "-/+%E" I{without} symbols, I{without} suffix (C{str}).
@var F__F__: Format degrees as signed "-/+%F" I{without} symbols, I{without} suffix (C{str}).
@var F__G__: Format degrees as signed "-/+%G" I{without} symbols, I{without} suffix (C{str}).
@var F_RAD__: Convert degrees to radians and format as signed "-/+RR" I{without} symbol, I{without} suffix (C{str}).
@var S_DEG: Degrees symbol, default C{"°"}
@var S_MIN: Minutes symbol, default C{"′"} aka I{PRIME}
@var S_SEC: Seconds symbol, default C{"″"} aka I{DOUBLE_PRIME}
@var S_RAD: Radians symbol, default C{""} aka L{pygeodesy.NN}
@var S_DMS: If C{True} include, otherwise cancel all DMS symbols, default C{True}.
@var S_SEP: Separator between C{deg°|min′|sec″|suffix}, default C{""} aka L{pygeodesy.NN}
@note: In Python 2-, L{S_DEG}, L{S_MIN}, L{S_SEC}, L{S_RAD} and L{S_SEP} may be multi-byte,
non-ascii characters and if so, I{not} C{unicode}.
'''
from pygeodesy.basics import copysign0, isodd, issequence, isstr, map2, \
neg as _neg # in .ups
from pygeodesy.constants import _umod_360, _0_0, _0_5, _60_0, _360_0, _3600_0
from pygeodesy.errors import ParseError, _parseX, RangeError, rangerrors, _TypeError, \
_ValueError, _xkwds, _xkwds_get
from pygeodesy.interns import NN, _arg_, _COMMA_, _d_, _DASH_, _deg_, _degrees_, _DOT_, \
_0_, _e_, _E_, _EW_, _f_, _F_, _g_, _MINUS_, _N_, _NE_, _NS_, \
_NSEW_, _NW_, _of_, _PERCENTDOTSTAR_, _PLUS_, _PLUSMINUS_, \
_QUOTE1_, _QUOTE2_, _radians_, _S_, _SE_, _SPACE_, _SW_, _W_
from pygeodesy.lazily import _ALL_LAZY, _ALL_MODS as _MODS
from pygeodesy.streprs import Fmt, fstr, fstrzs, _0wpF
# from pygeodesy.utily import _Wrap # _MODS
from math import fabs, modf, radians
try:
from string import letters as _LETTERS
except ImportError: # Python 3+
from string import ascii_letters as _LETTERS
__all__ = _ALL_LAZY.dms
__version__ = '23.06.08'
_beyond_ = 'beyond'
_DDDMMSS_ = 'DDDMMSS'
_deg_min_ = 'deg+min'
_keyword_ = 'keyword'
_SDIGITS_ = '-0123456789+'
_sexagecimal_ = 'sexagecimal'
_SEXAGECIMUL = 1.e4 # sexagecimal C{D.MMSSss} into decimal C{DMMSS.ss}
F_D, F_DM, F_DMS, F_DEG, F_MIN, F_SEC, F_D60, F__E, F__F, F__G, F_RAD = _F_s = (
_d_, 'dm', 'dms', _deg_, 'min', 'sec', 'd60', _e_, _f_, _g_, 'rad')
F_D_, F_DM_, F_DMS_, F_DEG_, F_MIN_, F_SEC_, F_D60_, F__E_, F__F_, F__G_, F_RAD_ = (NN(
_MINUS_, _) for _ in _F_s)
F_D__, F_DM__, F_DMS__, F_DEG__, F_MIN__, F_SEC__, F_D60__, F__E__, F__F__, F__G__, F_RAD__ = (NN(
_PLUS_, _) for _ in _F_s)
del _F_s
_F_case = {F_D: F_D, F_DEG: F_D, _degrees_: F_D, # unsigned _F_s
F_DM: F_DM, F_MIN: F_DM, _deg_min_: F_DM,
F_D60: F_D60, F_RAD: F_RAD, _radians_: F_RAD,
F__E: F__E, F__F: F__F, F__G: F__G} # default F_DMS
_F_prec = {F_D: 6, F_DM: 4, F_DMS: 2, # default precs
F_DEG: 6, F_MIN: 4, F_SEC: 2, F_D60: 0,
F__E: 8, F__F: 8, F__G: 8, F_RAD: 5}
_F_symb = set((F_D, F_DM, F_DMS, _deg_min_)) # == {} pychok -Tb
S_DEG = _DEGREES_ = '°' # ord() = 176
S_MIN = _MINUTES_ = '′' # PRIME
S_SEC = _SECONDS_ = '″' # DOUBLE_PRIME
S_RAD = _RADIANS_ = NN # PYCHOK radians symbol ""
S_DMS = True # include DMS symbols
S_SEP = NN # separator between deg|min|sec|suffix ""
S_NUL = NN # empty string, kept INTERNAL
# note: ord(_DEGREES_) == ord('°') == 176, ord('˚') == 730
_S_norm = {S_DEG: _DEGREES_, '˚': _DEGREES_, '^': _DEGREES_, # _d_: _DEGREES_,
S_MIN: _MINUTES_, '’': _MINUTES_, _QUOTE1_: _MINUTES_, # _r_: _RADIANS_
S_SEC: _SECONDS_, '”': _SECONDS_, _QUOTE2_: _SECONDS_}
_WINDS = (_N_, 'NbE', 'NNE', 'NEbN', _NE_, 'NEbE', 'ENE', 'EbN',
_E_, 'EbS', 'ESE', 'SEbE', _SE_, 'SEbS', 'SSE', 'SbE',
_S_, 'SbW', 'SSW', 'SWbS', _SW_, 'SWbW', 'WSW', 'WbS',
_W_, 'WbN', 'WNW', 'NWbW', _NW_, 'NWbN', 'NNW', 'NbW')
def _D603(sep, s_D=_DOT_, s_M=None, s_S=S_NUL, s_DMS=S_DMS, **unused):
    '''(INTERNAL) Return the pseudo-C{DMS} symbols, overridden or default.
    '''
    if not s_DMS:  # all symbols cancelled: plain decimal form
        return _DOT_, sep, S_NUL
    m = s_M if s_M is not None else sep
    return s_D, (m or S_NUL), s_S
def _DMS3(form, s_D=S_DEG, s_M=S_MIN, s_S=S_SEC, s_DMS=S_DMS, **unused):
    '''(INTERNAL) Return the C{DMS} symbols, overridden or default.
    '''
    if s_DMS and form in _F_symb:
        return s_D, s_M, s_S
    return S_NUL, S_NUL, S_NUL
def _dms3(d, ddd, p, w):
    '''(INTERNAL) Split C{d} into zero-padded (deg, min, sec) C{str}s.
    '''
    total = round(d * _3600_0, p)  # whole angle in seconds, rounded once
    deg, rem = divmod(total, _3600_0)
    mnt, sec = divmod(rem, _60_0)
    return (_0wpF(ddd, 0, deg),
            _0wpF(2, 0, mnt),
            _0wpF(w + 2, p, sec))
def _fstrzs(t, **unused):
'''(INTERNAL) Pass-thru version of C{.streprs.fstrzs}.
'''
return t
def _split3(strDMS, suffix=_NSEW_):
    '''(INTERNAL) Split B{C{strDMS}} into (sign, stripped body, compass point).
    '''
    trimmed = strDMS.strip()
    first = trimmed[:1]   # leading sign (or first digit)
    last = trimmed[-1:]   # trailing compass point (or digit or dot)
    body = trimmed.lstrip(_PLUSMINUS_).rstrip(suffix).strip()
    return first, body, last
def _toDMS(deg, form, prec, sep, ddd, suff, s_D_M_S):  # MCCABE 13 in .units
    '''(INTERNAL) Convert C{deg} to C{str}, with/-out sign, DMS symbols and/or suffix.

       @arg deg: Angle to convert (C{scalar degrees}).
       @arg form: Format specifier, optionally prefixed with "-" or "+"
                  to force a signed (and unsuffixed) result (C{str}).
       @arg prec: Number of decimal digits (C{int}) or C{None} for the
                  format's default precision.
       @arg sep: Separator between deg, min, sec and suffix (C{str}).
       @arg ddd: Width of the degrees field, 1 for bearings, 2 for
                 latitudes (C{int}).
       @arg suff: Suffix, e.g. a compass point or C{NN} (C{str}).
       @arg s_D_M_S: Optional C{s_D}, C{s_M}, C{s_S}, C{s_R} and
                     C{s_DMS} symbol overrides (C{dict}).

       @raise _ValueError: If B{C{deg}} is not convertible to C{float}.
    '''
    try:
        deg = float(deg)
    except (TypeError, ValueError) as x:
        raise _ValueError(deg=deg, form=form, prec=prec, cause=x)
    if form[:1] in _PLUSMINUS_:  # signed
        sign = _MINUS_ if deg < 0 else (
               _PLUS_ if deg > 0 and form[:1] == _PLUS_ else NN)
        form = form.lstrip(_PLUSMINUS_)
        suff = NN  # no suffix if signed
    else:  # suffixed
        sign = NN  # no sign if suffixed
        if suff and sep:  # no sep if no suffix
            suff = NN(sep, suff)
    try:
        F = _F_case[form]  # .strip()
    except KeyError:
        form = form.lower()  # .strip()
        F = _F_case.get(form, F_DMS)  # unknown forms default to F_DMS
    if prec is None:
        z = p = _F_prec.get(F, 6)  # per-format default precision
    else:
        z = int(prec)
        p = abs(z)
    w = p + (1 if p else 0)  # width: p decimals plus the decimal point
    # strip trailing zero decimals only for prec above 1 (kept for negative prec)
    z = fstrzs if z > 1 else _fstrzs
    d = fabs(deg)  # the sign is re-attached via the `sign` prefix
    if F is F_DMS:  # 'deg+min+sec', default
        D, M, S = _DMS3(form, **s_D_M_S)
        d, m, s = _dms3(d, ddd, p, w)
        t = NN(sign, d, D, sep,
                     m, M, sep,
                  z(s), S, suff)
    elif F is F_DM:  # 'deg+min'
        D, M, _ = _DMS3(form, **s_D_M_S)
        d, m = divmod(round(d * _60_0, p), _60_0)
        t = NN(sign, _0wpF(ddd, 0, d), D, sep,
               z(_0wpF(w+2, p, m)), M, suff)
    elif F is F_D:  # 'deg'
        D, _, _ = _DMS3(form, **s_D_M_S)
        t = NN(sign, z(_0wpF(w+ddd, p, d)), D, suff)
    elif F is F_D60:  # 'deg.MM|SSss|' sexagecimal
        D, M, S = _D603(sep, **s_D_M_S)
        d, m, s = _dms3(d, ddd, p, w)
        t = z(s).split(_DOT_) + [S, suff]  # split seconds at the decimal point
        t = NN(sign, d, D, m, M, *t)
    elif F is F_RAD:  # degrees converted to radians
        R = _xkwds_get(s_D_M_S, s_R=S_RAD)
        r = NN(_PERCENTDOTSTAR_, _F_) % (p, radians(d))
        t = NN(sign, z(r), R, suff)
    else:  # F in (F__E, F__F, F__G)
        D = _xkwds_get(s_D_M_S, s_D=S_NUL)
        d = NN(_PERCENTDOTSTAR_, F) % (p, d)  # XXX form?
        t = NN(sign, z(d, ap1z=F is F__G), D, suff)
    return t  # NOT unicode in Python 2-
def bearingDMS(bearing, form=F_D, prec=None, sep=S_SEP, **s_D_M_S):
    '''Convert bearing to a string (without compass point suffix).

       @arg bearing: Bearing from North (compass C{degrees360}).
       @kwarg form: Format specifier for B{C{bearing}} (C{str} or one of
                    the L{F_D}, L{F_DM}, L{F_DMS}, L{F_DEG}, L{F_MIN},
                    L{F_SEC}, L{F_D60}, L{F__E}, L{F__F}, L{F__G} or
                    L{F_RAD} constants, plain, C{_}- or C{__}-suffixed).
       @kwarg prec: Number of decimal digits (0..9 or C{None} for default).
                    Trailing zero decimals are stripped for B{C{prec}}
                    values of 1 and above, but kept for negative B{C{prec}}.
       @kwarg sep: Separator between degrees, minutes, seconds, suffix (C{str}).
       @kwarg s_D_M_S: Optional keyword arguments C{B{s_D}=str},
                       C{B{s_M}=str}, C{B{s_S}=str} and C{B{s_DMS}=True}
                       to override any or cancel all DMS symbols,
                       defaults L{S_DEG}, L{S_MIN} respectively L{S_SEC}.

       @return: Compass degrees per the specified B{C{form}} (C{str}).

       @see: Function L{pygeodesy.toDMS}.
    '''
    wrapped = _umod_360(bearing)  # normalize into [0, 360)
    return _toDMS(wrapped, form, prec, sep, 1, NN, s_D_M_S)
def _clip(angle, limit, units):
    '''(INTERNAL) Clamp C{angle} to C{[-limit, +limit]}, helper for
       C{clipDegrees} and C{clipRadians}.
    '''
    clipped = max(-limit, angle)
    clipped = min(limit, clipped)
    if clipped != angle and rangerrors():
        msg = _SPACE_(fstr(angle, prec=6, ints=True), _beyond_,
                      copysign0(limit, angle), units)
        raise RangeError(msg, txt=None)
    return clipped
def clipDegrees(deg, limit):
    '''Clip a lat- or longitude to the given range.

       @arg deg: Unclipped lat- or longitude (C{scalar degrees}).
       @arg limit: Valid C{-/+B{limit}} range (C{degrees}).

       @return: Clipped value (C{degrees}).

       @raise RangeError: If B{C{deg}} is beyond the C{-/+B{limit}}
                          range and L{pygeodesy.rangerrors} is C{True}.
    '''
    if limit and limit > 0:
        return _clip(deg, limit, _degrees_)
    return deg  # a non-positive limit disables clipping
def clipRadians(rad, limit):
    '''Clip a lat- or longitude to the given range.

       @arg rad: Unclipped lat- or longitude (C{radians}).
       @arg limit: Valid C{-/+B{limit}} range (C{radians}).

       @return: Clipped value (C{radians}).

       @raise RangeError: If B{C{rad}} is beyond the C{-/+B{limit}}
                          range and L{pygeodesy.rangerrors} is C{True}.
    '''
    if limit and limit > 0:
        return _clip(rad, limit, _radians_)
    return rad  # a non-positive limit disables clipping
def compassDMS(bearing, form=F_D, prec=None, sep=S_SEP, **s_D_M_S):
    '''Convert bearing to a string suffixed with compass point.

       @arg bearing: Bearing from North (compass C{degrees360}).
       @kwarg form: Format specifier for B{C{bearing}} (C{str} or one of
                    the L{F_D}, L{F_DM}, L{F_DMS}, L{F_DEG}, L{F_MIN},
                    L{F_SEC}, L{F_D60}, L{F__E}, L{F__F}, L{F__G} or
                    L{F_RAD} constants, plain, C{_}- or C{__}-suffixed).
       @kwarg prec: Number of decimal digits (0..9 or C{None} for default).
                    Trailing zero decimals are stripped for B{C{prec}}
                    values of 1 and above, but kept for negative B{C{prec}}.
       @kwarg sep: Separator between degrees, minutes, seconds, suffix (C{str}).
       @kwarg s_D_M_S: Optional keyword arguments C{B{s_D}=str},
                       C{B{s_M}=str}, C{B{s_S}=str} and C{B{s_DMS}=True}
                       to override any or cancel all DMS symbols,
                       defaults L{S_DEG}, L{S_MIN} respectively L{S_SEC}.

       @return: Compass degrees and point in the specified form (C{str}).

       @see: Function L{pygeodesy.toDMS}.
    '''
    wrapped = _umod_360(bearing)  # normalize into [0, 360)
    point = compassPoint(wrapped)
    return _toDMS(wrapped, form, prec, sep, 1, point, s_D_M_S)
def compassPoint(bearing, prec=3):
    '''Convert bearing to a compass point.

       @arg bearing: Bearing from North (compass C{degrees360}).
       @kwarg prec: Precision, the number of compass point characters:
                    1 for cardinal or basic winds, 2 for intercardinal
                    or ordinal or principal winds, 3 for secondary-
                    intercardinal or half-winds or 4 for quarter-winds.

       @return: Compass point (1-, 2-, 3- or 4-letter C{str}).

       @raise ValueError: Invalid B{C{bearing}} or B{C{prec}}.

       @see: U{Dms.compassPoint
             <https://GitHub.com/ChrisVeness/geodesy/blob/master/dms.js>}
             and U{Compass rose<https://WikiPedia.org/wiki/Compass_rose>}.

       @example:

        >>> compassPoint(24, 1)   # 'N'
        >>> compassPoint(24, 2)   # 'NE'
        >>> compassPoint(24)      # 'NNE'
        >>> compassPoint(11, 4)   # 'NbE'
        >>> compassPoint(348.749) # 'NNW'
    '''
    try:  # like .streprs.enstr2
        m = 2 << prec  # 4, 8, 16 or 32 winds for prec 1..4
        if m not in (4, 8, 16, 32):
            raise ValueError
        # round-away-from-zero, i.e. int(b + 0.5) for non-negative b,
        # deliberately NOT Python 3's half-even round()
        i = int(_umod_360(bearing) * m / _360_0 + _0_5) % m
        return _WINDS[i * (32 // m)]
    except (IndexError, TypeError, ValueError) as x:
        raise _ValueError(bearing=bearing, prec=prec, cause=x)
def degDMS(deg, prec=6, s_D=S_DEG, s_M=S_MIN, s_S=S_SEC, neg=_MINUS_, pos=NN):
    '''Convert degrees to a string in degrees, minutes I{or} seconds.

       @arg deg: Value in degrees (C{scalar degrees}).
       @kwarg prec: Number of decimal digits (0..9).  Trailing zero
                    decimals are stripped for B{C{prec}} values of 1
                    and above, but kept for negative B{C{prec}}.
       @kwarg s_D: D symbol for degrees (C{str}).
       @kwarg s_M: M symbol for minutes (C{str}) or C{""}.
       @kwarg s_S: S symbol for seconds (C{str}) or C{""}.
       @kwarg neg: Optional sign for negative (C{'-'}).
       @kwarg pos: Optional sign for positive (C{''}).

       @return: I{Either} degrees, minutes I{or} seconds (C{str}).

       @raise _ValueError: If B{C{deg}} is not convertible to C{float}.

       @see: Function L{pygeodesy.toDMS}.
    '''
    try:
        deg = float(deg)
    except (TypeError, ValueError) as x:
        raise _ValueError(deg=deg, prec=prec, cause=x)
    # choose the largest unit in which the magnitude reaches 1,
    # degrees first, then minutes, then seconds
    d, s = fabs(deg), s_D
    if d < 1:
        if s_M:
            d *= _60_0
            if d < 1 and s_S:
                d *= _60_0  # still below 1 minute: use seconds
                s = s_S
            else:
                s = s_M
        elif s_S:  # minutes symbol cancelled: go directly to seconds
            d *= _3600_0
            s = s_S
    z = int(prec)
    t = Fmt.F(d, prec=abs(z))
    if z > 1:  # strip trailing zero decimals, kept for negative prec
        t = fstrzs(t)
    n = neg if deg < 0 else pos
    return NN(n, t, s)  # NOT unicode in Python 2-
def latDMS(deg, form=F_DMS, prec=None, sep=S_SEP, **s_D_M_S):
    '''Convert latitude to a string, optionally suffixed with N or S.

       @arg deg: Latitude to be formatted (C{scalar degrees}).
       @kwarg form: Format specifier for B{C{deg}} (C{str} or one of
                    the L{F_D}, L{F_DM}, L{F_DMS}, L{F_DEG}, L{F_MIN},
                    L{F_SEC}, L{F_D60}, L{F__E}, L{F__F}, L{F__G} or
                    L{F_RAD} constants, plain, C{_}- or C{__}-suffixed).
       @kwarg prec: Number of decimal digits (0..9 or C{None} for default).
                    Trailing zero decimals are stripped for B{C{prec}}
                    values of 1 and above, but kept for negative B{C{prec}}.
       @kwarg sep: Separator between degrees, minutes, seconds, suffix (C{str}).
       @kwarg s_D_M_S: Optional keyword arguments C{B{s_D}=str},
                       C{B{s_M}=str}, C{B{s_S}=str} and C{B{s_DMS}=True}
                       to override any or cancel all DMS symbols,
                       defaults L{S_DEG}, L{S_MIN} respectively L{S_SEC}.

       @return: Degrees in the specified form (C{str}).

       @see: Functions L{pygeodesy.toDMS} and L{pygeodesy.lonDMS}.
    '''
    if deg < 0:
        hemi = _S_
    else:
        hemi = _N_
    return _toDMS(deg, form, prec, sep, 2, hemi, s_D_M_S)
def latlonDMS(lls, **m_form_prec_sep_s_D_M_S):
    '''Convert one or more C{LatLon} instances to strings.

       @arg lls: A single C{LatLon} or a list, sequence, tuple, etc.
                 of C{LatLon} instances.
       @kwarg m_form_prec_sep_s_D_M_S: Optional C{B{m}eter}, C{B{form}at},
                     C{B{prec}ision}, I{DEPRECATED} B{C{sep}}, B{C{s_D}},
                     B{C{s_M}}, B{C{s_S}} and B{C{s_DMS}} keyword
                     arguments, see method C{LatLon.toStr} and functions
                     L{pygeodesy.latDMS} and L{pygeodesy.lonDMS}.

       @return: A single C{str} if B{C{lls}} is a single C{LatLon},
                otherwise a C{tuple} of C{str}s.

       @see: Functions L{pygeodesy.latDMS}, L{pygeodesy.latlonDMS_},
             L{pygeodesy.lonDMS} and L{pygeodesy.toDMS} and method
             C{LatLon.toStr}.

       @note: Keyword argument C{B{sep}=None} to return a C{str}ing
              instead of the C{tuple} is I{DEPRECATED}, use
              C{B{sep}.join(B{latlonDMS_}(...))}.
    '''
    sep, kwds = _latlonDMS_sep2(latlonDMS, **m_form_prec_sep_s_D_M_S)
    if isinstance(lls, _MODS.latlonBase.LatLonBase):
        return lls.toStr(**kwds)
    if issequence(lls):
        strs = tuple(ll.toStr(**kwds) for ll in lls)
        return sep.join(strs) if sep else strs  # XXX sep TO BE REMOVED
    raise _TypeError(lls=lls, **m_form_prec_sep_s_D_M_S)
def latlonDMS_(*lls, **m_form_prec_sep_s_D_M_S):
    '''Convert one or more C{LatLon} instances to strings.

       @arg lls: The instances, all positional arguments (C{LatLon}s).
       @kwarg m_form_prec_sep_s_D_M_S: Optional C{B{m}eter}, C{B{form}at},
                   C{B{prec}ision}, I{DEPRECATED} B{C{sep}}, B{C{s_D}},
                   B{C{s_M}}, B{C{s_S}} and B{C{s_DMS}} keyword arguments,
                   see method C{LatLon.toStr} and functions
                   L{pygeodesy.latDMS} and L{pygeodesy.lonDMS}.

       @return: A C{tuple} of C{str}s for 2 or more instances, a
                single C{str} for exactly one instance.

       @raise ValueError: No B{C{lls}} positional arguments given.

       @see: Function L{pygeodesy.latlonDMS}.

       @note: Keyword argument C{B{sep}=None} to return a C{str}ing
              instead of the C{tuple} has been I{DEPRECATED}, use
              C{B{sep}.join(B{latlonDMS_}(...))}.
    '''
    sep, kwds = _latlonDMS_sep2(latlonDMS, **m_form_prec_sep_s_D_M_S)
    n = len(lls)
    if n < 1:
        raise _ValueError(lls=lls, **m_form_prec_sep_s_D_M_S)
    if n < 2:  # single instance: unwrap and force str result
        lls, sep = lls[0], None
    r = latlonDMS(lls, **kwds)
    return sep.join(r) if sep else r
def _latlonDMS_sep2(where, sep=None, **kwds):
    '''DEPRECATED, instead use: %r.join(%s(...))'''
    # NOTE: the docstring above doubles as the deprecation-warning text,
    # formatted below with % (sep, name) -- do not edit it casually.
    if sep:  # a non-empty sep was given: issue the deprecation warning
        # describe the offending usage: keyword arg sep=... of <name>
        k = _SPACE_(_keyword_, _arg_, Fmt.EQUAL(sep=repr(sep)), _of_)
        n = where.__name__  # name of the calling, public function
        t = _latlonDMS_sep2.__doc__ % (sep, n)
        _MODS.props._throwarning(k, n, t)
    return sep, kwds  # (deprecated sep, remaining keyword arguments)
def lonDMS(deg, form=F_DMS, prec=None, sep=S_SEP, **s_D_M_S):
    '''Convert longitude to a string, optionally suffixed with E or W.

       @arg deg: Longitude to be formatted (C{scalar degrees}).
       @kwarg form: Format specifier for B{C{deg}} (C{str} or one of
                    the C{F_*} format constants, see L{latDMS}).
       @kwarg prec: Number of decimal digits (0..9 or C{None} for default).
                    Trailing zero decimals are stripped for B{C{prec}}
                    values of 1 and above, but kept for negative B{C{prec}}.
       @kwarg sep: Separator between degrees, minutes, seconds, suffix (C{str}).
       @kwarg s_D_M_S: Optional keyword arguments C{B{s_D}=str}, C{B{s_M}=str}
                       C{B{s_S}=str} and C{B{s_DMS}=True} to override any or
                       cancel all DMS symbols, defaults L{S_DEG}, L{S_MIN}
                       respectively L{S_SEC}.

       @return: Degrees in the specified form (C{str}).

       @see: Functions L{pygeodesy.toDMS} and L{pygeodesy.latDMS}.
    '''
    suffix = _W_ if deg < 0 else _E_  # western longitudes are negative
    return _toDMS(deg, form, prec, sep, 3, suffix, s_D_M_S)
def normDMS(strDMS, norm=None, **s_D_M_S):
    '''Normalize all degrees, minutes and seconds (DMS) I{symbols} in
       a string to the default symbols L{S_DEG}, L{S_MIN}, L{S_SEC}.

       @arg strDMS: Original DMS string (C{str}).
       @kwarg norm: Optional replacement symbol (C{str}) or C{None} for
                    the default DMS symbols).  Use C{B{norm}=""} to
                    remove all DMS symbols.
       @kwarg s_D_M_S: Optional, alternate DMS symbols C{B{s_D}=str},
                       C{B{s_M}=str}, C{B{s_S}=str} and/or C{B{s_R}=str}
                       for radians, each to be replaced by B{C{norm}}.

       @return: Normalized DMS (C{str}).
    '''
    def _s2S2(s_D=S_DEG, s_M=S_MIN, s_S=S_SEC, s_R=S_RAD):
        # yield (given symbol, default symbol) pairs, merged with the
        # module-wide overrides in _S_norm; empty symbols are skipped
        d = {s_D: S_DEG, s_M: S_MIN, s_S: S_SEC, s_R: S_RAD}
        for s, S in _xkwds(d, **_S_norm).items():
            if s:
                yield s, S

    # XXX strDMS isn't unicode in Python 2- and looping
    # thru strDMS will yield each byte, hence the loop
    # thru _s2S2 and replacing the DMS symbols in strDMS
    if norm is None:  # back to default DMS
        for s, S in _s2S2(**s_D_M_S):
            if s != S:
                strDMS = strDMS.replace(s, S)
    else:  # replace or remove all DMS
        n = norm or NN
        for s, _ in _s2S2(**s_D_M_S):
            if s != n:
                strDMS = strDMS.replace(s, n)
        if n:  # drop trailing replacement symbol(s)
            strDMS = strDMS.rstrip(n)  # XXX not .strip?
    return strDMS  # NOT unicode in Python 2-
def parseDDDMMSS(strDDDMMSS, suffix=_NSEW_, sep=S_SEP, clip=0, sexagecimal=False):  # MCCABE 14
    '''Parse a lat- or longitude represention forms as [D]DDMMSS in degrees.

       @arg strDDDMMSS: Degrees in any of several forms (C{str}) and types (C{float},
                        C{int}, other).
       @kwarg suffix: Optional, valid compass points (C{str}, C{tuple}).
       @kwarg sep: Optional separator between "[D]DD", "MM", "SS", B{C{suffix}} (L{S_SEP}).
       @kwarg clip: Optionally, limit value to range C{-/+B{clip}} (C{degrees}).
       @kwarg sexagecimal: If C{True}, convert C{"D.MMSS"} or C{float(D.MMSS)} to
                           C{base-60} "MM" and "SS" digits.  See C{form}s L{F_D60},
                           L{F_D60_} and L{F_D60__}.

       @return: Degrees (C{float}).

       @raise ParseError: Invalid B{C{strDDDMMSS}} or B{C{clip}} or the form of
                          B{C{strDDDMMSS}} is incompatible with the suffixed or
                          B{C{suffix}} compass point.

       @raise RangeError: Value of B{C{strDDDMMSS}} outside the valid C{-/+B{clip}}
                          range and L{pygeodesy.rangerrors} set to C{True}.

       @note: Type C{str} values "[D]DD", "[D]DDMM", "[D]DDMMSS" and "[D]DD.MMSS"
              for B{C{strDDDMMSS}} are parsed properly only if I{either} unsigned
              and suffixed with a valid, compatible, C{cardinal} L{compassPoint}
              I{or} signed I{or} unsigned, unsuffixed and with keyword argument
              B{C{suffix}="NS"}, B{C{suffix}="EW"} or a compatible L{compassPoint}.

       @note: Unlike function L{parseDMS}, type C{float}, C{int} and other non-C{str}
              B{C{strDDDMMSS}} values are interpreted as C{form} [D]DDMMSS or
              [D]DD.MMSS.  For example, C{int(1230)} is returned as 12.5 and I{not
              1230.0} degrees.  However, C{int(345)} is considered C{form} "DDD"
              345 I{and not "DDMM" 0345}, unless B{C{suffix}} specifies the compass
              point.  Also, C{float(15.0523)} is returned as 15.0523 decimal
              degrees and I{not 15°5′23″ sexagecimal}.  To consider the latter, use
              C{float(15.0523)} or C{"15.0523"} and specify the keyword argument
              C{B{sexagecimal}=True}.

       @see: Functions L{pygeodesy.parseDMS}, L{pygeodesy.parseDMS2} and
             L{pygeodesy.parse3llh}.
    '''
    def _DDDMMSS(strDDDMMSS, suffix, sep, clip, sexagecimal):
        # inner worker, wrapped by _parseX to turn any error into ParseError
        S = suffix.upper()
        if isstr(strDDDMMSS):
            t = strDDDMMSS.replace(sep, NN) if sep else strDDDMMSS
            s, t, P = _split3(t, S)  # sign, body, compass point suffix
            f = t.split(_DOT_)
            n = len(f[0])  # number of digits before the decimal dot
            f = NN.join(f)
            if 1 < n < 8 and f.isdigit() and (  # dddN/S/E/W or ddd or +/-ddd
                                 (P in S and s.isdigit()) or
                                 (P.isdigit() and s in _SDIGITS_  # PYCHOK indent
                                              and S in _WINDS)):
                # check [D]DDMMSS form and compass point: odd digit
                # count implies longitude (DDD...), even latitude
                X = _EW_ if isodd(n) else _NS_
                if not (P in X or (S in X and (P.isdigit() or P == _DOT_))):
                    t = _DDDMMSS_[int(X is _NS_):(n | 1)], _DASH_.join(X)
                    raise ParseError('form %s applies %s' % t)
            elif not sexagecimal:  # try other forms
                return _DMS2deg(strDDDMMSS, S, sep, clip, {})

            if sexagecimal:  # move decimal dot from ...
                n += 4  # ... [D]DD.MMSSs to [D]DDMMSS.s
                if n < 6:
                    raise ParseError('%s digits (%s)' % (_sexagecimal_, n))
                z = n - len(f)  # zeros to append
                t = (f + (_0_ * z)) if z > 0 else _DOT_(f[:n], f[n:])
                f = _0_0  # fraction

        else:  # float or int to [D]DDMMSS[.fff]
            f, m = float(strDDDMMSS), 0
            if sexagecimal:
                f *= _SEXAGECIMUL
                m = 6
            s = P = _0_  # anything except NN, _S_, _SW_, _W_
            if f < 0:
                f = -f
                s = _MINUS_
            f, i = modf(f)  # returns ...
            t = str(int(i))  # ... float(i)
            n = len(t)  # number of digits to ...
            if n < m:  # ... required min or ...
                t = (_0_ * (m - n)) + t
            # ... match the given compass point
            elif S in (_NS_ if isodd(n) else _EW_):
                t = _0_ + t
                # P = S
            # elif n > 1:
            #     P = (_EW_ if isodd(n) else _NS_)[0]
            n = len(t)

        # split the digit string t into up to (deg, min, sec),
        # folding the fractional part f into the last component
        if n < 4:  # [D]DD[.ddd]
            t = (float(t) + f),
        else:
            f += float(t[n-2:])
            if n < 6:  # [D]DDMM[.mmm]
                t = float(t[:n-2]), f
            else:  # [D]DDMMSS[.sss]
                t = float(t[:n-4]), float(t[n-4:n-2]), f
        d = _dms2deg(s, P, *t)

        return clipDegrees(d, float(clip)) if clip else d

    return _parseX(_DDDMMSS, strDDDMMSS, suffix, sep, clip, sexagecimal,
                             strDDDMMSS=strDDDMMSS, suffix=suffix, sexagecimal=sexagecimal)
def _dms2deg(s, P, deg, min=_0_0, sec=_0_0):
    '''(INTERNAL) Combine degrees, minutes and seconds into signed
       decimal degrees, for C{parseDDDMMSS} and C{_DMS2deg}.
    '''
    d = deg + (min + (sec / _60_0)) / _60_0
    # negative when explicitly signed or suffixed South or West
    negative = (s == _MINUS_) or (P and P in _SW_)
    return _neg(d) if negative else d
def _DMS2deg(strDMS, suffix, sep, clip, s_D_M_S):
    '''(INTERNAL) Helper for C{parseDDDMMSS} and C{parseDMS}.
    '''
    try:  # plain scalar or numeric string first
        d = float(strDMS)
    except (TypeError, ValueError):
        sign, txt, point = _split3(strDMS, suffix.upper())
        if sep:  # replace separators, then strip all DMS symbols
            txt = txt.replace(sep, _SPACE_)
            txt = normDMS(txt, norm=NN, **s_D_M_S)
        else:  # turn all DMS symbols into blanks
            txt = normDMS(txt, norm=_SPACE_, **s_D_M_S)
        dms = map2(float, txt.strip().split())
        d = _dms2deg(sign, point, *dms[:3])
    if clip:
        d = clipDegrees(d, float(clip))
    return d
def parseDMS(strDMS, suffix=_NSEW_, sep=S_SEP, clip=0, **s_D_M_S):  # MCCABE 14
    '''Parse a lat- or longitude representation in C{degrees}.

       Very flexible on formats: signed decimal degrees, degrees and
       minutes or degrees, minutes and seconds, optionally suffixed
       by a cardinal compass point.  A variety of symbols, separators
       and suffixes are accepted, for example "3°37′09″W".  Minutes
       and seconds may be omitted.

       @arg strDMS: Degrees in any of several forms (C{str}) and
                    types (C{float}, C{int}, other).
       @kwarg suffix: Optional, valid compass points (C{str}, C{tuple}).
       @kwarg sep: Optional separator between deg°, min′, sec″, B{C{suffix}} (C{''}).
       @kwarg clip: Optionally, limit value to range C{-/+B{clip}} (C{degrees}).
       @kwarg s_D_M_S: Optional, alternate symbol for degrees C{B{s_D}=str},
                       minutes C{B{s_M}=str} and/or seconds C{B{s_S}=str}.

       @return: Degrees (C{float}).

       @raise ParseError: Invalid B{C{strDMS}} or B{C{clip}}.

       @raise RangeError: Value of B{C{strDMS}} outside the valid C{-/+B{clip}}
                          range and L{pygeodesy.rangerrors} set to C{True}.

       @note: Unlike function L{parseDDDMMSS}, type C{float}, C{int} and other
              non-C{str} B{C{strDMS}} values are considered decimal (and not
              sexagecimal) degrees, e.g. C{int(1230)} is returned as 1230.0
              I{and not as 12.5} degrees.

       @see: Functions L{pygeodesy.parseDDDMMSS}, L{pygeodesy.parseDMS2},
             L{pygeodesy.parse3llh} and L{pygeodesy.toDMS}.
    '''
    return _parseX(_DMS2deg, strDMS, suffix, sep, clip, s_D_M_S,
                             strDMS=strDMS, suffix=suffix)
def parseDMS2(strLat, strLon, sep=S_SEP, clipLat=90, clipLon=180, wrap=False, **s_D_M_S):
    '''Parse a lat- and a longitude representions C{"lat, lon"} in C{degrees}.

       @arg strLat: Latitude in any of several forms (C{str} or C{degrees}).
       @arg strLon: Longitude in any of several forms (C{str} or C{degrees}).
       @kwarg sep: Optional separator between deg°, min′, sec″, suffix (C{''}).
       @kwarg clipLat: Limit latitude to range C{-/+B{clipLat}} (C{degrees}).
       @kwarg clipLon: Limit longitude to range C{-/+B{clipLon}} (C{degrees}).
       @kwarg wrap: If C{True}, wrap or I{normalize} the lat- and longitude,
                    overriding B{C{clipLat}} and B{C{clipLon}} (C{bool}).
       @kwarg s_D_M_S: Optional, alternate symbol for degrees C{B{s_D}=str},
                       minutes C{B{s_M}=str} and/or seconds C{B{s_S}=str}.

       @return: A L{LatLon2Tuple}C{(lat, lon)} in C{degrees}.

       @raise ParseError: Invalid B{C{strLat}} or B{C{strLon}}.

       @raise RangeError: Value of B{C{strLat}} or B{C{strLon}} outside the
                          valid C{-/+B{clipLat}} or C{-/+B{clipLon}} range
                          and L{pygeodesy.rangerrors} set to C{True}.

       @note: See the B{Notes} at function L{parseDMS}.

       @see: Functions L{pygeodesy.parseDDDMMSS}, L{pygeodesy.parseDMS},
             L{pygeodesy.parse3llh} and L{pygeodesy.toDMS}.
    '''
    # delegate to the shared lat/lon-pair helper
    t = _2Tuple(strLat, strLon, clipLat, clipLon, wrap, sep=sep, **s_D_M_S)
    return t
def _2Tuple(strLat, strLon, clipLat, clipLon, wrap, **kwds):
    '''(INTERNAL) Helper for C{parseDMS2} and C{parsellh3}.
    '''
    if wrap:  # normalize instead of clipping
        lat = parseDMS(strLat, suffix=_NS_, **kwds)
        lon = parseDMS(strLon, suffix=_EW_, **kwds)
        lat, lon = _MODS.utily._Wrap.latlon(lat, lon)
    else:
        # if wrap is None:
        #     clipLat = clipLon = 0
        lat = parseDMS(strLat, suffix=_NS_, clip=clipLat, **kwds)
        lon = parseDMS(strLon, suffix=_EW_, clip=clipLon, **kwds)
    return _MODS.namedTuples.LatLon2Tuple(lat, lon)
def parse3llh(strllh, height=0, sep=_COMMA_, clipLat=90, clipLon=180, wrap=False, **s_D_M_S):
    '''Parse a string C{"lat, lon [, h]"} representing lat-, longitude in
       C{degrees} and optional height in C{meter}.

       The lat- and longitude value must be separated by a separator
       character.  If height is present it must follow, separated by
       another separator.

       The lat- and longitude values may be swapped, provided at least
       one ends with the proper compass point.

       @arg strllh: Latitude, longitude[, height] (C{str}, ...).
       @kwarg height: Optional, default height (C{meter}) or C{None}.
       @kwarg sep: Optional separator between C{"lat lon [h] suffix"} (C{str}).
       @kwarg clipLat: Limit latitude to range C{-/+B{clipLat}} (C{degrees}).
       @kwarg clipLon: Limit longitude to range C{-/+B{clipLon}} (C{degrees}).
       @kwarg wrap: If C{True}, wrap or I{normalize} the lat- and longitude,
                    overriding B{C{clipLat}} and B{C{clipLon}} (C{bool}).
       @kwarg s_D_M_S: Optional, alternate symbol for degrees C{B{s_D}=str},
                       minutes C{B{s_M}=str} and/or seconds C{B{s_S}=str}.

       @return: A L{LatLon3Tuple}C{(lat, lon, height)} in C{degrees},
                C{degrees} and C{float}.

       @raise RangeError: Lat- or longitude value of B{C{strllh}} outside
                          the valid C{-/+B{clipLat}} or C{-/+B{clipLon}}
                          range and L{pygeodesy.rangerrors} set to C{True}.

       @raise ValueError: Invalid B{C{strllh}} or B{C{height}}.

       @note: See the B{Notes} at function L{parseDMS}.

       @see: Functions L{pygeodesy.parseDDDMMSS}, L{pygeodesy.parseDMS},
             L{pygeodesy.parseDMS2} and L{pygeodesy.toDMS}.

       @example:

        >>> parse3llh('000°00′05.31″W, 51° 28′ 40.12″ N')
        (51.4778°N, 000.0015°W, 0)
    '''
    def _3llh(strllh, height, sep, wrap):
        # inner worker, wrapped by _parseX to turn errors into ParseError
        ll = strllh.strip().split(sep)
        if len(ll) > 2:  # XXX interpret height unit
            # third item is the height, trailing letters/spaces stripped
            h = float(ll.pop(2).rstrip(_LETTERS + _SPACE_))
        else:
            h = height  # None from wgrs.Georef.__new__
        if len(ll) != 2:
            raise ValueError

        a, b = [_.strip() for _ in ll]  # PYCHOK false
        # swap if the values appear reversed, judged by compass suffix
        if a[-1:] in _EW_ or b[-1:] in _NS_:
            a, b = b, a
        return _2Tuple(a, b, clipLat, clipLon, wrap, **s_D_M_S).to3Tuple(h)

    return _parseX(_3llh, strllh, height, sep, wrap, strllh=strllh)
def parseRad(strRad, suffix=_NSEW_, clip=0):
    '''Parse a string representing angle in C{radians}.

       @arg strRad: Degrees in any of several forms (C{str} or C{radians}).
       @kwarg suffix: Optional, valid compass points (C{str}, C{tuple}).
       @kwarg clip: Optionally, limit value to range C{-/+B{clip}} (C{radians}).

       @return: Radians (C{float}).

       @raise ParseError: Invalid B{C{strRad}} or B{C{clip}}.

       @raise RangeError: Value of B{C{strRad}} outside the valid C{-/+B{clip}}
                          range and L{pygeodesy.rangerrors} set to C{True}.
    '''
    def _Rad(strRad, suffix, clip):
        try:  # plain scalar or numeric string first
            r = float(strRad)
        except (TypeError, ValueError):
            sign, txt, point = _split3(strRad, suffix.upper())
            r = _dms2deg(sign, point, float(txt))
        if clip:
            r = clipRadians(r, float(clip))
        return r

    return _parseX(_Rad, strRad, suffix, clip, strRad=strRad, suffix=suffix)
def precision(form, prec=None):
    '''Set the default precison for a given F_ form.

       @arg form: L{F_D}, L{F_DM}, L{F_DMS}, L{F_DEG}, L{F_MIN},
                  L{F_SEC}, L{F_D60}, L{F__E}, L{F__F}, L{F__G}
                  or L{F_RAD} (C{str}).
       @kwarg prec: Number of decimal digits (0..9 or C{None} for
                    default).  Trailing zero decimals are stripped
                    for B{C{prec}} values of 1 and above, but kept
                    for negative B{C{prec}}.

       @return: Previous precision for the B{C{form}} (C{int}).

       @raise ValueError: Invalid B{C{form}} or B{C{prec}} or B{C{prec}}
                          outside the valid range C{-/+9}.
    '''
    if form not in _F_prec:
        raise _ValueError(form=form)
    previous = _F_prec[form]
    if prec is not None:  # validate and install the new default
        _F_prec[form] = _MODS.units.Precision_(prec=prec, low=-9, high=9)
    return previous
def toDMS(deg, form=F_DMS, prec=2, sep=S_SEP, ddd=2, neg=_MINUS_, pos=_PLUS_, **s_D_M_S):
    '''Convert I{signed} C{degrees} to string, without suffix.

       @arg deg: Degrees to be formatted (C{scalar degrees}).
       @kwarg form: Format specifier for B{C{deg}} (C{str} or one of
                    the C{F_*} format constants, see L{latDMS}).
       @kwarg prec: Number of decimal digits (0..9 or C{None} for default).
                    Trailing zero decimals are stripped for B{C{prec}}
                    values of 1 and above, but kept for negative B{C{prec}}.
       @kwarg sep: Separator between degrees, minutes, seconds, suffix (C{str}).
       @kwarg ddd: Number of digits for B{C{deg}°} (2 or 3).
       @kwarg neg: Prefix for negative B{C{deg}} (C{'-'}).
       @kwarg pos: Prefix for positive B{C{deg}} and signed B{C{form}} (C{'+'}).
       @kwarg s_D_M_S: Optional keyword arguments C{B{s_D}=str}, C{B{s_M}=str}
                       C{B{s_S}=str} and C{B{s_DMS}=True} to override any or
                       cancel all DMS symbols, defaults L{S_DEG}, L{S_MIN}
                       respectively L{S_SEC}.

       @return: Degrees in the specified form (C{str}).

       @note: The DMS symbols can be overridden in this and other C{*DMS}
              functions by keyword argments C{B{s_D}="d"}, C{B{s_M}="'"}
              respectively C{B{s_S}='"'}; keyword argument B{C{s_DMS}=None}
              cancels all C{DMS} symbols to C{B{S_NUL}=NN}.  Sexagecimal
              format B{C{F_D60}} and formats B{C{F__E}}, B{C{F__F}},
              B{C{F__G}} and B{C{F_RAD}} support further symbol overrides,
              see function L{latDMS}.

       @see: Function L{pygeodesy.degDMS}
    '''
    sign = form[:1]  # a leading '+' or '-' requests signed formatting
    f = form[1:] if sign in _PLUSMINUS_ else form
    t = _toDMS(deg, f, prec, sep, ddd, NN, s_D_M_S)  # unsigned, un-suffixed
    if deg < 0:
        prefix = neg
    elif deg > 0 and sign == _PLUS_:
        prefix = pos
    else:
        prefix = NN
    return (prefix + t) if prefix else t
# **) MIT License
#
# Copyright (C) 2016-2023 -- mrJean1 at Gmail -- All Rights Reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
|
70d1cc8c75bf20ef22f7c9be0f275cb45ab26e42
|
2ad93a1cf25a580fe980482d2d17a657de3b2523
|
/django-stubs/contrib/postgres/functions.pyi
|
414a0998af303b0cacd426adddf647eb84c1ab5c
|
[
"MIT"
] |
permissive
|
typeddjango/django-stubs
|
f35dfcb001e54694a0a1e8c0afcc6e6a3d130c32
|
0117348c3c7713f25f96b46e53ebdeed7bdba544
|
refs/heads/master
| 2023-08-25T19:42:52.707151
| 2023-08-23T15:13:25
| 2023-08-23T15:13:25
| 142,779,680
| 1,133
| 376
|
MIT
| 2023-09-13T19:05:06
| 2018-07-29T17:08:50
|
Python
|
UTF-8
|
Python
| false
| false
| 95
|
pyi
|
functions.pyi
|
from django.db.models import Func

# Type stubs for django.contrib.postgres.functions: database functions
# with no arguments beyond those of the base Func expression.

# PostgreSQL gen_random_uuid() -- presumably, per the runtime module; verify
class RandomUUID(Func): ...

# PostgreSQL transaction-start timestamp -- presumably now() at txn begin
class TransactionNow(Func): ...
|
d59596434f5cbc84e9dbf61313ccfd4398712683
|
a41e1498e3c080f47abd8e8e57157548df3ebbf1
|
/pandas/tests/scalar/timestamp/test_constructors.py
|
b65b34f7482608607196e361d7fd1613e55eb7e2
|
[
"BSD-3-Clause"
] |
permissive
|
pandas-dev/pandas
|
e7e639454a298bebc272622e66faa9829ea393bb
|
c7325d7e7e77ecb4a4e57b48bc25265277c75712
|
refs/heads/main
| 2023-09-01T12:42:07.927176
| 2023-09-01T11:14:10
| 2023-09-01T11:14:10
| 858,127
| 36,166
| 18,728
|
BSD-3-Clause
| 2023-09-14T21:18:41
| 2010-08-24T01:37:33
|
Python
|
UTF-8
|
Python
| false
| false
| 32,760
|
py
|
test_constructors.py
|
import calendar
from datetime import (
date,
datetime,
timedelta,
timezone,
)
import zoneinfo
import dateutil.tz
from dateutil.tz import tzutc
import numpy as np
import pytest
import pytz
from pandas._libs.tslibs.dtypes import NpyDatetimeUnit
from pandas.compat import PY310
from pandas.errors import OutOfBoundsDatetime
from pandas import (
NA,
NaT,
Period,
Timedelta,
Timestamp,
)
class TestTimestampConstructors:
def test_construct_from_time_unit(self):
# GH#54097 only passing a time component, no date
ts = Timestamp("01:01:01.111")
assert ts.unit == "ms"
    def test_weekday_but_no_day_raises(self):
        """A weekday name without a day-of-month is rejected with ValueError."""
        # GH#52659
        msg = "Parsing datetimes with weekday but no day information is not supported"
        with pytest.raises(ValueError, match=msg):
            Timestamp("2023 Sept Thu")
    def test_construct_from_string_invalid_raises(self):
        """A string dateutil mis-parses into an invalid tzoffset raises in the constructor."""
        # dateutil (weirdly) parses "200622-12-31" as
        # datetime(2022, 6, 20, 12, 0, tzinfo=tzoffset(None, -111600)
        # which besides being mis-parsed, is a tzoffset that will cause
        # str(ts) to raise ValueError. Ensure we raise in the constructor
        # instead.
        # see test_to_datetime_malformed_raise for analogous to_datetime test
        with pytest.raises(ValueError, match="gives an invalid tzoffset"):
            Timestamp("200622-12-31")
def test_constructor_str_infer_reso(self):
# non-iso8601 path
# _parse_delimited_date path
ts = Timestamp("01/30/2023")
assert ts.unit == "s"
# _parse_dateabbr_string path
ts = Timestamp("2015Q1")
assert ts.unit == "s"
# dateutil_parse path
ts = Timestamp("2016-01-01 1:30:01 PM")
assert ts.unit == "s"
ts = Timestamp("2016 June 3 15:25:01.345")
assert ts.unit == "ms"
ts = Timestamp("300-01-01")
assert ts.unit == "s"
ts = Timestamp("300 June 1:30:01.300")
assert ts.unit == "ms"
def test_constructor_from_iso8601_str_with_offset_reso(self):
# GH#49737
ts = Timestamp("2016-01-01 04:05:06-01:00")
assert ts.unit == "s"
ts = Timestamp("2016-01-01 04:05:06.000-01:00")
assert ts.unit == "ms"
ts = Timestamp("2016-01-01 04:05:06.000000-01:00")
assert ts.unit == "us"
ts = Timestamp("2016-01-01 04:05:06.000000001-01:00")
assert ts.unit == "ns"
def test_constructor_from_date_second_reso(self):
# GH#49034 constructing from a pydate object gets lowest supported
# reso, i.e. seconds
obj = date(2012, 9, 1)
ts = Timestamp(obj)
assert ts.unit == "s"
    @pytest.mark.parametrize("typ", [int, float])
    def test_construct_from_int_float_with_unit_out_of_bound_raises(self, typ):
        """Out-of-bounds numeric input with a unit raises OutOfBoundsDatetime."""
        # GH#50870 make sure we get a OutOfBoundsDatetime instead of OverflowError
        val = typ(150000000000000)
        msg = f"cannot convert input {val} with the unit 'D'"
        with pytest.raises(OutOfBoundsDatetime, match=msg):
            Timestamp(val, unit="D")
    @pytest.mark.parametrize("typ", [int, float])
    def test_constructor_int_float_with_YM_unit(self, typ):
        """Round numeric values convert correctly with year/month units."""
        # GH#47266 avoid the conversions in cast_from_unit
        val = typ(150)

        ts = Timestamp(val, unit="Y")
        expected = Timestamp("2120-01-01")
        assert ts == expected

        ts = Timestamp(val, unit="M")
        expected = Timestamp("1982-07-01")
        assert ts == expected
def test_constructor_float_not_round_with_YM_unit_deprecated(self):
# GH#47267 avoid the conversions in cast_from-unit
msg = "Conversion of non-round float with unit=[MY] is ambiguous"
with pytest.raises(ValueError, match=msg):
Timestamp(150.5, unit="Y")
with pytest.raises(ValueError, match=msg):
Timestamp(150.5, unit="M")
def test_constructor_datetime64_with_tz(self):
# GH#42288, GH#24559
dt = np.datetime64("1970-01-01 05:00:00")
tzstr = "UTC+05:00"
# pre-2.0 this interpreted dt as a UTC time. in 2.0 this is treated
# as a wall-time, consistent with DatetimeIndex behavior
ts = Timestamp(dt, tz=tzstr)
alt = Timestamp(dt).tz_localize(tzstr)
assert ts == alt
assert ts.hour == 5
    def test_constructor(self):
        """Strings and datetimes round-trip to the same internal nanosecond value,
        with and without a timezone, and re-construction preserves that value."""
        base_str = "2014-07-01 09:00"
        base_dt = datetime(2014, 7, 1, 9)
        base_expected = 1_404_205_200_000_000_000

        # confirm base representation is correct
        assert calendar.timegm(base_dt.timetuple()) * 1_000_000_000 == base_expected

        # (input string, equivalent datetime, expected ns-since-epoch) triples
        tests = [
            (base_str, base_dt, base_expected),
            (
                "2014-07-01 10:00",
                datetime(2014, 7, 1, 10),
                base_expected + 3600 * 1_000_000_000,
            ),
            (
                "2014-07-01 09:00:00.000008000",
                datetime(2014, 7, 1, 9, 0, 0, 8),
                base_expected + 8000,
            ),
            (
                "2014-07-01 09:00:00.000000005",
                Timestamp("2014-07-01 09:00:00.000000005"),
                base_expected + 5,
            ),
        ]

        # (timezone, UTC offset in hours) pairs covering str, pytz and dateutil
        timezones = [
            (None, 0),
            ("UTC", 0),
            (pytz.utc, 0),
            ("Asia/Tokyo", 9),
            ("US/Eastern", -4),
            ("dateutil/US/Pacific", -7),
            (pytz.FixedOffset(-180), -3),
            (dateutil.tz.tzoffset(None, 18000), 5),
        ]

        for date_str, date_obj, expected in tests:
            for result in [Timestamp(date_str), Timestamp(date_obj)]:
                result = result.as_unit("ns")  # test originally written before non-nano
                # only with timestring
                assert result.as_unit("ns")._value == expected

                # re-creation shouldn't affect to internal value
                result = Timestamp(result)
                assert result.as_unit("ns")._value == expected

            # with timezone
            for tz, offset in timezones:
                for result in [Timestamp(date_str, tz=tz), Timestamp(date_obj, tz=tz)]:
                    result = result.as_unit(
                        "ns"
                    )  # test originally written before non-nano
                    expected_tz = expected - offset * 3600 * 1_000_000_000
                    assert result.as_unit("ns")._value == expected_tz

                    # should preserve tz
                    result = Timestamp(result)
                    assert result.as_unit("ns")._value == expected_tz

                    # should convert to UTC
                    if tz is not None:
                        result = Timestamp(result).tz_convert("UTC")
                    else:
                        result = Timestamp(result, tz="UTC")
                    expected_utc = expected - offset * 3600 * 1_000_000_000
                    assert result.as_unit("ns")._value == expected_utc
    def test_constructor_with_stringoffset(self):
        """Strings carrying an explicit UTC offset preserve the absolute instant
        regardless of the target timezone, and repr round-trips via eval (GH#7833)."""
        # GH 7833
        base_str = "2014-07-01 11:00:00+02:00"
        base_dt = datetime(2014, 7, 1, 9)
        base_expected = 1_404_205_200_000_000_000

        # confirm base representation is correct
        assert calendar.timegm(base_dt.timetuple()) * 1_000_000_000 == base_expected

        # (offset-suffixed input string, expected ns-since-epoch) pairs
        tests = [
            (base_str, base_expected),
            ("2014-07-01 12:00:00+02:00", base_expected + 3600 * 1_000_000_000),
            ("2014-07-01 11:00:00.000008000+02:00", base_expected + 8000),
            ("2014-07-01 11:00:00.000000005+02:00", base_expected + 5),
        ]

        timezones = [
            (None, 0),
            ("UTC", 0),
            (pytz.utc, 0),
            ("Asia/Tokyo", 9),
            ("US/Eastern", -4),
            ("dateutil/US/Pacific", -7),
            (pytz.FixedOffset(-180), -3),
            (dateutil.tz.tzoffset(None, 18000), 5),
        ]

        for date_str, expected in tests:
            for result in [Timestamp(date_str)]:
                # only with timestring
                assert result.as_unit("ns")._value == expected

                # re-creation shouldn't affect to internal value
                result = Timestamp(result)
                assert result.as_unit("ns")._value == expected

            # with timezone: the embedded offset pins the instant, so the
            # internal value is independent of the tz argument
            for tz, offset in timezones:
                result = Timestamp(date_str, tz=tz)
                expected_tz = expected
                assert result.as_unit("ns")._value == expected_tz

                # should preserve tz
                result = Timestamp(result)
                assert result.as_unit("ns")._value == expected_tz

                # should convert to UTC
                result = Timestamp(result).tz_convert("UTC")
                expected_utc = expected
                assert result.as_unit("ns")._value == expected_utc

        # This should be 2013-11-01 05:00 in UTC
        # converted to Chicago tz
        result = Timestamp("2013-11-01 00:00:00-0500", tz="America/Chicago")
        assert result._value == Timestamp("2013-11-01 05:00")._value
        expected = "Timestamp('2013-11-01 00:00:00-0500', tz='America/Chicago')"
        assert repr(result) == expected
        assert result == eval(repr(result))

        # This should be 2013-11-01 05:00 in UTC
        # converted to Tokyo tz (+09:00)
        result = Timestamp("2013-11-01 00:00:00-0500", tz="Asia/Tokyo")
        assert result._value == Timestamp("2013-11-01 05:00")._value
        expected = "Timestamp('2013-11-01 14:00:00+0900', tz='Asia/Tokyo')"
        assert repr(result) == expected
        assert result == eval(repr(result))

        # GH11708
        # This should be 2015-11-18 10:00 in UTC
        # converted to Asia/Katmandu
        result = Timestamp("2015-11-18 15:45:00+05:45", tz="Asia/Katmandu")
        assert result._value == Timestamp("2015-11-18 10:00")._value
        expected = "Timestamp('2015-11-18 15:45:00+0545', tz='Asia/Katmandu')"
        assert repr(result) == expected
        assert result == eval(repr(result))

        # This should be 2015-11-18 10:00 in UTC
        # converted to Asia/Kolkata
        result = Timestamp("2015-11-18 15:30:00+05:30", tz="Asia/Kolkata")
        assert result._value == Timestamp("2015-11-18 10:00")._value
        expected = "Timestamp('2015-11-18 15:30:00+0530', tz='Asia/Kolkata')"
        assert repr(result) == expected
        assert result == eval(repr(result))
def test_constructor_invalid(self):
msg = "Cannot convert input"
with pytest.raises(TypeError, match=msg):
Timestamp(slice(2))
msg = "Cannot convert Period"
with pytest.raises(ValueError, match=msg):
Timestamp(Period("1000-01-01"))
    def test_constructor_invalid_tz(self):
        """Bad tz/tzinfo combinations and positional tz misuse are rejected."""
        # GH#17690
        msg = (
            "Argument 'tzinfo' has incorrect type "
            r"\(expected datetime.tzinfo, got str\)"
        )
        with pytest.raises(TypeError, match=msg):
            Timestamp("2017-10-22", tzinfo="US/Eastern")

        msg = "at most one of"
        with pytest.raises(ValueError, match=msg):
            Timestamp("2017-10-22", tzinfo=pytz.utc, tz="UTC")

        msg = "Cannot pass a date attribute keyword argument when passing a date string"
        with pytest.raises(ValueError, match=msg):
            # GH#5168
            # case where user tries to pass tz as an arg, not kwarg, gets
            # interpreted as `year`
            Timestamp("2012-01-01", "US/Pacific")
def test_constructor_strptime(self):
# GH25016
# Test support for Timestamp.strptime
fmt = "%Y%m%d-%H%M%S-%f%z"
ts = "20190129-235348-000001+0000"
msg = r"Timestamp.strptime\(\) is not implemented"
with pytest.raises(NotImplementedError, match=msg):
Timestamp.strptime(ts, fmt)
def test_constructor_tz_or_tzinfo(self):
# GH#17943, GH#17690, GH#5168
stamps = [
Timestamp(year=2017, month=10, day=22, tz="UTC"),
Timestamp(year=2017, month=10, day=22, tzinfo=pytz.utc),
Timestamp(year=2017, month=10, day=22, tz=pytz.utc),
Timestamp(datetime(2017, 10, 22), tzinfo=pytz.utc),
Timestamp(datetime(2017, 10, 22), tz="UTC"),
Timestamp(datetime(2017, 10, 22), tz=pytz.utc),
]
assert all(ts == stamps[0] for ts in stamps)
def test_constructor_positional_with_tzinfo(self):
# GH#31929
ts = Timestamp(2020, 12, 31, tzinfo=timezone.utc)
expected = Timestamp("2020-12-31", tzinfo=timezone.utc)
assert ts == expected
    @pytest.mark.parametrize("kwd", ["nanosecond", "microsecond", "second", "minute"])
    def test_constructor_positional_keyword_mixed_with_tzinfo(self, kwd, request):
        """Mixing positional date parts, a time keyword and tzinfo gives the
        same instant as building it via Timedelta arithmetic."""
        # TODO: if we passed microsecond with a keyword we would mess up
        # xref GH#45307
        if kwd != "nanosecond":
            # nanosecond is keyword-only as of 2.0, others are not
            mark = pytest.mark.xfail(reason="GH#45307")
            request.node.add_marker(mark)

        kwargs = {kwd: 4}
        ts = Timestamp(2020, 12, 31, tzinfo=timezone.utc, **kwargs)

        td_kwargs = {kwd + "s": 4}
        td = Timedelta(**td_kwargs)
        expected = Timestamp("2020-12-31", tz=timezone.utc) + td
        assert ts == expected
def test_constructor_positional(self):
# see gh-10758
msg = (
"'NoneType' object cannot be interpreted as an integer"
if PY310
else "an integer is required"
)
with pytest.raises(TypeError, match=msg):
Timestamp(2000, 1)
msg = "month must be in 1..12"
with pytest.raises(ValueError, match=msg):
Timestamp(2000, 0, 1)
with pytest.raises(ValueError, match=msg):
Timestamp(2000, 13, 1)
msg = "day is out of range for month"
with pytest.raises(ValueError, match=msg):
Timestamp(2000, 1, 0)
with pytest.raises(ValueError, match=msg):
Timestamp(2000, 1, 32)
# see gh-11630
assert repr(Timestamp(2015, 11, 12)) == repr(Timestamp("20151112"))
assert repr(Timestamp(2015, 11, 12, 1, 2, 3, 999999)) == repr(
Timestamp("2015-11-12 01:02:03.999999")
)
def test_constructor_keyword(self):
# GH 10758
msg = "function missing required argument 'day'|Required argument 'day'"
with pytest.raises(TypeError, match=msg):
Timestamp(year=2000, month=1)
msg = "month must be in 1..12"
with pytest.raises(ValueError, match=msg):
Timestamp(year=2000, month=0, day=1)
with pytest.raises(ValueError, match=msg):
Timestamp(year=2000, month=13, day=1)
msg = "day is out of range for month"
with pytest.raises(ValueError, match=msg):
Timestamp(year=2000, month=1, day=0)
with pytest.raises(ValueError, match=msg):
Timestamp(year=2000, month=1, day=32)
assert repr(Timestamp(year=2015, month=11, day=12)) == repr(
Timestamp("20151112")
)
assert repr(
Timestamp(
year=2015,
month=11,
day=12,
hour=1,
minute=2,
second=3,
microsecond=999999,
)
) == repr(Timestamp("2015-11-12 01:02:03.999999"))
    def test_constructor_fromordinal(self):
        """Timestamp.fromordinal round-trips with datetime.toordinal."""
        base = datetime(2000, 1, 1)

        ts = Timestamp.fromordinal(base.toordinal())
        assert base == ts
        assert base.toordinal() == ts.toordinal()

        # tz keyword localizes the ordinal-derived wall time.
        ts = Timestamp.fromordinal(base.toordinal(), tz="US/Eastern")
        assert Timestamp("2000-01-01", tz="US/Eastern") == ts
        assert base.toordinal() == ts.toordinal()

        # GH#3042
        dt = datetime(2011, 4, 16, 0, 0)
        ts = Timestamp.fromordinal(dt.toordinal())
        assert ts.to_pydatetime() == dt

        # with a tzinfo: the ordinal of an aware datetime refers to its
        # wall date, so localizing again must reproduce the same instant.
        stamp = Timestamp("2011-4-16", tz="US/Eastern")
        dt_tz = stamp.to_pydatetime()
        ts = Timestamp.fromordinal(dt_tz.toordinal(), tz="US/Eastern")
        assert ts.to_pydatetime() == dt_tz
    # Every parametrized case builds the same instant via a different mix of
    # positional/keyword/tz arguments plus nanosecond=1.
    @pytest.mark.parametrize(
        "result",
        [
            Timestamp(datetime(2000, 1, 2, 3, 4, 5, 6), nanosecond=1),
            Timestamp(
                year=2000,
                month=1,
                day=2,
                hour=3,
                minute=4,
                second=5,
                microsecond=6,
                nanosecond=1,
            ),
            Timestamp(
                year=2000,
                month=1,
                day=2,
                hour=3,
                minute=4,
                second=5,
                microsecond=6,
                nanosecond=1,
                tz="UTC",
            ),
            Timestamp(2000, 1, 2, 3, 4, 5, 6, None, nanosecond=1),
            Timestamp(2000, 1, 2, 3, 4, 5, 6, tz=pytz.UTC, nanosecond=1),
        ],
    )
    def test_constructor_nanosecond(self, result):
        """The nanosecond keyword adds exactly one nanosecond to the base."""
        # GH 18898
        # As of 2.0 (GH 49416), nanosecond should not be accepted positionally
        expected = Timestamp(datetime(2000, 1, 2, 3, 4, 5, 6), tz=result.tz)
        expected = expected + Timedelta(nanoseconds=1)
        assert result == expected
    # "Z0"/"Z00" are not valid UTC designators and must be rejected.
    @pytest.mark.parametrize("z", ["Z0", "Z00"])
    def test_constructor_invalid_Z0_isostring(self, z):
        """Malformed Zulu suffixes in ISO strings raise ValueError."""
        # GH 8910
        msg = f"Unknown datetime string format, unable to parse: 2014-11-02 01:00{z}"
        with pytest.raises(ValueError, match=msg):
            Timestamp(f"2014-11-02 01:00{z}")
    # Date-component keywords conflict with a fully specified string input.
    @pytest.mark.parametrize(
        "arg",
        [
            "year",
            "month",
            "day",
            "hour",
            "minute",
            "second",
            "microsecond",
            "nanosecond",
        ],
    )
    def test_invalid_date_kwarg_with_string_input(self, arg):
        """Passing any date-attribute keyword with a string input raises."""
        kwarg = {arg: 1}

        msg = "Cannot pass a date attribute keyword argument"
        with pytest.raises(ValueError, match=msg):
            Timestamp("2010-10-10 12:59:59.999999999", **kwarg)
    def test_out_of_bounds_integer_value(self):
        """Huge integer inputs raise OutOfBoundsDatetime, not OverflowError."""
        # GH#26651 check that we raise OutOfBoundsDatetime, not OverflowError
        msg = str(Timestamp.max._value * 2)
        with pytest.raises(OutOfBoundsDatetime, match=msg):
            Timestamp(Timestamp.max._value * 2)
        msg = str(Timestamp.min._value * 2)
        with pytest.raises(OutOfBoundsDatetime, match=msg):
            Timestamp(Timestamp.min._value * 2)
    def test_out_of_bounds_value(self):
        """datetime64 values beyond ns bounds fall back to coarser resos."""
        one_us = np.timedelta64(1).astype("timedelta64[us]")

        # By definition we can't go out of bounds in [ns], so we
        # convert the datetime64s to [us] so we can go out of bounds
        min_ts_us = np.datetime64(Timestamp.min).astype("M8[us]") + one_us
        max_ts_us = np.datetime64(Timestamp.max).astype("M8[us]")

        # No error for the min/max datetimes
        Timestamp(min_ts_us)
        Timestamp(max_ts_us)

        # We used to raise on these before supporting non-nano
        us_val = NpyDatetimeUnit.NPY_FR_us.value
        assert Timestamp(min_ts_us - one_us)._creso == us_val
        assert Timestamp(max_ts_us + one_us)._creso == us_val

        # https://github.com/numpy/numpy/issues/22346 for why
        # we can't use the same construction as above with minute resolution

        # too_low, too_high are the _just_ outside the range of M8[s]
        too_low = np.datetime64("-292277022657-01-27T08:29", "m")
        too_high = np.datetime64("292277026596-12-04T15:31", "m")

        msg = "Out of bounds"
        # One us less than the minimum is an error
        with pytest.raises(ValueError, match=msg):
            Timestamp(too_low)

        # One us more than the maximum is an error
        with pytest.raises(ValueError, match=msg):
            Timestamp(too_high)
    def test_out_of_bounds_string(self):
        """Strings outside ns bounds parse at 's' unit; as_unit('ns') raises."""
        msg = "Cannot cast .* to unit='ns' without overflow"
        with pytest.raises(ValueError, match=msg):
            Timestamp("1676-01-01").as_unit("ns")
        with pytest.raises(ValueError, match=msg):
            Timestamp("2263-01-01").as_unit("ns")

        # Construction itself succeeds with second resolution.
        ts = Timestamp("2263-01-01")
        assert ts.unit == "s"

        ts = Timestamp("1676-01-01")
        assert ts.unit == "s"
    def test_barely_out_of_bounds(self):
        """Values just past Timestamp.max must not be silently truncated."""
        # GH#19529
        # GH#19382 close enough to bounds that dropping nanos would result
        # in an in-bounds datetime
        msg = "Out of bounds nanosecond timestamp: 2262-04-11 23:47:16"
        with pytest.raises(OutOfBoundsDatetime, match=msg):
            Timestamp("2262-04-11 23:47:16.854775808")
    def test_bounds_with_different_units(self):
        """datetime64 inputs outside ns bounds pick a supported resolution."""
        out_of_bounds_dates = ("1677-09-21", "2262-04-12")

        time_units = ("D", "h", "m", "s", "ms", "us")

        for date_string in out_of_bounds_dates:
            for unit in time_units:
                dt64 = np.datetime64(date_string, unit)
                ts = Timestamp(dt64)
                if unit in ["s", "ms", "us"]:
                    # We can preserve the input unit
                    assert ts._value == dt64.view("i8")
                else:
                    # we chose the closest unit that we _do_ support
                    assert ts._creso == NpyDatetimeUnit.NPY_FR_s.value

        # With more extreme cases, we can't even fit inside second resolution
        info = np.iinfo(np.int64)
        msg = "Out of bounds nanosecond timestamp:"
        for value in [info.min + 1, info.max]:
            for unit in ["D", "h", "m"]:
                dt64 = np.datetime64(value, unit)
                with pytest.raises(OutOfBoundsDatetime, match=msg):
                    Timestamp(dt64)

        # Dates just inside the ns bounds construct at every unit.
        in_bounds_dates = ("1677-09-23", "2262-04-11")
        for date_string in in_bounds_dates:
            for unit in time_units:
                dt64 = np.datetime64(date_string, unit)
                Timestamp(dt64)
    # Both 3- and 4-digit year spellings must behave the same way.
    @pytest.mark.parametrize("arg", ["001-01-01", "0001-01-01"])
    def test_out_of_bounds_string_consistency(self, arg):
        """Year-1 strings parse at 's' unit; casting to ns overflows."""
        # GH 15829
        msg = "Cannot cast 0001-01-01 00:00:00 to unit='ns' without overflow"
        with pytest.raises(OutOfBoundsDatetime, match=msg):
            Timestamp(arg).as_unit("ns")

        ts = Timestamp(arg)
        assert ts.unit == "s"
        assert ts.year == ts.month == ts.day == 1
    def test_min_valid(self):
        """Timestamp.min itself must be constructible."""
        # Ensure that Timestamp.min is a valid Timestamp
        Timestamp(Timestamp.min)
    def test_max_valid(self):
        """Timestamp.max itself must be constructible."""
        # Ensure that Timestamp.max is a valid Timestamp
        Timestamp(Timestamp.max)
    def test_now(self):
        """Timestamp('now'), Timestamp.now() and datetime.now() agree."""
        # GH#9000
        ts_from_string = Timestamp("now")
        ts_from_method = Timestamp.now()
        ts_datetime = datetime.now()

        ts_from_string_tz = Timestamp("now", tz="US/Eastern")
        ts_from_method_tz = Timestamp.now(tz="US/Eastern")

        # Check that the delta between the times is less than 1s (arbitrarily
        # small)
        delta = Timedelta(seconds=1)
        assert abs(ts_from_method - ts_from_string) < delta
        assert abs(ts_datetime - ts_from_method) < delta
        assert abs(ts_from_method_tz - ts_from_string_tz) < delta
        assert (
            abs(
                ts_from_string_tz.tz_localize(None)
                - ts_from_method_tz.tz_localize(None)
            )
            < delta
        )
    def test_today(self):
        """Timestamp('today'), Timestamp.today() and datetime.today() agree."""
        ts_from_string = Timestamp("today")
        ts_from_method = Timestamp.today()
        ts_datetime = datetime.today()

        ts_from_string_tz = Timestamp("today", tz="US/Eastern")
        ts_from_method_tz = Timestamp.today(tz="US/Eastern")

        # Check that the delta between the times is less than 1s (arbitrarily
        # small)
        delta = Timedelta(seconds=1)
        assert abs(ts_from_method - ts_from_string) < delta
        assert abs(ts_datetime - ts_from_method) < delta
        assert abs(ts_from_method_tz - ts_from_string_tz) < delta
        assert (
            abs(
                ts_from_string_tz.tz_localize(None)
                - ts_from_method_tz.tz_localize(None)
            )
            < delta
        )
    # Assigning to .tz must fail whether the new value is None or a tz.
    @pytest.mark.parametrize("tz", [None, pytz.timezone("US/Pacific")])
    def test_disallow_setting_tz(self, tz):
        """The tz attribute is read-only; use tz_localize/tz_convert."""
        # GH 3746
        ts = Timestamp("2010")
        msg = "Cannot directly set timezone"
        with pytest.raises(AttributeError, match=msg):
            ts.tz = tz
    # Offsets on either side of the Helsinki DST transition.
    @pytest.mark.parametrize("offset", ["+0300", "+0200"])
    def test_construct_timestamp_near_dst(self, offset):
        """Round-tripping through the constructor preserves DST instants."""
        # GH 20854
        expected = Timestamp(f"2016-10-30 03:00:00{offset}", tz="Europe/Helsinki")
        result = Timestamp(expected).tz_convert("Europe/Helsinki")
        assert result == expected
    # Slash- and dash-separated dates with an explicit offset parse alike.
    @pytest.mark.parametrize(
        "arg", ["2013/01/01 00:00:00+09:00", "2013-01-01 00:00:00+09:00"]
    )
    def test_construct_with_different_string_format(self, arg):
        """Offset strings yield a FixedOffset-aware Timestamp."""
        # GH 12064
        result = Timestamp(arg)
        expected = Timestamp(datetime(2013, 1, 1), tz=pytz.FixedOffset(540))
        assert result == expected
    # The same rule applies to stdlib datetime and Timestamp inputs.
    @pytest.mark.parametrize("box", [datetime, Timestamp])
    def test_raise_tz_and_tzinfo_in_datetime_input(self, box):
        """tz/tzinfo keywords conflict with an already-aware input."""
        # GH 23579
        kwargs = {"year": 2018, "month": 1, "day": 1, "tzinfo": pytz.utc}
        msg = "Cannot pass a datetime or Timestamp"
        with pytest.raises(ValueError, match=msg):
            Timestamp(box(**kwargs), tz="US/Pacific")
        msg = "Cannot pass a datetime or Timestamp"
        with pytest.raises(ValueError, match=msg):
            Timestamp(box(**kwargs), tzinfo=pytz.timezone("US/Pacific"))
    def test_dont_convert_dateutil_utc_to_pytz_utc(self):
        """A dateutil tzutc() timezone is kept, not swapped for pytz.UTC."""
        result = Timestamp(datetime(2018, 1, 1), tz=tzutc())
        expected = Timestamp(datetime(2018, 1, 1)).tz_localize(tzutc())
        assert result == expected
    def test_constructor_subclassed_datetime(self):
        """datetime subclasses are accepted as constructor input."""
        # GH 25851
        # ensure that subclassed datetime works for
        # Timestamp creation
        class SubDatetime(datetime):
            pass

        data = SubDatetime(2000, 1, 1)
        result = Timestamp(data)
        expected = Timestamp(2000, 1, 1)
        assert result == expected
    def test_constructor_fromisocalendar(self):
        """Timestamp.fromisocalendar matches datetime.fromisocalendar."""
        # GH 30395
        expected_timestamp = Timestamp("2000-01-03 00:00:00")
        expected_stdlib = datetime.fromisocalendar(2000, 1, 1)
        result = Timestamp.fromisocalendar(2000, 1, 1)
        assert result == expected_timestamp
        assert result == expected_stdlib
        assert isinstance(result, Timestamp)
def test_constructor_ambiguous_dst():
    """Re-constructing a Timestamp from an ambiguous DST time keeps .value."""
    # GH 24329
    # Make sure that calling Timestamp constructor
    # on Timestamp created from ambiguous time
    # doesn't change Timestamp.value
    ts = Timestamp(1382835600000000000, tz="dateutil/Europe/London")
    expected = ts._value
    result = Timestamp(ts)._value
    assert result == expected
# Two epochs just before the US/Pacific DST switch.
@pytest.mark.parametrize("epoch", [1552211999999999872, 1552211999999999999])
def test_constructor_before_dst_switch(epoch):
    """Times just before a DST switch keep their value and zero dst offset."""
    # GH 31043
    # Make sure that calling Timestamp constructor
    # on time just before DST switch doesn't lead to
    # nonexistent time or value change
    ts = Timestamp(epoch, tz="dateutil/America/Los_Angeles")
    result = ts.tz.dst(ts)
    expected = timedelta(seconds=0)
    assert Timestamp(ts)._value == epoch
    assert result == expected
def test_timestamp_constructor_identity():
    """Timestamp(Timestamp) returns the same object, not a copy."""
    # Test for #30543
    expected = Timestamp("2017-01-01T12")
    result = Timestamp(expected)
    assert result is expected
# Progressively more complete keyword sets, all still missing a component.
@pytest.mark.parametrize("kwargs", [{}, {"year": 2020}, {"year": 2020, "month": 1}])
def test_constructor_missing_keyword(kwargs):
    """Missing year/month/day keywords raise datetime's TypeError."""
    # GH 31200
    # The exact error message of datetime() depends on its version
    msg1 = r"function missing required argument '(year|month|day)' \(pos [123]\)"
    msg2 = r"Required argument '(year|month|day)' \(pos [123]\) not found"
    msg = "|".join([msg1, msg2])

    with pytest.raises(TypeError, match=msg):
        Timestamp(**kwargs)
# One value below and one above the valid 0..999 nanosecond range.
@pytest.mark.parametrize("nano", [-1, 1000])
def test_timestamp_nano_range(nano):
    """nanosecond keyword outside 0..999 raises ValueError."""
    # GH 48255
    with pytest.raises(ValueError, match="nanosecond must be in 0..999"):
        Timestamp(year=2022, month=1, day=1, nanosecond=nano)
def test_non_nano_value():
    """.value reports nanoseconds for non-nano units, or raises helpfully."""
    # https://github.com/pandas-dev/pandas/issues/49076
    result = Timestamp("1800-01-01", unit="s").value
    # `.value` shows nanoseconds, even though unit is 's'
    assert result == -5364662400000000000

    # out-of-nanoseconds-bounds `.value` raises informative message
    msg = (
        r"Cannot convert Timestamp to nanoseconds without overflow. "
        r"Use `.asm8.view\('i8'\)` to cast represent Timestamp in its "
        r"own unit \(here, s\).$"
    )
    ts = Timestamp("0300-01-01")
    with pytest.raises(OverflowError, match=msg):
        ts.value
    # check that the suggested workaround actually works
    result = ts.asm8.view("i8")
    assert result == -52700112000
def test_timestamp_constructor_invalid_fold_raise():
    """fold accepts only None, 0 or 1."""
    # Test forGH #25057
    # Valid fold values are only [None, 0, 1]
    msg = "Valid values for the fold argument are None, 0, or 1."
    with pytest.raises(ValueError, match=msg):
        Timestamp(123, fold=2)
def test_timestamp_constructor_pytz_fold_raise():
    """Combining fold with a pytz timezone is rejected."""
    # Test for GH#25057
    # pytz doesn't support fold. Check that we raise
    # if fold is passed with pytz
    msg = "pytz timezones do not support fold. Please use dateutil timezones."
    tz = pytz.timezone("Europe/London")
    with pytest.raises(ValueError, match=msg):
        Timestamp(datetime(2019, 10, 27, 0, 30, 0, 0), tz=tz, fold=0)
@pytest.mark.parametrize("fold", [0, 1])
# Inputs that already pin an absolute instant, so fold is meaningless.
@pytest.mark.parametrize(
    "ts_input",
    [
        1572136200000000000,
        1572136200000000000.0,
        np.datetime64(1572136200000000000, "ns"),
        "2019-10-27 01:30:00+01:00",
        datetime(2019, 10, 27, 0, 30, 0, 0, tzinfo=timezone.utc),
    ],
)
def test_timestamp_constructor_fold_conflict(ts_input, fold):
    """fold with an unambiguous (absolute) input raises ValueError."""
    # Test for GH#25057
    # Check that we raise on fold conflict
    msg = (
        "Cannot pass fold with possibly unambiguous input: int, float, "
        "numpy.datetime64, str, or timezone-aware datetime-like. "
        "Pass naive datetime-like or build Timestamp from components."
    )
    with pytest.raises(ValueError, match=msg):
        Timestamp(ts_input=ts_input, fold=fold)
@pytest.mark.parametrize("tz", ["dateutil/Europe/London", None])
@pytest.mark.parametrize("fold", [0, 1])
def test_timestamp_constructor_retain_fold(tz, fold):
    """An explicitly passed fold is stored on the resulting Timestamp."""
    # Test for GH#25057
    # Check that we retain fold
    ts = Timestamp(year=2019, month=10, day=27, hour=1, minute=30, tz=tz, fold=fold)
    result = ts.fold
    expected = fold
    assert result == expected
# Timezone fixtures for the fold tests below; fall back to dateutil only
# when the system tz database has no Europe/London entry for zoneinfo.
try:
    _tzs = [
        "dateutil/Europe/London",
        zoneinfo.ZoneInfo("Europe/London"),
    ]
except zoneinfo.ZoneInfoNotFoundError:
    _tzs = ["dateutil/Europe/London"]
@pytest.mark.parametrize("tz", _tzs)
# Each pair is (input, expected fold) around the 2019-10-27 London DST
# repeat of 01:30: the earlier instant has fold=0, the later fold=1.
@pytest.mark.parametrize(
    "ts_input,fold_out",
    [
        (1572136200000000000, 0),
        (1572139800000000000, 1),
        ("2019-10-27 01:30:00+01:00", 0),
        ("2019-10-27 01:30:00+00:00", 1),
        (datetime(2019, 10, 27, 1, 30, 0, 0, fold=0), 0),
        (datetime(2019, 10, 27, 1, 30, 0, 0, fold=1), 1),
    ],
)
def test_timestamp_constructor_infer_fold_from_value(tz, ts_input, fold_out):
    """fold is inferred from absolute inputs or copied from datetime.fold."""
    # Test for GH#25057
    # Check that we infer fold correctly based on timestamps since utc
    # or strings
    ts = Timestamp(ts_input, tz=tz)
    result = ts.fold
    expected = fold_out
    assert result == expected
@pytest.mark.parametrize("tz", ["dateutil/Europe/London"])
# Same naive wall time; fold selects the earlier/later absolute instant.
@pytest.mark.parametrize(
    "ts_input,fold,value_out",
    [
        (datetime(2019, 10, 27, 1, 30, 0, 0), 0, 1572136200000000),
        (datetime(2019, 10, 27, 1, 30, 0, 0), 1, 1572139800000000),
    ],
)
def test_timestamp_constructor_adjust_value_for_fold(tz, ts_input, fold, value_out):
    """fold disambiguates a naive input into the correct absolute value."""
    # Test for GH#25057
    # Check that we adjust value for fold correctly
    # based on timestamps since utc
    ts = Timestamp(ts_input, tz=tz, fold=fold)
    result = ts._value
    expected = value_out
    assert result == expected
# All recognized missing-value sentinels.
@pytest.mark.parametrize("na_value", [None, np.nan, np.datetime64("NaT"), NaT, NA])
def test_timestamp_constructor_na_value(na_value):
    """NA-like inputs all produce the NaT singleton."""
    # GH45481
    result = Timestamp(na_value)
    expected = NaT
    assert result is expected
|
d42eeeb18708f00b59d8a04af54b8fba232be10c
|
150b528ec8b413c33333a6d03e66a41a695e5e5f
|
/dcdownloader/utils.py
|
d9e8a00d4a15bd5a0d5d60e625302bbb15a0aa3b
|
[
"MIT"
] |
permissive
|
dev-techmoe/python-dcdownloader
|
c37dd3970ff332f525c0ddd6a6d0fc35b9ce9787
|
cd9aeec2b15c608aae8345f4c1b4c29890df8e13
|
refs/heads/dev
| 2021-04-09T13:22:05.598246
| 2019-11-02T05:10:28
| 2019-11-02T05:10:28
| 125,320,296
| 184
| 27
|
MIT
| 2021-04-02T05:59:19
| 2018-03-15T06:06:46
|
Python
|
UTF-8
|
Python
| false
| false
| 2,790
|
py
|
utils.py
|
import re, os, traceback
from dcdownloader import config, title
def decode_packed_codes(code):
    """Decode JavaScript obfuscated with Dean Edwards' "P.A.C.K.E.R.".

    The packer emits ``eval(function(p,a,c,k,e,d){...}('<payload>',base,
    count,'<sym|bols>'.split('|'),...))``; this extracts the payload, base,
    symbol count and symbol table, then substitutes every base-N word token
    in the payload with its symbol.

    :param code: text containing a packed-code invocation
    :return: the de-obfuscated source
    :raises ValueError: if no packed-code pattern is found (previously this
        crashed with an unhelpful ``AttributeError`` on ``None.groups()``)
    """

    def encode_base_n(num, n, table=None):
        # Render `num` in base `n` using the packer's digit alphabet
        # (0-9, a-z, A-Z), matching how the payload tokens were encoded.
        FULL_TABLE = '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
        if not table:
            table = FULL_TABLE[:n]

        if n > len(table):
            raise ValueError('base %d exceeds table length %d' % (n, len(table)))

        if num == 0:
            return table[0]

        ret = ''
        while num:
            ret = table[num % n] + ret
            num = num // n
        return ret

    pattern = r"}\('(.+)',(\d+),(\d+),'([^']+)'\.split\('\|'\)"
    mobj = re.search(pattern, code)
    if mobj is None:
        raise ValueError('no P.A.C.K.E.R. packed code found in input')

    obfucasted_code, base, count, symbols = mobj.groups()
    base = int(base)
    count = int(count)
    symbols = symbols.split('|')
    symbol_table = {}

    # Map each base-N token back to its symbol; empty slots in the symbol
    # list mean the token stands for itself.
    while count:
        count -= 1
        base_n_count = encode_base_n(count, base)
        symbol_table[base_n_count] = symbols[count] or base_n_count

    return re.sub(
        r'\b(\w+)\b', lambda mobj: symbol_table[mobj.group(0)],
        obfucasted_code)
def generate_aiohttp_session_config(**kwargs):
    """Build keyword arguments for aiohttp requests.

    Starts from the defaults (timeout, SSL verification and proxy taken
    from the application config) and lets any caller-supplied keyword
    override them.
    """
    session_kwargs = dict(
        timeout=50,
        verify_ssl=config.get('debug_mode'),
        proxy=config.get('proxy'),
    )
    session_kwargs.update(kwargs)
    return session_kwargs
def update_window_title(mode=None, msg=None):
    """Set the terminal window title to "DCDownloader[: mode[ - msg]]"."""
    window_title = 'DCDownloader'
    if mode is not None:
        window_title += ': %s' % mode
    if msg is not None:
        window_title += ' - %s' % msg
    title.update(window_title)
def mkdir(path):
    """Create *path* and any missing parent directories.

    Replaces the previous hand-rolled loop over '/'-separated components
    (which called os.path.exists + os.mkdir per component and could race
    with concurrent creators) with os.makedirs, which handles nested
    creation atomically per component and tolerates already-existing
    directories. An empty path is a no-op, matching the old behavior.
    """
    if path:
        os.makedirs(path, exist_ok=True)
def retry(max_num=5, on_retry=None, on_fail=None, on_fail_exit=False):
    """Decorator factory that retries an async callable up to *max_num* times.

    :param max_num: total number of attempts (not extra retries)
    :param on_retry: optional callback invoked after every failed attempt
        as ``on_retry(err=exc, args=[args, kwargs], retry_num=attempt)``
        where ``attempt`` counts from 0
    :param on_fail: optional callback invoked once after the final failed
        attempt with the same signature (``retry_num == max_num - 1``)
    :param on_fail_exit: if true, terminate the process after on_fail
    :return: decorator wrapping an ``async`` function

    Returns the wrapped function's result on success, or ``None`` when all
    attempts fail (and ``on_fail_exit`` is false), matching the previous
    behavior.

    Fixes two defects of the old implementation: the retry counter was a
    ``nonlocal`` shared across *all* calls (and all concurrent invocations)
    of the decorated function and was only reset on total failure, so a
    successful call after retries silently shrank the budget of the next
    call; and retries were implemented by recursion, growing the stack.
    The counter is now local to each invocation and retries are a loop.
    """
    def decorate(func):
        async def _retry(*args, **kwargs):
            for attempt in range(max_num):
                try:
                    return await func(*args, **kwargs)
                except Exception as err:
                    if on_retry is not None:
                        on_retry(err=err, args=[args, kwargs], retry_num=attempt)
                    if attempt >= max_num - 1:
                        # Budget exhausted: report and optionally abort.
                        if on_fail is not None:
                            on_fail(err=err, args=[args, kwargs], retry_num=attempt)
                        if on_fail_exit:
                            exit()
        return _retry
    return decorate
|
42d77384e8794fde73a0fdd228a8f6f948e415a8
|
85373d45a83e4096affafa4f4e5b400787413e57
|
/test/programytest/storage/stores/file/store/test_properties.py
|
c69d7606014bac93ecabe9f05c2c8286d6130f4f
|
[
"MIT"
] |
permissive
|
keiffster/program-y
|
a02bb9d8278835547cc875f4f9cd668d5b1f44da
|
fc7b0a3afa4fa6ed683e0c817a9aa89f9543bb20
|
refs/heads/master
| 2023-08-23T13:55:39.255535
| 2022-12-13T09:51:57
| 2022-12-13T09:51:57
| 74,462,571
| 379
| 173
|
NOASSERTION
| 2023-05-23T00:51:21
| 2016-11-22T10:43:41
|
Python
|
UTF-8
|
Python
| false
| false
| 2,375
|
py
|
test_properties.py
|
import os
import os.path
from unittest.mock import patch
from programy.mappings.properties import PropertiesCollection
from programy.storage.stores.file.config import FileStorageConfiguration
from programy.storage.stores.file.engine import FileStorageEngine
from programy.storage.stores.file.store.properties import FilePropertyStore
from programytest.storage.asserts.store.assert_properties import PropertyStoreAsserts
from programy.storage.stores.file.config import FileStoreConfiguration
class FilePropertyStoreTests(PropertyStoreAsserts):
    """File-backed property store: construction, paths and loading."""

    @staticmethod
    def _make_store(storage_config=None):
        # Shared setup: build an initialised engine around the given (or
        # default) file-storage configuration and wrap it in a store.
        cfg = storage_config if storage_config is not None else FileStorageConfiguration()
        eng = FileStorageEngine(cfg)
        eng.initialise()
        return FilePropertyStore(eng)

    def test_initialise(self):
        """The store keeps a reference to the engine it was built with."""
        store = self._make_store()
        self.assertEqual(store.storage_engine, store.storage_engine)
        cfg = FileStorageConfiguration()
        eng = FileStorageEngine(cfg)
        eng.initialise()
        store = FilePropertyStore(eng)
        self.assertEqual(store.storage_engine, eng)

    def test_storage_path(self):
        """Default configuration points at /tmp/properties/properties.txt."""
        store = self._make_store()
        self.assertEquals('/tmp/properties/properties.txt', store._get_storage_path())
        self.assertIsInstance(store.get_storage(), FileStoreConfiguration)

    def test_load_properties(self):
        """Loading the sample text file populates the collection."""
        cfg = FileStorageConfiguration()
        sample = (
            os.path.dirname(__file__) + os.sep + "data" + os.sep + "lookups"
            + os.sep + "text" + os.sep + "properties.txt"
        )
        cfg._properties_storage = FileStoreConfiguration(
            file=sample, fileformat="text", encoding="utf-8", delete_on_start=False
        )
        store = self._make_store(cfg)

        collection = PropertiesCollection()
        store.load(collection)

        for key, value in (
            ("name", "Y-Bot"),
            ("firstname", "Y"),
            ("middlename", "AIML"),
        ):
            self.assertTrue(collection.has_key(key))
            self.assertTrue(value, collection.value(key))

    def test_process_line(self):
        """Blank lines and '#' comments are skipped; 'key:value' is kept."""
        store = self._make_store()
        self.assertFalse(store._process_line("", {}))
        self.assertFalse(store._process_line("#name:Y-Bot", {}))
        self.assertTrue(store._process_line("name:Y-Bot", {}))
|
594e8a9941a5471133ef9b981a67f3f4365209c3
|
017b1261bac4a6ed7e613474f328239188366491
|
/src/rpdk/core/contract/suite/hook/hook_handler_commons.py
|
a50dee41e335e036a782fc16bd965f5e309eddb8
|
[
"Apache-2.0"
] |
permissive
|
aws-cloudformation/cloudformation-cli
|
bd4834bfe8b39c9fc926f9c77710b2c6d1b167c1
|
75bed278bcec94739e4c132e2b3d88a4fddb5bf4
|
refs/heads/master
| 2023-08-07T18:24:56.153849
| 2023-07-31T22:54:23
| 2023-07-31T22:54:23
| 143,929,054
| 270
| 164
|
Apache-2.0
| 2023-08-31T16:06:04
| 2018-08-07T21:33:19
|
Python
|
UTF-8
|
Python
| false
| false
| 3,979
|
py
|
hook_handler_commons.py
|
# pylint: disable=import-outside-toplevel
import logging
import pytest
from rpdk.core.contract.hook_client import HookClient
from rpdk.core.contract.interface import HandlerErrorCode, HookStatus
from rpdk.core.contract.suite.contract_asserts_commons import failed_event
LOG = logging.getLogger(__name__)
# Shape of a CloudFormation type name: Vendor::Service::Resource, each
# segment 2-64 alphanumerics.
TARGET_NAME_REGEX = "^[a-zA-Z0-9]{2,64}::[a-zA-Z0-9]{2,64}::[a-zA-Z0-9]{2,64}$"
# Minimal resource schema used to fabricate a target no hook supports,
# exercising the UnsupportedTarget error path.
UNSUPPORTED_TARGET_SCHEMA = {
    "type": "object",
    "properties": {
        "id": {"type": "string", "format": "arn"},
        "property1": {"type": "string", "pattern": "^[a-zA-Z0-9]{2,26}$"},
        "property2": {"type": "integer", "minimum": 1, "maximum": 100},
    },
}
def test_hook_success(hook_client, invocation_point, target, target_model):
    """Invoke a non-UPDATE hook handler and assert it reports SUCCESS.

    Raises ValueError if called with an UPDATE invocation point; use
    test_update_hook_success for those. Returns the handler response.
    """
    if HookClient.is_update_invocation_point(invocation_point):
        message = "Invocation point {} not supported for this testing operation"
        raise ValueError(message.format(invocation_point))
    outcome = hook_client.call_and_assert(
        invocation_point, HookStatus.SUCCESS, target, target_model
    )
    return outcome[1]
def test_update_hook_success(hook_client, invocation_point, target, target_model):
    """Invoke an UPDATE hook handler and assert it reports SUCCESS.

    Raises ValueError for non-UPDATE invocation points; use
    test_hook_success for those. Returns the handler response.
    """
    if not HookClient.is_update_invocation_point(invocation_point):
        message = "Invocation point {} not supported for testing UPDATE hook operation"
        raise ValueError(message.format(invocation_point))
    outcome = hook_client.call_and_assert(
        invocation_point, HookStatus.SUCCESS, target, target_model
    )
    return outcome[1]
def test_hook_failed(hook_client, invocation_point, target, target_model=None):
    """Invoke a hook handler and assert it reports FAILED with a message.

    Returns a (response, error_code) pair for further assertions.
    """
    _status, response, error_code = hook_client.call_and_assert(
        invocation_point, HookStatus.FAILED, target, target_model
    )
    # A FAILED response must always carry a human-readable message.
    assert response["message"]

    return response, error_code
def test_hook_handlers_success(hook_client, invocation_point):
    """Run every generated valid request example and expect SUCCESS."""
    # Pick the checker once; UPDATE invocation points use the
    # previous/current-properties variant.
    runner = (
        test_update_hook_success
        if HookClient.is_update_invocation_point(invocation_point)
        else test_hook_success
    )
    for _ip, target, target_model in hook_client.generate_request_examples(
        invocation_point
    ):
        runner(hook_client, invocation_point, target, target_model)
def test_hook_handlers_failed(hook_client, invocation_point):
    """Run every generated invalid request example and expect FAILED."""
    examples = hook_client.generate_invalid_request_examples(invocation_point)
    for _ip, target, target_model in examples:
        test_hook_failed(hook_client, invocation_point, target, target_model)
@failed_event(
    error_code=HandlerErrorCode.UnsupportedTarget,
    msg="A hook handler MUST return FAILED with a UnsupportedTarget error code if the target is not supported",
)
def test_hook_unsupported_target(hook_client, invocation_point):
    """Fabricate a target no handler matches and expect UnsupportedTarget.

    Only meaningful for handlers registered with wildcard targets;
    otherwise the fabricated target would never reach this handler, so the
    test is skipped. Returns the error code for the failed_event decorator
    to validate.
    """
    if not hook_client.handler_has_wildcard_targets(invocation_point):
        pytest.skip("No wildcard hook targets. Skipping test.")

    # imported here to avoid hypothesis being loaded before pytest is loaded
    from ...resource_generator import ResourceGenerator

    # Build a random resource conforming to the unsupported schema.
    unsupported_target = ResourceGenerator(
        UNSUPPORTED_TARGET_SCHEMA
    ).generate_schema_strategy(UNSUPPORTED_TARGET_SCHEMA)
    target_model = {"resourceProperties": unsupported_target.example()}
    if HookClient.is_update_invocation_point(invocation_point):
        # UPDATE hooks also receive the previous state; keep the id stable
        # so it looks like the same resource before and after.
        target_model["previousResourceProperties"] = unsupported_target.example()
        target_model["previousResourceProperties"]["id"] = target_model[
            "resourceProperties"
        ]["id"]

    _response, error_code = test_hook_failed(
        hook_client,
        invocation_point,
        ResourceGenerator.generate_string_strategy(
            {"pattern": TARGET_NAME_REGEX}
        ).example(),
        target_model,
    )
    return error_code
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.