hexsha
stringlengths 40
40
| size
int64 1
1.03M
| ext
stringclasses 10
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 3
239
| max_stars_repo_name
stringlengths 5
130
| max_stars_repo_head_hexsha
stringlengths 40
78
| max_stars_repo_licenses
listlengths 1
10
| max_stars_count
int64 1
191k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 3
239
| max_issues_repo_name
stringlengths 5
130
| max_issues_repo_head_hexsha
stringlengths 40
78
| max_issues_repo_licenses
listlengths 1
10
| max_issues_count
int64 1
67k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 3
239
| max_forks_repo_name
stringlengths 5
130
| max_forks_repo_head_hexsha
stringlengths 40
78
| max_forks_repo_licenses
listlengths 1
10
| max_forks_count
int64 1
105k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 1
1.03M
| avg_line_length
float64 1
958k
| max_line_length
int64 1
1.03M
| alphanum_fraction
float64 0
1
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
4a082d3c3bcbafaea739319cd0ab16f79ffd0a75
| 3,554
|
py
|
Python
|
news-crawler/main.py
|
flavienbwk/news-crawler
|
f0b940dd8828ccadba6c29783c66b3f72c2d32d7
|
[
"Apache-2.0"
] | 1
|
2021-09-28T12:11:34.000Z
|
2021-09-28T12:11:34.000Z
|
news-crawler/main.py
|
flavienbwk/news-crawler
|
f0b940dd8828ccadba6c29783c66b3f72c2d32d7
|
[
"Apache-2.0"
] | null | null | null |
news-crawler/main.py
|
flavienbwk/news-crawler
|
f0b940dd8828ccadba6c29783c66b3f72c2d32d7
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
import os
import time
import slugify
from playwright.sync_api import sync_playwright
from crawlers import CRAWLERS
from crawlers.Crawler import Crawler
from crawlers.Login import Login
from models.Media import Media
from utils import Database, Logger, Persister
# Absolute location of this script and its containing directory.
FILE_PATH = os.path.realpath(__file__)
DIR_PATH = os.path.dirname(os.path.realpath(__file__))
# Per-source Playwright cookies are persisted next to the database.
COOKIES_DIR = f"{DIR_PATH}/../database"
PERSIST_BATCH = 1  # Number of articles to be saved at the same time
# Browser viewport dimensions in pixels.
PAGE_HEIGHT = 850
PAGE_WIDTH = 768
# Credentials for sources that require a login (may be None if unset).
CRAWLER_EMAIL = os.getenv("CRAWLER_EMAIL")
CRAWLER_PASSWORD = os.getenv("CRAWLER_PASSWORD")
LOGGER = Logger.Logger()
LOGGER.info("Starting News Crawler...")
# Options forwarded to every crawler instance; the two link-following
# switches are opt-in via environment variables set to the string "true".
CRAWLER_OPTIONS = {
    "page_height": PAGE_HEIGHT,
    "page_width": PAGE_WIDTH,
    "retrieve_related_article_links": (
        True
        if os.environ.get("RETRIEVE_RELATED_ARTICLE_LINKS", False) == "true"
        else False  # May lead to irrelevant articles over time ("Read also..." links)
    ),
    "retrieve_each_article_links": (
        True
        if os.environ.get("RETRIEVE_EACH_ARTICLE_LINKS", False) == "true"
        else False  # May highly lead to irrelevant articles over time
    ),
}
def get_media(database: Database.Database, sha256: str) -> Media:
    """Return the already-persisted Media row matching *sha256*, or None.

    Used to deduplicate media blobs across articles: if a media file with
    the same SHA-256 digest was stored before, the existing row is reused.
    (The previous annotation ``database: Database`` referred to the
    ``utils.Database`` module rather than the class used everywhere else.)
    """
    session = database.getSession()
    return session.query(Media).filter_by(sha256=sha256).first()
def process_crawl(crawler_source: str, database: Database.Database):
    """Crawl all articles for *crawler_source* and persist them.

    Resolves the login/crawler classes registered under *crawler_source*
    in CRAWLERS, drives a Chromium instance through Playwright (headless
    when running in Docker), authenticates, then iterates over crawled
    articles, attaching deduplicated media rows before persisting in
    batches of PERSIST_BATCH.
    """
    login_class: Login = CRAWLERS[crawler_source]["login"]
    crawler_class: Crawler = CRAWLERS[crawler_source]["crawler"]
    persister = Persister.Persister(database=database, batch_size=PERSIST_BATCH)
    with sync_playwright() as playwright_rs:
        is_docker = os.environ.get("IS_DOCKER", False) == "true"
        # A visible (headful) browser is only possible outside Docker.
        browser = playwright_rs.chromium.launch(headless=is_docker)
        try:
            context = browser.new_context()
            page = context.new_page()
            page.set_default_timeout(60000)
            page.set_viewport_size({"width": PAGE_WIDTH, "height": PAGE_HEIGHT})
            # Cookies are cached per source so later runs can skip the login UI.
            cookies_file_path = (
                f"{COOKIES_DIR}/{slugify.slugify(crawler_source)}.cookies.pickle"
            )
            login = login_class(
                cookies_file_path=cookies_file_path,
                context=context,
                page=page,
                crawler_email=CRAWLER_EMAIL,
                crawler_password=CRAWLER_PASSWORD,
            )
            login.login()
            crawler = crawler_class(
                database=database, context=context, page=page, options=CRAWLER_OPTIONS
            )
            for article_details in crawler.crawl():
                if article_details:
                    article = article_details["article"]
                    for media in article_details["medias"]:
                        if media:
                            # Reuse an existing Media row with the same sha256
                            # instead of inserting a duplicate.
                            media_query = get_media(database, media.sha256)
                            if media_query:
                                media = media_query
                            article.medias.append(media)
                    persister.add_object(article)
                    persister.request_save_objects()
            # Flush anything still buffered below the batch threshold.
            persister.save_objects()
        finally:
            # Release browser resources even if login/crawling raises
            # (previously the browser was never closed explicitly).
            browser.close()
if __name__ == "__main__":
    start_time = time.time()
    CRAWLER_SOURCE = os.environ.get("CRAWLER_SOURCE", "")
    if CRAWLER_SOURCE not in CRAWLERS:
        LOGGER.error(f"Provided crawler '{CRAWLER_SOURCE}' is not supported")
        # `exit` is a site-module convenience that may be absent (e.g. under
        # `python -S`); SystemExit is the reliable way to exit non-zero.
        raise SystemExit(1)
    database = Database.Database("sqlite")
    database.initDatabase()
    process_crawl(CRAWLER_SOURCE, database)
    print("--- Executed in %s seconds ---" % (time.time() - start_time))
| 33.214953
| 93
| 0.662915
|
4a082dbd5f02961c089192e1e21566c7ee562b43
| 1,124
|
py
|
Python
|
tests/command_line/test_average.py
|
graeme-winter/dxtbx
|
6cd5afee819c5e967cfa0bcd47084b3687b830d5
|
[
"BSD-3-Clause"
] | null | null | null |
tests/command_line/test_average.py
|
graeme-winter/dxtbx
|
6cd5afee819c5e967cfa0bcd47084b3687b830d5
|
[
"BSD-3-Clause"
] | null | null | null |
tests/command_line/test_average.py
|
graeme-winter/dxtbx
|
6cd5afee819c5e967cfa0bcd47084b3687b830d5
|
[
"BSD-3-Clause"
] | null | null | null |
from __future__ import absolute_import, division, print_function
import os
import procrunner
import pytest
import dxtbx
@pytest.mark.parametrize("use_mpi", [True, False])
def test_average(dials_data, tmpdir, use_mpi):
    """Average a SACLA image subset and verify the written cbf geometry."""
    # The averager relies on cbf handling code that lives in the xfel module.
    pytest.importorskip("xfel")
    if use_mpi:
        # The MPI variant only makes sense when MPI bindings are importable.
        pytest.importorskip("mpi4py")

    data = os.path.join(
        dials_data("image_examples"),
        "SACLA-MPCCD-run266702-0-subset.h5",
    )

    if use_mpi:
        command = "mpirun"
        mpargs = "-n 2 dxtbx.image_average --mpi=True".split()
    else:
        command = "dxtbx.image_average"
        mpargs = "-n 2".split()
    output_args = "-v -a avg.cbf -s stddev.cbf -m max.cbf".split()
    result = procrunner.run(
        [command] + mpargs + output_args + [data],
        working_directory=tmpdir,
    )
    assert not result.returncode and not result.stderr

    # The averaged cbf must describe the same detector as the source data.
    reference_detector = dxtbx.load(data).get_detector()
    averaged_detector = dxtbx.load(tmpdir.join("avg.cbf")).get_detector()
    assert reference_detector.is_similar_to(averaged_detector)
    assert reference_detector[0].get_gain() == averaged_detector[0].get_gain()
| 27.414634
| 87
| 0.655694
|
4a0830479c678618b649c3f7247ef458e10c4ae2
| 15,879
|
py
|
Python
|
tests/unit/fileserver/test_fileclient.py
|
xiaowei582648206/saltx
|
1d17b030b973ce5422e0fbe7e17c98c7ca91c49b
|
[
"Apache-2.0"
] | 1
|
2022-02-09T06:40:14.000Z
|
2022-02-09T06:40:14.000Z
|
tests/unit/fileserver/test_fileclient.py
|
xiaowei582648206/saltx
|
1d17b030b973ce5422e0fbe7e17c98c7ca91c49b
|
[
"Apache-2.0"
] | null | null | null |
tests/unit/fileserver/test_fileclient.py
|
xiaowei582648206/saltx
|
1d17b030b973ce5422e0fbe7e17c98c7ca91c49b
|
[
"Apache-2.0"
] | 4
|
2020-11-04T06:28:05.000Z
|
2022-02-09T10:54:49.000Z
|
# -*- coding: utf-8 -*-
'''
:codeauthor: Mike Place <mp@saltstack.com>
'''
# Import Python libs
from __future__ import absolute_import
import errno
import logging
import os
import shutil
# Import Salt Testing libs
from tests.integration import AdaptedConfigurationTestCaseMixin
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.paths import TMP
from tests.support.unit import TestCase, skipIf
from tests.support.mock import MagicMock, patch, NO_MOCK, NO_MOCK_REASON
# Import salt libs
import salt.utils
from salt import fileclient
import salt.ext.six as six
log = logging.getLogger(__name__)
# Salt environments exercised by these tests.
SALTENVS = ('base', 'dev')
# Throwaway fileserver and cache roots created under the test TMP dir.
FS_ROOT = os.path.join(TMP, 'fileclient_fs_root')
CACHE_ROOT = os.path.join(TMP, 'fileclient_cache_root')
# Subdirectory (with three files) used by the cache_dir tests.
SUBDIR = 'subdir'
SUBDIR_FILES = ('foo.txt', 'bar.txt', 'baz.txt')
def _get_file_roots():
    """Map each test saltenv to its backing directory list under FS_ROOT.

    Shaped like the ``file_roots`` master option consumed by MOCKED_OPTS.
    """
    # Dict comprehension instead of dict() over a list of pairs.
    return {saltenv: [os.path.join(FS_ROOT, saltenv)] for saltenv in SALTENVS}
# Minimal opts dict injected as fileclient.__opts__ by the loader mixin:
# a local 'roots' fileserver over FS_ROOT, caching into CACHE_ROOT.
MOCKED_OPTS = {
    'file_roots': _get_file_roots(),
    'fileserver_backend': ['roots'],
    'cachedir': CACHE_ROOT,
    'file_client': 'local',
}
@skipIf(NO_MOCK, NO_MOCK_REASON)
class FileClientTest(TestCase, AdaptedConfigurationTestCaseMixin, LoaderModuleMockMixin):
    '''
    Tests for the base fileclient.Client class and get_file_client factory.
    '''
    def setup_loader_modules(self):
        # Inject the mocked master opts as the fileclient module's __opts__.
        return {fileclient: {'__opts__': MOCKED_OPTS}}

    def setUp(self):
        self.file_client = fileclient.Client(self.master_opts)

    def tearDown(self):
        del self.file_client

    def test_file_list_emptydirs(self):
        '''
        Ensure that the fileclient class won't allow a direct call to file_list_emptydirs()
        '''
        with self.assertRaises(NotImplementedError):
            self.file_client.file_list_emptydirs()

    def test_get_file(self):
        '''
        Ensure that the fileclient class won't allow a direct call to get_file()
        '''
        with self.assertRaises(NotImplementedError):
            self.file_client.get_file(None)

    def test_get_file_client(self):
        '''
        Ensure the factory returns a RemoteClient when file_client is 'remote'.
        '''
        minion_opts = self.get_temp_config('minion')
        minion_opts['file_client'] = 'remote'
        with patch('salt.fileclient.RemoteClient', MagicMock(return_value='remote_client')):
            ret = fileclient.get_file_client(minion_opts)
            self.assertEqual('remote_client', ret)
@skipIf(NO_MOCK, NO_MOCK_REASON)
class FileclientCacheTest(TestCase, AdaptedConfigurationTestCaseMixin, LoaderModuleMockMixin):
    '''
    Tests for the fileclient caching. The LocalClient is the only thing we can
    test as it is the only way we can mock the fileclient (the tests run from
    the minion process, so the master cannot be mocked from test code).
    '''
    def setup_loader_modules(self):
        # Inject the mocked master opts as the fileclient module's __opts__.
        return {fileclient: {'__opts__': MOCKED_OPTS}}

    def setUp(self):
        '''
        No need to add a dummy foo.txt to muddy up the github repo, just make
        our own fileserver root on-the-fly.
        '''
        def _new_dir(path):
            '''
            Add a new dir at ``path`` using os.makedirs. If the directory
            already exists, remove it recursively and then try to create it
            again.
            '''
            try:
                os.makedirs(path)
            except OSError as exc:
                if exc.errno == errno.EEXIST:
                    # Just in case a previous test was interrupted, remove the
                    # directory and try adding it again.
                    shutil.rmtree(path)
                    os.makedirs(path)
                else:
                    raise

        # Create the FS_ROOT
        for saltenv in SALTENVS:
            saltenv_root = os.path.join(FS_ROOT, saltenv)
            # Make sure we have a fresh root dir for this saltenv
            _new_dir(saltenv_root)

            # A top-level file whose content names its saltenv (the tests
            # later grep the cached copies for these markers).
            path = os.path.join(saltenv_root, 'foo.txt')
            with salt.utils.fopen(path, 'w') as fp_:
                fp_.write(
                    'This is a test file in the \'{0}\' saltenv.\n'
                    .format(saltenv)
                )

            # A subdirectory of files for the cache_dir tests.
            subdir_abspath = os.path.join(saltenv_root, SUBDIR)
            os.makedirs(subdir_abspath)
            for subdir_file in SUBDIR_FILES:
                path = os.path.join(subdir_abspath, subdir_file)
                with salt.utils.fopen(path, 'w') as fp_:
                    fp_.write(
                        'This is file \'{0}\' in subdir \'{1} from saltenv '
                        '\'{2}\''.format(subdir_file, SUBDIR, saltenv)
                    )

        # Create the CACHE_ROOT
        _new_dir(CACHE_ROOT)

    def tearDown(self):
        '''
        Remove the directories created for these tests
        '''
        shutil.rmtree(FS_ROOT)
        shutil.rmtree(CACHE_ROOT)

    def test_cache_dir(self):
        '''
        Ensure entire directory is cached to correct location
        '''
        patched_opts = dict((x, y) for x, y in six.iteritems(self.minion_opts))
        patched_opts.update(MOCKED_OPTS)

        with patch.dict(fileclient.__opts__, patched_opts):
            client = fileclient.get_file_client(fileclient.__opts__, pillar=False)
            for saltenv in SALTENVS:
                self.assertTrue(
                    client.cache_dir(
                        'salt://{0}'.format(SUBDIR),
                        saltenv,
                        cachedir=None
                    )
                )
                for subdir_file in SUBDIR_FILES:
                    cache_loc = os.path.join(fileclient.__opts__['cachedir'],
                                             'files',
                                             saltenv,
                                             SUBDIR,
                                             subdir_file)
                    # Double check that the content of the cached file
                    # identifies it as being from the correct saltenv. The
                    # setUp function creates the file with the name of the
                    # saltenv mentioned in the file, so a simple 'in' check is
                    # sufficient here. If opening the file raises an exception,
                    # this is a problem, so we are not catching the exception
                    # and letting it be raised so that the test fails.
                    with salt.utils.fopen(cache_loc) as fp_:
                        content = fp_.read()
                    log.debug('cache_loc = %s', cache_loc)
                    log.debug('content = %s', content)
                    self.assertTrue(subdir_file in content)
                    self.assertTrue(SUBDIR in content)
                    self.assertTrue(saltenv in content)

    def test_cache_dir_with_alternate_cachedir_and_absolute_path(self):
        '''
        Ensure entire directory is cached to correct location when an alternate
        cachedir is specified and that cachedir is an absolute path
        '''
        patched_opts = dict((x, y) for x, y in six.iteritems(self.minion_opts))
        patched_opts.update(MOCKED_OPTS)
        alt_cachedir = os.path.join(TMP, 'abs_cachedir')

        with patch.dict(fileclient.__opts__, patched_opts):
            client = fileclient.get_file_client(fileclient.__opts__, pillar=False)
            for saltenv in SALTENVS:
                self.assertTrue(
                    client.cache_dir(
                        'salt://{0}'.format(SUBDIR),
                        saltenv,
                        cachedir=alt_cachedir
                    )
                )
                for subdir_file in SUBDIR_FILES:
                    # An absolute alt cachedir replaces the configured one.
                    cache_loc = os.path.join(alt_cachedir,
                                             'files',
                                             saltenv,
                                             SUBDIR,
                                             subdir_file)
                    # Double check that the content of the cached file
                    # identifies it as being from the correct saltenv. The
                    # setUp function creates the file with the name of the
                    # saltenv mentioned in the file, so a simple 'in' check is
                    # sufficient here. If opening the file raises an exception,
                    # this is a problem, so we are not catching the exception
                    # and letting it be raised so that the test fails.
                    with salt.utils.fopen(cache_loc) as fp_:
                        content = fp_.read()
                    log.debug('cache_loc = %s', cache_loc)
                    log.debug('content = %s', content)
                    self.assertTrue(subdir_file in content)
                    self.assertTrue(SUBDIR in content)
                    self.assertTrue(saltenv in content)

    def test_cache_dir_with_alternate_cachedir_and_relative_path(self):
        '''
        Ensure entire directory is cached to correct location when an alternate
        cachedir is specified and that cachedir is a relative path
        '''
        patched_opts = dict((x, y) for x, y in six.iteritems(self.minion_opts))
        patched_opts.update(MOCKED_OPTS)
        alt_cachedir = 'foo'

        with patch.dict(fileclient.__opts__, patched_opts):
            client = fileclient.get_file_client(fileclient.__opts__, pillar=False)
            for saltenv in SALTENVS:
                self.assertTrue(
                    client.cache_dir(
                        'salt://{0}'.format(SUBDIR),
                        saltenv,
                        cachedir=alt_cachedir
                    )
                )
                for subdir_file in SUBDIR_FILES:
                    # A relative alt cachedir nests under the configured one.
                    cache_loc = os.path.join(fileclient.__opts__['cachedir'],
                                             alt_cachedir,
                                             'files',
                                             saltenv,
                                             SUBDIR,
                                             subdir_file)
                    # Double check that the content of the cached file
                    # identifies it as being from the correct saltenv. The
                    # setUp function creates the file with the name of the
                    # saltenv mentioned in the file, so a simple 'in' check is
                    # sufficient here. If opening the file raises an exception,
                    # this is a problem, so we are not catching the exception
                    # and letting it be raised so that the test fails.
                    with salt.utils.fopen(cache_loc) as fp_:
                        content = fp_.read()
                    log.debug('cache_loc = %s', cache_loc)
                    log.debug('content = %s', content)
                    self.assertTrue(subdir_file in content)
                    self.assertTrue(SUBDIR in content)
                    self.assertTrue(saltenv in content)

    def test_cache_file(self):
        '''
        Ensure file is cached to correct location
        '''
        patched_opts = dict((x, y) for x, y in six.iteritems(self.minion_opts))
        patched_opts.update(MOCKED_OPTS)

        with patch.dict(fileclient.__opts__, patched_opts):
            client = fileclient.get_file_client(fileclient.__opts__, pillar=False)
            for saltenv in SALTENVS:
                self.assertTrue(
                    client.cache_file('salt://foo.txt', saltenv, cachedir=None)
                )
                cache_loc = os.path.join(
                    fileclient.__opts__['cachedir'], 'files', saltenv, 'foo.txt')
                # Double check that the content of the cached file identifies
                # it as being from the correct saltenv. The setUp function
                # creates the file with the name of the saltenv mentioned in
                # the file, so a simple 'in' check is sufficient here. If
                # opening the file raises an exception, this is a problem, so
                # we are not catching the exception and letting it be raised so
                # that the test fails.
                with salt.utils.fopen(cache_loc) as fp_:
                    content = fp_.read()
                log.debug('cache_loc = %s', cache_loc)
                log.debug('content = %s', content)
                self.assertTrue(saltenv in content)

    def test_cache_file_with_alternate_cachedir_and_absolute_path(self):
        '''
        Ensure file is cached to correct location when an alternate cachedir is
        specified and that cachedir is an absolute path
        '''
        patched_opts = dict((x, y) for x, y in six.iteritems(self.minion_opts))
        patched_opts.update(MOCKED_OPTS)
        alt_cachedir = os.path.join(TMP, 'abs_cachedir')

        with patch.dict(fileclient.__opts__, patched_opts):
            client = fileclient.get_file_client(fileclient.__opts__, pillar=False)
            for saltenv in SALTENVS:
                self.assertTrue(
                    client.cache_file('salt://foo.txt',
                                      saltenv,
                                      cachedir=alt_cachedir)
                )
                # An absolute alt cachedir replaces the configured one.
                cache_loc = os.path.join(alt_cachedir,
                                         'files',
                                         saltenv,
                                         'foo.txt')
                # Double check that the content of the cached file identifies
                # it as being from the correct saltenv. The setUp function
                # creates the file with the name of the saltenv mentioned in
                # the file, so a simple 'in' check is sufficient here. If
                # opening the file raises an exception, this is a problem, so
                # we are not catching the exception and letting it be raised so
                # that the test fails.
                with salt.utils.fopen(cache_loc) as fp_:
                    content = fp_.read()
                log.debug('cache_loc = %s', cache_loc)
                log.debug('content = %s', content)
                self.assertTrue(saltenv in content)

    def test_cache_file_with_alternate_cachedir_and_relative_path(self):
        '''
        Ensure file is cached to correct location when an alternate cachedir is
        specified and that cachedir is a relative path
        '''
        patched_opts = dict((x, y) for x, y in six.iteritems(self.minion_opts))
        patched_opts.update(MOCKED_OPTS)
        alt_cachedir = 'foo'

        with patch.dict(fileclient.__opts__, patched_opts):
            client = fileclient.get_file_client(fileclient.__opts__, pillar=False)
            for saltenv in SALTENVS:
                self.assertTrue(
                    client.cache_file('salt://foo.txt',
                                      saltenv,
                                      cachedir=alt_cachedir)
                )
                # A relative alt cachedir nests under the configured one.
                cache_loc = os.path.join(fileclient.__opts__['cachedir'],
                                         alt_cachedir,
                                         'files',
                                         saltenv,
                                         'foo.txt')
                # Double check that the content of the cached file identifies
                # it as being from the correct saltenv. The setUp function
                # creates the file with the name of the saltenv mentioned in
                # the file, so a simple 'in' check is sufficient here. If
                # opening the file raises an exception, this is a problem, so
                # we are not catching the exception and letting it be raised so
                # that the test fails.
                with salt.utils.fopen(cache_loc) as fp_:
                    content = fp_.read()
                log.debug('cache_loc = %s', cache_loc)
                log.debug('content = %s', content)
                self.assertTrue(saltenv in content)
| 43.864641
| 94
| 0.546949
|
4a08306155c3c1389f7298e94580ff3d8e533811
| 3,072
|
py
|
Python
|
jmeter_api/thread_groups/ultimate_thread_group/test_ultimate_thread_group.py
|
dashawn888/jmeter_api
|
1ab5b02f3a7c8ad1b84fc50db4fe1fc2fa7c91bd
|
[
"Apache-2.0"
] | 11
|
2020-03-22T13:30:21.000Z
|
2021-12-25T06:23:44.000Z
|
jmeter_api/thread_groups/ultimate_thread_group/test_ultimate_thread_group.py
|
dashawn888/jmeter_api
|
1ab5b02f3a7c8ad1b84fc50db4fe1fc2fa7c91bd
|
[
"Apache-2.0"
] | 37
|
2019-12-18T13:12:50.000Z
|
2022-02-10T10:52:37.000Z
|
jmeter_api/thread_groups/ultimate_thread_group/test_ultimate_thread_group.py
|
dashawn888/jmeter_api
|
1ab5b02f3a7c8ad1b84fc50db4fe1fc2fa7c91bd
|
[
"Apache-2.0"
] | 5
|
2019-12-06T10:55:56.000Z
|
2020-06-01T19:32:32.000Z
|
import xmltodict
import pytest
from jmeter_api.thread_groups.ultimate_thread_group.elements import UltimateThreadGroup, ThreadGroupAction
from jmeter_api.basics.utils import tag_wrapper
class TestUltimateThreadGroupArgs:
    """Argument validation for UltimateThreadGroup's ``schedule`` parameter."""

    class TestSchedule:
        def test_check(self):
            # A plain string is not an acceptable schedule.
            with pytest.raises(TypeError):
                UltimateThreadGroup(schedule="1")

        def test_check2(self):
            # The schedule must be a list of dicts, not a bare dict.
            # NOTE(review): the 'shotdown' key spelling appears to match the
            # library's expected schema — do not "fix" it here; verify upstream.
            with pytest.raises(TypeError):
                UltimateThreadGroup(schedule={"thread_count": 1, "delay": 0, "startup": 0,
                                              "hold": 10, "shotdown": 0})

        def test_check3(self):
            # thread_count must be numeric, not a string.
            with pytest.raises(TypeError):
                UltimateThreadGroup(schedule=[{"thread_count": "1", "delay": 0, "startup": 0,
                                               "hold": 10, "shotdown": 0}])

        def test_check4(self):
            # Negative thread counts are rejected.
            with pytest.raises(TypeError):
                UltimateThreadGroup(schedule=[{"thread_count": -1, "delay": 0, "startup": 0,
                                               "hold": 10, "shotdown": 0}])

        def test_check5(self):
            # A schedule entry missing a required key ('delay') is rejected.
            with pytest.raises(ValueError):
                UltimateThreadGroup(schedule=[{"thread_count": 1, "startup": 0,
                                               "hold": 10, "shotdown": 0}])

        def test_positive(self):
            # A fully-specified schedule entry is accepted without error.
            UltimateThreadGroup(schedule=[{"thread_count": 1, "delay": 0, "startup": 0,
                                           "hold": 10, "shotdown": 0}])
class TestUltimateThreadGroupRender:
    """XML rendering checks for UltimateThreadGroup."""

    def test_target_rate(self):
        # The five schedule fields must be rendered, in order, as the
        # stringProp children of the nested collectionProp.
        element = UltimateThreadGroup(schedule=[{"thread_count": 3, "delay": 0, "startup": 5,
                                                 "hold": 10, "shotdown": 6}])
        rendered_doc = element.to_xml()
        parsed_doc = xmltodict.parse(tag_wrapper(rendered_doc, 'test_results'))
        assert parsed_doc['test_results']['kg.apc.jmeter.threads.UltimateThreadGroup']['collectionProp']['collectionProp']['stringProp'][0]['#text'] == "3"
        assert parsed_doc['test_results']['kg.apc.jmeter.threads.UltimateThreadGroup']['collectionProp']['collectionProp']['stringProp'][1]['#text'] == "0"
        assert parsed_doc['test_results']['kg.apc.jmeter.threads.UltimateThreadGroup']['collectionProp']['collectionProp']['stringProp'][2]['#text'] == "5"
        assert parsed_doc['test_results']['kg.apc.jmeter.threads.UltimateThreadGroup']['collectionProp']['collectionProp']['stringProp'][3]['#text'] == "10"
        assert parsed_doc['test_results']['kg.apc.jmeter.threads.UltimateThreadGroup']['collectionProp']['collectionProp']['stringProp'][4]['#text'] == "6"

    def test_on_sample_error(self):
        # The on_sample_error enum value is rendered into the stringProp.
        element = UltimateThreadGroup(on_sample_error=ThreadGroupAction.START_NEXT_LOOP)
        rendered_doc = element.to_xml()
        parsed_doc = xmltodict.parse(tag_wrapper(rendered_doc, 'test_results'))
        assert parsed_doc['test_results']['kg.apc.jmeter.threads.UltimateThreadGroup']['stringProp']['#text'] == 'startnextloop'
| 54.857143
| 156
| 0.604492
|
4a0830c4c9c8cd0dff24a2d035b24843b268e8ee
| 229
|
py
|
Python
|
ProGitForProgrammers/Program.py
|
cybercritter/ProGitForProgrammers
|
e63f8473eb3e2199800979242e1619ecd755e6b2
|
[
"MIT"
] | null | null | null |
ProGitForProgrammers/Program.py
|
cybercritter/ProGitForProgrammers
|
e63f8473eb3e2199800979242e1619ecd755e6b2
|
[
"MIT"
] | null | null | null |
ProGitForProgrammers/Program.py
|
cybercritter/ProGitForProgrammers
|
e63f8473eb3e2199800979242e1619ecd755e6b2
|
[
"MIT"
] | null | null | null |
class Program:
    """Toy class for a Git tutorial: greets on construction."""

    def __init__(self):
        # Print each tutorial message on its own line.
        for message in (
            'Hello World!',
            'I just added this in PyCharm',
            'I just added this to the command line repo',
        ):
            print(message)
if __name__ == '__main__':
    # Instantiating Program prints the greetings as a side effect.
    program = Program()
| 20.818182
| 59
| 0.615721
|
4a0831b22d1c912d91eb0d2c72b0fab7e8e99883
| 1,768
|
py
|
Python
|
tests/settings.py
|
adamchainz/django-perf-rec
|
f543053d9de5bc7f52f5761fc914d342c78e37a1
|
[
"MIT"
] | 147
|
2018-08-21T14:18:27.000Z
|
2022-03-31T23:16:58.000Z
|
tests/settings.py
|
adamchainz/django-perf-rec
|
f543053d9de5bc7f52f5761fc914d342c78e37a1
|
[
"MIT"
] | 48
|
2018-07-15T11:07:08.000Z
|
2022-03-26T16:00:22.000Z
|
tests/settings.py
|
adamchainz/django-perf-rec
|
f543053d9de5bc7f52f5761fc914d342c78e37a1
|
[
"MIT"
] | 11
|
2018-07-13T10:09:44.000Z
|
2021-02-13T18:15:12.000Z
|
import os
from typing import List
import django
# Repository root, resolved relative to this settings module.
BASE_DIR = os.path.dirname(os.path.dirname(__file__))

DEBUG = True
TEMPLATE_DEBUG = DEBUG

# Test-only settings module; this value is deliberately not secret.
SECRET_KEY = "NOTASECRET"

# Three in-memory SQLite databases; "replica" mirrors "default" under test.
DATABASES = {
    "default": {"ENGINE": "django.db.backends.sqlite3", "NAME": ":memory:"},
    "replica": {
        "ENGINE": "django.db.backends.sqlite3",
        "NAME": ":memory:",
        "TEST": {"MIRROR": "default"},
    },
    "second": {"ENGINE": "django.db.backends.sqlite3", "NAME": ":memory:"},
}

# Two local-memory caches so cache-alias behaviour can be exercised.
CACHES = {
    "default": {"BACKEND": "django.core.cache.backends.locmem.LocMemCache"},
    "second": {"BACKEND": "django.core.cache.backends.locmem.LocMemCache"},
}

ALLOWED_HOSTS: List[str] = []

INSTALLED_APPS = ["django.contrib.auth", "django.contrib.contenttypes", "tests.testapp"]

MIDDLEWARE_CLASSES = (
    "django.middleware.common.CommonMiddleware",
    "django.middleware.csrf.CsrfViewMiddleware",
    "django.contrib.sessions.middleware.SessionMiddleware",
    "django.contrib.auth.middleware.AuthenticationMiddleware",
    "django.contrib.messages.middleware.MessageMiddleware",
    "django.middleware.clickjacking.XFrameOptionsMiddleware",
)

ROOT_URLCONF = "tests.urls"

LANGUAGE_CODE = "en-us"
TIME_ZONE = "UTC"
USE_I18N = True
# USE_L10N was removed in Django 4.0, so only set it on older versions.
if django.VERSION < (4, 0):
    USE_L10N = True

TEMPLATES = [
    {
        "BACKEND": "django.template.backends.django.DjangoTemplates",
        "DIRS": [],
        "APP_DIRS": True,
        "OPTIONS": {
            "context_processors": [
                "django.template.context_processors.debug",
                "django.template.context_processors.request",
                "django.contrib.auth.context_processors.auth",
                "django.contrib.messages.context_processors.messages",
            ]
        },
    }
]

USE_TZ = True
| 26.787879
| 88
| 0.649887
|
4a0831d577593644e1038ce68cd0abf7e98dc575
| 18,181
|
py
|
Python
|
testing/run_tests.py
|
rhencke/engine
|
1016db292c4e73374a0a11536b18303c9522a224
|
[
"BSD-3-Clause"
] | 13
|
2020-08-09T10:30:50.000Z
|
2021-09-06T18:26:05.000Z
|
testing/run_tests.py
|
rhencke/engine
|
1016db292c4e73374a0a11536b18303c9522a224
|
[
"BSD-3-Clause"
] | null | null | null |
testing/run_tests.py
|
rhencke/engine
|
1016db292c4e73374a0a11536b18303c9522a224
|
[
"BSD-3-Clause"
] | 4
|
2020-09-24T05:14:51.000Z
|
2021-04-22T19:53:10.000Z
|
#!/usr/bin/env python
# Copyright 2013 The Flutter Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
A top level harness to run all unit-tests in a specific engine build.
"""
import argparse
import glob
import os
import re
import subprocess
import sys
import time
# Repository root: three levels up from this script's real location.
buildroot_dir = os.path.abspath(os.path.join(os.path.realpath(__file__), '..', '..', '..'))
out_dir = os.path.join(buildroot_dir, 'out')
# Golden images and fonts consumed by the flow/txt test suites.
golden_dir = os.path.join(buildroot_dir, 'flutter', 'testing', 'resources')
fonts_dir = os.path.join(buildroot_dir, 'flutter', 'third_party', 'txt', 'third_party', 'fonts')
roboto_font_path = os.path.join(fonts_dir, 'Roboto-Regular.ttf')
dart_tests_dir = os.path.join(buildroot_dir, 'flutter', 'testing', 'dart',)
font_subset_dir = os.path.join(buildroot_dir, 'flutter', 'tools', 'font-subset')
# Exclude time-sensitive fml tests by default (flaky on loaded bots).
fml_unittests_filter = '--gtest_filter=-*TimeSensitiveTest*'
def PrintDivider(char='='):
    """Print a visual divider: a blank gap, four 80-char rows of *char*, a gap.

    Rewritten with the print() function and range() so the file runs under
    both Python 2 and 3 (it previously mixed print statements with print()
    calls and used Python-2-only xrange).
    """
    print('\n')
    for _ in range(4):
        # char * 80 is equivalent to joining 80 copies of char.
        print(char * 80)
    print('\n')
def RunCmd(cmd, **kwargs):
    """Run *cmd* (a list), echoing its output to this process's stdio.

    Raises Exception when the command exits non-zero. Extra keyword
    arguments are forwarded to subprocess.Popen (e.g. cwd=...).

    Print statements converted to print() calls so the function is valid
    under both Python 2 and 3, matching the rest of the file's print() use.
    """
    command_string = ' '.join(cmd)

    PrintDivider('>')
    print('Running command "%s"' % command_string)

    start_time = time.time()
    process = subprocess.Popen(cmd, stdout=sys.stdout, stderr=sys.stderr, **kwargs)
    process.communicate()
    end_time = time.time()

    if process.returncode != 0:
        PrintDivider('!')
        raise Exception('Command "%s" exited with code %d' % (command_string, process.returncode))

    PrintDivider('<')
    print('Command run successfully in %.2f seconds: %s' % (end_time - start_time, command_string))
def IsMac():
    """Return True when the host platform is macOS."""
    return sys.platform == 'darwin'
def IsLinux():
    """Return True when the host platform is Linux."""
    return sys.platform.startswith('linux')
def IsWindows():
    """Return True when the host platform is Windows (including Cygwin)."""
    return sys.platform.startswith(('cygwin', 'win'))
def ExecutableSuffix():
    """Return the executable file suffix for the host OS ('.exe' on Windows)."""
    return '.exe' if IsWindows() else ''
def FindExecutablePath(path):
    """Return an existing executable for *path*, trying Windows suffixes.

    On Windows, *path* + '.exe' and *path* + '.bat' are also considered,
    in that order. Raises Exception when nothing exists.
    """
    candidates = [path]
    if IsWindows():
        candidates.append(path + '.exe')
        candidates.append(path + '.bat')
    for candidate in candidates:
        if os.path.exists(candidate):
            return candidate
    raise Exception('Executable %s does not exist!' % path)
def RunEngineExecutable(build_dir, executable_name, filter, flags=None, cwd=buildroot_dir):
    """Run a single engine test executable unless excluded by *filter*.

    Args:
      build_dir: Directory containing the built executables.
      executable_name: Basename of the executable to launch.
      filter: Optional collection of executable names to run; anything not
        listed is skipped. (Name kept for interface compatibility even
        though it shadows the builtin.)
      flags: Extra command-line flags passed to the executable.
      cwd: Working directory for the child process.
    """
    # None-sentinel instead of a mutable [] default (shared-list pitfall).
    if flags is None:
        flags = []
    if filter is not None and executable_name not in filter:
        print('Skipping %s due to filter.' % executable_name)
        return

    executable = FindExecutablePath(os.path.join(build_dir, executable_name))

    print('Running %s in %s' % (executable_name, cwd))
    test_command = [executable] + flags
    print(' '.join(test_command))
    RunCmd(test_command, cwd=cwd)
def RunCCTests(build_dir, filter):
    """Run the C++ engine unit-test executables found in *build_dir*.

    *filter* optionally restricts which executables run (see
    RunEngineExecutable); platform-specific suites are gated per-OS.
    """
    print("Running Engine Unit-tests.")

    # Not all of the engine unit tests are designed to be run more than once.
    non_repeatable_shuffle_flags = [
        "--gtest_shuffle",
    ]
    shuffle_flags = non_repeatable_shuffle_flags + [
        "--gtest_repeat=2",
    ]

    RunEngineExecutable(build_dir, 'client_wrapper_glfw_unittests', filter, shuffle_flags)
    RunEngineExecutable(build_dir, 'common_cpp_core_unittests', filter, shuffle_flags)
    RunEngineExecutable(build_dir, 'common_cpp_unittests', filter, shuffle_flags)
    RunEngineExecutable(build_dir, 'client_wrapper_unittests', filter, shuffle_flags)

    # https://github.com/flutter/flutter/issues/36294
    if not IsWindows():
        RunEngineExecutable(build_dir, 'embedder_unittests', filter, shuffle_flags)
        RunEngineExecutable(build_dir, 'embedder_proctable_unittests', filter, shuffle_flags)
    else:
        RunEngineExecutable(build_dir, 'flutter_windows_unittests', filter, non_repeatable_shuffle_flags)
        RunEngineExecutable(build_dir, 'client_wrapper_windows_unittests', filter, shuffle_flags)

    # On Linux the flow tests compare against golden images; elsewhere the
    # gold-dependent test is filtered out instead.
    flow_flags = ['--gtest_filter=-PerformanceOverlayLayer.Gold']
    if IsLinux():
        flow_flags = [
            '--golden-dir=%s' % golden_dir,
            '--font-file=%s' % roboto_font_path,
        ]
    RunEngineExecutable(build_dir, 'flow_unittests', filter, flow_flags + shuffle_flags)

    # TODO(44614): Re-enable after https://github.com/flutter/flutter/issues/44614 has been addressed.
    # RunEngineExecutable(build_dir, 'fml_unittests', filter, [ fml_unittests_filter ] + shuffle_flags)

    RunEngineExecutable(build_dir, 'runtime_unittests', filter, shuffle_flags)
    RunEngineExecutable(build_dir, 'tonic_unittests', filter, shuffle_flags)

    if not IsWindows():
        # https://github.com/flutter/flutter/issues/36295
        RunEngineExecutable(build_dir, 'shell_unittests', filter, shuffle_flags)
        # https://github.com/google/googletest/issues/2490
        RunEngineExecutable(build_dir, 'android_external_view_embedder_unittests', filter, shuffle_flags)
        RunEngineExecutable(build_dir, 'jni_unittests', filter, shuffle_flags)
        RunEngineExecutable(build_dir, 'platform_view_android_delegate_unittests', filter, shuffle_flags)

    # The image release unit test can take a while on slow machines.
    RunEngineExecutable(build_dir, 'ui_unittests', filter, shuffle_flags + ['--timeout=90'])

    RunEngineExecutable(build_dir, 'testing_unittests', filter, shuffle_flags)

    # These unit-tests are Objective-C and can only run on Darwin.
    if IsMac():
        RunEngineExecutable(build_dir, 'flutter_channels_unittests', filter, shuffle_flags)
        RunEngineExecutable(build_dir, 'flutter_desktop_darwin_unittests', filter, non_repeatable_shuffle_flags)

    # https://github.com/flutter/flutter/issues/36296
    if IsLinux():
        RunEngineExecutable(build_dir, 'txt_unittests', filter, shuffle_flags)

    if IsLinux():
        RunEngineExecutable(build_dir, 'flutter_linux_unittests', filter, non_repeatable_shuffle_flags)
        RunEngineExecutable(build_dir, 'flutter_glfw_unittests', filter, non_repeatable_shuffle_flags)
def RunEngineBenchmarks(build_dir, filter):
    """Run the engine benchmark executables (txt benchmarks are Linux-only)."""
    print("Running Engine Benchmarks.")
    RunEngineExecutable(build_dir, 'shell_benchmarks', filter)
    RunEngineExecutable(build_dir, 'fml_benchmarks', filter)
    RunEngineExecutable(build_dir, 'ui_benchmarks', filter)
    if IsLinux():
        RunEngineExecutable(build_dir, 'txt_benchmarks', filter)
def SnapshotTest(build_dir, dart_file, kernel_file_output, verbose_dart_snapshot):
    """Compile *dart_file* to a kernel .dill using the frontend server.

    Args:
      build_dir: Build output directory containing dart-sdk and the
        frontend server snapshot.
      dart_file: Path to the Dart test source.
      kernel_file_output: Destination path for the generated .dill.
      verbose_dart_snapshot: When true, stream compiler output via RunCmd;
        otherwise capture it silently.

    Raises Exception if a prerequisite artifact or the produced kernel
    file is missing. (Previously plain `assert`s, which are stripped when
    Python runs with -O.)
    """
    print("Generating snapshot for test %s" % dart_file)
    dart = os.path.join(build_dir, 'dart-sdk', 'bin', 'dart')
    frontend_server = os.path.join(build_dir, 'gen', 'frontend_server.dart.snapshot')
    flutter_patched_sdk = os.path.join(build_dir, 'flutter_patched_sdk')
    test_packages = os.path.join(dart_tests_dir, '.packages')

    # Fail fast with a clear message if any prerequisite is missing.
    for prerequisite in (dart, frontend_server, flutter_patched_sdk, test_packages):
        if not os.path.exists(prerequisite):
            raise Exception('Prerequisite %s does not exist!' % prerequisite)

    snapshot_command = [
        dart,
        frontend_server,
        '--enable-experiment=non-nullable',
        '--no-sound-null-safety',
        '--sdk-root',
        flutter_patched_sdk,
        '--incremental',
        '--target=flutter',
        '--packages',
        test_packages,
        '--output-dill',
        kernel_file_output,
        dart_file
    ]

    if verbose_dart_snapshot:
        RunCmd(snapshot_command, cwd=buildroot_dir)
    else:
        subprocess.check_output(snapshot_command, cwd=buildroot_dir)

    if not os.path.exists(kernel_file_output):
        raise Exception('Expected kernel file %s was not generated!' % kernel_file_output)
def RunDartTest(build_dir, dart_file, verbose_dart_snapshot, multithreaded):
    """Snapshot *dart_file* to kernel and run it under flutter_tester.

    When *multithreaded* is true the tester is forced into multithreaded
    mode via --force-multithreading.
    """
    kernel_file_name = os.path.basename(dart_file) + '.kernel.dill'
    kernel_file_output = os.path.join(out_dir, kernel_file_name)
    SnapshotTest(build_dir, dart_file, kernel_file_output, verbose_dart_snapshot)

    command_args = [
        '--disable-observatory',
        '--use-test-fonts',
        kernel_file_output
    ]
    if multithreaded:
        threading = 'multithreaded'
        command_args.insert(0, '--force-multithreading')
    else:
        threading = 'single-threaded'

    print("Running test '%s' using 'flutter_tester' (%s)" % (kernel_file_name, threading))
    RunEngineExecutable(build_dir, 'flutter_tester', None, command_args)
def RunPubGet(build_dir, directory):
  """Run `pub get` in *directory* using the pub binary from the built Dart SDK.

  Args:
    build_dir: engine build output directory containing dart-sdk/bin/pub.
    directory: the directory whose pubspec should be resolved.
  """
  # BUG FIX: the message previously interpolated the module-level
  # dart_tests_dir regardless of which directory `pub get` actually ran in.
  print("Running 'pub get' in the tests directory %s" % directory)
  pub_get_command = [
    os.path.join(build_dir, 'dart-sdk', 'bin', 'pub'),
    'get'
  ]
  RunCmd(pub_get_command, cwd=directory)
def EnsureDebugUnoptSkyPackagesAreBuilt():
  """Build just flutter/sky/packages for the host_debug_unopt variant.

  The Dart test pubspec hardcodes dependency paths into host_debug_unopt/,
  so that variant's sky packages must exist before the Dart tests run.
  Runs GN first only when the variant out directory does not exist yet.
  """
  variant_out_dir = os.path.join(out_dir, 'host_debug_unopt')
  ninja_command = [
    'autoninja',
    '-C',
    variant_out_dir,
    'flutter/sky/packages'
  ]

  # Attempt running Ninja if the out directory exists.
  # We don't want to blow away any custom GN args the caller may have already set.
  if os.path.exists(variant_out_dir):
    RunCmd(ninja_command, cwd=buildroot_dir)
    return

  # Out directory missing: generate build files with GN, then build.
  gn_command = [
    os.path.join(buildroot_dir, 'flutter', 'tools', 'gn'),
    '--runtime-mode',
    'debug',
    '--unopt',
    '--no-lto',
  ]

  RunCmd(gn_command, cwd=buildroot_dir)
  RunCmd(ninja_command, cwd=buildroot_dir)
def EnsureJavaTestsAreBuilt(android_out_dir):
  """Builds the engine variant and the test jar containing the JUnit tests"""
  ninja_command = [
    'autoninja',
    '-C',
    android_out_dir,
    'flutter/shell/platform/android:robolectric_tests'
  ]

  # Attempt running Ninja if the out directory exists.
  # We don't want to blow away any custom GN args the caller may have already set.
  if os.path.exists(android_out_dir):
    RunCmd(ninja_command, cwd=buildroot_dir)
    return

  # NOTE(review): android_out_dir is an absolute path built from out_dir, so
  # this assert against the relative literal always passes — it appears to be
  # intended only to surface the "run GN first" message. Confirm intent.
  assert android_out_dir != "out/android_debug_unopt", "%s doesn't exist. Run GN to generate the directory first" % android_out_dir

  # Otherwise prepare the directory first, then build the test.
  gn_command = [
    os.path.join(buildroot_dir, 'flutter', 'tools', 'gn'),
    '--android',
    '--unoptimized',
    '--runtime-mode=debug',
    '--no-lto',
  ]

  RunCmd(gn_command, cwd=buildroot_dir)
  RunCmd(ninja_command, cwd=buildroot_dir)
def EnsureIosTestsAreBuilt(ios_out_dir):
  """Builds the engine variant and the test dylib containing the XCTests"""
  ninja_command = [
    'autoninja',
    '-C',
    ios_out_dir,
    'ios_test_flutter'
  ]

  # Attempt running Ninja if the out directory exists.
  # We don't want to blow away any custom GN args the caller may have already set.
  if os.path.exists(ios_out_dir):
    RunCmd(ninja_command, cwd=buildroot_dir)
    return

  # NOTE(review): ios_out_dir is an absolute path built from out_dir, so this
  # assert against the relative literal always passes — it appears intended
  # only to surface the "run GN first" message. Confirm intent.
  assert ios_out_dir != "out/ios_debug_sim_unopt", "%s doesn't exist. Run GN to generate the directory first" % ios_out_dir

  # Otherwise prepare the directory first, then build the test.
  gn_command = [
    os.path.join(buildroot_dir, 'flutter', 'tools', 'gn'),
    '--ios',
    '--unoptimized',
    '--runtime-mode=debug',
    '--no-lto',
    '--simulator'
  ]

  RunCmd(gn_command, cwd=buildroot_dir)
  RunCmd(ninja_command, cwd=buildroot_dir)
def AssertExpectedJavaVersion():
  """Checks that the user has Java 8 which is the supported Java version for Android.

  Raises:
    AssertionError: when `java -version` does not report version 1.8.
  """
  EXPECTED_VERSION = '1.8'
  # `java -version` is output to stderr. https://bugs.java.com/bugdatabase/view_bug.do?bug_id=4380614
  version_output = subprocess.check_output(['java', '-version'], stderr=subprocess.STDOUT)
  # BUG FIX: check_output returns bytes on Python 3; searching bytes with a
  # str pattern raises TypeError before the assert could ever report. Decode
  # first so the regex match works.
  version_text = version_output.decode('utf-8', errors='replace')
  match = bool(re.search('version "%s' % EXPECTED_VERSION, version_text))
  message = "JUnit tests need to be run with Java %s. Check the `java -version` on your PATH." % EXPECTED_VERSION
  assert match, message
def AssertExpectedXcodeVersion():
  """Checks that the user has a recent version of Xcode installed.

  Raises:
    AssertionError: when xcodebuild is absent or the major version is not 11/12.
  """
  EXPECTED_MAJOR_VERSION = ['11', '12']
  version_output = subprocess.check_output(['xcodebuild', '-version'])
  # BUG FIX: decode the bytes output before regex matching (Python 3), use a
  # raw string for the pattern, and guard against a failed match instead of
  # crashing on match.group(1) with AttributeError.
  match = re.match(r"Xcode (\d+)", version_output.decode('utf-8', errors='replace'))
  message = "Xcode must be installed to run the iOS embedding unit tests"
  assert match and match.group(1) in EXPECTED_MAJOR_VERSION, message
def RunJavaTests(filter, android_variant='android_debug_unopt'):
  """Runs the Java JUnit unit tests for the Android embedding"""
  AssertExpectedJavaVersion()

  android_out_dir = os.path.join(out_dir, android_variant)
  EnsureJavaTestsAreBuilt(android_out_dir)

  embedding_deps_dir = os.path.join(buildroot_dir, 'third_party', 'android_embedding_dependencies', 'lib')
  # Classpath entries, joined with ':' below; the JVM expands the wildcard.
  classpath_entries = [
    os.path.join(buildroot_dir, 'third_party', 'android_tools', 'sdk', 'platforms', 'android-30', 'android.jar'),
    os.path.join(embedding_deps_dir, '*'),  # Wildcard for all jars in the directory
    os.path.join(android_out_dir, 'flutter.jar'),
    os.path.join(android_out_dir, 'robolectric_tests.jar'),
  ]

  # Default to the full suite unless a single class was requested.
  test_class = filter if filter else 'io.flutter.FlutterTestSuite'

  RunCmd([
    'java',
    '-Drobolectric.offline=true',
    '-Drobolectric.dependency.dir=' + embedding_deps_dir,
    '-classpath', ':'.join(classpath_entries),
    '-Drobolectric.logging=stdout',
    'org.junit.runner.JUnitCore',
    test_class
  ])
def RunObjcTests(ios_variant='ios_debug_sim_unopt'):
  """Runs Objective-C XCTest unit tests for the iOS embedding"""
  AssertExpectedXcodeVersion()
  ios_out_dir = os.path.join(out_dir, ios_variant)
  EnsureIosTestsAreBuilt(ios_out_dir)

  ios_unit_test_dir = os.path.join(buildroot_dir, 'flutter', 'testing', 'ios', 'IosUnitTests')

  # Avoid using xcpretty unless the following can be addressed:
  # - Make sure all relevant failure output is printed on a failure.
  # - Make sure that a failing exit code is set for CI.
  # See https://github.com/flutter/flutter/issues/63742

  # Single shell command string (adjacent literals concatenate into one
  # element); executed with shell=True so the quoted -destination survives.
  command = [
    'xcodebuild '
    '-sdk iphonesimulator '
    '-scheme IosUnitTests '
    "-destination platform='iOS Simulator,name=iPhone 8' "
    'test '
    'FLUTTER_ENGINE=' + ios_variant
  ]
  RunCmd(command, cwd=ios_unit_test_dir, shell=True)
def RunDartTests(build_dir, filter, verbose_dart_snapshot):
  """Run every *_test.dart under dart_tests_dir, twice each (MT and ST)."""
  # This one is a bit messy. The pubspec.yaml at flutter/testing/dart/pubspec.yaml
  # has dependencies that are hardcoded to point to the sky packages at host_debug_unopt/
  # Before running Dart tests, make sure to run just that target (NOT the whole engine)
  EnsureDebugUnoptSkyPackagesAreBuilt()

  # Now that we have the Sky packages at the hardcoded location, run `pub get`.
  RunEngineExecutable(build_dir, os.path.join('dart-sdk', 'bin', 'pub'), None, flags=['get'], cwd=dart_tests_dir)

  for dart_test_file in glob.glob('%s/*_test.dart' % dart_tests_dir):
    if filter is not None and os.path.basename(dart_test_file) not in filter:
      print("Skipping %s due to filter." % dart_test_file)
      continue
    print("Testing dart file %s" % dart_test_file)
    # Each test runs once multithreaded and once single-threaded.
    for multithreaded in (True, False):
      RunDartTest(build_dir, dart_test_file, verbose_dart_snapshot, multithreaded)
def RunFrontEndServerTests(build_dir):
  """Run the flutter_frontend_server Dart tests with the built Dart SDK."""
  test_dir = os.path.join(buildroot_dir, 'flutter', 'flutter_frontend_server')
  for dart_test_file in glob.glob('%s/test/*_test.dart' % test_dir):
    # Each test receives the frontend_server snapshot and the patched SDK.
    RunEngineExecutable(
      build_dir,
      os.path.join('dart-sdk', 'bin', 'dart'),
      None,
      flags=[
        dart_test_file,
        os.path.join(build_dir, 'gen', 'frontend_server.dart.snapshot'),
        os.path.join(build_dir, 'flutter_patched_sdk'),
      ],
      cwd=test_dir)
def RunConstFinderTests(build_dir):
  """Run the const_finder tool's Dart test against the built artifacts."""
  test_dir = os.path.join(buildroot_dir, 'flutter', 'tools', 'const_finder', 'test')
  test_flags = [
    os.path.join(test_dir, 'const_finder_test.dart'),
    os.path.join(build_dir, 'gen', 'frontend_server.dart.snapshot'),
    os.path.join(build_dir, 'flutter_patched_sdk'),
  ]
  RunEngineExecutable(build_dir, os.path.join('dart-sdk', 'bin', 'dart'), None, flags=test_flags, cwd=test_dir)
def main():
  """Entry point: parse CLI flags and dispatch the selected test suites.

  --type selects comma-separated suites ('all' expands to every suite);
  per-suite filters narrow which executables/scripts/classes run.
  """
  parser = argparse.ArgumentParser()

  parser.add_argument('--variant', dest='variant', action='store',
      default='host_debug_unopt', help='The engine build variant to run the tests for.')
  parser.add_argument('--type', type=str, default='all')
  parser.add_argument('--engine-filter', type=str, default='',
      help='A list of engine test executables to run.')
  parser.add_argument('--dart-filter', type=str, default='',
      help='A list of Dart test scripts to run.')
  parser.add_argument('--java-filter', type=str, default='',
      help='A single Java test class to run.')
  parser.add_argument('--android-variant', dest='android_variant', action='store',
      default='android_debug_unopt',
      help='The engine build variant to run java tests for')
  parser.add_argument('--ios-variant', dest='ios_variant', action='store',
      default='ios_debug_sim_unopt',
      help='The engine build variant to run objective-c tests for')
  parser.add_argument('--verbose-dart-snapshot', dest='verbose_dart_snapshot', action='store_true',
      default=False, help='Show extra dart snapshot logging.')

  args = parser.parse_args()

  if args.type == 'all':
    types = ['engine', 'dart', 'benchmarks', 'java', 'objc', 'font-subset']
  else:
    types = args.type.split(',')

  build_dir = os.path.join(out_dir, args.variant)
  # The java tests build their own variant directory, so only require the
  # host build directory for the other suites.
  if args.type != 'java':
    assert os.path.exists(build_dir), 'Build variant directory %s does not exist!' % build_dir

  engine_filter = args.engine_filter.split(',') if args.engine_filter else None
  if 'engine' in types:
    RunCCTests(build_dir, engine_filter)

  if 'dart' in types:
    assert not IsWindows(), "Dart tests can't be run on windows. https://github.com/flutter/flutter/issues/36301."
    dart_filter = args.dart_filter.split(',') if args.dart_filter else None
    RunDartTests(build_dir, dart_filter, args.verbose_dart_snapshot)
    RunConstFinderTests(build_dir)
    RunFrontEndServerTests(build_dir)

  if 'java' in types:
    assert not IsWindows(), "Android engine files can't be compiled on Windows."
    java_filter = args.java_filter
    # JUnit4 filtering only supports one fully-qualified class name.
    if ',' in java_filter or '*' in java_filter:
      print('Can only filter JUnit4 tests by single entire class name, eg "io.flutter.SmokeTest". Ignoring filter=' + java_filter)
      java_filter = None
    RunJavaTests(java_filter, args.android_variant)

  if 'objc' in types:
    assert IsMac(), "iOS embedding tests can only be run on macOS."
    RunObjcTests(args.ios_variant)

  # https://github.com/flutter/flutter/issues/36300
  if 'benchmarks' in types and not IsWindows():
    RunEngineBenchmarks(build_dir, engine_filter)

  if ('engine' in types or 'font-subset' in types) and args.variant != 'host_release':
    RunCmd(['python', 'test.py'], cwd=font_subset_dir)


if __name__ == '__main__':
  sys.exit(main())
| 35.859961
| 131
| 0.724493
|
4a0832f427047888b41e4338032959ab42927666
| 10,114
|
py
|
Python
|
threedod/benchmark_scripts/show_3d_bbox_annotation.py
|
Levintsky/ARKitScenes
|
d209c6ae512e3638c90da8aeebf2e3a5b345807f
|
[
"AML"
] | 237
|
2021-12-03T03:35:31.000Z
|
2022-03-28T21:05:37.000Z
|
threedod/benchmark_scripts/show_3d_bbox_annotation.py
|
Yaldatkk/ARKitScenes
|
58bf410f65bc2ae2e35e3c3d2a7c45d8b7863fca
|
[
"AML"
] | 19
|
2021-12-05T13:58:15.000Z
|
2022-03-18T14:23:55.000Z
|
threedod/benchmark_scripts/show_3d_bbox_annotation.py
|
Yaldatkk/ARKitScenes
|
58bf410f65bc2ae2e35e3c3d2a7c45d8b7863fca
|
[
"AML"
] | 27
|
2021-12-08T06:08:15.000Z
|
2022-03-30T07:08:51.000Z
|
import vtk
import json
import numpy as np
import argparse
import sys
import subprocess
from plyfile import PlyData
class Render(object):
    """VTK viewer for a PLY mesh/point cloud plus its annotated oriented
    3D bounding boxes and a coordinate-axes gizmo.

    Call the instance to build the scene and start the interactive loop.
    """

    def __init__(self, ply_file, json_file, back_face_cull=False):
        """
        :param ply_file: path of ply file
        :param json_file: path of annotation result json file
        :param back_face_cull: see single side of mesh
        """
        self.annotation = load_json(json_file)
        self.file = ply_file
        self.back_face_cull = back_face_cull
        # Core VTK pipeline objects: reader -> mapper -> actor -> renderer.
        self.reader = vtk.vtkPLYReader()
        self.colors = vtk.vtkNamedColors()
        self.mapper = vtk.vtkPolyDataMapper()
        self.actor = vtk.vtkActor()
        self.ren = vtk.vtkRenderer()
        self.renWin = vtk.vtkRenderWindow()
        self.iren = vtk.vtkRenderWindowInteractor()
        # Translation that recenters the model at the origin; draw_bbox applies
        # the same offsets so boxes stay aligned with the recentered mesh.
        self.offset_x, self.offset_y, self.offset_z = 0, 0, 0
        self.vertex = []
        # Either "mesh" or "pcd"; decided by read_mesh().
        self.file_type = None

    def __call__(self):
        """Build the scene, then start the interactive render loop."""
        self._prepare()
        self.iren.Initialize()
        self.renWin.Render()
        self.ren.GetActiveCamera().SetPosition(15.0, 10.0, 9.0)
        self.ren.GetActiveCamera().SetViewUp(0.1, 0.0, 1.0)
        self.renWin.Render()
        self.iren.Start()

    def _prepare(self):
        """Run the full scene-construction pipeline in dependency order."""
        print("Reading file...")
        self.read_mesh()
        self.set_mapper()
        self.set_actor()
        self.transform_actor()
        self.set_render()
        self.add_actor()
        self.draw_lines()
        self.init_coordinate_axes()
        print("Done")

    def read_mesh(self):
        """Classify the PLY as mesh or point cloud and load it accordingly.

        Meshes go through vtkPLYReader; point clouds are read with plyfile
        and their vertices kept in self.vertex for set_mapper().
        """
        plydata = None
        file_type = check_file_type(self.file)
        if not file_type:
            # Header inspection failed; fall back to fully parsing the file.
            plydata = PlyData.read(self.file)
            self.file_type = "pcd" if plydata["face"].count == 0 else "mesh"
        else:
            self.file_type = file_type
        if self.file_type == "mesh":
            self.reader = vtk.vtkPLYReader()
            self.reader.SetFileName(self.file)
            self.reader.Update()
        else:
            if not plydata:
                plydata = PlyData.read(self.file)
            self.vertex = plydata["vertex"]

    def set_mapper(self):
        """Feed the mapper from the reader (mesh) or raw vertices (pcd)."""
        if self.file_type == "mesh":
            self.mapper.SetInputConnection(self.reader.GetOutputPort())
            self.mapper.SetScalarVisibility(3)
        else:
            # Build one vertex cell per point so the cloud renders as points.
            points = vtk.vtkPoints()
            vertices = vtk.vtkCellArray()
            polydata = vtk.vtkPolyData()
            for index, vertex in enumerate(self.vertex):
                points.InsertPoint(index, vertex[0], vertex[1], vertex[2])
                vertices.InsertNextCell(1)
                vertices.InsertCellPoint(index)
            polydata.SetPoints(points)
            polydata.SetVerts(vertices)
            self.mapper.SetInputData(polydata)

    def set_actor(self):
        """Configure the actor's appearance and recenter it at the origin."""
        if self.file_type == "mesh":
            self.actor.GetProperty().SetBackfaceCulling(self.back_face_cull)
        else:
            self.actor.GetProperty().SetPointSize(1.5)
        self.actor.SetMapper(self.mapper)
        self.actor.GetProperty().SetColor(self.colors.GetColor3d('Tan'))
        # Place the mesh at the origin point for easy viewing
        self.offset_x = -sum(self.actor.GetXRange()) / 2
        self.offset_y = -sum(self.actor.GetYRange()) / 2
        self.offset_z = -sum(self.actor.GetZRange()) / 2
        self.actor.SetPosition(self.offset_x, self.offset_y, self.offset_z)

    def transform_actor(self):
        # no transformation is required in 3D tool,
        # self.xz_align_matrix is a identity matrix
        self.actor.SetUserMatrix(self.xz_align_matrix)

    def set_render(self):
        """Create the render window, camera, and interactor bindings."""
        self.renWin.SetWindowName("demo")
        self.renWin.SetSize(2500, 1800)
        self.renWin.AddRenderer(self.ren)
        self.ren.SetBackground(self.colors.GetColor3d('AliceBlue'))
        self.ren.GetActiveCamera().SetPosition(15.0, 10.0, 9.0)
        self.ren.GetActiveCamera().SetViewUp(0.1, 0.0, 1.0)
        self.bind_mouse_event()
        self.iren.SetRenderWindow(self.renWin)

    def bind_mouse_event(self):
        """Install the custom trackball interactor style."""
        self.iren.SetInteractorStyle(MyEvent())

    def add_actor(self):
        """Add the model actor to the renderer."""
        self.ren.AddActor(self.actor)

    def init_coordinate_axes(self):
        """Add a thin XYZ axes gizmo of length 10 at the origin."""
        axes = vtk.vtkAxesActor()
        axes.SetTotalLength(10, 10, 10)
        axes.SetShaftType(0)
        axes.SetCylinderRadius(0.002)
        self.ren.AddActor(axes)

    def draw_lines(self):
        """Draw every annotated bounding box as wireframe line actors."""
        for bbox in self.bboxes:
            self.draw_bbox(bbox)

    def draw_bbox(self, bbox):
        """Shift one box by the recentering offsets and add its 12 edges.

        NOTE: mutates the corner rows of *bbox* in place.
        """
        for point in bbox:
            point[0] += self.offset_x
            point[1] += self.offset_y
            point[2] += self.offset_z
        # One polyline covers 9 of the 12 edges; the remaining 3 verticals
        # are added as separate two-point segments.
        self.ren.AddActor(line_actor([bbox[0], bbox[1], bbox[2], bbox[3],
                                      bbox[0], bbox[4], bbox[5], bbox[6],
                                      bbox[7], bbox[4]]))
        self.ren.AddActor(line_actor([bbox[3], bbox[7]]))
        self.ren.AddActor(line_actor([bbox[1], bbox[5]]))
        self.ren.AddActor(line_actor([bbox[2], bbox[6]]))

    @property
    def bboxes(self):
        """(num_boxes, 8, 3) array of box corners from the annotation JSON."""
        bbox_list = []
        for label_info in self.annotation["data"]:
            rotation = np.array(label_info["segments"]["obbAligned"]["normalizedAxes"]).reshape(3, 3)
            transform = np.array(label_info["segments"]["obbAligned"]["centroid"]).reshape(-1, 3)
            scale = np.array(label_info["segments"]["obbAligned"]["axesLengths"]).reshape(-1, 3)
            box3d = compute_box_3d(scale.reshape(3).tolist(), transform, rotation)
            bbox_list.append(box3d)
        bbox_list = np.asarray(bbox_list)
        return bbox_list

    @property
    def xz_align_matrix(self):
        # no transformation is required in 3D tool,
        # just return a identity matrix here
        transM = np.identity(4)
        m = [x for y in transM for x in y]
        mat = vtk.vtkMatrix4x4()
        mat.DeepCopy(m)
        mat.Transpose()
        return mat
class MyEvent(vtk.vtkInteractorStyleTrackballCamera):
    """Trackball camera interactor style with explicit button observers.

    Each press/release is routed through our own handler so behavior can be
    customized (e.g. logging) in one place before forwarding to the default
    vtkInteractorStyleTrackballCamera implementation.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.AddObserver("MiddleButtonPressEvent", self.middle_button_press)
        self.AddObserver("MiddleButtonReleaseEvent", self.middle_button_release)
        self.AddObserver("LeftButtonPressEvent", self.left_button_press)
        self.AddObserver("LeftButtonReleaseEvent", self.left_button_release)
        self.AddObserver("RightButtonPressEvent", self.right_button_press)
        self.AddObserver("RightButtonReleaseEvent", self.right_button_release)

    def middle_button_press(self, obj, event):
        self.OnMiddleButtonDown()

    def middle_button_release(self, obj, event):
        self.OnMiddleButtonUp()

    def left_button_press(self, obj, event):
        self.OnLeftButtonDown()

    def left_button_release(self, obj, event):
        self.OnLeftButtonUp()

    def right_button_press(self, obj, event):
        self.OnRightButtonDown()

    def right_button_release(self, obj, event):
        # BUG FIX: previously forwarded to OnLeftButtonUp(), which never ended
        # the right-button (dolly/zoom) interaction started by
        # OnRightButtonDown(), leaving the camera stuck in zoom mode.
        self.OnRightButtonUp()
def load_json(js_path):
    """Parse the JSON file at *js_path* and return the decoded object."""
    with open(js_path, "r") as handle:
        return json.load(handle)
def compute_box_3d(scale, transform, rotation):
    """Return the 8 corners of an oriented 3D bounding box as an (8, 3) array.

    Args:
        scale: full box extents [l, h, w]; halved internally.
        transform: box centroid; any shape reshapable to (3,).
        rotation: 3x3 normalized-axes matrix (rows are the box axes).
    """
    half_l, half_h, half_w = (extent / 2 for extent in scale)
    center = np.reshape(transform, (-1, 3)).reshape(3)
    # Corner offsets in the box's local frame, enumerated in the fixed
    # order the wireframe drawing code expects.
    x_offsets = [half_l, half_l, -half_l, -half_l, half_l, half_l, -half_l, -half_l]
    y_offsets = [half_h, -half_h, -half_h, half_h, half_h, -half_h, -half_h, half_h]
    z_offsets = [half_w, half_w, half_w, half_w, -half_w, -half_w, -half_w, -half_w]
    local_corners = np.vstack([x_offsets, y_offsets, z_offsets])
    # Rotate into world space, then translate to the centroid.
    world_corners = np.dot(np.transpose(rotation), local_corners) + center.reshape(3, 1)
    return np.transpose(world_corners)
def check_file_type(file):
    """Classify a PLY file as "pcd" or "mesh" by inspecting its header.

    Reads up to the first 30 lines looking for an "element face N"
    declaration: N == 0 means a point cloud ("pcd"), anything else a mesh.

    Returns:
        "pcd", "mesh", or None when the file cannot be read or no face
        element is found in the header.
    """
    # IMPROVEMENT: previously shelled out to `head -n 30 <file>` with
    # shell=True, which is non-portable (no `head` on Windows) and unsafe for
    # paths containing shell metacharacters. Reading the header directly is
    # equivalent and dependency-free.
    header_lines = []
    try:
        with open(file, "rb") as handle:
            for _ in range(30):
                raw_line = handle.readline()
                if not raw_line:
                    break
                header_lines.append(raw_line)
    except Exception:
        return None
    for raw_line in header_lines:
        try:
            line = raw_line.decode("utf-8").strip()
        except Exception:
            # Binary payload bytes past the header; skip undecodable lines.
            continue
        if "element face" in line:
            face_count = int(line.split(" ")[-1])
            return "pcd" if face_count == 0 else "mesh"
    return None
def line_actor(points):
    """Build a Tomato-colored polyline actor connecting *points* in order."""
    pts = vtk.vtkPoints()
    for point in points:
        pts.InsertNextPoint(point)

    # One vtkLine cell per consecutive pair of points.
    segments = vtk.vtkCellArray()
    for start in range(len(points) - 1):
        segment = vtk.vtkLine()
        segment.GetPointIds().SetId(0, start)
        segment.GetPointIds().SetId(1, start + 1)
        segments.InsertNextCell(segment)

    polyline = vtk.vtkPolyData()
    polyline.SetPoints(pts)
    polyline.SetLines(segments)

    # Visualization pipeline: polydata -> mapper -> actor.
    mapper = vtk.vtkPolyDataMapper()
    mapper.SetInputData(polyline)
    actor = vtk.vtkActor()
    actor.SetMapper(mapper)
    actor.GetProperty().SetLineWidth(4)
    actor.GetProperty().SetColor(vtk.vtkNamedColors().GetColor3d("Tomato"))
    return actor
def get_args():
    """Define and parse this viewer's command-line arguments."""
    parser = argparse.ArgumentParser()
    parser.add_argument("-f", "--file", type=str,
                        help="path of ply file")
    parser.add_argument("-a", "--anno", type=str,
                        help="path of json file")
    parser.add_argument("-s", "--side", type=int, default=1,
                        help="0: double side, 1:single side")
    return parser.parse_args()
if __name__ == '__main__':
    # With no CLI arguments, inject -h so argparse prints usage and exits
    # instead of failing later with missing file paths.
    if len(sys.argv) == 1:
        sys.argv.append("-h")
    args = get_args()
    # NOTE(review): the integer --side value is passed as back_face_cull;
    # it is treated as a boolean by VTK — confirm intent.
    render = Render(args.file, args.anno, args.side)
    render()  # Render.__call__ builds the scene and starts the interactor.
| 33.379538
| 101
| 0.602927
|
4a0833946459e839c99d54181eeefd7cdfb6294d
| 842
|
py
|
Python
|
m_src/transaction/range.py
|
komthanh/v20-python-samples
|
27047c332aa3d34217819a593834effb13414d40
|
[
"MIT"
] | null | null | null |
m_src/transaction/range.py
|
komthanh/v20-python-samples
|
27047c332aa3d34217819a593834effb13414d40
|
[
"MIT"
] | null | null | null |
m_src/transaction/range.py
|
komthanh/v20-python-samples
|
27047c332aa3d34217819a593834effb13414d40
|
[
"MIT"
] | null | null | null |
import argparse
import common.config
import common.args
def main():
    """Fetch and print a range of transactions for the active v20 account.

    CLI: fromid toid [--type TYPE ...]; repeated --type values are joined
    into the comma-separated filter of the transaction range endpoint.
    """
    parser = argparse.ArgumentParser()

    common.config.add_argument(parser)

    parser.add_argument('fromid')
    parser.add_argument('toid')
    parser.add_argument('--type', action='append')

    # BUG FIX: parse_args() was previously fed a hard-coded debug string
    # ("1 60 --type MARKET_ORDER --type LIMIT_ORDER"), silently ignoring
    # the real command line.
    args = parser.parse_args()

    api = args.config.create_context()

    # Join repeated --type values into the API's comma-separated filter;
    # None means no type filtering.
    type_filter = ','.join(args.type) if args.type is not None else None

    account_id = args.config.active_account
    response = api.transaction.range(
        account_id, fromID=args.fromid, toID=args.toid, type=type_filter)

    # 200 selects the "transactions" field of the successful response.
    for transaction in response.get("transactions", 200):
        print(transaction.title())


if __name__ == "__main__":
    main()
| 25.515152
| 97
| 0.674584
|
4a0834cc54a0f5692b312c51f9eef7d4a2b3e301
| 4,202
|
py
|
Python
|
data_cleaning/data_cleaning.py
|
Rishabh1501/discord-review-bot
|
a9f8f4b42dc88d93e52c7c8e53b9260b2441bf43
|
[
"MIT"
] | null | null | null |
data_cleaning/data_cleaning.py
|
Rishabh1501/discord-review-bot
|
a9f8f4b42dc88d93e52c7c8e53b9260b2441bf43
|
[
"MIT"
] | null | null | null |
data_cleaning/data_cleaning.py
|
Rishabh1501/discord-review-bot
|
a9f8f4b42dc88d93e52c7c8e53b9260b2441bf43
|
[
"MIT"
] | null | null | null |
"""
Copyright (c) 2021 Rishabh Kalra <rishabhkalra1501@gmail.com>
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
import os
import nltk
import string
import pandas as pd
from nltk.stem import PorterStemmer
from nltk.corpus import stopwords
# nltk.download() To download all the nltk libraries
class Cleaner:
    """Clean review text (tokenize, drop stop words, strip punctuation, stem)
    plus small DataFrame convenience helpers.

    Return: None
    """

    def __init__(self):
        # Stemmer applied to every surviving token.
        self.stemmer = PorterStemmer()
        # English stop words from nltk (requires the corpus to be downloaded).
        self.stop_words = stopwords.words('english')
        # Extra tokenizer artifacts to discard outright.
        self.unnecessary_words = ["br", "'ll",
                                  "..", "....", "n't", "...", " ... "]
        # Characters stripped from inside tokens.
        self.punctuation = string.punctuation

    def review_to_words(self, sentence):
        """
        Converts a sentence into a clean and stemmed sentence

        Args:
            sentence (string): sentence to be cleaned

        Raises:
            Exception: any Exception, check logs for specifics

        Returns:
            String : Cleaned Sentence
        """
        try:
            # Translation table that deletes every punctuation character.
            strip_punct = str.maketrans('', '', self.punctuation)
            cleaned = []
            for token in nltk.word_tokenize(sentence):
                token = token.lower()
                if token in self.stop_words or token in self.unnecessary_words:
                    continue
                stripped = token.translate(strip_punct)
                # BUG FIX: tokens consisting entirely of punctuation were
                # previously kept verbatim (the empty strip result was
                # ignored and the raw token appended); drop them instead.
                if stripped:
                    cleaned.append(self.stemmer.stem(stripped))
            return " ".join(cleaned)
        except Exception as e:
            raise Exception(e)

    def ret_cleaned_dataframe(self, dataframe, col_num=0):
        """Returns a cleaned dataframe

        Args:
            dataframe (pandas.DataFrame): DataFrame to be Cleaned
            col_num (int, optional): Number of the column to be cleaned. Defaults to 0.

        Raises:
            Exception: any Exception, check logs for specifics

        Returns:
            pandas.DataFrame: pandas DataFrame
        """
        try:
            # NOTE(review): the sentiment label encoding of column
            # col_num + 1 is commented out upstream, so this is currently
            # a pass-through; the unused column lookup was removed.
            return dataframe
        except Exception as e:
            raise Exception(e)

    def save_dataframe_in_csv(self, dataframe, file_path):
        """saves the dataframe in csv format

        Args:
            dataframe (pandas.DataFrame): DataFrame to be saved
            file_path (string/path): path to save the dataframe in csv format

        Raises:
            Exception: any Exception, check logs for specifics
        """
        try:
            dataframe.to_csv(file_path, index_label=False)
        except Exception as e:
            raise Exception(e)
| 35.016667
| 112
| 0.627082
|
4a083625f535cdcfdfb73b66236474206c832670
| 40,057
|
py
|
Python
|
rig/face_feature_manager.py
|
jzboylxj/XDLibs
|
76ab640502d7e254bc98930d6ebb9e870476ed9a
|
[
"MIT"
] | 1
|
2021-03-11T02:24:08.000Z
|
2021-03-11T02:24:08.000Z
|
rig/face_feature_manager.py
|
jzboylxj/XDLibs
|
76ab640502d7e254bc98930d6ebb9e870476ed9a
|
[
"MIT"
] | null | null | null |
rig/face_feature_manager.py
|
jzboylxj/XDLibs
|
76ab640502d7e254bc98930d6ebb9e870476ed9a
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# coding: utf-8
# @Time : 2020/12/18 9:57
# @Author : Li XiaoJun
# @Site :
# @File : face_feature_manager.py
import os
from imp import reload
import pymel.core as pm
from animation import common as xd_com, helper
reload(xd_com)
version = 0.2
def get_module_list(path="", return_type="folders"):
    u"""Scan *path* and list its sub-folders or its files without extensions.

    (Translated from the original Chinese docstring.)

    :param path: directory to scan; an empty string yields an empty result.
    :param return_type: "folders" for sub-directory names, "files" for file
        names with their extension stripped. The ".mayaSwatches" entry is
        always ignored.
    :return: list of names according to *return_type*.
    """
    json_list = []
    folder_list = []
    if path != '':
        for entry in os.listdir(path):
            if entry == ".mayaSwatches":
                continue
            full_path = os.path.join(path, entry)
            if os.path.isdir(full_path):
                folder_list.append(entry)
            elif os.path.isfile(full_path):
                # Keep only the file name, without its extension.
                json_list.append(os.path.splitext(entry)[0])
    if return_type == "files":
        # BUG FIX: previously returned the raw name of the LAST directory
        # entry (a single str) instead of the accumulated list of
        # extension-less file names that the loop built but never used.
        return json_list
    elif return_type == "folders":
        return folder_list
class FeatureControllerGroup():
    """Reader for a feature's ControlGroup JSON file.

    Maps each controller name to its control-group description.
    """

    def __init__(self, file):
        self.file = file
        self._data = self.get_data()

    def get_data(self):
        """Load and return the raw JSON content of this group file."""
        return xd_com.read_json(file_path=self.file)

    def get_controller_group_data(self, controller):
        """Return the control-group entry recorded for *controller*."""
        return self._data[controller]
class FeatureController():
    """Reader for a feature's Controller JSON file.

    The JSON maps one top-level feature name to a list of controller dicts,
    each carrying at least a "ControllerName" key.
    """

    def __init__(self, file):
        self.file = file
        self._data = self.get_data()
        self._feature_name = self.get_feature_name()

    def get_data(self):
        """Load and return the raw JSON content of this controller file."""
        return xd_com.read_json(file_path=self.file)

    def get_feature_name(self):
        """Return (and cache) the single top-level feature key.

        BUG FIX: ``self._data.keys()[0]`` raises TypeError on Python 3
        because dict views do not support indexing; use next(iter(...)).
        """
        self._feature_name = next(iter(self._data))
        return self._feature_name

    def get_controller_data(self, index):
        """Return the controller dict at *index* for this feature."""
        feature = self.get_feature_name()
        return self._data[feature][index]

    def get_controller_list(self):
        """Return the ControllerName of every controller for this feature."""
        feature = self.get_feature_name()
        return [entry["ControllerName"] for entry in self._data[feature]]

    def __str__(self):
        return self.get_feature_name()
class FeatureComponent():
    """Maya UI builder for one feature component described by JSON data.

    Expects *data* to carry "shapeType" and "ControlGroup" keys; each
    control-group entry has a "GroupName" and a "BoneRange" list of bones.
    """

    def __init__(self, data):
        self._data = data
        self.name = self.get_shape_type()
        self.control_group = self.get_control_group()

    def get_shape_type(self):
        """Return the component's shape type from the JSON data."""
        return self._data["shapeType"]

    def get_control_group(self):
        """Return the list of control-group dicts from the JSON data."""
        return self._data["ControlGroup"]

    def build_widget(self, parent):
        """Create one tab per control group under the *parent* tabLayout."""
        for axis_data in self.control_group:
            tab = self.axis_widget(data=axis_data, parent=parent)
            pm.tabLayout(parent, e=True, tabLabel=((tab, axis_data["GroupName"])))

    def axis_widget(self, data=None, parent=""):
        """Build the form layout for a single control group (axis).

        Left: scroll list of the group's bones with a context menu.
        Right: buttons that update the bones' Max/Min poses.
        Returns the created formLayout.
        """
        name = data["GroupName"]
        print("axis_widget:{}".format(name))

        joint_list = []
        for bone_data in data["BoneRange"]:
            joint_list.append(bone_data["BoneName"])

        layout = pm.formLayout("{}_FormLayout".format(name), p=parent)

        joint_list_frame = pm.frameLayout(
            "{}_JointListFrameLayout".format(name), label="Joint List", p=layout)
        pm.textScrollList("{}_JointListWidget".format(name),
                          w=120, a=joint_list,
                          sc=lambda *args: self.select_joint("{}_JointListWidget".format(name)))
        pm.popupMenu()
        # Menu label is Chinese for "Add bones".
        # NOTE(review): add_axis_joints / update_joints_meta are not defined
        # on this class in the visible code — confirm they exist elsewhere.
        pm.menuItem(label=u"添加骨骼", c=lambda *args: self.add_axis_joints())
        pm.setParent(joint_list_frame)

        joint_meta_frame = pm.frameLayout(
            "{}_JointMetaFrameWidget".format(name), label="Joint Meta", p=layout)
        pm.button(label=u"Update Max", c=lambda *args: self.update_joints_meta(value="Max"))
        pm.button(label=u"Update Min", c=lambda *args: self.update_joints_meta(value="Min"))
        pm.setParent("..")

        # Pin the joint list to the left edge and the meta frame to the
        # right, with the meta frame attached after the list.
        pm.formLayout(layout, edit=True,
                      attachForm=[
                          (joint_list_frame, 'top', 10),
                          (joint_list_frame, 'left', 10),
                          (joint_list_frame, 'bottom', 10),
                          (joint_meta_frame, 'top', 10),
                          (joint_meta_frame, 'right', 10),
                          (joint_meta_frame, 'bottom', 10),
                      ],
                      attachControl=[
                          (joint_meta_frame, 'left', 5, joint_list_frame),
                      ])

        pm.setParent(layout)
        return layout

    def select_joint(self, widget):
        """Select in the scene the joint highlighted in the scroll list."""
        pm.select(pm.textScrollList(widget, q=True, si=True)[0])
class FaceFeatureModule():
def __init__(self, name, data_path):
self.name = name
self.data_root = data_path
self.control_group_file = "{}/{}/{}ControlGroup.json".format(self.data_root, self.name, self.name)
self.control_file = "{}/{}/{}Controller.json".format(self.data_root, self.name, self.name)
self.control_group_data = None
self.control_data = None
def feature_widget(self, parent):
layout = pm.formLayout("{}FormTabLayout".format(self.name), p=parent)
controller_list_frame = pm.frameLayout("{}ControllerListFrameLayout".format(self.name),
bgs=True, mh=10, mw=10, p=layout,
label=("{} Controllers".format(self.name).title()))
pm.textScrollList("{}ControllerListWidget".format(self.name), w=120, h=130,
sc=lambda *args: self.select_controller())
pm.popupMenu()
pm.menuItem(label=u"创建测试代理体", c=lambda *args: self.build_test_proxy())
pm.button("{}ControllerBuildBtn".format(self.name), label="New", w=100,
c=lambda *args: self.command_new_control())
pm.setParent(controller_list_frame)
controller_meta_frame = pm.frameLayout("{}ControllerMetaFrameLayout".format(self.name),
bgs=True, mh=10, mw=10, p=layout,
label=("{} meta".format(self.name).title()))
pm.radioButtonGrp("{}ControllerSideField".format(self.name),
label=u'控制器位置',
numberOfRadioButtons=2, labelArray2=['Middle', 'LF And RT'], cw3=[140, 80, 80])
pm.textFieldGrp("{}ControllerNameField".format(
self.name), label=u"控制器")
pm.textFieldGrp("{}ControllerBoneNameField".format(
self.name), label=u"控制器挂点骨骼")
pm.floatFieldGrp("{}ControllerPositionOffsetField".format(self.name),
label=u'控制器位置偏移', numberOfFields=3,
value1=0.0, value2=0.0, value3=0.0, cw4=[140, 50, 50, 50])
pm.checkBoxGrp("{}ControllerAxisControlField".format(self.name),
label=u'控制器滑竿',
numberOfCheckBoxes=3, labelArray3=['XAxis', 'YAxis', 'ZAxis'],
cw4=[140, 80, 80, 80])
pm.button("{}ControllerMetaUpdateBtn".format(self.name),
label=u"更新", c=lambda *args: self.update_meta_data())
pm.setParent(controller_meta_frame)
joint_list_frame = pm.frameLayout("{}ControlJointListFrameLayout".format(self.name),
bgs=True, mh=10, mw=10, p=layout,
label=("{} control joints".format(self.name).title()))
pm.tabLayout("{}ControlJointListTabLayout".format(self.name), p=joint_list_frame)
pm.setParent("..")
pm.setParent(joint_list_frame)
pm.formLayout(
layout, edit=True,
attachForm=[
(controller_list_frame, 'top', 10),
(controller_list_frame, 'left', 10),
(controller_meta_frame, 'top', 10),
(controller_meta_frame, 'right', 10),
(joint_list_frame, 'left', 10),
(joint_list_frame, 'right', 10),
(joint_list_frame, 'bottom', 10),
],
attachControl=[
(controller_meta_frame, 'left', 5, controller_list_frame),
(joint_list_frame, 'top', 5, controller_meta_frame),
])
pm.setParent("..")
self.init_data()
return layout
def init_data(self):
self.control_data = FeatureController(self.control_file)
self.control_group_data = FeatureControllerGroup(self.control_group_file)
controller_list = self.control_data.get_controller_list()
pm.textScrollList("{}ControllerListWidget".format(self.name), e=True, a=controller_list)
return
def select_controller(self):
selected_index = pm.textScrollList("{}ControllerListWidget".format(self.name), q=True, sii=True)[0]
controller_data = self.control_data.get_controller_data(index=selected_index - 1)
self.refresh_meta_data(controller_data)
selected_controller = pm.textScrollList("{}ControllerListWidget".format(self.name), q=True, si=True)[0]
selected_control_group = self.control_group_data.get_controller_group_data(selected_controller)
self.refresh_control_group_meta_data(selected_control_group)
return selected_index
def refresh_meta_data(self, data):
# print(data)
pm.textFieldGrp("{}ControllerNameField".format(self.name), e=True, text=data["ControllerName"])
pm.textFieldGrp("{}ControllerBoneNameField".format(self.name), e=True, text=data["ControllerBoneName"])
pm.floatFieldGrp("{}ControllerPositionOffsetField".format(self.name), e=True,
value1=data["ControllerPositionOffset"][0] * 100,
value2=data["ControllerPositionOffset"][1] * 100,
value3=data["ControllerPositionOffset"][2] * 100)
if data["AxisControl"]["XAxis"] == "":
axis_x = False
else:
axis_x = True
if data["AxisControl"]["YAxis"] == "":
axis_y = False
else:
axis_y = True
if data["AxisControl"]["ZAxis"] == "":
axis_z = False
else:
axis_z = True
pm.checkBoxGrp("{}ControllerAxisControlField".format(self.name), e=True,
value1=axis_x, value2=axis_y, value3=axis_z)
def refresh_control_group_meta_data(self, data):
tab_list = pm.tabLayout("{}ControlJointListTabLayout".format(self.name), q=True, ca=True)
if not tab_list is None:
for tab in tab_list:
pm.deleteUI(tab)
axis_tabs = FeatureComponent(data)
axis_tabs.build_widget(parent="{}ControlJointListTabLayout".format(self.name))
def build_test_proxy(self):
selected_controller = pm.textScrollList("{}ControllerListWidget".format(self.name), q=True, si=True)[0]
selected_tab = self.name
if not pm.objExists("TestProxyGrp"):
pm.createNode("transform", name="TestProxyGrp")
test_controller = pm.spaceLocator(name="Test{}".format(selected_controller))
pm.parent(test_controller, "TestProxyGrp")
control_group = self.control_group_data.get_controller_group_data(selected_controller)["ControlGroup"]
for control_data in control_group:
pm.addAttr(test_controller, ln=control_data["GroupName"], at="double", dv=0, min=-1, max=1)
pm.setAttr("{}.{}".format(test_controller, control_data["GroupName"]), e=True, k=True)
self.sdk_bone(source="{}.{}".format(test_controller, control_data["GroupName"]),
target_data=control_data)
return
def sdk_bone(self, source, target_data):
print(source)
print(target_data)
attr_list = ["tx", "ty", "tz", "rx", "ry", "rz", "sx", "sy", "sz"]
if len(target_data["BoneRange"]) > 0:
for bone in target_data["BoneRange"]:
for dv_attr in attr_list:
pm.setDrivenKeyframe(
"%s.%s" % (bone["BoneName"], dv_attr),
cd=source,
dv=0)
max_value = bone["Max"]
dv_value = [
max_value[0] * 100,
max_value[1] * 100,
max_value[2] * 100,
max_value[3],
max_value[4],
max_value[5],
max_value[6],
max_value[7],
max_value[8],
]
helper.position_joint(bone["BoneName"], value=dv_value)
for dv_attr in attr_list:
pm.setDrivenKeyframe(
"%s.%s" % (bone["BoneName"], dv_attr),
cd=source,
dv=1)
min_value = bone["Min"]
dv_value = [
min_value[0] * 100,
min_value[1] * 100,
min_value[2] * 100,
min_value[3],
min_value[4],
min_value[5],
min_value[6],
min_value[7],
min_value[8],
]
helper.position_joint(bone["BoneName"], value=dv_value)
for dv_attr in attr_list:
pm.setDrivenKeyframe(
"%s.%s" % (bone["BoneName"], dv_attr),
cd=source,
dv=-1)
pm.setAttr(source, 0)
return
    def __str__(self):
        # The module's display name doubles as its string representation.
        return self.name
class FaceFeatureManager(xd_com.Singleton):
    u"""Singleton window that edits face feature modules stored as JSON.

    Scans the configured folder for module sub-folders and shows one
    FaceFeatureModule tab per folder found.
    """

    def __init__(self):
        super(FaceFeatureManager, self).__init__()
        self.toolName = "FaceFeatureManager"
        self.json_path = ''          # folder holding the per-module JSON files
        self.module_sections = []    # module folder names found under json_path
        self.create_window()
        self.create_layout()
        self.initialize()

    def create_window(self):
        """(Re)create the tool window, replacing any previous instance."""
        if pm.window(self.toolName, ex=True):
            pm.deleteUI(self.toolName)
        pm.window(self.toolName, t=u"角色脸部特征编辑器 {}".format(version), mb=True,
                  cc=lambda *args: self._closed_window_cmd())
        pm.showWindow(self.toolName)

    def create_layout(self):
        """Build the config frame on top and the module tab layout below it."""
        form_layout = pm.formLayout(p=self.toolName)
        config_frame = pm.frameLayout(
            p=form_layout, label=u"配置面板", mw=5, mh=5, bgs=True, cll=False, cl=False)
        pm.textFieldButtonGrp(
            "XDFaceEditDataStoreField", label=u"存储路径", bl=u"设置", adj=2, cw3=[60, 100, 40],
            bc=lambda *args: self.setting_json_path())
        # NOTE(review): command_new_module is not defined on this class in the
        # visible source — clicking the button would raise AttributeError.
        # Presumably supplied elsewhere; confirm.
        pm.textFieldButtonGrp(
            "XDFaceEditNewModuleField", label=u"特征模块", bl=u"新建", adj=2, cw3=[60, 100, 40],
            bc=lambda *args: self.command_new_module())
        pm.setParent(config_frame)
        main_tab = pm.tabLayout("XDFeatureManagerTabLayout",
                                p=form_layout, innerMarginWidth=5, innerMarginHeight=5)
        pm.setParent(main_tab)
        pm.formLayout(
            form_layout, edit=True,
            attachForm=[
                (config_frame, 'top', 5),
                (config_frame, 'left', 5),
                (config_frame, 'right', 5),
                (main_tab, 'left', 5),
                (main_tab, 'right', 5),
                (main_tab, 'bottom', 5),
            ],
            attachControl=[
                (main_tab, 'top', 5, config_frame),
            ])
        pm.setParent(form_layout)

    def _closed_window_cmd(self):
        # Persist the chosen folder across Maya sessions.
        pm.optionVar(sv=('jsonManagerFolder', self.json_path))

    def initialize(self):
        """Restore the stored folder (if any) and build one tab per module."""
        if pm.optionVar(q='jsonManagerFolder'):
            self.json_path = pm.optionVar(q='jsonManagerFolder')
            pm.textFieldButtonGrp("XDFaceEditDataStoreField", e=True, text=self.json_path)
            self.module_sections = get_module_list(path=self.json_path, return_type="folders")
            for module_name in self.module_sections:
                module = FaceFeatureModule(module_name, self.json_path)
                layout = module.feature_widget(parent="XDFeatureManagerTabLayout")
                pm.tabLayout("XDFeatureManagerTabLayout", edit=True,
                             tabLabel=(layout, module_name))

    def setting_json_path(self):
        """Let the user pick the JSON storage folder.

        Bug fix: pm.fileDialog2 returns None when the dialog is cancelled,
        so the original unconditional ``json_folder[0]`` raised TypeError
        on cancel. Guard the return value before indexing.
        """
        json_folder = pm.fileDialog2(
            dialogStyle=2, fileFilter="JSON File (*.json);;", fileMode=3, okc=u"选择文件夹")
        if json_folder and json_folder[0]:
            self.json_path = json_folder[0]
            pm.textFieldButtonGrp("XDFaceEditDataStoreField", e=True, text=self.json_path)
        return
def show_feature_manager():
    u"""Entry point: open the face feature editor window (FaceEditUI singleton)."""
    FaceEditUI()
class FaceEditUI(xd_com.Singleton):
    u"""Singleton editor window that stacks every face feature module widget."""

    def __init__(self):
        super(FaceEditUI, self).__init__()
        self.toolName = "XDFaceEditUI"
        self.json_path = ""          # folder holding the per-module JSON files
        self.module_sections = []    # module folder names found under json_path
        # Data first, then window, then layout (layout needs module_sections).
        self.initialize()
        self.create_window()
        self.create_layout()

    def create_window(self):
        """(Re)create the tool window and its root form layout."""
        if pm.window(self.toolName, ex=True):
            pm.deleteUI(self.toolName)
        pm.window(self.toolName, t=u"角色脸部特征编辑器 {}".format(version), mb=True,
                  cc=lambda *args: self._closed_window_cmd())
        form_layout = pm.formLayout("FaceEditMainLayout", p=self.toolName)
        pm.setParent(form_layout)
        pm.showWindow(self.toolName)

    def _closed_window_cmd(self):
        # Persist the chosen folder across Maya sessions.
        pm.optionVar(sv=('jsonManagerFolder', self.json_path))

    def initialize(self):
        """Restore the stored JSON folder and scan it for module folders."""
        if pm.optionVar(q='jsonManagerFolder'):
            self.json_path = pm.optionVar(q='jsonManagerFolder')
        self.read_json()

    def read_json(self):
        """Refresh the list of module folder names from json_path."""
        self.module_sections = get_module_list(path=self.json_path, return_type="folders")

    def create_layout(self):
        """Config frame on top, scrollable feature module list below."""
        config_frame = self.config_frame(parent="FaceEditMainLayout")
        feature_layout = pm.scrollLayout("FaceEditFeatureLayout", cr=True, p="FaceEditMainLayout")
        pm.setParent(feature_layout)
        pm.formLayout(
            "FaceEditMainLayout", edit=True,
            attachForm=[
                (config_frame, 'top', 5),
                (config_frame, 'left', 5),
                (config_frame, 'right', 5),
                (feature_layout, 'left', 5),
                (feature_layout, 'right', 5),
                (feature_layout, 'bottom', 5),
            ],
            attachControl=[
                (feature_layout, 'top', 5, config_frame),
            ])
        self.get_feature_modules(parent=feature_layout)
        return

    def config_frame(self, parent):
        """Build the configuration frame (storage path + new-module button)."""
        config_frame = pm.frameLayout(
            p=parent, label=u"配置面板", mw=5, mh=5, bgs=True, cll=False, cl=False)
        pm.textFieldButtonGrp(
            "XDFaceEditDataStoreField", label=u"存储路径", bl=u"设置", adj=2, cw3=[60, 100, 40],
            text=self.json_path,
            bc=lambda *args: self.setting_json_path())
        # NOTE(review): command_new_module is not defined on this class in the
        # visible source — clicking the button would raise AttributeError.
        pm.textFieldButtonGrp(
            "XDFaceEditNewModuleField", label=u"特征模块", bl=u"新建", adj=2, cw3=[60, 100, 40],
            bc=lambda *args: self.command_new_module())
        pm.setParent(config_frame)
        return config_frame

    def setting_json_path(self):
        """Let the user pick the JSON storage folder.

        Bug fix: pm.fileDialog2 returns None when cancelled; the original
        ``json_folder[0]`` raised TypeError on cancel. Guard before indexing.
        """
        json_folder = pm.fileDialog2(
            dialogStyle=2, fileFilter="JSON File (*.json);;", fileMode=3, okc=u"选择文件夹")
        if json_folder and json_folder[0]:
            self.json_path = json_folder[0]
            pm.textFieldButtonGrp("XDFaceEditDataStoreField", e=True, text=self.json_path)
        return

    def get_feature_modules(self, parent):
        """Instantiate a FaceModule widget for every discovered module folder."""
        for module_name in self.module_sections:
            module = FaceModule(module_name)
            module.load_data(file_path=self.json_path)
            module.build_widget(parent=parent)
def sdk_bone(source, target_data):
    """Key every bone in *target_data* against the driver attribute *source*.

    Keys the rest pose at driver value 0, the "Max" pose at +1 and the "Min"
    pose at -1, then resets the driver to 0. The first three stored values
    (translation) are scaled x100 back to Maya centimeters.
    """
    print(source)
    print(target_data)
    attr_list = ["tx", "ty", "tz", "rx", "ry", "rz", "sx", "sy", "sz"]
    if len(target_data["BoneRange"]) > 0:
        for bone in target_data["BoneRange"]:
            bone_name = bone["BoneName"]
            # Rest pose keys at driver value 0.
            for dv_attr in attr_list:
                pm.setDrivenKeyframe(
                    "%s.%s" % (bone_name, dv_attr), cd=source, dv=0)
            # Extreme poses: Max at +1 first, then Min at -1 (original order).
            for driver_value, range_key in ((1, "Max"), (-1, "Min")):
                raw = bone[range_key]
                pose = [component * 100 for component in raw[:3]] + list(raw[3:9])
                helper.position_joint(bone_name, value=pose)
                for dv_attr in attr_list:
                    pm.setDrivenKeyframe(
                        "%s.%s" % (bone_name, dv_attr), cd=source, dv=driver_value)
    pm.setAttr(source, 0)
    return
def joint_cb_list(jnt, pre=5):
    u"""Return the joint's channel-box values as a 9-element list.

    Lists translate, rotate and scale, each rounded to *pre* decimals.
    Translation is scaled down by 100 (x0.01) to compensate for the unit
    difference between Maya and Unity when round-tripping FBX files.

    :param jnt: joint node name
    :param pre: number of decimal places to keep
    :return: [tx, ty, tz, rx, ry, rz, sx, sy, sz]
    """
    node = pm.PyNode(jnt)
    translate = [round(axis.get() * 0.01, pre)
                 for axis in (node.translateX, node.translateY, node.translateZ)]
    rotate = [round(axis.get(), pre)
              for axis in (node.rotateX, node.rotateY, node.rotateZ)]
    scale = [round(axis.get(), pre)
             for axis in (node.scaleX, node.scaleY, node.scaleZ)]
    return translate + rotate + scale
class FaceModule:
    u"""One face feature module (e.g. mouth, eye) backed by two JSON files.

    ``<Name>Controller.json`` stores per-controller meta data and
    ``<Name>ControlGroup.json`` stores the driven-bone groups per controller.
    The class both holds that data and builds/refreshes its Maya UI widgets.
    """

    def __init__(self, name):
        self.name = name                        # module name, used as widget-name prefix
        self.file_path = None                   # root folder passed to load_data()
        self.control_file = None                # path of <name>Controller.json
        self.control_group_file = None          # path of <name>ControlGroup.json
        self.controller_data = {}               # parsed controller meta data
        self.control_group_data = {}            # parsed controller group data
        self.controller_list_widget = None      # textScrollList of controllers
        self.controller_name_widget = None      # name text field
        self.controller_bone_widget = None      # attach-bone text field
        self.controller_offset_widget = None    # position offset float fields
        self.controller_axis_widget = None      # XYZ axis check boxes
        self.controller_group_tablayout = None  # per-axis joint tab layout
        self.context_controller = None          # controller currently selected

    def load_data(self, file_path):
        """Resolve the two JSON file paths under *file_path*/<name> and read them."""
        self.file_path = file_path
        module_root = os.path.join(file_path, self.name)
        self.control_file = os.path.join(module_root, '{}Controller.json'.format(self.name)).replace('\\', '/')
        self.control_group_file = os.path.join(module_root, '{}ControlGroup.json'.format(self.name)).replace('\\', '/')
        if os.path.isfile(self.control_file):
            self.controller_data = xd_com.read_json(self.control_file)
        if os.path.isfile(self.control_group_file):
            self.control_group_data = xd_com.read_json(self.control_group_file)

    def get_controller_list(self):
        """Return the list of controller names declared in the controller JSON."""
        controller_list = []
        for controller_data in self.controller_data["{}Controller".format(self.name)]:
            controller_list.append(controller_data["ControllerName"])
        return controller_list

    def controller_detail(self, index):
        """Return the meta-data dict of the controller at *index* (0-based)."""
        return self.controller_data["{}Controller".format(self.name)][index]

    def get_module_controller(self, controller):
        """Return the whole control-group record for *controller*."""
        return self.control_group_data[controller]

    def get_module_controller_group(self, controller, axis):
        """Return the "<controller>_<Axis>" group dict, or None if absent."""
        bone_range = self.control_group_data[controller]["ControlGroup"]
        for data in bone_range:
            axis_side = "{}_{}".format(controller, axis.title())
            if data["GroupName"] == axis_side:
                return data

    def update_module_controller_group(self, controller, axis, value="Max"):
        """Capture the current pose of every bone in the axis group.

        Writes the channel-box snapshot (via joint_cb_list) into either the
        "Max" or "Min" slot and persists the group JSON file.
        """
        bone_range = self.control_group_data[controller]["ControlGroup"]
        for data in bone_range:
            axis_side = "{}_{}".format(controller, axis.title())
            if data["GroupName"] == axis_side:
                bone_data_list = data["BoneRange"]
                for bone_data in bone_data_list:
                    if pm.objExists(bone_data["BoneName"]):
                        if value == "Max":
                            bone_data["Max"] = joint_cb_list(bone_data["BoneName"])
                        if value == "Min":
                            bone_data["Min"] = joint_cb_list(bone_data["BoneName"])
        xd_com.write_json(self.control_group_data, self.control_group_file)
        return

    def controller_list_frame(self, parent):
        """Build the left frame: controller list plus the "New" button."""
        layout = pm.frameLayout(
            "{}ControllerListFrameLayout".format(self.name),
            bgs=True, mh=10, mw=10,
            p=parent,
            label=("{} Controllers".format(self.name).title())
        )
        self.controller_list_widget = pm.textScrollList(
            "{}ControllerListWidget".format(self.name), w=120, h=130,
            a=self.get_controller_list(),
            sc=lambda *args: self.select_controller())
        pm.popupMenu()
        pm.menuItem(label=u"创建测试代理体", c=lambda *args: self.build_test_proxy())
        pm.button("{}ControllerBuildBtn".format(self.name), label="New", w=100,
                  c=lambda *args: self.new_controller())
        pm.setParent(layout)
        return layout

    def controller_meta_frame(self, parent):
        """Build the meta frame: name/bone fields, offsets, axis boxes, update button."""
        layout = pm.frameLayout(
            "{}ControllerMetaFrameLayout".format(self.name), bgs=True, mh=10, mw=10, p=parent,
            label=("{} meta".format(self.name).title()))
        pm.radioButtonGrp("{}ControllerSideField".format(self.name),
                          label=u'控制器位置',
                          numberOfRadioButtons=2, labelArray2=['Middle', 'LF And RT'], cw3=[140, 80, 80])
        self.controller_name_widget = pm.textFieldGrp("{}ControllerNameField".format(self.name), label=u"名字")
        self.controller_bone_widget = pm.textFieldGrp("{}ControllerBoneNameField".format(self.name), label=u"挂点骨骼")
        self.controller_offset_widget = pm.floatFieldGrp("{}ControllerPositionOffsetField".format(self.name),
                                                         label=u'位置偏移', numberOfFields=3,
                                                         value1=0.0, value2=0.0, value3=0.0, cw4=[140, 50, 50, 50])
        self.controller_axis_widget = pm.checkBoxGrp("{}ControllerAxisControlField".format(self.name),
                                                     label=u'控制滑竿',
                                                     numberOfCheckBoxes=3, labelArray3=['XAxis', 'YAxis', 'ZAxis'],
                                                     cw4=[140, 80, 80, 80])
        pm.button("{}ControllerMetaUpdateBtn".format(self.name),
                  label=u"更新", c=lambda *args: self.update_controller())
        pm.setParent(layout)
        return layout

    def axis_tab(self, parent, controller, axis):
        """Build one axis tab: joint list on the left, Max/Min capture buttons on the right."""
        layout = pm.formLayout("{}_{}_FormLayout".format(controller, axis), p=parent)
        joint_list_frame = pm.frameLayout(label="Joint List", p=layout)
        pm.textScrollList("{}_{}_JointListWidget".format(controller, axis.title()), w=120, h=180, ams=True,
                          sc=lambda *args: self.select_joint("{}_{}_JointListWidget".format(controller, axis.title())))
        pm.popupMenu()
        pm.menuItem(label=u"添加骨骼", c=lambda *args: self.add_axis_joints())
        pm.setParent(joint_list_frame)
        joint_meta_frame = pm.frameLayout(label="Joint Meta", p=layout)
        pm.button(label=u"Update Max",
                  c=lambda *args: self.update_module_controller_group(controller=controller, axis=axis, value="Max"))
        pm.button(label=u"Update Min",
                  c=lambda *args: self.update_module_controller_group(controller=controller, axis=axis, value="Min"))
        pm.setParent("..")
        pm.formLayout(layout, edit=True,
                      attachForm=[
                          (joint_list_frame, 'top', 10),
                          (joint_list_frame, 'left', 10),
                          (joint_list_frame, 'bottom', 10),
                          (joint_meta_frame, 'top', 10),
                          (joint_meta_frame, 'right', 10),
                          (joint_meta_frame, 'bottom', 10),
                      ],
                      attachControl=[
                          (joint_meta_frame, 'left', 5, joint_list_frame),
                      ])
        pm.setParent(layout)
        return layout

    def refresh_axis_tab(self, controller, axis):
        """Fill the axis tab's joint list from the stored bone range."""
        data = self.get_module_controller_group(controller=controller, axis=axis)
        joint_list = [bone["BoneName"] for bone in data["BoneRange"]]
        pm.textScrollList("{}_{}_JointListWidget".format(controller, axis.title()), e=True, a=joint_list)

    def controller_group_frame(self, parent):
        """Build the bottom frame holding the per-axis tab layout."""
        layout = pm.frameLayout("{}ControlJointListFrameLayout".format(self.name),
                                bgs=True, mh=10, mw=10, p=parent,
                                label=("{} controller group".format(self.name).title()))
        self.controller_group_tablayout = pm.tabLayout("{}ControlJointListTabLayout".format(self.name), p=layout)
        pm.setParent("..")
        pm.setParent(layout)
        return layout

    def build_widget(self, parent):
        """Build the collapsed module frame: controller list, meta panel, group panel."""
        layout = pm.frameLayout(p=parent, label=self.name, cll=True, cl=True, mw=10, mh=5)
        form = pm.formLayout("{}FormTabLayout".format(self.name), p=layout)
        controller_list_frame = self.controller_list_frame(parent=form)
        controller_meta_frame = self.controller_meta_frame(parent=form)
        controller_group_frame = self.controller_group_frame(parent=form)
        pm.formLayout(
            form, edit=True,
            attachForm=[
                (controller_list_frame, 'top', 0),
                (controller_list_frame, 'left', 0),
                (controller_meta_frame, 'top', 0),
                (controller_meta_frame, 'right', 0),
                (controller_group_frame, 'left', 10),
                (controller_group_frame, 'right', 10),
                (controller_group_frame, 'bottom', 10),
            ],
            attachControl=[
                (controller_meta_frame, 'left', 5, controller_list_frame),
                (controller_group_frame, 'top', 5, controller_meta_frame),
            ])
        pm.setParent(form)
        pm.setParent(layout)
        return layout

    def select_controller(self):
        """Selection callback: refresh meta widgets and rebuild the X/Y/Z axis tabs."""
        selected_index = pm.textScrollList(self.controller_list_widget, q=True, sii=True)[0]
        selected_controller = pm.textScrollList(self.controller_list_widget, q=True, si=True)[0]
        controller_data = self.controller_detail(selected_index - 1)
        self.refresh_meta_data(controller_data)
        tab_list = pm.tabLayout(self.controller_group_tablayout, q=True, ca=True)
        if tab_list is not None:
            for tab in tab_list:
                pm.deleteUI(tab)
        for axis_side in ["x", "y", "z"]:
            axis_tab = self.axis_tab(parent=self.controller_group_tablayout,
                                     controller=selected_controller,
                                     axis=axis_side)
            self.refresh_axis_tab(
                controller=selected_controller,
                axis=axis_side)
            pm.tabLayout(self.controller_group_tablayout, e=True,
                         tabLabel=(axis_tab, "{}_{}".format(selected_controller, axis_side.title())))
        self.context_controller = selected_controller
        return self.context_controller

    def select_joint(self, widget):
        """Select in the scene whatever joints are highlighted in *widget*."""
        pm.select(pm.textScrollList(widget, q=True, si=True))
        return

    def refresh_meta_data(self, data):
        """Push a controller data dict into the meta widgets (offsets shown x100)."""
        pm.textFieldGrp(self.controller_name_widget, e=True, text=data["ControllerName"])
        pm.textFieldGrp(self.controller_bone_widget, e=True, text=data["ControllerBoneName"])
        pm.floatFieldGrp(self.controller_offset_widget, e=True,
                         value1=data["ControllerPositionOffset"][0] * 100,
                         value2=data["ControllerPositionOffset"][1] * 100,
                         value3=data["ControllerPositionOffset"][2] * 100)
        # An empty axis string means that slider axis is disabled.
        if data["AxisControl"]["XAxis"] == "":
            axis_x = False
        else:
            axis_x = True
        if data["AxisControl"]["YAxis"] == "":
            axis_y = False
        else:
            axis_y = True
        if data["AxisControl"]["ZAxis"] == "":
            axis_z = False
        else:
            axis_z = True
        pm.checkBoxGrp(self.controller_axis_widget, e=True, value1=axis_x, value2=axis_y, value3=axis_z)

    def build_test_proxy(self):
        """Create a test locator with one [-1, 1] driven attribute per axis
        of the currently selected controller."""
        if not pm.objExists("TestProxyGrp"):
            pm.createNode("transform", name="TestProxyGrp")
        test_controller = pm.spaceLocator(name="Test_{}".format(self.context_controller))
        pm.parent(test_controller, "TestProxyGrp")
        for axis in ["x", "y", "z"]:
            control_group = self.get_module_controller_group(controller=self.context_controller, axis=axis)
            attr_name = "{}_{}".format(self.context_controller, axis.title())
            pm.addAttr(test_controller, ln=attr_name, at="double", dv=0, min=-1, max=1)
            pm.setAttr("{}.{}".format(test_controller, attr_name), e=True, k=True)
            sdk_bone(source="{}.{}".format(test_controller, attr_name), target_data=control_group)
        return

    def new_controller(self):
        u"""Append a default controller named "control" and persist both JSON files.

        NOTE(review): the group data is stored under the hard-coded key
        "control" — creating a second new controller before renaming the
        first would overwrite it. Confirm whether that is intended.

        :return: True
        """
        default_control_data = {
            "ControllerPositionOffset": [0.0, 0.0, 0.0],
            "ControllerGroupName": "{}ControlGroup".format(self.name),
            "ControllerBoneName": "",
            "AxisControl": {
                "ZAxis": "",
                "XAxis": "",
                "YAxis": ""
            },
            "ControllerName": "control"
        }
        self.controller_data['{}Controller'.format(self.name)].append(default_control_data)
        xd_com.write_json(self.controller_data, self.control_file)
        default_control_joint_group = [
            {
                "BoneRange": [],
                "GroupName": "control_X"
            },
            {
                "BoneRange": [],
                "GroupName": "control_Y"
            },
            {
                "BoneRange": [],
                "GroupName": "control_Z"
            }
        ]
        default_control_group_data = {
            "ControlGroup": default_control_joint_group,
            "GroupName": "{}ControlGroup".format(self.name),
            "shapeType": "control"
        }
        self.control_group_data["control"] = default_control_group_data
        xd_com.write_json(self.control_group_data, self.control_group_file)
        return True

    def update_controller(self):
        u"""Write the edited meta data back to both JSON files.

        Rebuilds the meta dict from the widgets, renames the control-group
        record (and its per-axis group names) when the controller name was
        changed, persists both files and refreshes the list selection.

        :return: True
        """
        meta_data = {}
        controller_name = pm.textFieldGrp(self.controller_name_widget, q=True, text=True)
        meta_data["ControllerName"] = controller_name
        meta_data["ControllerBoneName"] = pm.textFieldGrp(self.controller_bone_widget, q=True, text=True)
        meta_data["ControllerGroupName"] = "{}ControlGroup".format(self.name)
        meta_data["ControllerPositionOffset"] = pm.floatFieldGrp(self.controller_offset_widget, q=True, value=True)
        meta_data["AxisControl"] = {}
        if pm.checkBoxGrp("{}ControllerAxisControlField".format(self.name), q=True, v1=True):
            meta_data["AxisControl"]["XAxis"] = "{}_X".format(controller_name)
        else:
            meta_data["AxisControl"]["XAxis"] = ""
        if pm.checkBoxGrp("{}ControllerAxisControlField".format(self.name), q=True, v2=True):
            meta_data["AxisControl"]["YAxis"] = "{}_Y".format(controller_name)
        else:
            meta_data["AxisControl"]["YAxis"] = ""
        if pm.checkBoxGrp("{}ControllerAxisControlField".format(self.name), q=True, v3=True):
            meta_data["AxisControl"]["ZAxis"] = "{}_Z".format(controller_name)
        else:
            meta_data["AxisControl"]["ZAxis"] = ""
        select_index = pm.textScrollList(self.controller_list_widget, q=True, sii=True)[0]
        select_control = pm.textScrollList(self.controller_list_widget, q=True, si=True)[0]
        self.controller_data["{}Controller".format(self.name)][select_index - 1] = meta_data
        # print(select_control)
        # print(self.control_group_data)
        control_data = self.control_group_data[select_control]
        control_data["shapeType"] = controller_name
        control_data["GroupName"] = "{}ControlGroup".format(self.name)
        current_controller = pm.textScrollList(
            "{}ControllerListWidget".format(self.name), q=True, si=True)[0]
        # Rename the per-axis group names to follow the new controller name.
        for control_group in control_data["ControlGroup"]:
            control_group["GroupName"] = control_group["GroupName"].replace(
                current_controller, controller_name)
        del self.control_group_data[select_control]
        self.control_group_data[controller_name] = control_data
        # print(self.control_group_data)
        xd_com.write_json(self.controller_data, self.control_file)
        xd_com.write_json(self.control_group_data, self.control_group_file)
        self.refresh_controller_list()
        pm.textScrollList(self.controller_list_widget, e=True, sii=select_index)
        # self.clean_meta_data_frame()
        #
        # all_tabs = pm.tabLayout(
        #     "{}ControlJointListTabLayout".format(self.name), q=True, ca=True)
        # if all_tabs is not None:
        #     if len(all_tabs) > 1:
        #         for tab in all_tabs:
        #             pm.deleteUI(tab)
        #
        # self.init_data()
        return True

    def refresh_controller_list(self):
        """Reload both JSON files and repopulate the controller list widget."""
        self.load_data(file_path=self.file_path)
        controller_list = self.get_controller_list()
        pm.textScrollList(self.controller_list_widget, e=True, ra=True)
        pm.textScrollList(self.controller_list_widget, e=True, a=controller_list)
        return

    def add_axis_joints(self):
        """Append the scene-selected joints to the currently open axis tab.

        NOTE(review): ``textScrollList(..., q=True, ai=True)`` returns None
        when the list is empty, which would make the ``not in`` membership
        test raise TypeError for the first joint added — confirm in Maya.
        """
        tabs = pm.tabLayout(self.controller_group_tablayout, q=True, tl=True)
        select_tab_index = pm.tabLayout(self.controller_group_tablayout, q=True, sti=True)
        current_tab = (tabs[select_tab_index - 1])
        select_joint = pm.ls(sl=True)
        for index in range(0, len(self.control_group_data[self.context_controller]["ControlGroup"])):
            if current_tab in self.control_group_data[self.context_controller]["ControlGroup"][index]["GroupName"]:
                bone_range = self.control_group_data[self.context_controller]["ControlGroup"][index]["BoneRange"]
                for joint in select_joint:
                    if joint not in pm.textScrollList("{}_JointListWidget".format(current_tab), q=True, ai=True):
                        pm.textScrollList("{}_JointListWidget".format(current_tab), e=True, a=joint)
                        joint_data = {
                            "BoneName": joint.name(),
                            "Max": [0, 0, 0, 0, 0, 0, 1, 1, 1],
                            "Min": [0, 0, 0, 0, 0, 0, 1, 1, 1],
                        }
                        bone_range.append(joint_data)
                self.control_group_data[self.context_controller]["ControlGroup"][index]["BoneRange"] = bone_range
        xd_com.write_json(self.control_group_data, self.control_group_file)
        return

    def __str__(self):
        # The module's display name doubles as its string representation.
        return self.name
| 40.177533
| 119
| 0.580048
|
4a0836aa83a5697d9da1f7723c7d635057b611fc
| 3,625
|
gyp
|
Python
|
externals/skia/gyp/svg.gyp
|
terrajobst/linux-packaging-skiasharp
|
47dbb2ff9ae01305b190f409ccea00b3b4f0bc79
|
[
"MIT"
] | 1
|
2019-10-29T14:36:32.000Z
|
2019-10-29T14:36:32.000Z
|
externals/skia/gyp/svg.gyp
|
terrajobst/linux-packaging-skiasharp
|
47dbb2ff9ae01305b190f409ccea00b3b4f0bc79
|
[
"MIT"
] | 1
|
2017-06-18T00:25:03.000Z
|
2017-11-29T16:01:48.000Z
|
externals/skia/gyp/svg.gyp
|
terrajobst/linux-packaging-skiasharp
|
47dbb2ff9ae01305b190f409ccea00b3b4f0bc79
|
[
"MIT"
] | 5
|
2017-11-30T06:06:50.000Z
|
2022-03-31T21:48:49.000Z
|
# Copyright 2015 Google Inc.
#
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# GYP build targets for Skia's SVG support:
#  - svg:    the SkSVGCanvas/SkSVGDevice SVG *output* backend
#  - svgdom: the experimental SVG DOM *input* model
{
  'targets': [
    {
      # Static library that renders Skia drawing commands to SVG.
      'target_name': 'svg',
      'product_name': 'skia_svg',
      'type': 'static_library',
      'standalone_static_library': 1,
      'dependencies': [
        'skia_lib.gyp:skia_lib',
        'xml.gyp:*',
      ],
      'include_dirs': [
        '../include/private',
        '../include/svg',
        '../src/core',
      ],
      'sources': [
        '<(skia_include_path)/svg/SkSVGCanvas.h',
        '<(skia_src_path)/svg/SkSVGCanvas.cpp',
        '<(skia_src_path)/svg/SkSVGDevice.cpp',
        '<(skia_src_path)/svg/SkSVGDevice.h',
      ],
      # Consumers only need the public SVG headers.
      'direct_dependent_settings': {
        'include_dirs': [
          '../include/svg',
        ],
      },
    },
    {
      # Static library with the experimental SVG DOM node/attribute model.
      'target_name': 'svgdom',
      'type': 'static_library',
      'standalone_static_library': 1,
      'dependencies': [
        'skia_lib.gyp:skia_lib',
        'xml.gyp:xml',
      ],
      'include_dirs': [
        '<(skia_include_path)/private',
        '../experimental/svg/model',
      ],
      'sources': [
        '../experimental/svg/model/SkSVGAttribute.h',
        '../experimental/svg/model/SkSVGAttribute.cpp',
        '../experimental/svg/model/SkSVGAttributeParser.h',
        '../experimental/svg/model/SkSVGAttributeParser.cpp',
        '../experimental/svg/model/SkSVGCircle.h',
        '../experimental/svg/model/SkSVGCircle.cpp',
        '../experimental/svg/model/SkSVGClipPath.h',
        '../experimental/svg/model/SkSVGClipPath.cpp',
        '../experimental/svg/model/SkSVGContainer.h',
        '../experimental/svg/model/SkSVGContainer.cpp',
        '../experimental/svg/model/SkSVGDefs.h',
        '../experimental/svg/model/SkSVGDOM.h',
        '../experimental/svg/model/SkSVGDOM.cpp',
        '../experimental/svg/model/SkSVGEllipse.h',
        '../experimental/svg/model/SkSVGEllipse.cpp',
        '../experimental/svg/model/SkSVGG.h',
        '../experimental/svg/model/SkSVGHiddenContainer.h',
        '../experimental/svg/model/SkSVGIDMapper.h',
        '../experimental/svg/model/SkSVGLine.h',
        '../experimental/svg/model/SkSVGLine.cpp',
        '../experimental/svg/model/SkSVGLinearGradient.h',
        '../experimental/svg/model/SkSVGLinearGradient.cpp',
        '../experimental/svg/model/SkSVGNode.h',
        '../experimental/svg/model/SkSVGNode.cpp',
        '../experimental/svg/model/SkSVGPath.h',
        '../experimental/svg/model/SkSVGPath.cpp',
        '../experimental/svg/model/SkSVGPoly.h',
        '../experimental/svg/model/SkSVGPoly.cpp',
        '../experimental/svg/model/SkSVGRect.h',
        '../experimental/svg/model/SkSVGRect.cpp',
        '../experimental/svg/model/SkSVGRenderContext.h',
        '../experimental/svg/model/SkSVGRenderContext.cpp',
        '../experimental/svg/model/SkSVGShape.h',
        '../experimental/svg/model/SkSVGShape.cpp',
        '../experimental/svg/model/SkSVGStop.h',
        '../experimental/svg/model/SkSVGStop.cpp',
        '../experimental/svg/model/SkSVGSVG.h',
        '../experimental/svg/model/SkSVGSVG.cpp',
        '../experimental/svg/model/SkSVGTransformableNode.h',
        '../experimental/svg/model/SkSVGTransformableNode.cpp',
        '../experimental/svg/model/SkSVGTypes.h',
        '../experimental/svg/model/SkSVGValue.h',
        '../experimental/svg/model/SkSVGValue.cpp',
        '../experimental/svg/model/SkPEG.h',
      ],
      'direct_dependent_settings': {
        'include_dirs': [
          '../experimental/svg/model',
        ],
      },
    },
  ],
}
| 35.539216
| 72
| 0.593103
|
4a0836cb0882c92f49f609e23f9a4d2d8cd6b288
| 9,686
|
py
|
Python
|
hospital/urls.py
|
romsha28/hospital_python
|
1bb86266223df5084321917169156aaec1c5e318
|
[
"Apache-2.0"
] | null | null | null |
hospital/urls.py
|
romsha28/hospital_python
|
1bb86266223df5084321917169156aaec1c5e318
|
[
"Apache-2.0"
] | 1
|
2021-10-18T08:56:11.000Z
|
2021-10-18T08:56:11.000Z
|
hospital/urls.py
|
romsha28/hospital_python
|
1bb86266223df5084321917169156aaec1c5e318
|
[
"Apache-2.0"
] | null | null | null |
from django.contrib import admin
from django.urls import path
from django.core.exceptions import PermissionDenied
from django.http import HttpResponse
from django.test import SimpleTestCase, override_settings
from . import views
def response_error_handler(request, exception=None):
    """Custom handler wired to handler403 below; returns fixed content with HTTP 403."""
    return HttpResponse('Error handler content', status=403)
def permission_denied_view(request):
    """View that always raises PermissionDenied, to exercise the 403 handler."""
    raise PermissionDenied
urlpatterns = [
    path('403/', permission_denied_view),
    #################################################################
    path('', views.index, name ='hospital'),
    path('dashborad', views.dashboard, name ='hospital-dashborad'),
    path('send-mail', views.sendMail, name ='hospital-sendmail'),
    path('user-list', views.getList, name ='hospital-list'),
    path('myprofile', views.getMyProfile, name ='myprofile'),
    # NOTE(review): duplicate route name 'myprofile' — reverse('myprofile')
    # resolves to only one of these two; confirm which is intended.
    path('myprofile-post', views.postMyProfile, name ='myprofile'),
    path('user-view/<int:id>', views.getView, name ='hospital-view'),
    path('user-save', views.postStore, name ='hospital-save'),
    # NOTE(review): 'user-deiele' looks like a typo for 'user-delete' in the
    # URL itself — changing it would break existing links, so only flagged here.
    path('user-deiele/<int:id>', views.getDeiele, name ='hospital-delete'),
    ############ Doctor #####################################################
    path('doctors', views.getDoctors, name ='doctors'),
    path('create-doctor', views.getCreateDoctor, name ='create-doctor'),
    path('post-doctor', views.postDoctors, name ='post-doctor'),
    path('doctor/<int:id>', views.getDoctorDetails, name ='details-doctor'),
    path('doctor-status/<int:id>', views.getDoctorStatus, name ='doctor-status'),
    path('doctor-edit/<int:id>', views.getDoctorEdit, name ='doctor-edit'),
    path('doctor-update/<int:id>', views.postDoctorUpdate, name ='doctor-update'),
    path('doctor-delete/<int:id>', views.getDoctorDelete, name ='doctor-delete'),
    path('doctor/docto-verification', views.getDoctoVerification, name ='doctor/docto-verification'),
    ############ Doctor #####################################################
    path('categories', views.getCategories, name ='categories'),
    path('subcategories', views.getSubCategories, name ='subcategories'),
    path('appointments', views.getAppointments, name ='appointments'),
    path('appointment/<int:id>', views.getAppointmentDetails, name ='appointment-details'),
    path('create-appointment', views.getCreateAppointments, name ='create-appointment'),
    path('store-appointment', views.postAppointments, name ='store-appointment'),
    path('status-appointment/<int:id>', views.getAppointmentStatus, name ='status-appointment'),
    path('appointment-edit/<int:id>', views.getAppointmentEdit, name ='appointment-edit'),
    path('appointment-update/<int:id>', views.postAppointmentUpdate, name ='appointment-update'),
    ############ treatments #####################################################
    path('treatments', views.getTreatments, name ='treatments'),
    path('treatment-create', views.getCreateTreatments, name ='treatment-create'),
    path('treatment-post', views.postStoreTreatments, name ='treatment-post'),
    path('treatment/<int:id>', views.getViewTreatments, name ='treatment-details'),
    path('treatment-status/<int:id>', views.getStatusTreatments, name ='treatment-status'),
    path('treatment-edit/<int:id>', views.getEditTreatments, name ='treatment-edit'),
    path('treatment-update/<int:id>', views.postUpdateTreatments, name ='treatment-update'),
    path('treatment-delete/<int:id>', views.getDeleteTreatments, name ='treatment-delete'),
    ############ treatments #####################################################
    path('treatment-categories', views.getTreatmentCategories, name ='treatment-categories'),
    path('treatment-categories-create', views.getCreateTreatmentCategories, name ='treatment-categories-create'),
    path('treatment-categories-post', views.postStoreTreatmentCategories, name ='treatment-categories-post'),
    path('treatment-categories/<int:id>', views.getViewTreatmentCategories, name ='treatment-categories-details'),
    path('treatment-categories-status/<int:id>', views.getStatusTreatmentCategories, name ='treatment-categories-status'),
    path('treatment-categories-edit/<int:id>', views.getEditTreatmentCategories, name ='treatment-categories-edit'),
    path('treatment-categories-update/<int:id>', views.postUpdateTreatmentCategories, name ='treatment-categories-update'),
    path('treatment-categories-delete/<int:id>', views.getDeleteTreatmentCategories, name ='treatment-categories-delete'),
    ############ patients #####################################################
    path('patients', views.getPatients, name ='patients'),
    path('patient-create', views.getCreatePatient, name ='patient-create'),
    path('patient-post', views.postStorePatient, name ='patient-post'),
    path('patient/<int:id>', views.getViewPatient, name ='patient-details'),
    path('patient-status/<int:id>', views.getStatusPatient, name ='patient-status'),
    path('patient-edit/<int:id>', views.getEditPatient, name ='patient-edit'),
    path('patient-update/<int:id>', views.postUpdatePatient, name ='patient-update'),
    path('patient-delete/<int:id>', views.getDeletePatient, name ='patient-delete'),
    ############ Plan #####################################################
    path('plans', views.getPlans, name ='plans'),
    path('plan-create', views.getCreatePlans, name ='plan-create'),
    path('plan-post', views.postStorePlans, name ='plan-post'),
    path('plan/<int:id>', views.getViewPlans, name ='plan-details'),
    path('plan-status/<int:id>', views.getStatusPlans, name ='plan-status'),
    path('plan-edit/<int:id>', views.getEditPlans, name ='plan-edit'),
    path('plan-update/<int:id>', views.postUpdatePlans, name ='plan-update'),
    path('plan-delete/<int:id>', views.getDeletePlans, name ='plan-delete'),
    path('plan-subscriptions', views.getPlanSubscriptions, name ='plan-subscriptions'),
    path('plan-subscriptions/<int:id>', views.getViewPlanSubscriptions, name ='plan-subscriptions-details'),
    path('plan-subscriptions-status/<int:id>', views.getStatusPlanSubscriptions, name ='plan-subscriptions-status'),
    ##########################################################################################################
    # Website
    # Website blogs
    # NOTE(review): the blog routes below reuse the plan route names
    # ('plans', 'plan-create', ...), shadowing the Plan section above for
    # reverse()/{% url %} lookups — almost certainly a copy-paste bug.
    # Renaming them would change reverse() behavior, so only flagged here.
    path('blogs', views.getBlogs, name ='plans'),
    path('blog-create', views.getCreateBlogs, name ='plan-create'),
    path('blog-post', views.postStoreBlogs, name ='plan-post'),
    path('blog/<int:id>', views.getViewBlogs, name ='plan-details'),
    path('blog-status/<int:id>', views.getStatusBlogs, name ='plan-status'),
    path('blog-edit/<int:id>', views.getEditBlogs, name ='plan-edit'),
    path('blog-update/<int:id>', views.postUpdateBlogs, name ='plan-update'),
    path('blog-delete/<int:id>', views.getDeleteBlogs, name ='plan-delete'),
    # Website Banners
    # NOTE(review): same duplicated-name issue as the blog routes above.
    path('banners', views.getBanners, name ='plans'),
    path('banner-create', views.getCreateBanners, name ='plan-create'),
    path('banner-post', views.postStoreBanners, name ='plan-post'),
    path('banner/<int:id>', views.getViewBanners, name ='plan-details'),
    path('banner-status/<int:id>', views.getStatusBanners, name ='plan-status'),
    path('banner-edit/<int:id>', views.getEditBanners, name ='plan-edit'),
    path('banner-update/<int:id>', views.postUpdateBanners, name ='plan-update'),
    path('banner-delete/<int:id>', views.getDeleteBanners, name ='plan-delete'),
    # Website pages
    path('pages', views.getPages, name ='pages'),
    path('page-create', views.getCreatePages, name ='page-create'),
    path('page-post', views.postStorePages, name ='page-post'),
    path('page/<int:id>', views.getViewPages, name ='page-details'),
    path('page-status/<int:id>', views.getStatusPages, name ='page-status'),
    path('page-edit/<int:id>', views.getEditPages, name ='page-edit'),
    path('page-update/<int:id>', views.postUpdatePages, name ='page-update'),
    path('page-delete/<int:id>', views.getDeletePages, name ='page-delete'),
    # Website privacy policy
    path('policy', views.getPolicy, name ='policy'),
    path('policy-create', views.getCreatePolicy, name ='policy-create'),
    path('policy-post', views.postStorePolicy, name ='policy-post'),
    path('policy/<int:id>', views.getViewPolicy, name ='policy-details'),
    path('policy-status/<int:id>', views.getStatusPolicy, name ='policy-status'),
    path('policy-edit/<int:id>', views.getEditPolicy, name ='policy-edit'),
    path('policy-update/<int:id>', views.postUpdatePolicy, name ='policy-update'),
    path('policy-delete/<int:id>', views.getDeletePolicy, name ='policy-delete'),
    # settings
    path('settings', views.getSettings, name ='settings'),
    ############ End #####################################################
]
# Override Django's default 403 view with the custom handler defined above.
handler403 = response_error_handler
# #The page_not_found() view is overridden by handler404:
# handler404 = 'mysite.views.my_custom_page_not_found_view'
# #The server_error() view is overridden by handler500:
# handler500 = 'mysite.views.my_custom_error_view'
# #The permission_denied() view is overridden by handler403:
# handler403x = 'mysite.views.my_custom_permission_denied_view'
# #The bad_request() view is overridden by handler400:
# handler400 = 'mysite.views.my_custom_bad_request_view'
# ROOT_URLCONF must specify the module that contains handler403 = ...
@override_settings(ROOT_URLCONF=__name__)
class CustomErrorHandlerTests(SimpleTestCase):
    """Exercise the custom ``handler403`` registered in this URLconf module."""

    def test_handler_renders_template_response(self):
        # NOTE(review): assumes a '/403/' URL pattern exists that raises
        # PermissionDenied — confirm; no such pattern is visible in this file.
        response = self.client.get('/403/')
        # Make assertions on the response here. For example:
        self.assertContains(response, 'Error handler content', status_code=403)
| 64.573333
| 123
| 0.6688
|
4a083718d134ede122d5a761c592318a9fa5748e
| 14,574
|
py
|
Python
|
tests/test_extra.py
|
nicoddemus/promise
|
4627315476f6b9fc82818327ae09b04f89f9bda7
|
[
"MIT"
] | 339
|
2016-05-18T11:25:39.000Z
|
2022-03-27T08:15:53.000Z
|
tests/test_extra.py
|
syrusakbary/pypromise
|
4627315476f6b9fc82818327ae09b04f89f9bda7
|
[
"MIT"
] | 81
|
2016-05-24T17:07:49.000Z
|
2021-12-20T15:39:52.000Z
|
tests/test_extra.py
|
syrusakbary/pypromise
|
4627315476f6b9fc82818327ae09b04f89f9bda7
|
[
"MIT"
] | 76
|
2016-05-24T16:55:06.000Z
|
2022-03-19T12:42:44.000Z
|
# This exercises some capabilities above and beyond
# the Promises/A+ test suite
from time import sleep
from pytest import raises, fixture
from threading import Event
from promise import (
Promise,
is_thenable,
promisify,
promise_for_dict as free_promise_for_dict,
)
from concurrent.futures import Future
from threading import Thread
from .utils import assert_exception
class DelayedFulfill(Thread):
    """Background thread that resolves promise *p* with value *v* after *d* seconds."""

    def __init__(self, d, p, v):
        super().__init__()
        # Attribute names are part of how the rest of the module reads us.
        self.delay = d
        self.promise = p
        self.value = v

    def run(self):
        # Wait out the configured delay, then fulfill the promise.
        sleep(self.delay)
        self.promise.do_resolve(self.value)
class DelayedRejection(Thread):
    """Background thread that rejects promise *p* with reason *r* after *d* seconds."""

    def __init__(self, d, p, r):
        super().__init__()
        # Attribute names are part of how the rest of the module reads us.
        self.delay = d
        self.promise = p
        self.reason = r

    def run(self):
        # Wait out the configured delay, then reject the promise.
        sleep(self.delay)
        self.promise.do_reject(self.reason)
class FakeThenPromise:
    """A thenable-looking object whose ``then`` can be made to blow up.

    Used to verify that promise machinery copes with broken thenables.
    """

    def __init__(self, raises=True):
        self.raises = raises

    def then(self, s=None, f=None):
        # Guard clause: the well-behaved variant simply does nothing.
        if not self.raises:
            return None
        raise Exception("FakeThenPromise raises in 'then'")
def df(value, dtime):
    """Return a promise that is fulfilled with *value* after *dtime* seconds."""
    delayed = Promise()
    DelayedFulfill(dtime, delayed, value).start()
    return delayed
def dr(reason, dtime):
    """Return a promise that is rejected with *reason* after *dtime* seconds."""
    delayed = Promise()
    DelayedRejection(dtime, delayed, reason).start()
    return delayed
# Static methods
def test_fulfilled():
p = Promise.fulfilled(4)
assert p.is_fulfilled
assert p.get() == 4
def test_rejected():
p = Promise.rejected(Exception("Static rejected"))
assert p.is_rejected
with raises(Exception) as exc_info:
p.get()
assert str(exc_info.value) == "Static rejected"
# Fulfill
def test_fulfill_self():
p = Promise()
with raises(TypeError) as excinfo:
p.do_resolve(p)
p.get()
# Exceptions
def test_exceptions():
def throws(v):
assert False
p1 = Promise()
p1.then(throws)
p1.do_resolve(5)
p2 = Promise()
p2.catch(throws)
p2.do_reject(Exception())
with raises(Exception) as excinfo:
p2.get()
def test_thrown_exceptions_have_stacktrace():
def throws(v):
assert False
p3 = Promise.resolve("a").then(throws)
with raises(AssertionError) as assert_exc:
p3.get()
assert assert_exc.traceback[-1].path.strpath == __file__
def test_thrown_exceptions_preserve_stacktrace():
def throws(v):
assert False
def after_throws(v):
pass
p3 = Promise.resolve("a").then(throws).then(after_throws)
with raises(AssertionError) as assert_exc:
p3.get()
assert assert_exc.traceback[-1].path.strpath == __file__
# WAIT
# def test_wait_when():
# p1 = df(5, 0.01)
# assert p1.is_pending
# p1._wait()
# assert p1.is_fulfilled
def test_wait_if():
p1 = Promise()
p1.do_resolve(5)
p1._wait()
assert p1.is_fulfilled
# def test_wait_timeout():
# p1 = df(5, 0.1)
# assert p1.is_pending
# with raises(Exception) as exc_info:
# p1._wait(timeout=0.05)
# assert str(exc_info.value) == "Timeout"
# assert p1.is_pending
# p1._wait()
# assert p1.is_fulfilled
# # GET
# def test_get_when():
# p1 = df(5, 0.01)
# assert p1.is_pending
# v = p1.get()
# assert p1.is_fulfilled
# assert 5 == v
def test_get_if():
p1 = Promise()
p1.do_resolve(5)
v = p1.get()
assert p1.is_fulfilled
assert 5 == v
# def test_get_timeout():
# p1 = df(5, 0.1)
# assert p1.is_pending
# with raises(Exception) as exc_info:
# p1._wait(timeout=0.05)
# assert str(exc_info.value) == "Timeout"
# assert p1.is_pending
# v = p1.get()
# assert p1.is_fulfilled
# assert 5 == v
# Promise.all
def test_promise_all_when():
p1 = Promise()
p2 = Promise()
pl = Promise.all([p1, p2])
assert p1.is_pending
assert p2.is_pending
assert pl.is_pending
p1.do_resolve(5)
p1._wait()
assert p1.is_fulfilled
assert p2.is_pending
assert pl.is_pending
p2.do_resolve(10)
p2._wait()
pl._wait()
assert p1.is_fulfilled
assert p2.is_fulfilled
assert pl.is_fulfilled
assert 5 == p1.get()
assert 10 == p2.get()
assert 5 == pl.get()[0]
assert 10 == pl.get()[1]
def test_promise_all_when_mixed_promises():
p1 = Promise()
p2 = Promise()
pl = Promise.all([p1, 32, p2, False, True])
assert p1.is_pending
assert p2.is_pending
assert pl.is_pending
p1.do_resolve(5)
p1._wait()
assert p1.is_fulfilled
assert p2.is_pending
assert pl.is_pending
p2.do_resolve(10)
p2._wait()
pl._wait()
assert p1.is_fulfilled
assert p2.is_fulfilled
assert pl.is_fulfilled
assert 5 == p1.get()
assert 10 == p2.get()
assert pl.get() == [5, 32, 10, False, True]
def test_promise_all_when_if_no_promises():
pl = Promise.all([10, 32, False, True])
assert pl.get() == [10, 32, False, True]
def test_promise_all_if():
p1 = Promise()
p2 = Promise()
pd1 = Promise.all([p1, p2])
pd2 = Promise.all([p1])
pd3 = Promise.all([])
pd3._wait()
assert p1.is_pending
assert p2.is_pending
assert pd1.is_pending
assert pd2.is_pending
assert pd3.is_fulfilled
p1.do_resolve(5)
p1._wait()
pd2._wait()
assert p1.is_fulfilled
assert p2.is_pending
assert pd1.is_pending
assert pd2.is_fulfilled
p2.do_resolve(10)
p2._wait()
pd1._wait()
pd2._wait()
assert p1.is_fulfilled
assert p2.is_fulfilled
assert pd1.is_fulfilled
assert pd2.is_fulfilled
assert 5 == p1.get()
assert 10 == p2.get()
assert 5 == pd1.get()[0]
assert 5 == pd2.get()[0]
assert 10 == pd1.get()[1]
assert [] == pd3.get()
# promise_for_dict
@fixture(params=[Promise.for_dict, free_promise_for_dict])
def promise_for_dict(request):
return request.param
def test_dict_promise_when(promise_for_dict):
p1 = Promise()
p2 = Promise()
d = {"a": p1, "b": p2}
pd1 = promise_for_dict(d)
pd2 = promise_for_dict({"a": p1})
pd3 = promise_for_dict({})
assert p1.is_pending
assert p2.is_pending
assert pd1.is_pending
assert pd2.is_pending
pd3._wait()
assert pd3.is_fulfilled
p1.do_resolve(5)
p1._wait()
pd2._wait()
assert p1.is_fulfilled
assert p2.is_pending
assert pd1.is_pending
assert pd2.is_fulfilled
p2.do_resolve(10)
p2._wait()
pd1._wait()
assert p1.is_fulfilled
assert p2.is_fulfilled
assert pd1.is_fulfilled
assert pd2.is_fulfilled
assert 5 == p1.get()
assert 10 == p2.get()
assert 5 == pd1.get()["a"]
assert 5 == pd2.get()["a"]
assert 10 == pd1.get()["b"]
assert {} == pd3.get()
def test_dict_promise_if(promise_for_dict):
p1 = Promise()
p2 = Promise()
d = {"a": p1, "b": p2}
pd = promise_for_dict(d)
assert p1.is_pending
assert p2.is_pending
assert pd.is_pending
p1.do_resolve(5)
p1._wait()
assert p1.is_fulfilled
assert p2.is_pending
assert pd.is_pending
p2.do_resolve(10)
p2._wait()
assert p1.is_fulfilled
assert p2.is_fulfilled
# pd._wait()
# assert pd.is_fulfilled
# assert 5 == p1.get()
# assert 10 == p2.get()
# assert 5 == pd.get()["a"]
# assert 10 == pd.get()["b"]
def test_done():
counter = [0]
r = Promise()
def inc(_):
counter[0] += 1
def dec(_):
counter[0] -= 1
def end(_):
r.do_resolve(None)
p = Promise()
p.done(inc, dec)
p.done(inc, dec)
p.done(end)
p.do_resolve(4)
Promise.wait(r)
assert counter[0] == 2
r = Promise()
counter = [0]
p = Promise()
p.done(inc, dec)
p.done(inc, dec)
p.done(None, end)
p.do_reject(Exception())
Promise.wait(r)
assert counter[0] == -2
def test_done_all():
counter = [0]
def inc(_):
counter[0] += 1
def dec(_):
counter[0] -= 1
p = Promise()
r = Promise()
p.done_all()
p.done_all([(inc, dec)])
p.done_all(
[
(inc, dec),
(inc, dec),
{"success": inc, "failure": dec},
lambda _: r.do_resolve(None),
]
)
p.do_resolve(4)
Promise.wait(r)
assert counter[0] == 4
p = Promise()
r = Promise()
p.done_all()
p.done_all([inc])
p.done_all([(inc, dec)])
p.done_all(
[
(inc, dec),
{"success": inc, "failure": dec},
(None, lambda _: r.do_resolve(None)),
]
)
p.do_reject(Exception("Uh oh!"))
Promise.wait(r)
assert counter[0] == 1
def test_then_all():
p = Promise()
handlers = [
((lambda x: x * x), (lambda r: 1)),
{"success": (lambda x: x + x), "failure": (lambda r: 2)},
]
results = (
p.then_all()
+ p.then_all([lambda x: x])
+ p.then_all([(lambda x: x * x, lambda r: 1)])
+ p.then_all(handlers)
)
p.do_resolve(4)
assert [r.get() for r in results] == [4, 16, 16, 8]
p = Promise()
handlers = [
((lambda x: x * x), (lambda r: 1)),
{"success": (lambda x: x + x), "failure": (lambda r: 2)},
]
results = (
p.then_all()
+ p.then_all([(lambda x: x * x, lambda r: 1)])
+ p.then_all(handlers)
)
p.do_reject(Exception())
assert [r.get() for r in results] == [1, 1, 2]
def test_do_resolve():
p1 = Promise(lambda resolve, reject: resolve(0))
assert p1.get() == 0
assert p1.is_fulfilled
def test_do_resolve_fail_on_call():
def raises(resolve, reject):
raise Exception("Fails")
p1 = Promise(raises)
assert not p1.is_fulfilled
assert str(p1.reason) == "Fails"
def test_catch():
    # 1/0 inside the first `then` raises ZeroDivisionError; `catch` converts
    # the rejection back into a fulfilled promise carrying the exception,
    # and the final `then` maps it to its type.
    p1 = Promise(lambda resolve, reject: resolve(0))
    p2 = p1.then(lambda value: 1 / value).catch(lambda e: e).then(lambda e: type(e))
    assert p2.get() == ZeroDivisionError
    assert p2.is_fulfilled
def test_is_thenable_promise():
promise = Promise()
assert is_thenable(promise)
def test_is_thenable_then_object():
promise = FakeThenPromise()
assert not is_thenable(promise)
def test_is_thenable_future():
promise = Future()
assert is_thenable(promise)
def test_is_thenable_simple_object():
assert not is_thenable(object())
@fixture(params=[Promise.resolve])
def resolve(request):
return request.param
def test_resolve_promise(resolve):
promise = Promise()
assert resolve(promise) == promise
def test_resolve_then_object(resolve):
promise = FakeThenPromise(raises=False)
p = resolve(promise)
assert isinstance(p, Promise)
def test_resolve_future(resolve):
future = Future()
promise = resolve(future)
assert promise.is_pending
future.set_result(1)
assert promise.get() == 1
assert promise.is_fulfilled
def test_resolve_future_rejected(resolve):
future = Future()
promise = resolve(future)
assert promise.is_pending
future.set_exception(Exception("Future rejected"))
assert promise.is_rejected
assert_exception(promise.reason, Exception, "Future rejected")
def test_resolve_object(resolve):
val = object()
promised = resolve(val)
assert isinstance(promised, Promise)
assert promised.get() == val
def test_resolve_promise_subclass():
class MyPromise(Promise):
pass
p = Promise()
p.do_resolve(10)
m_p = MyPromise.resolve(p)
assert isinstance(m_p, MyPromise)
assert m_p.get() == p.get()
def test_promise_repr_pending():
    """A pending promise's repr shows its identity and the 'pending' state."""
    # NOTE: a second, identical copy of test_promise_repr_fulfilled used to be
    # defined under this test's name right below, shadowing this function so
    # the pending-repr case never actually ran. The duplicate is removed; the
    # fulfilled case is already covered by test_promise_repr_fulfilled.
    promise = Promise()
    assert repr(promise) == "<Promise at {} pending>".format(hex(id(promise)))
def test_promise_repr_fulfilled():
val = {1: 2}
promise = Promise.fulfilled(val)
promise._wait()
assert repr(promise) == "<Promise at {} fulfilled with {}>".format(
hex(id(promise)), repr(val)
)
def test_promise_repr_rejected():
err = Exception("Error!")
promise = Promise.rejected(err)
promise._wait()
assert repr(promise) == "<Promise at {} rejected with {}>".format(
hex(id(promise)), repr(err)
)
def test_promise_loop():
def by_two(result):
return result * 2
def executor(resolve, reject):
resolve(Promise.resolve(1).then(lambda v: Promise.resolve(v).then(by_two)))
p = Promise(executor)
assert p.get(.1) == 2
def test_resolve_future_like(resolve):
class CustomThenable(object):
def add_done_callback(self, f):
f(True)
def done(self):
return True
def exception(self):
pass
def result(self):
return True
instance = CustomThenable()
promise = resolve(instance)
assert promise.get() == True
def sum_function(a, b):
return a + b
def test_promisify_function_resolved(resolve):
promisified_func = promisify(sum_function)
result = promisified_func(1, 2)
assert isinstance(result, Promise)
assert result.get() == 3
def test_promisify_function_rejected(resolve):
promisified_func = promisify(sum_function)
result = promisified_func(None, None)
assert isinstance(result, Promise)
with raises(Exception) as exc_info_promise:
result.get()
with raises(Exception) as exc_info:
sum_function(None, None)
assert str(exc_info_promise.value) == str(exc_info.value)
def test_promises_with_only_then():
context = {"success": False}
error = RuntimeError("Ooops!")
promise1 = Promise(
lambda resolve, reject: context.update({"promise1_reject": reject})
)
promise2 = promise1.then(lambda x: None)
promise3 = promise1.then(lambda x: None)
context["promise1_reject"](error)
promise2._wait()
promise3._wait()
assert promise2.reason == error
assert promise3.reason == error
def test_promises_promisify_still_works_but_deprecated_for_non_callables():
x = promisify(1)
assert isinstance(x, Promise)
assert x.get() == 1
# def test_promise_loop():
# values = Promise.resolve([1, None, 2])
# def on_error(error):
# error
# def executor(resolve, reject):
# resolve(Promise.resolve(values).then(lambda values: Promise.all([Promise.resolve(values[0])]).catch(on_error)))
# p = Promise(executor)
# assert p.get(.1) == 2
| 21.719821
| 121
| 0.619665
|
4a08376c13e6d7f0a8bd52e834bc9b4f1f10ae69
| 18,365
|
py
|
Python
|
sdk/python/pulumi_azure_native/network/v20200801/__init__.py
|
pulumi-bot/pulumi-azure-native
|
f7b9490b5211544318e455e5cceafe47b628e12c
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_native/network/v20200801/__init__.py
|
pulumi-bot/pulumi-azure-native
|
f7b9490b5211544318e455e5cceafe47b628e12c
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_native/network/v20200801/__init__.py
|
pulumi-bot/pulumi-azure-native
|
f7b9490b5211544318e455e5cceafe47b628e12c
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
# Export this package's modules as members:
from ._enums import *
from .application_gateway import *
from .application_gateway_private_endpoint_connection import *
from .application_security_group import *
from .azure_firewall import *
from .bastion_host import *
from .connection_monitor import *
from .custom_ip_prefix import *
from .ddos_custom_policy import *
from .ddos_protection_plan import *
from .dscp_configuration import *
from .express_route_circuit import *
from .express_route_circuit_authorization import *
from .express_route_circuit_connection import *
from .express_route_circuit_peering import *
from .express_route_connection import *
from .express_route_cross_connection_peering import *
from .express_route_gateway import *
from .express_route_port import *
from .firewall_policy import *
from .firewall_policy_rule_collection_group import *
from .flow_log import *
from .get_active_sessions import *
from .get_application_gateway import *
from .get_application_gateway_backend_health_on_demand import *
from .get_application_gateway_private_endpoint_connection import *
from .get_application_security_group import *
from .get_azure_firewall import *
from .get_bastion_host import *
from .get_bastion_shareable_link import *
from .get_connection_monitor import *
from .get_custom_ip_prefix import *
from .get_ddos_custom_policy import *
from .get_ddos_protection_plan import *
from .get_dscp_configuration import *
from .get_express_route_circuit import *
from .get_express_route_circuit_authorization import *
from .get_express_route_circuit_connection import *
from .get_express_route_circuit_peering import *
from .get_express_route_connection import *
from .get_express_route_cross_connection_peering import *
from .get_express_route_gateway import *
from .get_express_route_port import *
from .get_firewall_policy import *
from .get_firewall_policy_rule_collection_group import *
from .get_flow_log import *
from .get_hub_route_table import *
from .get_hub_virtual_network_connection import *
from .get_inbound_nat_rule import *
from .get_ip_allocation import *
from .get_ip_group import *
from .get_load_balancer import *
from .get_load_balancer_backend_address_pool import *
from .get_local_network_gateway import *
from .get_nat_gateway import *
from .get_nat_rule import *
from .get_network_interface import *
from .get_network_interface_tap_configuration import *
from .get_network_profile import *
from .get_network_security_group import *
from .get_network_virtual_appliance import *
from .get_network_watcher import *
from .get_p2s_vpn_gateway import *
from .get_p2s_vpn_gateway_p2s_vpn_connection_health import *
from .get_p2s_vpn_gateway_p2s_vpn_connection_health_detailed import *
from .get_packet_capture import *
from .get_private_dns_zone_group import *
from .get_private_endpoint import *
from .get_private_link_service import *
from .get_private_link_service_private_endpoint_connection import *
from .get_public_ip_address import *
from .get_public_ip_prefix import *
from .get_route import *
from .get_route_filter import *
from .get_route_filter_rule import *
from .get_route_table import *
from .get_security_partner_provider import *
from .get_security_rule import *
from .get_service_endpoint_policy import *
from .get_service_endpoint_policy_definition import *
from .get_subnet import *
from .get_virtual_appliance_site import *
from .get_virtual_hub import *
from .get_virtual_hub_bgp_connection import *
from .get_virtual_hub_ip_configuration import *
from .get_virtual_hub_route_table_v2 import *
from .get_virtual_network import *
from .get_virtual_network_gateway import *
from .get_virtual_network_gateway_advertised_routes import *
from .get_virtual_network_gateway_bgp_peer_status import *
from .get_virtual_network_gateway_connection import *
from .get_virtual_network_gateway_learned_routes import *
from .get_virtual_network_gateway_vpnclient_connection_health import *
from .get_virtual_network_gateway_vpnclient_ipsec_parameters import *
from .get_virtual_network_peering import *
from .get_virtual_network_tap import *
from .get_virtual_router import *
from .get_virtual_router_peering import *
from .get_virtual_wan import *
from .get_vpn_connection import *
from .get_vpn_gateway import *
from .get_vpn_server_configuration import *
from .get_vpn_site import *
from .get_web_application_firewall_policy import *
from .hub_route_table import *
from .hub_virtual_network_connection import *
from .inbound_nat_rule import *
from .ip_allocation import *
from .ip_group import *
from .load_balancer import *
from .load_balancer_backend_address_pool import *
from .local_network_gateway import *
from .nat_gateway import *
from .nat_rule import *
from .network_interface import *
from .network_interface_tap_configuration import *
from .network_profile import *
from .network_security_group import *
from .network_virtual_appliance import *
from .network_watcher import *
from .p2s_vpn_gateway import *
from .packet_capture import *
from .private_dns_zone_group import *
from .private_endpoint import *
from .private_link_service import *
from .private_link_service_private_endpoint_connection import *
from .public_ip_address import *
from .public_ip_prefix import *
from .route import *
from .route_filter import *
from .route_filter_rule import *
from .route_table import *
from .security_partner_provider import *
from .security_rule import *
from .service_endpoint_policy import *
from .service_endpoint_policy_definition import *
from .subnet import *
from .virtual_appliance_site import *
from .virtual_hub import *
from .virtual_hub_bgp_connection import *
from .virtual_hub_ip_configuration import *
from .virtual_hub_route_table_v2 import *
from .virtual_network import *
from .virtual_network_gateway import *
from .virtual_network_gateway_connection import *
from .virtual_network_peering import *
from .virtual_network_tap import *
from .virtual_router import *
from .virtual_router_peering import *
from .virtual_wan import *
from .vpn_connection import *
from .vpn_gateway import *
from .vpn_server_configuration import *
from .vpn_site import *
from .web_application_firewall_policy import *
from ._inputs import *
from . import outputs
def _register_module():
    """Register this module's resource types with the Pulumi runtime.

    The engine hands back resource tokens of the form
    ``azure-native:network/v20200801:<TypeName>``; ``construct`` must map each
    token to its Python class so URNs can be rehydrated into typed resources.
    A lookup table replaces the original 72-branch if/elif chain: for every
    branch in the generated code the token suffix equals the class name, so
    the table is derived directly from the class objects.
    """
    import pulumi
    from ... import _utilities

    _token_prefix = "azure-native:network/v20200801:"
    # Map "<TypeName>" -> class; each class below was one elif branch.
    _resource_classes = {cls.__name__: cls for cls in (
        ApplicationGateway,
        ApplicationGatewayPrivateEndpointConnection,
        ApplicationSecurityGroup,
        AzureFirewall,
        BastionHost,
        ConnectionMonitor,
        CustomIPPrefix,
        DdosCustomPolicy,
        DdosProtectionPlan,
        DscpConfiguration,
        ExpressRouteCircuit,
        ExpressRouteCircuitAuthorization,
        ExpressRouteCircuitConnection,
        ExpressRouteCircuitPeering,
        ExpressRouteConnection,
        ExpressRouteCrossConnectionPeering,
        ExpressRouteGateway,
        ExpressRoutePort,
        FirewallPolicy,
        FirewallPolicyRuleCollectionGroup,
        FlowLog,
        HubRouteTable,
        HubVirtualNetworkConnection,
        InboundNatRule,
        IpAllocation,
        IpGroup,
        LoadBalancer,
        LoadBalancerBackendAddressPool,
        LocalNetworkGateway,
        NatGateway,
        NatRule,
        NetworkInterface,
        NetworkInterfaceTapConfiguration,
        NetworkProfile,
        NetworkSecurityGroup,
        NetworkVirtualAppliance,
        NetworkWatcher,
        P2sVpnGateway,
        PacketCapture,
        PrivateDnsZoneGroup,
        PrivateEndpoint,
        PrivateLinkService,
        PrivateLinkServicePrivateEndpointConnection,
        PublicIPAddress,
        PublicIPPrefix,
        Route,
        RouteFilter,
        RouteFilterRule,
        RouteTable,
        SecurityPartnerProvider,
        SecurityRule,
        ServiceEndpointPolicy,
        ServiceEndpointPolicyDefinition,
        Subnet,
        VirtualApplianceSite,
        VirtualHub,
        VirtualHubBgpConnection,
        VirtualHubIpConfiguration,
        VirtualHubRouteTableV2,
        VirtualNetwork,
        VirtualNetworkGateway,
        VirtualNetworkGatewayConnection,
        VirtualNetworkPeering,
        VirtualNetworkTap,
        VirtualRouter,
        VirtualRouterPeering,
        VirtualWan,
        VpnConnection,
        VpnGateway,
        VpnServerConfiguration,
        VpnSite,
        WebApplicationFirewallPolicy,
    )}

    class Module(pulumi.runtime.ResourceModule):
        _version = _utilities.get_semver_version()

        def version(self):
            return Module._version

        def construct(self, name: str, typ: str, urn: str) -> pulumi.Resource:
            if typ.startswith(_token_prefix):
                resource_class = _resource_classes.get(typ[len(_token_prefix):])
                if resource_class is not None:
                    return resource_class(name, pulumi.ResourceOptions(urn=urn))
            # Same failure mode and message as the generated elif chain.
            raise Exception(f"unknown resource type {typ}")

    _module_instance = Module()
    pulumi.runtime.register_resource_module("azure-native", "network/v20200801", _module_instance)
_register_module()
| 55.990854
| 105
| 0.735148
|
4a0837c419b7a0b71d290ffb78e270f7d80135b1
| 82
|
py
|
Python
|
ex001.py
|
natanpaess/Python_CeV
|
17126d33ad6a06cbc7947243f0205b131c551fb6
|
[
"MIT"
] | null | null | null |
ex001.py
|
natanpaess/Python_CeV
|
17126d33ad6a06cbc7947243f0205b131c551fb6
|
[
"MIT"
] | null | null | null |
ex001.py
|
natanpaess/Python_CeV
|
17126d33ad6a06cbc7947243f0205b131c551fb6
|
[
"MIT"
] | null | null | null |
# Exercise 001: write a program that shows "Olá, Mundo!" ("Hello, World!") on screen.
msg = 'Olá, Mundo!'
print(msg)
| 20.5
| 50
| 0.682927
|
4a0837f9090356b0adbd2cf1b6d57eb72b5557b8
| 3,134
|
py
|
Python
|
RSS2ATOM/transformation/HoutcategoriesSolveRefChannelCategoryATOMCategory.py
|
levilucio/SyVOLT
|
7526ec794d21565e3efcc925a7b08ae8db27d46a
|
[
"MIT"
] | 3
|
2017-06-02T19:26:27.000Z
|
2021-06-14T04:25:45.000Z
|
RSS2ATOM/transformation/HoutcategoriesSolveRefChannelCategoryATOMCategory.py
|
levilucio/SyVOLT
|
7526ec794d21565e3efcc925a7b08ae8db27d46a
|
[
"MIT"
] | 8
|
2016-08-24T07:04:07.000Z
|
2017-05-26T16:22:47.000Z
|
RSS2ATOM/transformation/HoutcategoriesSolveRefChannelCategoryATOMCategory.py
|
levilucio/SyVOLT
|
7526ec794d21565e3efcc925a7b08ae8db27d46a
|
[
"MIT"
] | 1
|
2019-10-31T06:00:23.000Z
|
2019-10-31T06:00:23.000Z
|
from core.himesis import Himesis
import uuid
class HoutcategoriesSolveRefChannelCategoryATOMCategory(Himesis):
def __init__(self):
"""
Creates the himesis graph representing the DSLTrans rule outcategoriesSolveRefChannelCategoryATOMCategory.
"""
# Flag this instance as compiled now
self.is_compiled = True
super(HoutcategoriesSolveRefChannelCategoryATOMCategory, self).__init__(name='HoutcategoriesSolveRefChannelCategoryATOMCategory', num_nodes=0, edges=[])
# Set the graph attributes
self["mm__"] = ['HimesisMM']
self["name"] = """outcategoriesSolveRefChannelCategoryATOMCategory"""
self["GUID__"] = uuid.uuid3(uuid.NAMESPACE_DNS,'outcategoriesSolveRefChannelCategoryATOMCategory')
# match model. We only support one match model
self.add_node()
self.vs[0]["mm__"] = """MatchModel"""
# apply model node
self.add_node()
self.vs[1]["mm__"] = """ApplyModel"""
# paired with relation between match and apply models
self.add_node()
self.vs[2]["mm__"] = """paired_with"""
self.vs[2]["attr1"] = """outcategoriesSolveRefChannelCategoryATOMCategory"""
# match class Channel(5.0.m.0Channel) node
self.add_node()
self.vs[3]["mm__"] = """Channel"""
self.vs[3]["attr1"] = """+"""
# match class Category(5.0.m.1Category) node
self.add_node()
self.vs[4]["mm__"] = """Category"""
self.vs[4]["attr1"] = """+"""
# apply class ATOM(5.0.a.0ATOM) node
self.add_node()
self.vs[5]["mm__"] = """ATOM"""
self.vs[5]["attr1"] = """1"""
# apply class Category(5.0.a.1Category) node
self.add_node()
self.vs[6]["mm__"] = """Category"""
self.vs[6]["attr1"] = """1"""
# match association Channel--category-->Category node
self.add_node()
self.vs[7]["attr1"] = """category"""
self.vs[7]["mm__"] = """directLink_S"""
# apply association ATOM--categories-->Category node
self.add_node()
self.vs[8]["attr1"] = """categories"""
self.vs[8]["mm__"] = """directLink_T"""
# backward association ATOM-->Channelnode
self.add_node()
self.vs[9]["mm__"] = """backward_link"""
# backward association Category-->Categorynode
self.add_node()
self.vs[10]["mm__"] = """backward_link"""
# Add the edges
self.add_edges([
(0,3), # matchmodel -> match_class Channel(5.0.m.0Channel)
(0,4), # matchmodel -> match_class Category(5.0.m.1Category)
(1,5), # applymodel -> apply_classATOM(5.0.a.0ATOM)
(1,6), # applymodel -> apply_classCategory(5.0.a.1Category)
(3,7), # match classChannel(5.0.m.0Channel) -> association category
(7,4), # associationcategory -> match_classChannel(5.0.m.1Category)
(5,8), # apply class ATOM(5.0.a.0ATOM) -> association categories
(8,6), # associationcategories -> apply_classCategory(5.0.a.1Category)
(5,9), # apply class ATOM(5.0.m.0Channel) -> backward_association
(9,3), # backward_associationChannel -> match_class Channel(5.0.m.0Channel)
(6,10), # apply class Category(5.0.m.1Category) -> backward_association
(10,4), # backward_associationCategory -> match_class Category(5.0.m.1Category)
(0,2), # matchmodel -> pairedwith
(2,1) # pairedwith -> applyModel
])
self["equations"] = []
| 35.213483
| 154
| 0.678047
|
4a083849bd39f606877069419396d8c42ef077eb
| 3,376
|
py
|
Python
|
tensorflow/python/grappler/item.py
|
M155K4R4/Tensorflow
|
e5e03ef3148303b3dfed89a1492dedf92b45be25
|
[
"Apache-2.0"
] | 24
|
2018-02-01T15:49:22.000Z
|
2021-01-11T16:31:18.000Z
|
tensorflow/python/grappler/item.py
|
M155K4R4/Tensorflow
|
e5e03ef3148303b3dfed89a1492dedf92b45be25
|
[
"Apache-2.0"
] | 13
|
2020-01-28T22:20:14.000Z
|
2022-03-11T23:20:14.000Z
|
tensorflow/python/grappler/item.py
|
M155K4R4/Tensorflow
|
e5e03ef3148303b3dfed89a1492dedf92b45be25
|
[
"Apache-2.0"
] | 13
|
2018-09-07T13:28:38.000Z
|
2020-07-17T15:06:24.000Z
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""A python interface for Grappler items."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.core.grappler.costs import op_performance_data_pb2
from tensorflow.core.protobuf import meta_graph_pb2
from tensorflow.python import pywrap_tensorflow as tf_item
from tensorflow.python.framework import errors
class Item(object):
"""GrapplerItem."""
def __init__(self,
metagraph,
ignore_colocation=True,
ignore_user_placement=False):
"""Creates an Item.
Args:
metagraph: a TensorFlow metagraph.
ignore_colocation: if set, the tool will ignore all the colocation
constraints generated by TensorFlow.
ignore_user_placement: if set, all the placement annotations annotated in
the metagraph will be ignored.
Raises:
ValueError: the metagraph is incomplete or invalid.
"""
self._metagraph = metagraph
self._item_graph = meta_graph_pb2.MetaGraphDef()
self._item_graph.CopyFrom(metagraph)
self._ignore_colocation = ignore_colocation
self._ignore_user_placement = ignore_user_placement
self._tf_item = None
self._BuildTFItem()
def IdentifyImportantOps(self, sort_topologically=False):
with errors.raise_exception_on_not_ok_status() as status:
return tf_item.TF_IdentifyImportantOps(self.tf_item, sort_topologically,
status)
def GetOpProperties(self):
ret_from_swig = tf_item.TF_GetOpProperties(self.tf_item)
properties = {}
for key, values in ret_from_swig.items():
prop = []
for value in values:
prop.append(
op_performance_data_pb2.OpInfo.TensorProperties.FromString(value))
properties[key] = prop
return properties
def GetColocationGroups(self):
"""Return a list of hard colocation constraints.
All the nodes in a colocation tuple must be placed on the same device for
the model to work.
Returns:
A list of colocation tuples.
"""
return tf_item.TF_GetColocationGroups(self.tf_item)
@property
def metagraph(self):
return self._metagraph
@property
def tf_item(self):
if self._item_graph != self._metagraph:
self._BuildTFItem()
self._item_graph.CopyFrom(self._metagraph)
return self._tf_item
def _BuildTFItem(self):
with errors.raise_exception_on_not_ok_status() as status:
self._tf_item = tf_item.TF_NewItem(self._metagraph.SerializeToString(),
self._ignore_colocation,
self._ignore_user_placement, status)
| 35.166667
| 80
| 0.695201
|
4a08385702ff6725e29813c1c1a7bc7306bc2fa2
| 7,580
|
py
|
Python
|
wizard_builder/tests/test_migrations.py
|
SexualHealthInnovations/django-wizard-builder
|
f5effe8c462313f16be914b562dbea8ea796b672
|
[
"BSD-3-Clause"
] | 16
|
2016-06-21T04:05:24.000Z
|
2017-09-26T15:40:24.000Z
|
wizard_builder/tests/test_migrations.py
|
SexualHealthInnovations/django-wizard-builder
|
f5effe8c462313f16be914b562dbea8ea796b672
|
[
"BSD-3-Clause"
] | 141
|
2016-06-21T20:46:46.000Z
|
2017-09-28T00:20:49.000Z
|
wizard_builder/tests/test_migrations.py
|
project-callisto/django-wizard-builder
|
f5effe8c462313f16be914b562dbea8ea796b672
|
[
"BSD-3-Clause"
] | 7
|
2017-10-04T22:52:18.000Z
|
2018-05-31T17:14:39.000Z
|
from django_migration_testcase import MigrationTest
class SitesMigrationTest(MigrationTest):
app_name = 'wizard_builder'
before = '0005_delete_constraints'
after = '0006_many_sites'
def migrate_kwargs(self):
return {
'verbosity': 1,
'interactive': False,
}
def test_sites_attribute_populated(self):
OldQuestionPage = self.get_model_before('wizard_builder.QuestionPage')
old_page = OldQuestionPage.objects.create(site_id=1)
self.run_migration()
NewQuestionPage = self.get_model_after('wizard_builder.QuestionPage')
new_page = NewQuestionPage.objects.first()
self.assertEqual(old_page.site_id, new_page.sites.first().id)
def test_phantom_sites_not_populated(self):
OldQuestionPage = self.get_model_before('wizard_builder.QuestionPage')
old_page = OldQuestionPage.objects.create()
self.run_migration()
NewQuestionPage = self.get_model_after('wizard_builder.QuestionPage')
new_page = NewQuestionPage.objects.first()
self.assertEqual(old_page.site_id, None)
self.assertEqual(new_page.sites.count(), 0)
class QuestionPageMigrationTest(MigrationTest):
app_name = 'wizard_builder'
before = '0008_remove_textpage'
after = '0011_rename_questionpage_attrs'
def migrate_kwargs(self):
return {
'verbosity': 1,
'interactive': False,
}
def test_attributes_populated(self):
OldQuestionPage = self.get_model_before('wizard_builder.QuestionPage')
old_page = OldQuestionPage.objects.create(
position=20,
section=1,
)
old_page.sites.add(1)
old_page_sites_count = old_page.sites.count()
self.run_migration()
NewQuestionPage = self.get_model_after('wizard_builder.QuestionPage')
new_page = NewQuestionPage.objects.first()
new_page_sites_count = new_page.sites.count()
self.assertEqual(old_page.section, new_page.section)
self.assertEqual(old_page.position, new_page.position)
self.assertEqual(old_page_sites_count, new_page_sites_count)
class PageIDMigrationTest(MigrationTest):
app_name = 'wizard_builder'
before = '0011_rename_questionpage_attrs'
after = '0014_questionpage_to_page_3'
def migrate_kwargs(self):
return {
'verbosity': 1,
'interactive': False,
}
def _get_attrs(self, cls, attr):
return list(cls.objects.all().values_list(attr, flat=True))
def test_attributes_populated(self):
OldQuestionPage = self.get_model_before('wizard_builder.QuestionPage')
for i in range(3):
OldQuestionPage.objects.create()
old_page_ids = self._get_attrs(OldQuestionPage, 'pagebase_ptr_id')
self.run_migration()
NewPage = self.get_model_after('wizard_builder.Page')
new_page_ids = self._get_attrs(NewPage, 'id')
self.assertCountEqual(old_page_ids, new_page_ids)
class PopulateTypeMigrationTest(MigrationTest):
app_name = 'wizard_builder'
before = '0028_formquestion_type'
after = '0029_populate_type'
def migrate_kwargs(self):
return {
'verbosity': 1,
'interactive': False,
}
def test_type_populated(self):
FormQuestion = self.get_model_before('wizard_builder.FormQuestion')
RadioButton = self.get_model_before('wizard_builder.RadioButton')
Checkbox = self.get_model_before('wizard_builder.Checkbox')
TextArea = self.get_model_before('wizard_builder.TextArea')
SingleLineText = self.get_model_before('wizard_builder.SingleLineText')
formquestion = FormQuestion.objects.create()
radiobutton = RadioButton.objects.create()
checkbox = Checkbox.objects.create()
textarea = TextArea.objects.create()
singlelinetext = SingleLineText.objects.create()
self.run_migration()
self.assertEqual(
FormQuestion.objects.get(id=formquestion.id).type,
None,
)
self.assertEqual(
FormQuestion.objects.get(id=radiobutton.id).type,
'radiobutton',
)
self.assertEqual(
FormQuestion.objects.get(id=checkbox.id).type,
'checkbox',
)
self.assertEqual(
FormQuestion.objects.get(id=textarea.id).type,
'textarea',
)
self.assertEqual(
FormQuestion.objects.get(id=singlelinetext.id).type,
'singlelinetext',
)
class PopulateDropdownMigrationTest(MigrationTest):
app_name = 'wizard_builder'
before = '0031_formquestion_choices_default'
after = '0032_move_question_dropdown'
def migrate_kwargs(self):
return {
'verbosity': 1,
'interactive': False,
}
def test_type_populated(self):
RadioButton = self.get_model_before('wizard_builder.RadioButton')
yes_dropdown = RadioButton.objects.create(is_dropdown=True)
no_dropdown = RadioButton.objects.create()
self.run_migration()
FormQuestion = self.get_model_after('wizard_builder.FormQuestion')
yes_question_id = yes_dropdown.formquestion_ptr.id
no_question_id = no_dropdown.formquestion_ptr.id
self.assertEqual(
FormQuestion.objects.get(id=yes_question_id).is_dropdown,
True,
)
self.assertEqual(
FormQuestion.objects.get(id=no_question_id).is_dropdown,
False,
)
class MoveChoiceQuestionMigrationTest(MigrationTest):
app_name = 'wizard_builder'
before = '0033_add_temps'
after = '0035_auto_20171025_0014'
def migrate_kwargs(self):
return {
'verbosity': 1,
'interactive': False,
}
def test_type_populated(self):
RadioButton = self.get_model_before('wizard_builder.RadioButton')
OldChoice = self.get_model_before('wizard_builder.Choice')
question = RadioButton.objects.create()
old_choice = OldChoice.objects.create(question=question)
old_base_question = old_choice.question.formquestion_ptr
self.run_migration()
NewChoice = self.get_model_after('wizard_builder.Choice')
new_choice = NewChoice.objects.get(id=old_choice.id)
new_base_question = new_choice.question
self.assertEqual(
old_base_question._meta.model_name.lower(), 'formquestion')
self.assertEqual(
new_base_question._meta.model_name.lower(), 'formquestion')
self.assertEqual(
old_base_question.id, new_base_question.id)
class DropdownMigrationTest(MigrationTest):
app_name = 'wizard_builder'
before = '0039_dropdown_proxy'
after = '0040_populate_dropdown'
def migrate_kwargs(self):
return {
'verbosity': 1,
'interactive': False,
}
def test_type_populated(self):
FormQuestion = self.get_model_before('wizard_builder.FormQuestion')
no_dropdown = FormQuestion.objects.create(
type='radiobutton')
yes_dropdown = FormQuestion.objects.create(
type='radiobutton', is_dropdown=True)
self.run_migration()
no_dropdown = FormQuestion.objects.get(id=no_dropdown.id)
yes_dropdown = FormQuestion.objects.get(id=yes_dropdown.id)
self.assertEqual(no_dropdown.type, 'radiobutton')
self.assertEqual(yes_dropdown.type, 'dropdown')
| 31.452282
| 79
| 0.666095
|
4a0838bd7930a5a43c5cde11bb2874fc9383d769
| 1,714
|
py
|
Python
|
app/models.py
|
PatrickRudgeri/financial-mngt
|
42754e7ade89805a2297c1783f86a0451dec4674
|
[
"MIT"
] | 2
|
2021-08-06T20:26:40.000Z
|
2021-09-02T22:47:42.000Z
|
app/models.py
|
PHenriqueCEC/financial-mngt
|
42754e7ade89805a2297c1783f86a0451dec4674
|
[
"MIT"
] | null | null | null |
app/models.py
|
PHenriqueCEC/financial-mngt
|
42754e7ade89805a2297c1783f86a0451dec4674
|
[
"MIT"
] | 2
|
2021-08-02T23:17:56.000Z
|
2021-08-31T23:42:26.000Z
|
from django.db import models
from django.contrib.auth.models import User
from django.urls import reverse
# https://docs.djangoproject.com/pt-br/3.2/ref/contrib/auth/#user-model
# classe User
class CategoriaReceita(models.Model):
nome = models.CharField(max_length=50)
usuario = models.ForeignKey(User, on_delete=models.CASCADE)
def __str__(self):
return self.nome
def __repr__(self):
return self.nome
class CategoriaDespesa(models.Model):
nome = models.CharField(max_length=50)
usuario = models.ForeignKey(User, on_delete=models.CASCADE)
def __str__(self):
return self.nome
def __repr__(self):
return self.nome
class Receita(models.Model):
nome = models.CharField(max_length=200)
valor = models.FloatField()
data = models.DateTimeField()
usuario = models.ForeignKey(User, on_delete=models.CASCADE)
categoria = models.ForeignKey(CategoriaReceita, null=True, on_delete=models.SET_NULL)
def __str__(self):
return f'{self.nome} ({self.categoria}), R$ {self.valor:.2f}, {self.data.day:02}/{self.data.month}, {self.usuario}'
def get_absolute_url(self):
return reverse('app:receitas')
class Despesa(models.Model):
nome = models.CharField(max_length=200)
valor = models.FloatField()
data = models.DateTimeField()
usuario = models.ForeignKey(User, on_delete=models.CASCADE)
categoria = models.ForeignKey(CategoriaDespesa, null=True, on_delete=models.SET_NULL)
def __str__(self):
return f'{self.nome} ({self.categoria}), R$ {self.valor:.2f}, {self.data.day:02}/{self.data.month}, {self.usuario}'
def get_absolute_url(self):
return reverse('app:despesas')
| 30.070175
| 123
| 0.703617
|
4a0838f852690c703efc974cf336fd75555f4a1d
| 3,358
|
py
|
Python
|
tests/integration/TestSQLite3.py
|
rakhimov/rtk
|
adc35e218ccfdcf3a6e3082f6a1a1d308ed4ff63
|
[
"BSD-3-Clause"
] | null | null | null |
tests/integration/TestSQLite3.py
|
rakhimov/rtk
|
adc35e218ccfdcf3a6e3082f6a1a1d308ed4ff63
|
[
"BSD-3-Clause"
] | null | null | null |
tests/integration/TestSQLite3.py
|
rakhimov/rtk
|
adc35e218ccfdcf3a6e3082f6a1a1d308ed4ff63
|
[
"BSD-3-Clause"
] | 2
|
2020-04-03T04:14:42.000Z
|
2021-02-22T05:30:35.000Z
|
#!/usr/bin/env python -O
"""
This is the test class for testing the Environment class.
"""
# -*- coding: utf-8 -*-
#
# rtk.tests.unit.TestDAO.py is part of The RTK Project
#
# All rights reserved.
# Copyright 2007 - 2017 Andrew Rowland andrew.rowland <AT> reliaqual <DOT> com
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
# PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER
# OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import sys
from os.path import dirname
sys.path.insert(0, dirname(dirname(dirname(__file__))) + "/rtk", )
import sqlite3
import unittest
from nose.plugins.attrib import attr
import dao.DAO as _dao
__author__ = 'Andrew Rowland'
__email__ = 'andrew.rowland@reliaqual.com'
__organization__ = 'ReliaQual Associates, LLC'
__copyright__ = 'Copyright 2014 Andrew "Weibullguy" Rowland'
class TestSQLite3Model(unittest.TestCase):
"""
Class for testing the SQLite3 model class.
"""
def setUp(self):
"""
(TestSQLite3) setup the test fixture for the SQLite3 model class
"""
_database = '/tmp/tempdb.rtk'
self.DUT = _dao(_database)
@attr(all=True, integration=True)
def test01_create_sqlite3(self):
"""
(TestSQLite3) SQLite3 __init__() should return an sqlite3.Connection
"""
self.assertTrue(isinstance(self.DUT, _dao))
self.assertTrue(isinstance(self.DUT.model.connection,
sqlite3.Connection))
@attr(all=True, integration=True)
def test02_execute(self):
"""
(TestSQLite3) execute should return 0 when an SQL query is successfully executed
"""
_query = "SELECT * FROM tbl_revisions"
self.assertEqual(self.DUT.execute(_query)[1], 0)
@attr(all=True, integration=True)
def test03_get_next_id(self):
"""
(TestSQLite3) Tests that the next ID can be retrieved.
"""
self.assertEqual(self.DUT.get_last_id('tbl_functions')[1], 0)
| 34.979167
| 88
| 0.705479
|
4a083948c5757e969a9fc7e41488969c0db8f29c
| 11,778
|
py
|
Python
|
utils/nuswide_dataset.py
|
stevehuanghe/multi_label_zsl
|
68ac74d4e6ed2c2528fbae7f0a05df7c5e73bc78
|
[
"MIT"
] | 4
|
2021-05-14T15:40:38.000Z
|
2021-11-02T06:26:58.000Z
|
utils/nuswide_dataset.py
|
stevehuanghe/multi_label_zsl
|
68ac74d4e6ed2c2528fbae7f0a05df7c5e73bc78
|
[
"MIT"
] | null | null | null |
utils/nuswide_dataset.py
|
stevehuanghe/multi_label_zsl
|
68ac74d4e6ed2c2528fbae7f0a05df7c5e73bc78
|
[
"MIT"
] | 1
|
2021-06-15T15:13:09.000Z
|
2021-06-15T15:13:09.000Z
|
import torch
import torch.utils.data as data
import torchvision.transforms as transforms
import os
from pathlib import Path
import pickle
import numpy as np
from PIL import Image
from PIL import ImageFile
ImageFile.LOAD_TRUNCATED_IMAGES = True
import csv
import copy
class NUSWideDataset(data.Dataset):
"""Custom Dataset compatible with torch.utils.data.DataLoader."""
def __init__(self, image_dir, anno_dir, transform=None, n_val=0, mode="train", n_unseen=16, unseen_file=None):
"""Set the path for images, captions and vocabulary wrapper.
Args:
image_dir: image directory.
anno_json: coco annotation file path.
label_set: list of labels, IDs or names.
transform: image transformation function, callable.
"""
assert n_val >= 0
self.image_dir = image_dir
self.anno_dir = anno_dir
self.transform = transform
self.mode = mode
self.valid_ids = []
common = ['plane', 'zebra', 'valley', 'tiger', 'castle']
unseen_labels_file = Path(anno_dir) / Path("Concepts81.txt")
seen_labels_file = Path(anno_dir) / Path("NUS_WID_Tags/TagList1k.txt")
unseen_cats = self.load_label_set(unseen_labels_file)
seen_cats = self.load_label_set(seen_labels_file)
assert len(seen_cats) == 1000
assert len(unseen_cats) == 81
seen_cats_new = [x for x in seen_cats if x not in unseen_cats]
seen_label_idx = [i for i, x in enumerate(seen_cats) if x not in unseen_cats]
assert len(seen_cats_new) == 925
self.seen_label_idx = torch.tensor(seen_label_idx).long()
unseen_cats_new = [x for x in unseen_cats if x not in common]
assert len(unseen_cats_new) == 76
unseen_label_idx = [i for i, x in enumerate(unseen_cats) if x not in common]
self.unseen_label_idx = torch.tensor(unseen_label_idx).long()
self.seen_idx = torch.tensor([i for i in range(925)]).long()
self.unseen_idx = torch.tensor([i+925 for i in range(len(unseen_cats_new))]).long()
self.all_cats = seen_cats_new + unseen_cats_new
self.seen_cats = seen_cats_new
self.unseen_cats = unseen_cats_new
self.train_idx = self.seen_idx
self.val_idx = self.seen_idx
train_seen_anno = Path(anno_dir) / Path("NUS_WID_Tags/Train_Tags1k.dat")
test_unseen_anno = Path(anno_dir) / Path("NUS_WID_Tags/Test_Tags81.txt")
test_seen_anno = Path(anno_dir) / Path("NUS_WID_Tags/Test_Tags1k.dat")
train_image_file = Path(anno_dir) / Path("ImageList/TrainImagelist.txt")
test_image_file = Path(anno_dir) / Path("ImageList/TestImagelist.txt")
if mode == "train":
self.img_list = self.load_image_list(train_image_file, image_dir)
self.gt_labels = self.load_gt_labels(train_seen_anno)[:,self.seen_label_idx]
else:
self.img_list = self.load_image_list(test_image_file, image_dir)
test_unseen_gt = self.load_gt_labels(test_unseen_anno)[:, self.unseen_label_idx]
test_seen_gt = self.load_gt_labels(test_seen_anno)[:, self.seen_label_idx]
self.gt_labels = torch.cat([test_seen_gt, test_unseen_gt], dim=1)
assert len(self.img_list) == self.gt_labels.size(0)
@staticmethod
def load_label_set(label_file):
if not os.path.isfile(label_file):
raise FileNotFoundError(f"file not found: {label_file}")
label_set = []
with open(label_file, "r") as fin:
lines = fin.readlines()
for line in lines:
word = line.split('\n')[0]
if word != '':
label_set.append(word)
return label_set[:1000]
def load_image_list(self, image_file, image_dir):
if not os.path.isfile(image_file):
raise FileNotFoundError(f"file not found: {image_file}")
image_list = []
with open(image_file, "r") as fin:
lines = fin.readlines()
for idx, line in enumerate(lines):
filename = line.split()[0]
filename = os.path.join(image_dir, filename.split('_')[-1])
if os.path.isfile(filename):
image_list.append(filename)
self.valid_ids.append(idx)
return image_list
def load_gt_labels(self, anno_file):
if not os.path.isfile(anno_file):
raise FileNotFoundError(f"file not found: {anno_file}")
gt_labels = []
with open(anno_file, "r") as fin:
reader = fin.readlines()
for line in reader:
line = line.split()
labels = torch.from_numpy(np.array(line) == '1').long()
gt_labels.append(labels.view(1, -1))
assert len(self.valid_ids) > 0
gt_labels = torch.cat(gt_labels, dim=0)[self.valid_ids]
return gt_labels
def __len__(self):
return len(self.img_list)
def __getitem__(self, index):
labels = self.gt_labels[index]
image = Image.open(os.path.join(self.image_dir, self.img_list[index])).convert('RGB')
if self.transform is not None:
image = self.transform(image)
else:
image = transforms.ToTensor()(image)
return image, labels
class NUSWideDataset81(data.Dataset):
"""Custom Dataset compatible with torch.utils.data.DataLoader."""
def __init__(self, image_dir, anno_dir, transform=None, n_val=0, mode="train", n_unseen=16, unseen_file=None):
"""Set the path for images, captions and vocabulary wrapper.
Args:
image_dir: image directory.
anno_json: coco annotation file path.
label_set: list of labels, IDs or names.
transform: image transformation function, callable.
"""
assert n_val >= 0
self.image_dir = image_dir
self.anno_dir = anno_dir
self.transform = transform
self.mode = mode
self.valid_ids = []
common = ['plane', 'zebra', 'valley', 'tiger', 'castle']
labels_file = Path(anno_dir) / Path("Concepts81.txt")
all_cats = self.load_label_set(labels_file)
unseen_names = []
if unseen_file is not None:
with Path(unseen_file).open('r') as fin:
lines = fin.readlines()
for line in lines:
label = line.split('\n')[0]
unseen_names.append(label)
elif n_unseen > 0:
all_cats_copy = copy.deepcopy(all_cats)
while True:
np.random.shuffle(all_cats_copy)
unseen_names = all_cats_copy[:n_unseen]
if set(unseen_names).intersection(set(common)) == set():
break
else:
unseen_names = all_cats
self.n_unseen = len(unseen_names)
self.n_seen = len(all_cats) - self.n_unseen
self.n_all = len(all_cats)
seen_cats = []
unseen_cats = []
seen_idx = []
unseen_idx = []
for i, cat in enumerate(all_cats):
if cat not in unseen_names:
seen_idx.append(i)
seen_cats.append(cat)
else:
unseen_idx.append(i)
unseen_cats.append(cat)
if len(seen_cats) == 0:
self.n_seen = self.n_all
seen_cats = unseen_cats
seen_idx = unseen_idx
self.seen_idx = torch.tensor(seen_idx).long()
self.unseen_idx = torch.tensor(unseen_idx).long()
self.all_cats = all_cats
self.seen_cats = seen_cats
self.unseen_cats = unseen_cats
# TODO:
self.train_idx = self.seen_idx
self.val_idx = self.seen_idx
train_anno = Path(anno_dir) / Path("NUS_WID_Tags/Train_Tags81.txt")
test_anno = Path(anno_dir) / Path("NUS_WID_Tags/Test_Tags81.txt")
train_image_file = Path(anno_dir) / Path("ImageList/TrainImagelist.txt")
test_image_file = Path(anno_dir) / Path("ImageList/TestImagelist.txt")
if mode == "train":
self.img_list = self.load_image_list(train_image_file, image_dir)
self.gt_labels = self.load_gt_labels(train_anno)[:, self.seen_idx]
else:
self.img_list = self.load_image_list(test_image_file, image_dir)
self.gt_labels = self.load_gt_labels(test_anno)
nonempty_idx = []
for i in range(self.gt_labels.size(0)):
if self.gt_labels[i].sum() > 0:
nonempty_idx.append(i)
self.img_list = [x for i, x in enumerate(self.img_list) if i in nonempty_idx]
self.gt_labels = self.gt_labels[nonempty_idx, :]
assert len(self.img_list) == self.gt_labels.size(0)
@staticmethod
def load_label_set(label_file, n_max=1000):
if not os.path.isfile(label_file):
raise FileNotFoundError(f"file not found: {label_file}")
label_set = []
with open(label_file, "r") as fin:
lines = fin.readlines()
for line in lines:
word = line.split('\n')[0]
if word != '':
label_set.append(word)
return label_set[:n_max]
def load_image_list(self, image_file, image_dir):
if not os.path.isfile(image_file):
raise FileNotFoundError(f"file not found: {image_file}")
image_list = []
with open(image_file, "r") as fin:
lines = fin.readlines()
for idx, line in enumerate(lines):
filename = line.split()[0]
filename = os.path.join(image_dir, filename.split('_')[-1])
if os.path.isfile(filename):
image_list.append(filename)
self.valid_ids.append(idx)
return image_list
def load_gt_labels(self, anno_file):
if not os.path.isfile(anno_file):
raise FileNotFoundError(f"file not found: {anno_file}")
gt_labels = []
with open(anno_file, "r") as fin:
reader = fin.readlines()
for line in reader:
line = line.split()
labels = torch.from_numpy(np.array(line) == '1').long()
gt_labels.append(labels.view(1, -1))
assert len(self.valid_ids) > 0
gt_labels = torch.cat(gt_labels, dim=0)[self.valid_ids]
return gt_labels
def __len__(self):
return len(self.img_list)
def __getitem__(self, index):
labels = self.gt_labels[index]
image = Image.open(os.path.join(self.image_dir, self.img_list[index])).convert('RGB')
if self.transform is not None:
image = self.transform(image)
else:
image = transforms.ToTensor()(image)
return image, labels
if __name__ == '__main__':
from torch.utils.data import DataLoader
def transform_fn(image):
transform = transforms.Compose([
transforms.RandomResizedCrop(224),
transforms.RandomHorizontalFlip(),
transforms.ToTensor(),
transforms.Normalize((0.485, 0.456, 0.406),
(0.229, 0.224, 0.225))])
return transform(image)
nus_img_dir = '/media/hehuang/Data/nus_wide/images'
nus_anno_dir = '/media/hehuang/Data/nus_wide/annotations'
dataset = NUSWideDataset(nus_img_dir, nus_anno_dir, transform=transform_fn, mode="train")
loader = DataLoader(dataset,
batch_size=10,
num_workers=2,
shuffle=False)
print(len(dataset))
for image, target in loader:
print(image.size())
print(target.size())
break
| 35.690909
| 114
| 0.60197
|
4a0839d25dbfa3e9c8bbcc9e5f8b2d307d2face7
| 832
|
py
|
Python
|
Server/prediction/migrations/0005_auto_20210422_1951.py
|
mohanj098/Item-Price-Forecasting
|
14fc787ad4d9dcc6af03b43fa5e866cd254a99f5
|
[
"MIT"
] | null | null | null |
Server/prediction/migrations/0005_auto_20210422_1951.py
|
mohanj098/Item-Price-Forecasting
|
14fc787ad4d9dcc6af03b43fa5e866cd254a99f5
|
[
"MIT"
] | 2
|
2021-03-15T15:53:22.000Z
|
2021-05-03T09:32:34.000Z
|
Server/prediction/migrations/0005_auto_20210422_1951.py
|
mohanj098/Item-Price-Forecasting
|
14fc787ad4d9dcc6af03b43fa5e866cd254a99f5
|
[
"MIT"
] | 1
|
2021-05-04T15:35:06.000Z
|
2021-05-04T15:35:06.000Z
|
# Generated by Django 3.1.7 on 2021-04-22 14:21
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('prediction', '0004_auto_20210421_1329'),
]
operations = [
migrations.RenameField(
model_name='price',
old_name='cost',
new_name='price',
),
migrations.AlterField(
model_name='price',
name='pid',
field=models.CharField(max_length=500),
),
migrations.AlterField(
model_name='product',
name='pid',
field=models.CharField(max_length=500),
),
migrations.AlterField(
model_name='product',
name='productName',
field=models.CharField(max_length=500),
),
]
| 24.470588
| 51
| 0.545673
|
4a083c2b2c1c09cd7d853947a938f7054e3baa71
| 10,332
|
py
|
Python
|
model/twossrnet.py
|
kcv-if/Agendernet-SSD
|
4d33ba5623a6869b3f78a490b6e8857d4b70bce3
|
[
"MIT"
] | null | null | null |
model/twossrnet.py
|
kcv-if/Agendernet-SSD
|
4d33ba5623a6869b3f78a490b6e8857d4b70bce3
|
[
"MIT"
] | null | null | null |
model/twossrnet.py
|
kcv-if/Agendernet-SSD
|
4d33ba5623a6869b3f78a490b6e8857d4b70bce3
|
[
"MIT"
] | null | null | null |
import numpy as np
from keras.layers import Dense, Flatten, Dropout, GlobalAveragePooling2D, Input, Conv2D
from keras.layers import Activation, Multiply, Lambda, AveragePooling2D, MaxPooling2D, BatchNormalization
from keras.models import Model
from keras.utils import plot_model
from keras import backend as K
class TwoAgenderSSRNet(Model):
"""Soft Stagewise Regression Network
Parameters
----------
image_size : int
size for image used as input
stage_num : list
list of stage number
lambda_local : float
local lambda
lambda_d : float
d lambda
"""
def __init__(self, image_size, stage_num, lambda_local, lambda_d):
self.input_size = image_size
if K.image_dim_ordering() == "th":
self.__channel_axis = 1
self.__input_shape = (3, image_size, image_size)
else:
self.__channel_axis = -1
self.__input_shape = (image_size, image_size, 3)
self.__stage_num = stage_num
self.__lambda_local = lambda_local
self.__lambda_d = lambda_d
self.block = {'age': {},
'gender': {}}
inputs = Input(shape=self.__input_shape)
self.__extraction_block(inputs, 'gender')
self.__extraction_block(inputs, 'age')
pred_age = self.__classifier_block(101, 'age')
pred_gender = self.__classifier_block(1, 'gender')
super().__init__(inputs=inputs, outputs=[pred_gender, pred_age], name='TwoSSR_Net')
def __extraction_block(self, inputs, name):
"""
Build block to extract feature from image
Parameters
----------
inputs : keras Input layer
Input layer to be used to receive image input
name : string
Name of block ['age', 'gender']
"""
x = Conv2D(32, (3, 3))(inputs)
x = BatchNormalization(axis=self.__channel_axis)(x)
x = Activation('relu')(x)
self.block[name]['x_layer1'] = AveragePooling2D(2, 2)(x)
x = Conv2D(32, (3, 3))(self.block[name]['x_layer1'])
x = BatchNormalization(axis=self.__channel_axis)(x)
x = Activation('relu')(x)
self.block[name]['x_layer2'] = AveragePooling2D(2, 2)(x)
x = Conv2D(32, (3, 3))(self.block[name]['x_layer2'])
x = BatchNormalization(axis=self.__channel_axis)(x)
x = Activation('relu')(x)
self.block[name]['x_layer3'] = AveragePooling2D(2, 2)(x)
x = Conv2D(32, (3, 3))(self.block[name]['x_layer3'])
x = BatchNormalization(axis=self.__channel_axis)(x)
self.block[name]['x'] = Activation('relu')(x)
# -------------------------------------------------------------------------------------------------------------------------
s = Conv2D(16, (3, 3))(inputs)
s = BatchNormalization(axis=self.__channel_axis)(s)
s = Activation('tanh')(s)
self.block[name]['s_layer1'] = MaxPooling2D(2, 2)(s)
s = Conv2D(16, (3, 3))(self.block[name]['s_layer1'])
s = BatchNormalization(axis=self.__channel_axis)(s)
s = Activation('tanh')(s)
self.block[name]['s_layer2'] = MaxPooling2D(2, 2)(s)
s = Conv2D(16, (3, 3))(self.block[name]['s_layer2'])
s = BatchNormalization(axis=self.__channel_axis)(s)
s = Activation('tanh')(s)
self.block[name]['s_layer3'] = MaxPooling2D(2, 2)(s)
s = Conv2D(16, (3, 3))(self.block[name]['s_layer3'])
s = BatchNormalization(axis=self.__channel_axis)(s)
self.block[name]['s'] = Activation('tanh')(s)
def __classifier_block(self, V, name):
"""
Build classifier block to calculate regression value for prediction
Parameters
----------
V : int
Number of prediction range to be used, e.g age:100, gender:2
name : string
Name of prediction output ['age', 'gender']
Returns
-------
keras layer
prediction block
"""
s_layer4 = Conv2D(10, (1, 1), activation='relu')(self.block[name]['s'])
s_layer4 = Flatten()(s_layer4)
s_layer4_mix = Dropout(0.2)(s_layer4)
s_layer4_mix = Dense(units=self.__stage_num[0], activation="relu")(s_layer4_mix)
x_layer4 = Conv2D(10, (1, 1), activation='relu')(self.block[name]['x'])
x_layer4 = Flatten()(x_layer4)
x_layer4_mix = Dropout(0.2)(x_layer4)
x_layer4_mix = Dense(units=self.__stage_num[0], activation="relu")(x_layer4_mix)
feat_s1_pre = Multiply()([s_layer4, x_layer4])
delta_s1 = Dense(1, activation='tanh', name=name+'_delta_s1')(feat_s1_pre)
feat_s1 = Multiply()([s_layer4_mix, x_layer4_mix])
feat_s1 = Dense(2*self.__stage_num[0], activation='relu')(feat_s1)
pred_s1 = Dense(units=self.__stage_num[0], activation="relu", name=name+'_pred_stage1')(feat_s1)
local_s1 = Dense(units=self.__stage_num[0], activation='tanh', name=name+'_local_delta_stage1')(feat_s1)
# -------------------------------------------------------------------------------------------------------------------------
s_layer2 = Conv2D(10, (1, 1), activation='relu')(self.block[name]['s_layer2'])
s_layer2 = MaxPooling2D(4, 4)(s_layer2)
s_layer2 = Flatten()(s_layer2)
s_layer2_mix = Dropout(0.2)(s_layer2)
s_layer2_mix = Dense(self.__stage_num[1], activation='relu')(s_layer2_mix)
x_layer2 = Conv2D(10, (1, 1), activation='relu')(self.block[name]['x_layer2'])
x_layer2 = AveragePooling2D(4, 4)(x_layer2)
x_layer2 = Flatten()(x_layer2)
x_layer2_mix = Dropout(0.2)(x_layer2)
x_layer2_mix = Dense(self.__stage_num[1], activation='relu')(x_layer2_mix)
feat_s2_pre = Multiply()([s_layer2, x_layer2])
delta_s2 = Dense(1, activation='tanh', name=name+'_delta_s2')(feat_s2_pre)
feat_s2 = Multiply()([s_layer2_mix, x_layer2_mix])
feat_s2 = Dense(2*self.__stage_num[1], activation='relu')(feat_s2)
pred_s2 = Dense(units=self.__stage_num[1], activation="relu", name=name+'_pred_stage2')(feat_s2)
local_s2 = Dense(units=self.__stage_num[1], activation='tanh', name=name+'_local_delta_stage2')(feat_s2)
# -------------------------------------------------------------------------------------------------------------------------
s_layer1 = Conv2D(10, (1, 1), activation='relu')(self.block[name]['s_layer1'])
s_layer1 = MaxPooling2D(8, 8)(s_layer1)
s_layer1 = Flatten()(s_layer1)
s_layer1_mix = Dropout(0.2)(s_layer1)
s_layer1_mix = Dense(self.__stage_num[2], activation='relu')(s_layer1_mix)
x_layer1 = Conv2D(10, (1, 1), activation='relu')(self.block[name]['x_layer1'])
x_layer1 = AveragePooling2D(8, 8)(x_layer1)
x_layer1 = Flatten()(x_layer1)
x_layer1_mix = Dropout(0.2)(x_layer1)
x_layer1_mix = Dense(self.__stage_num[2], activation='relu')(x_layer1_mix)
feat_s3_pre = Multiply()([s_layer1, x_layer1])
delta_s3 = Dense(1, activation='tanh', name=name+'_delta_s3')(feat_s3_pre)
feat_s3 = Multiply()([s_layer1_mix, x_layer1_mix])
feat_s3 = Dense(2*self.__stage_num[2], activation='relu')(feat_s3)
pred_s3 = Dense(units=self.__stage_num[2], activation="relu", name=name+'_pred_stage3')(feat_s3)
local_s3 = Dense(units=self.__stage_num[2], activation='tanh', name=name+'_local_delta_stage3')(feat_s3)
# -------------------------------------------------------------------------------------------------------------------------
def SSR_module(x, s1, s2, s3, lambda_local, lambda_d, V):
a = x[0][:, 0]*0
b = x[0][:, 0]*0
c = x[0][:, 0]*0
for i in range(0, s1):
a = a+(i+lambda_local*x[6][:, i])*x[0][:, i]
a = K.expand_dims(a, -1)
a = a/(s1*(1+lambda_d*x[3]))
for j in range(0, s2):
b = b+(j+lambda_local*x[7][:, j])*x[1][:, j]
b = K.expand_dims(b, -1)
b = b/(s1*(1+lambda_d*x[3]))/(s2*(1+lambda_d*x[4]))
for k in range(0, s3):
c = c+(k+lambda_local*x[8][:, k])*x[2][:, k]
c = K.expand_dims(c, -1)
c = c/(s1*(1+lambda_d*x[3]))/(s2*(1+lambda_d*x[4]))/(s3*(1+lambda_d*x[5]))
out = (a+b+c)*V
return out
pred = Lambda(SSR_module,
arguments={'s1': self.__stage_num[0],
's2': self.__stage_num[1],
's3': self.__stage_num[2],
'lambda_local': self.__lambda_local, 'lambda_d': self.__lambda_d, 'V': V},
name=name + '_prediction')([pred_s1, pred_s2, pred_s3, delta_s1, delta_s2, delta_s3, local_s1,
local_s2, local_s3])
return pred
def prep_phase1(self):
"""Do nothing
"""
pass
def prep_phase2(self):
"""Do nothing
"""
pass
@staticmethod
def decode_prediction(prediction):
"""
Decode prediction to age and gender prediction.
Parameters
----------
prediction : list of numpy array
Result from model prediction [gender, age]
Return
----------
gender_predicted : numpy array
Decoded gender 1 male, 0 female
age_predicted : numpy array
Age from regression
"""
gender_predicted = np.around(prediction[0]).astype('int').squeeze()
age_predicted = prediction[1].squeeze()
return gender_predicted, age_predicted
@staticmethod
def prep_image(data):
"""Preproces image specific to model
Parameters
----------
data : numpy ndarray
Array of N images to be preprocessed
Returns
-------
numpy ndarray
Array of preprocessed image
"""
data = data.astype('float16')
return data
if __name__ == '__main__':
model = TwoAgenderSSRNet(64, [3, 3, 3], 1.0, 1.0)
# print(model.summary())
# for (i, layer) in enumerate(model.layers):
# print(i, layer.name)
plot_model(model, 'twossrnet.png')
| 40.837945
| 131
| 0.555265
|
4a083c3129fc7ea8995a08b39a4205b056c3f32d
| 3,114
|
py
|
Python
|
notebooks/image_models/labs/mnist_models/trainer/model.py
|
jfesteban/Google-ASL
|
8e991a437e348b1950cdc351dba39e2d40a6b08f
|
[
"Apache-2.0"
] | null | null | null |
notebooks/image_models/labs/mnist_models/trainer/model.py
|
jfesteban/Google-ASL
|
8e991a437e348b1950cdc351dba39e2d40a6b08f
|
[
"Apache-2.0"
] | null | null | null |
notebooks/image_models/labs/mnist_models/trainer/model.py
|
jfesteban/Google-ASL
|
8e991a437e348b1950cdc351dba39e2d40a6b08f
|
[
"Apache-2.0"
] | null | null | null |
import os
import shutil
import matplotlib.pyplot as plt
import numpy as np
import tensorflow as tf
from tensorflow.keras import Sequential
from tensorflow.keras.callbacks import TensorBoard
from tensorflow.keras.layers import (
Conv2D, Dense, Dropout, Flatten, MaxPooling2D, Softmax)
from . import util
# Image Variables
WIDTH = 28
HEIGHT = 28
def get_layers(
model_type,
nclasses=10,
hidden_layer_1_neurons=400,
hidden_layer_2_neurons=100,
dropout_rate=0.25,
num_filters_1=64,
kernel_size_1=3,
pooling_size_1=2,
num_filters_2=32,
kernel_size_2=3,
pooling_size_2=2):
"""Constructs layers for a keras model based on a dict of model types."""
model_layers = {
'linear': [
Flatten(),
Dense(nclasses),
Softmax()
],
'dnn': [
# TODO
Flatten(),
Dense(hidden_layer_1_neurons, activation='relu'),
Dense(hidden_layer_2_neurons, activation='relu'),
Dense(nclasses),
Softmax()
],
'dnn_dropout': [
# TODO
Flatten(),
Dense(hidden_layer_1_neurons, activation='relu'),
Dropout(dropout_rate),
Dense(hidden_layer_2_neurons, activation='relu'),
Dropout(dropout_rate),
Dense(nclasses),
Softmax()
],
'cnn': [
# TODO
Conv2D(num_filters_1, kernel_size_1, activation='relu', input_shape=(WIDTH, HEIGHT, 1)),
MaxPooling2D(pooling_size_1),
Conv2D(num_filters_2, kernel_size_2, activation='relu'),
MaxPooling2D(pooling_size_2),
Flatten(),
Dense(hidden_layer_1_neurons, activation='relu'),
Dropout(dropout_rate),
Dense(hidden_layer_2_neurons, activation='relu'),
Dropout(dropout_rate),
Dense(nclasses),
Softmax()
]
}
return model_layers[model_type]
def build_model(layers, output_dir):
"""Compiles keras model for image classification."""
model = Sequential(layers)
model.compile(optimizer='adam',
loss='categorical_crossentropy',
metrics=['accuracy'])
return model
def train_and_evaluate(model, num_epochs, steps_per_epoch, output_dir):
"""Compiles keras model and loads data into it for training."""
mnist = tf.keras.datasets.mnist.load_data()
train_data = util.load_dataset(mnist)
validation_data = util.load_dataset(mnist, training=False)
callbacks = []
if output_dir:
tensorboard_callback = TensorBoard(log_dir=output_dir)
callbacks = [tensorboard_callback]
history = model.fit(
train_data,
validation_data=validation_data,
epochs=num_epochs,
steps_per_epoch=steps_per_epoch,
verbose=2,
callbacks=callbacks)
if output_dir:
export_path = os.path.join(output_dir, 'keras_export')
model.save(export_path, save_format='tf')
return history
| 28.833333
| 100
| 0.613359
|
4a083d1f6613edbbcf002a209279995d56afa3a7
| 7,608
|
py
|
Python
|
runtime_mgr_api/api.py
|
adam-j-turner/runtime-mgr-api
|
a10b1034ec6f731eb1eaa82bb090913b815cdf09
|
[
"MIT"
] | 1
|
2018-11-14T19:02:41.000Z
|
2018-11-14T19:02:41.000Z
|
runtime_mgr_api/api.py
|
adam-j-turner/runtime-mgr-api
|
a10b1034ec6f731eb1eaa82bb090913b815cdf09
|
[
"MIT"
] | null | null | null |
runtime_mgr_api/api.py
|
adam-j-turner/runtime-mgr-api
|
a10b1034ec6f731eb1eaa82bb090913b815cdf09
|
[
"MIT"
] | null | null | null |
from .constants import *
import datetime
import functools
import requests
import hashlib
import time
import re
class UnauthorizedError(Exception):
pass
class AnypointAuthError(Exception):
pass
class AnypointRequestError(Exception):
pass
class OrgError(Exception):
pass
class EnvError(Exception):
pass
class DeployError(Exception):
pass
class ModifyAppError(Exception):
pass
class ComponentError(Exception):
pass
class API(object):
def __init__(self, anypointUser, anypointPass, proxy=None, verifySSL=True):
self.session = requests.session()
self.session.verify = verifySSL
if proxy is not None:
self.session.proxies = {"https": proxy}
self.__login(anypointUser, anypointPass)
self.org_context = None
self.env_context = None
self.__refresh_orgs()
self.__refresh_envs()
self.__anypoint_user = anypointUser
self.__anypoint_pass = anypointPass
def __anypoint_request(self, *args, **kwargs):
try:
resp = self.session.request(*args, **kwargs)
except requests.exceptions.Timeout:
raise AnypointRequestError("Timed out during request to Anypoint")
except requests.exceptions.TooManyRedirects:
raise AnypointRequestError(
"Too many redirects during request to Anypoint")
if resp.status_code == 401:
raise UnauthorizedError()
return resp.json(), resp.status_code
def __login(self, un=None, pw=None):
if un is None:
un = self.__anypoint_user
if pw is None:
pw = self.__anypoint_pass
# clear headers so we are not passing token during login
self.session.headers = {}
args = 'POST', ANYPOINT_LOGIN_URL
kwargs = {'data': {
'Content-Type': 'application/json',
'username': un, 'password': pw
}}
try:
auth, code = self.__anypoint_request(*args, **kwargs)
except UnauthorizedError:
raise AnypointAuthError('Invalid credentials')
self.session.headers['Authorization'] = 'Bearer {}'.format(
auth['access_token']
)
@property
def current_org(self):
if self.org_context is None:
return None
return next(
o for o in self.orgs if o['id'] == self.org_context
)
@property
def org_context(self):
return self.__org_context
@org_context.setter
def org_context(self, value):
self.__org_context = value
self.session.headers['X-ANYPNT-ORG-ID'] = value
@property
def current_env(self):
if self.env_context is None:
return None
return next(
e for e in self.envs if e['id'] == self.env_context
)
@property
def env_context(self):
return self.__env_context
@env_context.setter
def env_context(self, value):
self.__env_context = value
self.session.headers['X-ANYPNT-ENV-ID'] = value
def __refresh_orgs(self):
args = 'GET', ANYPOINT_ORG_URL
try:
resp, code = self.__anypoint_request(*args)
self.orgs = resp['user']['memberOfOrganizations']
except AnypointRequestError:
raise OrgError('Could not get Orgs')
if self.org_context is None:
self.org_context = next(
(o for o in self.orgs if o['isMaster']), None
)['id']
def switch_org(self, orgName):
try:
self.org_context = next(
o for o in self.orgs if o['name'] == orgName
)['id']
except StopIteration:
raise OrgError('Could not find desired Org')
self.env_context = None
self.__refresh_envs()
def __refresh_envs(self):
url = ANYPOINT_ENV_URL.format(self.org_context)
args = 'GET', url
try:
resp, code = self.__anypoint_request(*args)
self.envs = resp['data']
except AnypointRequestError:
raise EnvError('Could not get Envs')
def switch_env(self, envName):
try:
self.env_context = next(
e for e in self.envs if e['name'] == envName
)['id']
except StopIteration:
raise EnvError('Could not find desired Env')
def get_apps(self, targetName=None):
args = 'GET', ANYPOINT_APP_URL
resp, code = self.__anypoint_request(*args)
apps = resp['data']
if targetName is not None:
apps = [a for a in apps if a['target']['name'] == targetName]
return apps
def get_servers(self):
args = 'GET', ANYPOINT_SERVER_URL
resp, code = self.__anypoint_request(*args)
return resp['data']
def get_server_groups(self):
args = 'GET', ANYPOINT_SERVERGROUP_URL
resp, code = self.__anypoint_request(*args)
return resp['data']
def get_clusters(self):
args = 'GET', ANYPOINT_CLUSTER_URL
resp, code = self.__anypoint_request(*args)
return resp['data']
def get_targets(self):
return (
self.get_servers() + self.get_server_groups() + self.get_clusters()
)
def __verify_app_name(self, appName):
assert len(appName) > APP_MIN_LEN, "App name too short"
assert len(appName) <= APP_MAX_LEN, "App name too long"
assert (not appName.startswith('-') and not appName.endswith('-')), \
"App name starts or ends with a dash"
assert re.search(APP_CHAR_REGEX, appName) is None, \
"App name has invalid characters"
def deploy_app(self, appName, zipFile, targetId=None, targetName=None):
try:
self.__verify_app_name(appName)
except AssertionError as e:
raise DeployError('App name invalid: {}'.format(str(e)))
if targetId is None:
targets = self.get_targets()
try:
targetId = next(
t for t in targets if t['name'] == targetName
)['id']
except StopIteration:
raise DeployError('Target server or cluster not found')
args = 'POST', ANYPOINT_APP_URL
kwargs = {'files': {
'artifactName': appName,
'file': zipFile,
'targetId': str(targetId)
}}
resp, code = self.__anypoint_request(*args, **kwargs)
if code != 202:
raise DeployError(
'Deploy failed with HTTP code {}: {}'.format(
str(code), resp['message']
)
)
def update_app(self, appName, zipFile, verify=True):
apps = self.get_apps()
try:
app = next(
a for a in apps if a['name'] == appName
)
except StopIteration:
raise DeployError('Target app not found')
if verify:
localhash = hashlib.sha1(zipFile.read()).hexdigest()
zipFile.seek(0)
if localhash == app['artifact']['fileChecksum']:
raise DeployError('Application is already up-to-date')
args = 'PATCH', '{}/{}'.format(ANYPOINT_APP_URL, str(app['id']))
kwargs = {'files': {'file': zipFile}}
resp, code = self.__anypoint_request(*args, **kwargs)
if code != 200:
raise DeployError(
'App update failed with HTTP code {}: {}'.format(
str(code), resp['message']
)
)
| 27.970588
| 79
| 0.57663
|
4a083d7ca57bbe8ab58b40305420bbb32e912a9a
| 9,709
|
py
|
Python
|
images/pannotator/p_procariota/gbf2gff.py
|
ezequieljsosa/sndg-bio
|
5f709b5b572564ec1dfa40d090eca9a34295743e
|
[
"MIT"
] | null | null | null |
images/pannotator/p_procariota/gbf2gff.py
|
ezequieljsosa/sndg-bio
|
5f709b5b572564ec1dfa40d090eca9a34295743e
|
[
"MIT"
] | null | null | null |
images/pannotator/p_procariota/gbf2gff.py
|
ezequieljsosa/sndg-bio
|
5f709b5b572564ec1dfa40d090eca9a34295743e
|
[
"MIT"
] | 1
|
2020-09-01T15:57:54.000Z
|
2020-09-01T15:57:54.000Z
|
#!/usr/bin/python
import sys
import re
import getopt
from copy import copy
from Bio import SeqIO
def help():
print "Conversion gbf -> tbl + gff3 + fasta.\n\
Opciones:\n\
-i Archivo de entrada gbf. Default: contigs.gbf\n\
-t Archivo de salida tbl. Default: archivo_de_entrada.tbl\n\
-g Archivo de salida gff3. Default: archivo_de_entrada.gff3\n\
-f Archivo de salida fasta. Default: archivo_de_entrada.fasta\n\
-o Nombre del organismo Default: vacio\n\
-s ID de Cepa. Default: vacio\n\
-n ID de NCBI Project. Default: vacio\n\
-h Imprime este mensaje de ayuda\n"
try:
options, args = getopt.getopt(sys.argv[1:], "i:t:f:o:s:n:h")
except getopt.GetoptError as err:
print str(err)
sys.exit(2)
params = {}
params["i"] = "contigs.gbf"
params["t"] = ""
params["g"] = ""
params["f"] = ""
params["o"] = ""
params["s"] = ""
params["n"] = ""
for option, value in options:
if option.startswith("-"): option = option[1:]
if option in params.keys(): params[option] = value
if option == "h":
help()
sys.exit()
if not params["t"]:
params["t"] = ".".join(params["i"].split(".")[:-1]) + ".tbl"
if not params["f"]:
params ["f"] = ".".join(params["i"].split(".")[:-1]) + ".fasta"
if not params["g"]:
params ["g"] = ".".join(params["i"].split(".")[:-1]) + ".gff3"
if params["n"]:
params["n"] = "_" + params["n"]
def find_gene_entry(features, locus_tag):
for f in features:
if f.type == 'gene':
if f.qualifiers['locus_tag'][0] == locus_tag:
return f
raise ValueError
# Escapado de caracteres con significado especifico en gff3
def formatogff(cadena):
cadenagff=re.sub("%", "%25", cadena, flags=re.I)
cadenagff=re.sub(";", "%3B", cadenagff, flags=re.I)
cadenagff=re.sub("=", "%3D", cadenagff, flags=re.I)
cadenagff=re.sub("&", "%26", cadenagff, flags=re.I)
cadenagff=re.sub(",", "%2C", cadenagff, flags=re.I)
cadenagff=re.sub("\t", "%09", cadenagff, flags=re.I)
cadenagff=re.sub("\n", "%0A", cadenagff, flags=re.I)
cadenagff=re.sub("\r", "%0D", cadenagff, flags=re.I)
return cadenagff
coding = ['CDS', 'tRNA', 'rRNA']
seqid = 0
featid = 0
fasta_fh = open(params["f"], "w")
feature_fh = open(params["t"], "w")
gff3_fh = open(params["g"],"w")
allowed_tags = ['locus_tag', 'gene', 'product', 'pseudo', 'protein_id', 'gene_desc', 'old_locus_tag']
records = list(SeqIO.parse(params["i"], "genbank"))
gff3_featsCDS = ''
for rec in records:
input_number = rec.name[-3:]
for f in rec.features:
if f.type in coding and 'gene' in f.qualifiers:
f2 = find_gene_entry(rec.features, f.qualifiers['locus_tag'][0])
f2.qualifiers['gene'] = f.qualifiers['gene']
del f.qualifiers['gene']
if 'locus_tag' in f.qualifiers:
rec.locus_tag = f.qualifiers['locus_tag'][0].split("_")[0]
else:
rec.locus_tag = ""
if 'transl_table' in f.qualifiers:
rec.trans_tab = f.qualifiers['transl_table'][0].split("_")[0]
else:
rec.trans_tab = 11
rec.defline = rec.name + " " + rec.description.split(".")[0]
for rec in records:
seqid += 1
mol_type = rec.annotations.get('molecule', 'circular')
rec.description = "[organism=%s] [strain=%s] [topology=%s] [molecule=DNA] [tech=wgs] [gcode=11]" % (params["o"], params["s"], mol_type)
SeqIO.write([rec], fasta_fh, "fasta")
print >>feature_fh, ">Feature %s" % (rec.name)
print >> gff3_fh, "##gff-version 3"
print >> gff3_fh, "##feature-ontology so.obo"
for f in rec.features:
gff3_feats = {}
if f.type == 'source':
organism = f.qualifiers["organism"][0]
gff3id = "_".join(organism.split())
if f.strand == 1:
start = f.location.nofuzzy_start + 1
end = f.location.nofuzzy_end
gff3_feats['strand'] = '+'
else:
start = f.location.nofuzzy_end
end = f.location.nofuzzy_start + 1
gff3_feats['strand'] = '-'
print >>feature_fh, "%d\t%d\t%s" % (start, end, f.type)
gff3_feats['start'] = f.location.nofuzzy_start + 1
gff3_feats['end'] = f.location.nofuzzy_end
if f.type == 'CDS' and 'product' not in f.qualifiers:
f.qualifiers['product'] = ['hypothetical protein']
if f.type == 'CDS':
f.qualifiers['protein_id'] = ["gnl|ProjectID%s|%s" % (params["n"], f.qualifiers['locus_tag'][0])]
if f.type == 'rRNA':
f.qualifiers['product'] = [f.qualifiers['product'][0].split("S")[0] + "s_rRNA" ]
if f.type in coding:
del f.qualifiers['locus_tag']
for key, vals in f.qualifiers.iteritems():
my_allowed_tags = copy(allowed_tags)
if 'pseudo' or 'note' in f.qualifiers:
my_allowed_tags.append('note')
if 'EC_number' in key:
my_allowed_tags.append('EC_number')
vals = [";".join(vals)]
if key not in my_allowed_tags:
continue
# print vals
for v in vals:
if len(v) or key == 'pseudo':
print >>feature_fh, "\t\t\t%s\t%s" % (key, v)
if key == 'gene':
gff3_feats['gene'] = v
if key == 'product':
gff3_feats['product'] = v
if key == 'EC_number':
gff3_feats['EC'] = ";EC=" + ",".join(v.split(";"))
if key == 'note':
if re.search("COG", v):
gff3_feats['note'] = ";top_cog_hit=" + formatogff(v)
if re.match("tRNA", v):
gff3_feats['note'] = v
if f.type == 'source':
print >> gff3_fh, "%s\tgenbankfile\tcontig\t%d\t%d\t.\t%s\t.\tID=%s;translation_table=%s;organism_name=%s;abbreviation=%s;defline=%s;Name=%s" % (formatogff(rec.id), gff3_feats['start'], gff3_feats['end'], gff3_feats['strand'], formatogff(rec.name), rec.trans_tab, formatogff(f.qualifiers["organism"][0]), formatogff(rec.locus_tag), formatogff(rec.defline),formatogff(rec.name))
if f.type == 'gene':
if 'gene' in gff3_feats:
gff3_featsCDS = ";gene_symbol=" + gff3_feats['gene']
featid += 1
print >> gff3_fh, "%s\tgenbankfile\tgene\t%d\t%d\t.\t%s\t.\tID=%s.gene.%s%s" % (formatogff(rec.id), gff3_feats['start'], gff3_feats['end'], gff3_feats['strand'], formatogff(gff3id), input_number, featid)
if f.type == 'CDS':
if not 'EC' in gff3_feats:
gff3_feats['EC'] = ''
if not 'product' in gff3_feats:
gff3_feats['product'] = ''
if not 'note' in gff3_feats:
gff3_feats['note'] = ''
print >> gff3_fh, "%s\tgenbankfile\texon\t%d\t%d\t.\t%s\t0\tID=%s.exon.%s%s;Parent=%s.transcript.%s%s" % (formatogff(rec.id), gff3_feats['start'], gff3_feats['end'], gff3_feats['strand'], formatogff(gff3id), input_number, featid, formatogff(gff3id), input_number, featid)
print >> gff3_fh, "%s\tgenbankfile\tmRNA\t%d\t%d\t.\t%s\t.\tID=%s.transcript.%s%s;Parent=%s.gene.%s%s%s%s;Note=%s;Name=%s.transcript.%s%s;%s" % (formatogff(rec.id), gff3_feats['start'], gff3_feats['end'], gff3_feats['strand'], formatogff(gff3id), input_number, featid, formatogff(gff3id), input_number, featid, gff3_feats['EC'], gff3_featsCDS, formatogff(gff3_feats['product']), formatogff(gff3id), input_number, featid, gff3_feats['note'])
print >> gff3_fh, "%s\tgenbankfile\tCDS\t%d\t%d\t.\t%s\t0\tID=%s.CDS.%s%s;Parent=%s.transcript.%s%s" % (formatogff(rec.id), gff3_feats['start'], gff3_feats['end'], gff3_feats['strand'], formatogff(gff3id), input_number, featid, formatogff(gff3id), input_number, featid)
gff3_featsCDS = ''
if f.type == 'tRNA':
if not 'product' in gff3_feats:
gff3_feats['product'] = ''
if 'note' in gff3_feats:
gff3_feats['product'] = gff3_feats['note']
print >> gff3_fh, "%s\tgenbankfile\texon\t%d\t%d\t.\t%s\t0\tID=%s.exon.%s%s" % (formatogff(rec.id), gff3_feats['start'], gff3_feats['end'], gff3_feats['strand'], formatogff(gff3id), input_number, featid)
print >> gff3_fh, "%s\tgenbankfile\ttRNA\t%d\t%d\t.\t%s\t.\tID=%s.transcript.%s%s;Note=%s;description=%s" % (formatogff(rec.id), gff3_feats['start'], gff3_feats['end'], gff3_feats['strand'], formatogff(gff3id), input_number, featid, formatogff(gff3_feats['product']), formatogff(gff3_feats['product']))
print >> gff3_fh, "%s\tgenbankfile\tCDS\t%d\t%d\t.\t%s\t0\tID=%s.CDS.%s%s" % (formatogff(rec.id), gff3_feats['start'], gff3_feats['end'], gff3_feats['strand'],formatogff(gff3id), input_number, featid)
if f.type == 'rRNA':
print >> gff3_fh, "%s\tgenbankfile\texon\t%d\t%d\t.\t%s\t0\tID=%s.exon.%s%s" % (formatogff(rec.id), gff3_feats['start'], gff3_feats['end'], gff3_feats['strand'], formatogff(gff3id), input_number, featid)
print >> gff3_fh, "%s\tgenbankfile\trRNA\t%d\t%d\t.\t%s\t.\tID=%s.transcript.%s%s;Note=%s;description=%s" % (formatogff(rec.id), gff3_feats['start'], gff3_feats['end'], gff3_feats['strand'], formatogff(gff3id), input_number, featid, formatogff(gff3_feats['product']), formatogff(gff3_feats['product']))
print >> gff3_fh, "%s\tgenbankfile\tCDS\t%d\t%d\t.\t%s\t0\tID=%s.CDS.%s%s" % (formatogff(rec.id), gff3_feats['start'], gff3_feats['end'], gff3_feats['strand'],formatogff(gff3id), input_number, featid)
feature_fh.close()
fasta_fh.close()
gff3_fh.close()
| 44.949074
| 453
| 0.58523
|
4a083d9bd143996d71fb7d240d9c8704510b9988
| 6,994
|
py
|
Python
|
paddleseg/models/isanet.py
|
wen-flow/PaddleSeg
|
cc18ef1d4d06166539bbeb90c44c79a21d1b8df4
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2021-03-14T13:48:42.000Z
|
2021-03-14T13:48:42.000Z
|
paddleseg/models/isanet.py
|
wen-flow/PaddleSeg
|
cc18ef1d4d06166539bbeb90c44c79a21d1b8df4
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
paddleseg/models/isanet.py
|
wen-flow/PaddleSeg
|
cc18ef1d4d06166539bbeb90c44c79a21d1b8df4
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import math
import paddle
import paddle.nn as nn
import paddle.nn.functional as F
from paddleseg.models import layers
from paddleseg.cvlibs import manager
from paddleseg.utils import utils
@manager.MODELS.add_component
class ISANet(nn.Layer):
"""Interlaced Sparse Self-Attention for Semantic Segmentation.
The original article refers to Lang Huang, et al. "Interlaced Sparse Self-Attention for Semantic Segmentation"
(https://arxiv.org/abs/1907.12273).
Args:
num_classes (int): The unique number of target classes.
backbone (Paddle.nn.Layer): A backbone network.
backbone_indices (tuple): The values in the tuple indicate the indices of output of backbone.
isa_channels (int): The channels of ISA Module.
down_factor (tuple): Divide the height and width dimension to (Ph, PW) groups.
enable_auxiliary_loss (bool, optional): A bool value indicates whether adding auxiliary loss. Default: True.
align_corners (bool): An argument of F.interpolate. It should be set to False when the output size of feature
is even, e.g. 1024x512, otherwise it is True, e.g. 769x769. Default: False.
pretrained (str, optional): The path or url of pretrained model. Default: None.
"""
def __init__(self,
num_classes,
backbone,
backbone_indices=(2, 3),
isa_channels=256,
down_factor=(8, 8),
enable_auxiliary_loss=True,
align_corners=False,
pretrained=None):
super().__init__()
self.backbone = backbone
self.backbone_indices = backbone_indices
in_channels = [self.backbone.feat_channels[i] for i in backbone_indices]
self.head = ISAHead(num_classes, in_channels, isa_channels, down_factor, enable_auxiliary_loss)
self.align_corners = align_corners
self.pretrained = pretrained
self.init_weight()
def forward(self, x):
feats = self.backbone(x)
feats = [feats[i] for i in self.backbone_indices]
logit_list = self.head(feats)
logit_list = [F.interpolate(
logit,
x.shape[2:],
mode='bilinear',
align_corners=self.align_corners,
align_mode=1) for logit in logit_list]
return logit_list
def init_weight(self):
if self.pretrained is not None:
utils.load_entire_model(self, self.pretrained)
class ISAHead(nn.Layer):
"""
The ISAHead.
Args:
num_classes (int): The unique number of target classes.
in_channels (tuple): The number of input channels.
isa_channels (int): The channels of ISA Module.
down_factor (tuple): Divide the height and width dimension to (Ph, PW) groups.
enable_auxiliary_loss (bool, optional): A bool value indicates whether adding auxiliary loss. Default: True.
"""
def __init__(self, num_classes, in_channels, isa_channels, down_factor, enable_auxiliary_loss):
super(ISAHead, self).__init__()
self.in_channels = in_channels[-1]
inter_channels = self.in_channels // 4
self.down_factor = down_factor
self.enable_auxiliary_loss = enable_auxiliary_loss
self.in_conv = layers.ConvBNReLU(self.in_channels, inter_channels, 3, bias_attr=False)
self.global_relation = SelfAttentionBlock(inter_channels, isa_channels)
self.local_relation = SelfAttentionBlock(inter_channels, isa_channels)
self.out_conv = layers.ConvBNReLU(inter_channels * 2, inter_channels, 1, bias_attr=False)
self.cls = nn.Sequential(nn.Dropout2D(p=0.1), nn.Conv2D(inter_channels, num_classes, 1))
self.aux = nn.Sequential(
layers.ConvBNReLU(in_channels=1024, out_channels=256, kernel_size=3, bias_attr=False),
nn.Dropout2D(p=0.1),
nn.Conv2D(256, num_classes, 1))
def forward(self, feat_list):
C3, C4 = feat_list
x = self.in_conv(C4)
n, c, h, w = x.shape
P_h, P_w = self.down_factor
Q_h, Q_w = math.ceil(h / P_h), math.ceil(w / P_w)
pad_h, pad_w = Q_h * P_h - h, Q_w * P_w - w
if pad_h > 0 or pad_w > 0:
padding = [pad_w // 2, pad_w - pad_w // 2, pad_h // 2, pad_h - pad_h // 2]
feat = F.pad(x, padding)
else:
feat = x
feat = feat.reshape([n, c, Q_h, P_h, Q_w, P_w])
feat = feat.transpose([0, 3, 5, 1, 2, 4]).reshape([-1, c, Q_h, Q_w])
feat = self.global_relation(feat)
feat = feat.reshape([n, P_h, P_w, c, Q_h, Q_w])
feat = feat.transpose([0, 4, 5, 3, 1, 2]).reshape([-1, c, P_h, P_w])
feat = self.local_relation(feat)
feat = feat.reshape([n, Q_h, Q_w, c, P_h, P_w])
feat = feat.transpose([0, 3, 1, 4, 2, 5]).reshape([n, c, P_h * Q_h, P_w * Q_w])
if pad_h > 0 or pad_w > 0:
feat = feat[:, :, pad_h // 2:pad_h // 2 + h, pad_w // 2:pad_w // 2 + w]
feat = self.out_conv(paddle.concat([feat, x], axis=1))
output = self.cls(feat)
if self.enable_auxiliary_loss:
auxout = self.aux(C3)
return [output, auxout]
else:
return [output]
class SelfAttentionBlock(layers.AttentionBlock):
    """General self-attention block/non-local block.

    Attends a feature map to itself (key == query == value input) and
    projects the attended context back to ``in_channels``.

    Args:
        in_channels (int): Input channels of key/query feature.
        channels (int): Output channels of key/query transform.
    """

    def __init__(self, in_channels, channels):
        # Configure the generic AttentionBlock for the self-attention case:
        # same tensor feeds key and query, no downsampling, normalized matmul.
        super(SelfAttentionBlock, self).__init__(
            key_in_channels=in_channels,
            query_in_channels=in_channels,
            channels=channels,
            out_channels=in_channels,
            share_key_query=False,
            query_downsample=None,
            key_downsample=None,
            key_query_num_convs=2,
            key_query_norm=True,
            value_out_num_convs=1,
            value_out_norm=False,
            matmul_norm=True,
            with_out=False)
        # Final 1-conv projection applied to the attended context.
        self.output_project = self.build_project(
            in_channels,
            in_channels,
            num_convs=1,
            use_conv_module=True)

    def forward(self, x):
        """Self-attend ``x`` and project the context back to input width."""
        context = super(SelfAttentionBlock, self).forward(x, x)
        return self.output_project(context)
| 39.072626
| 117
| 0.633543
|
4a083dbbd5f8e1568f9425bce4effef57fe58423
| 13,782
|
py
|
Python
|
support/android/android.py
|
kasatani/titanium_mobile
|
714ab28ba58ba12f2339e9bfe54d3479676b6503
|
[
"Apache-2.0"
] | null | null | null |
support/android/android.py
|
kasatani/titanium_mobile
|
714ab28ba58ba12f2339e9bfe54d3479676b6503
|
[
"Apache-2.0"
] | 1
|
2018-10-02T13:36:41.000Z
|
2018-10-02T13:36:41.000Z
|
support/android/android.py
|
kasatani/titanium_mobile
|
714ab28ba58ba12f2339e9bfe54d3479676b6503
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Appcelerator Titanium Mobile
# Copyright (c) 2011 by Appcelerator, Inc. All Rights Reserved.
# Licensed under the terms of the Apache Public License
# Please see the LICENSE included with this distribution for details.
#
# Android Application Script
#
import os, sys, shutil, platform, zipfile
import string, subprocess, re
from mako.template import Template
from xml.etree.ElementTree import ElementTree
from StringIO import StringIO
from os.path import join, splitext, split, exists
from shutil import copyfile
from androidsdk import AndroidSDK
from compiler import Compiler
import bindings
# Resolve this script's directory so sibling Titanium support modules
# (tiapp, manifest, module) become importable below.
template_dir = os.path.abspath(os.path.dirname(sys._getframe(0).f_code.co_filename))
module_dir = os.path.join(os.path.dirname(template_dir), 'module')
sys.path.extend([os.path.dirname(template_dir), module_dir])
from tiapp import TiAppXML, touch_tiapp_xml
from manifest import Manifest
from module import ModuleDetector
import simplejson

# Files and directories skipped when copying project resources.
ignoreFiles = ['.gitignore', '.cvsignore', '.DS_Store'];
ignoreDirs = ['.git','.svn','_svn', 'CVS'];
def run(args):
    """Execute *args* and return its combined stdout+stderr output as bytes."""
    process = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    output, _ = process.communicate()
    return output
def pipe(args1, args2):
    """Run ``args1 | args2`` and return the second command's stdout as bytes."""
    producer = subprocess.Popen(args1, stdout=subprocess.PIPE)
    consumer = subprocess.Popen(args2, stdin=producer.stdout, stdout=subprocess.PIPE)
    return consumer.communicate()[0]
def copy_resources(source, target):
    """Recursively copy *source* into *target*, skipping VCS/OS junk.

    Directories in ``ignoreDirs`` are pruned from the walk and files in
    ``ignoreFiles`` are skipped; missing destination directories are created.
    """
    if not os.path.exists(os.path.expanduser(target)):
        os.mkdir(os.path.expanduser(target))
    for root, dirs, files in os.walk(source):
        for name in ignoreDirs:
            if name in dirs:
                dirs.remove(name)  # don't visit ignored directories
        for file in files:
            if file in ignoreFiles:
                continue
            from_ = join(root, file)
            # Re-root the path from *source* to *target* (first occurrence only).
            to_ = os.path.expanduser(from_.replace(source, target, 1))
            to_directory = os.path.expanduser(split(to_)[0])
            if not exists(to_directory):
                os.makedirs(to_directory)
            print "[TRACE] copying: %s to: %s" % (from_,to_)
            copyfile(from_, to_)
class Android(object):
def __init__(self, name, myid, sdk, deploy_type, java):
self.name = name
# android requires at least one dot in packageid
if len(re.findall(r'\.',myid))==0:
myid = 'com.%s' % myid
self.id = myid
self.sdk = sdk
# Used in templating
self.config = {
'appid': self.id,
'appname' : self.name,
'appversion' : '1',
'apiversion' : '7', #Android 2.1
'deploy_type': deploy_type
}
self.config['classname'] = Android.strip_classname(self.name)
self.deploy_type = deploy_type
self.java = java
@classmethod
def strip_classname(cls, name):
classname = ''.join([str.capitalize() for str in re.split('[^A-Za-z0-9_]', name)])
if re.search("^[0-9]", classname) != None:
classname = "_" + classname
return classname
def newdir(self, *segments):
path = os.path.join(*segments)
if not os.path.exists(path):
os.makedirs(path)
return path
def copyfile(self, file, src, dest):
shutil.copy(os.path.join(src, file), os.path.join(dest, file))
def load_template(self, template):
return Template(filename=template, output_encoding='utf-8', encoding_errors='replace')
def render_android_manifest(self):
template_dir = os.path.dirname(sys._getframe(0).f_code.co_filename)
tmpl = self.load_template(os.path.join(template_dir, 'templates', 'AndroidManifest.xml'))
return tmpl.render(config = self.config)
def render(self, template_dir, template_file, dest, dest_file, **kwargs):
tmpl = self.load_template(os.path.join(template_dir, 'templates', template_file))
f = None
try:
print "[TRACE] Generating %s" % os.path.join(dest, dest_file)
f = open(os.path.join(dest, dest_file), "w")
f.write(tmpl.render(config = self.config, **kwargs))
finally:
if f!=None: f.close
def build_app_info(self, project_dir):
tiapp = ElementTree()
assets_tiappxml = os.path.join(project_dir, 'build', 'android', 'bin', 'assets', 'tiapp.xml')
self.app_info = {'fullscreen':'false','navbar-hidden':'false'}
self.app_properties = {}
if not os.path.exists(assets_tiappxml):
shutil.copy(os.path.join(project_dir, 'tiapp.xml'), assets_tiappxml)
tiapp.parse(open(assets_tiappxml, 'r'))
for key in ['id', 'name', 'version', 'publisher', 'url', 'copyright',
'description', 'icon', 'analytics', 'guid', 'navbar-hidden', 'fullscreen']:
el = tiapp.find(key)
if el != None:
self.app_info[key] = el.text
for property_el in tiapp.findall("property"):
name = property_el.get("name")
type = property_el.get("type")
value = property_el.text
if name == None: continue
if type == None: type = "string"
if value == None: value = ""
self.app_properties[name] = {"type": type, "value": value}
def generate_activities(self, app_package_dir):
if not 'activities' in self.tiapp.android: return
for key in self.tiapp.android['activities'].keys():
activity = self.tiapp.android['activities'][key]
print '[DEBUG] generating activity class: ' + activity['classname']
self.render(template_dir, 'JSActivity.java', app_package_dir, activity['classname']+'.java', activity=activity)
def generate_services(self, app_package_dir):
if not 'services' in self.tiapp.android: return
for key in self.tiapp.android['services'].keys():
service = self.tiapp.android['services'][key]
service_type = service['service_type']
print '[DEBUG] generating service type "%s", class "%s"' %(service_type, service['classname'])
if service_type == 'interval':
self.render(template_dir, 'JSIntervalService.java', app_package_dir, service['classname']+'.java', service=service)
else:
self.render(template_dir, 'JSService.java', app_package_dir, service['classname']+'.java', service=service)
def build_modules_info(self, resources_dir, app_bin_dir, include_all_ti_modules=False):
self.app_modules = []
(modules, external_child_modules) = bindings.get_all_module_bindings()
compiler = Compiler(self.tiapp, resources_dir, self.java, app_bin_dir, os.path.dirname(app_bin_dir),
include_all_modules=include_all_ti_modules)
compiler.compile(compile_bytecode=False, info_message=None)
for module in compiler.modules:
module_bindings = []
# TODO: we should also detect module properties
for method in compiler.module_methods:
if method.lower().startswith(module+'.') and '.' not in method:
module_bindings.append(method[len(module)+1:])
module_onAppCreate = None
module_class = None
module_apiName = None
for m in modules.keys():
if modules[m]['fullAPIName'].lower() == module:
module_class = m
module_apiName = modules[m]['fullAPIName']
if 'onAppCreate' in modules[m]:
module_onAppCreate = modules[m]['onAppCreate']
break
if module_apiName == None: continue # module wasn't found
ext_modules = []
if module_class in external_child_modules:
for child_module in external_child_modules[module_class]:
if child_module['fullAPIName'].lower() in compiler.modules:
ext_modules.append(child_module)
self.app_modules.append({
'api_name': module_apiName,
'class_name': module_class,
'bindings': module_bindings,
'external_child_modules': ext_modules,
'on_app_create': module_onAppCreate
})
# discover app modules
detector = ModuleDetector(self.project_dir)
missing, detected_modules = detector.find_app_modules(self.tiapp, 'android')
for missing_module in missing: print '[WARN] Couldn\'t find app module: %s' % missing_module['id']
self.custom_modules = []
for module in detected_modules:
if module.jar == None: continue
module_jar = zipfile.ZipFile(module.jar)
module_bindings = bindings.get_module_bindings(module_jar)
if module_bindings is None: continue
for module_class in module_bindings['modules'].keys():
module_apiName = module_bindings['modules'][module_class]['apiName']
module_proxy = module_bindings['proxies'][module_class]
module_id = module_proxy['proxyAttrs']['id']
module_proxy_class_name = module_proxy['proxyClassName']
module_onAppCreate = None
if 'onAppCreate' in module_proxy:
module_onAppCreate = module_proxy['onAppCreate']
print '[DEBUG] module_id = %s' % module_id
if module_id == module.manifest.moduleid:
# make sure that the module was not built before 1.8.0.1
try:
module_api_version = int(module.manifest.apiversion)
if module_api_version < 2:
print "[ERROR] The 'apiversion' for '%s' in the module manifest is less than version 2. The module was likely built against a Titanium SDK pre 1.8.0.1. Please use a version of the module that has 'apiversion' 2 or greater" % module_id
touch_tiapp_xml(os.path.join(self.project_dir, 'tiapp.xml'))
sys.exit(1)
except(TypeError, ValueError):
print "[ERROR] The 'apiversion' for '%s' in the module manifest is not a valid value. Please use a version of the module that has an 'apiversion' value of 2 or greater set in it's manifest file" % module_id
touch_tiapp_xml(os.path.join(self.project_dir, 'tiapp.xml'))
sys.exit(1)
print '[DEBUG] appending module: %s' % module_class
self.custom_modules.append({
'module_id': module_id,
'module_apiName': module_apiName,
'proxy_name': module_proxy_class_name,
'class_name': module_class,
'manifest': module.manifest,
'on_app_create': module_onAppCreate
})
def create(self, dir, build_time=False, project_dir=None, include_all_ti_modules=False):
template_dir = os.path.dirname(sys._getframe(0).f_code.co_filename)
# Build up output directory tree
if project_dir is None:
project_dir = self.newdir(dir, self.name)
self.project_dir = project_dir
# Paths to Titanium assets that need to be linked into eclipse structure
self.config['ti_tiapp_xml'] = os.path.join(project_dir, 'tiapp.xml')
self.tiapp = TiAppXML(self.config['ti_tiapp_xml'])
resource_dir = os.path.join(project_dir, 'Resources')
self.config['ti_resources_dir'] = resource_dir
json_contents = open(os.path.join(template_dir,'dependency.json')).read()
depends_map = simplejson.loads(json_contents)
runtime = depends_map['runtimes']['defaultRuntime']
if self.tiapp.has_app_property("ti.android.runtime"):
requested_runtime = self.tiapp.get_app_property("ti.android.runtime")
if requested_runtime == "rhino" or requested_runtime == "v8":
runtime = requested_runtime
else:
print "[ERROR] invalid runtime \"" + requested_runtime + "\" requested, must be 'v8' or 'rhino'"
sys.exit(1);
app_build_dir = self.newdir(project_dir, 'build')
app_dir = self.newdir(app_build_dir, 'android')
#if os.path.exists(os.path.join(app_dir,'bin')):
# shutil.rmtree(os.path.join(app_dir,'bin'))
if os.path.exists(os.path.join(app_dir,'src')):
shutil.rmtree(os.path.join(app_dir,'src'))
if os.path.exists(os.path.join(app_dir,'res')):
shutil.rmtree(os.path.join(app_dir,'res'))
app_bin_dir = self.newdir(app_dir, 'bin')
app_lib_dir = self.newdir(app_dir, 'lib')
app_src_dir = self.newdir(app_dir, 'src')
app_res_dir = self.newdir(app_dir, 'res')
app_gen_dir = self.newdir(app_dir, 'gen')
app_bin_classes_dir = self.newdir(app_bin_dir, 'classes')
app_res_drawable_dir = self.newdir(app_res_dir, 'drawable')
app_assets_dir = self.newdir(app_dir, 'assets')
app_package_dir = self.newdir(app_gen_dir, *self.id.split('.'))
app_bin_assets_dir = self.newdir(app_bin_dir, 'assets')
self.build_app_info(project_dir)
self.build_modules_info(resource_dir, app_bin_dir, include_all_ti_modules=include_all_ti_modules)
# Create android source
self.render(template_dir, 'AppInfo.java', app_package_dir, self.config['classname'] + 'AppInfo.java',
app_properties = self.app_properties, app_info = self.app_info)
self.render(template_dir, 'AndroidManifest.xml', app_dir, 'AndroidManifest.xml')
self.render(template_dir, 'App.java', app_package_dir, self.config['classname'] + 'Application.java',
app_modules = self.app_modules, custom_modules = self.custom_modules, runtime = runtime)
self.render(template_dir, 'Activity.java', app_package_dir, self.config['classname'] + 'Activity.java')
self.generate_activities(app_package_dir)
self.generate_services(app_package_dir)
self.render(template_dir, 'classpath', app_dir, '.classpath')
self.render(template_dir, 'project', app_dir, '.project')
self.render(template_dir, 'default.properties', app_dir, 'default.properties')
print "[TRACE] Generating app.json"
f = None
try:
f = open(os.path.join(app_bin_assets_dir, "app.json"), "w")
f.write(simplejson.dumps({"app_modules":self.app_modules}))
finally:
if f is not None:
f.close()
# Don't override a pre-existing .gitignore in case users have their own preferences
# for what should be in it. (LH #2446)
if not os.path.exists(os.path.join(app_dir, '.gitignore')):
self.render(template_dir, 'gitignore', app_dir, '.gitignore')
else:
print "[TRACE] Skipping copying gitignore -> .gitignore because already exists"
android_project_resources = os.path.join(project_dir,'Resources','android')
if build_time==False and os.path.exists(android_project_resources):
shutil.rmtree(android_project_resources)
if not os.path.exists(android_project_resources):
copy_resources(os.path.join(template_dir,'resources'),android_project_resources)
if __name__ == '__main__':
    # this is for testing only for the time being
    # Usage: android.py <name> <id> <directory> <sdk>
    if len(sys.argv) != 5 or sys.argv[1]=='--help':
        print "Usage: %s <name> <id> <directory> <sdk>" % os.path.basename(sys.argv[0])
        sys.exit(1)
    sdk = AndroidSDK(sys.argv[4])
    # deploy_type=None and java='java' are fixed for this test entry point.
    android = Android(sys.argv[1], sys.argv[2], sdk, None, 'java')
    android.create(sys.argv[3])
| 39.83237
| 243
| 0.71753
|
4a083ed7081297469c42be3f1f942781414654e6
| 511
|
py
|
Python
|
mmdet/models/backbones/__init__.py
|
azuredsky/RepPointsV2
|
735a585b365e223e5cac10b431d13d279595c144
|
[
"MIT"
] | 295
|
2020-07-16T13:03:29.000Z
|
2022-03-29T05:20:12.000Z
|
mmdet/models/backbones/__init__.py
|
azuredsky/RepPointsV2
|
735a585b365e223e5cac10b431d13d279595c144
|
[
"MIT"
] | 23
|
2020-07-17T03:05:08.000Z
|
2021-05-20T19:01:07.000Z
|
mmdet/models/backbones/__init__.py
|
azuredsky/RepPointsV2
|
735a585b365e223e5cac10b431d13d279595c144
|
[
"MIT"
] | 50
|
2020-07-17T02:16:52.000Z
|
2022-03-02T12:45:21.000Z
|
from .detectors_resnet import DetectoRS_ResNet
from .detectors_resnext import DetectoRS_ResNeXt
from .hourglass import HourglassNet
from .hrnet import HRNet
from .mobilenet import MobileNetV2
from .regnet import RegNet
from .res2net import Res2Net
from .resnet import ResNet, ResNetV1d
from .resnext import ResNeXt
from .ssd_vgg import SSDVGG

# Public re-exports: every backbone implementation available in this package.
__all__ = [
    'RegNet', 'ResNet', 'ResNetV1d', 'ResNeXt', 'SSDVGG', 'HRNet', 'Res2Net',
    'HourglassNet', 'DetectoRS_ResNet', 'DetectoRS_ResNeXt', 'MobileNetV2'
]
| 31.9375
| 77
| 0.782779
|
4a083f4e33d5d8c21484ace1c6b4906b0aa764e8
| 546
|
py
|
Python
|
utilities/TNPy/TNFunc.py
|
aniabrown/QuEST-TN
|
8e0c8686859531d670d537af5eec03b7232f6b26
|
[
"MIT"
] | null | null | null |
utilities/TNPy/TNFunc.py
|
aniabrown/QuEST-TN
|
8e0c8686859531d670d537af5eec03b7232f6b26
|
[
"MIT"
] | 1
|
2020-02-06T07:02:40.000Z
|
2021-03-01T14:44:40.000Z
|
utilities/TNPy/TNFunc.py
|
aniabrown/QuEST-TN
|
8e0c8686859531d670d537af5eec03b7232f6b26
|
[
"MIT"
] | null | null | null |
from QuESTPy.QuESTTypes import *
from .TNTypes import *

# Public API
# Each TNTestee binds a C library entry point with its ctypes signature
# (return type and argument types).

# Contract the given index pairs between two tensors, returning the result.
contractIndices = TNTestee ("contractIndices", retType=Tensor, argType=[Tensor, Tensor, POINTER(c_int), POINTER(c_int), c_int, \
    POINTER(c_int), c_int, POINTER(c_int), c_int,
    QuESTEnv])
# Mark a qubit of the tensor as a virtual target / virtual control.
initVirtualTarget = TNTestee("initVirtualTarget", retType=None, argType=[Tensor, c_int])
initVirtualControl = TNTestee("initVirtualControl", retType=None, argType=[Tensor, c_int])
# Allocate a tensor with the given physical and virtual qubit counts.
createTensor = TNTestee("createTensor", retType=Tensor, argType=[c_int, c_int, QuESTEnv])
| 36.4
| 128
| 0.747253
|
4a084053a1fc3c37636acf6ae60d20e025916221
| 1,050
|
py
|
Python
|
recorder/main.py
|
RamtinAlami/BCI-Drone-Project
|
4c8c27d16433d3cdd856deac49569d3128c6a254
|
[
"MIT"
] | null | null | null |
recorder/main.py
|
RamtinAlami/BCI-Drone-Project
|
4c8c27d16433d3cdd856deac49569d3128c6a254
|
[
"MIT"
] | null | null | null |
recorder/main.py
|
RamtinAlami/BCI-Drone-Project
|
4c8c27d16433d3cdd856deac49569d3128c6a254
|
[
"MIT"
] | null | null | null |
import curses
import time
import random
from curses.textpad import rectangle
# def start():
def main(stdscr):
    """Curses entry point: repeatedly flash a random square.

    Draws three squares, then in an endless loop waits a random 1-3 s,
    highlights a random square for 0.7 s, and clears it again.
    """
    curses.curs_set(0)
    draw_squares(stdscr)
    while True:
        selected = random.randint(0, 2)
        # Random delay in [1, 3) seconds before the next highlight.
        wait = random.random() * 2 + 1
        time.sleep(wait)
        draw_squares(stdscr, selected=selected)
        time.sleep(0.7)
        draw_squares(stdscr)
    # NOTE(review): unreachable -- the loop above never exits.
    stdscr.getkey()
def draw_squares(stdscr, selected=-1):
    """Draw three side-by-side boxes inside an outer frame.

    When ``selected`` is 0-2, the corresponding box is filled in;
    -1 (default) draws only the outlines.
    """
    rows, cols = stdscr.getmaxyx()
    stdscr.clear()
    # Outer frame with a fixed margin.
    rectangle(stdscr, 1, 5, rows - 2, cols - 5)
    box_width = (cols - 14) // 3
    boxes = []
    left = 6
    for _ in range(3):
        boxes.append((2, left, rows - 3, left + box_width))
        left += box_width + 1
    for top, lft, bottom, right in boxes:
        rectangle(stdscr, top, lft, bottom, right)
    if selected > -1:
        top, lft, bottom, right = boxes[selected]
        # "Fill" by stacking progressively taller rectangles.
        for row in range(top, bottom):
            rectangle(stdscr, top, lft, row, right)
    stdscr.refresh()
curses.wrapper(main)
| 22.826087
| 73
| 0.591429
|
4a08408f40b51db7fecd4d925dc984b31e772237
| 793
|
py
|
Python
|
tap_amazon_advertising/__init__.py
|
fishtown-analytics/tap-amazon-advertising
|
04aecf16a9c4f418f00b67b85fc0f4dc5db3f171
|
[
"Apache-2.0"
] | 2
|
2019-09-10T15:25:39.000Z
|
2019-12-12T14:50:05.000Z
|
tap_amazon_advertising/__init__.py
|
dbt-labs/tap-amazon-advertising
|
04aecf16a9c4f418f00b67b85fc0f4dc5db3f171
|
[
"Apache-2.0"
] | 1
|
2021-02-17T13:32:02.000Z
|
2021-02-24T17:52:02.000Z
|
tap_amazon_advertising/__init__.py
|
fishtown-analytics/tap-amazon-advertising
|
04aecf16a9c4f418f00b67b85fc0f4dc5db3f171
|
[
"Apache-2.0"
] | 5
|
2019-09-10T15:25:47.000Z
|
2020-11-03T11:55:10.000Z
|
#!/usr/bin/env python3
import singer
import tap_framework
from tap_amazon_advertising.client import AmazonAdvertisingClient
from tap_amazon_advertising.streams import AVAILABLE_STREAMS
LOGGER = singer.get_logger() # noqa
class AmazonAdvertisingRunner(tap_framework.Runner):
    """Tap runner; inherits discovery/sync behavior from tap_framework.Runner."""
    pass
@singer.utils.handle_top_exception(LOGGER)
def main():
    """Singer tap entry point: parse CLI args, then discover or sync."""
    args = singer.utils.parse_args(
        required_config_keys=['client_id', 'client_secret', 'refresh_token',
                              'redirect_uri', 'profile_id', 'start_date'])
    client = AmazonAdvertisingClient(args.config)
    runner = AmazonAdvertisingRunner(
        args, client, AVAILABLE_STREAMS)
    # --discover emits the catalog; otherwise run a normal sync.
    if args.discover:
        runner.do_discover()
    else:
        runner.do_sync()
if __name__ == '__main__':
main()
| 22.027778
| 76
| 0.708701
|
4a0841267c9f873f27d72f9aa5e39074e294ff1f
| 4,210
|
py
|
Python
|
tartiflette/executors/basic.py
|
alexchamberlain/tartiflette
|
6904b0f47770c348553e907be5f5bdb0929fe149
|
[
"MIT"
] | null | null | null |
tartiflette/executors/basic.py
|
alexchamberlain/tartiflette
|
6904b0f47770c348553e907be5f5bdb0929fe149
|
[
"MIT"
] | 1
|
2020-08-11T15:41:41.000Z
|
2020-08-11T15:41:41.000Z
|
tartiflette/executors/basic.py
|
alexchamberlain/tartiflette
|
6904b0f47770c348553e907be5f5bdb0929fe149
|
[
"MIT"
] | null | null | null |
import asyncio
from typing import Any, AsyncIterable, Callable, Dict, List, Optional
from tartiflette.executors.types import ExecutionContext
from tartiflette.types.exceptions.tartiflette import (
UnknownAnonymousdOperation,
UnknownNamedOperation,
)
async def _execute(
    root_resolvers: List["NodeField"],
    execution_ctx: ExecutionContext,
    request_ctx: Optional[Dict[str, Any]],
    initial_value: Optional[Any],
    allow_parallelization: bool,
) -> None:
    """Run every root resolver, sequentially or concurrently.

    Sequential execution (mutations) awaits resolvers one by one;
    otherwise all root fields run concurrently via asyncio.gather.
    """
    if allow_parallelization:
        coroutines = [
            resolver(execution_ctx, request_ctx, parent_result=initial_value)
            for resolver in root_resolvers
        ]
        await asyncio.gather(*coroutines, return_exceptions=False)
    else:
        for resolver in root_resolvers:
            await resolver(
                execution_ctx, request_ctx, parent_result=initial_value
            )
def _get_datas(root_nodes: List["NodeField"]) -> Optional[dict]:
data = {}
for node in root_nodes:
if node.cant_be_null and node.marshalled is None:
return None
if not node.is_execution_stopped:
data[node.alias] = node.marshalled
return data or None
def get_operation(operations, operation_name):
    """Resolve which operation of the document to run.

    Returns (operation, None) on success, or (None, [error]) when the name
    is unknown or an anonymous lookup is ambiguous.
    """
    if operation_name in operations:
        return operations[operation_name], None
    if not operation_name and len(operations) == 1:
        # Anonymous request against a single-operation document: run it.
        sole_key = next(iter(operations))
        return operations[sole_key], None
    if operation_name is not None:
        error = UnknownNamedOperation(
            "Unknown operation named < %s >." % operation_name
        )
    else:
        error = UnknownAnonymousdOperation(
            "Must provide operation name if query contains multiple operations."
        )
    return None, [error]
async def execute(
    operations: Dict[Optional[str], List["NodeOperationDefinition"]],
    operation_name: Optional[str],
    request_ctx: Optional[Dict[str, Any]],
    initial_value: Optional[Any],
    error_coercer: Callable[[Exception], dict],
) -> dict:
    """Execute a query/mutation operation and return the response dict.

    Operation-resolution failures short-circuit into an error payload;
    otherwise the operation's root fields are resolved and marshalled.
    """
    # pylint: disable=too-many-locals
    execution_ctx = ExecutionContext()
    operation, errors = get_operation(operations, operation_name)
    if errors:
        coerced = [error_coercer(err) for err in errors]
        return {"data": None, "errors": coerced}
    return await execute_fields(
        operation.children,
        execution_ctx,
        request_ctx,
        initial_value=initial_value,
        error_coercer=error_coercer,
        allow_parallelization=operation.allow_parallelization,
    )
async def subscribe(
    operations: Dict[Optional[str], List["NodeOperationDefinition"]],
    operation_name: Optional[str],
    request_ctx: Optional[Dict[str, Any]],
    initial_value: Optional[Any],
    error_coercer: Callable[[Exception], dict],
) -> AsyncIterable[Dict[str, Any]]:
    """Execute a subscription operation, yielding one response per event.

    Yields a single error payload (and stops) when the operation cannot be
    resolved; otherwise listens on the first root field's source event
    stream and yields a marshalled result for every incoming message.
    """
    # pylint: disable=too-many-locals
    execution_ctx = ExecutionContext()
    operation, errors = get_operation(operations, operation_name)
    if errors:
        yield {"data": None, "errors": [error_coercer(err) for err in errors]}
        # BUG FIX: must stop here. `operation` is None when errors are
        # returned, so the original fell through and raised AttributeError
        # on `operation.children` right after yielding the error payload
        # (compare `execute`, which returns at the same point).
        return
    root_nodes = operation.children
    source_event_stream = await root_nodes[0].create_source_event_stream(
        execution_ctx, request_ctx, parent_result=initial_value
    )
    async for message in source_event_stream:
        yield await execute_fields(
            root_nodes,
            execution_ctx,
            request_ctx,
            initial_value=message,
            error_coercer=error_coercer,
            allow_parallelization=operation.allow_parallelization,
        )
async def execute_fields(
    fields,
    execution_ctx,
    request_ctx,
    initial_value,
    error_coercer,
    allow_parallelization=True,
):
    """Resolve *fields* and marshal them into a GraphQL response dict.

    The "errors" key is only present when at least one error was recorded
    in the execution context.
    """
    await _execute(
        fields,
        execution_ctx,
        request_ctx,
        initial_value=initial_value,
        allow_parallelization=allow_parallelization,
    )
    coerced_errors = [error_coercer(err) for err in execution_ctx.errors if err]
    response = {"data": _get_datas(fields)}
    if coerced_errors:
        response["errors"] = coerced_errors
    return response
| 28.639456
| 88
| 0.644656
|
4a08418ed105494b1bc9a13dd10a76bbb595e8d6
| 60,723
|
py
|
Python
|
torchaudio/functional/filtering.py
|
underdogliu/audio
|
38e530d77e5a194d4e5f91356cc1a191207a3b29
|
[
"BSD-2-Clause"
] | null | null | null |
torchaudio/functional/filtering.py
|
underdogliu/audio
|
38e530d77e5a194d4e5f91356cc1a191207a3b29
|
[
"BSD-2-Clause"
] | null | null | null |
torchaudio/functional/filtering.py
|
underdogliu/audio
|
38e530d77e5a194d4e5f91356cc1a191207a3b29
|
[
"BSD-2-Clause"
] | null | null | null |
import math
import warnings
from typing import Optional
import torch
from torch import Tensor
def _dB2Linear(x: float) -> float:
return math.exp(x * math.log(10) / 20.0)
def _generate_wave_table(
    wave_type: str,
    data_type: str,
    table_size: int,
    min: float,
    max: float,
    phase: float,
    device: torch.device,
) -> Tensor:
    r"""A helper function for phaser. Generates a table with given parameters.

    Args:
        wave_type (str): SINE or TRIANGULAR
        data_type (str): desired data_type ( `INT` or `FLOAT` )
        table_size (int): desired table size
        min (float): desired min value
        max (float): desired max value
        phase (float): desired phase
        device (torch.device): Torch device on which table must be generated
    Returns:
        Tensor: A 1D tensor with wave table values
    """
    # Convert the phase (radians) into an index offset into the table.
    phase_offset = int(phase / math.pi / 2 * table_size + 0.5)

    t = torch.arange(table_size, device=device, dtype=torch.int32)

    point = (t + phase_offset) % table_size

    d = torch.zeros_like(point, device=device, dtype=torch.float64)

    if wave_type == "SINE":
        # One sine period, rescaled from [-1, 1] to [0, 1].
        d = (torch.sin(point.to(torch.float64) / table_size * 2 * math.pi) + 1) / 2
    elif wave_type == "TRIANGLE":
        d = point.to(torch.float64) * 2 / table_size
        # Quadrant index (0-3); each quarter of the ramp is folded into a
        # triangle wave. NOTE: mask order matters; masks are disjoint.
        value = torch.div(4 * point, table_size, rounding_mode="floor")
        d[value == 0] = d[value == 0] + 0.5
        d[value == 1] = 1.5 - d[value == 1]
        d[value == 2] = 1.5 - d[value == 2]
        d[value == 3] = d[value == 3] - 1.5

    # Rescale the [0, 1] wave into [min, max].
    d = d * (max - min) + min

    if data_type == "INT":
        # Round half away from zero before truncating to int32.
        mask = d < 0
        d[mask] = d[mask] - 0.5
        d[~mask] = d[~mask] + 0.5
        d = d.to(torch.int32)
    elif data_type == "FLOAT":
        d = d.to(torch.float32)

    return d
def allpass_biquad(waveform: Tensor, sample_rate: int, central_freq: float, Q: float = 0.707) -> Tensor:
    r"""Design two-pole all-pass filter. Similar to SoX implementation.

    .. devices:: CPU CUDA

    .. properties:: Autograd TorchScript

    Args:
        waveform(torch.Tensor): audio waveform of dimension of `(..., time)`
        sample_rate (int): sampling rate of the waveform, e.g. 44100 (Hz)
        central_freq (float or torch.Tensor): central frequency (in Hz)
        Q (float or torch.Tensor, optional): https://en.wikipedia.org/wiki/Q_factor (Default: ``0.707``)

    Returns:
        Tensor: Waveform of dimension of `(..., time)`

    Reference:
        - http://sox.sourceforge.net/sox.html
        - https://www.w3.org/2011/audio/audio-eq-cookbook.html#APF
    """
    dtype = waveform.dtype
    device = waveform.device
    central_freq = torch.as_tensor(central_freq, dtype=dtype, device=device)
    Q = torch.as_tensor(Q, dtype=dtype, device=device)

    w0 = 2 * math.pi * central_freq / sample_rate
    alpha = torch.sin(w0) / 2 / Q
    neg_two_cos_w0 = -2 * torch.cos(w0)

    # All-pass filter: the numerator is the mirrored denominator
    # (b0 = a2, b1 = a1, b2 = a0), giving unity magnitude response.
    a0 = 1 + alpha
    a1 = neg_two_cos_w0
    a2 = 1 - alpha
    return biquad(waveform, a2, a1, a0, a0, a1, a2)
def band_biquad(
    waveform: Tensor,
    sample_rate: int,
    central_freq: float,
    Q: float = 0.707,
    noise: bool = False,
) -> Tensor:
    r"""Design two-pole band filter. Similar to SoX implementation.

    .. devices:: CPU CUDA

    .. properties:: Autograd TorchScript

    Args:
        waveform (Tensor): audio waveform of dimension of `(..., time)`
        sample_rate (int): sampling rate of the waveform, e.g. 44100 (Hz)
        central_freq (float or torch.Tensor): central frequency (in Hz)
        Q (float or torch.Tensor, optional): https://en.wikipedia.org/wiki/Q_factor (Default: ``0.707``).
        noise (bool, optional) : If ``True``, uses the alternate mode for un-pitched audio (e.g. percussion).
            If ``False``, uses mode oriented to pitched audio, i.e. voice, singing,
            or instrumental music (Default: ``False``).

    Returns:
        Tensor: Waveform of dimension of `(..., time)`

    Reference:
        - http://sox.sourceforge.net/sox.html
        - https://www.w3.org/2011/audio/audio-eq-cookbook.html#APF
    """
    dtype = waveform.dtype
    device = waveform.device
    central_freq = torch.as_tensor(central_freq, dtype=dtype, device=device)
    Q = torch.as_tensor(Q, dtype=dtype, device=device)

    w0 = 2 * math.pi * central_freq / sample_rate
    # Bandwidth in Hz derived from the Q factor.
    bw_Hz = central_freq / Q

    a0 = 1.0
    # Pole radius set by the bandwidth (analog-style design, unlike the
    # cookbook biquads used by the other functions in this file).
    a2 = torch.exp(-2 * math.pi * bw_Hz / sample_rate)
    a1 = -4 * a2 / (1 + a2) * torch.cos(w0)

    b0 = torch.sqrt(1 - a1 * a1 / (4 * a2)) * (1 - a2)

    if noise:
        # Un-pitched mode: rescale b0 for the alternate gain normalization.
        mult = torch.sqrt(((1 + a2) * (1 + a2) - a1 * a1) * (1 - a2) / (1 + a2)) / b0
        b0 = mult * b0

    b1 = 0.0
    b2 = 0.0

    return biquad(waveform, b0, b1, b2, a0, a1, a2)
def bandpass_biquad(
    waveform: Tensor,
    sample_rate: int,
    central_freq: float,
    Q: float = 0.707,
    const_skirt_gain: bool = False,
) -> Tensor:
    r"""Design two-pole band-pass filter. Similar to SoX implementation.

    .. devices:: CPU CUDA

    .. properties:: Autograd TorchScript

    Args:
        waveform (Tensor): audio waveform of dimension of `(..., time)`
        sample_rate (int): sampling rate of the waveform, e.g. 44100 (Hz)
        central_freq (float or torch.Tensor): central frequency (in Hz)
        Q (float or torch.Tensor, optional): https://en.wikipedia.org/wiki/Q_factor (Default: ``0.707``)
        const_skirt_gain (bool, optional) : If ``True``, uses a constant skirt gain (peak gain = Q).
            If ``False``, uses a constant 0dB peak gain. (Default: ``False``)

    Returns:
        Tensor: Waveform of dimension of `(..., time)`

    Reference:
        - http://sox.sourceforge.net/sox.html
        - https://www.w3.org/2011/audio/audio-eq-cookbook.html#APF
    """
    dtype = waveform.dtype
    device = waveform.device
    central_freq = torch.as_tensor(central_freq, dtype=dtype, device=device)
    Q = torch.as_tensor(Q, dtype=dtype, device=device)

    w0 = 2 * math.pi * central_freq / sample_rate
    alpha = torch.sin(w0) / 2 / Q

    # Cookbook BPF: numerator is sin(w0)/2 (constant skirt) or alpha (0 dB peak).
    temp = torch.sin(w0) / 2 if const_skirt_gain else alpha
    b0 = temp
    b1 = 0.0
    b2 = -temp
    a0 = 1 + alpha
    a1 = -2 * torch.cos(w0)
    a2 = 1 - alpha
    return biquad(waveform, b0, b1, b2, a0, a1, a2)
def bandreject_biquad(waveform: Tensor, sample_rate: int, central_freq: float, Q: float = 0.707) -> Tensor:
    r"""Design two-pole band-reject filter. Similar to SoX implementation.

    .. devices:: CPU CUDA

    .. properties:: Autograd TorchScript

    Args:
        waveform (Tensor): audio waveform of dimension of `(..., time)`
        sample_rate (int): sampling rate of the waveform, e.g. 44100 (Hz)
        central_freq (float or torch.Tensor): central frequency (in Hz)
        Q (float or torch.Tensor, optional): https://en.wikipedia.org/wiki/Q_factor (Default: ``0.707``)

    Returns:
        Tensor: Waveform of dimension of `(..., time)`

    Reference:
        - http://sox.sourceforge.net/sox.html
        - https://www.w3.org/2011/audio/audio-eq-cookbook.html#APF
    """
    dtype = waveform.dtype
    device = waveform.device
    central_freq = torch.as_tensor(central_freq, dtype=dtype, device=device)
    Q = torch.as_tensor(Q, dtype=dtype, device=device)

    w0 = 2 * math.pi * central_freq / sample_rate
    alpha = torch.sin(w0) / 2 / Q

    # Notch: unity numerator with a zero pair on the unit circle at w0.
    b0 = 1.0
    b1 = -2 * torch.cos(w0)
    b2 = 1.0
    a0 = 1 + alpha
    a1 = -2 * torch.cos(w0)
    a2 = 1 - alpha
    return biquad(waveform, b0, b1, b2, a0, a1, a2)
def bass_biquad(
    waveform: Tensor,
    sample_rate: int,
    gain: float,
    central_freq: float = 100,
    Q: float = 0.707,
) -> Tensor:
    r"""Design a bass tone-control effect. Similar to SoX implementation.

    .. devices:: CPU CUDA

    .. properties:: Autograd TorchScript

    Args:
        waveform (Tensor): audio waveform of dimension of `(..., time)`
        sample_rate (int): sampling rate of the waveform, e.g. 44100 (Hz)
        gain (float or torch.Tensor): desired gain at the boost (or attenuation) in dB.
        central_freq (float or torch.Tensor, optional): central frequency (in Hz). (Default: ``100``)
        Q (float or torch.Tensor, optional): https://en.wikipedia.org/wiki/Q_factor (Default: ``0.707``).

    Returns:
        Tensor: Waveform of dimension of `(..., time)`

    Reference:
        - http://sox.sourceforge.net/sox.html
        - https://www.w3.org/2011/audio/audio-eq-cookbook.html#APF
    """
    dtype = waveform.dtype
    device = waveform.device
    central_freq = torch.as_tensor(central_freq, dtype=dtype, device=device)
    Q = torch.as_tensor(Q, dtype=dtype, device=device)
    gain = torch.as_tensor(gain, dtype=dtype, device=device)

    w0 = 2 * math.pi * central_freq / sample_rate
    alpha = torch.sin(w0) / 2 / Q
    # Low-shelf amplitude: A = 10 ** (gain / 40).
    A = torch.exp(gain / 40 * math.log(10))

    temp1 = 2 * torch.sqrt(A) * alpha
    temp2 = (A - 1) * torch.cos(w0)
    temp3 = (A + 1) * torch.cos(w0)

    # Cookbook low-shelf coefficients.
    b0 = A * ((A + 1) - temp2 + temp1)
    b1 = 2 * A * ((A - 1) - temp3)
    b2 = A * ((A + 1) - temp2 - temp1)
    a0 = (A + 1) + temp2 + temp1
    a1 = -2 * ((A - 1) + temp3)
    a2 = (A + 1) + temp2 - temp1

    # Pre-normalized by a0 before handing off to biquad.
    return biquad(waveform, b0 / a0, b1 / a0, b2 / a0, a0 / a0, a1 / a0, a2 / a0)
def biquad(waveform: Tensor, b0: float, b1: float, b2: float, a0: float, a1: float, a2: float) -> Tensor:
    r"""Perform a biquad filter of input tensor. Initial conditions set to 0.

    .. devices:: CPU CUDA

    .. properties:: Autograd TorchScript

    Args:
        waveform (Tensor): audio waveform of dimension of `(..., time)`
        b0 (float or torch.Tensor): numerator coefficient of current input, x[n]
        b1 (float or torch.Tensor): numerator coefficient of input one time step ago x[n-1]
        b2 (float or torch.Tensor): numerator coefficient of input two time steps ago x[n-2]
        a0 (float or torch.Tensor): denominator coefficient of current output y[n], typically 1
        a1 (float or torch.Tensor): denominator coefficient of current output y[n-1]
        a2 (float or torch.Tensor): denominator coefficient of current output y[n-2]

    Returns:
        Tensor: Waveform with dimension of `(..., time)`

    Reference:
        - https://en.wikipedia.org/wiki/Digital_biquad_filter
    """
    device = waveform.device
    dtype = waveform.dtype

    # Promote each scalar/tensor coefficient to a 1-element tensor matching
    # the waveform's dtype/device, then stack into lfilter's coefficient vectors.
    def _as_coeff(value):
        return torch.as_tensor(value, dtype=dtype, device=device).view(1)

    a_coeffs = torch.cat([_as_coeff(a0), _as_coeff(a1), _as_coeff(a2)])
    b_coeffs = torch.cat([_as_coeff(b0), _as_coeff(b1), _as_coeff(b2)])
    return lfilter(waveform, a_coeffs, b_coeffs)
def contrast(waveform: Tensor, enhancement_amount: float = 75.0) -> Tensor:
    r"""Apply contrast effect. Similar to SoX implementation.

    .. devices:: CPU CUDA

    .. properties:: Autograd TorchScript

    Comparable with compression, this effect modifies an audio signal to make it sound louder

    Args:
        waveform (Tensor): audio waveform of dimension of `(..., time)`
        enhancement_amount (float, optional): controls the amount of the enhancement
            Allowed range of values for enhancement_amount : 0-100
            Note that enhancement_amount = 0 still gives a significant contrast enhancement

    Returns:
        Tensor: Waveform of dimension of `(..., time)`

    Reference:
        - http://sox.sourceforge.net/sox.html
    """
    if not 0 <= enhancement_amount <= 100:
        raise ValueError("Allowed range of values for enhancement_amount : 0-100")

    # Waveshaping: a sine transfer curve with a small, amount-controlled
    # harmonic term added to its argument.
    scale = enhancement_amount / 750.0
    phase = waveform * (math.pi / 2)
    return torch.sin(phase + scale * torch.sin(phase * 4))
def dcshift(waveform: Tensor, shift: float, limiter_gain: Optional[float] = None) -> Tensor:
    r"""Apply a DC shift to the audio. Similar to SoX implementation.

    .. devices:: CPU CUDA

    .. properties:: TorchScript

    This can be useful to remove a DC offset
    (caused perhaps by a hardware problem in the recording chain) from the audio

    Args:
        waveform (Tensor): audio waveform of dimension of `(..., time)`
        shift (float): indicates the amount to shift the audio
            Allowed range of values for shift : -2.0 to +2.0
        limiter_gain (float of None, optional): It is used only on peaks to prevent clipping
            It should have a value much less than 1 (e.g. 0.05 or 0.02)

    Returns:
        Tensor: Waveform of dimension of `(..., time)`

    Reference:
        - http://sox.sourceforge.net/sox.html
    """
    limiter_threshold = 0.0
    if limiter_gain is not None:
        limiter_threshold = 1.0 - (abs(shift) - limiter_gain)
    # Note:
    # the following index-based update breaks auto-grad support
    if limiter_gain is not None and shift > 0:
        # Clone first: the masked index assignments below are in-place, and
        # without a clone they would also mutate the caller's input tensor.
        output_waveform = waveform.clone()
        mask = waveform > limiter_threshold
        # Compress samples above the threshold before shifting so peaks do not clip.
        temp = (waveform[mask] - limiter_threshold) * limiter_gain / (1 - limiter_threshold)
        output_waveform[mask] = (temp + limiter_threshold + shift).clamp(max=limiter_threshold)
        output_waveform[~mask] = (waveform[~mask] + shift).clamp(min=-1, max=1)
    elif limiter_gain is not None and shift < 0:
        output_waveform = waveform.clone()
        mask = waveform < -limiter_threshold
        # Mirror of the positive-shift branch for negative peaks.
        temp = (waveform[mask] + limiter_threshold) * limiter_gain / (1 - limiter_threshold)
        output_waveform[mask] = (temp - limiter_threshold + shift).clamp(min=-limiter_threshold)
        output_waveform[~mask] = (waveform[~mask] + shift).clamp(min=-1, max=1)
    else:
        # No limiter: plain shift with hard clipping to [-1, 1].
        output_waveform = (waveform + shift).clamp(min=-1, max=1)
    return output_waveform
def deemph_biquad(waveform: Tensor, sample_rate: int) -> Tensor:
    r"""Apply ISO 908 CD de-emphasis (shelving) IIR filter. Similar to SoX implementation.

    .. devices:: CPU CUDA

    .. properties:: Autograd TorchScript

    Args:
        waveform (Tensor): audio waveform of dimension of `(..., time)`
        sample_rate (int): sampling rate of the waveform, Allowed sample rate ``44100`` or ``48000``

    Returns:
        Tensor: Waveform of dimension of `(..., time)`

    Reference:
        - http://sox.sourceforge.net/sox.html
        - https://www.w3.org/2011/audio/audio-eq-cookbook.html#APF
    """
    # Shelf parameters are fixed per supported sample rate.
    if sample_rate == 44100:
        central_freq, width_slope, gain = 5283, 0.4845, -9.477
    elif sample_rate == 48000:
        central_freq, width_slope, gain = 5356, 0.479, -9.62
    else:
        raise ValueError("Sample rate must be 44100 (audio-CD) or 48000 (DAT)")

    w0 = 2 * math.pi * central_freq / sample_rate
    amp = math.exp(gain / 40.0 * math.log(10))
    alpha = math.sin(w0) / 2 * math.sqrt((amp + 1 / amp) * (1 / width_slope - 1) + 2)

    cos_w0 = math.cos(w0)
    sqrt_term = 2 * math.sqrt(amp) * alpha
    minus_cos = (amp - 1) * cos_w0
    plus_cos = (amp + 1) * cos_w0

    # High-shelf biquad coefficients.
    b0 = amp * ((amp + 1) + minus_cos + sqrt_term)
    b1 = -2 * amp * ((amp - 1) + plus_cos)
    b2 = amp * ((amp + 1) + minus_cos - sqrt_term)
    a0 = (amp + 1) - minus_cos + sqrt_term
    a1 = 2 * ((amp - 1) - plus_cos)
    a2 = (amp + 1) - minus_cos - sqrt_term
    return biquad(waveform, b0, b1, b2, a0, a1, a2)
def _add_noise_shaping(dithered_waveform: Tensor, waveform: Tensor) -> Tensor:
r"""Noise shaping is calculated by error:
error[n] = dithered[n] - original[n]
noise_shaped_waveform[n] = dithered[n] + error[n-1]
"""
wf_shape = waveform.size()
waveform = waveform.reshape(-1, wf_shape[-1])
dithered_shape = dithered_waveform.size()
dithered_waveform = dithered_waveform.reshape(-1, dithered_shape[-1])
error = dithered_waveform - waveform
# add error[n-1] to dithered_waveform[n], so offset the error by 1 index
zeros = torch.zeros(1, dtype=error.dtype, device=error.device)
for index in range(error.size()[0]):
err = error[index]
error_offset = torch.cat((zeros, err))
error[index] = error_offset[: waveform.size()[1]]
noise_shaped = dithered_waveform + error
return noise_shaped.reshape(dithered_shape[:-1] + noise_shaped.shape[-1:])
def _apply_probability_distribution(waveform: Tensor, density_function: str = "TPDF") -> Tensor:
    r"""Apply a probability distribution function on a waveform.
    Triangular probability density function (TPDF) dither noise has a
    triangular distribution; values in the center of the range have a higher
    probability of occurring.
    Rectangular probability density function (RPDF) dither noise has a
    uniform distribution; any value in the specified range has the same
    probability of occurring.
    Gaussian probability density function (GPDF) has a normal distribution.
    The relationship of probabilities of results follows a bell-shaped,
    or Gaussian curve, typical of dither generated by analog sources.
    Args:
        waveform (Tensor): Tensor of audio of dimension (..., time)
        density_function (str, optional): The density function of a
            continuous random variable (Default: ``"TPDF"``)
            Options: Triangular Probability Density Function - `TPDF`
                     Rectangular Probability Density Function - `RPDF`
                     Gaussian Probability Density Function - `GPDF`
    Returns:
        Tensor: waveform dithered with the chosen density function
    """
    # pack batch
    shape = waveform.size()
    waveform = waveform.reshape(-1, shape[-1])
    # Exclusive upper bounds for the random index draws below (size - 1).
    channel_size = waveform.size()[0] - 1
    time_size = waveform.size()[-1] - 1
    # One random (channel, time) cell; its value seeds the RPDF/GPDF noise below.
    random_channel = (
        int(
            torch.randint(
                channel_size,
                [
                    1,
                ],
            ).item()
        )
        if channel_size > 0
        else 0
    )
    random_time = (
        int(
            torch.randint(
                time_size,
                [
                    1,
                ],
            ).item()
        )
        if time_size > 0
        else 0
    )
    # Quantization grid for a 16-bit target bit depth.
    number_of_bits = 16
    up_scaling = 2 ** (number_of_bits - 1) - 2
    signal_scaled = waveform * up_scaling
    down_scaling = 2 ** (number_of_bits - 1)
    signal_scaled_dis = waveform
    if density_function == "RPDF":
        # Rectangular dither: a single offset drawn from the signal itself.
        RPDF = waveform[random_channel][random_time] - 0.5
        signal_scaled_dis = signal_scaled + RPDF
    elif density_function == "GPDF":
        # TODO Replace by distribution code once
        # https://github.com/pytorch/pytorch/issues/29843 is resolved
        # gaussian = torch.distributions.normal.Normal(torch.mean(waveform, -1), 1).sample()
        # Approximates Gaussian noise by summing several samples drawn at
        # random positions from the signal.
        # NOTE(review): for single-channel input (channel_size == 0) the
        # torch.randint(channel_size, ...) call below would raise — verify
        # GPDF is only exercised with multi-channel data.
        num_rand_variables = 6
        gaussian = waveform[random_channel][random_time]
        for ws in num_rand_variables * [time_size]:
            rand_chan = int(
                torch.randint(
                    channel_size,
                    [
                        1,
                    ],
                ).item()
            )
            gaussian += waveform[rand_chan][
                int(
                    torch.randint(
                        ws,
                        [
                            1,
                        ],
                    ).item()
                )
            ]
        signal_scaled_dis = signal_scaled + gaussian
    else:
        # Default (TPDF): triangular noise realized with a Bartlett window,
        # replicated across all channels.
        # dtype needed for https://github.com/pytorch/pytorch/issues/32358
        TPDF = torch.bartlett_window(time_size + 1, dtype=signal_scaled.dtype, device=signal_scaled.device)
        TPDF = TPDF.repeat((channel_size + 1), 1)
        signal_scaled_dis = signal_scaled + TPDF
    # Quantize to the integer grid, then rescale back to [-1, 1].
    quantised_signal_scaled = torch.round(signal_scaled_dis)
    quantised_signal = quantised_signal_scaled / down_scaling
    # unpack batch
    return quantised_signal.reshape(shape[:-1] + quantised_signal.shape[-1:])
def dither(waveform: Tensor, density_function: str = "TPDF", noise_shaping: bool = False) -> Tensor:
    r"""Apply dither

    .. devices:: CPU CUDA

    .. properties:: TorchScript

    Dither increases the perceived dynamic range of audio stored at a
    particular bit-depth by eliminating nonlinear truncation distortion
    (i.e. adding minimally perceived noise to mask distortion caused by quantization).

    Args:
        waveform (Tensor): Tensor of audio of dimension (..., time)
        density_function (str, optional):
            The density function of a continuous random variable. One of
            ``"TPDF"`` (Triangular Probability Density Function),
            ``"RPDF"`` (Rectangular Probability Density Function) or
            ``"GPDF"`` (Gaussian Probability Density Function) (Default: ``"TPDF"``).
        noise_shaping (bool, optional): a filtering process that shapes the spectral
            energy of quantisation error (Default: ``False``)

    Returns:
        Tensor: waveform dithered
    """
    # Dither first; optionally feed the quantization error back (noise shaping).
    dithered = _apply_probability_distribution(waveform, density_function=density_function)
    if not noise_shaping:
        return dithered
    return _add_noise_shaping(dithered, waveform)
def equalizer_biquad(
    waveform: Tensor,
    sample_rate: int,
    center_freq: float,
    gain: float,
    Q: float = 0.707,
) -> Tensor:
    r"""Design biquad peaking equalizer filter and perform filtering. Similar to SoX implementation.

    .. devices:: CPU CUDA

    .. properties:: Autograd TorchScript

    Args:
        waveform (Tensor): audio waveform of dimension of `(..., time)`
        sample_rate (int): sampling rate of the waveform, e.g. 44100 (Hz)
        center_freq (float): filter's central frequency
        gain (float or torch.Tensor): desired gain at the boost (or attenuation) in dB
        Q (float or torch.Tensor, optional): https://en.wikipedia.org/wiki/Q_factor (Default: ``0.707``)

    Returns:
        Tensor: Waveform of dimension of `(..., time)`
    """
    dtype = waveform.dtype
    device = waveform.device
    center_freq = torch.as_tensor(center_freq, dtype=dtype, device=device)
    Q = torch.as_tensor(Q, dtype=dtype, device=device)
    gain = torch.as_tensor(gain, dtype=dtype, device=device)

    # Peaking-EQ intermediates; the -2*cos(w0) term is shared by b1 and a1.
    w0 = 2 * math.pi * center_freq / sample_rate
    amp = torch.exp(gain / 40.0 * math.log(10))
    band = torch.sin(w0) / 2 / Q
    neg_two_cos_w0 = -2 * torch.cos(w0)

    return biquad(
        waveform,
        b0=1 + band * amp,
        b1=neg_two_cos_w0,
        b2=1 - band * amp,
        a0=1 + band / amp,
        a1=neg_two_cos_w0,
        a2=1 - band / amp,
    )
def filtfilt(
    waveform: Tensor,
    a_coeffs: Tensor,
    b_coeffs: Tensor,
    clamp: bool = True,
) -> Tensor:
    r"""Apply an IIR filter forward and backward to a waveform.

    .. devices:: CPU CUDA

    .. properties:: Autograd TorchScript

    Inspired by https://docs.scipy.org/doc/scipy/reference/generated/scipy.signal.filtfilt.html

    Args:
        waveform (Tensor): audio waveform of dimension of `(..., time)`. Must be normalized to -1 to 1.
        a_coeffs (Tensor): denominator coefficients of difference equation of dimension of either
            1D with shape `(num_order + 1)` or 2D with shape `(num_filters, num_order + 1)`.
            Lower delay coefficients are first, e.g. ``[a0, a1, a2, ...]``.
            Must be same size as b_coeffs (pad with 0's as necessary).
        b_coeffs (Tensor): numerator coefficients of difference equation of dimension of either
            1D with shape `(num_order + 1)` or 2D with shape `(num_filters, num_order + 1)`.
            Lower delay coefficients are first, e.g. ``[b0, b1, b2, ...]``.
            Must be same size as a_coeffs (pad with 0's as necessary).
        clamp (bool, optional): If ``True``, clamp the output signal to be in the range [-1, 1] (Default: ``True``)

    Returns:
        Tensor: Waveform with dimension of either `(..., num_filters, time)` if ``a_coeffs`` and ``b_coeffs``
        are 2D Tensors, or `(..., time)` otherwise.
    """
    # Forward pass is left unclamped so the intermediate signal is not
    # distorted; the requested clamping is applied by the backward pass.
    forward_pass = lfilter(waveform, a_coeffs, b_coeffs, clamp=False, batching=True)
    backward_pass = lfilter(forward_pass.flip(-1), a_coeffs, b_coeffs, clamp=clamp, batching=True)
    return backward_pass.flip(-1)
def flanger(
    waveform: Tensor,
    sample_rate: int,
    delay: float = 0.0,
    depth: float = 2.0,
    regen: float = 0.0,
    width: float = 71.0,
    speed: float = 0.5,
    phase: float = 25.0,
    modulation: str = "sinusoidal",
    interpolation: str = "linear",
) -> Tensor:
    r"""Apply a flanger effect to the audio. Similar to SoX implementation.
    .. devices:: CPU CUDA
    .. properties:: Autograd TorchScript
    Args:
        waveform (Tensor): audio waveform of dimension of `(..., channel, time)` .
            Max 4 channels allowed
        sample_rate (int): sampling rate of the waveform, e.g. 44100 (Hz)
        delay (float, optional): desired delay in milliseconds(ms)
            Allowed range of values are 0 to 30
        depth (float, optional): desired delay depth in milliseconds(ms)
            Allowed range of values are 0 to 10
        regen (float, optional): desired regen(feedback gain) in dB
            Allowed range of values are -95 to 95
        width (float, optional): desired width(delay gain) in dB
            Allowed range of values are 0 to 100
        speed (float, optional): modulation speed in Hz
            Allowed range of values are 0.1 to 10
        phase (float, optional): percentage phase-shift for multi-channel
            Allowed range of values are 0 to 100
        modulation (str, optional): Use either "sinusoidal" or "triangular" modulation. (Default: ``sinusoidal``)
        interpolation (str, optional): Use either "linear" or "quadratic" for delay-line interpolation.
            (Default: ``linear``)
    Returns:
        Tensor: Waveform of dimension of `(..., channel, time)`
    Reference:
        - http://sox.sourceforge.net/sox.html
        - Scott Lehman, `Effects Explained`_,
    .. _Effects Explained:
        https://web.archive.org/web/20051125072557/http://www.harmony-central.com/Effects/effects-explained.html
    """
    if modulation not in ("sinusoidal", "triangular"):
        raise ValueError("Only 'sinusoidal' or 'triangular' modulation allowed")
    if interpolation not in ("linear", "quadratic"):
        raise ValueError("Only 'linear' or 'quadratic' interpolation allowed")
    actual_shape = waveform.shape
    device, dtype = waveform.device, waveform.dtype
    if actual_shape[-2] > 4:
        raise ValueError("Max 4 channels allowed")
    # convert to 3D (batch, channels, time)
    waveform = waveform.view(-1, actual_shape[-2], actual_shape[-1])
    # Scaling: percentages -> fractions, milliseconds -> seconds.
    feedback_gain = regen / 100
    delay_gain = width / 100
    channel_phase = phase / 100
    delay_min = delay / 1000
    delay_depth = depth / 1000
    n_channels = waveform.shape[-2]
    if modulation == "sinusoidal":
        wave_type = "SINE"
    else:
        wave_type = "TRIANGLE"
    # Balance output:
    in_gain = 1.0 / (1 + delay_gain)
    delay_gain = delay_gain / (1 + delay_gain)
    # Balance feedback loop:
    delay_gain = delay_gain * (1 - abs(feedback_gain))
    # Circular delay buffer sized for the maximum modulated delay (+2 slack
    # samples used by the interpolation lookups below).
    delay_buf_length = int((delay_min + delay_depth) * sample_rate + 0.5)
    delay_buf_length = delay_buf_length + 2
    delay_bufs = torch.zeros(waveform.shape[0], n_channels, delay_buf_length, dtype=dtype, device=device)
    delay_last = torch.zeros(waveform.shape[0], n_channels, dtype=dtype, device=device)
    # Precomputed LFO table holding fractional delay values in
    # [table_min, table_max]; one full period spans lfo_length samples.
    lfo_length = int(sample_rate / speed)
    table_min = math.floor(delay_min * sample_rate + 0.5)
    table_max = delay_buf_length - 2.0
    lfo = _generate_wave_table(
        wave_type=wave_type,
        data_type="FLOAT",
        table_size=lfo_length,
        min=float(table_min),
        max=float(table_max),
        phase=3 * math.pi / 2,
        device=device,
    )
    output_waveform = torch.zeros_like(waveform, dtype=dtype, device=device)
    delay_buf_pos = 0
    lfo_pos = 0
    channel_idxs = torch.arange(0, n_channels, device=device)
    # Sequential per-sample loop: each iteration writes the current input
    # (plus feedback) into the circular buffer, reads back an LFO-modulated
    # delayed sample per channel, and mixes it with the dry signal.
    for i in range(waveform.shape[-1]):
        # Move the write head backwards through the circular buffer.
        delay_buf_pos = (delay_buf_pos + delay_buf_length - 1) % delay_buf_length
        # Per-channel LFO phase offset (percentage of one LFO period).
        cur_channel_phase = (channel_idxs * lfo_length * channel_phase + 0.5).to(torch.int64)
        delay_tensor = lfo[(lfo_pos + cur_channel_phase) % lfo_length]
        # Split the modulated delay into integer and fractional parts for
        # interpolation between buffer slots.
        frac_delay = torch.frac(delay_tensor)
        delay_tensor = torch.floor(delay_tensor)
        int_delay = delay_tensor.to(torch.int64)
        temp = waveform[:, :, i]
        # Write the input plus the previous delayed output scaled by feedback.
        delay_bufs[:, :, delay_buf_pos] = temp + delay_last * feedback_gain
        delayed_0 = delay_bufs[:, channel_idxs, (delay_buf_pos + int_delay) % delay_buf_length]
        int_delay = int_delay + 1
        delayed_1 = delay_bufs[:, channel_idxs, (delay_buf_pos + int_delay) % delay_buf_length]
        int_delay = int_delay + 1
        if interpolation == "linear":
            delayed = delayed_0 + (delayed_1 - delayed_0) * frac_delay
        else:
            # Quadratic interpolation over three consecutive buffer samples.
            delayed_2 = delay_bufs[:, channel_idxs, (delay_buf_pos + int_delay) % delay_buf_length]
            int_delay = int_delay + 1
            delayed_2 = delayed_2 - delayed_0
            delayed_1 = delayed_1 - delayed_0
            a = delayed_2 * 0.5 - delayed_1
            b = delayed_1 * 2 - delayed_2 * 0.5
            delayed = delayed_0 + (a * frac_delay + b) * frac_delay
        delay_last = delayed
        output_waveform[:, :, i] = waveform[:, :, i] * in_gain + delayed * delay_gain
        lfo_pos = (lfo_pos + 1) % lfo_length
    return output_waveform.clamp(min=-1, max=1).view(actual_shape)
def gain(waveform: Tensor, gain_db: float = 1.0) -> Tensor:
    r"""Apply amplification or attenuation to the whole waveform.

    .. devices:: CPU CUDA

    .. properties:: Autograd TorchScript

    Args:
        waveform (Tensor): Tensor of audio of dimension (..., time).
        gain_db (float, optional): Gain adjustment in decibels (dB) (Default: ``1.0``).

    Returns:
        Tensor: the whole waveform amplified by gain_db.
    """
    # 0 dB is the identity; return the input unchanged.
    if gain_db == 0:
        return waveform
    # Convert dB to a linear amplitude ratio.
    return waveform * (10 ** (gain_db / 20))
def highpass_biquad(waveform: Tensor, sample_rate: int, cutoff_freq: float, Q: float = 0.707) -> Tensor:
    r"""Design biquad highpass filter and perform filtering. Similar to SoX implementation.

    .. devices:: CPU CUDA

    .. properties:: Autograd TorchScript

    Args:
        waveform (Tensor): audio waveform of dimension of `(..., time)`
        sample_rate (int): sampling rate of the waveform, e.g. 44100 (Hz)
        cutoff_freq (float or torch.Tensor): filter cutoff frequency
        Q (float or torch.Tensor, optional): https://en.wikipedia.org/wiki/Q_factor (Default: ``0.707``)

    Returns:
        Tensor: Waveform dimension of `(..., time)`
    """
    dtype = waveform.dtype
    device = waveform.device
    cutoff_freq = torch.as_tensor(cutoff_freq, dtype=dtype, device=device)
    Q = torch.as_tensor(Q, dtype=dtype, device=device)

    # Highpass biquad coefficients; b2 equals b0 and b1 = -(1 + cos(w0)).
    w0 = 2 * math.pi * cutoff_freq / sample_rate
    alpha = torch.sin(w0) / 2.0 / Q
    cos_w0 = torch.cos(w0)
    b0 = (1 + cos_w0) / 2
    return biquad(waveform, b0, -1 - cos_w0, b0, 1 + alpha, -2 * cos_w0, 1 - alpha)
def _lfilter_core_generic_loop(input_signal_windows: Tensor, a_coeffs_flipped: Tensor, padded_output_waveform: Tensor):
n_order = a_coeffs_flipped.size(1)
a_coeffs_flipped = a_coeffs_flipped.unsqueeze(2)
for i_sample, o0 in enumerate(input_signal_windows.permute(2, 0, 1)):
windowed_output_signal = padded_output_waveform[:, :, i_sample : i_sample + n_order]
o0 -= (windowed_output_signal.transpose(0, 1) @ a_coeffs_flipped)[..., 0].t()
padded_output_waveform[:, :, i_sample + n_order - 1] = o0
# Bind the compiled C++ loop from the torchaudio extension when the operator
# is registered; otherwise fall back to the pure-Python implementation above.
try:
    _lfilter_core_cpu_loop = torch.ops.torchaudio._lfilter_core_loop
except RuntimeError as err:
    # Only the "operator not registered" failure is expected here; any other
    # RuntimeError should not be silently absorbed.
    assert str(err) == "No such operator torchaudio::_lfilter_core_loop"
    _lfilter_core_cpu_loop = _lfilter_core_generic_loop
def _lfilter_core(
    waveform: Tensor,
    a_coeffs: Tensor,
    b_coeffs: Tensor,
) -> Tensor:
    """IIR filtering core: FIR stage via grouped conv1d, recursive stage via a loop."""
    assert a_coeffs.size() == b_coeffs.size()
    assert len(waveform.size()) == 3
    assert waveform.device == a_coeffs.device
    assert b_coeffs.device == a_coeffs.device

    n_batch, n_channel, n_sample = waveform.size()
    n_order = a_coeffs.size(1)
    assert n_order > 0

    # Left-pad the input so every output sample sees a full causal window,
    # and allocate an equally padded output buffer for the loop to fill.
    padded_x = torch.nn.functional.pad(waveform, [n_order - 1, 0])
    padded_y = torch.zeros_like(padded_x)

    # Coefficients arrive lowest-delay-first; flip so a dot product against a
    # time window lines the highest delay up with the oldest sample.
    a_flipped = a_coeffs.flip(1)
    b_flipped = b_coeffs.flip(1)

    # Feed-forward (FIR) half of the difference equation, computed for all
    # samples in parallel as one grouped convolution per channel.
    fir_out = torch.nn.functional.conv1d(padded_x, b_flipped.unsqueeze(1), groups=n_channel)

    # Normalize both sides by a0 of each filter.
    fir_out.div_(a_coeffs[:, :1])
    a_flipped.div_(a_coeffs[:, :1])

    # Use the compiled CPU loop only when every tensor lives on CPU.
    on_cpu = (
        fir_out.device == torch.device("cpu")
        and a_flipped.device == torch.device("cpu")
        and padded_y.device == torch.device("cpu")
    )
    if on_cpu:
        _lfilter_core_cpu_loop(fir_out, a_flipped, padded_y)
    else:
        _lfilter_core_generic_loop(fir_out, a_flipped, padded_y)

    # Strip the left padding before returning.
    return padded_y[:, :, n_order - 1 :]
# Bind the autograd-capable compiled lfilter from the torchaudio extension
# when available; otherwise fall back to the Python core above.
try:
    _lfilter = torch.ops.torchaudio._lfilter
except RuntimeError as err:
    # Only the "operator not registered" failure is expected here.
    assert str(err) == "No such operator torchaudio::_lfilter"
    _lfilter = _lfilter_core
def lfilter(waveform: Tensor, a_coeffs: Tensor, b_coeffs: Tensor, clamp: bool = True, batching: bool = True) -> Tensor:
    r"""Perform an IIR filter by evaluating difference equation.

    .. devices:: CPU CUDA

    .. properties:: Autograd TorchScript

    Note:
        To avoid numerical problems, small filter order is preferred.
        Using double precision could also minimize numerical precision errors.

    Args:
        waveform (Tensor): audio waveform of dimension of `(..., time)`. Must be normalized to -1 to 1.
        a_coeffs (Tensor): denominator coefficients of difference equation of dimension of either
            1D with shape `(num_order + 1)` or 2D with shape `(num_filters, num_order + 1)`.
            Lower delays coefficients are first, e.g. ``[a0, a1, a2, ...]``.
            Must be same size as b_coeffs (pad with 0's as necessary).
        b_coeffs (Tensor): numerator coefficients of difference equation of dimension of either
            1D with shape `(num_order + 1)` or 2D with shape `(num_filters, num_order + 1)`.
            Lower delays coefficients are first, e.g. ``[b0, b1, b2, ...]``.
            Must be same size as a_coeffs (pad with 0's as necessary).
        clamp (bool, optional): If ``True``, clamp the output signal to be in the range [-1, 1] (Default: ``True``)
        batching (bool, optional): Effective only when coefficients are 2D. If ``True``, then waveform should be at
            least 2D, and the size of second axis from last should equals to ``num_filters``.
            The output can be expressed as ``output[..., i, :] = lfilter(waveform[..., i, :],
            a_coeffs[i], b_coeffs[i], clamp=clamp, batching=False)``. (Default: ``True``)

    Returns:
        Tensor: Waveform with dimension of either `(..., num_filters, time)` if ``a_coeffs`` and ``b_coeffs``
        are 2D Tensors, or `(..., time)` otherwise.
    """
    assert a_coeffs.size() == b_coeffs.size()
    assert a_coeffs.ndim <= 2

    if a_coeffs.ndim == 1:
        # Single filter: lift the coefficients to a (1, num_order + 1) bank.
        a_coeffs = a_coeffs.unsqueeze(0)
        b_coeffs = b_coeffs.unsqueeze(0)
    elif batching:
        # One waveform channel per filter in the bank.
        assert waveform.ndim > 1
        assert waveform.shape[-2] == a_coeffs.shape[0]
    else:
        # Replicate the waveform so every filter in the bank processes it.
        waveform = torch.stack([waveform] * a_coeffs.shape[0], -2)

    # pack batch: (..., n_filters, time) -> (batch, n_filters, time)
    orig_shape = waveform.size()
    waveform = waveform.reshape(-1, a_coeffs.shape[0], orig_shape[-1])

    filtered = _lfilter(waveform, a_coeffs, b_coeffs)
    if clamp:
        filtered = torch.clamp(filtered, min=-1.0, max=1.0)

    # unpack batch
    return filtered.reshape(orig_shape[:-1] + filtered.shape[-1:])
def lowpass_biquad(waveform: Tensor, sample_rate: int, cutoff_freq: float, Q: float = 0.707) -> Tensor:
    r"""Design biquad lowpass filter and perform filtering. Similar to SoX implementation.

    .. devices:: CPU CUDA

    .. properties:: Autograd TorchScript

    Args:
        waveform (torch.Tensor): audio waveform of dimension of `(..., time)`
        sample_rate (int): sampling rate of the waveform, e.g. 44100 (Hz)
        cutoff_freq (float or torch.Tensor): filter cutoff frequency
        Q (float or torch.Tensor, optional): https://en.wikipedia.org/wiki/Q_factor (Default: ``0.707``)

    Returns:
        Tensor: Waveform of dimension of `(..., time)`
    """
    dtype = waveform.dtype
    device = waveform.device
    cutoff_freq = torch.as_tensor(cutoff_freq, dtype=dtype, device=device)
    Q = torch.as_tensor(Q, dtype=dtype, device=device)

    # Lowpass biquad coefficients; b2 equals b0 and b1 = 1 - cos(w0).
    w0 = 2 * math.pi * cutoff_freq / sample_rate
    alpha = torch.sin(w0) / 2 / Q
    cos_w0 = torch.cos(w0)
    b0 = (1 - cos_w0) / 2
    return biquad(waveform, b0, 1 - cos_w0, b0, 1 + alpha, -2 * cos_w0, 1 - alpha)
def _overdrive_core_loop_generic(
waveform: Tensor, temp: Tensor, last_in: Tensor, last_out: Tensor, output_waveform: Tensor
):
for i in range(waveform.shape[-1]):
last_out = temp[:, i] - last_in + 0.995 * last_out
last_in = temp[:, i]
output_waveform[:, i] = waveform[:, i] * 0.5 + last_out * 0.75
# Bind the compiled C++ overdrive loop from the torchaudio extension when the
# operator is registered; otherwise fall back to the Python loop above.
try:
    _overdrive_core_loop_cpu = torch.ops.torchaudio._overdrive_core_loop
except RuntimeError as err:
    # Only the "operator not registered" failure is expected here.
    assert str(err) == "No such operator torchaudio::_overdrive_core_loop"
    _overdrive_core_loop_cpu = _overdrive_core_loop_generic
def overdrive(waveform: Tensor, gain: float = 20, colour: float = 20) -> Tensor:
    r"""Apply a overdrive effect to the audio. Similar to SoX implementation.

    .. devices:: CPU CUDA

    .. properties:: Autograd TorchScript

    This effect applies a non linear distortion to the audio signal.

    Args:
        waveform (Tensor): audio waveform of dimension of `(..., time)`
        gain (float, optional): desired gain at the boost (or attenuation) in dB
            Allowed range of values are 0 to 100
        colour (float, optional): controls the amount of even harmonic content in the over-driven output
            Allowed range of values are 0 to 100

    Returns:
        Tensor: Waveform of dimension of `(..., time)`

    Reference:
        - http://sox.sourceforge.net/sox.html
    """
    actual_shape = waveform.shape
    device, dtype = waveform.device, waveform.dtype

    # Flatten the leading dimensions: work on (n, time).
    waveform = waveform.view(-1, actual_shape[-1])

    gain = _dB2Linear(gain)
    colour = colour / 200
    last_in = torch.zeros(waveform.shape[:-1], dtype=dtype, device=device)
    last_out = torch.zeros(waveform.shape[:-1], dtype=dtype, device=device)

    # Amplify and bias, then soft-clip: values outside [-1, 1] saturate at
    # +/-2/3, in-range values follow x - x^3/3.
    shaped = waveform * gain + colour
    below = shaped < -1
    # Wrapping the constant with Tensor is required for Torchscript
    shaped[below] = torch.tensor(-2.0 / 3.0, dtype=dtype, device=device)
    above = shaped > 1
    shaped[above] = torch.tensor(2.0 / 3.0, dtype=dtype, device=device)
    middle = ~below & ~above
    shaped[middle] = shaped[middle] - (shaped[middle] ** 3) * (1.0 / 3)

    output_waveform = torch.zeros_like(waveform, dtype=dtype, device=device)
    # Uses CPU optimized loop function if available for CPU device
    if device == torch.device("cpu"):
        _overdrive_core_loop_cpu(waveform, shaped, last_in, last_out, output_waveform)
    else:
        _overdrive_core_loop_generic(waveform, shaped, last_in, last_out, output_waveform)

    return output_waveform.clamp(min=-1, max=1).view(actual_shape)
def phaser(
    waveform: Tensor,
    sample_rate: int,
    gain_in: float = 0.4,
    gain_out: float = 0.74,
    delay_ms: float = 3.0,
    decay: float = 0.4,
    mod_speed: float = 0.5,
    sinusoidal: bool = True,
) -> Tensor:
    r"""Apply a phasing effect to the audio. Similar to SoX implementation.
    .. devices:: CPU CUDA
    .. properties:: Autograd TorchScript
    Args:
        waveform (Tensor): audio waveform of dimension of `(..., time)`
        sample_rate (int): sampling rate of the waveform, e.g. 44100 (Hz)
        gain_in (float, optional): desired input gain at the boost (or attenuation) in dB
            Allowed range of values are 0 to 1
        gain_out (float, optional): desired output gain at the boost (or attenuation) in dB
            Allowed range of values are 0 to 1e9
        delay_ms (float, optional): desired delay in milliseconds
            Allowed range of values are 0 to 5.0
        decay (float, optional): desired decay relative to gain-in
            Allowed range of values are 0 to 0.99
        mod_speed (float, optional): modulation speed in Hz
            Allowed range of values are 0.1 to 2
        sinusoidal (bool, optional): If ``True``, uses sinusoidal modulation (preferable for multiple instruments)
            If ``False``, uses triangular modulation (gives single instruments a sharper phasing effect)
            (Default: ``True``)
    Returns:
        Tensor: Waveform of dimension of `(..., time)`
    Reference:
        - http://sox.sourceforge.net/sox.html
        - Scott Lehman, `Effects Explained`_.
    .. _Effects Explained:
        https://web.archive.org/web/20051125072557/http://www.harmony-central.com/Effects/effects-explained.html
    """
    actual_shape = waveform.shape
    device, dtype = waveform.device, waveform.dtype
    # convert to 2D (channels,time)
    waveform = waveform.view(-1, actual_shape[-1])
    # Delay-line length in samples (rounded).
    delay_buf_len = int((delay_ms * 0.001 * sample_rate) + 0.5)
    delay_buf = torch.zeros(waveform.shape[0], delay_buf_len, dtype=dtype, device=device)
    # One LFO period in samples.
    mod_buf_len = int(sample_rate / mod_speed + 0.5)
    if sinusoidal:
        wave_type = "SINE"
    else:
        wave_type = "TRIANGLE"
    # LFO table of integer delay offsets in [1, delay_buf_len].
    mod_buf = _generate_wave_table(
        wave_type=wave_type,
        data_type="INT",
        table_size=mod_buf_len,
        min=1.0,
        max=float(delay_buf_len),
        phase=math.pi / 2,
        device=device,
    )
    delay_pos = 0
    mod_pos = 0
    output_waveform_pre_gain_list = []
    waveform = waveform * gain_in
    delay_buf = delay_buf * decay
    # Unbind the tensors into per-index Python lists so the sequential
    # feedback loop below works on list elements.
    waveform_list = [waveform[:, i] for i in range(waveform.size(1))]
    delay_buf_list = [delay_buf[:, i] for i in range(delay_buf.size(1))]
    mod_buf_list = [mod_buf[i] for i in range(mod_buf.size(0))]
    for i in range(waveform.shape[-1]):
        # Read position: write head plus the LFO-modulated delay offset.
        idx = int((delay_pos + mod_buf_list[mod_pos]) % delay_buf_len)
        mod_pos = (mod_pos + 1) % mod_buf_len
        delay_pos = (delay_pos + 1) % delay_buf_len
        # Mix the dry sample with the delayed sample, then feed the mix back
        # into the circular delay buffer, scaled by decay.
        temp = (waveform_list[i]) + (delay_buf_list[idx])
        delay_buf_list[delay_pos] = temp * decay
        output_waveform_pre_gain_list.append(temp)
    output_waveform = torch.stack(output_waveform_pre_gain_list, dim=1).to(dtype=dtype, device=device)
    output_waveform.mul_(gain_out)
    return output_waveform.clamp(min=-1, max=1).view(actual_shape)
def riaa_biquad(waveform: Tensor, sample_rate: int) -> Tensor:
    r"""Apply RIAA vinyl playback equalization. Similar to SoX implementation.

    .. devices:: CPU CUDA

    .. properties:: Autograd TorchScript

    Args:
        waveform (Tensor): audio waveform of dimension of `(..., time)`
        sample_rate (int): sampling rate of the waveform, e.g. 44100 (Hz).
            Allowed sample rates in Hz : ``44100``,``48000``,``88200``,``96000``

    Returns:
        Tensor: Waveform of dimension of `(..., time)`

    Reference:
        - http://sox.sourceforge.net/sox.html
        - https://www.w3.org/2011/audio/audio-eq-cookbook.html#APF
    """
    # Precomputed zero/pole pairs per supported sample rate.
    if sample_rate == 44100:
        z0, z1 = -0.2014898, 0.9233820
        p0, p1 = 0.7083149, 0.9924091
    elif sample_rate == 48000:
        z0, z1 = -0.1766069, 0.9321590
        p0, p1 = 0.7396325, 0.9931330
    elif sample_rate == 88200:
        z0, z1 = -0.1168735, 0.9648312
        p0, p1 = 0.8590646, 0.9964002
    elif sample_rate == 96000:
        z0, z1 = -0.1141486, 0.9676817
        p0, p1 = 0.8699137, 0.9966946
    else:
        raise ValueError("Sample rate must be 44.1k, 48k, 88.2k, or 96k")

    # Expand (x - z0)(x - z1) and (x - p0)(x - p1) into biquad coefficients.
    b0, b1, b2 = 1.0, -(z0 + z1), z0 * z1
    a0, a1, a2 = 1.0, -(p0 + p1), p0 * p1

    # Normalize to 0dB at 1kHz: evaluate the frequency response there and
    # scale the numerator by the inverse magnitude.
    y = 2 * math.pi * 1000 / sample_rate
    b_re = b0 + b1 * math.cos(-y) + b2 * math.cos(-2 * y)
    a_re = a0 + a1 * math.cos(-y) + a2 * math.cos(-2 * y)
    b_im = b1 * math.sin(-y) + b2 * math.sin(-2 * y)
    a_im = a1 * math.sin(-y) + a2 * math.sin(-2 * y)
    g = 1 / math.sqrt((b_re**2 + b_im**2) / (a_re**2 + a_im**2))

    return biquad(waveform, b0 * g, b1 * g, b2 * g, a0, a1, a2)
def treble_biquad(
    waveform: Tensor,
    sample_rate: int,
    gain: float,
    central_freq: float = 3000,
    Q: float = 0.707,
) -> Tensor:
    r"""Design a treble tone-control effect. Similar to SoX implementation.

    .. devices:: CPU CUDA

    .. properties:: Autograd TorchScript

    Args:
        waveform (Tensor): audio waveform of dimension of `(..., time)`
        sample_rate (int): sampling rate of the waveform, e.g. 44100 (Hz)
        gain (float or torch.Tensor): desired gain at the boost (or attenuation) in dB.
        central_freq (float or torch.Tensor, optional): central frequency (in Hz). (Default: ``3000``)
        Q (float or torch.Tensor, optional): https://en.wikipedia.org/wiki/Q_factor (Default: ``0.707``).

    Returns:
        Tensor: Waveform of dimension of `(..., time)`

    Reference:
        - http://sox.sourceforge.net/sox.html
        - https://www.w3.org/2011/audio/audio-eq-cookbook.html#APF
    """
    dtype = waveform.dtype
    device = waveform.device
    central_freq = torch.as_tensor(central_freq, dtype=dtype, device=device)
    Q = torch.as_tensor(Q, dtype=dtype, device=device)
    gain = torch.as_tensor(gain, dtype=dtype, device=device)

    w0 = 2 * math.pi * central_freq / sample_rate
    alpha = torch.sin(w0) / 2 / Q
    amp = torch.exp(gain / 40 * math.log(10))

    # Shared shelving intermediates.
    sqrt_term = 2 * torch.sqrt(amp) * alpha
    cos_minus = (amp - 1) * torch.cos(w0)
    cos_plus = (amp + 1) * torch.cos(w0)

    # High-shelf biquad coefficients.
    b0 = amp * ((amp + 1) + cos_minus + sqrt_term)
    b1 = -2 * amp * ((amp - 1) + cos_plus)
    b2 = amp * ((amp + 1) + cos_minus - sqrt_term)
    a0 = (amp + 1) - cos_minus + sqrt_term
    a1 = 2 * ((amp - 1) - cos_plus)
    a2 = (amp + 1) - cos_minus - sqrt_term
    return biquad(waveform, b0, b1, b2, a0, a1, a2)
def _measure(
    measure_len_ws: int,
    samples: Tensor,
    spectrum: Tensor,
    noise_spectrum: Tensor,
    spectrum_window: Tensor,
    spectrum_start: int,
    spectrum_end: int,
    cepstrum_window: Tensor,
    cepstrum_start: int,
    cepstrum_end: int,
    noise_reduction_amount: float,
    measure_smooth_time_mult: float,
    noise_up_time_mult: float,
    noise_down_time_mult: float,
    index_ns: int,
    boot_count: int,
) -> float:
    """Compute one cepstral-power voice measurement for a single channel.

    Port of the per-measurement step of SoX's ``vad`` effect (the C-source
    comments below reference the original SoX lines).  ``spectrum`` and
    ``noise_spectrum`` are running state and are updated IN PLACE via the
    ``mul_``/``add_`` calls; statement order is therefore significant.
    Returns a non-negative scalar measurement (larger ~ more voice-like).
    """
    assert spectrum.size()[-1] == noise_spectrum.size()[-1]

    samplesLen_ns = samples.size()[-1]
    dft_len_ws = spectrum.size()[-1]

    # Gather the most recent measure window from the circular sample buffer,
    # starting at index_ns and wrapping modulo the buffer length.
    dftBuf = torch.zeros(dft_len_ws)
    _index_ns = torch.tensor([index_ns] + [(index_ns + i) % samplesLen_ns for i in range(1, measure_len_ws)])
    dftBuf[:measure_len_ws] = samples[_index_ns] * spectrum_window[:measure_len_ws]

    # memset(c->dftBuf + i, 0, (p->dft_len_ws - i) * sizeof(*c->dftBuf));
    dftBuf[measure_len_ws:dft_len_ws].zero_()

    # lsx_safe_rdft((int)p->dft_len_ws, 1, c->dftBuf);
    _dftBuf = torch.fft.rfft(dftBuf)

    # memset(c->dftBuf, 0, p->spectrum_start * sizeof(*c->dftBuf));
    _dftBuf[:spectrum_start].zero_()

    # While booting (boot_count >= 0) ramp the smoothing factor; afterwards use
    # the configured time constant.
    mult: float = boot_count / (1.0 + boot_count) if boot_count >= 0 else measure_smooth_time_mult

    # Exponentially smooth the magnitude spectrum in place.
    _d = _dftBuf[spectrum_start:spectrum_end].abs()
    spectrum[spectrum_start:spectrum_end].mul_(mult).add_(_d * (1 - mult))
    _d = spectrum[spectrum_start:spectrum_end] ** 2

    # Adaptive noise estimate: during boot, copy power directly (mult == 0);
    # afterwards track up/down with separate time constants.
    _zeros = torch.zeros(spectrum_end - spectrum_start)
    _mult = (
        _zeros
        if boot_count >= 0
        else torch.where(
            _d > noise_spectrum[spectrum_start:spectrum_end],
            torch.tensor(noise_up_time_mult),  # if
            torch.tensor(noise_down_time_mult),  # else
        )
    )
    noise_spectrum[spectrum_start:spectrum_end].mul_(_mult).add_(_d * (1 - _mult))

    # Spectral subtraction, floored at zero.
    _d = torch.sqrt(
        torch.max(
            _zeros,
            _d - noise_reduction_amount * noise_spectrum[spectrum_start:spectrum_end],
        )
    )

    # Second (half-length) transform of the liftered spectrum -> cepstrum.
    _cepstrum_Buf: Tensor = torch.zeros(dft_len_ws >> 1)
    _cepstrum_Buf[spectrum_start:spectrum_end] = _d * cepstrum_window
    _cepstrum_Buf[spectrum_end : dft_len_ws >> 1].zero_()

    # lsx_safe_rdft((int)p->dft_len_ws >> 1, 1, c->dftBuf);
    _cepstrum_Buf = torch.fft.rfft(_cepstrum_Buf)

    # Power in the lifter band, mapped to a dB-like scale (21 dB offset as in SoX).
    result: float = float(torch.sum(_cepstrum_Buf[cepstrum_start:cepstrum_end].abs().pow(2)))
    result = math.log(result / (cepstrum_end - cepstrum_start)) if result > 0 else -math.inf
    return max(0, 21 + result)
def vad(
    waveform: Tensor,
    sample_rate: int,
    trigger_level: float = 7.0,
    trigger_time: float = 0.25,
    search_time: float = 1.0,
    allowed_gap: float = 0.25,
    pre_trigger_time: float = 0.0,
    # Fine-tuning parameters
    boot_time: float = 0.35,
    noise_up_time: float = 0.1,
    noise_down_time: float = 0.01,
    noise_reduction_amount: float = 1.35,
    measure_freq: float = 20.0,
    measure_duration: Optional[float] = None,
    measure_smooth_time: float = 0.4,
    hp_filter_freq: float = 50.0,
    lp_filter_freq: float = 6000.0,
    hp_lifter_freq: float = 150.0,
    lp_lifter_freq: float = 2000.0,
) -> Tensor:
    r"""Voice Activity Detector. Similar to SoX implementation.

    .. devices:: CPU CUDA

    .. properties:: TorchScript

    Attempts to trim silence and quiet background sounds from the ends of recordings of speech.
    The algorithm currently uses a simple cepstral power measurement to detect voice,
    so may be fooled by other things, especially music.

    The effect can trim only from the front of the audio,
    so in order to trim from the back, the reverse effect must also be used.

    Args:
        waveform (Tensor): Tensor of audio of dimension `(channels, time)` or `(time)`
            Tensor of shape `(channels, time)` is treated as a multi-channel recording
            of the same event and the resulting output will be trimmed to the earliest
            voice activity in any channel.
        sample_rate (int): Sample rate of audio signal.
        trigger_level (float, optional): The measurement level used to trigger activity detection.
            This may need to be changed depending on the noise level, signal level,
            and other characteristics of the input audio. (Default: 7.0)
        trigger_time (float, optional): The time constant (in seconds)
            used to help ignore short bursts of sound. (Default: 0.25)
        search_time (float, optional): The amount of audio (in seconds)
            to search for quieter/shorter bursts of audio to include prior
            to the detected trigger point. (Default: 1.0)
        allowed_gap (float, optional): The allowed gap (in seconds) between
            quieter/shorter bursts of audio to include prior
            to the detected trigger point. (Default: 0.25)
        pre_trigger_time (float, optional): The amount of audio (in seconds) to preserve
            before the trigger point and any found quieter/shorter bursts. (Default: 0.0)
        boot_time (float, optional): The algorithm (internally) uses adaptive noise
            estimation/reduction in order to detect the start of the wanted audio.
            This option sets the time for the initial noise estimate. (Default: 0.35)
        noise_up_time (float, optional): Time constant used by the adaptive noise estimator
            for when the noise level is increasing. (Default: 0.1)
        noise_down_time (float, optional): Time constant used by the adaptive noise estimator
            for when the noise level is decreasing. (Default: 0.01)
        noise_reduction_amount (float, optional): Amount of noise reduction to use in
            the detection algorithm (e.g. 0, 0.5, ...). (Default: 1.35)
        measure_freq (float, optional): Frequency of the algorithm’s
            processing/measurements. (Default: 20.0)
        measure_duration (float, optional): Measurement duration.
            (Default: Twice the measurement period; i.e. with overlap.)
        measure_smooth_time (float, optional): Time constant used to smooth
            spectral measurements. (Default: 0.4)
        hp_filter_freq (float, optional): "Brick-wall" frequency of high-pass filter applied
            at the input to the detector algorithm. (Default: 50.0)
        lp_filter_freq (float, optional): "Brick-wall" frequency of low-pass filter applied
            at the input to the detector algorithm. (Default: 6000.0)
        hp_lifter_freq (float, optional): "Brick-wall" frequency of high-pass lifter used
            in the detector algorithm. (Default: 150.0)
        lp_lifter_freq (float, optional): "Brick-wall" frequency of low-pass lifter used
            in the detector algorithm. (Default: 2000.0)

    Returns:
        Tensor: Tensor of audio of dimension `(..., time)`.

    Reference:
        - http://sox.sourceforge.net/sox.html
    """
    if waveform.ndim > 2:
        warnings.warn(
            "Expected input tensor dimension of 1 for single channel"
            f" or 2 for multi-channel. Got {waveform.ndim} instead. "
            "Batch semantics is not supported. "
            "Please refer to https://github.com/pytorch/audio/issues/1348"
            " and https://github.com/pytorch/audio/issues/1468."
        )

    measure_duration: float = 2.0 / measure_freq if measure_duration is None else measure_duration

    # Window/buffer sizing (suffixes as in SoX: _ws = window samples, _ns = buffer samples).
    measure_len_ws = int(sample_rate * measure_duration + 0.5)
    measure_len_ns = measure_len_ws
    # for (dft_len_ws = 16; dft_len_ws < measure_len_ws; dft_len_ws <<= 1);
    dft_len_ws = 16
    while dft_len_ws < measure_len_ws:
        dft_len_ws *= 2

    measure_period_ns = int(sample_rate / measure_freq + 0.5)
    measures_len = math.ceil(search_time * measure_freq)
    search_pre_trigger_len_ns = measures_len * measure_period_ns
    gap_len = int(allowed_gap * measure_freq + 0.5)

    fixed_pre_trigger_len_ns = int(pre_trigger_time * sample_rate + 0.5)
    samplesLen_ns = fixed_pre_trigger_len_ns + search_pre_trigger_len_ns + measure_len_ns

    # Analysis window: constant scale times a Hann window.
    spectrum_window = torch.zeros(measure_len_ws)
    for i in range(measure_len_ws):
        # sox.h:741 define SOX_SAMPLE_MIN (sox_sample_t)SOX_INT_MIN(32)
        spectrum_window[i] = 2.0 / math.sqrt(float(measure_len_ws))
    # lsx_apply_hann(spectrum_window, (int)measure_len_ws);
    spectrum_window *= torch.hann_window(measure_len_ws, dtype=torch.float)

    # Brick-wall band limits expressed as DFT bin indices.
    spectrum_start: int = int(hp_filter_freq / sample_rate * dft_len_ws + 0.5)
    spectrum_start: int = max(spectrum_start, 1)
    spectrum_end: int = int(lp_filter_freq / sample_rate * dft_len_ws + 0.5)
    spectrum_end: int = min(spectrum_end, dft_len_ws // 2)

    cepstrum_window = torch.zeros(spectrum_end - spectrum_start)
    for i in range(spectrum_end - spectrum_start):
        cepstrum_window[i] = 2.0 / math.sqrt(float(spectrum_end) - spectrum_start)
    # lsx_apply_hann(cepstrum_window,(int)(spectrum_end - spectrum_start));
    cepstrum_window *= torch.hann_window(spectrum_end - spectrum_start, dtype=torch.float)

    cepstrum_start = math.ceil(sample_rate * 0.5 / lp_lifter_freq)
    cepstrum_end = math.floor(sample_rate * 0.5 / hp_lifter_freq)
    cepstrum_end = min(cepstrum_end, dft_len_ws // 4)

    assert cepstrum_end > cepstrum_start

    # Convert time constants (seconds) to per-measurement smoothing multipliers.
    noise_up_time_mult = math.exp(-1.0 / (noise_up_time * measure_freq))
    noise_down_time_mult = math.exp(-1.0 / (noise_down_time * measure_freq))
    measure_smooth_time_mult = math.exp(-1.0 / (measure_smooth_time * measure_freq))
    trigger_meas_time_mult = math.exp(-1.0 / (trigger_time * measure_freq))

    boot_count_max = int(boot_time * measure_freq - 0.5)
    measure_timer_ns = measure_len_ns
    boot_count = measures_index = flushedLen_ns = samplesIndex_ns = 0

    # pack batch
    shape = waveform.size()
    waveform = waveform.view(-1, shape[-1])

    n_channels, ilen = waveform.size()

    # Per-channel running state: circular sample buffer, smoothed spectra and
    # the ring of recent measurements searched once triggered.
    mean_meas = torch.zeros(n_channels)
    samples = torch.zeros(n_channels, samplesLen_ns)
    spectrum = torch.zeros(n_channels, dft_len_ws)
    noise_spectrum = torch.zeros(n_channels, dft_len_ws)
    measures = torch.zeros(n_channels, measures_len)

    has_triggered: bool = False
    num_measures_to_flush: int = 0
    pos: int = 0

    while pos < ilen and not has_triggered:
        measure_timer_ns -= 1
        for i in range(n_channels):
            samples[i, samplesIndex_ns] = waveform[i, pos]
            # if (!p->measure_timer_ns) {
            if measure_timer_ns == 0:
                index_ns: int = (samplesIndex_ns + samplesLen_ns - measure_len_ns) % samplesLen_ns
                meas: float = _measure(
                    measure_len_ws=measure_len_ws,
                    samples=samples[i],
                    spectrum=spectrum[i],
                    noise_spectrum=noise_spectrum[i],
                    spectrum_window=spectrum_window,
                    spectrum_start=spectrum_start,
                    spectrum_end=spectrum_end,
                    cepstrum_window=cepstrum_window,
                    cepstrum_start=cepstrum_start,
                    cepstrum_end=cepstrum_end,
                    noise_reduction_amount=noise_reduction_amount,
                    measure_smooth_time_mult=measure_smooth_time_mult,
                    noise_up_time_mult=noise_up_time_mult,
                    noise_down_time_mult=noise_down_time_mult,
                    index_ns=index_ns,
                    boot_count=boot_count,
                )
                measures[i, measures_index] = meas
                mean_meas[i] = mean_meas[i] * trigger_meas_time_mult + meas * (1.0 - trigger_meas_time_mult)

                has_triggered = has_triggered or (mean_meas[i] >= trigger_level)
                if has_triggered:
                    # Walk backwards through the measurement ring to find how
                    # far before the trigger the voiced region starts.
                    n: int = measures_len
                    k: int = measures_index
                    jTrigger: int = n
                    jZero: int = n
                    j: int = 0

                    for j in range(n):
                        if (measures[i, k] >= trigger_level) and (j <= jTrigger + gap_len):
                            jZero = jTrigger = j
                        elif (measures[i, k] == 0) and (jTrigger >= jZero):
                            jZero = j
                        k = (k + n - 1) % n
                    j = min(j, jZero)
                    # num_measures_to_flush = range_limit(j, num_measures_to_flush, n);
                    num_measures_to_flush = min(max(num_measures_to_flush, j), n)
                # end if has_triggered
            # end if (measure_timer_ns == 0):
        # end for
        samplesIndex_ns += 1
        pos += 1
        # end while
        if samplesIndex_ns == samplesLen_ns:
            samplesIndex_ns = 0
        if measure_timer_ns == 0:
            measure_timer_ns = measure_period_ns
            measures_index += 1
            measures_index = measures_index % measures_len
            if boot_count >= 0:
                boot_count = -1 if boot_count == boot_count_max else boot_count + 1

    if has_triggered:
        flushedLen_ns = (measures_len - num_measures_to_flush) * measure_period_ns
        samplesIndex_ns = (samplesIndex_ns + flushedLen_ns) % samplesLen_ns

    # If never triggered, flushedLen_ns is 0 and the tail buffer is returned as-is.
    res = waveform[:, pos - samplesLen_ns + flushedLen_ns :]
    # unpack batch
    return res.view(shape[:-1] + res.shape[-1:])
| 36.536101
| 119
| 0.630222
|
4a08425ffb167ba8ab6e7c45bc2c4bc6f3f90d11
| 4,008
|
py
|
Python
|
tools/boostbook/test/more/run-tests.py
|
lijgame/boost
|
ec2214a19cdddd1048058321a8105dd0231dac47
|
[
"BSL-1.0"
] | null | null | null |
tools/boostbook/test/more/run-tests.py
|
lijgame/boost
|
ec2214a19cdddd1048058321a8105dd0231dac47
|
[
"BSL-1.0"
] | null | null | null |
tools/boostbook/test/more/run-tests.py
|
lijgame/boost
|
ec2214a19cdddd1048058321a8105dd0231dac47
|
[
"BSL-1.0"
] | null | null | null |
#!/usr/bin/env python
# Copyright 2010 Daniel James.
# Distributed under the Boost Software License, Version 1.0. (See accompanying
# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
"""Boostbook tests
Usage: python build_docs.py [--generate-gold]
"""
import difflib, getopt, os, re, sys
import lxml.ElementInclude
from lxml import etree
from collections import defaultdict
# Globals
def usage_and_exit():
    """Print the module usage string and exit with status 2 (bad usage)."""
    # Python 3 compatible print (the old `print __doc__` statement is a
    # syntax error under Python 3; print(x) with one argument is identical
    # under Python 2 as well).
    print(__doc__)
    sys.exit(2)
def main(argv):
    """Run the BoostBook regression tests.

    argv: command-line arguments (sys.argv[1:]); the only option is
    --generate-gold, which regenerates the .gold reference files instead
    of comparing against them.
    """
    script_directory = os.path.dirname(sys.argv[0])
    boostbook_directory = os.path.join(script_directory, "../../xsl")

    try:
        opts, args = getopt.getopt(argv, "", ["generate-gold"])
        if args:
            usage_and_exit()
    except getopt.GetoptError:
        usage_and_exit()

    generate_gold = False
    for opt, arg in opts:
        if opt == '--generate-gold':
            generate_gold = True

    # Compile the BoostBook stylesheet once; it is reused for every test file.
    # (Python 3 fix: `except X as e` / print() replace the Python-2-only forms.)
    parser = etree.XMLParser()
    try:
        boostbook_xsl = etree.XSLT(
            etree.parse(os.path.join(boostbook_directory, "docbook.xsl"), parser)
        )
    except lxml.etree.XMLSyntaxError as error:
        print("Error parsing boostbook xsl:")
        print(error)
        sys.exit(1)

    # Walk the test directory: each .xml input is transformed and either
    # compared against, or used to regenerate, the matching .gold file.
    for root, dirs, files in os.walk(os.path.join(script_directory, 'tests')):
        for filename in files:
            (base, ext) = os.path.splitext(filename)
            if ext == '.xml':
                src_path = os.path.join(root, filename)
                gold_path = os.path.join(root, base + '.gold')
                try:
                    doc_text = run_boostbook(parser, boostbook_xsl, src_path)
                except Exception:
                    # TODO: Need better error reporting here:
                    # (narrowed from a bare `except:` so Ctrl-C still works)
                    print("Error running boostbook for " + src_path)
                    continue
                if generate_gold:
                    # `with` guarantees the handle is closed even on write error.
                    with open(gold_path, 'w') as gold_file:
                        gold_file.write(doc_text)
                else:
                    with open(gold_path, 'r') as gold_file:
                        gold_text = gold_file.read()
                    compare_xml(src_path, doc_text, gold_text)
def run_boostbook(parser, boostbook_xsl, file):
    """Apply the BoostBook XSLT to *file*, normalize ids, return the text."""
    transformed = boostbook_xsl(etree.parse(file, parser))
    normalize_boostbook_ids(transformed)
    return etree.tostring(transformed)
def normalize_boostbook_ids(doc):
    """Rewrite auto-generated ids (and the linkends referencing them) into a
    stable, counter-based form so output can be diffed against gold files.

    Mutates *doc* (an lxml element tree) in place.
    """
    ids = {}
    id_bases = defaultdict(int)

    for node in doc.xpath("//*[starts-with(@id, 'id') or contains(@id, '_id')]"):
        node_id = node.get('id')
        if node_id in ids:
            # Python 3 fix: print() replaces the old print statement.
            print('Duplicate id: ' + node_id)
        # Raw strings so `\d` is a regex class, not a (deprecated) escape.
        match = re.match(r"(.+_id|id)([mp]?\d+)((?:-bb)?)", node_id)
        if match:
            # Truncate id name, as it sometimes has different lengths...
            match2 = re.match(r"(.*?)([^.]*?)(_?id)", match.group(1))
            base = match2.group(1) + match2.group(2)[:7] + match2.group(3)
            count = id_bases[base] + 1
            id_bases[base] = count
            ids[node_id] = base + str(count) + match.group(3)

    # Second pass: rewrite both id attributes and linkend references.
    for node in doc.xpath("//*[@linkend or @id]"):
        x = node.get('linkend')
        if x in ids:
            node.set('linkend', ids[x])
        x = node.get('id')
        if x in ids:
            node.set('id', ids[x])
def compare_xml(file, doc_text, gold_text):
    """Print a unified diff to stdout when generated output differs from gold.

    file: path used only for the error banner; doc_text/gold_text: the two
    document texts to compare. Returns None.
    """
    # Had hoped to use xmldiff but it turned out to be a pain to install.
    # So instead just do a text diff.
    # (Python 3 fix: print() calls replace the old print statements.)
    if doc_text != gold_text:
        print("Error: " + file)
        print()
        sys.stdout.writelines(
            difflib.unified_diff(
                gold_text.splitlines(True),
                doc_text.splitlines(True)
            )
        )
        print()
        print()
# Script entry point: forward command-line arguments (minus argv[0]) to main().
if __name__ == "__main__":
    main(sys.argv[1:])
| 31.559055
| 82
| 0.529691
|
4a0842b7433093fa065c0fad56ed1e0a4e524a61
| 75,810
|
py
|
Python
|
tests/unit/faucet/valve_test_lib.py
|
snak1219/faucet
|
811048bcbc7ed3828ff2f034c2668010c226eacc
|
[
"Apache-2.0"
] | null | null | null |
tests/unit/faucet/valve_test_lib.py
|
snak1219/faucet
|
811048bcbc7ed3828ff2f034c2668010c226eacc
|
[
"Apache-2.0"
] | null | null | null |
tests/unit/faucet/valve_test_lib.py
|
snak1219/faucet
|
811048bcbc7ed3828ff2f034c2668010c226eacc
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
"""Library for test_valve.py."""
# Copyright (C) 2015 Research and Innovation Advanced Network New Zealand Ltd.
# Copyright (C) 2015--2019 The Contributors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from collections import namedtuple
from functools import partial
import cProfile
import io
import ipaddress
import logging
import os
import pstats
import shutil
import socket
import tempfile
import time
import unittest
import yaml
from ryu.lib import mac
from ryu.lib.packet import (
arp, ethernet, icmp, icmpv6, ipv4, ipv6, lldp, slow, packet, vlan)
from ryu.ofproto import ether, inet
from ryu.ofproto import ofproto_v1_3 as ofp
from ryu.ofproto import ofproto_v1_3_parser as parser
from prometheus_client import CollectorRegistry
from beka.route import RouteAddition, RouteRemoval
from beka.ip import IPAddress, IPPrefix
from faucet import faucet_bgp
from faucet import faucet_dot1x
from faucet import faucet_event
from faucet import faucet_metrics
from faucet import valves_manager
from faucet import valve_of
from faucet import valve_packet
from faucet import valve_util
from faucet.valve import TfmValve
from fakeoftable import FakeOFTable
def build_pkt(pkt):
    """Build and return a packet and eth type from a dict.

    The dict's keys select which protocol stack is built (exactly one of the
    branches below must match): ARP, ICMPv6-over-IPv6, ICMPv4-over-IPv4,
    LACP, or LLDP. 'eth_src'/'eth_dst' are always required; 'vid' optionally
    wraps the payload in an 802.1Q VLAN tag.
    """
    def serialize(layers):
        """Concatenate packet layers and serialize."""
        result = packet.Packet()
        # ryu expects protocols added outermost-first, so reverse the list
        # (layers is built innermost-first below).
        for layer in reversed(layers):
            result.add_protocol(layer)
        result.serialize()
        return result

    layers = []
    assert 'eth_dst' in pkt and 'eth_src' in pkt
    ethertype = None
    if 'arp_source_ip' in pkt and 'arp_target_ip' in pkt:
        # ARP request/reply ('arp_code' overrides the default REQUEST opcode).
        ethertype = ether.ETH_TYPE_ARP
        arp_code = pkt.get('arp_code', arp.ARP_REQUEST)
        layers.append(arp.arp(
            src_ip=pkt['arp_source_ip'],
            dst_ip=pkt['arp_target_ip'],
            opcode=arp_code))
    elif 'ipv6_src' in pkt and 'ipv6_dst' in pkt:
        # IPv6: optionally carrying one of the ICMPv6 ND/echo message types.
        ethertype = ether.ETH_TYPE_IPV6
        if 'router_solicit_ip' in pkt:
            layers.append(icmpv6.icmpv6(
                type_=icmpv6.ND_ROUTER_SOLICIT))
        elif 'neighbor_advert_ip' in pkt:
            layers.append(icmpv6.icmpv6(
                type_=icmpv6.ND_NEIGHBOR_ADVERT,
                data=icmpv6.nd_neighbor(
                    dst=pkt['neighbor_advert_ip'],
                    option=icmpv6.nd_option_sla(hw_src=pkt['eth_src']))))
        elif 'neighbor_solicit_ip' in pkt:
            layers.append(icmpv6.icmpv6(
                type_=icmpv6.ND_NEIGHBOR_SOLICIT,
                data=icmpv6.nd_neighbor(
                    dst=pkt['neighbor_solicit_ip'],
                    option=icmpv6.nd_option_sla(hw_src=pkt['eth_src']))))
        elif 'echo_request_data' in pkt:
            layers.append(icmpv6.icmpv6(
                type_=icmpv6.ICMPV6_ECHO_REQUEST,
                data=icmpv6.echo(id_=1, seq=1, data=pkt['echo_request_data'])))
        layers.append(ipv6.ipv6(
            src=pkt['ipv6_src'],
            dst=pkt['ipv6_dst'],
            nxt=inet.IPPROTO_ICMPV6))
    elif 'ipv4_src' in pkt and 'ipv4_dst' in pkt:
        # IPv4, optionally with an ICMP echo request payload.
        ethertype = ether.ETH_TYPE_IP
        proto = inet.IPPROTO_IP
        if 'echo_request_data' in pkt:
            echo = icmp.echo(id_=1, seq=1, data=pkt['echo_request_data'])
            layers.append(icmp.icmp(type_=icmp.ICMP_ECHO_REQUEST, data=echo))
            proto = inet.IPPROTO_ICMP
        net = ipv4.ipv4(src=pkt['ipv4_src'], dst=pkt['ipv4_dst'], proto=proto)
        layers.append(net)
    elif 'actor_system' in pkt and 'partner_system' in pkt:
        # LACP PDU with fixed test keys/priorities; only actor sync varies.
        ethertype = ether.ETH_TYPE_SLOW
        layers.append(slow.lacp(
            version=1,
            actor_system=pkt['actor_system'],
            actor_port=1,
            partner_system=pkt['partner_system'],
            partner_port=1,
            actor_key=1,
            partner_key=1,
            actor_system_priority=65535,
            partner_system_priority=1,
            actor_port_priority=255,
            partner_port_priority=255,
            actor_state_defaulted=0,
            partner_state_defaulted=0,
            actor_state_expired=0,
            partner_state_expired=0,
            actor_state_timeout=1,
            partner_state_timeout=1,
            actor_state_collecting=1,
            partner_state_collecting=1,
            actor_state_distributing=1,
            partner_state_distributing=1,
            actor_state_aggregation=1,
            partner_state_aggregation=1,
            actor_state_synchronization=pkt['actor_state_synchronization'],
            partner_state_synchronization=1,
            actor_state_activity=0,
            partner_state_activity=0))
    elif 'chassis_id' in pkt and 'port_id' in pkt:
        # LLDP beacons are built by faucet's own helper and returned directly
        # (early return: no VLAN tagging/serialize step applies).
        ethertype = ether.ETH_TYPE_LLDP
        return valve_packet.lldp_beacon(
            pkt['eth_src'], pkt['chassis_id'], str(pkt['port_id']), 1,
            org_tlvs=pkt.get('org_tlvs', None),
            system_name=pkt.get('system_name', None))
    assert ethertype is not None, pkt
    if 'vid' in pkt:
        tpid = ether.ETH_TYPE_8021Q
        layers.append(vlan.vlan(vid=pkt['vid'], ethertype=ethertype))
    else:
        tpid = ethertype
    eth = ethernet.ethernet(
        dst=pkt['eth_dst'],
        src=pkt['eth_src'],
        ethertype=tpid)
    layers.append(eth)
    result = serialize(layers)
    return result
FAUCET_MAC = '0e:00:00:00:00:01'
BASE_DP_CONFIG = """
hardware: 'GenericTFM'
ignore_learn_ins: 100
ofchannel_log: '/dev/null'
packetin_pps: 99
slowpath_pps: 99
lldp_beacon:
send_interval: 1
max_per_interval: 1
"""
BASE_DP1_CONFIG = """
dp_id: 1
""" + BASE_DP_CONFIG
DP1_CONFIG = """
combinatorial_port_flood: True
""" + BASE_DP1_CONFIG
IDLE_DP1_CONFIG = """
use_idle_timeout: True
""" + DP1_CONFIG
GROUP_DP1_CONFIG = """
group_table: True
""" + BASE_DP1_CONFIG
DOT1X_CONFIG = """
dot1x:
nfv_intf: lo
nfv_sw_port: 2
radius_ip: 127.0.0.1
radius_port: 1234
radius_secret: SECRET
""" + BASE_DP1_CONFIG
DOT1X_ACL_CONFIG = """
dot1x:
nfv_intf: lo
nfv_sw_port: 2
radius_ip: 127.0.0.1
radius_port: 1234
radius_secret: SECRET
auth_acl: auth_acl
noauth_acl: noauth_acl
""" + BASE_DP1_CONFIG
CONFIG = """
dps:
s1:
%s
interfaces:
p1:
number: 1
native_vlan: v100
lldp_beacon:
enable: True
system_name: "faucet"
port_descr: "first_port"
loop_protect: True
receive_lldp: True
max_hosts: 1
hairpin: True
p2:
number: 2
native_vlan: v200
tagged_vlans: [v100]
loop_protect: True
p3:
number: 3
tagged_vlans: [v100, v200]
p4:
number: 4
tagged_vlans: [v200]
p5:
number: 5
tagged_vlans: [v300]
s2:
hardware: 'GenericTFM'
dp_id: 0xdeadbeef
interfaces:
p1:
number: 1
native_vlan: v100
s3:
hardware: 'GenericTFM'
combinatorial_port_flood: True
dp_id: 0x3
stack:
priority: 1
interfaces:
p1:
number: 1
native_vlan: v300
p2:
number: 2
native_vlan: v300
p3:
number: 3
native_vlan: v300
p4:
number: 4
native_vlan: v300
5:
description: p5
stack:
dp: s4
port: 5
s4:
hardware: 'GenericTFM'
dp_id: 0x4
interfaces:
p1:
number: 1
native_vlan: v300
p2:
number: 2
native_vlan: v300
p3:
number: 3
native_vlan: v300
p4:
number: 4
native_vlan: v300
5:
description: p5
number: 5
stack:
dp: s3
port: 5
routers:
router1:
vlans: [v100, v200]
vlans:
v100:
vid: 0x100
targeted_gw_resolution: True
faucet_vips: ['10.0.0.254/24']
routes:
- route:
ip_dst: 10.99.99.0/24
ip_gw: 10.0.0.1
- route:
ip_dst: 10.99.98.0/24
ip_gw: 10.0.0.99
v200:
vid: 0x200
faucet_vips: ['fc00::1:254/112', 'fe80::1:254/64']
routes:
- route:
ip_dst: 'fc00::10:0/112'
ip_gw: 'fc00::1:1'
- route:
ip_dst: 'fc00::20:0/112'
ip_gw: 'fc00::1:99'
v300:
vid: 0x300
v400:
vid: 0x400
""" % DP1_CONFIG
STACK_CONFIG = """
dps:
s1:
%s
stack:
priority: 1
interfaces:
1:
description: p1
stack:
dp: s2
port: 1
2:
description: p2
stack:
dp: s2
port: 2
3:
description: p3
native_vlan: v100
s2:
hardware: 'GenericTFM'
dp_id: 0x2
stack:
priority: 2
interfaces:
1:
description: p1
stack:
dp: s1
port: 1
2:
description: p2
stack:
dp: s1
port: 2
3:
description: p3
stack:
dp: s3
port: 2
4:
description: p4
native_vlan: v100
s3:
dp_id: 0x3
hardware: 'GenericTFM'
interfaces:
1:
description: p1
native_vlan: v100
2:
description: p2
stack:
dp: s2
port: 3
vlans:
v100:
vid: 0x100
""" % DP1_CONFIG
STACK_LOOP_CONFIG = """
dps:
s1:
%s
interfaces:
1:
description: p1
stack:
dp: s2
port: 1
2:
description: p2
stack:
dp: s3
port: 1
3:
description: p3
native_vlan: v100
s2:
%s
faucet_dp_mac: 0e:00:00:00:01:02
dp_id: 0x2
interfaces:
1:
description: p1
stack:
dp: s1
port: 1
2:
description: p2
stack:
dp: s3
port: 2
3:
description: p3
native_vlan: v100
s3:
%s
faucet_dp_mac: 0e:00:00:00:01:03
dp_id: 0x3
stack:
priority: 1
interfaces:
1:
description: p1
stack:
dp: s1
port: 2
2:
description: p2
stack:
dp: s2
port: 2
3:
description: p3
native_vlan: v100
vlans:
v100:
vid: 0x100
""" % (BASE_DP1_CONFIG, BASE_DP_CONFIG, BASE_DP_CONFIG)
class ValveTestBases:
"""Insulate test base classes from unittest so we can reuse base clases."""
class ValveTestSmall(unittest.TestCase): # pytype: disable=module-attr
"""Base class for all Valve unit tests."""
# Default DP under test and basic fake-switch dimensions.
DP = 's1'
DP_ID = 1
NUM_PORTS = 5
NUM_TABLES = 10
# Test host MAC addresses, grouped by the VLAN they are used on.
P1_V100_MAC = '00:00:00:01:00:01'
P2_V100_MAC = '00:00:00:01:00:02'
P3_V100_MAC = '00:00:00:01:00:03'
P1_V200_MAC = '00:00:00:02:00:01'
P2_V200_MAC = '00:00:00:02:00:02'
P3_V200_MAC = '00:00:00:02:00:03'
P1_V300_MAC = '00:00:00:03:00:01'
UNKNOWN_MAC = '00:00:00:04:00:04'
BROADCAST_MAC = 'ff:ff:ff:ff:ff:ff'
# VLAN ids with the OpenFlow "VID present" bit set.
V100 = 0x100 | ofp.OFPVID_PRESENT
V200 = 0x200 | ofp.OFPVID_PRESENT
V300 = 0x300 | ofp.OFPVID_PRESENT
LOGNAME = 'faucet'
ICMP_PAYLOAD = bytes('A'*64, encoding='UTF-8')  # must support 64b payload.
REQUIRE_TFM = True
CONFIG_AUTO_REVERT = False
def __init__(self, *args, **kwargs):
    """Initialize all per-test state to empty defaults; setup_valve() populates them."""
    self.dot1x = None
    self.last_flows_to_dp = {}  # dp_id -> flows most recently "sent" to that DP
    self.valve = None
    self.valves_manager = None
    self.metrics = None
    self.bgp = None
    self.table = None  # simulated OpenFlow table (FakeOFTable)
    self.logger = None
    self.tmpdir = None
    self.faucet_event_sock = None
    self.registry = None  # Prometheus collector registry for metric assertions
    self.sock = None  # client socket connected to the event notifier
    self.notifier = None
    self.config_file = None
    self.up_ports = {}  # canonical stack-link key -> port (see _update_port_map)
    self.mock_now_sec = 100  # mock clock, advanced via mock_time()
    super(ValveTestBases.ValveTestSmall, self).__init__(*args, **kwargs)
def mock_time(self, increment_sec=1):
    """Advance the simulated clock by increment_sec and return the new time."""
    now = self.mock_now_sec + increment_sec
    self.mock_now_sec = now
    return now
def setup_valve(self, config, error_expected=0, log_stdout=False):
    """Set up test DP with config.

    Builds the whole simulated faucet stack in a temp dir: fake OF table,
    logger, metrics registry, event notifier, BGP/dot1x shims and the
    ValvesManager, then loads `config`. Returns the initial ofmsgs from
    the first config load.
    """
    self.tmpdir = tempfile.mkdtemp()
    self.config_file = os.path.join(self.tmpdir, 'valve_unit.yaml')
    self.faucet_event_sock = os.path.join(self.tmpdir, 'event.sock')
    self.table = FakeOFTable(self.NUM_TABLES)
    logfile = 'STDOUT' if log_stdout else os.path.join(self.tmpdir, 'faucet.log')
    self.logger = valve_util.get_logger(self.LOGNAME, logfile, logging.DEBUG, 0)
    self.registry = CollectorRegistry()
    self.metrics = faucet_metrics.FaucetMetrics(reg=self.registry)  # pylint: disable=unexpected-keyword-arg
    # TODO: verify events
    self.notifier = faucet_event.FaucetEventNotifier(
        self.faucet_event_sock, self.metrics, self.logger)
    # Both BGP and dot1x send flows through the recording callback rather
    # than a real switch connection.
    self.bgp = faucet_bgp.FaucetBgp(
        self.logger, logfile, self.metrics, self.send_flows_to_dp_by_id)
    self.dot1x = faucet_dot1x.FaucetDot1x(
        self.logger, logfile, self.metrics, self.send_flows_to_dp_by_id)
    self.valves_manager = valves_manager.ValvesManager(
        self.LOGNAME, self.logger, self.metrics, self.notifier,
        self.bgp, self.dot1x, self.CONFIG_AUTO_REVERT, self.send_flows_to_dp_by_id)
    self.last_flows_to_dp[self.DP_ID] = []
    self.notifier.start()
    initial_ofmsgs = self.update_config(config, reload_expected=False, error_expected=error_expected)
    self.sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
    self.sock.connect(self.faucet_event_sock)
    # Only simulate the DP connecting when the config was expected to load.
    if not error_expected:
        self.connect_dp()
    return initial_ofmsgs
def teardown_valve(self):
    """Shut down BGP and loggers, close sockets, remove the temp dir."""
    self.bgp.shutdown_bgp_speakers()
    valve_util.close_logger(self.logger)
    for dp_valve in list(self.valves_manager.valves.values()):
        dp_valve.close_logs()
    self.sock.close()
    shutil.rmtree(self.tmpdir)
def tearDown(self):
    """unittest hook: delegate all cleanup to teardown_valve()."""
    self.teardown_valve()
def apply_ofmsgs(self, ofmsgs):
    """Postprocess flows via the valve, apply them to the fake table, return them."""
    prepared = self.valve.prepare_send_flows(ofmsgs)
    self.table.apply_ofmsgs(prepared)
    return prepared
@staticmethod
def profile(func, sortby='cumulative', amount=20, count=1):
    """Profile `count` calls of `func`; return (stats object, report text)."""
    profiler = cProfile.Profile()
    profiler.enable()
    for _ in range(count):
        func()
    profiler.disable()
    report = io.StringIO()
    stats = pstats.Stats(profiler, stream=report).sort_stats(sortby)
    stats.print_stats(amount)
    return (stats, report.getvalue())
def get_prom(self, var, labels=None, bare=False):
    """Return a Prometheus sample value, defaulting to 0 when unset.

    Unless bare is set, the DP name/id labels are added automatically.
    """
    if labels is None:
        labels = {}
    if not bare:
        labels.update({
            'dp_name': self.DP,
            'dp_id': '0x%x' % self.DP_ID})
    value = self.registry.get_sample_value(var, labels)
    return 0 if value is None else value
def prom_inc(self, func, var, labels=None, inc_expected=True):
    """Assert calling func increments a Prometheus variable by 1 (or leaves it unchanged)."""
    before = self.get_prom(var, labels)
    func()
    after = self.get_prom(var, labels)
    msg = '%s %s before %f after %f' % (var, labels, before, after)
    expected = before + 1 if inc_expected else before
    self.assertEqual(expected, after, msg=msg)
def send_flows_to_dp_by_id(self, valve, flows):
    """ValvesManager callback: record prepared flows instead of sending to hardware."""
    prepared = valve.prepare_send_flows(flows)
    self.last_flows_to_dp[valve.dp.dp_id] = prepared
def update_config(self, config, reload_type='cold',
                  reload_expected=True, error_expected=0):
    """Update FAUCET config with config as text.

    Writes the new config file, requests a reload, verifies the matching
    reload/error Prometheus counters, and applies any resulting flows.
    Returns the ofmsgs produced by the reload.
    """
    before_dp_status = int(self.get_prom('dp_status'))
    existing_config = None
    if os.path.exists(self.config_file):
        with open(self.config_file) as config_file:
            existing_config = config_file.read()
    with open(self.config_file, 'w') as config_file:
        config_file.write(config)
    # The config watcher must agree with a plain text comparison about
    # whether the file content actually changed.
    content_change_expected = config != existing_config
    self.assertEqual(
        content_change_expected,
        self.valves_manager.config_watcher.content_changed(self.config_file))
    self.last_flows_to_dp[self.DP_ID] = []
    reload_ofmsgs = []
    reload_func = partial(
        self.valves_manager.request_reload_configs,
        self.mock_time(10), self.config_file)
    if error_expected:
        reload_func()
    else:
        # A successful reload must bump exactly the right reload counter.
        var = 'faucet_config_reload_%s_total' % reload_type
        self.prom_inc(reload_func, var=var, inc_expected=reload_expected)
        self.valve = self.valves_manager.valves[self.DP_ID]
        reload_ofmsgs = self.last_flows_to_dp[self.DP_ID]
        # DP requested reconnection
        if reload_ofmsgs is None:
            reload_ofmsgs = self.connect_dp()
        else:
            self.apply_ofmsgs(reload_ofmsgs)
    self.assertEqual(before_dp_status, int(self.get_prom('dp_status')))
    self.assertEqual(error_expected, self.get_prom('faucet_config_load_error', bare=True))
    return reload_ofmsgs
def connect_dp(self):
    """Simulate DP connect with all test ports up; return the connect flows."""
    up_ports = set(list(self.valve.dp.ports.keys())[:self.NUM_PORTS])
    ofmsgs = self.valve.switch_features(None)
    ofmsgs = ofmsgs + self.valve.datapath_connect(self.mock_time(10), up_ports)
    self.apply_ofmsgs(ofmsgs)
    self.valves_manager.update_config_applied(sent={self.DP_ID: True})
    self.assertEqual(1, int(self.get_prom('dp_status')))
    self.assertTrue(self.valve.dp.to_conf())
    return ofmsgs
def cold_start(self):
    """Disconnect and reconnect the dataplane, returning the new connect flows."""
    self.valve.datapath_disconnect()
    return self.connect_dp()
def port_labels(self, port_no):
    """Return the Prometheus label dict for the given port number."""
    dp_port = self.valve.dp.ports[port_no]
    return {'port': dp_port.name, 'port_description': dp_port.description}
def port_expected_status(self, port_no, exp_status):
    """Assert a port's Prometheus status equals exp_status (unknown ports skipped)."""
    if port_no not in self.valve.dp.ports:
        return
    labels = self.port_labels(port_no)
    status = int(self.get_prom('port_status', labels=labels))
    msg = 'status %u != expected %u for port %s' % (status, exp_status, labels)
    self.assertEqual(status, exp_status, msg=msg)
def set_port_down(self, port_no):
    """Simulate a port-delete/link-down event and verify status drops to 0."""
    ofmsgs = self.valve.port_status_handler(
        port_no, ofp.OFPPR_DELETE, ofp.OFPPS_LINK_DOWN, [], time.time()).get(self.valve, [])
    self.apply_ofmsgs(ofmsgs)
    self.port_expected_status(port_no, 0)
def set_port_up(self, port_no):
    """Simulate a port-add event and verify status rises to 1."""
    ofmsgs = self.valve.port_status_handler(
        port_no, ofp.OFPPR_ADD, 0, [], time.time()).get(self.valve, [])
    self.apply_ofmsgs(ofmsgs)
    self.port_expected_status(port_no, 1)
def flap_port(self, port_no):
    """Take a port down, then bring it back up."""
    self.set_port_down(port_no)
    self.set_port_up(port_no)
def all_stack_up(self):
    """Mark every DP as running and every stack port as up."""
    for dp_valve in self.valves_manager.valves.values():
        dp_valve.dp.dyn_running = True
        for stack_port in dp_valve.dp.stack_ports:
            stack_port.stack_up()
def up_stack_port(self, port, dp_id=None):
    """Bring one stack port up: init/up its peer, then replay an LLDP beacon."""
    peer_dp = port.stack['dp']
    peer_port = port.stack['port']
    peer_port.stack_init()
    peer_port.stack_up()
    self.rcv_lldp(port, peer_dp, peer_port, dp_id)
    self.assertTrue(port.is_stack_up())
def down_stack_port(self, port):
    """Bring a stack port up, then make its peer disappear and expire state."""
    self.up_stack_port(port)
    peer_port = port.stack['port']
    peer_port.stack_gone()
    self.valves_manager.valve_flow_services(
        self.mock_time(600), 'fast_state_expire')
    self.assertTrue(port.is_stack_gone())
def _update_port_map(self, port, add_else_remove):
    """Add or remove a stack link from self.up_ports under its canonical key.

    The key is the lexicographically smaller of the two 'dp_id:port_no'
    endpoint strings, so both ends of a link map to the same entry.
    """
    local_key = '%s:%s' % (port.dp_id, port.number)
    remote_key = '%s:%s' % (port.stack['dp'].dp_id, port.stack['port'].number)
    key = min(local_key, remote_key)
    if add_else_remove:
        self.up_ports[key] = port
    else:
        del self.up_ports[key]
def activate_all_ports(self, packets=10):
"""Activate all stack ports through LLDP"""
for valve in self.valves_manager.valves.values():
valve.dp.dyn_running = True
for port in valve.dp.ports.values():
port.dyn_phys_up = True
for port in valve.dp.stack_ports:
self.up_stack_port(port, dp_id=valve.dp.dp_id)
self._update_port_map(port, True)
self.trigger_all_ports(packets=packets)
    def trigger_all_ports(self, packets=10):
        """Do the needful to trigger any pending state changes"""
        # Advance mock time by the LLDP beacon interval each round so
        # timer-driven services fire naturally.
        interval = self.valve.dp.lldp_beacon['send_interval']
        for _ in range(0, packets):
            # Exchange LLDP in both directions over every tracked stack link.
            for port in self.up_ports.values():
                dp_id = port.dp_id
                this_dp = self.valves_manager.valves[dp_id].dp
                peer_dp = port.stack['dp']
                peer_port = port.stack['port']
                self.rcv_lldp(port, peer_dp, peer_port, dp_id)
                self.rcv_lldp(peer_port, this_dp, port, peer_dp.dp_id)
            # Clear the flow capture, run expiry, then apply whatever
            # flows the state change produced.
            self.last_flows_to_dp[self.DP_ID] = []
            now = self.mock_time(interval)
            self.valves_manager.valve_flow_services(
                now, 'fast_state_expire')
            flows = self.last_flows_to_dp[self.DP_ID]
            self.apply_ofmsgs(flows)
def deactivate_stack_port(self, port, packets=10):
"""Deactivate a given stack port"""
self._update_port_map(port, False)
self.trigger_all_ports(packets=packets)
def activate_stack_port(self, port, packets=10):
"""Deactivate a given stack port"""
self._update_port_map(port, True)
self.trigger_all_ports(packets=packets)
@staticmethod
def packet_outs_from_flows(flows):
"""Return flows that are packetout actions."""
return [flow for flow in flows if isinstance(flow, valve_of.parser.OFPPacketOut)]
@staticmethod
def flowmods_from_flows(flows):
"""Return flows that are flowmods actions."""
return [flow for flow in flows if isinstance(flow, valve_of.parser.OFPFlowMod)]
    def learn_hosts(self):
        """Learn some hosts."""
        # TODO: verify learn caching.
        # Each packet is sent twice to exercise the learn cache path.
        for _ in range(2):
            # P1 host on VLAN 100 -> unknown dst: learns P1_V100_MAC on port 1.
            self.rcv_packet(1, 0x100, {
                'eth_src': self.P1_V100_MAC,
                'eth_dst': self.UNKNOWN_MAC,
                'ipv4_src': '10.0.0.1',
                'ipv4_dst': '10.0.0.4'})
            # TODO: verify host learning banned
            self.rcv_packet(1, 0x100, {
                'eth_src': self.UNKNOWN_MAC,
                'eth_dst': self.P1_V100_MAC,
                'ipv4_src': '10.0.0.4',
                'ipv4_dst': '10.0.0.1'})
            # P3 host on VLAN 100 (tagged) -> learns P3_V100_MAC on port 3.
            self.rcv_packet(3, 0x100, {
                'eth_src': self.P3_V100_MAC,
                'eth_dst': self.P2_V100_MAC,
                'ipv4_src': '10.0.0.3',
                'ipv4_dst': '10.0.0.2',
                'vid': 0x100})
            # P2/P3 hosts on VLAN 200 learn each other's addresses.
            self.rcv_packet(2, 0x200, {
                'eth_src': self.P2_V200_MAC,
                'eth_dst': self.P3_V200_MAC,
                'ipv4_src': '10.0.0.2',
                'ipv4_dst': '10.0.0.3',
                'vid': 0x200})
            self.rcv_packet(3, 0x200, {
                'eth_src': self.P3_V200_MAC,
                'eth_dst': self.P2_V200_MAC,
                'ipv4_src': '10.0.0.3',
                'ipv4_dst': '10.0.0.2',
                'vid': 0x200})
    def verify_expiry(self):
        """Verify FIB resolution attempts expire."""
        # Advance time past the retry budget so pending neighbor
        # resolutions get abandoned rather than retried forever.
        for _ in range(self.valve.dp.max_host_fib_retry_count + 1):
            now = self.mock_time(self.valve.dp.timeout * 2)
            self.valve.state_expire(now, None)
            self.valve.resolve_gateways(now, None)
        # TODO: verify state expired
def verify_flooding(self, matches):
"""Verify flooding for a packet, depending on the DP implementation."""
def _verify_flood_to_port(match, port, valve_vlan, port_number=None):
if valve_vlan.port_is_tagged(port):
vid = valve_vlan.vid | ofp.OFPVID_PRESENT
else:
vid = 0
if port_number is None:
port_number = port.number
return self.table.is_output(match, port=port_number, vid=vid)
for match in matches:
in_port_number = match['in_port']
in_port = self.valve.dp.ports[in_port_number]
if ('vlan_vid' in match and
match['vlan_vid'] & ofp.OFPVID_PRESENT != 0):
valve_vlan = self.valve.dp.vlans[match['vlan_vid'] & ~ofp.OFPVID_PRESENT]
else:
valve_vlan = in_port.native_vlan
all_ports = {
port for port in self.valve.dp.ports.values() if port.running()}
remaining_ports = all_ports - {
port for port in valve_vlan.get_ports() if port.running}
hairpin_output = _verify_flood_to_port(
match, in_port, valve_vlan, ofp.OFPP_IN_PORT)
self.assertEqual(
in_port.hairpin, hairpin_output,
msg='hairpin flooding incorrect (expected %s got %s)' % (
in_port.hairpin, hairpin_output))
for port in valve_vlan.get_ports():
output = _verify_flood_to_port(match, port, valve_vlan)
if self.valve.floods_to_root():
# Packet should only be flooded to root.
self.assertEqual(False, output, 'unexpected non-root flood')
else:
# Packet must be flooded to all ports on the VLAN.
if port == in_port:
self.assertEqual(port.hairpin, output,
'unexpected hairpin flood %s %u' % (
match, port.number))
else:
self.assertTrue(
output,
msg=('%s with unknown eth_dst not flooded'
' on VLAN %u to port %u\n%s' % (
match, valve_vlan.vid, port.number, self.table)))
# Packet must not be flooded to ports not on the VLAN.
for port in remaining_ports:
if port.stack:
self.assertTrue(
self.table.is_output(match, port=port.number),
msg=('Unknown eth_dst not flooded to stack port %s' % port))
elif not port.mirror:
self.assertFalse(
self.table.is_output(match, port=port.number),
msg=('Unknown eth_dst flooded to non-VLAN/stack/mirror %s' % port))
    def rcv_packet(self, port, vid, match, dp_id=None):
        """Apply and return flows created receiving a packet on a port/VID."""
        dp_id = dp_id or self.DP_ID
        valve = self.valves_manager.valves[dp_id]
        pkt = build_pkt(match)
        vlan_pkt = pkt
        # TODO: VLAN packet submitted to packet in always has VID
        # Fake OF switch implementation should do this by applying actions.
        # NOTE(review): `vid not in match` tests the int against the dict's
        # keys (which look like strings here) — presumably `'vid' not in
        # match` was intended; confirm before changing, tests rely on the
        # current behavior.
        if vid and vid not in match:
            vlan_match = match
            vlan_match['vid'] = vid
            vlan_pkt = build_pkt(match)
        # Fake an OFPPacketIn with just the fields the valve reads.
        msg = namedtuple(
            'null_msg',
            ('match', 'in_port', 'data', 'total_len', 'cookie', 'reason'))(
                {'in_port': port}, port, vlan_pkt.data, len(vlan_pkt.data),
                valve.dp.cookie, valve_of.ofp.OFPR_ACTION)
        # NOTE(review): flow capture is keyed on self.DP_ID even when
        # dp_id differs — looks like flows to other DPs are deliberately
        # not returned; confirm against callers.
        self.last_flows_to_dp[self.DP_ID] = []
        now = self.mock_time(0)
        packet_in_func = partial(self.valves_manager.valve_packet_in, now, valve, msg)
        # Only the primary DP's packet-ins are counted in Prometheus.
        if dp_id == self.DP_ID:
            self.prom_inc(packet_in_func, 'of_packet_ins_total')
        else:
            packet_in_func()
        rcv_packet_ofmsgs = self.last_flows_to_dp[self.DP_ID]
        self.last_flows_to_dp[self.DP_ID] = []
        self.apply_ofmsgs(rcv_packet_ofmsgs)
        # Run periodic services so any state triggered by the packet settles.
        for valve_service in (
                'resolve_gateways', 'advertise', 'fast_advertise', 'state_expire'):
            self.valves_manager.valve_flow_services(
                now, valve_service)
        self.valves_manager.update_metrics(now)
        return rcv_packet_ofmsgs
def rcv_lldp(self, port, other_dp, other_port, dp_id=None):
"""Receive an LLDP packet"""
dp_id = dp_id if dp_id else self.DP_ID
tlvs = []
tlvs.extend(valve_packet.faucet_lldp_tlvs(other_dp))
tlvs.extend(valve_packet.faucet_lldp_stack_state_tlvs(other_dp, other_port))
dp_mac = other_dp.faucet_dp_mac if other_dp.faucet_dp_mac else FAUCET_MAC
self.rcv_packet(port.number, 0, {
'eth_src': dp_mac,
'eth_dst': lldp.LLDP_MAC_NEAREST_BRIDGE,
'port_id': other_port.number,
'chassis_id': dp_mac,
'system_name': other_dp.name,
'org_tlvs': tlvs}, dp_id=dp_id)
def set_stack_port_status(self, port_no, status, valve=None):
"""Set stack port up recalculating topology as necessary."""
if not valve:
valve = self.valve
port = valve.dp.ports[port_no]
port.dyn_stack_current_state = status
valve.switch_manager.update_stack_topo(True, valve.dp, port)
for valve_vlan in valve.dp.vlans.values():
self.apply_ofmsgs(valve.switch_manager.add_vlan(valve_vlan))
def set_stack_port_up(self, port_no, valve=None):
"""Set stack port up recalculating topology as necessary."""
self.set_stack_port_status(port_no, 3, valve)
def set_stack_port_down(self, port_no, valve=None):
"""Set stack port up recalculating topology as necessary."""
self.set_stack_port_status(port_no, 2, valve)
def validate_flood(self, in_port, vlan_vid, out_port, expected, msg):
bcast_match = {
'in_port': in_port,
'eth_dst': mac.BROADCAST_STR,
'vlan_vid': vlan_vid,
'eth_type': 0x800,
}
if expected:
self.assertTrue(self.table.is_output(bcast_match, port=out_port), msg=msg)
else:
self.assertFalse(self.table.is_output(bcast_match, port=out_port), msg=msg)
def pkt_match(self, src, dst):
"""Make a unicast packet match dict for the given src & dst"""
return {
'eth_src': '00:00:00:01:00:%02x' % src,
'eth_dst': '00:00:00:01:00:%02x' % dst,
'ipv4_src': '10.0.0.%d' % src,
'ipv4_dst': '10.0.0.%d' % dst,
'vid': self.V100
}
def _config_edge_learn_stack_root(self, new_value):
config = yaml.load(self.CONFIG, Loader=yaml.SafeLoader)
config['vlans']['v100']['edge_learn_stack_root'] = new_value
return yaml.dump(config)
class ValveTestBig(ValveTestSmall):
    """Test basic switching/L2/L3 functions."""
    def setUp(self):
        """Build the valve test harness from the module-level CONFIG."""
        self.setup_valve(CONFIG)
def test_notifier_socket_path(self):
"""Test notifier socket path checker."""
new_path = os.path.join(self.tmpdir, 'new_path/new_socket')
self.assertEqual(self.notifier.check_path(new_path), new_path)
stale_socket = os.path.join(self.tmpdir, 'stale_socket')
with open(stale_socket, 'w') as stale_socket_file:
stale_socket_file.write('')
self.assertEqual(self.notifier.check_path(stale_socket), stale_socket)
    def test_disconnect(self):
        """Test disconnection of DP from controller."""
        # dp_status gauge is 1 while connected...
        self.assertEqual(1, int(self.get_prom('dp_status')))
        # ...and drops to 0 after disconnect, which also bumps the counter.
        self.prom_inc(partial(self.valve.datapath_disconnect), 'of_dp_disconnections_total')
        self.assertEqual(0, int(self.get_prom('dp_status')))
def test_unexpected_port(self):
"""Test packet in from unexpected port."""
self.prom_inc(
partial(self.rcv_packet, 999, 0x100, {
'eth_src': self.P1_V300_MAC,
'eth_dst': self.UNKNOWN_MAC,
'ipv4_src': '10.0.0.1',
'ipv4_dst': '10.0.0.2'}),
'of_unexpected_packet_ins_total',
inc_expected=True)
def test_oferror(self):
"""Test OFError handler."""
datapath = None
msg = valve_of.parser.OFPFlowMod(datapath=datapath)
msg.xid = 123
self.valve.recent_ofmsgs.append(msg)
test_error = valve_of.parser.OFPErrorMsg(datapath=datapath, msg=msg)
self.valve.oferror(test_error)
def test_tfm(self):
"""Test TFM is sent."""
self.assertTrue(
isinstance(self.valve, TfmValve),
msg=type(self.valve))
discovered_up_ports = {port_no for port_no in range(1, self.NUM_PORTS + 1)}
flows = self.valve.datapath_connect(self.mock_time(10), discovered_up_ports)
self.apply_ofmsgs(flows)
tfm_flows = [
flow for flow in flows if isinstance(
flow, valve_of.parser.OFPTableFeaturesStatsRequest)]
# TODO: verify TFM content.
self.assertTrue(tfm_flows)
def test_pkt_meta(self):
"""Test bad fields in OFPacketIn."""
msg = parser.OFPPacketIn(datapath=None)
self.assertEqual(None, self.valve.parse_pkt_meta(msg))
msg.cookie = self.valve.dp.cookie
self.assertEqual(None, self.valve.parse_pkt_meta(msg))
msg.reason = valve_of.ofp.OFPR_ACTION
self.assertEqual(None, self.valve.parse_pkt_meta(msg))
msg.match = parser.OFPMatch(in_port=1)
self.assertEqual(None, self.valve.parse_pkt_meta(msg))
msg.data = b'1234'
self.assertEqual(None, self.valve.parse_pkt_meta(msg))
def test_loop_protect(self):
"""Learn loop protection."""
for _ in range(2):
self.rcv_packet(1, 0x100, {
'eth_src': self.P1_V100_MAC,
'eth_dst': self.UNKNOWN_MAC,
'ipv4_src': '10.0.0.1',
'ipv4_dst': '10.0.0.2'})
self.rcv_packet(2, 0x100, {
'eth_src': self.P1_V100_MAC,
'eth_dst': self.UNKNOWN_MAC,
'ipv4_src': '10.0.0.1',
'ipv4_dst': '10.0.0.2',
'vid': 0x100})
def test_lldp(self):
"""Test LLDP reception."""
self.assertFalse(self.rcv_packet(1, 0, {
'eth_src': self.P1_V100_MAC,
'eth_dst': lldp.LLDP_MAC_NEAREST_BRIDGE,
'chassis_id': self.P1_V100_MAC,
'port_id': 1}))
def test_bogon_arp_for_controller(self):
"""Bogon ARP request for controller VIP."""
replies = self.rcv_packet(1, 0x100, {
'eth_src': self.P1_V100_MAC,
'eth_dst': mac.BROADCAST_STR,
'arp_code': arp.ARP_REQUEST,
'arp_source_ip': '8.8.8.8',
'arp_target_ip': '10.0.0.254'})
# Must be no ARP reply to an ARP request not in our subnet.
self.assertFalse(self.packet_outs_from_flows(replies))
def test_arp_for_controller(self):
"""ARP request for controller VIP."""
for _retries in range(3):
for arp_mac in (mac.BROADCAST_STR, self.valve.dp.vlans[0x100].faucet_mac):
arp_replies = self.rcv_packet(1, 0x100, {
'eth_src': self.P1_V100_MAC,
'eth_dst': arp_mac,
'arp_code': arp.ARP_REQUEST,
'arp_source_ip': '10.0.0.1',
'arp_target_ip': '10.0.0.254'})
# TODO: check ARP reply is valid
self.assertTrue(self.packet_outs_from_flows(arp_replies), msg=arp_mac)
def test_arp_reply_from_host(self):
"""ARP reply for host."""
arp_replies = self.rcv_packet(1, 0x100, {
'eth_src': self.P1_V100_MAC,
'eth_dst': FAUCET_MAC,
'arp_code': arp.ARP_REPLY,
'arp_source_ip': '10.0.0.1',
'arp_target_ip': '10.0.0.254'})
# TODO: check ARP reply is valid
self.assertTrue(arp_replies)
self.assertFalse(self.packet_outs_from_flows(arp_replies))
def test_nd_for_controller(self):
"""IPv6 ND for controller VIP."""
for dst_ip in (
ipaddress.IPv6Address('fe80::1:254'),
ipaddress.IPv6Address('fc00::1:254')):
nd_mac = valve_packet.ipv6_link_eth_mcast(dst_ip)
ip_gw_mcast = valve_packet.ipv6_solicited_node_from_ucast(dst_ip)
for _retries in range(3):
nd_replies = self.rcv_packet(2, 0x200, {
'eth_src': self.P2_V200_MAC,
'eth_dst': nd_mac,
'vid': 0x200,
'ipv6_src': 'fc00::1:1',
'ipv6_dst': str(ip_gw_mcast),
'neighbor_solicit_ip': str(dst_ip)})
# TODO: check reply NA is valid
packet_outs = self.packet_outs_from_flows(nd_replies)
self.assertTrue(packet_outs)
def test_nd_from_host(self):
"""IPv6 NA from host."""
na_replies = self.rcv_packet(2, 0x200, {
'eth_src': self.P2_V200_MAC,
'eth_dst': FAUCET_MAC,
'vid': 0x200,
'ipv6_src': 'fc00::1:1',
'ipv6_dst': 'fc00::1:254',
'neighbor_advert_ip': 'fc00::1:1'})
# TODO: check NA response flows are valid
self.assertTrue(na_replies)
self.assertFalse(self.packet_outs_from_flows(na_replies))
def test_ra_for_controller(self):
"""IPv6 RA for controller."""
router_solicit_ip = 'ff02::2'
ra_replies = self.rcv_packet(2, 0x200, {
'eth_src': self.P2_V200_MAC,
'eth_dst': '33:33:00:00:00:02',
'vid': 0x200,
'ipv6_src': 'fe80::1:1',
'ipv6_dst': router_solicit_ip,
'router_solicit_ip': router_solicit_ip})
# TODO: check RA is valid
self.assertTrue(self.packet_outs_from_flows(ra_replies))
def test_icmp_ping_controller(self):
"""IPv4 ping controller VIP."""
echo_replies = self.rcv_packet(1, 0x100, {
'eth_src': self.P1_V100_MAC,
'eth_dst': FAUCET_MAC,
'vid': 0x100,
'ipv4_src': '10.0.0.1',
'ipv4_dst': '10.0.0.254',
'echo_request_data': self.ICMP_PAYLOAD})
packet_outs = self.packet_outs_from_flows(echo_replies)
self.assertTrue(packet_outs)
data = packet_outs[0].data
self.assertTrue(data.endswith(self.ICMP_PAYLOAD), msg=data)
def test_unresolved_route(self):
"""Test unresolved route tries to resolve."""
ip_dst = ipaddress.IPv4Network('10.100.100.0/24')
ip_gw = ipaddress.IPv4Address('10.0.0.1')
valve_vlan = self.valve.dp.vlans[0x100]
route_add_replies = self.valve.add_route(
valve_vlan, ip_gw, ip_dst)
self.assertFalse(route_add_replies)
resolve_replies = self.valve.resolve_gateways(
self.mock_time(10), None)
self.assertFalse(resolve_replies)
resolve_replies = self.valve.resolve_gateways(
self.mock_time(99), None)
self.assertTrue(resolve_replies)
def test_add_del_route(self):
"""IPv4 add/del of a route."""
arp_replies = self.rcv_packet(1, 0x100, {
'eth_src': self.P1_V100_MAC,
'eth_dst': mac.BROADCAST_STR,
'arp_code': arp.ARP_REQUEST,
'arp_source_ip': '10.0.0.1',
'arp_target_ip': '10.0.0.254'})
# TODO: check ARP reply is valid
self.assertTrue(self.packet_outs_from_flows(arp_replies))
valve_vlan = self.valve.dp.vlans[0x100]
ip_dst = ipaddress.IPv4Network('10.100.100.0/24')
ip_gw = ipaddress.IPv4Address('10.0.0.1')
route_add_replies = self.valve.add_route(
valve_vlan, ip_gw, ip_dst)
# TODO: check add flows.
self.assertTrue(route_add_replies)
route_del_replies = self.valve.del_route(
valve_vlan, ip_dst)
# TODO: check del flows.
self.assertTrue(route_del_replies)
def test_host_ipv4_fib_route(self):
"""Test learning a FIB rule for an IPv4 host."""
fib_route_replies = self.rcv_packet(1, 0x100, {
'eth_src': self.P1_V100_MAC,
'eth_dst': self.UNKNOWN_MAC,
'vid': 0x100,
'ipv4_src': '10.0.0.2',
'ipv4_dst': '10.0.0.4',
'echo_request_data': bytes(
'A'*8, encoding='UTF-8')}) # pytype: disable=wrong-keyword-args
# TODO: verify learning rule contents
# We want to know this host was learned we did not get packet outs.
self.assertTrue(fib_route_replies)
# Verify adding default route via 10.0.0.2
self.assertTrue((self.valve.add_route(
self.valve.dp.vlans[0x100],
ipaddress.IPv4Address('10.0.0.2'),
ipaddress.IPv4Network('0.0.0.0/0'))))
self.assertFalse(self.packet_outs_from_flows(fib_route_replies))
self.verify_expiry()
def test_host_ipv6_fib_route(self):
"""Test learning a FIB rule for an IPv6 host."""
fib_route_replies = self.rcv_packet(2, 0x200, {
'eth_src': self.P2_V200_MAC,
'eth_dst': self.UNKNOWN_MAC,
'vid': 0x200,
'ipv6_src': 'fc00::1:2',
'ipv6_dst': 'fc00::1:4',
'echo_request_data': self.ICMP_PAYLOAD})
# TODO: verify learning rule contents
# We want to know this host was learned we did not get packet outs.
self.assertTrue(fib_route_replies)
self.assertFalse(self.packet_outs_from_flows(fib_route_replies))
self.verify_expiry()
def test_ping_unknown_neighbor(self):
"""IPv4 ping unknown host on same subnet, causing proactive learning."""
echo_replies = self.rcv_packet(1, 0x100, {
'eth_src': self.P1_V100_MAC,
'eth_dst': FAUCET_MAC,
'vid': 0x100,
'ipv4_src': '10.0.0.1',
'ipv4_dst': '10.0.0.99',
'echo_request_data': self.ICMP_PAYLOAD})
# TODO: check proactive neighbor resolution
self.assertTrue(self.packet_outs_from_flows(echo_replies))
def test_ping6_unknown_neighbor(self):
"""IPv6 ping unknown host on same subnet, causing proactive learning."""
echo_replies = self.rcv_packet(2, 0x200, {
'eth_src': self.P2_V200_MAC,
'eth_dst': FAUCET_MAC,
'vid': 0x200,
'ipv6_src': 'fc00::1:2',
'ipv6_dst': 'fc00::1:4',
'echo_request_data': self.ICMP_PAYLOAD})
# TODO: check proactive neighbor resolution
self.assertTrue(self.packet_outs_from_flows(echo_replies))
def test_icmpv6_ping_controller(self):
"""IPv6 ping controller VIP."""
echo_replies = self.rcv_packet(2, 0x200, {
'eth_src': self.P2_V200_MAC,
'eth_dst': FAUCET_MAC,
'vid': 0x200,
'ipv6_src': 'fc00::1:1',
'ipv6_dst': 'fc00::1:254',
'echo_request_data': self.ICMP_PAYLOAD})
packet_outs = self.packet_outs_from_flows(echo_replies)
self.assertTrue(packet_outs)
data = packet_outs[0].data
self.assertTrue(data.endswith(self.ICMP_PAYLOAD), msg=data)
def test_invalid_vlan(self):
"""Test that packets with incorrect vlan tagging get dropped."""
matches = [
{'in_port': 1, 'vlan_vid': 18 | ofp.OFPVID_PRESENT},
{'in_port': 1, 'vlan_vid': self.V100},
{'in_port': 3, 'vlan_vid': 0}]
for match in matches:
self.assertFalse(
self.table.is_output(match),
msg='Packets with incorrect vlan tags are output')
def test_unknown_eth_src(self):
"""Test that packets from unknown macs are sent to controller.
Untagged packets should have VLAN tags pushed before they are sent to
the controller.
"""
matches = [
{'in_port': 1, 'vlan_vid': 0},
{'in_port': 1, 'vlan_vid': 0, 'eth_src': self.UNKNOWN_MAC},
{
'in_port': 1,
'vlan_vid': 0,
'eth_src': self.P2_V200_MAC
},
{'in_port': 2, 'vlan_vid': 0, 'eth_dst': self.UNKNOWN_MAC},
{'in_port': 2, 'vlan_vid': 0},
{
'in_port': 2,
'vlan_vid': self.V100,
'eth_src': self.P2_V200_MAC
},
{
'in_port': 2,
'vlan_vid': self.V100,
'eth_src': self.UNKNOWN_MAC,
'eth_dst': self.P1_V100_MAC
},
]
for match in matches:
if match['vlan_vid'] != 0:
vid = match['vlan_vid']
else:
vid = self.valve.dp.get_native_vlan(match['in_port']).vid
vid = vid | ofp.OFPVID_PRESENT
self.assertTrue(
self.table.is_output(match, ofp.OFPP_CONTROLLER, vid=vid),
msg="Packet with unknown ethernet src not sent to controller: "
"{0}".format(match))
    def test_unknown_eth_dst_rule(self):
        """Test that packets with unknown eth dst addrs get flooded correctly.
        They must be output to each port on the associated vlan, with the
        correct vlan tagging. And they must not be forwarded to a port not
        on the associated vlan
        """
        # Populate MAC learning first so only the dst address is unknown.
        self.learn_hosts()
        matches = [
            {
                'in_port': 3,
                'vlan_vid': self.V100,
            },
            {
                'in_port': 2,
                'vlan_vid': 0,
                'eth_dst': self.P1_V100_MAC
            },
            {
                'in_port': 1,
                'vlan_vid': 0,
                'eth_src': self.P1_V100_MAC
            },
            {
                'in_port': 3,
                'vlan_vid': self.V200,
                'eth_src': self.P2_V200_MAC,
            }
        ]
        self.verify_flooding(matches)
def test_known_eth_src_rule(self):
"""Test that packets with known eth src addrs are not sent to controller."""
self.learn_hosts()
matches = [
{
'in_port': 1,
'vlan_vid': 0,
'eth_src': self.P1_V100_MAC
},
{
'in_port': 2,
'vlan_vid': self.V200,
'eth_src': self.P2_V200_MAC
},
{
'in_port': 3,
'vlan_vid': self.V200,
'eth_src': self.P3_V200_MAC,
'eth_dst': self.P2_V200_MAC
}
]
for match in matches:
self.assertFalse(
self.table.is_output(match, port=ofp.OFPP_CONTROLLER),
msg="Packet ({0}) output to controller when eth_src address"
" is known".format(match))
def test_known_eth_src_deletion(self):
"""Verify that when a mac changes port the old rules get deleted.
If a mac address is seen on one port, then seen on a different port on
the same vlan the rules associated with that mac address on previous
port need to be deleted. IE packets with that mac address arriving on
the old port should be output to the controller."""
self.rcv_packet(3, 0x200, {
'eth_src': self.P2_V200_MAC,
'eth_dst': self.UNKNOWN_MAC,
'vlan_vid': 0x200,
'ipv4_src': '10.0.0.3',
'ipv4_dst': '10.0.0.3'})
match = {'in_port': 2, 'vlan_vid': 0, 'eth_src': self.P2_V200_MAC}
self.assertTrue(
self.table.is_output(match, port=ofp.OFPP_CONTROLLER),
msg='eth src rule not deleted when mac seen on another port')
def test_known_eth_dst_rule(self):
"""Test that packets with known eth dst addrs are output correctly.
Output to the correct port with the correct vlan tagging."""
self.learn_hosts()
match_results = [
({
'in_port': 2,
'vlan_vid': self.V100,
'eth_dst': self.P1_V100_MAC
}, {
'out_port': 1,
'vlan_vid': 0
}),
({
'in_port': 3,
'vlan_vid': self.V200,
'eth_dst': self.P2_V200_MAC,
'eth_src': self.P3_V200_MAC
}, {
'out_port': 2,
'vlan_vid': 0,
})
]
for match, result in match_results:
self.assertTrue(
self.table.is_output(
match, result['out_port'], vid=result['vlan_vid']),
msg='packet not output to port correctly when eth dst is known')
incorrect_ports = set(range(1, self.NUM_PORTS + 1))
incorrect_ports.remove(result['out_port'])
for port in incorrect_ports:
self.assertFalse(
self.table.is_output(match, port=port),
msg=('packet %s output to incorrect port %u when eth_dst '
'is known' % (match, port)))
self.verify_expiry()
def test_mac_vlan_separation(self):
"""Test that when a mac is seen on a second vlan the original vlan
rules are unaffected."""
self.learn_hosts()
self.rcv_packet(2, 0x200, {
'eth_src': self.P1_V100_MAC,
'eth_dst': self.UNKNOWN_MAC,
'vlan_vid': 0x200,
'ipv4_src': '10.0.0.2',
'ipv4_dst': '10.0.0.3'})
# check eth_src rule
match1 = {'in_port': 1, 'vlan_vid': 0, 'eth_src': self.P1_V100_MAC}
self.assertFalse(
self.table.is_output(match1, ofp.OFPP_CONTROLLER),
msg=('mac address being seen on a vlan affects eth_src rule on '
'other vlan'))
# check eth_dst rule
match2 = {'in_port': 3, 'vlan_vid': self.V100, 'eth_dst': self.P1_V100_MAC}
self.assertTrue(
self.table.is_output(match2, port=1, vid=0),
msg=('mac address being seen on a vlan affects eth_dst rule on '
'other vlan'))
for port in (2, 4):
self.assertFalse(
self.table.is_output(match2, port=port),
msg=('mac address being seen on a vlan affects eth_dst rule on '
'other vlan'))
def test_known_eth_dst_deletion(self):
"""Test that eth_dst rules are deleted when the mac is learned on
another port.
This should only occur when the mac is seen on the same vlan."""
self.rcv_packet(2, 0x100, {
'eth_src': self.P1_V100_MAC,
'eth_dst': self.UNKNOWN_MAC,
'ipv4_src': '10.0.0.2',
'ipv4_dst': '10.0.0.3'})
match = {'in_port': 3, 'vlan_vid': self.V100, 'eth_dst': self.P1_V100_MAC}
self.assertTrue(
self.table.is_output(match, port=2, vid=self.V100),
msg='Packet not output correctly after mac is learnt on new port')
self.assertFalse(
self.table.is_output(match, port=1),
msg='Packet output on old port after mac is learnt on new port')
def test_port_delete_eth_dst(self):
"""Test that when a port is disabled packets are correctly output. """
match = {'in_port': 2, 'vlan_vid': self.V100, 'eth_dst': self.P1_V100_MAC}
valve_vlan = self.valve.dp.vlans[match['vlan_vid'] & ~ofp.OFPVID_PRESENT]
ofmsgs = self.valve.port_delete(port_num=1)
self.apply_ofmsgs(ofmsgs)
# Check packets are output to each port on vlan
for port in valve_vlan.get_ports():
if port.number != match['in_port'] and port.running():
if valve_vlan.port_is_tagged(port):
vid = valve_vlan.vid | ofp.OFPVID_PRESENT
else:
vid = 0
self.assertTrue(
self.table.is_output(match, port=port.number, vid=vid),
msg=('packet %s with eth dst learnt on deleted port not output '
'correctly on vlan %u to port %u' % (
match, valve_vlan.vid, port.number)))
def test_port_down_eth_src_removal(self):
"""Test that when a port goes down and comes back up learnt mac
addresses are deleted."""
match = {'in_port': 1, 'vlan_vid': 0, 'eth_src': self.P1_V100_MAC}
self.flap_port(1)
self.assertTrue(
self.table.is_output(match, port=ofp.OFPP_CONTROLLER),
msg='Packet not output to controller after port bounce')
def test_port_add_input(self):
"""Test that when a port is enabled packets are input correctly."""
match = {'in_port': 1, 'vlan_vid': 0}
self.apply_ofmsgs(
self.valve.port_delete(port_num=1))
self.assertFalse(
self.table.is_output(match, port=2, vid=self.V100),
msg='Packet output after port delete')
self.apply_ofmsgs(
self.valve.port_add(port_num=1))
self.assertTrue(
self.table.is_output(match, port=2, vid=self.V100),
msg='Packet not output after port add')
def test_dp_acl_deny(self):
"""Test DP acl denies forwarding"""
acl_config = """
dps:
s1:
dp_acls: [drop_non_ospf_ipv4]
%s
interfaces:
p2:
number: 2
native_vlan: v200
p3:
number: 3
tagged_vlans: [v200]
vlans:
v200:
vid: 0x200
acls:
drop_non_ospf_ipv4:
- rule:
nw_dst: '224.0.0.5'
dl_type: 0x800
actions:
meter: testmeter
allow: 1
- rule:
dl_type: 0x800
actions:
output:
set_fields:
- eth_dst: 00:00:00:00:00:01
allow: 0
meters:
testmeter:
meter_id: 99
entry:
flags: "KBPS"
bands:
[
{
type: "DROP",
rate: 1
}
]
""" % DP1_CONFIG
drop_match = {
'in_port': 2,
'vlan_vid': 0,
'eth_type': 0x800,
'ipv4_dst': '192.0.2.1'}
accept_match = {
'in_port': 2,
'vlan_vid': 0,
'eth_type': 0x800,
'ipv4_dst': '224.0.0.5'}
self.update_config(acl_config)
self.flap_port(2)
self.assertFalse(
self.table.is_output(drop_match),
msg='packet not blocked by ACL')
self.assertTrue(
self.table.is_output(accept_match, port=3, vid=self.V200),
msg='packet not allowed by ACL')
def test_dp_acl_deny_ordered(self):
"""Test DP acl denies forwarding"""
acl_config = """
dps:
s1:
dp_acls: [drop_non_ospf_ipv4]
%s
interfaces:
p2:
number: 2
native_vlan: v200
p3:
number: 3
tagged_vlans: [v200]
vlans:
v200:
vid: 0x200
acls:
drop_non_ospf_ipv4:
- rule:
nw_dst: '224.0.0.5'
dl_type: 0x800
actions:
meter: testmeter
allow: 1
- rule:
dl_type: 0x800
actions:
output:
- set_fields:
- eth_dst: 00:00:00:00:00:01
allow: 0
meters:
testmeter:
meter_id: 99
entry:
flags: "KBPS"
bands:
[
{
type: "DROP",
rate: 1
}
]
""" % DP1_CONFIG
drop_match = {
'in_port': 2,
'vlan_vid': 0,
'eth_type': 0x800,
'ipv4_dst': '192.0.2.1'}
accept_match = {
'in_port': 2,
'vlan_vid': 0,
'eth_type': 0x800,
'ipv4_dst': '224.0.0.5'}
self.update_config(acl_config)
self.flap_port(2)
self.assertFalse(
self.table.is_output(drop_match),
msg='packet not blocked by ACL')
self.assertTrue(
self.table.is_output(accept_match, port=3, vid=self.V200),
msg='packet not allowed by ACL')
def test_port_acl_deny(self):
"""Test that port ACL denies forwarding."""
acl_config = """
dps:
s1:
%s
interfaces:
p2:
number: 2
native_vlan: v200
acl_in: drop_non_ospf_ipv4
p3:
number: 3
tagged_vlans: [v200]
vlans:
v200:
vid: 0x200
acls:
drop_non_ospf_ipv4:
- rule:
nw_dst: '224.0.0.5'
dl_type: 0x800
actions:
meter: testmeter
allow: 1
- rule:
dl_type: 0x800
actions:
allow: 0
meters:
testmeter:
meter_id: 99
entry:
flags: "KBPS"
bands:
[
{
type: "DROP",
rate: 1
}
]
""" % DP1_CONFIG
drop_match = {
'in_port': 2,
'vlan_vid': 0,
'eth_type': 0x800,
'ipv4_dst': '192.0.2.1'}
accept_match = {
'in_port': 2,
'vlan_vid': 0,
'eth_type': 0x800,
'ipv4_dst': '224.0.0.5'}
# base case
for match in (drop_match, accept_match):
self.assertTrue(
self.table.is_output(match, port=3, vid=self.V200),
msg='Packet not output before adding ACL')
self.update_config(acl_config)
self.assertFalse(
self.table.is_output(drop_match),
msg='packet not blocked by ACL')
self.assertTrue(
self.table.is_output(accept_match, port=3, vid=self.V200),
msg='packet not allowed by ACL')
    def test_lldp_beacon(self):
        """Test LLDP beacon service."""
        # TODO: verify LLDP packet content.
        # fast_advertise must return truthy ofmsgs when a beacon is due.
        self.assertTrue(self.valve.fast_advertise(self.mock_time(10), None))
    def test_unknown_port(self):
        """Test port status change for unknown port handled."""
        # Port 99 is not configured; the handler must tolerate it (no raise).
        self.set_port_up(99)
    def test_port_modify(self):
        """Set port status modify."""
        # Exercise OFPPR_MODIFY with both link states (0=down, 1=up).
        for port_status in (0, 1):
            self.apply_ofmsgs(self.valve.port_status_handler(
                1, ofp.OFPPR_MODIFY, port_status, [], time.time())[self.valve])
def test_unknown_port_status(self):
"""Test unknown port status message."""
known_messages = set([ofp.OFPPR_MODIFY, ofp.OFPPR_ADD, ofp.OFPPR_DELETE])
unknown_messages = list(set(range(0, len(known_messages) + 1)) - known_messages)
self.assertTrue(unknown_messages)
self.assertFalse(self.valve.port_status_handler(
1, unknown_messages[0], 1, [], time.time()).get(self.valve, []))
def test_move_port(self):
"""Test host moves a port."""
self.rcv_packet(2, 0x200, {
'eth_src': self.P1_V100_MAC,
'eth_dst': self.UNKNOWN_MAC,
'vlan_vid': 0x200,
'ipv4_src': '10.0.0.2',
'ipv4_dst': '10.0.0.3'})
self.rcv_packet(4, 0x200, {
'eth_src': self.P1_V100_MAC,
'eth_dst': self.UNKNOWN_MAC,
'vlan_vid': 0x200,
'ipv4_src': '10.0.0.2',
'ipv4_dst': '10.0.0.3'})
def test_bgp_route_change(self):
"""Test BGP route change handler."""
nexthop = '10.0.0.1'
prefix = '192.168.1.1/32'
add_event = RouteAddition(
IPPrefix.from_string(prefix),
IPAddress.from_string(nexthop),
'65001',
'IGP'
)
del_event = RouteRemoval(
IPPrefix.from_string(prefix),
)
self.bgp._bgp_route_handler( # pylint: disable=protected-access
add_event,
faucet_bgp.BgpSpeakerKey(self.DP_ID, 0x100, 4))
self.bgp._bgp_route_handler( # pylint: disable=protected-access
del_event,
faucet_bgp.BgpSpeakerKey(self.DP_ID, 0x100, 4))
self.bgp._bgp_up_handler(nexthop, 65001) # pylint: disable=protected-access
self.bgp._bgp_down_handler(nexthop, 65001) # pylint: disable=protected-access
def test_packet_in_rate(self):
"""Test packet in rate limit triggers."""
now = self.mock_time(10)
for _ in range(self.valve.dp.ignore_learn_ins * 2 + 1):
if self.valve.rate_limit_packet_ins(now):
return
self.fail('packet in rate limit not triggered')
def test_ofdescstats_handler(self):
"""Test OFDescStatsReply handler."""
body = parser.OFPDescStats(
mfr_desc=u'test_mfr_desc'.encode(),
hw_desc=u'test_hw_desc'.encode(),
sw_desc=u'test_sw_desc'.encode(),
serial_num=u'99'.encode(),
dp_desc=u'test_dp_desc'.encode())
self.valve.ofdescstats_handler(body)
invalid_body = parser.OFPDescStats(
mfr_desc=b'\x80',
hw_desc=b'test_hw_desc',
sw_desc=b'test_sw_desc',
serial_num=b'99',
dp_desc=b'test_dp_desc')
self.valve.ofdescstats_handler(invalid_body)
    def test_get_config_dict(self):
        """Test API call for DP config."""
        # TODO: test actual config contents.
        # Both calls must return truthy (non-empty) structures.
        self.assertTrue(self.valve.get_config_dict())
        self.assertTrue(self.valve.dp.get_tables())
class ValveTestStackedRouting(ValveTestSmall):
    """Test inter-vlan routing with stacking capabilities in an IPV4 network"""
    # VLAN IDs under test.
    V100 = 0x100
    V200 = 0x200
    # FAUCET router MAC for each VLAN.
    VLAN100_FAUCET_MAC = '00:00:00:00:00:11'
    VLAN200_FAUCET_MAC = '00:00:00:00:00:22'
    # VIP strings left empty here; presumably populated by subclasses
    # before create_config() runs — TODO confirm.
    VLAN100_FAUCET_VIPS = ''
    VLAN100_FAUCET_VIP_SPACE = ''
    VLAN200_FAUCET_VIPS = ''
    VLAN200_FAUCET_VIP_SPACE = ''
    # Host indices per VLAN; filled in by base_config().
    V100_HOSTS = []
    V200_HOSTS = []
def base_config(self):
"""Create the base config"""
self.V100_HOSTS = [1, 2, 3, 4]
self.V200_HOSTS = [1, 2, 3, 4]
return """
routers:
router1:
vlans: [vlan100, vlan200]
dps:
s1:
hardware: 'GenericTFM'
dp_id: 1
stack: {priority: 1}
interfaces:
1:
native_vlan: vlan100
2:
native_vlan: vlan200
3:
stack: {dp: s2, port: 3}
s2:
dp_id: 2
interfaces:
1:
native_vlan: vlan100
2:
native_vlan: vlan200
3:
stack: {dp: s1, port: 3}
4:
stack: {dp: s3, port: 3}
s3:
dp_id: 3
interfaces:
1:
native_vlan: vlan100
2:
native_vlan: vlan200
3:
stack: {dp: s2, port: 4}
4:
stack: {dp: s4, port: 3}
s4:
dp_id: 4
interfaces:
1:
native_vlan: vlan100
2:
native_vlan: vlan200
3:
stack: {dp: s3, port: 4}
"""
def create_config(self):
"""Create the config file"""
self.CONFIG = """
vlans:
vlan100:
vid: 0x100
faucet_mac: '%s'
faucet_vips: ['%s']
vlan200:
vid: 0x200
faucet_mac: '%s'
faucet_vips: ['%s']
%s
""" % (self.VLAN100_FAUCET_MAC, self.VLAN100_FAUCET_VIP_SPACE,
self.VLAN200_FAUCET_MAC, self.VLAN200_FAUCET_VIP_SPACE,
self.base_config())
def setup_stack_routing(self):
"""Create a stacking config file."""
self.create_config()
self.setup_valve(self.CONFIG)
for valve in self.valves_manager.valves.values():
valve.dp.dyn_running = True
for port in valve.dp.ports.values():
port.dyn_finalized = False
port.enabled = True
port.dyn_phys_up = True
port.dyn_finalized = True
@staticmethod
def create_mac(vindex, host):
"""Create a MAC address string"""
return '00:00:00:0%u:00:0%u' % (vindex, host)
@staticmethod
def create_ip(vindex, host):
"""Create a IP address string"""
return '10.0.%u.%u' % (vindex, host)
@staticmethod
def get_eth_type():
"""Returns IPV4 ether type"""
return valve_of.ether.ETH_TYPE_IP
def create_match(self, vindex, host, faucet_mac, faucet_vip, code):
"""Create an ARP reply message"""
return {
'eth_src': self.create_mac(vindex, host),
'eth_dst': faucet_mac,
'arp_code': code,
'arp_source_ip': self.create_ip(vindex, host),
'arp_target_ip': faucet_vip
}
def verify_router_cache(self, ip_match, eth_match, vid, dp_id):
"""Verify router nexthop cache stores correct values"""
host_valve = self.valves_manager.valves[dp_id]
for valve in self.valves_manager.valves.values():
valve_vlan = valve.dp.vlans[vid]
route_manager = valve._route_manager_by_eth_type.get( # pylint: disable=protected-access
self.get_eth_type(), None)
vlan_nexthop_cache = route_manager._vlan_nexthop_cache(valve_vlan) # pylint: disable=protected-access
self.assertTrue(vlan_nexthop_cache)
host_ip = ipaddress.ip_address(ip_match)
# Check IP address is properly cached
self.assertIn(host_ip, vlan_nexthop_cache)
nexthop = vlan_nexthop_cache[host_ip]
# Check MAC address is properly cached
self.assertEqual(eth_match, nexthop.eth_src)
if host_valve != valve:
# Check the proper nexthop port is cached
expected_port = valve.dp.shortest_path_port(host_valve.dp.name)
self.assertEqual(expected_port, nexthop.port)
def test_router_cache_learn_hosts(self):
"""Have all router caches contain proper host nexthops"""
# Learn Vlan100 hosts
for host_id in self.V100_HOSTS:
dp_id = host_id
self.rcv_packet(1, self.V100, self.create_match(
1, host_id, self.VLAN100_FAUCET_MAC,
self.VLAN100_FAUCET_VIPS, arp.ARP_REPLY), dp_id=dp_id)
self.verify_router_cache(
self.create_ip(1, host_id), self.create_mac(1, host_id), self.V100, dp_id)
# Learn Vlan200 hosts
for host_id in self.V200_HOSTS:
dp_id = host_id
self.rcv_packet(2, self.V200, self.create_match(
2, host_id, self.VLAN200_FAUCET_MAC,
self.VLAN200_FAUCET_VIPS, arp.ARP_REPLY), dp_id=dp_id)
self.verify_router_cache(
self.create_ip(2, host_id), self.create_mac(2, host_id), self.V200, dp_id)
| 38.345979
| 118
| 0.518335
|
4a084379df918ed1341b646644e212e6afae97df
| 2,904
|
bzl
|
Python
|
test/starlark_tests/rules/dsyms_test.bzl
|
uber-common/rules_apple
|
12ac0738c56f8a15c714a7e09ec87a1bbdbcada9
|
[
"Apache-2.0"
] | 2
|
2020-06-22T11:57:11.000Z
|
2021-04-09T20:20:35.000Z
|
test/starlark_tests/rules/dsyms_test.bzl
|
fnazarios/rules_apple
|
7d9a469023b55d8c047c4f02e3fe14e64c91e8ff
|
[
"Apache-2.0"
] | null | null | null |
test/starlark_tests/rules/dsyms_test.bzl
|
fnazarios/rules_apple
|
7d9a469023b55d8c047c4f02e3fe14e64c91e8ff
|
[
"Apache-2.0"
] | 1
|
2021-03-26T20:14:03.000Z
|
2021-03-26T20:14:03.000Z
|
# Copyright 2019 The Bazel Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Starlark test rules for debug symbols."""
load(
"@build_bazel_rules_apple//apple:providers.bzl",
"AppleBinaryInfo",
"AppleBundleInfo",
)
load(
"@bazel_skylib//lib:paths.bzl",
"paths",
)
load(
"@bazel_skylib//lib:unittest.bzl",
"analysistest",
"asserts",
)
def _dsyms_test_impl(ctx):
    """Implementation of the dsyms_test rule.

    Asserts that the target under test's "dsyms" output group contains the
    Info.plist and DWARF binary for every bundle named in expected_dsyms.
    """
    env = analysistest.begin(ctx)
    target_under_test = ctx.attr.target_under_test[0]
    if AppleBundleInfo in target_under_test:
        platform_type = target_under_test[AppleBundleInfo].platform_type
        # Simulator architecture: watchOS simulators are 32-bit (i386),
        # everything else here is x86_64.
        if platform_type == "watchos":
            architecture = "i386"
        else:
            architecture = "x86_64"
    elif AppleBinaryInfo in target_under_test:
        # AppleBinaryInfo does not supply a platform_type. In this case, assume x86_64.
        architecture = "x86_64"
    else:
        fail(("Target %s does not provide AppleBundleInfo or AppleBinaryInfo") %
             target_under_test.label)
    # Dict used as a set of output short_paths for O(1) membership checks.
    outputs = {
        x.short_path: None
        for x in target_under_test[OutputGroupInfo]["dsyms"].to_list()
    }
    package = target_under_test.label.package
    expected_infoplists = [
        "{0}/{1}.dSYM/Contents/Info.plist".format(package, x)
        for x in ctx.attr.expected_dsyms
    ]
    # DWARF file name is "<bundle_name_without_extension>_<architecture>".
    expected_binaries = [
        "{0}/{1}.dSYM/Contents/Resources/DWARF/{2}_{3}".format(
            package,
            x,
            paths.split_extension(x)[0],
            architecture,
        )
        for x in ctx.attr.expected_dsyms
    ]
    for expected in expected_infoplists + expected_binaries:
        asserts.true(
            env,
            expected in outputs,
            msg = "Expected\n\n{0}\n\nto be built. Contents were:\n\n{1}\n\n".format(
                expected,
                "\n".join(outputs.keys()),
            ),
        )
    return analysistest.end(env)
# Analysis test rule: builds the target under test with dSYM generation
# forced on, then runs _dsyms_test_impl to check the produced artifacts.
dsyms_test = analysistest.make(
    _dsyms_test_impl,
    attrs = {
        "expected_dsyms": attr.string_list(
            mandatory = True,
            doc = """
List of bundle names in the format <bundle_name>.<bundle_extension> to verify that dSYMs bundles are
created for them.
""",
        ),
    },
    config_settings = {
        "//command_line_option:apple_generate_dsym": "true",
    },
)
| 29.333333
| 100
| 0.640496
|
4a084487c244576811ebd58987fa495da7f0f7b2
| 10,141
|
py
|
Python
|
moments_dnns/run_experiment.py
|
alabatie/moments-dnns
|
fea8f96481599be387be7612e8eaa26b097906f7
|
[
"Apache-2.0"
] | 4
|
2019-08-11T22:54:38.000Z
|
2020-02-14T08:34:10.000Z
|
moments_dnns/run_experiment.py
|
labatie-ai/moments-dnns
|
fea8f96481599be387be7612e8eaa26b097906f7
|
[
"Apache-2.0"
] | null | null | null |
moments_dnns/run_experiment.py
|
labatie-ai/moments-dnns
|
fea8f96481599be387be7612e8eaa26b097906f7
|
[
"Apache-2.0"
] | 2
|
2019-08-11T22:57:24.000Z
|
2020-04-29T08:06:49.000Z
|
import numpy as np
from tqdm.auto import tqdm
import fire
import inspect
from moments_dnns.manage_experiments import save_experiment
from moments_dnns.main_utils import get_name_moments, get_submodel_constants
from moments_dnns.main_utils import load_dataset, make_asserts
from moments_dnns.models import init_original_model, reset_model
from moments_dnns.models import init_ff_model, init_res_model
def run_experiment(
    architecture,
    total_depth,
    kernel_size,
    num_channels,
    batch_size,
    num_realizations,
    name_experiment,
    boundary="periodic",
    dataset="cifar10",
    epsilon=0.001,
    res_depth=2,
    num_computations=100,
    numpy_seed=0,
    verbose=True,
    compute_reff_signal=True,
    compute_reff_noise=True,
):
    """run_experiment
    Entry point of the code to run experiments
    # Steps
        - Assert that experiment constants are valid
        - Load data
        - Get name of moments to be computed
        - Initialize Keras models
        - For each realization, propagate noise and signal, fetch moments
        - Save moments in results/name_experiment/ as .npy files
    # Usage
        - This function can be imported as a standard python function
        - Or execute directly as a script with the fire interface, e.g.
            ```python run_experiment.py --architecture=bn_ff
                --total_depth=200 --kernel_size=3 --num_channels=512
                --boundary=periodic --dataset=cifar10 --batch_size=64
                --num_realizations=1000 --name_experiment=bn_ff```
    # Arguments
        architecture (str): 'vanilla' or 'bn_ff' or 'bn_res'
        total_depth (int): total depth of the experiment
        kernel_size (int): spatial extent of convolutional kernel
        num_channels (int): number of channels
        batch_size (int): number of images considered for each realization
            (in other words, 1 realization = 1 batch)
        num_realizations (int): number of realizations in the experiment,
            i.e. number of randomly initialized simultaneous propagation of
            signal on noise with computation of moments
        name_experiment (str): name of experiment / directory to save results
            (if directory already exists, it will be deleted and created again)
        boundary (str): boundary condition among 'periodic' or 'symmetric'
            or 'zero_padding' (only relevant if kernel_size > 1)
        dataset (str): 'cifar10' or 'mnist'
        epsilon (float): batch normalization fuzz factor
            (only relevant if architecture = 'bn_ff' or 'bn_res')
        res_depth (int): feedforward depth of residual units
            (only relevant if architecture = 'bn_res')
        num_computations (int): total number of moment computations
            (moment computation every total depth // num_computations layers)
        numpy_seed (int):
            - seed to reproduce image selection
            - it does not lead to fully deterministic behaviour either,
                but this is not a problem since we are only concerned
                in expectations and 1-sigma intervals
        verbose (bool): whether parameter values are printed
        compute_reff_signal (bool): whether reff is computed for the signal
        compute_reff_noise (bool): whether reff is computed for the noise
    """
    if verbose:
        # print parameter names and values
        frame = inspect.currentframe()
        args, _, _, param_values = inspect.getargvalues(frame)
        print("Running experiment with parameters:")
        for name_param in args:
            print("   {} = {}".format(name_param, param_values[name_param]))
    # assertions
    make_asserts(
        architecture=architecture,
        kernel_size=kernel_size,
        num_channels=num_channels,
        boundary=boundary,
        total_depth=total_depth,
        dataset=dataset,
        num_computations=num_computations,
        batch_size=batch_size,
    )
    # load data (all images are flattened if kernel_size = 1)
    signal_original, (
        original_strides,
        original_num,
        original_size,
        original_channels,
    ) = load_dataset(dataset, kernel_size)
    # get name of moments to be computed
    name_moments_raw, locs, (num_moments_raw, num_moments) = get_name_moments(
        architecture, compute_reff_signal, compute_reff_noise
    )
    # get submodel constants
    spatial_size, num_submodels, sub_depth, delta_moments = get_submodel_constants(
        original_size, original_strides, total_depth, num_computations
    )
    # initialize original model (maps raw images/noise into the network input)
    original_model = init_original_model(
        original_size=original_size,
        kernel_size=kernel_size,
        original_channels=original_channels,
        num_channels=num_channels,
        boundary=boundary,
        original_strides=original_strides,
    )
    if architecture == "vanilla":
        # vanilla net
        submodel = init_ff_model(
            spatial_size=spatial_size,
            kernel_size=kernel_size,
            num_channels=num_channels,
            boundary=boundary,
            sub_depth=sub_depth,
            delta_moments=delta_moments,
            name_moments_raw=name_moments_raw,
            batch_normalization=False,
        )
    elif architecture == "bn_ff":
        # batch normalized feedforward net
        submodel = init_ff_model(
            spatial_size=spatial_size,
            kernel_size=kernel_size,
            num_channels=num_channels,
            boundary=boundary,
            sub_depth=sub_depth,
            delta_moments=delta_moments,
            name_moments_raw=name_moments_raw,
            batch_normalization=True,
        )
    elif architecture == "bn_res":
        # batch normalized resnet
        submodel = init_res_model(
            spatial_size=spatial_size,
            kernel_size=kernel_size,
            num_channels=num_channels,
            boundary=boundary,
            sub_depth=sub_depth,
            res_depth=res_depth,
            delta_moments=delta_moments,
            name_moments_raw=name_moments_raw,
        )
    # Fix numpy seed for image selection
    np.random.seed(numpy_seed)
    # this dict will aggregate all moments from all realizations
    moments = {}
    # save depth associated with each computation of moments
    moments["depth"] = (
        total_depth // num_computations * np.arange(1, num_computations + 1)
    )
    # save res_depth (only relevant for resnets in the power law fit for plots)
    moments["res_depth"] = res_depth
    for ireal in tqdm(range(num_realizations)):
        # randomly sample original signal and noise
        ind_real = np.random.permutation(original_num)[:batch_size]
        signal = signal_original[
            ind_real,
        ]
        # Start with unit variance noise
        #  since all pathologies are invariant to original noise scaling and
        #  since we use the right equations of propagation - linear in
        #  the input noise - this works, and later avoids the normalization
        #  mu2(dx^0) in chi^l
        noise = np.random.normal(
            0, 1, (batch_size, original_size, original_size, original_channels)
        )
        # normalize with constant rescaling to have mu2_signal = 1
        #  this later avoids the additional normalization mu2(x^0) in chi^l
        mean_signal = signal.mean(axis=(0, 1, 2), keepdims=True)
        std_signal = signal.std(axis=(0, 1, 2), keepdims=True)
        signal = (signal - mean_signal) / std_signal
        # pass original signal and noise through original model
        inputs = [signal, noise]
        reset_model(original_model)
        outputs = original_model.predict(inputs, batch_size=batch_size)
        # incorporate logarithm of mu2(dx^l)
        log_noise = np.zeros((batch_size, 1, 1, 1))  # start at zero log
        inputs = outputs + [log_noise]
        # pass through the same keras submodel, each time reinitialized
        moments_raw = []
        for imodel in range(num_submodels):  # total depth divided in submodels
            reset_model(submodel)  # reinitialize submodel
            outputs = submodel.predict(inputs, batch_size=batch_size)
            moments_raw += outputs[3:]  # fetch signal, noise, log_noise
            inputs = outputs[:3]  # fetch moments
        # add locs to moments
        moments_real = {}
        for iloc, loc in enumerate(locs):
            for iraw, name_moment_raw in enumerate(name_moments_raw):
                imoment = iloc * num_moments_raw + iraw
                # stride over the flat list: one entry per computation depth
                moment = moments_raw[imoment::num_moments]
                # convert to float128 to deal with large values
                # NOTE(review): np.float128 is unavailable on some platforms
                # (e.g. Windows); confirm target platforms.
                moment = np.array(moment, dtype=np.float128)
                # average over fake batch dimension
                #  - this is just a dummy dimension added by keras,
                #    which necessarily returns an array (batch_size,)
                #  - outputs are already constants with respect to this dim
                moment = moment.mean(1)
                if "mu2_noise" in name_moment_raw:
                    # take exp for mu_2_noise, since it comes in log scale
                    #  to avoid overflow inside model
                    moment = np.exp(moment)
                # add loc
                name_moment = name_moment_raw + "_" + loc
                moments_real[name_moment] = moment
            # compute normalized sensitivity
            chi_square = (
                moments_real["mu2_noise_" + loc] / moments_real["mu2_signal_" + loc]
            )
            moments_real["chi_" + loc] = np.sqrt(chi_square)
        # add to aggregation
        for name_moment, moment in moments_real.items():
            if name_moment not in moments:  # initialize array
                moments[name_moment] = np.empty((0, num_computations))
            moments[name_moment] = np.vstack((moments[name_moment], moment))
    # save experiment
    save_experiment(moments, name_experiment)
if __name__ == "__main__":
    # Expose run_experiment as a command-line interface via python-fire.
    fire.Fire(run_experiment)
| 38.558935
| 84
| 0.645992
|
4a0845411bcc64b103e36d60f198383ada971d34
| 19,265
|
py
|
Python
|
scripts/gen_pseudo_label.py
|
knjcode/kaggle-kuzushiji-recognition-2019
|
2aa47722e961745898f70d40145ecd286666f8b7
|
[
"MIT"
] | 20
|
2019-10-25T17:28:26.000Z
|
2020-12-24T06:29:04.000Z
|
scripts/gen_pseudo_label.py
|
knjcode/kaggle-kuzushiji-recognition-2019
|
2aa47722e961745898f70d40145ecd286666f8b7
|
[
"MIT"
] | 3
|
2021-06-08T20:31:34.000Z
|
2022-03-12T00:03:05.000Z
|
scripts/gen_pseudo_label.py
|
knjcode/kaggle-kuzushiji-recognition-2019
|
2aa47722e961745898f70d40145ecd286666f8b7
|
[
"MIT"
] | 6
|
2019-11-24T02:09:08.000Z
|
2022-03-24T12:27:21.000Z
|
#!/usr/bin/env python
# coding: utf-8
import math
import os
import pickle
import pandas as pd
import numpy as np
import torch
from PIL import Image
from sklearn.metrics import accuracy_score, recall_score, precision_score, f1_score
from sklearn.model_selection import train_test_split
import lightgbm as lgb
from sklearn import metrics
from util.functions import Rectangle, has_intersect, intersect, score_page, get_center_point, \
l2_distance, get_radian, get_nearest_box
# Directory holding per-image detection pickles (one per test image).
save_dir = 'test_nms030_tta7_first_5models_soft_prob'
# NOTE(review): gather_info appears unused in this file — confirm before removing.
gather_info = True
# Whether to crop character images for pseudo-labelling.
cropping = True
# Expand each crop by padding_rate of the mean box side before cropping.
expand_crop = True
padding_rate = 0.05
# Output root for cropped pseudo-label character images.
crop_target_dir = 'input/pseudo_images'
# Minimum classifier score required to crop a character image.
crop_prob = 1.0
# NOTE(review): file handle from open() is never closed (leaks until GC);
# a with-block would be safer.
target_images = [line.rstrip() for line in open('input/test_images.list').readlines()]
count = len(target_images)
def check_hiragana(label):
    """Return 1 if *label* (a 'U+XXXX' codepoint string) falls in the
    Unicode hiragana block (U+3040-U+309F), else 0."""
    codepoint = int(label.replace('U+', '0x'), 16)
    return 1 if 0x3040 <= codepoint <= 0x309F else 0
def is_first_in_second(a, b):
    """Return True if box *a* (xmin, ymin, xmax, ymax) lies entirely
    inside box *b* (touching edges count as inside)."""
    a_xmin, a_ymin, a_xmax, a_ymax = a[0], a[1], a[2], a[3]
    b_xmin, b_ymin, b_xmax, b_ymax = b[0], b[1], b[2], b[3]
    return (b_xmin <= a_xmin and a_xmax <= b_xmax
            and b_ymin <= a_ymin and a_ymax <= b_ymax)
def check_box(boxlist, size, prob_list):
    """Compute per-box diagnostics for a page's detections.

    Args:
        boxlist: sequence of (xmin, ymin, xmax, ymax) boxes.
        size: (page_width, page_height) — kept for interface compatibility,
            not used by the current checks.
        prob_list: per-box scores — kept for interface compatibility, unused.

    Returns:
        Four parallel lists, one entry per box:
        broken_box_list: 1 if the box is degenerate (Rectangle() raises ValueError).
        inside_box_list: 1 if the box lies entirely inside another box.
        has_box_list: 1 if the box entirely contains another box.
        overlap_rate_list: sum over other boxes of
            (intersection area / this box's area).
    """
    broken_box_list = [0] * len(boxlist)
    inside_box_list = [0] * len(boxlist)
    has_box_list = [0] * len(boxlist)
    overlap_rate_list = [0.] * len(boxlist)
    for i, current_box in enumerate(boxlist):
        if broken_box_list[i] == 1:
            continue
        try:
            current_rect = Rectangle(*current_box)
        except ValueError:
            broken_box_list[i] = 1
            continue
        current_rect_overlap = 0.
        for j, target_box in enumerate(boxlist):
            try:
                target_rect = Rectangle(*target_box)
            except ValueError:
                # BUG FIX: was `borken_box_list[j] = 1` (a NameError at
                # runtime); mark the degenerate *target* box as broken.
                broken_box_list[j] = 1
                continue
            if i == j:
                continue
            if is_first_in_second(current_box, target_box):
                inside_box_list[i] = 1
                has_box_list[j] = 1
            if has_intersect(current_rect, target_rect):
                overlap_rate = intersect(current_rect, target_rect).area() / current_rect.area()
                current_rect_overlap += overlap_rate
        overlap_rate_list[i] = current_rect_overlap
    return broken_box_list, inside_box_list, has_box_list, overlap_rate_list
def gen_info(prob, label, bbox, box_score, broken, overlap_rate, nearest_dict, new_boxlist, size, image_id):
    """Build the submission string and LightGBM feature dict for one detection.

    Args:
        prob: classifier score of this character.
        label: predicted codepoint label ('U+XXXX').
        bbox: (xmin, ymin, xmax, ymax) of this detection.
        box_score: detector score for the box.
        broken: broken flag from check_box (currently not emitted as a feature).
        overlap_rate: summed overlap ratio from check_box.
        nearest_dict: list of {'index', 'distance', 'radian'} dicts for the
            nearest boxes, ordered nearest-first (may have fewer than 5 entries).
        new_boxlist: all boxes on the page (used for page statistics).
        size: (page_width, page_height).
        image_id: page identifier.

    Returns:
        (sub_str, current_info): "label x_center y_center" submission fragment
        and the feature dict.
    """
    # Collect per-page box statistics (width/height/area/center distributions).
    w_list = []
    h_list = []
    area_list = []
    x_point_list = []
    y_point_list = []
    for xmin, ymin, xmax, ymax in new_boxlist:
        w = round(float(xmax - xmin))
        w_list.append(w)
        h = round(float(ymax - ymin))
        h_list.append(h)
        area_list.append(w*h)
        center_point = get_center_point((xmin, ymin, xmax, ymax))
        x_point_list.append(center_point[0])
        y_point_list.append(center_point[1])
    wl = pd.Series(w_list)
    hl = pd.Series(h_list)
    al = pd.Series(area_list)
    xl = pd.Series(x_point_list)
    yl = pd.Series(y_point_list)
    mean_area = al.mean()
    mean_width = wl.mean()
    mean_height = hl.mean()
    mean_x = xl.mean()
    mean_y = yl.mean()
    # NOTE(review): with a single box on the page these std values are NaN,
    # which propagates NaN into the *_std_rate features; LightGBM tolerates
    # NaN, so this is presumably intentional — confirm.
    std_area = al.std()
    std_width = wl.std()
    std_height = hl.std()
    std_x = xl.std()
    std_y = yl.std()
    median_area = al.median()
    median_width = wl.median()
    median_height = hl.median()
    median_x = xl.median()
    median_y = yl.median()
    box_num = len(new_boxlist)
    # Geometry of the five nearest boxes; IndexError means fewer neighbours exist.
    try:
        nearest_box = new_boxlist[nearest_dict[0]['index']]
        nearest_width = round(float(nearest_box[2] - nearest_box[0]))
        nearest_height = round(float(nearest_box[3] - nearest_box[1]))
    except IndexError:
        nearest_width = np.nan
        nearest_height = np.nan
    try:
        nearest2_box = new_boxlist[nearest_dict[1]['index']]
        nearest2_width = round(float(nearest2_box[2] - nearest2_box[0]))
        nearest2_height = round(float(nearest2_box[3] - nearest2_box[1]))
    except IndexError:
        nearest2_width = np.nan
        nearest2_height = np.nan
    try:
        nearest3_box = new_boxlist[nearest_dict[2]['index']]
        nearest3_width = round(float(nearest3_box[2] - nearest3_box[0]))
        nearest3_height = round(float(nearest3_box[3] - nearest3_box[1]))
    except IndexError:
        nearest3_width = np.nan
        nearest3_height = np.nan
    try:
        nearest4_box = new_boxlist[nearest_dict[3]['index']]
        nearest4_width = round(float(nearest4_box[2] - nearest4_box[0]))
        nearest4_height = round(float(nearest4_box[3] - nearest4_box[1]))
    except IndexError:
        nearest4_width = np.nan
        nearest4_height = np.nan
    try:
        nearest5_box = new_boxlist[nearest_dict[4]['index']]
        nearest5_width = round(float(nearest5_box[2] - nearest5_box[0]))
        nearest5_height = round(float(nearest5_box[3] - nearest5_box[1]))
    except IndexError:
        nearest5_width = np.nan
        nearest5_height = np.nan
    try:
        nearest_radian = nearest_dict[0]['radian']
        nearest_distance = nearest_dict[0]['distance']
    except IndexError:
        nearest_radian = np.nan
        nearest_distance = np.nan
    try:
        nearest_radian2 = nearest_dict[1]['radian']
        nearest_distance2 = nearest_dict[1]['distance']
    except IndexError:
        nearest_radian2 = np.nan
        nearest_distance2 = np.nan
    try:
        nearest_radian3 = nearest_dict[2]['radian']
        nearest_distance3 = nearest_dict[2]['distance']
    except IndexError:
        nearest_radian3 = np.nan
        nearest_distance3 = np.nan
    try:
        nearest_radian4 = nearest_dict[3]['radian']
        nearest_distance4 = nearest_dict[3]['distance']
    except IndexError:
        nearest_radian4 = np.nan
        nearest_distance4 = np.nan
    try:
        nearest_radian5 = nearest_dict[4]['radian']
        nearest_distance5 = nearest_dict[4]['distance']
    except IndexError:
        nearest_radian5 = np.nan
        nearest_distance5 = np.nan
    center_point = get_center_point(bbox)
    sub_str = f"{label} {center_point[0]} {center_point[1]}"
    width = bbox[2] - bbox[0]
    height = bbox[3] - bbox[1]
    x_center, y_center = center_point
    current_info = {
        'image_id': image_id,
        'char': label,
        'char_score': prob,
        'is_hiragana': check_hiragana(label),
        'bbox': bbox,
        'bbox_score': box_score,
        # 'broken': broken,
        # 'inside': inside,
        # 'has_box': has_box,
        'overlap_rate': overlap_rate,
        'page_width': size[0],
        'page_height': size[1],
        # 'width': width,
        'width_page_rate': width / size[0],
        'width_mean_rate': width / mean_width,
        'width_std_rate': width / std_width if std_width else 0.,
        'width_median_rate': width / median_width,
        # 'height': height,
        'height_page_rate': height / size[1],
        'height_mean_rate': height / mean_height,
        'height_std_rate': height / std_height if std_height else 0.,
        'height_median_rate': height / median_height,
        # 'area': width * height,
        # NOTE(review): `/ size[0] * size[1]` evaluates as (a/size[0])*size[1],
        # not a/(size[0]*size[1]) — likely a precedence bug (same pattern in the
        # nearest*_area_page_rate features below). The trained booster was fit
        # on these values, so changing it would break model compatibility.
        'area_page_rate': width * height / size[0] * size[1],
        'area_mean_rate': width * height / mean_area,
        'area_std_rate': width * height / std_area if std_area else 0,
        'area_median_rate': width * height / median_area,
        # 'nearest_width': nearest_width,
        'nearest_width_page_rate': nearest_width / size[0],
        'nearest_width_mean_rate': nearest_width / mean_width,
        'nearest_width_std_rate': nearest_width / std_width if std_width else 0,
        'nearest_width_median_rate': nearest_width / median_width,
        # 'nearest2_width': nearest2_width,
        'nearest2_width_page_rate': nearest2_width / size[0],
        'nearest2_width_mean_rate': nearest2_width / mean_width,
        'nearest2_width_std_rate': nearest2_width / std_width if std_width else 0,
        'nearest2_width_median_rate': nearest2_width / median_width,
        # 'nearest3_width': nearest3_width,
        'nearest3_width_page_rate': nearest3_width / size[0],
        'nearest3_width_mean_rate': nearest3_width / mean_width,
        'nearest3_width_std_rate': nearest3_width / std_width if std_width else 0,
        'nearest3_width_median_rate': nearest3_width / median_width,
        # 'nearest4_width': nearest4_width,
        'nearest4_width_page_rate': nearest4_width / size[0],
        'nearest4_width_mean_rate': nearest4_width / mean_width,
        'nearest4_width_std_rate': nearest4_width / std_width if std_width else 0,
        'nearest4_width_median_rate': nearest4_width / median_width,
        # 'nearest5_width': nearest5_width,
        'nearest5_width_page_rate': nearest5_width / size[0],
        'nearest5_width_mean_rate': nearest5_width / mean_width,
        'nearest5_width_std_rate': nearest5_width / std_width if std_width else 0,
        'nearest5_width_median_rate': nearest5_width / median_width,
        # 'nearest_height': nearest_height,
        # NOTE(review): the nearest*_height_page_rate features divide by
        # size[0] (page width), not size[1] — likely copy-paste; kept for
        # model compatibility.
        'nearest_height_page_rate': nearest_height / size[0],
        'nearest_height_mean_rate': nearest_height / mean_height,
        'nearest_height_std_rate': nearest_height / std_height if std_height else 0,
        'nearest_height_median_rate': nearest_height / median_height,
        # 'nearest2_height': nearest2_height,
        'nearest2_height_page_rate': nearest2_height / size[0],
        'nearest2_height_mean_rate': nearest2_height / mean_height,
        'nearest2_height_std_rate': nearest2_height / std_height if std_height else 0,
        'nearest2_height_median_rate': nearest2_height / median_height,
        # 'nearest3_height': nearest3_height,
        'nearest3_height_page_rate': nearest3_height / size[0],
        'nearest3_height_mean_rate': nearest3_height / mean_height,
        'nearest3_height_std_rate': nearest3_height / std_height if std_height else 0,
        'nearest3_height_median_rate': nearest3_height / median_height,
        # 'nearest4_height': nearest4_height,
        'nearest4_height_page_rate': nearest4_height / size[0],
        'nearest4_height_mean_rate': nearest4_height / mean_height,
        'nearest4_height_std_rate': nearest4_height / std_height if std_height else 0,
        'nearest4_height_median_rate': nearest4_height / median_height,
        # 'nearest5_height': nearest5_height,
        'nearest5_height_page_rate': nearest5_height / size[0],
        'nearest5_height_mean_rate': nearest5_height / mean_height,
        'nearest5_height_std_rate': nearest5_height / std_height if std_height else 0,
        'nearest5_height_median_rate': nearest5_height / median_height,
        # 'nearest_area': nearest_width * nearest_height,
        'nearest_area_page_rate': nearest_width * nearest_height / size[0] * size[1],
        'nearest_area_mean_rate': nearest_width * nearest_height / mean_area,
        'nearest_area_std_rate': nearest_width * nearest_height / std_area if std_area else 0,
        'nearest_area_median_rate': nearest_width * nearest_height / median_area,
        # 'nearest2_area': nearest2_width * nearest2_height,
        'nearest2_area_page_rate': nearest2_width * nearest2_height / size[0] * size[1],
        'nearest2_area_mean_rate': nearest2_width * nearest2_height / mean_area,
        'nearest2_area_std_rate': nearest2_width * nearest2_height / std_area if std_area else 0,
        'nearest2_area_median_rate': nearest2_width * nearest2_height / median_area,
        # 'nearest3_area': nearest3_width * nearest3_height,
        'nearest3_area_page_rate': nearest3_width * nearest3_height / size[0] * size[1],
        'nearest3_area_mean_rate': nearest3_width * nearest3_height / mean_area,
        'nearest3_area_std_rate': nearest3_width * nearest3_height / std_area if std_area else 0,
        'nearest3_area_median_rate': nearest3_width * nearest3_height / median_area,
        # 'nearest4_area': nearest4_width * nearest4_height,
        'nearest4_area_page_rate': nearest4_width * nearest4_height / size[0] * size[1],
        'nearest4_area_mean_rate': nearest4_width * nearest4_height / mean_area,
        'nearest4_area_std_rate': nearest4_width * nearest4_height / std_area if std_area else 0,
        'nearest4_area_median_rate': nearest4_width * nearest4_height / median_area,
        # 'nearest5_area': nearest5_width * nearest5_height,
        'nearest5_area_page_rate': nearest5_width * nearest5_height / size[0] * size[1],
        'nearest5_area_mean_rate': nearest5_width * nearest5_height / mean_area,
        'nearest5_area_std_rate': nearest5_width * nearest5_height / std_area if std_area else 0,
        'nearest5_area_median_rate': nearest5_width * nearest5_height / median_area,
        # 'nearest_distance': nearest_dict[0]['distance'],
        'nearest_distance_page_width_rate': nearest_distance / size[0],
        'nearest_distance_page_height_rate': nearest_distance / size[1],
        # 'nearest2_distance': nearest_dict[1]['distance'],
        'nearest2_distance_page_width_rate': nearest_distance2 / size[0],
        'nearest2_distance_page_height_rate': nearest_distance2 / size[1],
        # 'nearest3_distance': nearest_dict[2]['distance'],
        'nearest3_distance_page_width_rate': nearest_distance3 / size[0],
        'nearest3_distance_page_height_rate': nearest_distance3 / size[1],
        # 'nearest4_distance': nearest_dict[3]['distance'],
        'nearest4_distance_page_width_rate': nearest_distance4 / size[0],
        'nearest4_distance_page_height_rate': nearest_distance4 / size[1],
        # 'nearest5_distance': nearest_dict[4]['distance'],
        'nearest5_distance_page_width_rate': nearest_distance5 / size[0],
        'nearest5_distance_page_height_rate': nearest_distance5 / size[1],
        'nearest_radian': nearest_radian,
        'nearest2_radian': nearest_radian2,
        'nearest3_radian': nearest_radian3,
        'nearest4_radian': nearest_radian4,
        'nearest5_radian': nearest_radian5,
        # 'x': x_center,
        # 'y': y_center,
        'x_mean_rate': x_center / mean_x,
        # NOTE(review): key 'y_mean_yrate' looks like a typo for 'y_mean_rate';
        # kept because the trained booster expects this column name.
        'y_mean_yrate': y_center /mean_y,
        'x_std_rate': x_center / std_x,
        'y_std_rate': y_center / std_y,
        'x_median_rate': x_center / median_x,
        'y_median_rate': y_center / median_y,
        'x_page_rate': x_center / size[0],
        'y_page_rate': y_center / size[1],
        'mean_area': mean_area,
        'mean_width': mean_width,
        'mean_height': mean_height,
        'mean_x': mean_x,
        'mean_y': mean_y,
        'std_area': std_area,
        'std_width': std_width,
        'std_height': std_height,
        'std_x': std_x,
        'std_y': std_y,
        'median_area': median_area,
        'median_width': median_width,
        'median_height': median_height,
        'median_x': median_x,
        'median_y': median_y,
        'box_num': box_num,
    }
    return sub_str, current_info
def gen_csv_lgbm(prob_threshold, model_path, booster=False):
    """Filter raw detections with a LightGBM model and write the submission CSV.

    For each test page, loads the pickled detections, builds gen_info features,
    scores them with the LightGBM model (single model or an ensemble of
    boosters averaged), keeps detections with predicted prob >= prob_threshold,
    and optionally crops high-confidence characters into crop_target_dir for
    pseudo-labelling. Writes 'first_model_submission.csv'.

    Args:
        prob_threshold: keep a detection when the LightGBM score is >= this.
        model_path: pickle file containing either a fitted model or a list of
            boosters (when booster=True).
        booster: True when model_path holds a list of Booster objects.
    """
    if booster:
        with open(model_path, "rb") as fp:
            boosters = pickle.load(fp)
    else:
        with open(model_path, "rb") as fp:
            model = pickle.load(fp)
    # NOTE(review): after_score and target_file appear unused — confirm.
    after_score = []
    res = open('first_model_submission.csv', 'w')
    res.write('image_id,labels\n')
    write_count = 0
    for target_index in range(0, count):
        image_id = target_images[target_index]
        target_file = f'test_images/{image_id}.jpg'
        denoised_target_file = f'input/denoised_test/{image_id}.png'
        load_file = os.path.join(save_dir,image_id + '.pickle')
        with open(load_file, 'rb') as f:
            r = pickle.load(f)
        size = r['size']
        prob_list = r['prob_list']
        pred_labels = r['pred_labels']
        bbox_score = r['bbox_score']
        new_boxlist = r['new_boxlist']
        sub_info = []
        sub_list = []
        char_score_list = []
        box_score_list = []
        ## check box
        broken_box_list, inside_box_list, has_box_list, overlap_rate_list = check_box(new_boxlist, size, prob_list)
        ## check nearest box
        nearest_dict_list = get_nearest_box(new_boxlist)
        if cropping:
            orgimg = Image.open(denoised_target_file).convert('RGB')
        # Build one feature row + submission fragment per detection.
        for i, (prob, label, bbox, box_score, broken, overlap_rate, nearest_dict) in \
                enumerate(zip(prob_list, pred_labels, new_boxlist, bbox_score, broken_box_list, overlap_rate_list, nearest_dict_list)):
            sub_str, current_info = gen_info(prob, label, bbox, box_score, broken, overlap_rate, nearest_dict, new_boxlist, size, image_id)
            sub_info.append(current_info)
            sub_list.append(sub_str)
            char_score_list.append(prob)
            box_score_list.append(box_score)
        sub_df = pd.DataFrame(sub_info)
        try:
            # Non-numeric columns are not features for the booster.
            sub_df = sub_df.drop(['char', 'bbox', 'image_id'], axis=1)
        except KeyError:
            # sub_info is empty
            pass
        if len(sub_df) > 0:
            if booster:
                # Ensemble: average predictions over all folds' boosters.
                # NOTE(review): the loop variable shadows the `booster`
                # parameter; harmless here because the flag is not read again.
                y_pred_list = []
                for booster in boosters:
                    y_pred_list.append(booster.predict(sub_df, num_iteration=booster.best_iteration))
                y_pred = np.average(y_pred_list, axis=0)
            else:
                y_pred = model.predict(sub_df, num_iteration=model.best_iteration)
            tmp_sub_list = []
            for current_info, sub, prob, char_score, box_score in zip(sub_info, sub_list, y_pred, char_score_list, box_score_list):
                (xmin, ymin, xmax, ymax) = current_info['bbox']
                if prob >= prob_threshold:
                    tmp_sub_list.append(sub)
                    if char_score >= crop_prob and cropping:
                        (xmin, ymin, xmax, ymax) = current_info['bbox']
                        label = current_info['char']
                        image_id = current_info['image_id']
                        w = xmax - xmin
                        h = ymax - ymin
                        if expand_crop:
                            # Pad the crop by padding_rate of the mean side,
                            # clamped to the page bounds.
                            padding = round((w+h)/2 * padding_rate)
                            xmin = max(xmin - padding, 0)
                            ymin = max(ymin - padding, 0)
                            xmax = min(xmax + padding, size[0])
                            ymax = min(ymax + padding, size[1])
                        else:
                            pass
                        img_crop = orgimg.crop((xmin, ymin, xmax, ymax))
                        target_save_dir = os.path.join(crop_target_dir, label)
                        os.makedirs(target_save_dir, exist_ok=True)
                        target_filename = f"{image_id}_{xmin}_{ymin}_{xmax}_{ymax}.png"
                        save_path = os.path.join(target_save_dir, target_filename)
                        img_crop.save(save_path)
            sub_list = tmp_sub_list
        else:
            sub_list = []
        sub_labels = ' '.join(sub_list)
        res.write(image_id.rstrip() + ',' + sub_labels + '\n')
        res.flush()
        write_count += 1
        print(".", end='')
    res.close()
    print('')
    print('write_count:', write_count)
# Entry point: threshold detections at p >= 0.50 using the 5-fold booster ensemble.
gen_csv_lgbm(0.50, "models/booster_for_val_nms030_tta7_first_5models_soft_prob.pkl", booster=True)
| 41.341202
| 139
| 0.640696
|
4a0845600d5fabc5c8a743429f08006083c1629c
| 5,764
|
py
|
Python
|
psy_win/psy_windrive.py
|
ppsyOps/psyOps
|
d01746f64b206984a901ae11a522eabcb8a3d644
|
[
"MIT"
] | 1
|
2016-12-01T18:42:41.000Z
|
2016-12-01T18:42:41.000Z
|
psy_win/psy_windrive.py
|
cuihantao/psyOps
|
d01746f64b206984a901ae11a522eabcb8a3d644
|
[
"MIT"
] | null | null | null |
psy_win/psy_windrive.py
|
cuihantao/psyOps
|
d01746f64b206984a901ae11a522eabcb8a3d644
|
[
"MIT"
] | 1
|
2016-12-01T18:42:58.000Z
|
2016-12-01T18:42:58.000Z
|
# ********** FOR WINDOWS ONLY **********
# adapted from http://stackoverflow.com/questions/2625877/copy-files-to-windrive-path-or-drive-using-python
# The two NET USE commands come in pairs: whenever the mapping command has run,
# the matching unmap command should also run (even if an exception was raised
# somewhere in between).
# map_windrive() maps a Windows network share to a drive
# Returns: drive letter if succeeds, None if fails
def map_windrive(share, username=None, password=None, drive_letter=''):
    """Map a Windows network share to a drive letter via ``NET USE``.

    share        -- UNC path of the share, e.g. r"\\\\server\\folder"
    username     -- optional account for the share (passed as /USER:...)
    password     -- optional password for the share
    drive_letter -- letter to map to; when empty or already in use, the
                    last free letter (alphabetically) is chosen instead

    Returns: drive letter if succeeds, None if fails
    """
    if drive_letter == '' or is_windrive_mapped(drive_letter):
        drive_letter = unmapped_windrives()[-1]
    cmd_parts = ["NET USE %s: %s" % (drive_letter, share)]
    if password:
        cmd_parts.append(password)
    if username:
        cmd_parts.append("/USER:%s" % username)
    try:
        # BUGFIX: the original wrapped only `return drive_letter` in try/except,
        # so a failed NET USE could never be detected (a plain return cannot
        # raise). Check the command's exit status instead, honouring the
        # documented "None if fails" contract.
        if os.system(" ".join(cmd_parts)) != 0:
            return None
        return drive_letter
    except OSError:
        return None
# unmaps a windrive drive
def unmap_windrive(drive_letter):
    """Disconnect the given drive letter via ``NET USE /DELETE``.

    Returns the drive letter that was passed in, or None if issuing the
    shell command itself raised an exception.
    """
    try:
        command = "NET USE %s: /DELETE" % drive_letter
        os.system(command)
        return drive_letter
    except:
        return None
# returns list of unmapped drives
def unmapped_windrives(letters_only=True):
    """List drive letters that are currently NOT in use.

    Returns e.g. ['A', 'B', ...] (or ['A:', 'B:', ...] when letters_only
    is False), alphabetically ordered, or None on failure.
    """
    import os, string
    try:
        free = [letter for letter in string.ascii_uppercase
                if not os.path.exists(letter + ':')]
        if letters_only:
            return free
        return ['%s:' % letter for letter in free]
    except:
        return None
# returns list of mapped drives
def mapped_windrives(letters_only=True):
    """List drive letters that ARE currently in use.

    Returns e.g. ['C', 'D', ...] (or ['C:', 'D:', ...] when letters_only
    is False), alphabetically ordered, or None on failure.
    """
    import os, string
    try:
        used = [letter for letter in string.ascii_uppercase
                if os.path.exists(letter + ':')]
        if letters_only:
            return used
        return ['%s:' % letter for letter in used]
    except:
        return None
# alternative version of mapped_windrives() using win32api
def mapped_windrives_alt(letters_only=True):
    """Alternative drive enumeration backed by win32api (Windows only).

    Returns the list of mapped drives, or None when win32api is not
    installed or the query fails.
    """
    try:
        import win32api
        raw = win32api.GetLogicalDriveStrings()
        if letters_only:
            raw = raw.replace(':\\', '')
        # GetLogicalDriveStrings is NUL-separated with a trailing NUL.
        return raw.split('\000')[:-1]
    except:
        return None
# Returns first available drive letter, alphabetically
def first_unmapped_windrive(letter_only=True):
    """Return the alphabetically FIRST available drive letter."""
    candidates = unmapped_windrives(letter_only)
    return candidates[0]
# Returns last available drive letter, alphabetically
def last_unmapped_windrive(letter_only=True):
    """Return the alphabetically LAST available drive letter."""
    candidates = unmapped_windrives(letter_only)
    return candidates[-1]
# Returns True if drive letter available, False if unavailable (already in use)
def is_windrive_mapped(drive_letter):
    """Return True if drive_letter is in use, False if available.

    Only the first non-whitespace character is used, so 'X', 'X:' and
    'X:\\' are all accepted. Returns None if the check itself fails.
    """
    # BUGFIX: the original called the non-existent str.snip() (a typo for
    # str.strip()), so it always raised AttributeError and returned None.
    try:
        return drive_letter.strip()[0] in mapped_windrives(True)
    except:
        return None
# windrive_cntxt_mgr()
# Use with last_unmapped_windrive() or unmapped_windrives()[-1]
# to take action on folders and files on a Windows windrive drive.
from contextlib import contextmanager
@contextmanager
def windrive_cntxt_mgr(share, username=None, password=None, drive_letter=''):
    """Context manager that maps `share` (with the given credentials) to
    `drive_letter` on entry and always unmaps it on exit, even when the
    body of the `with` block raises."""
    mapped_letter = map_windrive(share, username, password, drive_letter)
    try:
        yield
    finally:
        unmap_windrive(mapped_letter)
# Example 1 of windrive_cntxt_mgr
def windrive_cntxt_mgr_example1():
    """Example: copy a file from a network share using windrive_cntxt_mgr()."""
    # Last unused drive letter, alphabetically.
    drive_letter = unmapped_windrives()[-1]
    # Source lives on the (about to be mapped) network drive; the
    # destination is on the local disk.
    src_file = str(drive_letter) + r":\etools\deployments\afcatc\application.properties"
    dst_file = r'C:\temp\delete.me2'
    with windrive_cntxt_mgr(r"\\corp.pjm.com\shares\special\Common", None, None, last_unmapped_windrive()):
        import shutil
        shutil.copyfile(src_file, dst_file)
    # On exit of the with-block the network drive is unmapped automatically.
# Example 2 of windrive_cntxt_mgr
def windrive_cntxt_mgr_example2():
    """Example: same copy as example1, with every windrive_cntxt_mgr()
    argument named explicitly beforehand."""
    # Share properties. Login goes through Active Directory, so no ID/PW
    # are embedded in the script.
    ntwk_path = r"\\corp.pjm.com\shares\special\Common" # windrive path for "I:\Common"
    username = None
    password = None
    # Last unused drive letter, alphabetically.
    drive_letter = unmapped_windrives()[-1]
    # Copy FROM the share TO the local drive.
    src_file = str(drive_letter) + r":\etools\deployments\afcatc\application.properties"
    dst_file = r'C:\temp\delete.me2'
    with windrive_cntxt_mgr(ntwk_path, username, password, drive_letter):
        import shutil
        shutil.copyfile(src_file, dst_file)
# Example of map_windrive and unmap_windrive
def copy_files_w_maped_windrive_example():
    """Example of map_windrive and unmap_windrive: map a share, copy a
    file from it, then always unmap again.

    Returns None on failure (and implicitly on success, mirroring the
    original API).
    """
    drive_letter = None
    try:
        # Map the share; with no letter given, any available one is used.
        drive_letter = map_windrive(r"\\corp.pjm.com\shares\special\Common")
        # Only continue if a drive letter was actually obtained.
        if not drive_letter:
            return None
        # Specify from and to path+file.
        from_file = str(drive_letter) + r"\etools\deployments\afcatc\application.properties"
        to_file = r'C:\temp\delete.me1'
        # Copy the file(s).
        import shutil
        # BUGFIX: the original copied `fr_path`/`to_path`, names that do not
        # exist in this function, so it always raised NameError.
        shutil.copyfile(from_file, to_file)
    except:
        return None
    finally:
        # BUGFIX: guard the cleanup — in the original, an exception raised
        # by map_windrive left `drive_letter` unbound and the finally-block
        # itself raised NameError.
        if drive_letter:
            unmap_windrive(drive_letter)
| 41.768116
| 171
| 0.685808
|
4a0846df3abe67bef7d87b06969825f8dbf7b9ac
| 11,443
|
py
|
Python
|
modules/dials/test/algorithms/refinement/test_parameter_auto_reduction.py
|
jorgediazjr/dials-dev20191018
|
77d66c719b5746f37af51ad593e2941ed6fbba17
|
[
"BSD-3-Clause"
] | null | null | null |
modules/dials/test/algorithms/refinement/test_parameter_auto_reduction.py
|
jorgediazjr/dials-dev20191018
|
77d66c719b5746f37af51ad593e2941ed6fbba17
|
[
"BSD-3-Clause"
] | null | null | null |
modules/dials/test/algorithms/refinement/test_parameter_auto_reduction.py
|
jorgediazjr/dials-dev20191018
|
77d66c719b5746f37af51ad593e2941ed6fbba17
|
[
"BSD-3-Clause"
] | 1
|
2020-02-04T15:39:06.000Z
|
2020-02-04T15:39:06.000Z
|
from __future__ import absolute_import, division, print_function
import copy
import pytest
from dials.algorithms.refinement.reflection_manager import (
phil_scope as refman_phil_scope,
)
from dials.algorithms.refinement.reflection_manager import ReflectionManagerFactory
from dials.algorithms.refinement.parameterisation.autoreduce import (
phil_scope as ar_phil_scope,
)
from dials.algorithms.refinement.parameterisation.autoreduce import AutoReduce
from dials.test.algorithms.refinement.test_stills_prediction_parameters import _Test
from dials.algorithms.refinement.prediction.managed_predictors import (
StillsExperimentsPredictor,
)
from dials.array_family import flex
from dials.algorithms.refinement import DialsRefineConfigError
@pytest.fixture(scope="session")
def tc():
    """Session-scoped fixture: a _Test stills rig whose reflections have
    been predicted and loaded into a finalised ReflectionManager (on
    ``test.refman``)."""
    test = _Test()
    # Predict the reflections in place and put in a reflection manager
    ref_predictor = StillsExperimentsPredictor(test.stills_experiments)
    ref_predictor(test.reflections)
    test.refman = ReflectionManagerFactory.from_parameters_reflections_experiments(
        refman_phil_scope.extract(),
        test.reflections,
        test.stills_experiments,
        do_stills=True,
    )
    test.refman.finalise()
    return test
def test_check_and_fail(tc):
    """AutoReduce.check_and_fail passes while min_nref_per_parameter times
    the detector's 6 free parameters stays within the 823 matched
    reflections, and raises DialsRefineConfigError once it exceeds them."""
    # There are 823 reflections and the detector parameterisation has 6 free
    # parameters
    assert len(tc.refman.get_matches()) == 823
    assert tc.det_param.num_free() == 6
    # Setting 137 reflections as the minimum should pass (137*6<823)
    options = ar_phil_scope.extract()
    options.min_nref_per_parameter = 137
    ar = AutoReduce(
        options,
        [tc.det_param],
        [tc.s0_param],
        [tc.xlo_param],
        [tc.xluc_param],
        gon_params=[],
        reflection_manager=tc.refman,
    )
    ar.check_and_fail()
    # Setting 138 reflections as the minimum should fail (138*6>823)
    options.min_nref_per_parameter = 138
    ar = AutoReduce(
        options,
        [tc.det_param],
        [tc.s0_param],
        [tc.xlo_param],
        [tc.xluc_param],
        gon_params=[],
        reflection_manager=tc.refman,
    )
    with pytest.raises(DialsRefineConfigError):
        ar.check_and_fail()
def test_check_and_fix(tc):
    """AutoReduce.check_and_fix keeps all parameterisations when the
    reflection minimum is satisfiable (137), and fixes/removes the whole
    detector parameterisation when it is not (138), leaving beam and
    crystal parameterisations untouched."""
    n_det = tc.det_param.num_free()
    n_beam = tc.s0_param.num_free()
    n_xlo = tc.xlo_param.num_free()
    n_xluc = tc.xluc_param.num_free()
    # Similar to test_check_and_fail, setting 137 reflections as the minimum
    # should leave all parameters free
    options = ar_phil_scope.extract()
    options.min_nref_per_parameter = 137
    ar = AutoReduce(
        options,
        [tc.det_param],
        [tc.s0_param],
        [tc.xlo_param],
        [tc.xluc_param],
        gon_params=[],
        reflection_manager=tc.refman,
    )
    ar.check_and_fix()
    assert ar.det_params[0].num_free() == n_det == 6
    assert ar.beam_params[0].num_free() == n_beam == 3
    assert ar.xl_ori_params[0].num_free() == n_xlo == 3
    assert ar.xl_uc_params[0].num_free() == n_xluc == 6
    # Setting 138 reflections as the minimum should fix all the detector
    # parameters and remove that parameterisation. The crystal unit cell also
    # has 6 parameters, but each parameter is considered separately, so the
    # critical minimum number of reflections is 138*1 not 138*6 in that case
    options = ar_phil_scope.extract()
    options.min_nref_per_parameter = 138
    ar = AutoReduce(
        options,
        [tc.det_param],
        [tc.s0_param],
        [tc.xlo_param],
        [tc.xluc_param],
        gon_params=[],
        reflection_manager=tc.refman,
    )
    ar.check_and_fix()
    assert not ar.det_params
    assert ar.xl_uc_params[0].num_free() == n_xluc
    assert ar.beam_params[0].num_free() == n_beam
    assert ar.xl_ori_params[0].num_free() == n_xlo
def test_check_and_remove():
    """AutoReduce.check_and_remove on a NON-hierarchical 3x3 multi-panel
    detector: with a satisfiable minimum (137) nothing changes; with an
    unsatisfiable one (138) the detector parameterisation is removed,
    which removes all its reflections and thereby every other
    parameterisation too."""
    test = _Test()
    # Override the single panel model and parameterisation. This test function
    # exercises the code for non-hierarchical multi-panel detectors. The
    # hierarchical detector version is tested via test_cspad_refinement.py
    from dxtbx.model import Detector
    from dials.algorithms.refinement.parameterisation.detector_parameters import (
        DetectorParameterisationMultiPanel,
    )
    from dials.test.algorithms.refinement.test_multi_panel_detector_parameterisation import (
        make_panel_in_array,
    )
    # Build a 3x3 grid of panels from the original single panel.
    multi_panel_detector = Detector()
    for x in range(3):
        for y in range(3):
            new_panel = make_panel_in_array((x, y), test.detector[0])
            multi_panel_detector.add_panel(new_panel)
    test.detector = multi_panel_detector
    test.stills_experiments[0].detector = multi_panel_detector
    test.det_param = DetectorParameterisationMultiPanel(multi_panel_detector, test.beam)
    # update the generated reflections
    test.generate_reflections()
    # Predict the reflections in place and put in a reflection manager
    ref_predictor = StillsExperimentsPredictor(test.stills_experiments)
    ref_predictor(test.reflections)
    test.refman = ReflectionManagerFactory.from_parameters_reflections_experiments(
        refman_phil_scope.extract(),
        test.reflections,
        test.stills_experiments,
        do_stills=True,
    )
    test.refman.finalise()
    # A non-hierarchical detector does not have panel groups, thus panels are
    # not treated independently wrt which reflections affect their parameters.
    # As before, setting 137 reflections as the minimum should leave all
    # parameters free, and should not remove any reflections
    options = ar_phil_scope.extract()
    options.min_nref_per_parameter = 137
    ar = AutoReduce(
        options,
        [test.det_param],
        [test.s0_param],
        [test.xlo_param],
        [test.xluc_param],
        gon_params=[],
        reflection_manager=test.refman,
    )
    ar.check_and_remove()
    assert ar.det_params[0].num_free() == 6
    assert ar.beam_params[0].num_free() == 3
    assert ar.xl_ori_params[0].num_free() == 3
    assert ar.xl_uc_params[0].num_free() == 6
    assert len(ar.reflection_manager.get_obs()) == 823
    # Setting reflections as the minimum should fix the detector parameters,
    # which removes that parameterisation. Because all reflections are recorded
    # on that detector, they will all be removed as well. This then affects all
    # other parameterisations, which will be removed.
    options = ar_phil_scope.extract()
    options.min_nref_per_parameter = 138
    ar = AutoReduce(
        options,
        [test.det_param],
        [test.s0_param],
        [test.xlo_param],
        [test.xluc_param],
        gon_params=[],
        reflection_manager=test.refman,
    )
    ar.check_and_remove()
    assert not ar.det_params
    assert not ar.beam_params
    assert not ar.xl_ori_params
    assert not ar.xl_uc_params
    assert len(ar.reflection_manager.get_obs()) == 0
# Test the functionality of the parameter 'auto reduction' extension modules
@pytest.fixture(scope="session")
def setup_test_sorting():
    """Build a 110-row reflection table plus a copy sorted by id then
    panel, verify the sort, and return (r, r_sorted, exp_ids) for the
    extension-module comparison tests below."""
    # Borrowed from tst_reflection_table function tst_find_overlapping
    N = 110
    r = flex.reflection_table.empty_standard(N)
    r["panel"] = flex.size_t([1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0] * 10)
    r["id"] = flex.int([1, 2, 1, 1, 2, 0, 1, 1, 1, 0, 1] * 10)
    exp_ids = flex.size_t([0, 1])
    for i in range(N):
        r["miller_index"][i] = (
            int(i // 10) - 5,
            i % 3,
            i % 7,
        )  # A nice bunch of miller indices
    # Filter out reflections to be used by refinement. Sorting of the filtered
    # reflections is required for the C++ extension modules to give their
    # performance benefit. Sorting is performed within the _filter_reflections
    # step: by id, then by panel.
    r_sorted = copy.deepcopy(r)
    r_sorted.sort("id")
    r_sorted.subsort("id", "panel")
    # Test that the unfiltered/unsorted table becomes filtered/sorted for id
    assert (r_sorted["id"] == r["id"].select(flex.sort_permutation(r["id"]))).count(
        False
    ) == 0
    # as above for panel within each id
    for ii in [0, 1, 2]:
        r_id = r.select(r["id"] == ii)
        r_sorted_id = r_sorted.select(r_sorted["id"] == ii)
        assert (
            r_sorted_id["panel"]
            == r_id["panel"].select(flex.sort_permutation(r_id["panel"]))
        ).count(False) == 0
    return (r, r_sorted, exp_ids)
def test_auto_reduction_parameter_extension_modules_part1(setup_test_sorting):
    """surpl_iter must reproduce the Python reference count of matching
    reflections on the SORTED table (for both int and size_t id columns)
    and disagree on the unsorted one."""
    # Cut-down original algorithm for AutoReduce._surplus_reflections
    from dials_refinement_helpers_ext import surpl_iter as surpl
    r, r_sorted, exp_ids = setup_test_sorting
    isel = flex.size_t()
    for exp_id in exp_ids:
        isel.extend((r["id"] == exp_id).iselection())
    res0 = len(isel)
    # Updated algorithm for _surplus_reflections, with templated id column for int and size_t
    res1_unsrt_int = surpl(r["id"], exp_ids).result
    res1_int = surpl(r_sorted["id"], exp_ids).result
    res1_sizet = surpl(flex.size_t(list(r_sorted["id"])), exp_ids).result
    # Check that unsorted list fails, while sorted succeeds for both int and size_t array types
    assert res0 != res1_unsrt_int
    assert res0 == res1_int
    assert res0 == res1_sizet
def test_auto_reduction_parameter_extension_modules_part2(setup_test_sorting):
    """uc_surpl_iter must reproduce the Python reference minimum count of
    reflections sensitive to each unit-cell derivative on the SORTED
    table (int and size_t id columns) and disagree on the unsorted one."""
    # Cut-down original algorithm for AutoReduce._unit_cell_surplus_reflections
    from dials_refinement_helpers_ext import uc_surpl_iter as uc_surpl
    r, r_sorted, exp_ids = setup_test_sorting
    isel = flex.size_t()
    for exp_id in exp_ids:
        isel.extend((r["id"] == exp_id).iselection())
    ref = r.select(isel)
    h = ref["miller_index"].as_vec3_double()
    # Two arbitrary dB/dp matrices standing in for unit-cell derivatives.
    dB_dp = flex.mat3_double([(1, 2, 3, 4, 5, 6, 7, 8, 9), (0, 1, 0, 1, 0, 1, 0, 1, 0)])
    nref_each_param = []
    for der in dB_dp:
        tst = (der * h).norms()
        nref_each_param.append((tst > 0.0).count(True))
    res0 = min(nref_each_param)
    # Updated algorithm for _unit_cell_surplus_reflections
    res1_unsrt_int = uc_surpl(r["id"], r["miller_index"], exp_ids, dB_dp).result
    res1_int = uc_surpl(r_sorted["id"], r_sorted["miller_index"], exp_ids, dB_dp).result
    res1_sizet = uc_surpl(
        flex.size_t(list(r_sorted["id"])), r_sorted["miller_index"], exp_ids, dB_dp
    ).result
    assert res0 != res1_unsrt_int
    assert res0 == res1_int
    assert res0 == res1_sizet
def test_auto_reduction_parameter_extension_modules_part3(setup_test_sorting):
    """pg_surpl_iter must reproduce the Python reference per-panel-group
    reflection count on the SORTED table (int and size_t id columns) and
    disagree on the unsorted one."""
    # Cut-down original algorithm for AutoReduce._panel_gp_surplus_reflections
    from dials_refinement_helpers_ext import pg_surpl_iter as pg_surpl
    r, r_sorted, exp_ids = setup_test_sorting
    isel = flex.size_t()
    pnl_ids = [0, 1]
    for exp_id in exp_ids:
        sub_expID = (r["id"] == exp_id).iselection()
        sub_panels_expID = r["panel"].select(sub_expID)
        for pnl in pnl_ids:
            isel.extend(sub_expID.select(sub_panels_expID == pnl))
    res0 = len(isel)
    # Updated algorithm for _panel_gp_surplus_reflections
    res1_unsrt_int = pg_surpl(r["id"], r["panel"], pnl_ids, exp_ids, 0).result
    res1_int = pg_surpl(r_sorted["id"], r_sorted["panel"], pnl_ids, exp_ids, 0).result
    res1_sizet = pg_surpl(
        flex.size_t(list(r_sorted["id"])), r_sorted["panel"], pnl_ids, exp_ids, 0
    ).result
    assert res0 != res1_unsrt_int
    assert res0 == res1_int
    assert res0 == res1_sizet
| 34.993884
| 95
| 0.688631
|
4a08473ad0d4f3cd00c2212d0dcb6dc3ebcf29e0
| 493
|
py
|
Python
|
dictionaries.py
|
Roicochoa/astr-119-hw-1
|
1fb4efd072189d03a6ec8681b354d23adcd3e56c
|
[
"MIT"
] | null | null | null |
dictionaries.py
|
Roicochoa/astr-119-hw-1
|
1fb4efd072189d03a6ec8681b354d23adcd3e56c
|
[
"MIT"
] | 1
|
2018-10-09T20:13:15.000Z
|
2018-10-09T20:13:15.000Z
|
dictionaries.py
|
Roicochoa/astr-119-hw-1
|
1fb4efd072189d03a6ec8681b354d23adcd3e56c
|
[
"MIT"
] | null | null | null |
# Demo of the dictionary data structure: each element is a key : value pair.
example_dict = {
    "class": "Astr 119",
    "prof": "Brant",
    "awesomeness": 10,
}

# Report the container's type (prints <class 'dict'>).
print(type(example_dict))

# Look a value up by its key.
course = example_dict["class"]
print(course)

# Values can be updated in place through their key.
example_dict["awesomeness"] += 1  # increases awesomeness

# Show the whole dictionary at once...
print(example_dict)

# ...and then one key/value pair per line.
for key, value in example_dict.items():
    print(key, value)
| 21.434783
| 55
| 0.732252
|
4a084774d58e4c8ee765037998fe912d9a066658
| 3,712
|
py
|
Python
|
PythonCode/LatencyArbitrageAnalysis/utils/Dtypes.py
|
ericbudish/HFT-Races
|
fe9ffc2da98b529e43e25800695aad698b46b10a
|
[
"BSD-3-Clause"
] | 11
|
2021-09-16T10:05:30.000Z
|
2022-02-26T00:18:26.000Z
|
PythonCode/LatencyArbitrageAnalysis/utils/Dtypes.py
|
ericbudish/HFT-Races
|
fe9ffc2da98b529e43e25800695aad698b46b10a
|
[
"BSD-3-Clause"
] | null | null | null |
PythonCode/LatencyArbitrageAnalysis/utils/Dtypes.py
|
ericbudish/HFT-Races
|
fe9ffc2da98b529e43e25800695aad698b46b10a
|
[
"BSD-3-Clause"
] | 4
|
2021-09-23T13:41:54.000Z
|
2022-01-11T18:10:13.000Z
|
'''
dtypes.py
Pandas data types for each data field of the exchange message data.
Users should refer to Section 3 of the Code and Data Appendix for
detailed instructions on how to pre-process the exchange message
data.
'''
## dtypes for exchange message data after pre-processing
dtypes_raw_msgs = {
    'ClientOrderID': 'O', 'UniqueOrderID': 'O', 'TradeMatchID': 'O',
    'UserID': 'O', 'FirmID': 'O', 'SessionID': 'float64',
    'MessageTimestamp': 'O', 'MessageType': 'O', 'OrderType': 'O',
    'ExecType': 'O', 'OrderStatus': 'O', 'TradeInitiator': 'O',
    'TIF': 'O', 'CancelRejectReason': 'O',
    'Side': 'O', 'OrderQty': 'float64', 'DisplayQty': 'float64',
    'LimitPrice': 'float64', 'StopPrice': 'float64',
    'ExecutedPrice': 'float64', 'ExecutedQty': 'float64', 'LeavesQty': 'float64',
    'QuoteRelated': 'bool',
    'BidPrice': 'float64', 'BidSize': 'float64',
    'AskPrice': 'float64', 'AskSize': 'float64',
    'RegularHour': 'bool'}
# OpenAuctionTrade and AuctionTrade contain NAs in the raw data (they are
# only populated in trade confirmation messages), so their dtype is not
# fixed when reading the raw messages in; Classify_Messages.py replaces
# the NAs with False.
## dtypes for message data after Classify_Messages.py:
## every raw-message field (same order) plus the columns added by
## message classification and event categorization.
dtypes_msgs = dict(dtypes_raw_msgs)
dtypes_msgs.update({
    'OpenAuctionTrade': 'bool', 'AuctionTrade': 'bool',
    'UnifiedMessageType': 'O',
    'PrevPriceLvl': 'float64', 'PrevQty': 'float64', 'PriceLvl': 'float64',
    'Categorized': 'bool', 'EventNum': 'float64', 'Event': 'O',
    'MinExecPriceLvl': 'float64', 'MaxExecPriceLvl': 'float64',
    'PrevBidPriceLvl': 'float64', 'PrevBidQty': 'float64', 'BidPriceLvl': 'float64',
    'BidCategorized': 'bool', 'BidEventNum': 'float64', 'BidEvent': 'O',
    'BidMinExecPriceLvl': 'float64', 'BidMaxExecPriceLvl': 'float64',
    'PrevAskPriceLvl': 'float64', 'PrevAskQty': 'float64', 'AskPriceLvl': 'float64',
    'AskCategorized': 'bool', 'AskEventNum': 'float64', 'AskEvent': 'O',
    'AskMinExecPriceLvl': 'float64', 'AskMaxExecPriceLvl': 'float64'})
## dtypes for top-of-book data constructed in Prep_Order_Book.py
dtypes_top = {
    'MessageTimestamp': 'O', 'Side': 'O', 'UnifiedMessageType': 'O',
    'RegularHour': 'bool', 'OpenAuctionTrade': 'bool', 'AuctionTrade': 'bool',
    'BestBid': 'float64', 'BestBidQty': 'float64', 'BestAsk': 'float64', 'BestAskQty': 'float64',
    'Spread': 'float64', 'MidPt': 'float64',
    'last_BestBid': 'float64', 'last_BestAsk': 'float64', 'last_MidPt': 'float64',
    't_last_chg_BestBid': 'O', 't_last_chg_BestAsk': 'O', 't_last_chg_MidPt': 'O',
    'Corrections_OrderAccept': 'float64', 'Corrections_Trade': 'float64',
    'Corrections_notA': 'float64',
    'Corrections_OrderAccept_h': 'float64', 'Corrections_Trade_h': 'float64',
    'Corrections_notA_h': 'float64',
    'DepthKilled': 'float64', 'DepthKilled_h': 'float64',
    'BestBid_TickSize': 'float64', 'BestAsk_TickSize': 'float64', 'Diff_TickSize': 'O',
    'Trade_Pos': 'O', 'BookUpdateParentMsgID': 'float64'}
| 53.797101
| 98
| 0.640356
|
4a0848a35a3418f89ee9237a7804483f87acc661
| 967
|
py
|
Python
|
tests/packages/sub_package/kitty/speak/purr.py
|
jayvdb/Nuitka
|
0ff702e065b1b53231ba0cae451385a3da0fe766
|
[
"Apache-2.0"
] | 1
|
2019-03-31T09:56:11.000Z
|
2019-03-31T09:56:11.000Z
|
tests/packages/sub_package/kitty/speak/purr.py
|
jayvdb/Nuitka
|
0ff702e065b1b53231ba0cae451385a3da0fe766
|
[
"Apache-2.0"
] | null | null | null |
tests/packages/sub_package/kitty/speak/purr.py
|
jayvdb/Nuitka
|
0ff702e065b1b53231ba0cae451385a3da0fe766
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019, Kay Hayen, mailto:kay.hayen@gmail.com
#
# Python test originally created or extracted from other peoples work. The
# parts from me are licensed as below. It is at least Free Software where
# it's copied from other people. In these cases, that will normally be
# indicated.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import print_function
def speak():
    """Print the kitty's purring sound to stdout."""
    sound = "mrrruu"
    print(sound)
| 40.291667
| 78
| 0.716649
|
4a084a2c4f357ee666a272e263a1bc855c8b28e8
| 5,642
|
py
|
Python
|
examples/pytorch/dimenet/modules/basis_utils.py
|
ketyi/dgl
|
a1b859c29b63a673c148d13231a49504740e0e01
|
[
"Apache-2.0"
] | 9,516
|
2018-12-08T22:11:31.000Z
|
2022-03-31T13:04:33.000Z
|
examples/pytorch/dimenet/modules/basis_utils.py
|
ketyi/dgl
|
a1b859c29b63a673c148d13231a49504740e0e01
|
[
"Apache-2.0"
] | 2,494
|
2018-12-08T22:43:00.000Z
|
2022-03-31T21:16:27.000Z
|
examples/pytorch/dimenet/modules/basis_utils.py
|
ketyi/dgl
|
a1b859c29b63a673c148d13231a49504740e0e01
|
[
"Apache-2.0"
] | 2,529
|
2018-12-08T22:56:14.000Z
|
2022-03-31T13:07:41.000Z
|
import math

import numpy as np
import sympy as sym
from scipy import special as sp
from scipy.optimize import brentq
def Jn(r, n):
    """Numerical spherical Bessel function of the first kind, j_n(r).

    Uses the half-integer-order cylindrical Bessel function:
        j_n(r) = sqrt(pi / (2 r)) * J_{n+1/2}(r)

    `r` and `n` may be scalars or broadcastable arrays (e.g. both of the
    same length); the result has the broadcast shape.
    """
    prefactor = np.sqrt(np.pi / (2 * r))
    return prefactor * sp.jv(n + 0.5, r)
def Jn_zeros(n, k):
    """
    n: int
    k: int
    res: array of shape [n, k]
    Compute the first k zeros of the spherical bessel functions up to order n (excluded)
    """
    zerosj = np.zeros((n, k), dtype="float32")
    # Order 0: j_0(r) = sin(r)/r, whose zeros are exactly m*pi, m = 1..k.
    zerosj[0] = np.arange(1, k + 1) * np.pi
    # The zeros of consecutive orders interlace, so each zero of order i is
    # bracketed by two consecutive zeros of order i-1; solve each bracket
    # with Brent's method. Extra points (k + n - 1 of them) are kept so that
    # every higher order still has k brackets available.
    points = np.arange(1, k + n) * np.pi
    racines = np.zeros(k + n - 1, dtype="float32")
    for i in range(1, n):
        for j in range(k + n - 1 - i):
            foo = brentq(Jn, points[j], points[j + 1], (i,))
            racines[j] = foo
        # The zeros just found become the brackets for the next order.
        points = racines
        zerosj[i][:k] = racines[:k]
    return zerosj
def spherical_bessel_formulas(n):
    """Symbolic spherical Bessel functions j_0 .. j_{n-1} of the symbol x.

    Built with the Rayleigh formula
        j_i(x) = (-x)^i * (d / (x dx))^i (sin(x) / x),
    returning a list of n sympy expressions.
    """
    x = sym.symbols('x')
    formulas = [sym.sin(x) / x]
    current = sym.sin(x) / x
    for order in range(1, n):
        derivative = sym.diff(current, x) / x
        formulas.append(sym.simplify(derivative * (-x) ** order))
        current = sym.simplify(derivative)
    return formulas
def bessel_basis(n, k):
    """
    n: int
    k: int
    res: [n, k]
    n * k sympy functions
    Computes the sympy formulas for the normalized and rescaled spherical bessel functions up to
    order n (excluded) and maximum frequency k (excluded).
    """
    zeros = Jn_zeros(n, k)
    normalizer = []
    # Per-(order, frequency) normalization constant 1/sqrt(0.5 * j_{order+1}(z)^2),
    # where z is the corresponding zero of j_order.
    for order in range(n):
        normalizer_tmp = []
        for i in range(k):
            normalizer_tmp += [0.5 * Jn(zeros[order, i], order + 1) ** 2]
        normalizer_tmp = 1 / np.array(normalizer_tmp) ** 0.5
        normalizer += [normalizer_tmp]
    f = spherical_bessel_formulas(n)
    x = sym.symbols('x')
    bess_basis = []
    # Rescale each j_order so its i-th zero lands at x = 1, then normalize.
    for order in range(n):
        bess_basis_tmp = []
        for i in range(k):
            bess_basis_tmp += [sym.simplify(normalizer[order][i] * f[order].subs(x, zeros[order, i] * x))]
        bess_basis += [bess_basis_tmp]
    return bess_basis
def sph_harm_prefactor(l, m):
    """
    l: int
    m: int
    res: float
    Computes the constant pre-factor for the spherical harmonic of degree l and order m:
        sqrt((2l + 1) * (l - |m|)! / (4 * pi * (l + |m|)!))
    input:
        l: int, l>=0
        m: int, -l<=m<=l
    """
    # FIX: np.math was merely a deprecated alias of the stdlib math module
    # and was removed in NumPy 2.0; call math.factorial directly.
    return ((2 * l + 1) * math.factorial(l - abs(m))
            / (4 * np.pi * math.factorial(l + abs(m)))) ** 0.5
def associated_legendre_polynomials(l, zero_m_only=True):
    """
    l: int
    return: l sympy functions
    Computes sympy formulas of the associated legendre polynomials up to order l (excluded).
    P_l_m[j][i] holds P_j^i(z); when zero_m_only is True only the m=0
    column is filled.
    """
    z = sym.symbols('z')
    # Ragged list: row j has entries for m = 0..j.
    P_l_m = [[0] * (j + 1) for j in range(l)]
    P_l_m[0][0] = 1
    if l > 0:
        P_l_m[1][0] = z
        # Legendre recurrence in degree for m = 0:
        # j P_j = (2j-1) z P_{j-1} - (j-1) P_{j-2}
        for j in range(2, l):
            P_l_m[j][0] = sym.simplify(
                ((2 * j - 1) * z * P_l_m[j - 1][0] - (j - 1) * P_l_m[j - 2][0]) / j)
        if not zero_m_only:
            for i in range(1, l):
                # Diagonal term: P_i^i = (1 - 2i) P_{i-1}^{i-1}
                P_l_m[i][i] = sym.simplify((1 - 2 * i) * P_l_m[i - 1][i - 1])
                if i + 1 < l:
                    # First off-diagonal: P_{i+1}^i = (2i+1) z P_i^i
                    P_l_m[i + 1][i] = sym.simplify((2 * i + 1) * z * P_l_m[i][i])
                # General recurrence in degree at fixed order i:
                # (j-i) P_j^i = (2j-1) z P_{j-1}^i - (i+j-1) P_{j-2}^i
                for j in range(i + 2, l):
                    P_l_m[j][i] = sym.simplify(((2 * j - 1) * z * P_l_m[j - 1][i] - (i + j - 1) * P_l_m[j - 2][i]) / (j - i))
    return P_l_m
def real_sph_harm(l, zero_m_only=True, spherical_coordinates=True):
    """
    return: a sympy function list of length l, for i-th index of the list, it is also a list of length (2 * i + 1)
    Computes formula strings of the real part of the spherical harmonics up to order l (excluded).
    Variables are either cartesian coordinates x,y,z on the unit sphere or spherical coordinates phi and theta.
    """
    if not zero_m_only:
        # S_m / C_m build sin(m*phi) and cos(m*phi) as polynomials in the
        # cartesian symbols x, y via the Chebyshev-style recurrence.
        S_m = [0]
        C_m = [1]
        for i in range(1, l):
            x = sym.symbols('x')
            y = sym.symbols('y')
            S_m += [x * S_m[i - 1] + y * C_m[i - 1]]
            C_m += [x * C_m[i - 1] - y * S_m[i - 1]]
    P_l_m = associated_legendre_polynomials(l, zero_m_only)
    if spherical_coordinates:
        # Substitute z = cos(theta) into the Legendre part; integer zeros
        # in the ragged table have no .subs and are skipped.
        theta = sym.symbols('theta')
        z = sym.symbols('z')
        for i in range(len(P_l_m)):
            for j in range(len(P_l_m[i])):
                if type(P_l_m[i][j]) != int:
                    P_l_m[i][j] = P_l_m[i][j].subs(z, sym.cos(theta))
        if not zero_m_only:
            # NOTE(review): S_m[0] is the plain int 0 and `x`/`y` are only
            # bound when l > 1, so this branch appears to assume l >= 2 with
            # zero_m_only=False — confirm against callers.
            phi = sym.symbols('phi')
            for i in range(len(S_m)):
                S_m[i] = S_m[i].subs(x, sym.sin(theta) * sym.cos(phi)).subs(y, sym.sin(theta) * sym.sin(phi))
            for i in range(len(C_m)):
                C_m[i] = C_m[i].subs(x, sym.sin(theta) * sym.cos(phi)).subs(y, sym.sin(theta) * sym.sin(phi))
    # Row i holds the 2i+1 harmonics of degree i, indexed m = 0, 1..i, -1..-i.
    Y_func_l_m = [['0'] * (2 * j + 1) for j in range(l)]
    for i in range(l):
        Y_func_l_m[i][0] = sym.simplify(sph_harm_prefactor(i, 0) * P_l_m[i][0])
    if not zero_m_only:
        # Positive orders use the cosine part, negative orders the sine part,
        # each with the sqrt(2) real-harmonic normalization.
        for i in range(1, l):
            for j in range(1, i + 1):
                Y_func_l_m[i][j] = sym.simplify(2 ** 0.5 * sph_harm_prefactor(i, j) * C_m[j] * P_l_m[i][j])
        for i in range(1, l):
            for j in range(1, i + 1):
                Y_func_l_m[i][-j] = sym.simplify(2 ** 0.5 * sph_harm_prefactor(i, -j) * S_m[j] * P_l_m[i][j])
    return Y_func_l_m
| 31.875706
| 125
| 0.529777
|
4a084ab148500a8359188ecc2ded819e02881d3a
| 79
|
py
|
Python
|
vq/__init__.py
|
anonymouspaperowner/range-search
|
d382278f5db853b645bcd6b83cc4458f386e8f20
|
[
"MIT"
] | null | null | null |
vq/__init__.py
|
anonymouspaperowner/range-search
|
d382278f5db853b645bcd6b83cc4458f386e8f20
|
[
"MIT"
] | null | null | null |
vq/__init__.py
|
anonymouspaperowner/range-search
|
d382278f5db853b645bcd6b83cc4458f386e8f20
|
[
"MIT"
] | 1
|
2022-01-08T02:42:27.000Z
|
2022-01-08T02:42:27.000Z
|
from .pq import PQ
from .rq import RQ
from .opq import OPQ
from .neq import NEQ
| 19.75
| 20
| 0.759494
|
4a084abb6969fd3db40118ef058cc91849c66b91
| 1,342
|
py
|
Python
|
ted_talk_video_downloader/__main__.py
|
WagnoLeaoSergio/ted_talk_video_downloader
|
2a91bb41307fc9814500670156d80361df8781b2
|
[
"Unlicense"
] | 1
|
2021-12-22T23:21:35.000Z
|
2021-12-22T23:21:35.000Z
|
ted_talk_video_downloader/__main__.py
|
WagnoLeaoSergio/ted_talk_video_downloader
|
2a91bb41307fc9814500670156d80361df8781b2
|
[
"Unlicense"
] | null | null | null |
ted_talk_video_downloader/__main__.py
|
WagnoLeaoSergio/ted_talk_video_downloader
|
2a91bb41307fc9814500670156d80361df8781b2
|
[
"Unlicense"
] | null | null | null |
import argparse # pragma: no cover
from .downloader import TED_Downloader # pragma: no cover
def main() -> None:  # pragma: no cover
    """
    The main function executes on commands:
    `python -m ted_talk_video_downloader` and `$ ted_talk_video_downloader `.
    This is the program's entry point: parse the CLI arguments, then
    resolve and download the requested TED talk video.
    """
    parser = argparse.ArgumentParser(
        description="ted_talk_video_downloader.",
        epilog="Write the url of the video to download it.",
    )
    # Positional arguments.
    parser.add_argument(
        "url",
        type=str,
        help="The URL for the video's website.",
    )
    parser.add_argument(
        "output",
        type=str,
        default="",
        help="Path where the video will be saved.",
    )
    # Optional arguments.
    parser.add_argument(
        "--name",
        type=str,
        required=False,
        default="new_video",
        help="Name of the video when saved.",
    )
    parser.add_argument(
        "--quality",
        type=str,
        required=False,
        default="240p",
        choices=["240p", "320p", "480p"],
        help="Set the video's quality (if available)",
    )
    args = parser.parse_args()

    downloader = TED_Downloader()
    downloader.process_mp4_filename(args.url)
    downloader.download_and_save(args.name, args.output)


if __name__ == "__main__":  # pragma: no cover
    main()
| 26.313725
| 77
| 0.607303
|
4a084ac595747ffed3971cd8f1a5135029c7369c
| 6,660
|
py
|
Python
|
kubernetes_asyncio/client/models/v1_pod_template_list.py
|
olitheolix/kubernetes_asyncio
|
344426793e4e4b653bcd8e4a29c6fa4766e1fff7
|
[
"Apache-2.0"
] | 1
|
2020-03-25T01:24:27.000Z
|
2020-03-25T01:24:27.000Z
|
kubernetes_asyncio/client/models/v1_pod_template_list.py
|
olitheolix/kubernetes_asyncio
|
344426793e4e4b653bcd8e4a29c6fa4766e1fff7
|
[
"Apache-2.0"
] | null | null | null |
kubernetes_asyncio/client/models/v1_pod_template_list.py
|
olitheolix/kubernetes_asyncio
|
344426793e4e4b653bcd8e4a29c6fa4766e1fff7
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: v1.10.1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class V1PodTemplateList(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'api_version': 'str',
'items': 'list[V1PodTemplate]',
'kind': 'str',
'metadata': 'V1ListMeta'
}
attribute_map = {
'api_version': 'apiVersion',
'items': 'items',
'kind': 'kind',
'metadata': 'metadata'
}
def __init__(self, api_version=None, items=None, kind=None, metadata=None): # noqa: E501
"""V1PodTemplateList - a model defined in Swagger""" # noqa: E501
self._api_version = None
self._items = None
self._kind = None
self._metadata = None
self.discriminator = None
if api_version is not None:
self.api_version = api_version
self.items = items
if kind is not None:
self.kind = kind
if metadata is not None:
self.metadata = metadata
    @property
    def api_version(self):
        """Gets the api_version of this V1PodTemplateList.  # noqa: E501

        APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#resources  # noqa: E501

        :return: The api_version of this V1PodTemplateList.  # noqa: E501
        :rtype: str
        """
        # Plain read of the backing field; no validation on access.
        return self._api_version
@api_version.setter
def api_version(self, api_version):
"""Sets the api_version of this V1PodTemplateList.
APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#resources # noqa: E501
:param api_version: The api_version of this V1PodTemplateList. # noqa: E501
:type: str
"""
self._api_version = api_version
@property
def items(self):
"""Gets the items of this V1PodTemplateList. # noqa: E501
List of pod templates # noqa: E501
:return: The items of this V1PodTemplateList. # noqa: E501
:rtype: list[V1PodTemplate]
"""
return self._items
@items.setter
def items(self, items):
"""Sets the items of this V1PodTemplateList.
List of pod templates # noqa: E501
:param items: The items of this V1PodTemplateList. # noqa: E501
:type: list[V1PodTemplate]
"""
if items is None:
raise ValueError("Invalid value for `items`, must not be `None`") # noqa: E501
self._items = items
@property
def kind(self):
"""Gets the kind of this V1PodTemplateList. # noqa: E501
Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#types-kinds # noqa: E501
:return: The kind of this V1PodTemplateList. # noqa: E501
:rtype: str
"""
return self._kind
@kind.setter
def kind(self, kind):
"""Sets the kind of this V1PodTemplateList.
Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#types-kinds # noqa: E501
:param kind: The kind of this V1PodTemplateList. # noqa: E501
:type: str
"""
self._kind = kind
@property
def metadata(self):
"""Gets the metadata of this V1PodTemplateList. # noqa: E501
Standard list metadata. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#types-kinds # noqa: E501
:return: The metadata of this V1PodTemplateList. # noqa: E501
:rtype: V1ListMeta
"""
return self._metadata
@metadata.setter
def metadata(self, metadata):
"""Sets the metadata of this V1PodTemplateList.
Standard list metadata. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#types-kinds # noqa: E501
:param metadata: The metadata of this V1PodTemplateList. # noqa: E501
:type: V1ListMeta
"""
self._metadata = metadata
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, V1PodTemplateList):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| 33.134328
| 295
| 0.617868
|
4a084b3b65662a2880fd89019620fb55ff2463c0
| 6,807
|
py
|
Python
|
env.py
|
andy1771/Multi-Agent-DRL-Routing
|
2e0d56132cab181b686005bd69ec79344f8f5907
|
[
"MIT"
] | 2
|
2021-12-01T07:40:54.000Z
|
2022-03-01T02:31:36.000Z
|
env.py
|
andy1771/Multi-Agent-DRL-Routing
|
2e0d56132cab181b686005bd69ec79344f8f5907
|
[
"MIT"
] | null | null | null |
env.py
|
andy1771/Multi-Agent-DRL-Routing
|
2e0d56132cab181b686005bd69ec79344f8f5907
|
[
"MIT"
] | null | null | null |
import random
from collections import deque
from networkx.algorithms.shortest_paths.unweighted import single_source_shortest_path as sssp
from agent import Agent
import numpy as np
import torch
GAMMA = 0.90
class Packet():
def __init__(self, source, destination):
self.source = source
self.destination = destination
self.hops = 0
self.states = []
self.queuetime = []
self.nodes = [source]
self.actions = []
self.rewards = []
class RoutingEnv():
def __init__(self, graph):
"""
It initializes the environemt for the routing agents.
The functionalities include:
1. Read the graph and set the nodes and links.
2. Calculate the shortest paths between nodes.
The state contains:
- Packet Destination
- Number of packets
- Number of packets in neighbours queue
- Previous 3 actions
"""
self.packets = 100
self.transmitreward = self.packets/2
self.learning_rate = 3e-4
self.graph = graph
self.neighbours = {}
for node in self.graph.nodes():
self.neighbours[node] = [n for n in self.graph.neighbors(node)]
#Initialize nodes queue, channels and agents
self.queues = {}
self.channels = {}
self.agents = {}
self.previousActions = {}
self.optimizers = {}
self.entropy_term = {}
self.log_probs = {}
self.critic_value = {}
self.rewards = {}
for node in self.graph.nodes():
neighbors = len(list(self.graph.neighbors(node)))
inputs = 5 + neighbors
self.agents[node] = Agent(inputs, neighbors)
self.optimizers[node] = torch.optim.Adam(self.agents[node].parameters(),lr=self.learning_rate)
#Calculate forwarding table
self.forwardingTable = {}
for node in self.graph.nodes():
self.forwardingTable[node] = {}
shortest_p = sssp(self.graph, node)
for other_node in shortest_p:
if other_node != node:
self.forwardingTable[node][other_node] = shortest_p[other_node][1]
def reset(self):
"""
It resets the environment.
The functionalities include:
1. Inserting the initial 100 packets in the queue.
"""
self.queues = {}
self.channels = {}
self.previousActions = {}
self.entropy_term = {}
self.log_probs = {}
self.critic_value = {}
self.rewards = {}
for node in self.graph.nodes():
self.queues[node] = deque()
self.channels[node] = {}
self.entropy_term[node] = 0
self.log_probs[node] = []
self.critic_value[node] = []
self.rewards[node] = []
for n in self.graph.neighbors(node):
self.channels[node][n] = deque()
nodes = [n for n in self.graph.nodes()]
while self.packets > 0:
source = random.choice(nodes)
destination = random.choice(nodes)
if source == destination:
continue
pkt = Packet(source, destination)
pkt.queuetime.append(len(self.queues[source]))
self.queues[source].append(pkt)
self.packets = self.packets - 1
for node in nodes:
ngbrs = list(self.graph.neighbors(node))
ngbrs = ngbrs*2
self.previousActions[node] = ngbrs[:3]
self.packets = 50
def step(self, node, action, packet, observation):
"""
It fowards the packet in the link.
The functionalities include:
1. Forwarding the packet to the destination via action.
2. Updates the information in the packet.
3. If packet reaches the destination, then updates replay buffers.
"""
packet.actions.append(action)
action = self.neighbours[node][action]
"""
This is for the forwarding forwardingTable
"""
#action = self.forwardingTable[node][packet.destination]
packet.states.append(observation)
packet.nodes.append(action)
packet.hops = packet.hops + 1
self.previousActions[node].append(action)
if action == packet.destination:
reward = np.zeros(packet.hops)
values = np.zeros(packet.hops)
policy = [[]]*packet.hops
reward[packet.hops - 1] = int(-1*self.transmitreward)
for t in reversed(range(packet.hops-1)):
reward[t] = int(reward[t+1]*GAMMA - self.transmitreward - packet.queuetime[t+1])
for i in range(packet.hops):
a,policy[i] = self.agents[packet.nodes[i]].forward(packet.states[i])
values[i] = a.detach().numpy()[0,0]
for i in range(packet.hops-1):
self.rewards[packet.nodes[i]].append(reward[i])
self.critic_value[packet.nodes[i]].append(values[i])
log_prob = torch.log(policy[i].squeeze(0)[packet.actions[i]])
self.log_probs[packet.nodes[i]].append(log_prob)
entropy = -np.sum(np.mean(policy[i].detach().numpy()) * np.log(policy[i].detach().numpy()))
self.entropy_term[packet.nodes[i]] += entropy
else:
self.channels[node][action].append(packet)
def run(self):
"""
Send the packets from channel to queue
"""
done = True
for node in self.graph.nodes():
for n in self.graph.neighbors(node):
try:
packet = self.channels[node][n].popleft()
packet.queuetime.append(len(self.queues[n]))
self.queues[n].append(packet)
done = False
except:
pass
return done
def render(self):
"""
It prints the information on the screen, as needed.
"""
for node in self.graph.nodes():
print(len(self.queues[node]), end=" ")
print("\n")
def getState(self, node):
"""
Returns the state of the node, containing:
- Packet Destination
- Number of packets
- Number of packets in neighbours queue
- Previous 3 actions
"""
try:
packet = self.queues[node].popleft()
except:
return {}, True, None
neighborLengths = [len(self.queues[n]) for n in self.graph.neighbors(node)]
state = [packet.destination, len(self.queues[node]), *neighborLengths, *self.previousActions[node][-3:]]
state = np.array(state)
return state, False, packet
| 33.367647
| 112
| 0.558836
|
4a084b4b66e4c212ae37f57dcc4a2874999fa4a3
| 3,134
|
py
|
Python
|
ColorMasking.py
|
tonyyu0822/RiceBraille
|
f2f1b3f40f7ce3fa32ea4ffbb22e7e403abd1ece
|
[
"MIT"
] | 1
|
2019-11-10T00:02:02.000Z
|
2019-11-10T00:02:02.000Z
|
ColorMasking.py
|
tonyyu0822/RiceBraille
|
f2f1b3f40f7ce3fa32ea4ffbb22e7e403abd1ece
|
[
"MIT"
] | null | null | null |
ColorMasking.py
|
tonyyu0822/RiceBraille
|
f2f1b3f40f7ce3fa32ea4ffbb22e7e403abd1ece
|
[
"MIT"
] | 1
|
2020-11-03T20:01:17.000Z
|
2020-11-03T20:01:17.000Z
|
# import the necessary packages
import numpy as np
import argparse
import cv2 as cv
# construct the argument parse and parse the arguments
ap = argparse.ArgumentParser()
ap.add_argument("-i", "--image", help="path to the image")
args = vars(ap.parse_args())
def set_masks(colors, delta):
bounds = [(np.array([color - delta, 100, 100]), np.array([color + delta, 255, 255])) for color in colors]
masks = [cv.inRange(hsv, bound[0], bound[1]) for bound in bounds]
mask = masks[0]
for i in range(1, len(masks)):
mask = mask | masks[i]
return mask
# load the image
image = cv.imread("images/blurNight.jpg")
cv.namedWindow("Display Window", cv.WINDOW_AUTOSIZE)
# Take each frame
# Convert BGR to HSV
hsv = cv.cvtColor(image, cv.COLOR_BGR2HSV)
# define range of blue color in HSV
#### Collin's Code #####
colors = [46, 100, 166, 130, 28, 146]
delta = 10
colormask = set_masks(colors, delta)
res1 = cv.bitwise_and(image, image, mask=colormask)
#########################
#### Original un-simplified #########
delta = 2.5
lower_green = np.array([46 - delta, 100, 100])
upper_green = np.array([46 + delta, 255, 255])
lower_blue = np.array([100 - delta, 100, 100])
upper_blue = np.array([100 + delta, 255, 255])
lower_pink = np.array([166 - delta, 90, 200])
upper_pink = np.array([166 + delta, 200, 255])
lower_purple = np.array([130 - delta, 0, 0])
upper_purple = np.array([130 + delta, 255, 255])
lower_yellow = np.array([28 - delta, 0, 0])
upper_yellow = np.array([28 + delta, 255, 255])
lower_purple_two = np.array([146 - delta, 0, 0])
upper_purple_two = np.array([146 + delta, 255, 255])
####################################
# Threshold the HSV image to get only blue colors
# Threshold the HSV image to get only green colors
mask_green = cv.inRange(hsv, lower_green, upper_green)
# Threshold for blue
mask_blue = cv.inRange(hsv, lower_blue, upper_blue)
mask_pink = cv.inRange(hsv, lower_pink, upper_pink)
mask_purple = cv.inRange(hsv, lower_purple, upper_purple)
mask_yellow = cv.inRange(hsv, lower_yellow, upper_yellow)
############ Aryan's Code ######################
def maskMaker(mask_values, delta_h, delta_s):
mask = False
for val in mask_values:
cur_top = np.array([(val[0]*0.5)+delta_h, (val[1]*255.0/100)+delta_s, (val[2]*255.0/100)+delta_s])
cur_bot = np.array([(val[0]*0.5)-delta_h, (val[1]*255.0/100)-delta_s, (val[2]*255.0/100)-delta_s])
cur_mask = cv.inRange(hsv, cur_bot, cur_top)
mask = mask | cur_mask
return mask
'''
green = (94, 61, 89)
blue = (196, 90, 90)
purple = (259, 36, 100)
pink = (332, 37, 100)
'''
red = (360, 72, 47)
orange = (18, 71, 64)
purple_two = (318, 71, 43)
yellow = (50, 64, 64)
my_vals = [red, orange, purple_two, yellow]
# Bitwise-AND mask and original image
res = cv.bitwise_and(image, image, mask=(maskMaker(my_vals, 10, 50)))
print(image.shape)
cv.imwrite("original.jpg", image)
cv.imwrite("mask.jpg", mask_blue)
cv.imwrite("res.jpg", res)
'''
cv.imshow('Display Window', image)
cv.waitKey(0)
cv.imshow('mask', mask)
cv.waitKey(0)
cv.imshow('res', res)
cv.waitKey(0)
'''
############################
| 27.982143
| 109
| 0.647096
|
4a084bb4be03a905b3cc7b9e842e15a7c077c7a2
| 6,901
|
py
|
Python
|
env/lib/python3.8/site-packages/plotly/graph_objs/splom/unselected/_marker.py
|
acrucetta/Chicago_COVI_WebApp
|
a37c9f492a20dcd625f8647067394617988de913
|
[
"MIT",
"Unlicense"
] | 11,750
|
2015-10-12T07:03:39.000Z
|
2022-03-31T20:43:15.000Z
|
env/lib/python3.8/site-packages/plotly/graph_objs/splom/unselected/_marker.py
|
acrucetta/Chicago_COVI_WebApp
|
a37c9f492a20dcd625f8647067394617988de913
|
[
"MIT",
"Unlicense"
] | 2,951
|
2015-10-12T00:41:25.000Z
|
2022-03-31T22:19:26.000Z
|
env/lib/python3.8/site-packages/plotly/graph_objs/splom/unselected/_marker.py
|
acrucetta/Chicago_COVI_WebApp
|
a37c9f492a20dcd625f8647067394617988de913
|
[
"MIT",
"Unlicense"
] | 2,623
|
2015-10-15T14:40:27.000Z
|
2022-03-28T16:05:50.000Z
|
from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class Marker(_BaseTraceHierarchyType):
# class properties
# --------------------
_parent_path_str = "splom.unselected"
_path_str = "splom.unselected.marker"
_valid_props = {"color", "opacity", "size"}
# color
# -----
@property
def color(self):
"""
Sets the marker color of unselected points, applied only when a
selection exists.
The 'color' property is a color and may be specified as:
- A hex string (e.g. '#ff0000')
- An rgb/rgba string (e.g. 'rgb(255,0,0)')
- An hsl/hsla string (e.g. 'hsl(0,100%,50%)')
- An hsv/hsva string (e.g. 'hsv(0,100%,100%)')
- A named CSS color:
aliceblue, antiquewhite, aqua, aquamarine, azure,
beige, bisque, black, blanchedalmond, blue,
blueviolet, brown, burlywood, cadetblue,
chartreuse, chocolate, coral, cornflowerblue,
cornsilk, crimson, cyan, darkblue, darkcyan,
darkgoldenrod, darkgray, darkgrey, darkgreen,
darkkhaki, darkmagenta, darkolivegreen, darkorange,
darkorchid, darkred, darksalmon, darkseagreen,
darkslateblue, darkslategray, darkslategrey,
darkturquoise, darkviolet, deeppink, deepskyblue,
dimgray, dimgrey, dodgerblue, firebrick,
floralwhite, forestgreen, fuchsia, gainsboro,
ghostwhite, gold, goldenrod, gray, grey, green,
greenyellow, honeydew, hotpink, indianred, indigo,
ivory, khaki, lavender, lavenderblush, lawngreen,
lemonchiffon, lightblue, lightcoral, lightcyan,
lightgoldenrodyellow, lightgray, lightgrey,
lightgreen, lightpink, lightsalmon, lightseagreen,
lightskyblue, lightslategray, lightslategrey,
lightsteelblue, lightyellow, lime, limegreen,
linen, magenta, maroon, mediumaquamarine,
mediumblue, mediumorchid, mediumpurple,
mediumseagreen, mediumslateblue, mediumspringgreen,
mediumturquoise, mediumvioletred, midnightblue,
mintcream, mistyrose, moccasin, navajowhite, navy,
oldlace, olive, olivedrab, orange, orangered,
orchid, palegoldenrod, palegreen, paleturquoise,
palevioletred, papayawhip, peachpuff, peru, pink,
plum, powderblue, purple, red, rosybrown,
royalblue, rebeccapurple, saddlebrown, salmon,
sandybrown, seagreen, seashell, sienna, silver,
skyblue, slateblue, slategray, slategrey, snow,
springgreen, steelblue, tan, teal, thistle, tomato,
turquoise, violet, wheat, white, whitesmoke,
yellow, yellowgreen
Returns
-------
str
"""
return self["color"]
@color.setter
def color(self, val):
self["color"] = val
# opacity
# -------
@property
def opacity(self):
"""
Sets the marker opacity of unselected points, applied only when
a selection exists.
The 'opacity' property is a number and may be specified as:
- An int or float in the interval [0, 1]
Returns
-------
int|float
"""
return self["opacity"]
@opacity.setter
def opacity(self, val):
self["opacity"] = val
# size
# ----
@property
def size(self):
"""
Sets the marker size of unselected points, applied only when a
selection exists.
The 'size' property is a number and may be specified as:
- An int or float in the interval [0, inf]
Returns
-------
int|float
"""
return self["size"]
@size.setter
def size(self, val):
self["size"] = val
# Self properties description
# ---------------------------
@property
def _prop_descriptions(self):
return """\
color
Sets the marker color of unselected points, applied
only when a selection exists.
opacity
Sets the marker opacity of unselected points, applied
only when a selection exists.
size
Sets the marker size of unselected points, applied only
when a selection exists.
"""
def __init__(self, arg=None, color=None, opacity=None, size=None, **kwargs):
"""
Construct a new Marker object
Parameters
----------
arg
dict of properties compatible with this constructor or
an instance of
:class:`plotly.graph_objs.splom.unselected.Marker`
color
Sets the marker color of unselected points, applied
only when a selection exists.
opacity
Sets the marker opacity of unselected points, applied
only when a selection exists.
size
Sets the marker size of unselected points, applied only
when a selection exists.
Returns
-------
Marker
"""
super(Marker, self).__init__("marker")
if "_parent" in kwargs:
self._parent = kwargs["_parent"]
return
# Validate arg
# ------------
if arg is None:
arg = {}
elif isinstance(arg, self.__class__):
arg = arg.to_plotly_json()
elif isinstance(arg, dict):
arg = _copy.copy(arg)
else:
raise ValueError(
"""\
The first argument to the plotly.graph_objs.splom.unselected.Marker
constructor must be a dict or
an instance of :class:`plotly.graph_objs.splom.unselected.Marker`"""
)
# Handle skip_invalid
# -------------------
self._skip_invalid = kwargs.pop("skip_invalid", False)
self._validate = kwargs.pop("_validate", True)
# Populate data dict with properties
# ----------------------------------
_v = arg.pop("color", None)
_v = color if color is not None else _v
if _v is not None:
self["color"] = _v
_v = arg.pop("opacity", None)
_v = opacity if opacity is not None else _v
if _v is not None:
self["opacity"] = _v
_v = arg.pop("size", None)
_v = size if size is not None else _v
if _v is not None:
self["size"] = _v
# Process unknown kwargs
# ----------------------
self._process_kwargs(**dict(arg, **kwargs))
# Reset skip_invalid
# ------------------
self._skip_invalid = False
| 33.828431
| 82
| 0.552674
|
4a084c5457a71036bc415196b1bd2416eaf5c355
| 11,733
|
py
|
Python
|
sunpy/io/fits.py
|
naltun/sunpy
|
86d6b89e1f95e110dfba0a0096e1e66635a4da77
|
[
"BSD-2-Clause"
] | null | null | null |
sunpy/io/fits.py
|
naltun/sunpy
|
86d6b89e1f95e110dfba0a0096e1e66635a4da77
|
[
"BSD-2-Clause"
] | null | null | null |
sunpy/io/fits.py
|
naltun/sunpy
|
86d6b89e1f95e110dfba0a0096e1e66635a4da77
|
[
"BSD-2-Clause"
] | null | null | null |
"""
This module provides a FITS file reader.
Notes
-----
1. FITS files allow comments to be attached to every value in the header.
This is implemented in this module as a KEYCOMMENTS dictionary in the
sunpy header. To add a comment to the file on write, add a comment to this
dictionary with the same name as a key in the header (upcased).
2. Due to the way `~astropy.io.fits` works with images, the header dictionary may
differ depending on whether is accessed before or after the fits[0].data
is requested. If the header is read before the data then the original
header will be returned. If the header is read after the data has been
accessed then the data will have been scaled and a modified header
reflecting these changes will be returned: BITPIX may differ and
BSCALE and B_ZERO may be dropped in the modified version.
3. The verify('silentfix+warn') call attempts to handle violations of the FITS
standard. For example, ``nan`` values will be converted to "nan" strings.
Attempting to cast a `astropy.io.fits.Header` to a dictionary while it contains
invalid header tags will result in an error so verifying it early on
makes the header easier to work with later.
"""
import os
import re
import sys
import math
import traceback
import collections
from astropy.io import fits
from sunpy.io.header import FileHeader
from sunpy.util.exceptions import warn_metadata, warn_user
__all__ = ['header_to_fits', 'read', 'get_header', 'write', 'extract_waveunit']
HDPair = collections.namedtuple('HDPair', ['data', 'header'])
def read(filepath, hdus=None, memmap=None, **kwargs):
"""
Read a fits file.
Parameters
----------
filepath : `str`
The fits file to be read.
hdus: `int` or iterable
The HDU indexes to read from the file.
Returns
-------
pairs : `list`
A list of (data, header) tuples
Notes
-----
This routine reads all the HDU's in a fits file and returns a list of the
data and a FileHeader instance for each one.
Also all comments in the original file are concatenated into a single
"comment" key in the returned FileHeader.
"""
with fits.open(filepath, ignore_blank=True, memmap=memmap) as hdulist:
if hdus is not None:
if isinstance(hdus, int):
hdulist = hdulist[hdus]
elif isinstance(hdus, collections.Iterable):
hdulist = [hdulist[i] for i in hdus]
hdulist = fits.hdu.HDUList(hdulist)
for h in hdulist:
h.verify('silentfix+warn')
headers = get_header(hdulist)
pairs = []
for i, (hdu, header) in enumerate(zip(hdulist, headers)):
try:
pairs.append(HDPair(hdu.data, header))
except (KeyError, ValueError) as e:
message = f"Error when reading HDU {i}. Skipping.\n"
for line in traceback.format_tb(sys.exc_info()[2]):
message += line
message += '\n'
message += repr(e)
warn_user(message)
return pairs
def get_header(afile):
"""
Read a fits file and return just the headers for all HDU's. In each header,
the key WAVEUNIT denotes the wavelength unit which is used to describe the
value of the key "WAVELNTH".
Parameters
----------
afile : `str` or `astropy.io.fits.HDUList`
The file to be read, or HDUList to process.
Returns
-------
headers : `list`
A list of `sunpy.io.header.FileHeader` headers.
"""
if isinstance(afile, fits.HDUList):
hdulist = afile
close = False
else:
hdulist = fits.open(afile, ignore_blank=True)
hdulist.verify('silentfix')
close = True
try:
headers = []
for hdu in hdulist:
try:
comment = "".join(hdu.header['COMMENT']).strip()
except KeyError:
comment = ""
try:
history = "".join(hdu.header['HISTORY']).strip()
except KeyError:
history = ""
header = FileHeader(hdu.header)
header['COMMENT'] = comment
header['HISTORY'] = history
# Strip out KEYCOMMENTS to a dict, the hard way
keydict = {}
for card in hdu.header.cards:
if card.comment != '':
keydict.update({card.keyword: card.comment})
header['KEYCOMMENTS'] = keydict
waveunit = extract_waveunit(header)
if waveunit is not None:
header['WAVEUNIT'] = waveunit
headers.append(header)
finally:
if close:
hdulist.close()
return headers
def write(fname, data, header, hdu_type=None, **kwargs):
"""
Take a data header pair and write a FITS file.
Parameters
----------
fname : `str`
File name, with extension.
data : `numpy.ndarray`
n-dimensional data array.
header : `dict`
A header dictionary.
hdu_type : `~astropy.io.fits.hdu.base.ExtensionHDU` instance or class, optional
By default, a FITS file is written with the map in its primary HDU.
If a type is given, a new HDU of this type will be created.
If a HDU instance is given, its data and header will be updated from the map.
Then that HDU instance will be written to the file.
kwargs :
Additional keyword arguments are given to
`~astropy.io.fits.HDUList.writeto`.
"""
# Copy header so the one in memory is left alone while changing it for
# write.
header = header.copy()
fits_header = header_to_fits(header)
if isinstance(fname, str):
fname = os.path.expanduser(fname)
fitskwargs = {'output_verify': 'fix'}
fitskwargs.update(kwargs)
if not hdu_type:
hdu_type = fits.PrimaryHDU
if isinstance(hdu_type, (fits.PrimaryHDU, fits.hdu.base.ExtensionHDU)):
hdu = hdu_type # HDU already initialised
# Merge `header` into HDU's header
# Values in `header` take priority, including cards such as
# 'SIMPLE' and 'BITPIX'.
hdu.header.extend(fits_header, strip=False, update=True)
# Set the HDU's data
hdu.data = data
else:
hdu = hdu_type(data=data, header=fits_header)
if not isinstance(hdu, fits.PrimaryHDU):
hdul = fits.HDUList([fits.PrimaryHDU(), hdu])
else:
hdul = fits.HDUList([hdu])
hdul.writeto(fname, **fitskwargs)
def header_to_fits(header):
"""
Convert a header dict to a `~astropy.io.fits.Header`.
"""
# Copy the header to avoid modifying it in place
header = header.copy()
# The comments need to be added to the header separately from the normal
# kwargs. Find and deal with them:
fits_header = fits.Header()
# Check Header
key_comments = header.pop('KEYCOMMENTS', False)
for k, v in header.items():
# Drop any keys that have non-ascii characters
if not fits.Card._ascii_text_re.match(str(v)):
warn_metadata(f'The meta key {k} is not valid ascii, dropping from the FITS header')
continue
# Drop any keys which are too long to save into FITS
if len(k) > 8:
warn_metadata(f"The meta key {k} is too long, dropping from the FITS header "
"(maximum allowed key length is 8 characters).")
continue
if isinstance(v, float) and math.isnan(v):
warn_metadata(f'The meta key {k} has a NaN value, which is not valid in a FITS '
'header, dropping from the FITS header')
continue
if k.upper() in ('COMMENT', 'HV_COMMENT'):
comments = str(v).split('\n')
for com in comments:
fits_header.add_comment(com)
elif k.upper() == 'HISTORY':
hists = str(v).split('\n')
for hist in hists:
fits_header.add_history(hist)
elif isinstance(v, fits.header._HeaderCommentaryCards):
if k != '':
fits_header.append(fits.Card(k, str(v).split('\n')))
else:
# For some horrific reason, we save a list to the wavelnth key in
# sources/rhessi.py. This is the least invasive fix for that stupidity.
if isinstance(v, list):
v = str(v)
fits_header.append(fits.Card(k, v))
if isinstance(key_comments, dict):
for k, v in key_comments.items():
# Check that the Card for the comment exists before trying to write to it.
if k in fits_header:
fits_header.comments[k] = v
elif key_comments:
raise TypeError("KEYCOMMENTS must be a dictionary")
return fits_header
def extract_waveunit(header):
"""
Attempt to read the wavelength unit from a given FITS header.
Parameters
----------
header : `sunpy.io.header.FileHeader`
One `~sunpy.io.header.FileHeader` instance which was created by
reading a FITS file. For example, `sunpy.io.fits.get_header` returns a list of
such instances.
Returns
-------
waveunit : `str`
The wavelength unit that could be found or ``None`` otherwise.
Examples
--------
The goal of this function is to return a string that can be used in
conjunction with the astropy.units module so that the return value can be
directly passed to `astropy.units.Unit`.
>>> import astropy.units
>>> header = {'WAVEUNIT': 'Angstrom', 'KEYCOMMENTS': {}}
>>> waveunit = extract_waveunit(header)
>>> if waveunit is not None:
... unit = astropy.units.Unit(waveunit)
"""
# algorithm: try the following procedures in the following order and return
# as soon as a waveunit could be detected
# 1. read header('WAVEUNIT'). If None, go to step 2.
# 1.1 -9 -> 'nm'
# 1.2 -10 -> 'angstrom'
# 1.3 0 -> go to step 2
# 1.4 if neither of the above, return the value itself in lowercase
# 2. parse waveunit_comment
# 2.1 'in meters' -> 'm'
# 3. parse wavelnth_comment
# 3.1 "[$UNIT] ..." -> $UNIT
# 3.2 "Observed wavelength ($UNIT)" -> $UNIT
def parse_waveunit_comment(waveunit_comment):
if waveunit_comment == 'in meters':
return 'm'
waveunit_comment = header['KEYCOMMENTS'].get('WAVEUNIT')
wavelnth_comment = header['KEYCOMMENTS'].get('WAVELNTH')
waveunit = header.get('WAVEUNIT')
if waveunit is not None:
metre_submultiples = {
0: parse_waveunit_comment(waveunit_comment),
-1: 'dm',
-2: 'cm',
-3: 'mm',
-6: 'um',
-9: 'nm',
-10: 'angstrom',
-12: 'pm',
-15: 'fm',
-18: 'am',
-21: 'zm',
-24: 'ym'}
waveunit = metre_submultiples.get(waveunit, str(waveunit).lower())
elif waveunit_comment is not None:
waveunit = parse_waveunit_comment(waveunit_comment)
elif wavelnth_comment is not None:
# supported formats (where $UNIT is the unit like "nm" or "Angstrom"):
# "Observed wavelength ($UNIT)"
# "[$UNIT] ..."
parentheses_pattern = r'Observed wavelength \((\w+?)\)$'
brackets_pattern = r'^\[(\w+?)\]'
for pattern in [parentheses_pattern, brackets_pattern]:
m = re.search(pattern, wavelnth_comment)
if m is not None:
waveunit = m.group(1)
break
if waveunit == '':
return None # To fix problems associated with HMI FITS.
return waveunit
| 34.008696
| 96
| 0.603341
|
4a084d2b9472d269df6a78419304204cf1ce9cb7
| 1,449
|
py
|
Python
|
unittest/rmg/speciestest.py
|
sean-v8/RMG-Py
|
7cc7c3bfb330786526c56113d98c785bcaaa161a
|
[
"MIT"
] | null | null | null |
unittest/rmg/speciestest.py
|
sean-v8/RMG-Py
|
7cc7c3bfb330786526c56113d98c785bcaaa161a
|
[
"MIT"
] | null | null | null |
unittest/rmg/speciestest.py
|
sean-v8/RMG-Py
|
7cc7c3bfb330786526c56113d98c785bcaaa161a
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
# -*- coding: utf-8 -*-
import unittest
import sys
sys.path.append('.')
import rmg.thermo.data
from rmg.structure import *
from rmg.species import *
################################################################################
class SpeciesCheck(unittest.TestCase):
def testResonance(self):
"""
Check that the resonance form generator is working correctly.
"""
species = Species()
species.fromSMILES('C=CC=CC=CC=C[CH]C')
species.getResonanceIsomers()
self.assertTrue(len(species.structure) == 5, "Found %d structures, expected 5"%len(species.structure) )
for structure in species.structure:
self.assertTrue(structure.getFormula() == 'C10H13')
species = Species()
species.fromSMILES('C=CC=CC=CC=C[CH]C=C')
species.getResonanceIsomers()
self.assertTrue(len(species.structure) == 3)
for structure in species.structure:
self.assertTrue(structure.getFormula() == 'C11H13')
def testMakeNewSpecies(self):
structure1 = Structure()
structure1.fromSMILES('C=CC=C[CH]C')
structure2 = Structure()
structure2.fromSMILES('C[CH]C=CC=C')
species1, isNew = makeNewSpecies(structure1)
species2, isNew = makeNewSpecies(structure2)
self.assertTrue(species1 is species2)
################################################################################
if __name__ == '__main__':
unittest.main( testRunner = unittest.TextTestRunner(verbosity=2) )
| 28.411765
| 105
| 0.625259
|
4a084df517e072a3a849a85ee4d905a6d8f31288
| 27,036
|
py
|
Python
|
tests/activity/test_activity_publish_final_poa.py
|
elifesciences/elife-bot
|
d3a102c8030e4b7ec83cbd45e5f839dba4f9ffd9
|
[
"MIT"
] | 17
|
2015-02-10T07:10:29.000Z
|
2021-05-14T22:24:45.000Z
|
tests/activity/test_activity_publish_final_poa.py
|
elifesciences/elife-bot
|
d3a102c8030e4b7ec83cbd45e5f839dba4f9ffd9
|
[
"MIT"
] | 459
|
2015-03-31T18:24:23.000Z
|
2022-03-30T19:44:40.000Z
|
tests/activity/test_activity_publish_final_poa.py
|
elifesciences/elife-bot
|
d3a102c8030e4b7ec83cbd45e5f839dba4f9ffd9
|
[
"MIT"
] | 9
|
2015-04-18T16:57:31.000Z
|
2020-10-30T11:49:13.000Z
|
import unittest
import glob
import os
import time
import zipfile
import xml.etree.ElementTree as ET
from xml.etree.ElementTree import Element
from mock import patch
from testfixtures import TempDirectory
from ddt import ddt, data, unpack
import activity.activity_PublishFinalPOA as activity_module
from activity.activity_PublishFinalPOA import activity_PublishFinalPOA
from tests.classes_mock import FakeSMTPServer
from tests.activity import helpers, settings_mock
from tests.activity.classes_mock import FakeLogger, FakeStorageContext
import tests.activity.test_activity_data as activity_test_data
class TestPublishFinalPOA(unittest.TestCase):
def setUp(self):
self.poa = activity_PublishFinalPOA(
settings_mock, FakeLogger(), None, None, None
)
self.do_activity_passes = []
self.do_activity_passes.append(
{
"outbox_file_list": [],
"done_dir_file_count": 0,
"approve_status": False,
"publish_status": None,
"activity_status": True,
"output_dir_files": [],
"done_xml_files": [],
"clean_from_outbox_files": [],
"malformed_ds_file_names": [],
"empty_ds_file_names": [],
"unmatched_ds_file_names": [],
}
)
# Missing a PDF
self.do_activity_passes.append(
{
"outbox_file_list": ["elife_poa_e13833.xml", "elife_poa_e13833_ds.zip"],
"done_dir_file_count": 0,
"approve_status": True,
"publish_status": True,
"activity_status": True,
"output_dir_files": [],
"done_xml_files": [],
"clean_from_outbox_files": [],
"malformed_ds_file_names": [],
"empty_ds_file_names": [],
"unmatched_ds_file_names": ["elife_poa_e13833_ds.zip"],
}
)
# Full set of files for one article
self.do_activity_passes.append(
{
"outbox_file_list": [
"decap_elife_poa_e13833.pdf",
"elife_poa_e13833.xml",
"elife_poa_e13833_ds.zip",
],
"done_dir_file_count": 3,
"approve_status": True,
"publish_status": True,
"activity_status": True,
"output_dir_files": ["elife-13833-poa-r1.zip"],
"done_xml_files": ["elife-13833.xml"],
"clean_from_outbox_files": [
"decap_elife_poa_e13833.pdf",
"elife_poa_e13833.xml",
"elife_poa_e13833_ds.zip",
],
"malformed_ds_file_names": [],
"empty_ds_file_names": [],
"unmatched_ds_file_names": [],
}
)
# One article with no ds.zip file
self.do_activity_passes.append(
{
"outbox_file_list": [
"decap_elife_poa_e14692.pdf",
"elife_poa_e14692.xml",
],
"done_dir_file_count": 2,
"approve_status": True,
"publish_status": True,
"activity_status": True,
"output_dir_files": ["elife-14692-poa-r1.zip"],
"done_xml_files": ["elife-14692.xml"],
"clean_from_outbox_files": [
"decap_elife_poa_e14692.pdf",
"elife_poa_e14692.xml",
],
"malformed_ds_file_names": [],
"empty_ds_file_names": [],
"unmatched_ds_file_names": [],
}
)
# Full set of files for two articles
self.do_activity_passes.append(
{
"outbox_file_list": [
"decap_elife_poa_e13833.pdf",
"elife_poa_e13833.xml",
"elife_poa_e13833_ds.zip",
"decap_elife_poa_e14692.pdf",
"elife_poa_e14692.xml",
"elife_poa_e14692_ds.zip",
"elife_poa_e99999_ds.zip",
"elife_poa_e99997_ds.zip",
],
"done_dir_file_count": 6,
"approve_status": True,
"publish_status": True,
"activity_status": True,
"output_dir_files": [
"elife-13833-poa-r1.zip",
"elife-14692-poa-r1.zip",
],
"done_xml_files": ["elife-13833.xml", "elife-14692.xml"],
"clean_from_outbox_files": [
"decap_elife_poa_e13833.pdf",
"elife_poa_e13833.xml",
"elife_poa_e13833_ds.zip",
"decap_elife_poa_e14692.pdf",
"elife_poa_e14692.xml",
"elife_poa_e14692_ds.zip",
],
"malformed_ds_file_names": ["elife_poa_e99999_ds.zip"],
"empty_ds_file_names": [],
"unmatched_ds_file_names": ["elife_poa_e99997_ds.zip"],
}
)
# Full set of files for one article
self.do_activity_passes.append(
{
"outbox_file_list": [
"decap_elife_poa_e15082.pdf",
"elife_poa_e15082.xml",
"elife_poa_e15082_ds.zip",
],
"done_dir_file_count": 3,
"approve_status": True,
"publish_status": True,
"activity_status": True,
"output_dir_files": ["elife-15082-poa-r1.zip"],
"done_xml_files": ["elife-15082.xml"],
"clean_from_outbox_files": [
"decap_elife_poa_e15082.pdf",
"elife_poa_e15082.xml",
"elife_poa_e15082_ds.zip",
],
"malformed_ds_file_names": [],
"empty_ds_file_names": [],
"unmatched_ds_file_names": [],
}
)
# Tests for values in the XML files after rewriting
self.xml_file_values = {}
self.xml_file_values["elife-13833.xml"] = {
"./front/article-meta/volume": (None, "5"),
"./front/article-meta/article-id[@pub-id-type='publisher-id']": (
None,
"13833",
),
"./front/article-meta/pub-date[@date-type='pub']/day": (None, "05"),
"./front/article-meta/pub-date[@date-type='pub']/month": (None, "07"),
"./front/article-meta/pub-date[@date-type='pub']/year": (None, "2016"),
"./front/article-meta/self-uri": (
"{http://www.w3.org/1999/xlink}href",
"elife-13833.pdf",
),
}
self.xml_file_values["elife-14692.xml"] = {
"./front/article-meta/volume": (None, "5"),
"./front/article-meta/article-id[@pub-id-type='publisher-id']": (
None,
"14692",
),
"./front/article-meta/pub-date[@date-type='pub']/day": (None, "04"),
"./front/article-meta/pub-date[@date-type='pub']/month": (None, "07"),
"./front/article-meta/pub-date[@date-type='pub']/year": (None, "2016"),
"./front/article-meta/self-uri": (
"{http://www.w3.org/1999/xlink}href",
"elife-14692.pdf",
),
}
self.xml_file_values["elife-15082.xml"] = {
"./front/article-meta/volume": (None, "5"),
"./front/article-meta/article-id[@pub-id-type='publisher-id']": (
None,
"15082",
),
"./front/article-meta/pub-date[@date-type='pub']/day": (None, "13"),
"./front/article-meta/pub-date[@date-type='pub']/month": (None, "07"),
"./front/article-meta/pub-date[@date-type='pub']/year": (None, "2016"),
"./front/article-meta/self-uri": (
"{http://www.w3.org/1999/xlink}href",
"elife-15082.pdf",
),
}
# Tests for XML values only for when a ds zip file was packaged as part of the test
self.xml_file_values_when_ds_zip = {}
self.xml_file_values_when_ds_zip["elife-13833.xml"] = {
"./back/sec/supplementary-material/ext-link": (
"{http://www.w3.org/1999/xlink}href",
"elife-13833-supp.zip",
),
}
self.xml_file_values_when_ds_zip["elife-14692.xml"] = {
"./back/sec/supplementary-material/ext-link": (
"{http://www.w3.org/1999/xlink}href",
"elife-14692-supp.zip",
),
}
self.xml_file_values_when_ds_zip["elife-15082.xml"] = {
"./back/sec/supplementary-material/ext-link": (
"{http://www.w3.org/1999/xlink}href",
"elife-15082-supp.zip",
),
}
def tearDown(self):
self.poa.clean_tmp_dir()
helpers.delete_files_in_folder(
activity_test_data.ExpandArticle_files_dest_folder, filter_out=[".gitkeep"]
)
def remove_files_from_tmp_dir_subfolders(self):
"""
Run between each test pass, delete the subfolders in tmp_dir
"""
for directory in os.listdir(self.poa.get_tmp_dir()):
directory_full_path = self.poa.get_tmp_dir() + os.sep + directory
if os.path.isdir(directory_full_path):
for file in glob.glob(directory_full_path + "/*"):
os.remove(file)
@patch.object(activity_module.email_provider, "smtp_connect")
@patch("provider.lax_provider.article_publication_date")
@patch.object(activity_PublishFinalPOA, "next_revision_number")
@patch("provider.outbox_provider.get_outbox_s3_key_names")
@patch("provider.outbox_provider.storage_context")
@patch.object(activity_module, "storage_context")
@patch.object(activity_PublishFinalPOA, "clean_tmp_dir")
def test_do_activity(
self,
fake_clean_tmp_dir,
fake_storage_context,
fake_provider_storage_context,
fake_outbox_key_names,
fake_next_revision_number,
fake_get_pub_date_str_from_lax,
fake_email_smtp_connect,
):
fake_email_smtp_connect.return_value = FakeSMTPServer(self.poa.get_tmp_dir())
fake_clean_tmp_dir.return_value = None
fake_provider_storage_context.return_value = FakeStorageContext(
"tests/test_data/poa/outbox"
)
fake_storage_context.return_value = FakeStorageContext()
fake_next_revision_number.return_value = 1
# fake_upload_files_to_s3.return_value = True
fake_get_pub_date_str_from_lax.return_value = "20160704000000"
for test_data in self.do_activity_passes:
fake_outbox_key_names.return_value = test_data["outbox_file_list"]
param_data = None
success = self.poa.do_activity(param_data)
self.assertEqual(self.poa.approve_status, test_data["approve_status"])
self.assertEqual(self.poa.publish_status, test_data["publish_status"])
self.assertEqual(
count_files_in_dir(self.poa.directories.get("DONE_DIR")),
test_data["done_dir_file_count"],
)
self.assertEqual(self.poa.activity_status, test_data["activity_status"])
self.assertTrue(
compare_files_in_dir(
self.poa.directories.get("OUTPUT_DIR"),
test_data["output_dir_files"],
)
)
self.assertEqual(
sorted(self.poa.done_xml_files), sorted(test_data["done_xml_files"])
)
self.assertEqual(
sorted(self.poa.clean_from_outbox_files),
sorted(test_data["clean_from_outbox_files"]),
)
self.assertEqual(
sorted(self.poa.malformed_ds_file_names),
sorted(test_data["malformed_ds_file_names"]),
)
self.assertEqual(
sorted(self.poa.empty_ds_file_names),
sorted(test_data["empty_ds_file_names"]),
)
self.assertEqual(
sorted(self.poa.unmatched_ds_file_names),
sorted(test_data["unmatched_ds_file_names"]),
)
# Check XML values if XML was approved
if test_data["done_dir_file_count"] > 0:
xml_files = glob.glob(self.poa.directories.get("DONE_DIR") + "/*.xml")
for xml_file in xml_files:
self.assertTrue(check_xml_contents(xml_file, self.xml_file_values))
# If a ds zip file for the article, check more XML elements
if ds_zip_in_list_of_files(
xml_file, self.poa.clean_from_outbox_files
):
self.assertTrue(
check_xml_contents(
xml_file, self.xml_file_values_when_ds_zip
)
)
self.assertEqual(True, success)
# Clean the tmp_dir subfolders between tests
self.remove_files_from_tmp_dir_subfolders()
# Reset variables
self.poa.activity_status = None
self.poa.approve_status = None
self.poa.publish_status = None
self.poa.clean_from_outbox_files = []
self.poa.done_xml_files = []
self.poa.malformed_ds_file_names = []
self.poa.empty_ds_file_names = []
self.poa.unmatched_ds_file_names = []
@patch.object(FakeStorageContext, "list_resources")
@patch.object(activity_module, "storage_context")
def test_next_revision_number_default(
self, fake_storage_context, fake_list_resources
):
doi_id = "7"
key_names = []
expected = 1
fake_storage_context.return_value = FakeStorageContext()
fake_list_resources.return_value = key_names
self.assertEqual(self.poa.next_revision_number(doi_id), expected)
@patch.object(FakeStorageContext, "list_resources")
@patch.object(activity_module, "storage_context")
def test_next_revision_number_next(self, fake_storage_context, fake_list_resources):
doi_id = "7"
key_names = ["elife-00007-poa-r1.zip", "elife-00007-poa-r_bad_number.zip"]
expected = 2
fake_storage_context.return_value = FakeStorageContext()
fake_list_resources.return_value = key_names
self.assertEqual(self.poa.next_revision_number(doi_id), expected)
def count_files_in_dir(dir_name):
"""
After do_activity, check the directory contains a zip with ds_zip file name
"""
file_names = glob.glob(dir_name + os.sep + "*")
return len(file_names)
def compare_files_in_dir(dir_name, file_list):
"""
Compare the file names in the directroy to the file_list provided
"""
file_names = glob.glob(dir_name + os.sep + "*")
# First check the count is the same
if len(file_list) != len(file_names):
return False
# Then can compare file name by file name
for file in file_names:
file_name = file.split(os.sep)[-1]
if file_name not in file_list:
return False
return True
def check_xml_contents(xml_file, xml_file_values):
"""
Function to compare XML tag value as located by an xpath
Can compare one tag only at a time
"""
root = None
xml_file_name = xml_file.split(os.sep)[-1]
if xml_file_name in xml_file_values:
ET.register_namespace("xlink", "http://www.w3.org/1999/xlink")
root = ET.parse(xml_file)
if root:
for (xpath, (attribute, value)) in xml_file_values[xml_file_name].items():
matched_tags = root.findall(xpath)
if len(matched_tags) != 1:
return False
for matched_tag in matched_tags:
if attribute:
if matched_tag.get(attribute) != value:
return False
else:
if matched_tag.text != value:
return False
return True
def ds_zip_in_list_of_files(xml_file, file_list):
"""
Given an XML file and a list of files
check the list of files contains a ds zip file that matches the xml file
"""
doi_id = xml_file.split("-")[-1].split(".")[0]
for file in file_list:
if str(doi_id) in file and file.endswith("ds.zip"):
return True
return False
@ddt
class TestDoiIdFromFilename(unittest.TestCase):
@data(
(None, None),
("", None),
("decap_elife_poa_e10727.pdf", 10727),
("decap_elife_poa_e12029v2.pdf", 12029),
("elife_poa_e10727.xml", 10727),
("elife_poa_e10727_ds.zip", 10727),
("elife_poa_e12029v2.xml", 12029),
("bad_file_name.xml", None),
)
@unpack
def test_doi_id_from_filename(self, filename, expected):
doi_id = activity_module.doi_id_from_filename(filename)
self.assertEqual(doi_id, expected)
class TestGetPubDateIfMissing(unittest.TestCase):
def setUp(self):
self.logger = FakeLogger()
@patch.object(activity_module, "get_pub_date_str_from_lax")
def test_get_pub_date_if_missing_lax(self, fake_get_pub_date):
doi_id = 666
fake_get_pub_date.return_value = "20160704000000"
expected = time.strptime("2016-07-04T00:00:00Z", "%Y-%m-%dT%H:%M:%SZ")
pub_date = activity_module.get_pub_date_if_missing(
doi_id, settings_mock, self.logger
)
self.assertEqual(pub_date, expected)
@patch("time.gmtime")
@patch.object(activity_module, "get_pub_date_str_from_lax")
def test_get_pub_date_if_missing_no_lax(self, fake_get_pub_date, fake_gmtime):
fake_get_pub_date.return_value = None
struct_time = time.strptime("2016-07-04T00:00:00Z", "%Y-%m-%dT%H:%M:%SZ")
fake_gmtime.return_value = struct_time
doi_id = 666
expected = struct_time
pub_date = activity_module.get_pub_date_if_missing(
doi_id, settings_mock, self.logger
)
self.assertEqual(pub_date, expected)
class TestModifyXml(unittest.TestCase):
def setUp(self):
self.logger = FakeLogger()
@patch.object(activity_module, "convert_xml")
def test_modify_xml_exception(self, fake_convert_xml):
fake_convert_xml.side_effect = Exception("An exception")
doi_id = 666
return_value = activity_module.modify_xml(
None, doi_id, None, settings_mock, self.logger
)
self.assertEqual(return_value, False)
self.assertEqual(
self.logger.logexception,
"Exception when converting XML for doi %s, An exception" % doi_id,
)
class TestCheckMatchingXmlFile(unittest.TestCase):
@patch("glob.glob")
def test_check_matching_xml_file(self, fake_glob):
zip_filename = "elife_poa_e14692_ds.zip"
fake_glob.return_value = ["input_dir/elife_poa_e14692.xml"]
self.assertTrue(
activity_module.check_matching_xml_file(zip_filename, input_dir="")
)
@patch("glob.glob")
def test_check_matching_xml_file_not_found(self, fake_glob):
zip_filename = "elife_poa_e14692_ds.zip"
fake_glob.return_value = ["input_dir/not_found.xml"]
self.assertEqual(
activity_module.check_matching_xml_file(zip_filename, input_dir=""), False
)
class TestCheckMatchingPdfFile(unittest.TestCase):
@patch("glob.glob")
def test_check_matching_pdf_file(self, fake_glob):
zip_filename = "elife_poa_e14692_ds.zip"
fake_glob.return_value = ["input_dir/decap_elife_poa_e14692.pdf"]
self.assertTrue(
activity_module.check_matching_pdf_file(zip_filename, input_dir="")
)
@patch("glob.glob")
def test_check_matching_pdf_file_not_found(self, fake_glob):
zip_filename = "elife_poa_e14692_ds.zip"
fake_glob.return_value = ["input_dir/not_found.pdf"]
self.assertEqual(
activity_module.check_matching_pdf_file(zip_filename, input_dir=""), False
)
class TestAddSelfUriToXml(unittest.TestCase):
def setUp(self):
self.logger = FakeLogger()
def test_add_self_uri_to_xml(self):
file_name = "article.pdf"
doi_id = 666
xml_string = b"""<article>
<front>
<article-meta>
<permissions />
</article-meta>
</front>
</article>"""
root = ET.fromstring(xml_string)
expected = b"""<article>
<front>
<article-meta>
<permissions />
<self-uri content-type="pdf" xlink:href="article.pdf" /></article-meta>
</front>
</article>"""
output = activity_module.add_self_uri_to_xml(
doi_id, file_name, root, self.logger
)
self.assertEqual(ET.tostring(output), expected)
def test_add_self_uri_to_xml_no_permissions_tag(self):
file_name = "article.pdf"
doi_id = 666
xml_string = b"""<article>
<front>
<article-meta />
</front>
</article>"""
root = ET.fromstring(xml_string)
expected = xml_string
output = activity_module.add_self_uri_to_xml(
doi_id, file_name, root, self.logger
)
self.assertEqual(ET.tostring(output), expected)
self.assertEqual(
self.logger.loginfo[-1],
"no permissions tag and no self-uri tag added: %s" % doi_id,
)
class TestAddTagToXml(unittest.TestCase):
def setUp(self):
self.logger = FakeLogger()
def test_add_tag_to_xml(self):
add_tag = Element("volume")
add_tag.text = "1"
doi_id = 666
xml_string = b"""<article>
<front>
<article-meta>
<elocation-id />
</article-meta>
</front>
</article>"""
root = ET.fromstring(xml_string)
expected = b"""<article>
<front>
<article-meta>
<volume>1</volume><elocation-id />
</article-meta>
</front>
</article>"""
output = activity_module.add_tag_to_xml_before_elocation_id(
add_tag, root, doi_id, self.logger
)
self.assertEqual(ET.tostring(output), expected)
def test_add_tag_to_xml_no_elocation_id_tag(self):
add_tag = Element("foo")
doi_id = 666
xml_string = b"""<article>
<front>
<article-meta />
</front>
</article>"""
root = ET.fromstring(xml_string)
expected = xml_string
output = activity_module.add_tag_to_xml_before_elocation_id(
add_tag, root, doi_id, self.logger
)
self.assertEqual(ET.tostring(output), expected)
self.assertEqual(
self.logger.loginfo[-1], "no elocation-id tag and no foo added: %s" % doi_id
)
@ddt
class TestGetFilenameFromPath(unittest.TestCase):
@data(
("elife_poa_e99999.xml", ".xml", "elife_poa_e99999"),
(
os.path.join("folder", "elife_poa_e99999_ds.zip"),
"_ds.zip",
"elife_poa_e99999",
),
)
@unpack
def test_get_filename_from_path(self, file_path, extension, expected):
self.assertEqual(
activity_module.get_filename_from_path(file_path, extension), expected
)
class TestCheckEmptySupplementalFiles(unittest.TestCase):
def tearDown(self):
TempDirectory.cleanup_all()
def test_check_empty_supplemental_files(self):
input_zipfile = "tests/test_data/poa/outbox/elife_poa_e13833_ds.zip"
with zipfile.ZipFile(input_zipfile, "r") as current_zipfile:
self.assertTrue(
activity_module.check_empty_supplemental_files(current_zipfile)
)
def test_check_empty_supplemental_files_no_internal_zip(self):
input_zipfile = "tests/test_data/poa/outbox/elife_poa_e99997_ds.zip"
with zipfile.ZipFile(input_zipfile, "r") as current_zipfile:
self.assertTrue(
activity_module.check_empty_supplemental_files(current_zipfile)
)
def test_check_empty_supplemental_files_empty_internal_zip(self):
directory = TempDirectory()
internal_zip_path = os.path.join(directory.path, "internal.zip")
with zipfile.ZipFile(internal_zip_path, "w") as input_zipfile:
pass
zip_file_path = os.path.join(directory.path, "empty.zip")
with zipfile.ZipFile(zip_file_path, "w") as input_zipfile:
input_zipfile.write(internal_zip_path, "elife13833_Supplemental_files.zip")
with zipfile.ZipFile(zip_file_path, "r") as current_zipfile:
self.assertEqual(
activity_module.check_empty_supplemental_files(current_zipfile), False
)
@ddt
class TestNewFilenameFromOld(unittest.TestCase):
def setUp(self):
self.new_filenames = [
"elife-13833-supp.zip",
"elife-13833.xml",
"elife-13833.pdf",
"fake_file",
]
def test_new_filename_from_old(self):
old_filename = "elife_poa_e13833_ds.zip"
expected = "elife-13833-supp.zip"
self.assertEqual(
activity_module.new_filename_from_old(old_filename, self.new_filenames),
expected,
)
@data(
(None, None),
("", None),
("fake_file", "fake_file"),
("fake_file.", None),
("does_not_exist", None),
)
@unpack
def test_new_filename_from_old_edge_cases(self, old_filename, expected):
# edge cases for test coverage
self.assertEqual(
activity_module.new_filename_from_old(old_filename, self.new_filenames),
expected,
)
class TestNewZipFileName(unittest.TestCase):
def test_new_zip_file_name(self):
doi_id = "666"
revision = "1"
status = "poa"
expected = "elife-00666-poa-r1.zip"
self.assertEqual(
activity_module.new_zip_file_name(doi_id, revision, status), expected
)
class TestArticleXmlFromFilenameMap(unittest.TestCase):
def test_article_xml_from_filename_map(self):
filenames = ["elife_poa_e99999.xml"]
expected = "elife_poa_e99999.xml"
self.assertEqual(
activity_module.article_xml_from_filename_map(filenames), expected
)
def test_article_xml_from_filename_map_not_found(self):
filenames = ["elife_poa_e99999_ds.zip"]
expected = None
self.assertEqual(
activity_module.article_xml_from_filename_map(filenames), expected
)
| 36.683853
| 91
| 0.588142
|
4a085019c218735a49b240a77f4be39ee917c0d4
| 189
|
py
|
Python
|
products/urls.py
|
marcomoreschi/Milestone-4
|
0fe5c9b9621f3642ec33c24e4ecd916233300a08
|
[
"BSD-Source-Code"
] | null | null | null |
products/urls.py
|
marcomoreschi/Milestone-4
|
0fe5c9b9621f3642ec33c24e4ecd916233300a08
|
[
"BSD-Source-Code"
] | 7
|
2021-03-30T14:17:14.000Z
|
2022-01-13T03:17:18.000Z
|
products/urls.py
|
marcomoreschi/Milestone-4
|
0fe5c9b9621f3642ec33c24e4ecd916233300a08
|
[
"BSD-Source-Code"
] | 1
|
2020-10-06T15:32:10.000Z
|
2020-10-06T15:32:10.000Z
|
from django.urls import path
from . import views
urlpatterns = [
path('', views.all_products, name='products'),
path('<product_id>', views.product_detail, name='product_detail'),
]
| 27
| 70
| 0.708995
|
4a0850734e13429768418736cdd84fcc7fa48f48
| 1,133
|
py
|
Python
|
dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/pandas/tests/test_errors.py
|
jeikabu/lumberyard
|
07228c605ce16cbf5aaa209a94a3cb9d6c1a4115
|
[
"AML"
] | 18
|
2018-02-23T11:28:54.000Z
|
2021-09-23T08:19:54.000Z
|
dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/pandas/tests/test_errors.py
|
jeikabu/lumberyard
|
07228c605ce16cbf5aaa209a94a3cb9d6c1a4115
|
[
"AML"
] | 2
|
2021-02-08T20:19:17.000Z
|
2021-04-30T20:32:52.000Z
|
dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/pandas/tests/test_errors.py
|
jeikabu/lumberyard
|
07228c605ce16cbf5aaa209a94a3cb9d6c1a4115
|
[
"AML"
] | 5
|
2019-03-12T14:24:18.000Z
|
2021-06-23T13:42:58.000Z
|
# -*- coding: utf-8 -*-
import pytest
from warnings import catch_warnings
import pandas # noqa
import pandas as pd
@pytest.mark.parametrize(
"exc", ['UnsupportedFunctionCall', 'UnsortedIndexError',
'OutOfBoundsDatetime',
'ParserError', 'PerformanceWarning', 'DtypeWarning',
'EmptyDataError', 'ParserWarning'])
def test_exception_importable(exc):
from pandas import errors
e = getattr(errors, exc)
assert e is not None
# check that we can raise on them
with pytest.raises(e):
raise e()
def test_catch_oob():
from pandas import errors
try:
pd.Timestamp('15000101')
except errors.OutOfBoundsDatetime:
pass
def test_error_rename():
# see gh-12665
from pandas.errors import ParserError
from pandas.io.common import CParserError
try:
raise CParserError()
except ParserError:
pass
try:
raise ParserError()
except CParserError:
pass
with catch_warnings(record=True):
try:
raise ParserError()
except pd.parser.CParserError:
pass
| 21.377358
| 64
| 0.640777
|
4a0850763508f972b5341f07657ed37446b3c4f5
| 5,325
|
py
|
Python
|
libraries/botframework-connector/botframework/connector/auth/jwt_token_extractor.py
|
Shiftersky/botbuilder-python
|
e00ea990d5cb5b05d545d87c51249dfa8f183581
|
[
"MIT"
] | 1
|
2020-02-19T15:50:10.000Z
|
2020-02-19T15:50:10.000Z
|
libraries/botframework-connector/botframework/connector/auth/jwt_token_extractor.py
|
Fortune-Adekogbe/botbuilder-python
|
4e48c874c32a2a7fe7f27a7a1f825e2aa39466c4
|
[
"MIT"
] | null | null | null |
libraries/botframework-connector/botframework/connector/auth/jwt_token_extractor.py
|
Fortune-Adekogbe/botbuilder-python
|
4e48c874c32a2a7fe7f27a7a1f825e2aa39466c4
|
[
"MIT"
] | null | null | null |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
import json
from datetime import datetime, timedelta
from typing import List
import requests
from jwt.algorithms import RSAAlgorithm
import jwt
from .claims_identity import ClaimsIdentity
from .verify_options import VerifyOptions
from .endorsements_validator import EndorsementsValidator
class JwtTokenExtractor:
metadataCache = {}
def __init__(
self,
validation_params: VerifyOptions,
metadata_url: str,
allowed_algorithms: list,
):
self.validation_parameters = validation_params
self.validation_parameters.algorithms = allowed_algorithms
self.open_id_metadata = JwtTokenExtractor.get_open_id_metadata(metadata_url)
@staticmethod
def get_open_id_metadata(metadata_url: str):
metadata = JwtTokenExtractor.metadataCache.get(metadata_url, None)
if metadata is None:
metadata = _OpenIdMetadata(metadata_url)
JwtTokenExtractor.metadataCache.setdefault(metadata_url, metadata)
return metadata
async def get_identity_from_auth_header(
self, auth_header: str, channel_id: str, required_endorsements: List[str] = None
) -> ClaimsIdentity:
if not auth_header:
return None
parts = auth_header.split(" ")
if len(parts) == 2:
return await self.get_identity(
parts[0], parts[1], channel_id, required_endorsements
)
return None
async def get_identity(
self,
schema: str,
parameter: str,
channel_id: str,
required_endorsements: List[str] = None,
) -> ClaimsIdentity:
# No header in correct scheme or no token
if schema != "Bearer" or not parameter:
return None
# Issuer isn't allowed? No need to check signature
if not self._has_allowed_issuer(parameter):
return None
try:
return await self._validate_token(
parameter, channel_id, required_endorsements
)
except Exception as error:
raise error
def _has_allowed_issuer(self, jwt_token: str) -> bool:
decoded = jwt.decode(jwt_token, verify=False)
issuer = decoded.get("iss", None)
if issuer in self.validation_parameters.issuer:
return True
return issuer == self.validation_parameters.issuer
async def _validate_token(
self, jwt_token: str, channel_id: str, required_endorsements: List[str] = None
) -> ClaimsIdentity:
required_endorsements = required_endorsements or []
headers = jwt.get_unverified_header(jwt_token)
# Update the signing tokens from the last refresh
key_id = headers.get("kid", None)
metadata = await self.open_id_metadata.get(key_id)
if key_id and metadata.endorsements:
# Verify that channelId is included in endorsements
if not EndorsementsValidator.validate(channel_id, metadata.endorsements):
raise Exception("Could not validate endorsement key")
# Verify that additional endorsements are satisfied.
# If no additional endorsements are expected, the requirement is satisfied as well
for endorsement in required_endorsements:
if not EndorsementsValidator.validate(
endorsement, metadata.endorsements
):
raise Exception("Could not validate endorsement key")
if headers.get("alg", None) not in self.validation_parameters.algorithms:
raise Exception("Token signing algorithm not in allowed list")
options = {
"verify_aud": False,
"verify_exp": not self.validation_parameters.ignore_expiration,
}
decoded_payload = jwt.decode(
jwt_token,
metadata.public_key,
leeway=self.validation_parameters.clock_tolerance,
options=options,
)
claims = ClaimsIdentity(decoded_payload, True)
return claims
class _OpenIdMetadata:
def __init__(self, url):
self.url = url
self.keys = []
self.last_updated = datetime.min
async def get(self, key_id: str):
# If keys are more than 5 days old, refresh them
if self.last_updated < (datetime.now() + timedelta(days=5)):
await self._refresh()
return self._find(key_id)
async def _refresh(self):
response = requests.get(self.url)
response.raise_for_status()
keys_url = response.json()["jwks_uri"]
response_keys = requests.get(keys_url)
response_keys.raise_for_status()
self.last_updated = datetime.now()
self.keys = response_keys.json()["keys"]
def _find(self, key_id: str):
if not self.keys:
return None
key = [x for x in self.keys if x["kid"] == key_id][0]
public_key = RSAAlgorithm.from_jwk(json.dumps(key))
endorsements = key.get("endorsements", [])
return _OpenIdConfig(public_key, endorsements)
class _OpenIdConfig:
def __init__(self, public_key, endorsements):
self.public_key = public_key
self.endorsements = endorsements
| 34.354839
| 94
| 0.649765
|
4a085179174503dfd5e4323e58e02c8188c7911f
| 3,502
|
py
|
Python
|
analysis/webservice/algorithms/doms/DatasetListQuery.py
|
dataplumber/nexus
|
f25a89e85eba098da9c6db1ff3d408dae8a6b310
|
[
"Apache-2.0"
] | 23
|
2016-08-09T22:45:14.000Z
|
2020-02-17T08:18:29.000Z
|
analysis/webservice/algorithms/doms/DatasetListQuery.py
|
lewismc/incubator-sdap-nexus
|
ff98fa346303431542b8391cc2a1bf7561d1bd03
|
[
"Apache-2.0"
] | 6
|
2017-04-27T21:22:17.000Z
|
2021-06-01T21:45:52.000Z
|
analysis/webservice/algorithms/doms/DatasetListQuery.py
|
dataplumber/nexus
|
f25a89e85eba098da9c6db1ff3d408dae8a6b310
|
[
"Apache-2.0"
] | 5
|
2016-08-31T13:47:29.000Z
|
2017-11-14T21:45:22.000Z
|
from webservice.NexusHandler import NexusHandler as BaseHandler
from webservice.webmodel import StatsComputeOptions
from webservice.NexusHandler import nexus_handler
from webservice.NexusHandler import DEFAULT_PARAMETERS_SPEC
from webservice.webmodel import NexusResults, NexusProcessingException, DatasetNotFoundException, cached
import BaseDomsHandler
import datafetch
import config
import requests
import json
import values
import traceback
@nexus_handler
class DomsDatasetListQueryHandler(BaseDomsHandler.BaseDomsQueryHandler):
name = "DOMS Dataset Listing"
path = "/domslist"
description = ""
params = {}
singleton = True
def __init__(self):
BaseHandler.__init__(self)
def getFacetsForInsituSource(self, source):
url = source["url"]
params = {
"facet": "true",
"stats": "true",
"startIndex": 0,
"itemsPerPage": 0
}
try:
r = requests.get(url, params=params)
results = json.loads(r.text)
depths = None
if "stats_fields" in results and "depth" in results["stats_fields"]:
depths = results["stats_fields"]["depth"]
for facet in results["facets"]:
field = facet["field"]
for value in facet["values"]:
value["value"] = values.getDescByListNameAndId(field, int(value["value"]))
return depths, results["facets"]
except: # KMG: Don't eat the exception. Add better handling...
traceback.print_exc()
return None, None
def getMetadataUrlForDataset(self, dataset):
datasetSpec = config.getEndpointByName(dataset)
if datasetSpec is not None:
return datasetSpec["metadataUrl"]
else:
# KMG: NOT a good hack
if dataset == "JPL-L4_GHRSST-SSTfnd-MUR-GLOB-v02.0-fv04.1" or dataset == "JPL-L4_GHRSST-SSTfnd-MUR-GLOB-v02.0-fv04.1_CLIM":
dataset = "MUR-JPL-L4-GLOB-v4.1"
elif dataset == "SMAP_L2B_SSS":
dataset = "JPL_SMAP-SSS_L2_EVAL-V2"
elif dataset == "AVHRR_OI_L4_GHRSST_NCEI" or dataset == "AVHRR_OI_L4_GHRSST_NCEI_CLIM":
dataset = "AVHRR_OI-NCEI-L4-GLOB-v2.0"
return "http://doms.jpl.nasa.gov/ws/metadata/dataset?shortName=%s&format=umm-json"%dataset
def getMetadataForSource(self, dataset):
try:
r = requests.get(self.getMetadataUrlForDataset(dataset))
results = json.loads(r.text)
return results
except:
return None
@cached(ttl=(60 * 60 * 1000)) # 1 hour cached
def calc(self, computeOptions, **args):
satellitesList = self._tile_service.get_dataseries_list(simple=True)
insituList = []
for satellite in satellitesList:
satellite["metadata"] = self.getMetadataForSource(satellite["shortName"])
for insitu in config.ENDPOINTS:
depths, facets = self.getFacetsForInsituSource(insitu)
insituList.append({
"name" : insitu["name"],
"endpoint" : insitu["url"],
"metadata": self.getMetadataForSource(insitu["name"]),
"depths": depths,
"facets": facets
})
values = {
"satellite" : satellitesList,
"insitu" : insituList
}
return BaseDomsHandler.DomsQueryResults(results=values)
| 32.728972
| 135
| 0.612793
|
4a0851eba4142ad71c3b2fc68108ed7faed07dee
| 2,607
|
py
|
Python
|
src/arch/mips/MipsSystem.py
|
YangZhou1997/GEM5_DRAMSim2
|
77aa7d479bba11be97fa455a31e4ea5f556841e0
|
[
"BSD-3-Clause"
] | 11
|
2015-03-21T13:35:06.000Z
|
2022-01-27T07:31:52.000Z
|
src/arch/mips/MipsSystem.py
|
YangZhou1997/GEM5_DRAMSim2
|
77aa7d479bba11be97fa455a31e4ea5f556841e0
|
[
"BSD-3-Clause"
] | 4
|
2015-01-13T18:27:31.000Z
|
2015-01-13T18:27:57.000Z
|
src/arch/mips/MipsSystem.py
|
YangZhou1997/GEM5_DRAMSim2
|
77aa7d479bba11be97fa455a31e4ea5f556841e0
|
[
"BSD-3-Clause"
] | 4
|
2015-03-21T13:35:24.000Z
|
2020-06-30T02:09:36.000Z
|
# -*- mode:python -*-
# Copyright (c) 2007 MIPS Technologies, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Jaidev Patwardhan
from m5.defines import buildEnv
from m5.params import *
from m5.proxy import *
from System import System
class MipsSystem(System):
    """Base SimObject configuration for a MIPS system model.

    Parameters are declared via gem5's Param machinery; their description
    strings below are the authoritative per-parameter documentation.
    """
    type = 'MipsSystem'
    console = Param.String("file that contains the console code")
    bare_iron = Param.Bool(False, "Using Bare Iron Mode?")
    hex_file_name = Param.String("test.hex","hex file that contains [address,data] pairs")
    system_type = Param.UInt64("Type of system we are emulating")
    system_rev = Param.UInt64("Revision of system we are emulating")
    # 40-bit mask applied to load addresses -- presumably matches the MIPS
    # physical address width used by the C++ SimObject; TODO confirm.
    load_addr_mask = 0xffffffffff
class LinuxMipsSystem(MipsSystem):
    """MIPS system specialized for booting Linux."""
    type = 'LinuxMipsSystem'
    system_type = 34       # magic system-type ID expected by the kernel -- TODO confirm meaning
    system_rev = 1 << 10
    # Boot CPU runs at the frequency of the first configured CPU's clock.
    boot_cpu_frequency = Param.Frequency(Self.cpu[0].clock.frequency,
                                         "boot processor frequency")
class BareIronMipsSystem(MipsSystem):
    """MIPS system for bare-metal ("bare iron") runs loaded from a hex image."""
    type = 'BareIronMipsSystem'
    bare_iron = True
    system_type = 34       # same magic system-type ID as LinuxMipsSystem -- TODO confirm meaning
    system_rev = 1 << 10
    hex_file_name = Param.String('test.hex',"hex file that contains [address,data] pairs")
| 42.737705
| 90
| 0.751438
|
4a0852332eb9cd3315d120b3997c3a8c09e9af06
| 35
|
py
|
Python
|
src/bayes.py
|
akshayaamukundan/tipr-first-assignment
|
ee06981debc79166f60f92f7943d61131d3c20a6
|
[
"MIT"
] | null | null | null |
src/bayes.py
|
akshayaamukundan/tipr-first-assignment
|
ee06981debc79166f60f92f7943d61131d3c20a6
|
[
"MIT"
] | null | null | null |
src/bayes.py
|
akshayaamukundan/tipr-first-assignment
|
ee06981debc79166f60f92f7943d61131d3c20a6
|
[
"MIT"
] | 7
|
2019-01-24T13:02:26.000Z
|
2020-11-10T18:23:53.000Z
|
# Implement Bayes Classifier here!
| 17.5
| 34
| 0.8
|
4a085263f3bc756a98abbc778c8c3135ae1ff2be
| 532
|
py
|
Python
|
ch2o/tests/node/Softmax.py
|
disktnk/chainer-compiler
|
5cfd027b40ea6e4abf73eb42be70b4fba74d1cde
|
[
"MIT"
] | null | null | null |
ch2o/tests/node/Softmax.py
|
disktnk/chainer-compiler
|
5cfd027b40ea6e4abf73eb42be70b4fba74d1cde
|
[
"MIT"
] | null | null | null |
ch2o/tests/node/Softmax.py
|
disktnk/chainer-compiler
|
5cfd027b40ea6e4abf73eb42be70b4fba74d1cde
|
[
"MIT"
] | null | null | null |
# coding: utf-8
import numpy as np
import chainer
import chainer.functions as F
class Softmax(chainer.Chain):
    """Minimal chain whose forward pass applies F.softmax to its input."""

    def forward(self, x):
        normalized = F.softmax(x)
        return normalized
class SoftmaxAxis(chainer.Chain):
    """Chain applying F.softmax along axis 2 of its input."""

    def forward(self, x):
        normalized = F.softmax(x, axis=2)
        return normalized
# ======================================
import ch2o
if __name__ == '__main__':
    # Fixed seed so the generated ONNX test-case data is reproducible.
    np.random.seed(314)
    a = np.random.rand(3, 5, 4).astype(np.float32)
    # Emit one test case per chain; 'axis' distinguishes the second variant.
    ch2o.generate_testcase(Softmax(), [a])
    ch2o.generate_testcase(SoftmaxAxis(), [a], subname='axis')
| 18.344828
| 62
| 0.614662
|
4a0852ec5301f8480ebaacdc78e37f433c6b77f7
| 3,567
|
py
|
Python
|
Scripts/GetBackgroundProps.py
|
htung0101/bvp
|
db318b11d5c41efd59dd04038d41c03500e5c8e1
|
[
"BSD-2-Clause"
] | null | null | null |
Scripts/GetBackgroundProps.py
|
htung0101/bvp
|
db318b11d5c41efd59dd04038d41c03500e5c8e1
|
[
"BSD-2-Clause"
] | null | null | null |
Scripts/GetBackgroundProps.py
|
htung0101/bvp
|
db318b11d5c41efd59dd04038d41c03500e5c8e1
|
[
"BSD-2-Clause"
] | null | null | null |
"""
.B.lender .V.ision .P.roject file operation
Gets properties for all backgrounds in a .blend file. Stores properties in a list
of dictionaries (one dict for each background (group) in the file), and saves that
list in a pickle (.pik) file with the same name as the .blend file.
These .pik files are loaded by the bvpLibrary class.
dictionaries are of the form:
{
'fname':'/path/to/Category_Blah.blend',
'name':'BG_001_Whatever',
'semantic_category':['outside','natural']
'real_world_size':100.000, # size of whole space in meters
'lens':50., # focal length for scene camera, in mm
'nVertices':1000,
'nFaces':900,
'obConstraints':bvpObConstraints(), # Derived from empty objects in the scene
'CamConstraint':bvpCamConstraints(),
'obstacles':None # To come!
'obSemanticCat':'all', ## List of object categories that can (reasonably) populate this scene
'sky_semantic_category': 'all', ## List of sky categories that can go with this background.
'obstacles':None, ## TO DO! background objects ##
}
ML 2012.02
"""
# Imports
import bpy,bvp,os,re
from bvp.utils.basics import savePik
from bvp.utils.blender import GetConstr

d = []
fName = os.path.split(bpy.data.filepath)[-1]
# Category is encoded in the file name as "Category_<Name>...".
# (Fixed: the character class previously contained stray commas, so commas
# in a name would also have matched.)
BaseCat = re.search(r'(?<=Category_)[A-Za-z0-9]*', fName).group()
Grp = [g for g in bpy.data.groups if 'BG' in g.name]  # Exclude other groups!
for G in Grp:
    # Each background group carries one EMPTY object holding its metadata
    # as Blender ID custom properties.
    gOb = [g for g in G.objects if g.type == "EMPTY"][0]
    Obst = [g for g in G.objects if g.type == "MESH" and 'obst' in g.name.lower()]
    print(Obst)
    # Semantic category of background; default to the file-level category.
    # Missing custom properties raise KeyError (narrowed from bare except).
    try:
        semCat = gOb['semantic_category'].split(',')
    except KeyError:
        semCat = [BaseCat.lower()]
    # Add file title category to list of categories, if not present:
    if not semCat[0].lower() == BaseCat.lower():
        semCat = [BaseCat.lower()] + semCat
    # Allowable semantic categories for objects / skies
    try:
        obCat = gOb['ObjectSemanticCat'].split(',')
    except KeyError:
        obCat = ['all']
    try:
        skyCat = gOb['sky_semantic_category'].split(',')
    except KeyError:
        skyCat = ['all']
    # Camera & object position constraints
    if len([x for x in G.objects if x.type == 'EMPTY']) > 0:
        try:
            TF = bvp.Settings['LibDefaults']['LockZtoFloor']
            camConstr, obConstr = GetConstr(G, LockZtoFloor=TF)
        except Exception:
            # Constraint empties missing or malformed -- fall back to None.
            camConstr = None  # Size=...
            obConstr = None  # Size=...
    else:
        # Needs modification! defaults should depend on real world size / size
        # of floor mesh / something... OR: simply raise an error, and demand
        # that all files have position constraints.
        camConstr = bvp.CamConstraint()  # Size=...
        obConstr = bvp.bvpObConstraint()  # Size=...
    # BUG FIX: a trailing comma previously turned rws into a 1-tuple.
    try:
        rws = gOb['RealWorldSize']  # of the whole space, in meters
    except KeyError:
        rws = 100.
    try:
        Lens = gOb['Lens']
    except KeyError:
        Lens = 50.
    d.append(dict(
        fname=bpy.data.filepath,
        name=G.name,
        semantic_category=semCat,
        real_world_size=rws,
        lens=Lens,
        nVertices=sum([len(oo.data.vertices) for oo in G.objects if oo.type == 'MESH']),
        nFaces=sum([len(oo.data.polygons) for oo in G.objects if oo.type == 'MESH']),
        obConstraints=obConstr,
        CamConstraint=camConstr,
        obSemanticCat=obCat,  ## List of object categories that can (reasonably) populate this scene
        sky_semantic_category=skyCat,  ## List of sky categories that can go with this background.
        obstacles=[bvp.Object(pos3D=list(o.location), size3D=max(o.dimensions)) for o in Obst],  ## To come! ##
    ))
# One pickle per .blend file, alongside it, consumed by bvpLibrary.
sName = bpy.data.filepath.replace('.blend', '.pik')
savePik(d, sName)
| 33.650943
| 104
| 0.694701
|
4a0852f5801ee4dc931d0fc2831537ae812e5cc1
| 560
|
py
|
Python
|
Curso Python/Aula08/EstruturaWhile/EstruturaDeRepeticaoWhile.py
|
ElHa07/Python
|
d8014948a6472daa3dd0c9be5e536fc79742f02e
|
[
"MIT"
] | null | null | null |
Curso Python/Aula08/EstruturaWhile/EstruturaDeRepeticaoWhile.py
|
ElHa07/Python
|
d8014948a6472daa3dd0c9be5e536fc79742f02e
|
[
"MIT"
] | null | null | null |
Curso Python/Aula08/EstruturaWhile/EstruturaDeRepeticaoWhile.py
|
ElHa07/Python
|
d8014948a6472daa3dd0c9be5e536fc79742f02e
|
[
"MIT"
] | null | null | null |
# While repetition structure
# Repetition methods
# First repetition method: FOR
# FOR cannot be used when the number of iterations is unknown in advance.
#for c in range(0, 10):
#    print(c)
#print('FIM!')
# Second repetition method: WHILE
# WHILE covers both situations: when the number of iterations is known
# and when it is not.
#c = 1
#while c < 10:
#    print(c)
#    c += 1
#print('FIM!')
r = 'S'
# Keep reading integers until the user answers anything other than 'S' (yes).
while r == 'S':
    n = int(input('Digite um valor: '))
    r = str(input('Quer continuar [S/N] ? ')).upper()
print('FIM!')
| 23.333333
| 112
| 0.655357
|
4a08538deb777e00d544e825888b990fa3637e7f
| 1,826
|
py
|
Python
|
py/g1/apps/tests/test_utils.py
|
clchiou/garage
|
446ff34f86cdbd114b09b643da44988cf5d027a3
|
[
"MIT"
] | 3
|
2016-01-04T06:28:52.000Z
|
2020-09-20T13:18:40.000Z
|
py/g1/apps/tests/test_utils.py
|
clchiou/garage
|
446ff34f86cdbd114b09b643da44988cf5d027a3
|
[
"MIT"
] | null | null | null |
py/g1/apps/tests/test_utils.py
|
clchiou/garage
|
446ff34f86cdbd114b09b643da44988cf5d027a3
|
[
"MIT"
] | null | null | null |
import unittest
import functools
from g1.apps import utils
class GetAnnotationsTest(unittest.TestCase):
    """Tests for utils.get_annotations over plain functions, classes,
    callable instances, and functools.partial wrappers."""

    def test_get_annotations(self):

        class Annotated:

            def __init__(self, p: 'P'):
                pass

            def __call__(self, q: 'Q') -> 'R':
                pass

        def annotated(x: 'X') -> 'Y':
            del x  # Unused.

        cases = [
            (annotated, {'x': 'X', 'return': 'Y'}),
            (Annotated, {'p': 'P'}),
            (Annotated(0), {'q': 'Q', 'return': 'R'}),
            (functools.partial(annotated), {'x': 'X', 'return': 'Y'}),
            (functools.partial(Annotated), {'p': 'P'}),
            (functools.partial(Annotated(0)), {'q': 'Q', 'return': 'R'}),
        ]
        for func, expected in cases:
            with self.subTest(func):
                self.assertEqual(utils.get_annotations(func), expected)
        # Sanity check against the interpreter's own bookkeeping.
        self.assertEqual(
            utils.get_annotations(annotated), annotated.__annotations__)

    def test_no_annotation(self):

        class Bare:

            def __init__(self, p):
                pass

            def __call__(self, q):
                pass

        def bare(x):
            del x  # Unused.

        for func in [
            bare,
            Bare,
            Bare(0),
            functools.partial(bare),
            functools.partial(Bare),
            functools.partial(Bare(0)),
        ]:
            with self.subTest(func):
                self.assertEqual(utils.get_annotations(func), {})
        self.assertEqual(utils.get_annotations(bare), bare.__annotations__)
if __name__ == '__main__':
    unittest.main()  # discover and run the tests when executed directly
| 22.54321
| 77
| 0.427163
|
4a0853dcf1d4a80590d92542ed01606f41e709bf
| 2,520
|
bzl
|
Python
|
test/starlark_tests/rules/dsyms_test.bzl
|
BalestraPatrick/rules_apple
|
ae2246ebda88e6573a8290ab1f0f4f00fe4c07f2
|
[
"Apache-2.0"
] | 3
|
2020-11-30T15:35:37.000Z
|
2022-01-06T14:17:18.000Z
|
test/starlark_tests/rules/dsyms_test.bzl
|
BalestraPatrick/rules_apple
|
ae2246ebda88e6573a8290ab1f0f4f00fe4c07f2
|
[
"Apache-2.0"
] | 54
|
2020-06-23T17:34:04.000Z
|
2022-03-31T02:04:06.000Z
|
test/starlark_tests/rules/dsyms_test.bzl
|
BalestraPatrick/rules_apple
|
ae2246ebda88e6573a8290ab1f0f4f00fe4c07f2
|
[
"Apache-2.0"
] | 12
|
2020-07-14T23:59:57.000Z
|
2022-03-22T09:59:18.000Z
|
# Copyright 2019 The Bazel Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Starlark test rules for debug symbols."""
load(
"@build_bazel_rules_apple//apple:providers.bzl",
"AppleBundleInfo",
)
load(
"@bazel_skylib//lib:paths.bzl",
"paths",
)
load(
"@bazel_skylib//lib:unittest.bzl",
"analysistest",
"asserts",
)
def _dsyms_test_impl(ctx):
    """Implementation of the dsyms_test rule.

    For every bundle listed in `expected_dsyms`, asserts that the target's
    "dsyms" output group contains both the dSYM Info.plist and the DWARF
    binary slice for the selected architecture.
    """
    env = analysistest.begin(ctx)
    target_under_test = ctx.attr.target_under_test[0]
    platform_type = target_under_test[AppleBundleInfo].platform_type

    # The DWARF file name embeds the architecture: watchOS targets here use
    # i386, everything else x86_64 (simulator slices, presumably -- confirm).
    if platform_type == "watchos":
        architecture = "i386"
    else:
        architecture = "x86_64"

    # Short paths of everything in the "dsyms" output group, as a set-like dict.
    outputs = {
        x.short_path: None
        for x in target_under_test[OutputGroupInfo]["dsyms"].to_list()
    }

    package = target_under_test.label.package
    expected_infoplists = [
        "{0}/{1}.dSYM/Contents/Info.plist".format(package, x)
        for x in ctx.attr.expected_dsyms
    ]

    # <bundle>.dSYM/Contents/Resources/DWARF/<name-without-extension>_<arch>
    expected_binaries = [
        "{0}/{1}.dSYM/Contents/Resources/DWARF/{2}_{3}".format(
            package,
            x,
            paths.split_extension(x)[0],
            architecture,
        )
        for x in ctx.attr.expected_dsyms
    ]

    for expected in expected_infoplists + expected_binaries:
        asserts.true(
            env,
            expected in outputs,
            msg = "Expected\n\n{0}\n\nto be built. Contents were:\n\n{1}\n\n".format(
                expected,
                "\n".join(outputs.keys()),
            ),
        )
    return analysistest.end(env)
# Analysis-time test rule: forces dSYM generation via --apple_generate_dsym
# and verifies the expected dSYM bundles appear in the "dsyms" output group.
dsyms_test = analysistest.make(
    _dsyms_test_impl,
    attrs = {
        "expected_dsyms": attr.string_list(
            mandatory = True,
            doc = """
List of bundle names in the format <bundle_name>.<bundle_extension> to verify that dSYMs bundles are
created for them.
""",
        ),
    },
    config_settings = {
        "//command_line_option:apple_generate_dsym": "true",
    },
)
| 27.692308
| 100
| 0.637302
|
4a0854e25d9d4939e0ae5ae85587324108055062
| 4,939
|
py
|
Python
|
test/functional/interface_http.py
|
mrmikeo/MainNet-critical-fix
|
4c1b63af4dad9850fb99ed85d8a015a9440f6654
|
[
"MIT"
] | 2
|
2020-10-28T19:46:40.000Z
|
2021-08-15T13:22:54.000Z
|
test/functional/interface_http.py
|
mrmikeo/MainNet-critical-fix
|
4c1b63af4dad9850fb99ed85d8a015a9440f6654
|
[
"MIT"
] | null | null | null |
test/functional/interface_http.py
|
mrmikeo/MainNet-critical-fix
|
4c1b63af4dad9850fb99ed85d8a015a9440f6654
|
[
"MIT"
] | 2
|
2020-07-06T19:59:39.000Z
|
2020-09-07T05:42:48.000Z
|
#!/usr/bin/env python3
# Copyright (c) 2014-2016 The Bitcoin Core developers
# Copyright (c) 2017-2020 The Zelantus Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the RPC HTTP basics."""
import http.client
import urllib.parse
from test_framework.test_framework import ZelantusTestFramework
from test_framework.util import str_to_b64str, assert_equal
# noinspection PyUnresolvedReferences
class HTTPBasicsTest (ZelantusTestFramework):
    """Low-level checks of the RPC server's HTTP/1.1 behaviour.

    Covers persistent (keep-alive) connections, explicit "Connection: close",
    a node started with keep-alive disabled, and rejection of oversized
    request URIs.
    """
    def set_test_params(self):
        # Three nodes: node1 is expected to run with keep-alive disabled,
        # node2 with default settings (see run_test).
        self.num_nodes = 3
    def setup_network(self):
        self.setup_nodes()  # no inter-node connections needed for HTTP tests
    def run_test(self):
        #################################################
        # lowlevel check for http persistent connection #
        #################################################
        url = urllib.parse.urlparse(self.nodes[0].url)
        authpair = url.username + ':' + url.password
        headers = {"Authorization": "Basic " + str_to_b64str(authpair)}
        conn = http.client.HTTPConnection(url.hostname, url.port)
        conn.connect()
        conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
        out1 = conn.getresponse().read()
        assert(b'"error":null' in out1)
        assert(conn.sock is not None) #according to http/1.1 connection must still be open!
        #send 2nd request without closing connection
        conn.request('POST', '/', '{"method": "getchaintips"}', headers)
        out1 = conn.getresponse().read()
        assert(b'"error":null' in out1) #must also respond with a correct json-rpc message
        assert(conn.sock is not None) #according to http/1.1 connection must still be open!
        conn.close()
        #same behaviour expected with an explicit keep-alive header, since that is the default
        headers = {"Authorization": "Basic " + str_to_b64str(authpair), "Connection": "keep-alive"}
        conn = http.client.HTTPConnection(url.hostname, url.port)
        conn.connect()
        conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
        out1 = conn.getresponse().read()
        assert(b'"error":null' in out1)
        assert(conn.sock is not None) #according to http/1.1 connection must still be open!
        #send 2nd request without closing connection
        conn.request('POST', '/', '{"method": "getchaintips"}', headers)
        out1 = conn.getresponse().read()
        assert(b'"error":null' in out1) #must also respond with a correct json-rpc message
        assert(conn.sock is not None) #according to http/1.1 connection must still be open!
        conn.close()
        #now do the same with "Connection: close"
        headers = {"Authorization": "Basic " + str_to_b64str(authpair), "Connection":"close"}
        conn = http.client.HTTPConnection(url.hostname, url.port)
        conn.connect()
        conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
        out1 = conn.getresponse().read()
        assert(b'"error":null' in out1)
        assert(conn.sock is None) #now the connection must be closed after the response
        #node1 (2nd node) is running with disabled keep-alive option
        urlNode1 = urllib.parse.urlparse(self.nodes[1].url)
        authpair = urlNode1.username + ':' + urlNode1.password
        headers = {"Authorization": "Basic " + str_to_b64str(authpair)}
        conn = http.client.HTTPConnection(urlNode1.hostname, urlNode1.port)
        conn.connect()
        conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
        out1 = conn.getresponse().read()
        assert(b'"error":null' in out1)
        #node2 (third node) is running with standard keep-alive parameters which means keep-alive is on
        urlNode2 = urllib.parse.urlparse(self.nodes[2].url)
        authpair = urlNode2.username + ':' + urlNode2.password
        headers = {"Authorization": "Basic " + str_to_b64str(authpair)}
        conn = http.client.HTTPConnection(urlNode2.hostname, urlNode2.port)
        conn.connect()
        conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
        out1 = conn.getresponse().read()
        assert(b'"error":null' in out1)
        assert(conn.sock is not None) #connection stays OPEN: zelantusd uses keep-alive by default (original comment said "closed" -- that contradicted the assert)
        # Check excessive request size: 1000-char URI is merely not found...
        conn = http.client.HTTPConnection(urlNode2.hostname, urlNode2.port)
        conn.connect()
        conn.request('GET', '/' + ('x'*1000), '', headers)
        out1 = conn.getresponse()
        assert_equal(out1.status, http.client.NOT_FOUND)
        # ...while a 10000-char URI exceeds the server's limit and is rejected.
        conn = http.client.HTTPConnection(urlNode2.hostname, urlNode2.port)
        conn.connect()
        conn.request('GET', '/' + ('x'*10000), '', headers)
        out1 = conn.getresponse()
        assert_equal(out1.status, http.client.BAD_REQUEST)
if __name__ == '__main__':
    HTTPBasicsTest ().main ()  # run via the test framework entry point
| 44.495495
| 115
| 0.640211
|
4a0855b2ec96a58aba50bc6364d6f1c4410926fe
| 6,546
|
py
|
Python
|
python_scripts/read_genotypes.py
|
athro/openSNPAnalysis
|
c5eb9f83bf218b6cbfb1e7fbe48682ceeb1bafb0
|
[
"MIT"
] | null | null | null |
python_scripts/read_genotypes.py
|
athro/openSNPAnalysis
|
c5eb9f83bf218b6cbfb1e7fbe48682ceeb1bafb0
|
[
"MIT"
] | null | null | null |
python_scripts/read_genotypes.py
|
athro/openSNPAnalysis
|
c5eb9f83bf218b6cbfb1e7fbe48682ceeb1bafb0
|
[
"MIT"
] | null | null | null |
import csv
import sys
import os.path
import os
import glob
import re
import compress
import zipfile
import add_genotypes_to_db as db_geno
debug = True  # module-wide debug flag (not consulted in this chunk -- confirm callers)
import logging
logger_instance = logging.getLogger('openSNPAnalysis')
# Matches genotype dump filenames like:
#   user<id>_file<id>_yearofbirth_<year>_sex_<sex>.<method>.txt
file_re = re.compile('user(\d+)\_file(\d+)\_yearofbirth\_(\w+)\_sex\_(\w+)\.(\S+)\.txt')
def load_mapping(mapping_dir, mapping_name):
    """Read a two-column ';'-separated mapping file into a dict.

    Each line of ``<mapping_dir>/<mapping_name>`` must be ``from;to``.
    Returns an empty dict when the file does not exist.
    """
    path = mapping_dir + os.path.sep + mapping_name
    result = {}
    if not os.path.exists(path):
        return result
    with open(path) as handle:
        for raw_line in handle:
            source, target = raw_line.strip().split(';')
            result[source] = target
    return result
def snpify_line(a_line, mappings):
    """Returns a SNP data dictionary of id, chromosome, loc and allele vals, or returns None"""
    # Normalize: trim whitespace, drop stray quotes, collapse "---" alleles.
    line = a_line.strip().replace('"', '').replace('---', '--')
    if not line:
        return None
    # Whitespace-separated first; fall back to comma-separated.
    fields = line.split()
    if len(fields) <= 1:
        fields = line.split(',')
    # A two-character last field is a fused allele pair ("AG" -> "A", "G");
    # anything longer is malformed.
    last = fields[-1]
    if len(last) >= 2:
        if len(last) != 2:
            return None
        fields = fields[:-1] + [last[0], last[1]]
    result = None
    if 4 <= len(fields) <= 5:
        try:
            result = {
                'snp_id': fields[0],
                'chromosome': mappings['chromosome'][fields[1]],
                'location': fields[2],
                'allele1': fields[3],
            }
            if len(fields) > 4:
                result['allele2'] = fields[4]
            # Sanity check: location must be an integer; raises otherwise.
            int(result['location'])
        except Exception:
            logger_instance.debug('Error on line: %s\n' % (line,))
            result = None
    else:
        logger_instance.debug('Problems?: <<%s>>' % (fields,))
        sys.stderr.write('Problems?: <<%s>>' % (fields,))
        sys.stderr.write('\n')
    return result
def read_snp_file(file_handle, mappings):
    """Read an open genotype file and return a list of SNP data dicts.

    Header lines (starting with '#' or 'RSID') are skipped; bytes lines are
    decoded first.  Unparseable lines, or parsed lines lacking a chromosome
    or location, are dropped.  The handle is always closed on return.

    Fixes: removed the unused local ``open_possible`` and the dead
    commented-out ``seek`` call.
    """
    snp_data = []
    with file_handle:
        try:
            data = file_handle.readlines()
        except Exception as e:
            # Best effort: an unreadable/undecodable file yields an empty result.
            sys.stderr.write('Could not read in data! Exception: %s\n' % (e,))
            data = []
        for line in data:
            if isinstance(line, (bytes, bytearray)):
                line = line.decode().strip()
            if line.startswith('#') or line.startswith('RSID'):
                continue
            snp_line_data = snpify_line(line, mappings)
            # Keep only records with both chromosome and location resolved.
            if snp_line_data and snp_line_data['chromosome'] and snp_line_data['location']:
                snp_data.append(snp_line_data)
    logger_instance.info('Loaded %s snps' % (len(snp_data),))
    return snp_data
def read_snps_by_user(user_id, data_dir_genotype, mappings):
    """Return a list of (filename, method, snp_data) triples for one user.

    A user may own several genotype files; VCF and IYG exports are skipped.
    The snp_data element is a list of dicts as produced by read_snp_file.
    """
    results = []
    if not os.path.exists(data_dir_genotype):
        sys.stderr.write('The directory <<%s>> does not exist\n' % (data_dir_genotype,))
        return results
    pattern = '%s%suser%s_*.txt' % (data_dir_genotype, os.path.sep, user_id)
    candidates = [name for name in glob.glob(pattern)
                  if not ('vcf.' in name) and not ('.IYG.' in name)]
    for candidate in candidates:
        try:
            with compress.compress_open(candidate) as handle:
                snps = read_snp_file(handle, mappings)
        except zipfile.BadZipFile:
            sys.stderr.write('Bad ZIP File - contents ignored (<<%s>>)\n' % (candidate,))
        else:
            # The genotyping method is the second-to-last dot-separated
            # component of the filename. But can we determine this?
            file_method = candidate.split('.')[-2]
            results.append((candidate, file_method, snps))
    return results
if __name__ == '__main__':
    # Data layout: ../../data/{genotypes,phenotypes,annotation}
    data_dir = '..'+os.path.sep+'..'+os.path.sep+'data'
    data_dir_genotype = '%s%sgenotypes' % (data_dir,os.path.sep)
    data_dir_phenotype = '%s%sphenotypes' % (data_dir,os.path.sep)
    data_dir_annotation = '%s%sannotation' % (data_dir,os.path.sep)
    mapping_dir = "mapping"
    #example_file1 = '%s%suser972_file483_yearofbirth_unknown_sex_unknown.23andme.txt' % (data_dir_genotype,os.path.sep)
    #example_file2 = '%s%suser4468_file3062_yearofbirth_unknown_sex_unknown.ancestry.txt' % (data_dir_genotype,os.path.sep)
    #read_23andme(example_file1)
    # read_ancestry(example_file2)
    # Only the chromosome-name mapping is needed by snpify_line.
    mappings = {}
    mappings['chromosome'] = load_mapping(mapping_dir, 'chromosome')
    # Earlier experiments over other user-id ranges, kept for reference:
    #for i in [1497,125,881,1259,1111,850]:
    #for i in range(6000):
    #for i in range(2198,6000):
    for i in [77,]:
        snp_data = read_snps_by_user(i, data_dir_genotype, mappings)
        if snp_data:
            for (filename, method, genotype) in snp_data:
                print(filename, method, len(genotype))
| 123
| 0.580507
|
4a0855e1c3a12b64d3be202efa57562930c11351
| 16,534
|
py
|
Python
|
tccli/services/waf/waf_client.py
|
ws0416/tencentcloud-cli
|
0a90fa77c8be1efa30b196a3eeb31b8be1f6a325
|
[
"Apache-2.0"
] | null | null | null |
tccli/services/waf/waf_client.py
|
ws0416/tencentcloud-cli
|
0a90fa77c8be1efa30b196a3eeb31b8be1f6a325
|
[
"Apache-2.0"
] | null | null | null |
tccli/services/waf/waf_client.py
|
ws0416/tencentcloud-cli
|
0a90fa77c8be1efa30b196a3eeb31b8be1f6a325
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
import os
import json
import tccli.options_define as OptionsDefine
import tccli.format_output as FormatOutput
from tccli import __version__
from tccli.utils import Utils
from tccli.exceptions import ConfigurationError
from tencentcloud.common import credential
from tencentcloud.common.profile.http_profile import HttpProfile
from tencentcloud.common.profile.client_profile import ClientProfile
from tencentcloud.waf.v20180125 import waf_client as waf_client_v20180125
from tencentcloud.waf.v20180125 import models as models_v20180125
def doDeleteAttackDownloadRecord(args, parsed_globals):
    """CLI action: call the WAF DeleteAttackDownloadRecord API with *args* and print the formatted response."""
    g_param = parse_global_arg(parsed_globals)
    # Credentials and HTTP profile come from the resolved global CLI options.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.WafClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)  # tag requests as CLI-originated
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DeleteAttackDownloadRecordRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DeleteAttackDownloadRecord(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError as e:
        json_obj = json.loads(result.decode('utf-8')) # to_json_string may return bytes on older interpreters
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyCustomRuleStatus(args, parsed_globals):
    """CLI action: call the WAF ModifyCustomRuleStatus API with *args* and print the formatted response."""
    g_param = parse_global_arg(parsed_globals)
    # Credentials and HTTP profile come from the resolved global CLI options.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.WafClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)  # tag requests as CLI-originated
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ModifyCustomRuleStatusRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.ModifyCustomRuleStatus(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError as e:
        json_obj = json.loads(result.decode('utf-8')) # to_json_string may return bytes on older interpreters
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeCustomRules(args, parsed_globals):
    """CLI action: call the WAF DescribeCustomRules API with *args* and print the formatted response."""
    g_param = parse_global_arg(parsed_globals)
    # Credentials and HTTP profile come from the resolved global CLI options.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.WafClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)  # tag requests as CLI-originated
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeCustomRulesRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeCustomRules(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError as e:
        json_obj = json.loads(result.decode('utf-8')) # to_json_string may return bytes on older interpreters
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteDownloadRecord(args, parsed_globals):
    """CLI action: call the WAF DeleteDownloadRecord API with *args* and print the formatted response."""
    g_param = parse_global_arg(parsed_globals)
    # Credentials and HTTP profile come from the resolved global CLI options.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.WafClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)  # tag requests as CLI-originated
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DeleteDownloadRecordRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DeleteDownloadRecord(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError as e:
        json_obj = json.loads(result.decode('utf-8')) # to_json_string may return bytes on older interpreters
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doAddCustomRule(args, parsed_globals):
    """CLI action: call the WAF AddCustomRule API with *args* and print the formatted response."""
    g_param = parse_global_arg(parsed_globals)
    # Credentials and HTTP profile come from the resolved global CLI options.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.WafClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)  # tag requests as CLI-originated
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.AddCustomRuleRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.AddCustomRule(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError as e:
        json_obj = json.loads(result.decode('utf-8')) # to_json_string may return bytes on older interpreters
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteSession(args, parsed_globals):
    """CLI action: call the WAF DeleteSession API with *args* and print the formatted response."""
    g_param = parse_global_arg(parsed_globals)
    # Credentials and HTTP profile come from the resolved global CLI options.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.WafClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)  # tag requests as CLI-originated
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DeleteSessionRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DeleteSession(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError as e:
        json_obj = json.loads(result.decode('utf-8')) # to_json_string may return bytes on older interpreters
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyAccessPeriod(args, parsed_globals):
    """CLI action: call the WAF ModifyAccessPeriod API with *args* and print the formatted response."""
    g_param = parse_global_arg(parsed_globals)
    # Credentials and HTTP profile come from the resolved global CLI options.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.WafClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)  # tag requests as CLI-originated
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ModifyAccessPeriodRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.ModifyAccessPeriod(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError as e:
        json_obj = json.loads(result.decode('utf-8')) # to_json_string may return bytes on older interpreters
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeFlowTrend(args, parsed_globals):
    """CLI action: call the WAF DescribeFlowTrend API with *args* and print the formatted response."""
    g_param = parse_global_arg(parsed_globals)
    # Credentials and HTTP profile come from the resolved global CLI options.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.WafClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)  # tag requests as CLI-originated
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeFlowTrendRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeFlowTrend(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError as e:
        json_obj = json.loads(result.decode('utf-8')) # to_json_string may return bytes on older interpreters
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateAttackDownloadTask(args, parsed_globals):
    """Invoke the CreateAttackDownloadTask WAF API action and print the result.

    :param args: dict of action-specific CLI arguments, serialized into the request.
    :param parsed_globals: global CLI options (credentials, region, output, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    client_profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].WafClient(cred, g_param[OptionsDefine.Region], client_profile)
    client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[version].CreateAttackDownloadTaskRequest()
    request.from_json_string(json.dumps(args))
    result = client.CreateAttackDownloadTask(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Older SDK builds may hand back bytes; decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeUserClbWafRegions(args, parsed_globals):
    """Invoke the DescribeUserClbWafRegions WAF API action and print the result.

    :param args: dict of action-specific CLI arguments, serialized into the request.
    :param parsed_globals: global CLI options (credentials, region, output, ...).
    """
    opts = parse_global_arg(parsed_globals)
    timeout_opt = opts[OptionsDefine.Timeout]
    request_timeout = 60 if timeout_opt is None else int(timeout_opt)
    http_profile = HttpProfile(
        reqTimeout=request_timeout,
        reqMethod="POST",
        endpoint=opts[OptionsDefine.Endpoint],
        proxy=opts[OptionsDefine.HttpsProxy]
    )
    cred = credential.Credential(
        opts[OptionsDefine.SecretId], opts[OptionsDefine.SecretKey], opts[OptionsDefine.Token]
    )
    client_profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    api_version = opts[OptionsDefine.Version]
    client = CLIENT_MAP[api_version].WafClient(cred, opts[OptionsDefine.Region], client_profile)
    client._sdkVersion += "_CLI_" + __version__
    request = MODELS_MAP[api_version].DescribeUserClbWafRegionsRequest()
    request.from_json_string(json.dumps(args))
    raw = client.DescribeUserClbWafRegions(request).to_json_string()
    try:
        json_obj = json.loads(raw)
    except TypeError:
        # Older SDK builds may hand back bytes; decode before parsing.
        json_obj = json.loads(raw.decode('utf-8'))
    FormatOutput.output("action", json_obj, opts[OptionsDefine.Output], opts[OptionsDefine.Filter])
# API version string -> generated client module.
CLIENT_MAP = {
    "v20180125": waf_client_v20180125,
}

# API version string -> generated request/response model module.
MODELS_MAP = {
    "v20180125": models_v20180125,
}

# CLI action name -> handler function dispatch table.
ACTION_MAP = {
    "DeleteAttackDownloadRecord": doDeleteAttackDownloadRecord,
    "ModifyCustomRuleStatus": doModifyCustomRuleStatus,
    "DescribeCustomRules": doDescribeCustomRules,
    "DeleteDownloadRecord": doDeleteDownloadRecord,
    "AddCustomRule": doAddCustomRule,
    "DeleteSession": doDeleteSession,
    "ModifyAccessPeriod": doModifyAccessPeriod,
    "DescribeFlowTrend": doDescribeFlowTrend,
    "CreateAttackDownloadTask": doCreateAttackDownloadTask,
    "DescribeUserClbWafRegions": doDescribeUserClbWafRegions,
}

# API versions this command module supports; validated in parse_global_arg.
AVAILABLE_VERSION_LIST = [
    "v20180125",
]
def action_caller():
    """Return the action-name -> handler dispatch table for this service."""
    return ACTION_MAP
def parse_global_arg(parsed_globals):
    """Resolve global CLI options against profile config files and env vars.

    Fills any option left unset on the command line from, in order of
    precedence: the profile's .credential/.configure files under ~/.tccli,
    then (only when no explicit profile was given) the TENCENTCLOUD
    environment variables. Raises ConfigurationError when a required
    option cannot be resolved.
    """
    g_param = parsed_globals
    is_exist_profile = True
    if not parsed_globals["profile"]:
        # No explicit profile: fall back to "default" and allow env overrides.
        is_exist_profile = False
        g_param["profile"] = "default"
    configure_path = os.path.join(os.path.expanduser("~"), ".tccli")
    is_conf_exist, conf_path = Utils.file_existed(configure_path, g_param["profile"] + ".configure")
    is_cred_exist, cred_path = Utils.file_existed(configure_path, g_param["profile"] + ".credential")
    conf = {}
    cred = {}
    if is_conf_exist:
        conf = Utils.load_json_msg(conf_path)
    if is_cred_exist:
        cred = Utils.load_json_msg(cred_path)
    if not (isinstance(conf, dict) and isinstance(cred, dict)):
        raise ConfigurationError(
            "file: %s or %s is not json format"
            % (g_param["profile"] + ".configure", g_param["profile"] + ".credential"))
    if OptionsDefine.Token not in cred:
        cred[OptionsDefine.Token] = None
    if not is_exist_profile:
        # Environment variables override file values only for the implicit
        # default profile; an explicitly chosen profile always wins.
        if os.environ.get(OptionsDefine.ENV_SECRET_ID) and os.environ.get(OptionsDefine.ENV_SECRET_KEY):
            cred[OptionsDefine.SecretId] = os.environ.get(OptionsDefine.ENV_SECRET_ID)
            cred[OptionsDefine.SecretKey] = os.environ.get(OptionsDefine.ENV_SECRET_KEY)
            cred[OptionsDefine.Token] = os.environ.get(OptionsDefine.ENV_TOKEN)
        if os.environ.get(OptionsDefine.ENV_REGION):
            conf[OptionsDefine.Region] = os.environ.get(OptionsDefine.ENV_REGION)
    # Back-fill any still-unset options from the resolved cred/conf dicts.
    for param in g_param.keys():
        if g_param[param] is None:
            if param in [OptionsDefine.SecretKey, OptionsDefine.SecretId, OptionsDefine.Token]:
                if param in cred:
                    g_param[param] = cred[param]
                else:
                    raise ConfigurationError("%s is invalid" % param)
            elif param in [OptionsDefine.Region, OptionsDefine.Output]:
                if param in conf:
                    g_param[param] = conf[param]
                else:
                    raise ConfigurationError("%s is invalid" % param)
    try:
        # Version like "2018-01-25" is normalized to module key "v20180125".
        if g_param[OptionsDefine.ServiceVersion]:
            g_param[OptionsDefine.Version] = "v" + g_param[OptionsDefine.ServiceVersion].replace('-', '')
        else:
            version = conf["waf"][OptionsDefine.Version]
            g_param[OptionsDefine.Version] = "v" + version.replace('-', '')
        if g_param[OptionsDefine.Endpoint] is None:
            g_param[OptionsDefine.Endpoint] = conf["waf"][OptionsDefine.Endpoint]
    except Exception as err:
        raise ConfigurationError("config file:%s error, %s" % (conf_path, str(err)))
    if g_param[OptionsDefine.Version] not in AVAILABLE_VERSION_LIST:
        raise Exception("available versions: %s" % " ".join(AVAILABLE_VERSION_LIST))
    return g_param
| 41.647355
| 105
| 0.71985
|
4a0855e9b02835fce4ade0039d3be3b8db5a488b
| 818
|
py
|
Python
|
allocation/utils/propositional_logic/semantics.py
|
gabrielpereiram10/allocation
|
24bc33ca4b3377ebb02f9c4d2f6a878aa46bac14
|
[
"MIT"
] | null | null | null |
allocation/utils/propositional_logic/semantics.py
|
gabrielpereiram10/allocation
|
24bc33ca4b3377ebb02f9c4d2f6a878aa46bac14
|
[
"MIT"
] | null | null | null |
allocation/utils/propositional_logic/semantics.py
|
gabrielpereiram10/allocation
|
24bc33ca4b3377ebb02f9c4d2f6a878aa46bac14
|
[
"MIT"
] | null | null | null |
from allocation.entities.formula import *
from allocation.protocols.types import Interpretation
def truth_value(formula: Formula, interpretation: Interpretation) -> Union[bool, None]:
    """
    Determines the true value of a formula for an interpretation (evaluation) complete or partial.
    An interpretation can be defined as a set of tuples. For example, {(Atom('p'), True)}.
    """
    # Base case: look the atom's value up in the interpretation.
    if isinstance(formula, Atom):
        return formula.get_value(interpretation)
    # NOTE(review): the recursive results (bool or None for a partial
    # interpretation) are wrapped back into the connective constructors just
    # to reuse their truth_value() logic — this presumably means Not /
    # BinaryConnective accept raw truth values as operands; confirm.
    if isinstance(formula, Not):
        return Not(
            truth_value(formula.inner, interpretation)
        ).truth_value()
    if isinstance(formula, BinaryConnective):
        # type(formula) rebuilds the same connective (And/Or/...) over the
        # evaluated sub-results.
        return type(formula)(
            truth_value(formula.left, interpretation),
            truth_value(formula.right, interpretation)
        ).truth_value()
    # Implicitly returns None for any unrecognized formula type.
| 37.181818
| 98
| 0.690709
|
4a0857cf2c70973ce04e9386762d7b2dabb49c5b
| 334
|
py
|
Python
|
extra_discount/config/docs.py
|
riconova92/extra_discount
|
bc866ebb5e4ea147b8802ac650c8bacae97e0268
|
[
"MIT"
] | null | null | null |
extra_discount/config/docs.py
|
riconova92/extra_discount
|
bc866ebb5e4ea147b8802ac650c8bacae97e0268
|
[
"MIT"
] | null | null | null |
extra_discount/config/docs.py
|
riconova92/extra_discount
|
bc866ebb5e4ea147b8802ac650c8bacae97e0268
|
[
"MIT"
] | null | null | null |
"""
Configuration for docs
"""
# source_link = "https://github.com/[org_name]/extra_discount"
# docs_base_url = "https://[org_name].github.io/extra_discount"
# headline = "App that does everything"
# sub_heading = "Yes, you got that right the first time, everything"
def get_context(context):
    """Inject the brand name shown in the docs header into the page context."""
    setattr(context, "brand_html", "Extra Discount")
| 27.833333
| 68
| 0.736527
|
4a0857f78e2c9697cb38cd4dced246e12dcb62b3
| 4,697
|
py
|
Python
|
sdk/python/pulumi_azure/mariadb/database.py
|
stack72/pulumi-azure
|
18245b4e74abbd3f768f9eda67adb1df609ff32e
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure/mariadb/database.py
|
stack72/pulumi-azure
|
18245b4e74abbd3f768f9eda67adb1df609ff32e
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure/mariadb/database.py
|
stack72/pulumi-azure
|
18245b4e74abbd3f768f9eda67adb1df609ff32e
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import json
import warnings
import pulumi
import pulumi.runtime
from .. import utilities, tables
class Database(pulumi.CustomResource):
    charset: pulumi.Output[str]
    """
    Specifies the Charset for the MariaDB Database, which needs [to be a valid MariaDB Charset](https://mariadb.com/kb/en/library/setting-character-sets-and-collations). Changing this forces a new resource to be created.
    """
    collation: pulumi.Output[str]
    """
    Specifies the Collation for the MariaDB Database, which needs [to be a valid MariaDB Collation](https://mariadb.com/kb/en/library/setting-character-sets-and-collations). Changing this forces a new resource to be created.
    """
    name: pulumi.Output[str]
    """
    Specifies the name of the MariaDB Database, which needs [to be a valid MariaDB identifier](https://mariadb.com/kb/en/library/identifier-names/). Changing this forces a
    new resource to be created.
    """
    resource_group_name: pulumi.Output[str]
    """
    The name of the resource group in which the MariaDB Server exists. Changing this forces a new resource to be created.
    """
    server_name: pulumi.Output[str]
    """
    Specifies the name of the MariaDB Server. Changing this forces a new resource to be created.
    """
    def __init__(__self__, resource_name, opts=None, charset=None, collation=None, name=None, resource_group_name=None, server_name=None, __name__=None, __opts__=None):
        """
        Manages a MariaDB Database within a MariaDB Server

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] charset: Specifies the Charset for the MariaDB Database, which needs [to be a valid MariaDB Charset](https://mariadb.com/kb/en/library/setting-character-sets-and-collations). Changing this forces a new resource to be created.
        :param pulumi.Input[str] collation: Specifies the Collation for the MariaDB Database, which needs [to be a valid MariaDB Collation](https://mariadb.com/kb/en/library/setting-character-sets-and-collations). Changing this forces a new resource to be created.
        :param pulumi.Input[str] name: Specifies the name of the MariaDB Database, which needs [to be a valid MariaDB identifier](https://mariadb.com/kb/en/library/identifier-names/). Changing this forces a
        new resource to be created.
        :param pulumi.Input[str] resource_group_name: The name of the resource group in which the MariaDB Server exists. Changing this forces a new resource to be created.
        :param pulumi.Input[str] server_name: Specifies the name of the MariaDB Server. Changing this forces a new resource to be created.
        """
        # Legacy keyword arguments kept for backward compatibility only.
        if __name__ is not None:
            warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
            resource_name = __name__
        if __opts__ is not None:
            warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
            opts = __opts__
        if not resource_name:
            raise TypeError('Missing resource name argument (for URN creation)')
        if not isinstance(resource_name, str):
            raise TypeError('Expected resource name to be a string')
        if opts and not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')

        # Collect validated inputs; charset, collation, resource_group_name
        # and server_name are required by the provider schema.
        __props__ = dict()

        if charset is None:
            raise TypeError("Missing required property 'charset'")
        __props__['charset'] = charset
        if collation is None:
            raise TypeError("Missing required property 'collation'")
        __props__['collation'] = collation
        __props__['name'] = name
        if resource_group_name is None:
            raise TypeError("Missing required property 'resource_group_name'")
        __props__['resource_group_name'] = resource_group_name
        if server_name is None:
            raise TypeError("Missing required property 'server_name'")
        __props__['server_name'] = server_name
        super(Database, __self__).__init__(
            'azure:mariadb/database:Database',
            resource_name,
            __props__,
            opts)

    def translate_output_property(self, prop):
        """Map provider camelCase output names to Python snake_case."""
        return tables._CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop

    def translate_input_property(self, prop):
        """Map Python snake_case input names to provider camelCase."""
        return tables._SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
| 51.054348
| 264
| 0.70066
|
4a0858d2721a9f2e23208d59a643f22d082dd970
| 6,054
|
py
|
Python
|
research/table_extraction_using_block_detection/code/pdf2csv.py
|
cbgaindia/parsers
|
131498adf552ebb82b5c60b6cac3293042c75c7d
|
[
"MIT"
] | 15
|
2015-12-05T09:41:41.000Z
|
2021-05-27T13:27:36.000Z
|
research/table_extraction_using_block_detection/code/pdf2csv.py
|
cbgaindia/parsers
|
131498adf552ebb82b5c60b6cac3293042c75c7d
|
[
"MIT"
] | 31
|
2016-04-06T11:02:36.000Z
|
2021-12-13T19:43:42.000Z
|
research/table_extraction_using_block_detection/code/pdf2csv.py
|
cbgaindia/parsers
|
131498adf552ebb82b5c60b6cac3293042c75c7d
|
[
"MIT"
] | 7
|
2018-04-30T13:34:27.000Z
|
2021-01-02T09:07:34.000Z
|
'''The execution script to convert a folder of ddg pdfs to ddg csvs
'''
import os
import subprocess
import argparse
import cv2
import pandas as pd
from image_to_block_feature_generator import (BlockTextualFeatureGenerator,
filter_unwanted_blocks,
separate_blocks)
from block_labeler import (BlockLabeler,
mark_tables_using_titles,
combine_headers,
combine_horizontal,
remove_false_headers)
from labelled_blocks_to_csv import BlocksToCSV
from demand_draft_generator import combine_tables
from PyPDF2 import PdfFileReader
def fill_major_head(row):
    '''Copy head_of_account into major_head when major_head is missing.

    Mutates and returns the row (intended for DataFrame.apply with axis=1).
    '''
    major_missing = pd.isnull(row['major_head'])
    if major_missing and pd.notnull(row['head_of_account']):
        row['major_head'] = row['head_of_account']
    return row
def get_page_width_height(pdf, page_num):
    '''Return (width, height) in pdf points for one page, honouring rotation.

    A page marked /Rotate 90 has its MediaBox dimensions swapped.
    '''
    page = pdf.getPage(page_num)
    media_box = page['/MediaBox']
    rotated = '/Rotate' in page and page['/Rotate'] == 90
    if rotated:
        return float(media_box[3]), float(media_box[2])
    return float(media_box[2]), float(media_box[3])
def get_page_image_from_pdf(pdf_file_path, page_num, image_file_name):
    '''Render one pdf page to an image file via ImageMagick and load it.

    Returns the image as a grayscale array (cv2.imread flag 0).
    Raises subprocess.CalledProcessError if `convert` fails.
    '''
    # Pass an argument list with shell=False (the default) instead of an
    # interpolated shell string: filenames containing quotes, spaces or
    # shell metacharacters can no longer break, or inject into, the command.
    subprocess.check_output([
        'convert', '-density', '300',
        '%s[%s]' % (pdf_file_path, page_num),
        image_file_name,
    ])
    return cv2.imread(image_file_name, 0)
def check_and_create_folder(path):
    '''Ensure that `path` exists as a directory, creating parents as needed.

    Always returns True. Raises FileExistsError if `path` exists but is not
    a directory (same as the previous isdir+makedirs behavior).
    '''
    # exist_ok=True removes the check-then-create race (TOCTOU) the old
    # `if not os.path.isdir(path): os.makedirs(path)` pattern had when two
    # processes logged to the same folder.
    os.makedirs(path, exist_ok=True)
    return True
def save_binary_image(blocked_image, save_path):
    '''Write a 0/1 binary image to disk, scaling foreground pixels to 255.

    cv2's writers expect 0-255 intensities; the image is modified in place.
    Always returns True.
    '''
    foreground = blocked_image == 1
    blocked_image[foreground] = 255
    cv2.imwrite(save_path, blocked_image)
    return True
def process_folder(input_folder_path, output_folder_path):
    '''Process a folder of demand draft pdfs and store the output in the output
    folder.

    For each pdf: render every page after the 2-page index to an image,
    detect and label blocks, write per-page log images/CSVs, then combine
    the detailed tables into demand drafts.
    '''
    pdf_files = os.listdir(input_folder_path)
    for pdf_file_name in pdf_files:
        # BUG FIX: was `pdf_file_name.strip('.pdf')`, which strips any
        # leading/trailing '.', 'p', 'd', 'f' characters (e.g. 'data.pdf'
        # -> 'ata'); splitext drops only the extension.
        target_folder = os.path.join(output_folder_path,
                                     os.path.splitext(pdf_file_name)[0])
        tables = pd.DataFrame()
        pdf_file_path = os.path.join(input_folder_path, pdf_file_name)
        # BUG FIX: the file handle was previously opened inline and leaked.
        with open(pdf_file_path, 'rb') as pdf_fh:
            pdf = PdfFileReader(pdf_fh)
            num_pages = pdf.getNumPages()
            # skip first 2 pages to skip the index
            # TODO: move this to config.
            for page_num in range(2, num_pages):
                page_width, page_height = get_page_width_height(pdf, page_num)
                img_page = get_page_image_from_pdf(pdf_file_path, page_num, 'tmp.png')
                image_height, image_width = img_page.shape
                # Ratios map pixel coordinates back to pdf points.
                horizontal_ratio = page_width / image_width
                vertical_ratio = page_height / image_height
                dilate = True
                feature_extractor = BlockTextualFeatureGenerator(
                    img_page, horizontal_ratio, vertical_ratio, page_num,
                    pdf_file_path, (29, 20),
                    [filter_unwanted_blocks, separate_blocks],
                    dilate)
                block_features = feature_extractor.generate()
                images_log_folder = os.path.join(target_folder, 'log_images')
                check_and_create_folder(images_log_folder)
                save_binary_image(feature_extractor.img_with_blocks,
                                  '{0}/{1}.png'.format(images_log_folder, page_num))
                # blank page check: pages with very few blocks carry no table.
                if len(block_features.index) > 3:
                    block_features_with_labels = BlockLabeler(
                        block_features,
                        post_processors=[mark_tables_using_titles,
                                         combine_headers,
                                         combine_horizontal,
                                         remove_false_headers,
                                         ]).label()
                    features_log_folder = os.path.join(target_folder, 'log_block_features')
                    check_and_create_folder(features_log_folder)
                    block_features_with_labels.to_csv(
                        '{0}/{1}.csv'.format(features_log_folder, page_num),
                        index=False)
                    try:
                        page_tables = BlocksToCSV(img_page, block_features_with_labels,
                                                  page_num, target_folder).write_to_csv()
                        tables = pd.concat([tables, pd.DataFrame(page_tables)])
                    except Exception as err:
                        # Best-effort: report the failing page and continue.
                        print(err)
                        print(page_num, pdf_file_name)
        print(tables.columns)
        # Forward-fill identifiers that only appear on a table's first row.
        tables.demand_no = tables.demand_no.fillna(method='ffill')
        tables = tables.apply(fill_major_head, axis=1)
        tables.major_head = tables.major_head.fillna(method='ffill')
        combine_tables(tables[tables.detailed == True])
if __name__ == '__main__':
    # CLI entry point: <input PDF folder> <output folder>.
    parser = argparse.ArgumentParser(description="Extracts CSV from a folder of pdfs.")
    parser.add_argument("input_folder", help="Input PDF folder")
    parser.add_argument("output_folder", help="Output folder")
    cli_args = parser.parse_args()
    process_folder(cli_args.input_folder, cli_args.output_folder)
| 46.930233
| 123
| 0.58672
|
4a0858fb1beba9566db82aa9d10cf6b0c365baf8
| 602
|
py
|
Python
|
Aug21/Django/learningdjango/qtbooks/books/migrations/0002_review_created.py
|
pythonbykhaja/intesivepython
|
d3074f35bf36a04d4d1d9b4ff4631733d40b5817
|
[
"Apache-2.0"
] | 2
|
2021-05-29T18:21:50.000Z
|
2021-07-24T13:03:30.000Z
|
Aug21/Django/learningdjango/qtbooks/books/migrations/0002_review_created.py
|
pythonbykhaja/intesivepython
|
d3074f35bf36a04d4d1d9b4ff4631733d40b5817
|
[
"Apache-2.0"
] | null | null | null |
Aug21/Django/learningdjango/qtbooks/books/migrations/0002_review_created.py
|
pythonbykhaja/intesivepython
|
d3074f35bf36a04d4d1d9b4ff4631733d40b5817
|
[
"Apache-2.0"
] | 2
|
2021-05-25T10:19:54.000Z
|
2021-09-21T12:20:48.000Z
|
# Generated by Django 3.2.7 on 2021-09-05 11:16
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Add a required ``created`` foreign key (to auth.user) on books.Review."""

    dependencies = [
        # Needs the (swappable) user model and this app's initial schema.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('books', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='review',
            name='created',
            # default=None with preserve_default=False: the default is used
            # only to back-fill existing rows during this migration.
            field=models.ForeignKey(default=None, on_delete=django.db.models.deletion.CASCADE, to='auth.user'),
            preserve_default=False,
        ),
    ]
| 26.173913
| 111
| 0.656146
|
4a085999a59191f68cbac73241549828f5198169
| 1,192
|
py
|
Python
|
tools/line_count/summarize-history.py
|
samotarnik/grpc
|
3278bdceda8030d5aa130f12765e5f07263c860d
|
[
"Apache-2.0"
] | 2,151
|
2020-04-18T07:31:17.000Z
|
2022-03-31T08:39:18.000Z
|
tools/line_count/summarize-history.py
|
samotarnik/grpc
|
3278bdceda8030d5aa130f12765e5f07263c860d
|
[
"Apache-2.0"
] | 395
|
2020-04-18T08:22:18.000Z
|
2021-12-08T13:04:49.000Z
|
tools/line_count/summarize-history.py
|
samotarnik/grpc
|
3278bdceda8030d5aa130f12765e5f07263c860d
|
[
"Apache-2.0"
] | 338
|
2020-04-18T08:03:10.000Z
|
2022-03-29T12:33:22.000Z
|
#!/usr/bin/env python
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import subprocess
import datetime
# this script is only of historical interest: it's the script that was used to
# bootstrap the dataset
def daterange(start, end):
    """Yield one date per day from start (inclusive) to end (exclusive)."""
    total_days = int((end - start).days)
    offset = 0
    while offset < total_days:
        yield start + datetime.timedelta(offset)
        offset += 1
# Bootstrap window: regenerate the per-day CSVs for these dates
# (end date exclusive). NOTE: this file is Python 2 (print statement).
start_date = datetime.date(2017, 3, 26)
end_date = datetime.date(2017, 3, 29)

for dt in daterange(start_date, end_date):
    dmy = dt.strftime('%Y-%m-%d')
    print dmy
    # Convert that day's YAML line-count snapshot into a dated CSV.
    subprocess.check_call([
        'tools/line_count/yaml2csv.py', '-i',
        '../count/%s.yaml' % dmy, '-d', dmy, '-o',
        '../count/%s.csv' % dmy
    ])
| 30.564103
| 78
| 0.69547
|
4a085a1e5dcc432f3650bd3f091367ce84691679
| 514
|
py
|
Python
|
labellab-flask/api/serializers/project.py
|
darkshredder/LabelLab
|
fc762e6eea52b9023e38ba5f32bbcaa7cbc17dbe
|
[
"Apache-2.0"
] | 70
|
2019-01-25T19:16:00.000Z
|
2022-03-23T14:37:28.000Z
|
labellab-flask/api/serializers/project.py
|
darkshredder/LabelLab
|
fc762e6eea52b9023e38ba5f32bbcaa7cbc17dbe
|
[
"Apache-2.0"
] | 350
|
2019-01-30T10:50:34.000Z
|
2022-03-31T19:58:44.000Z
|
labellab-flask/api/serializers/project.py
|
darkshredder/LabelLab
|
fc762e6eea52b9023e38ba5f32bbcaa7cbc17dbe
|
[
"Apache-2.0"
] | 140
|
2019-01-30T08:53:35.000Z
|
2022-03-25T15:37:12.000Z
|
from marshmallow import Schema, fields
from api.extensions import db, ma
from api.serializers.image import ImageSchema
from api.serializers.label import LabelSchema
class ProjectSchema(ma.ModelSchema):
    """
    Serializer class for project
    """
    # Primary key; serialized on output only, never accepted from input.
    id = fields.Int(dump_only=True)
    project_name = fields.Str()
    project_description = fields.Str()
    # Owner's user id; set server-side, never accepted from input.
    admin_id = fields.Int(dump_only=True)
    # Child collections serialized with their own schemas.
    images = fields.Nested(ImageSchema, many=True)
    labels = fields.Nested(LabelSchema, many=True)
| 28.555556
| 50
| 0.725681
|
4a085a34188a4e62a9ada9c71c9a68863dce2097
| 323
|
py
|
Python
|
Ejercicio2.py
|
Octavio785/examen-U1
|
1d0b725435fe98f42ac3f256ce4d91eaf1c2abb0
|
[
"Apache-2.0"
] | null | null | null |
Ejercicio2.py
|
Octavio785/examen-U1
|
1d0b725435fe98f42ac3f256ce4d91eaf1c2abb0
|
[
"Apache-2.0"
] | null | null | null |
Ejercicio2.py
|
Octavio785/examen-U1
|
1d0b725435fe98f42ac3f256ce4d91eaf1c2abb0
|
[
"Apache-2.0"
] | null | null | null |
# Variable definitions and setup
print("Ejercicios de examen")
bono=0
# Input data: points earned and number of minimum salaries
o=int(input("Puntos obtenidos:"))
a=int(input("Salario minimos:"))
# Processing: bonus as a percentage of the salary, tiered by points
# (<= 100 -> 10%, 101-150 -> 40%, >= 151 -> 70%)
if o<=100:
    bono=((10*a)/100)
elif o<=150:
    bono=((40*a)/100)
elif o>=151:
    bono=((70*a)/100)
# Output
print("El bono es: ", bono)
print("O.A.R.C.")
| 20.1875
| 33
| 0.662539
|
4a085b85e97b11f5fdba678a330f997b6f22c8d2
| 1,858
|
py
|
Python
|
tests/integration/services/news/test_models.py
|
GSH-LAN/byceps
|
ab8918634e90aaa8574bd1bb85627759cef122fe
|
[
"BSD-3-Clause"
] | null | null | null |
tests/integration/services/news/test_models.py
|
GSH-LAN/byceps
|
ab8918634e90aaa8574bd1bb85627759cef122fe
|
[
"BSD-3-Clause"
] | null | null | null |
tests/integration/services/news/test_models.py
|
GSH-LAN/byceps
|
ab8918634e90aaa8574bd1bb85627759cef122fe
|
[
"BSD-3-Clause"
] | null | null | null |
"""
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
import pytest
from byceps.services.news import (
channel_service as news_channel_service,
service as news_service,
)
@pytest.fixture(scope='module')
def editor(make_user):
    """Module-scoped user acting as the news editor for all tests here."""
    return make_user('NewsEditor')
@pytest.fixture(scope='module')
def brand(make_brand):
    """Module-scoped brand the news channel is attached to."""
    return make_brand()
@pytest.fixture(scope='module')
def channel(brand):
    """Create a news channel for the brand; deleted at module teardown."""
    channel_id = f'{brand.id}-test'
    url_prefix = 'https://www.acmecon.test/news/'

    channel = news_channel_service.create_channel(
        brand.id, channel_id, url_prefix
    )

    yield channel

    news_channel_service.delete_channel(channel_id)
@pytest.fixture
def news_item_with_image(channel, editor):
    """Function-scoped news item carrying an image; deleted on teardown."""
    item = create_item(
        channel.id,
        'with-image',
        editor.id,
        image_url_path='breaking.png',
    )

    yield item

    news_service.delete_item(item.id)
@pytest.fixture
def news_item_without_image(channel, editor):
    """Function-scoped news item without an image; deleted on teardown."""
    item = create_item(channel.id, 'without-image', editor.id)

    yield item

    news_service.delete_item(item.id)
def test_image_url_with_image(news_item_with_image, brand):
    """The image URL combines the channel's data path with the file name."""
    assert (
        news_item_with_image.image_url_path
        == f'/data/global/news_channels/{brand.id}-test/breaking.png'
    )
def test_image_url_without_image(news_item_without_image):
    """Items created without an image report no image URL."""
    assert news_item_without_image.image_url_path is None
# helpers
def create_item(channel_id, slug, editor_id, *, image_url_path=None):
    """Create a news item with fixed title/body and return its aggregated form."""
    created = news_service.create_item(
        channel_id,
        slug,
        editor_id,
        'the title',
        'the body',
        image_url_path=image_url_path,
    )

    # Look the item up again so the aggregated representation is returned.
    return news_service.find_aggregated_item_by_slug({channel_id}, slug)
| 21.604651
| 79
| 0.716362
|
4a085c49d911b3fa8a591f77b26a69f091cfeff8
| 1,036
|
py
|
Python
|
core/polyaxon/utils/enums_utils.py
|
erexer/polyaxon
|
be14dae1ed56d568983388736bcdaf27a7baa4a4
|
[
"Apache-2.0"
] | null | null | null |
core/polyaxon/utils/enums_utils.py
|
erexer/polyaxon
|
be14dae1ed56d568983388736bcdaf27a7baa4a4
|
[
"Apache-2.0"
] | null | null | null |
core/polyaxon/utils/enums_utils.py
|
erexer/polyaxon
|
be14dae1ed56d568983388736bcdaf27a7baa4a4
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python
#
# Copyright 2018-2020 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from enum import Enum
from typing import Iterable, List, Set, Tuple, Type, Union
def enum_to_choices(enumeration: Type[Enum]) -> Iterable[Tuple]:
    """Map an Enum class to a tuple of (value, value) choice pairs."""
    pairs = [(member.value, member.value) for member in enumeration]
    return tuple(pairs)
def enum_to_set(enumeration: Type[Enum]) -> Set:
    """Collect all member values of an Enum class into a set."""
    return {member.value for member in enumeration}
def values_to_choices(enumeration: Union[List, Set]) -> Iterable[Tuple]:
    """Turn a collection of values into sorted (value, value) choice pairs."""
    ordered = sorted(enumeration)
    return tuple((item, item) for item in ordered)
| 33.419355
| 74
| 0.742278
|
4a085c55dae9b816231b0dbbe2f4030bb674fb74
| 2,905
|
py
|
Python
|
Dockerfiles/gedlab-khmer-filter-abund/pymodules/python2.7/lib/python/khmer-1.1-py2.7-linux-x86_64.egg/EGG-INFO/scripts/count-median.py
|
poojavade/Genomics_Docker
|
829b5094bba18bbe03ae97daf925fee40a8476e8
|
[
"Apache-2.0"
] | 1
|
2019-07-29T02:53:51.000Z
|
2019-07-29T02:53:51.000Z
|
Dockerfiles/gedlab-khmer-filter-abund/pymodules/python2.7/lib/python/khmer-1.1-py2.7-linux-x86_64.egg/EGG-INFO/scripts/count-median.py
|
poojavade/Genomics_Docker
|
829b5094bba18bbe03ae97daf925fee40a8476e8
|
[
"Apache-2.0"
] | 1
|
2021-09-11T14:30:32.000Z
|
2021-09-11T14:30:32.000Z
|
Dockerfiles/gedlab-khmer-filter-abund/pymodules/python2.7/lib/python/khmer-1.1-py2.7-linux-x86_64.egg/EGG-INFO/scripts/count-median.py
|
poojavade/Genomics_Docker
|
829b5094bba18bbe03ae97daf925fee40a8476e8
|
[
"Apache-2.0"
] | 2
|
2016-12-19T02:27:46.000Z
|
2019-07-29T02:53:54.000Z
|
#!/usr/bin/python2.7
#
# This file is part of khmer, http://github.com/ged-lab/khmer/, and is
# Copyright (C) Michigan State University, 2009-2014. It is licensed under
# the three-clause BSD license; see doc/LICENSE.txt.
# Contact: khmer-project@idyll.org
#
# pylint: disable=missing-docstring,invalid-name
"""
Count the median/avg k-mer abundance for each sequence in the input file,
based on the k-mer counts in the given k-mer counting table. Can be used to
estimate expression levels (mRNAseq) or coverage (genomic/metagenomic).
% scripts/count-median.py <htname> <input seqs> <output counts>
Use '-h' for parameter help.
The output file contains sequence id, median, average, stddev, and seq length.
NOTE: All 'N's in the input sequences are converted to 'G's.
"""
import screed
import argparse
import khmer
from khmer.file import check_file_status, check_space
from khmer.khmer_args import info
import textwrap
def get_parser():
    """Build the argparse parser for count-median.py (three positional args)."""
    epilog = """
    Count the median/avg k-mer abundance for each sequence in the input file,
    based on the k-mer counts in the given k-mer counting table. Can be used
    to estimate expression levels (mRNAseq) or coverage (genomic/metagenomic).
    The output file contains sequence id, median, average, stddev, and seq
    length.
    NOTE: All 'N's in the input sequences are converted to 'G's.
    """
    parser = argparse.ArgumentParser(
        description='Count k-mers summary stats for sequences',
        epilog=textwrap.dedent(epilog))

    parser.add_argument('ctfile', metavar='input_counting_table_filename',
                        help='input k-mer count table filename')
    parser.add_argument('input', metavar='input_sequence_filename',
                        help='input FAST[AQ] sequence filename')
    parser.add_argument('output', metavar='output_summary_filename',
                        help='output summary filename')
    parser.add_argument('--version', action='version', version='%(prog)s '
                        + khmer.__version__)
    return parser
def main():
    # NOTE: this file is Python 2 (print statements below).
    info('count-median.py', ['diginorm'])
    args = get_parser().parse_args()

    htfile = args.ctfile
    input_filename = args.input
    output_filename = args.output

    # Fail early if inputs are missing or disk space looks insufficient.
    infiles = [htfile, input_filename]
    for infile in infiles:
        check_file_status(infile)

    check_space(infiles)

    print 'loading k-mer counting table from', htfile
    htable = khmer.load_counting_hash(htfile)
    ksize = htable.ksize()

    print 'writing to', output_filename
    output = open(output_filename, 'w')

    for record in screed.open(input_filename):
        seq = record.sequence.upper()
        # 'N' has no k-mer counts; replace with 'G' (see module docstring).
        if 'N' in seq:
            seq = seq.replace('N', 'G')

        # Sequences shorter than k have no k-mers to summarize; skip them.
        if ksize <= len(seq):
            medn, ave, stdev = htable.get_median_count(seq)
            print >> output, record.name, medn, ave, stdev, len(seq)
# Standard script entry point.
if __name__ == '__main__':
    main()
| 33.011364
| 78
| 0.681239
|
4a085c58d8c1c66a65d323696395a2bde53ce540
| 6,626
|
py
|
Python
|
utils/flags.py
|
hongliangduan/Reproducing-the-invention-of-a-named-reaction-Zero-shot-prediction-of-unseen-chemical-reactions
|
2d688bff2202e37321dedba7cdac67cd3c1e1fad
|
[
"MIT"
] | null | null | null |
utils/flags.py
|
hongliangduan/Reproducing-the-invention-of-a-named-reaction-Zero-shot-prediction-of-unseen-chemical-reactions
|
2d688bff2202e37321dedba7cdac67cd3c1e1fad
|
[
"MIT"
] | null | null | null |
utils/flags.py
|
hongliangduan/Reproducing-the-invention-of-a-named-reaction-Zero-shot-prediction-of-unseen-chemical-reactions
|
2d688bff2202e37321dedba7cdac67cd3c1e1fad
|
[
"MIT"
] | null | null | null |
# coding=utf-8
# Copyright 2018 The Tensor2Tensor Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Common command-line flags."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
flags = tf.flags
FLAGS = flags.FLAGS
flags.DEFINE_bool("registry_help", False,
"If True, logs the contents of the registry and exits.")
flags.DEFINE_bool("tfdbg", False,
"If True, use the TF debugger CLI on train/eval.")
flags.DEFINE_bool("export_saved_model", False,
"DEPRECATED - see serving/export.py.")
flags.DEFINE_bool("dbgprofile", False,
"If True, record the timeline for chrome://tracing/.")
flags.DEFINE_string("model", None, "Which model to use.")
flags.DEFINE_string("hparams_set", None, "Which parameters to use.")
flags.DEFINE_string("hparams_range", None, "Parameters range.")
flags.DEFINE_string("hparams", "",
"A comma-separated list of `name=value` hyperparameter "
"values. This flag is used to override hyperparameter "
"settings either when manually selecting hyperparameters "
"or when using Vizier. If a hyperparameter setting is "
"specified by this flag then it must be a valid "
"hyperparameter name for the model.")
flags.DEFINE_string("problem", None, "Problem name.")
# data_dir is a common flag name - catch conflicts and define it once.
try:
flags.DEFINE_string("data_dir", None, "Directory with training data.")
except: # pylint: disable=bare-except
pass
flags.DEFINE_integer("train_steps", 250000,
"The number of steps to run training for.")
flags.DEFINE_string("eval_early_stopping_metric", "loss",
"If --eval_early_stopping_steps is not None, then stop "
"when --eval_early_stopping_metric has not decreased for "
"--eval_early_stopping_steps")
flags.DEFINE_float("eval_early_stopping_metric_delta", 0.1,
"Delta determining whether metric has plateaued.")
flags.DEFINE_integer("eval_early_stopping_steps", None,
"If --eval_early_stopping_steps is not None, then stop "
"when --eval_early_stopping_metric has not decreased for "
"--eval_early_stopping_steps")
flags.DEFINE_bool("eval_early_stopping_metric_minimize", True,
"Whether to check for the early stopping metric going down "
"or up.")
flags.DEFINE_bool("eval_run_autoregressive", False,
"Run eval autoregressively where we condition on previous"
"generated output instead of the actual target.")
flags.DEFINE_bool("eval_use_test_set", False,
"Whether to use the '-test' data for EVAL (and PREDICT).")
flags.DEFINE_integer("keep_checkpoint_max", 20,
"How many recent checkpoints to keep.")
flags.DEFINE_bool("enable_graph_rewriter", False,
"Enable graph optimizations that are not on by default.")
flags.DEFINE_integer("keep_checkpoint_every_n_hours", 10000,
"Number of hours between each checkpoint to be saved. "
"The default value 10,000 hours effectively disables it.")
flags.DEFINE_integer("save_checkpoints_secs", 0,
"Save checkpoints every this many seconds. "
"Default=0 means save checkpoints each x steps where x "
"is max(iterations_per_loop, local_eval_frequency).")
flags.DEFINE_bool("log_device_placement", False,
"Whether to log device placement.")
flags.DEFINE_string("warm_start_from", None, "Warm start from checkpoint.")
# Distributed training flags
# flags.DEFINE_integer("local_eval_frequency", 1000,
# "Save checkpoints and run evaluation every N steps during "
# "local training.")
flags.DEFINE_integer("local_eval_frequency", 100,
"Save checkpoints and run evaluation every N steps during "
"local training.")
flags.DEFINE_integer("eval_throttle_seconds", 600,
"Do not re-evaluate unless the last evaluation was started"
" at least this many seconds ago.")
flags.DEFINE_bool("locally_shard_to_cpu", False,
"Use CPU as a sharding device running locally. This allows "
"to test sharded model construction on a machine with 1 GPU.")
flags.DEFINE_bool("sync", False, "Sync compute on PS.")
flags.DEFINE_string("worker_job", "/job:localhost", "name of worker job")
flags.DEFINE_integer("worker_gpu", 1, "How many GPUs to use.")
flags.DEFINE_integer("worker_replicas", 1, "How many workers to use.")
flags.DEFINE_integer("worker_id", 0, "Which worker task are we.")
flags.DEFINE_float("worker_gpu_memory_fraction", 0.95,
"Fraction of GPU memory to allocate.")
flags.DEFINE_integer("ps_gpu", 0, "How many GPUs to use per ps.")
flags.DEFINE_string("gpu_order", "", "Optional order for daisy-chaining GPUs."
" e.g. \"1 3 2 4\"")
flags.DEFINE_string("ps_job", "/job:ps", "name of ps job")
flags.DEFINE_integer("ps_replicas", 0, "How many ps replicas.")
# Decoding flags
flags.DEFINE_string("decode_hparams", "",
"Comma-separated list of name=value pairs to control "
"decode behavior. See decoding.decode_hparams for "
"defaults.")
flags.DEFINE_string("decode_from_file", None,
"Path to the source file for decoding, used by "
"continuous_decode_from_file.")
flags.DEFINE_string("decode_to_file", None,
"Path to the decoded file generated by decoding, used by "
"continuous_decode_from_file.")
flags.DEFINE_string("decode_reference", None,
"Path to the reference file for decoding, used by "
"continuous_decode_from_file to compute BLEU score.")
| 51.364341
| 82
| 0.660278
|
4a085d04267ab978eb5b87371b0b45ef64664c17
| 224,744
|
py
|
Python
|
template_container_opossum/labels/slice_69.py
|
lkondratova/Brainplot
|
3c8a88c1995dedeaa5cbd88ee71499c7cf9c571d
|
[
"MIT"
] | null | null | null |
template_container_opossum/labels/slice_69.py
|
lkondratova/Brainplot
|
3c8a88c1995dedeaa5cbd88ee71499c7cf9c571d
|
[
"MIT"
] | null | null | null |
template_container_opossum/labels/slice_69.py
|
lkondratova/Brainplot
|
3c8a88c1995dedeaa5cbd88ee71499c7cf9c571d
|
[
"MIT"
] | null | null | null |
coordinates_74EAFF = ((48, 147),
(48, 149), (48, 150), (48, 151), (48, 152), (48, 153), (49, 145), (49, 146), (49, 154), (49, 155), (49, 156), (49, 157), (49, 158), (50, 143), (50, 147), (50, 148), (50, 149), (50, 150), (50, 151), (50, 152), (50, 153), (50, 159), (50, 161), (51, 140), (51, 141), (51, 145), (51, 146), (51, 147), (51, 148), (51, 149), (51, 150), (51, 151), (51, 152), (51, 153), (51, 154), (51, 155), (51, 156), (51, 157), (51, 158), (51, 162), (51, 164), (52, 138), (52, 139), (52, 142), (52, 143), (52, 144), (52, 145), (52, 146), (52, 147), (52, 148), (52, 149), (52, 150), (52, 151), (52, 152), (52, 153), (52, 154), (52, 155), (52, 156), (52, 157), (52, 158), (52, 159), (52, 160), (52, 161), (52, 165), (52, 166), (53, 136), (53, 137), (53, 140), (53, 141), (53, 142), (53, 143), (53, 144), (53, 145),
(53, 146), (53, 147), (53, 148), (53, 149), (53, 150), (53, 151), (53, 152), (53, 153), (53, 154), (53, 155), (53, 156), (53, 157), (53, 158), (53, 159), (53, 160), (53, 161), (53, 162), (53, 163), (53, 164), (53, 168), (53, 169), (54, 135), (54, 138), (54, 139), (54, 140), (54, 141), (54, 142), (54, 143), (54, 144), (54, 145), (54, 146), (54, 147), (54, 148), (54, 149), (54, 150), (54, 151), (54, 152), (54, 153), (54, 154), (54, 155), (54, 156), (54, 157), (54, 158), (54, 159), (54, 160), (54, 161), (54, 162), (54, 163), (54, 164), (54, 165), (54, 166), (54, 170), (54, 171), (55, 130), (55, 131), (55, 132), (55, 133), (55, 136), (55, 137), (55, 138), (55, 139), (55, 140), (55, 141), (55, 142), (55, 143), (55, 144), (55, 145), (55, 146), (55, 147), (55, 148), (55, 149), (55, 150),
(55, 151), (55, 152), (55, 153), (55, 154), (55, 155), (55, 156), (55, 157), (55, 158), (55, 159), (55, 160), (55, 161), (55, 162), (55, 163), (55, 164), (55, 165), (55, 166), (55, 167), (55, 168), (55, 169), (55, 172), (56, 125), (56, 127), (56, 128), (56, 129), (56, 134), (56, 135), (56, 136), (56, 137), (56, 138), (56, 139), (56, 140), (56, 141), (56, 142), (56, 143), (56, 144), (56, 145), (56, 146), (56, 147), (56, 148), (56, 149), (56, 150), (56, 151), (56, 152), (56, 153), (56, 154), (56, 155), (56, 156), (56, 157), (56, 158), (56, 159), (56, 160), (56, 161), (56, 162), (56, 163), (56, 164), (56, 165), (56, 166), (56, 167), (56, 168), (56, 169), (56, 170), (56, 171), (56, 174), (57, 124), (57, 130), (57, 131), (57, 132), (57, 133), (57, 134), (57, 135), (57, 136), (57, 137),
(57, 138), (57, 139), (57, 140), (57, 141), (57, 142), (57, 143), (57, 144), (57, 145), (57, 146), (57, 147), (57, 148), (57, 149), (57, 150), (57, 151), (57, 152), (57, 153), (57, 154), (57, 155), (57, 156), (57, 157), (57, 158), (57, 159), (57, 160), (57, 161), (57, 162), (57, 163), (57, 164), (57, 165), (57, 166), (57, 167), (57, 168), (57, 169), (57, 170), (57, 171), (57, 172), (57, 173), (57, 176), (58, 124), (58, 126), (58, 127), (58, 128), (58, 129), (58, 130), (58, 131), (58, 132), (58, 133), (58, 134), (58, 135), (58, 136), (58, 137), (58, 138), (58, 139), (58, 140), (58, 141), (58, 142), (58, 143), (58, 144), (58, 145), (58, 146), (58, 147), (58, 148), (58, 149), (58, 150), (58, 151), (58, 152), (58, 153), (58, 154), (58, 155), (58, 156), (58, 157), (58, 158), (58, 159),
(58, 160), (58, 161), (58, 162), (58, 163), (58, 164), (58, 165), (58, 166), (58, 167), (58, 168), (58, 169), (58, 170), (58, 171), (58, 172), (58, 173), (58, 174), (58, 177), (59, 123), (59, 125), (59, 126), (59, 127), (59, 128), (59, 129), (59, 130), (59, 131), (59, 132), (59, 133), (59, 134), (59, 135), (59, 136), (59, 137), (59, 138), (59, 139), (59, 140), (59, 141), (59, 142), (59, 143), (59, 144), (59, 145), (59, 146), (59, 147), (59, 148), (59, 149), (59, 150), (59, 151), (59, 152), (59, 153), (59, 154), (59, 155), (59, 156), (59, 157), (59, 158), (59, 159), (59, 160), (59, 161), (59, 162), (59, 163), (59, 164), (59, 165), (59, 166), (59, 167), (59, 168), (59, 169), (59, 170), (59, 171), (59, 172), (59, 173), (59, 174), (59, 175), (59, 176), (59, 179), (60, 122), (60, 124),
(60, 125), (60, 126), (60, 127), (60, 128), (60, 129), (60, 130), (60, 131), (60, 132), (60, 133), (60, 134), (60, 135), (60, 136), (60, 137), (60, 138), (60, 139), (60, 140), (60, 141), (60, 142), (60, 143), (60, 144), (60, 145), (60, 146), (60, 147), (60, 148), (60, 149), (60, 150), (60, 151), (60, 152), (60, 153), (60, 154), (60, 155), (60, 156), (60, 157), (60, 158), (60, 159), (60, 160), (60, 161), (60, 162), (60, 163), (60, 164), (60, 165), (60, 166), (60, 167), (60, 168), (60, 169), (60, 170), (60, 171), (60, 172), (60, 173), (60, 174), (60, 175), (60, 176), (60, 177), (60, 180), (61, 123), (61, 124), (61, 125), (61, 126), (61, 127), (61, 128), (61, 129), (61, 130), (61, 131), (61, 132), (61, 133), (61, 134), (61, 135), (61, 136), (61, 137), (61, 138), (61, 139), (61, 140),
(61, 141), (61, 142), (61, 143), (61, 144), (61, 145), (61, 146), (61, 147), (61, 148), (61, 149), (61, 150), (61, 151), (61, 152), (61, 153), (61, 154), (61, 155), (61, 156), (61, 157), (61, 158), (61, 159), (61, 160), (61, 161), (61, 162), (61, 163), (61, 164), (61, 165), (61, 166), (61, 167), (61, 168), (61, 169), (61, 170), (61, 171), (61, 172), (61, 173), (61, 174), (61, 175), (61, 176), (61, 177), (61, 178), (61, 179), (61, 181), (62, 115), (62, 116), (62, 117), (62, 118), (62, 119), (62, 122), (62, 123), (62, 124), (62, 125), (62, 126), (62, 127), (62, 128), (62, 129), (62, 130), (62, 131), (62, 132), (62, 133), (62, 134), (62, 135), (62, 136), (62, 137), (62, 138), (62, 139), (62, 140), (62, 141), (62, 142), (62, 143), (62, 144), (62, 145), (62, 146), (62, 147), (62, 148),
(62, 149), (62, 150), (62, 151), (62, 152), (62, 153), (62, 154), (62, 155), (62, 156), (62, 157), (62, 158), (62, 159), (62, 160), (62, 161), (62, 162), (62, 163), (62, 164), (62, 165), (62, 166), (62, 167), (62, 168), (62, 169), (62, 170), (62, 171), (62, 172), (62, 173), (62, 174), (62, 175), (62, 176), (62, 177), (62, 178), (62, 179), (62, 180), (62, 183), (63, 112), (63, 114), (63, 120), (63, 121), (63, 122), (63, 123), (63, 124), (63, 125), (63, 126), (63, 127), (63, 128), (63, 129), (63, 130), (63, 131), (63, 132), (63, 133), (63, 134), (63, 135), (63, 136), (63, 137), (63, 138), (63, 139), (63, 140), (63, 141), (63, 142), (63, 143), (63, 144), (63, 145), (63, 146), (63, 147), (63, 148), (63, 149), (63, 150), (63, 151), (63, 152), (63, 153), (63, 154), (63, 155), (63, 156),
(63, 157), (63, 158), (63, 159), (63, 160), (63, 161), (63, 162), (63, 163), (63, 164), (63, 165), (63, 166), (63, 167), (63, 168), (63, 169), (63, 170), (63, 171), (63, 172), (63, 173), (63, 174), (63, 175), (63, 176), (63, 177), (63, 178), (63, 179), (63, 180), (63, 181), (63, 184), (64, 111), (64, 115), (64, 116), (64, 117), (64, 118), (64, 119), (64, 120), (64, 121), (64, 122), (64, 123), (64, 124), (64, 125), (64, 126), (64, 127), (64, 128), (64, 129), (64, 130), (64, 131), (64, 132), (64, 133), (64, 134), (64, 135), (64, 136), (64, 137), (64, 138), (64, 139), (64, 140), (64, 141), (64, 142), (64, 143), (64, 144), (64, 145), (64, 146), (64, 147), (64, 148), (64, 149), (64, 150), (64, 151), (64, 152), (64, 153), (64, 154), (64, 155), (64, 156), (64, 157), (64, 158), (64, 159),
(64, 160), (64, 161), (64, 162), (64, 163), (64, 164), (64, 165), (64, 166), (64, 167), (64, 168), (64, 169), (64, 170), (64, 171), (64, 172), (64, 173), (64, 174), (64, 175), (64, 176), (64, 177), (64, 178), (64, 179), (64, 180), (64, 181), (64, 182), (64, 185), (65, 110), (65, 112), (65, 113), (65, 114), (65, 115), (65, 116), (65, 117), (65, 118), (65, 119), (65, 120), (65, 121), (65, 122), (65, 123), (65, 124), (65, 125), (65, 126), (65, 127), (65, 128), (65, 129), (65, 130), (65, 131), (65, 132), (65, 133), (65, 134), (65, 135), (65, 136), (65, 137), (65, 138), (65, 139), (65, 140), (65, 141), (65, 142), (65, 143), (65, 144), (65, 145), (65, 146), (65, 147), (65, 148), (65, 149), (65, 150), (65, 151), (65, 152), (65, 153), (65, 154), (65, 155), (65, 156), (65, 157), (65, 158),
(65, 159), (65, 160), (65, 161), (65, 162), (65, 163), (65, 164), (65, 165), (65, 166), (65, 167), (65, 168), (65, 169), (65, 170), (65, 171), (65, 172), (65, 173), (65, 174), (65, 175), (65, 176), (65, 177), (65, 178), (65, 179), (65, 180), (65, 181), (65, 182), (65, 183), (66, 111), (66, 113), (66, 114), (66, 115), (66, 116), (66, 117), (66, 118), (66, 119), (66, 120), (66, 121), (66, 122), (66, 123), (66, 124), (66, 125), (66, 126), (66, 127), (66, 128), (66, 129), (66, 130), (66, 131), (66, 132), (66, 133), (66, 134), (66, 135), (66, 136), (66, 137), (66, 138), (66, 139), (66, 140), (66, 141), (66, 142), (66, 143), (66, 144), (66, 145), (66, 146), (66, 147), (66, 148), (66, 149), (66, 150), (66, 151), (66, 152), (66, 153), (66, 154), (66, 155), (66, 156), (66, 157), (66, 158),
(66, 159), (66, 160), (66, 161), (66, 162), (66, 163), (66, 164), (66, 165), (66, 166), (66, 167), (66, 168), (66, 169), (66, 170), (66, 171), (66, 172), (66, 173), (66, 174), (66, 175), (66, 176), (66, 177), (66, 178), (66, 179), (66, 180), (66, 181), (66, 182), (66, 183), (66, 184), (66, 186), (67, 111), (67, 113), (67, 114), (67, 115), (67, 116), (67, 117), (67, 118), (67, 119), (67, 120), (67, 121), (67, 122), (67, 123), (67, 124), (67, 125), (67, 126), (67, 127), (67, 128), (67, 129), (67, 130), (67, 131), (67, 132), (67, 133), (67, 134), (67, 135), (67, 136), (67, 137), (67, 138), (67, 139), (67, 140), (67, 141), (67, 142), (67, 143), (67, 144), (67, 145), (67, 146), (67, 147), (67, 148), (67, 149), (67, 150), (67, 151), (67, 152), (67, 153), (67, 154), (67, 155), (67, 156),
(67, 157), (67, 158), (67, 159), (67, 160), (67, 161), (67, 162), (67, 163), (67, 164), (67, 165), (67, 166), (67, 167), (67, 168), (67, 169), (67, 170), (67, 171), (67, 172), (67, 173), (67, 174), (67, 175), (67, 176), (67, 177), (67, 178), (67, 179), (67, 180), (67, 181), (67, 182), (67, 183), (67, 184), (67, 185), (67, 187), (68, 112), (68, 114), (68, 115), (68, 116), (68, 117), (68, 118), (68, 119), (68, 120), (68, 121), (68, 122), (68, 123), (68, 124), (68, 125), (68, 126), (68, 127), (68, 128), (68, 129), (68, 130), (68, 131), (68, 132), (68, 133), (68, 134), (68, 135), (68, 136), (68, 137), (68, 138), (68, 139), (68, 140), (68, 141), (68, 142), (68, 143), (68, 144), (68, 145), (68, 146), (68, 147), (68, 148), (68, 149), (68, 150), (68, 151), (68, 152), (68, 153), (68, 154),
(68, 155), (68, 156), (68, 157), (68, 158), (68, 159), (68, 160), (68, 161), (68, 162), (68, 163), (68, 164), (68, 165), (68, 166), (68, 167), (68, 168), (68, 169), (68, 170), (68, 171), (68, 172), (68, 173), (68, 174), (68, 175), (68, 176), (68, 177), (68, 178), (68, 179), (68, 180), (68, 181), (68, 182), (68, 183), (68, 184), (68, 185), (68, 186), (68, 188), (69, 113), (69, 115), (69, 116), (69, 117), (69, 118), (69, 119), (69, 120), (69, 121), (69, 122), (69, 123), (69, 124), (69, 125), (69, 126), (69, 127), (69, 128), (69, 129), (69, 130), (69, 131), (69, 132), (69, 133), (69, 134), (69, 135), (69, 136), (69, 137), (69, 138), (69, 139), (69, 140), (69, 141), (69, 142), (69, 143), (69, 144), (69, 145), (69, 146), (69, 147), (69, 148), (69, 149), (69, 150), (69, 151), (69, 152),
(69, 153), (69, 154), (69, 155), (69, 156), (69, 157), (69, 158), (69, 159), (69, 160), (69, 161), (69, 162), (69, 163), (69, 164), (69, 165), (69, 166), (69, 167), (69, 168), (69, 169), (69, 170), (69, 171), (69, 172), (69, 173), (69, 174), (69, 175), (69, 176), (69, 177), (69, 178), (69, 179), (69, 180), (69, 181), (69, 182), (69, 183), (69, 184), (69, 185), (69, 186), (69, 188), (70, 113), (70, 115), (70, 116), (70, 117), (70, 118), (70, 119), (70, 120), (70, 121), (70, 122), (70, 123), (70, 124), (70, 125), (70, 126), (70, 127), (70, 128), (70, 129), (70, 130), (70, 131), (70, 132), (70, 133), (70, 134), (70, 135), (70, 136), (70, 137), (70, 138), (70, 139), (70, 140), (70, 141), (70, 142), (70, 143), (70, 144), (70, 145), (70, 146), (70, 147), (70, 148), (70, 149), (70, 150),
(70, 151), (70, 152), (70, 153), (70, 154), (70, 155), (70, 156), (70, 157), (70, 158), (70, 159), (70, 160), (70, 161), (70, 162), (70, 163), (70, 164), (70, 165), (70, 166), (70, 167), (70, 168), (70, 169), (70, 170), (70, 171), (70, 172), (70, 173), (70, 174), (70, 175), (70, 176), (70, 177), (70, 178), (70, 179), (70, 180), (70, 181), (70, 182), (70, 183), (70, 184), (70, 185), (70, 186), (70, 187), (70, 189), (71, 114), (71, 116), (71, 117), (71, 118), (71, 119), (71, 120), (71, 121), (71, 122), (71, 123), (71, 124), (71, 125), (71, 126), (71, 127), (71, 128), (71, 129), (71, 130), (71, 131), (71, 132), (71, 133), (71, 134), (71, 135), (71, 136), (71, 137), (71, 138), (71, 139), (71, 140), (71, 141), (71, 142), (71, 143), (71, 144), (71, 145), (71, 146), (71, 147), (71, 148),
(71, 149), (71, 150), (71, 151), (71, 152), (71, 153), (71, 154), (71, 155), (71, 156), (71, 157), (71, 158), (71, 159), (71, 160), (71, 161), (71, 162), (71, 163), (71, 164), (71, 165), (71, 166), (71, 167), (71, 168), (71, 169), (71, 170), (71, 171), (71, 172), (71, 173), (71, 174), (71, 175), (71, 176), (71, 177), (71, 178), (71, 179), (71, 180), (71, 181), (71, 182), (71, 183), (71, 184), (71, 185), (71, 186), (71, 187), (71, 189), (72, 115), (72, 117), (72, 118), (72, 119), (72, 120), (72, 121), (72, 122), (72, 123), (72, 124), (72, 125), (72, 126), (72, 127), (72, 128), (72, 129), (72, 130), (72, 131), (72, 132), (72, 133), (72, 134), (72, 135), (72, 136), (72, 137), (72, 138), (72, 139), (72, 140), (72, 141), (72, 142), (72, 143), (72, 144), (72, 145), (72, 146), (72, 147),
(72, 148), (72, 149), (72, 150), (72, 151), (72, 152), (72, 153), (72, 154), (72, 155), (72, 156), (72, 157), (72, 158), (72, 159), (72, 160), (72, 165), (72, 166), (72, 167), (72, 168), (72, 169), (72, 170), (72, 171), (72, 172), (72, 173), (72, 174), (72, 175), (72, 176), (72, 177), (72, 178), (72, 179), (72, 180), (72, 181), (72, 182), (72, 183), (72, 184), (72, 185), (72, 186), (72, 187), (72, 189), (73, 116), (73, 118), (73, 119), (73, 120), (73, 121), (73, 122), (73, 123), (73, 124), (73, 125), (73, 126), (73, 127), (73, 128), (73, 129), (73, 130), (73, 131), (73, 132), (73, 133), (73, 134), (73, 135), (73, 136), (73, 137), (73, 138), (73, 139), (73, 140), (73, 141), (73, 161), (73, 162), (73, 163), (73, 164), (73, 167), (73, 168), (73, 169), (73, 170), (73, 171), (73, 172),
(73, 173), (73, 174), (73, 175), (73, 176), (73, 177), (73, 178), (73, 179), (73, 180), (73, 181), (73, 182), (73, 183), (73, 184), (73, 185), (73, 186), (73, 187), (73, 188), (73, 190), (74, 117), (74, 119), (74, 120), (74, 121), (74, 122), (74, 123), (74, 124), (74, 125), (74, 126), (74, 127), (74, 128), (74, 129), (74, 130), (74, 131), (74, 132), (74, 133), (74, 134), (74, 135), (74, 136), (74, 137), (74, 138), (74, 139), (74, 140), (74, 143), (74, 144), (74, 145), (74, 146), (74, 147), (74, 148), (74, 149), (74, 150), (74, 151), (74, 152), (74, 153), (74, 154), (74, 155), (74, 156), (74, 157), (74, 158), (74, 159), (74, 160), (74, 165), (74, 168), (74, 169), (74, 170), (74, 171), (74, 172), (74, 173), (74, 174), (74, 175), (74, 176), (74, 177), (74, 178), (74, 179), (74, 180),
(74, 181), (74, 182), (74, 183), (74, 184), (74, 185), (74, 186), (74, 190), (75, 117), (75, 119), (75, 120), (75, 121), (75, 122), (75, 123), (75, 124), (75, 125), (75, 126), (75, 127), (75, 128), (75, 129), (75, 130), (75, 131), (75, 132), (75, 133), (75, 134), (75, 135), (75, 136), (75, 137), (75, 138), (75, 141), (75, 167), (75, 169), (75, 170), (75, 171), (75, 172), (75, 173), (75, 174), (75, 175), (75, 176), (75, 177), (75, 178), (75, 179), (75, 180), (75, 181), (75, 182), (75, 183), (75, 184), (75, 188), (76, 118), (76, 120), (76, 121), (76, 122), (76, 123), (76, 124), (76, 125), (76, 126), (76, 127), (76, 128), (76, 129), (76, 130), (76, 131), (76, 132), (76, 133), (76, 134), (76, 135), (76, 136), (76, 139), (76, 168), (76, 170), (76, 171), (76, 172), (76, 173), (76, 174),
(76, 175), (76, 176), (76, 177), (76, 178), (76, 179), (76, 180), (76, 181), (76, 182), (76, 183), (76, 186), (77, 119), (77, 121), (77, 122), (77, 123), (77, 124), (77, 125), (77, 126), (77, 127), (77, 128), (77, 129), (77, 130), (77, 131), (77, 132), (77, 133), (77, 134), (77, 137), (77, 138), (77, 168), (77, 170), (77, 171), (77, 172), (77, 173), (77, 174), (77, 175), (77, 176), (77, 177), (77, 178), (77, 179), (77, 180), (77, 181), (77, 182), (77, 184), (78, 120), (78, 122), (78, 123), (78, 124), (78, 125), (78, 126), (78, 127), (78, 128), (78, 129), (78, 130), (78, 131), (78, 132), (78, 136), (78, 168), (78, 171), (78, 172), (78, 173), (78, 174), (78, 175), (78, 176), (78, 177), (78, 178), (78, 179), (78, 180), (78, 183), (79, 121), (79, 123), (79, 124), (79, 125), (79, 126),
(79, 127), (79, 128), (79, 129), (79, 130), (79, 134), (79, 169), (79, 172), (79, 173), (79, 174), (79, 175), (79, 176), (79, 177), (79, 178), (79, 179), (79, 182), (80, 121), (80, 123), (80, 124), (80, 125), (80, 126), (80, 127), (80, 128), (80, 132), (80, 173), (80, 174), (80, 175), (80, 176), (80, 180), (81, 122), (81, 124), (81, 125), (81, 130), (81, 172), (81, 177), (81, 178), (81, 179), (82, 126), (82, 127), (82, 128), (82, 174), (82, 176), (83, 123), (83, 125), (232, 122), (233, 122), (233, 124), (234, 122), (234, 126), (235, 121), (235, 122), (235, 124), (235, 128), (236, 121), (236, 123), (236, 124), (236, 125), (236, 126), (236, 129), (237, 120), (237, 122), (237, 123), (237, 124), (237, 125), (237, 126), (237, 127), (237, 128), (237, 130), (238, 120), (238, 122), (238, 123), (238, 124), (238, 125),
(238, 126), (238, 127), (238, 128), (238, 129), (239, 119), (239, 121), (239, 122), (239, 123), (239, 124), (239, 125), (239, 126), (239, 127), (239, 128), (239, 129), (239, 130), (239, 133), (240, 118), (240, 120), (240, 121), (240, 122), (240, 123), (240, 124), (240, 125), (240, 126), (240, 127), (240, 128), (240, 129), (240, 130), (240, 131), (240, 134), (240, 170), (240, 172), (240, 173), (240, 174), (240, 175), (240, 176), (240, 177), (240, 178), (240, 179), (240, 180), (240, 181), (240, 183), (241, 117), (241, 119), (241, 120), (241, 121), (241, 122), (241, 123), (241, 124), (241, 125), (241, 126), (241, 127), (241, 128), (241, 129), (241, 130), (241, 131), (241, 132), (241, 133), (241, 136), (241, 169), (241, 184), (241, 186), (242, 116), (242, 118), (242, 119), (242, 120), (242, 121), (242, 122), (242, 123), (242, 124), (242, 125), (242, 126),
(242, 127), (242, 128), (242, 129), (242, 130), (242, 131), (242, 132), (242, 133), (242, 134), (242, 137), (242, 138), (242, 168), (242, 170), (242, 171), (242, 172), (242, 173), (242, 174), (242, 175), (242, 176), (242, 177), (242, 178), (242, 179), (242, 180), (242, 181), (242, 182), (242, 183), (242, 187), (243, 115), (243, 117), (243, 118), (243, 119), (243, 120), (243, 121), (243, 122), (243, 123), (243, 124), (243, 125), (243, 126), (243, 127), (243, 128), (243, 129), (243, 130), (243, 131), (243, 132), (243, 133), (243, 134), (243, 135), (243, 136), (243, 140), (243, 141), (243, 142), (243, 143), (243, 144), (243, 145), (243, 158), (243, 166), (243, 169), (243, 170), (243, 171), (243, 172), (243, 173), (243, 174), (243, 175), (243, 176), (243, 177), (243, 178), (243, 179), (243, 180), (243, 181), (243, 182), (243, 183), (243, 184), (243, 185),
(244, 114), (244, 117), (244, 118), (244, 119), (244, 120), (244, 121), (244, 122), (244, 123), (244, 124), (244, 125), (244, 126), (244, 127), (244, 128), (244, 129), (244, 130), (244, 131), (244, 132), (244, 133), (244, 134), (244, 135), (244, 136), (244, 137), (244, 138), (244, 139), (244, 146), (244, 147), (244, 148), (244, 157), (244, 158), (244, 161), (244, 163), (244, 166), (244, 168), (244, 169), (244, 170), (244, 171), (244, 172), (244, 173), (244, 174), (244, 175), (244, 176), (244, 177), (244, 178), (244, 179), (244, 180), (244, 181), (244, 182), (244, 183), (244, 184), (244, 185), (244, 186), (244, 188), (245, 114), (245, 116), (245, 117), (245, 118), (245, 119), (245, 120), (245, 121), (245, 122), (245, 123), (245, 124), (245, 125), (245, 126), (245, 127), (245, 128), (245, 129), (245, 130), (245, 131), (245, 132), (245, 133), (245, 134),
(245, 135), (245, 136), (245, 137), (245, 138), (245, 139), (245, 140), (245, 141), (245, 142), (245, 143), (245, 144), (245, 145), (245, 149), (245, 150), (245, 151), (245, 152), (245, 158), (245, 159), (245, 160), (245, 166), (245, 167), (245, 168), (245, 169), (245, 170), (245, 171), (245, 172), (245, 173), (245, 174), (245, 175), (245, 176), (245, 177), (245, 178), (245, 179), (245, 180), (245, 181), (245, 182), (245, 183), (245, 184), (245, 185), (245, 186), (245, 187), (245, 189), (246, 113), (246, 115), (246, 116), (246, 117), (246, 118), (246, 119), (246, 120), (246, 121), (246, 122), (246, 123), (246, 124), (246, 125), (246, 126), (246, 127), (246, 128), (246, 129), (246, 130), (246, 131), (246, 132), (246, 133), (246, 134), (246, 135), (246, 136), (246, 137), (246, 138), (246, 139), (246, 140), (246, 141), (246, 142), (246, 143), (246, 144),
(246, 145), (246, 146), (246, 147), (246, 148), (246, 153), (246, 154), (246, 155), (246, 157), (246, 158), (246, 161), (246, 162), (246, 163), (246, 166), (246, 167), (246, 168), (246, 169), (246, 170), (246, 171), (246, 172), (246, 173), (246, 174), (246, 175), (246, 176), (246, 177), (246, 178), (246, 179), (246, 180), (246, 181), (246, 182), (246, 183), (246, 184), (246, 185), (246, 186), (246, 187), (246, 188), (247, 112), (247, 114), (247, 115), (247, 116), (247, 117), (247, 118), (247, 119), (247, 120), (247, 121), (247, 122), (247, 123), (247, 124), (247, 125), (247, 126), (247, 127), (247, 128), (247, 129), (247, 130), (247, 131), (247, 132), (247, 133), (247, 134), (247, 135), (247, 136), (247, 137), (247, 138), (247, 139), (247, 140), (247, 141), (247, 142), (247, 143), (247, 144), (247, 145), (247, 146), (247, 147), (247, 148), (247, 149),
(247, 150), (247, 151), (247, 152), (247, 153), (247, 157), (247, 158), (247, 159), (247, 160), (247, 161), (247, 162), (247, 163), (247, 164), (247, 165), (247, 166), (247, 167), (247, 168), (247, 169), (247, 170), (247, 171), (247, 172), (247, 173), (247, 174), (247, 175), (247, 176), (247, 177), (247, 178), (247, 179), (247, 180), (247, 181), (247, 182), (247, 183), (247, 184), (247, 185), (247, 186), (247, 187), (247, 188), (248, 112), (248, 114), (248, 115), (248, 116), (248, 117), (248, 118), (248, 119), (248, 120), (248, 121), (248, 122), (248, 123), (248, 124), (248, 125), (248, 126), (248, 127), (248, 128), (248, 129), (248, 130), (248, 131), (248, 132), (248, 133), (248, 134), (248, 135), (248, 136), (248, 137), (248, 138), (248, 139), (248, 140), (248, 141), (248, 142), (248, 143), (248, 144), (248, 145), (248, 146), (248, 147), (248, 148),
(248, 149), (248, 150), (248, 151), (248, 152), (248, 153), (248, 154), (248, 155), (248, 156), (248, 157), (248, 158), (248, 159), (248, 160), (248, 161), (248, 162), (248, 163), (248, 164), (248, 165), (248, 166), (248, 167), (248, 168), (248, 169), (248, 170), (248, 171), (248, 172), (248, 173), (248, 174), (248, 175), (248, 176), (248, 177), (248, 178), (248, 179), (248, 180), (248, 181), (248, 182), (248, 183), (248, 184), (248, 185), (248, 186), (248, 187), (248, 188), (248, 190), (249, 111), (249, 113), (249, 114), (249, 115), (249, 116), (249, 117), (249, 118), (249, 119), (249, 120), (249, 121), (249, 122), (249, 123), (249, 124), (249, 125), (249, 126), (249, 127), (249, 128), (249, 129), (249, 130), (249, 131), (249, 132), (249, 133), (249, 134), (249, 135), (249, 136), (249, 137), (249, 138), (249, 139), (249, 140), (249, 141), (249, 142),
(249, 143), (249, 144), (249, 145), (249, 146), (249, 147), (249, 148), (249, 149), (249, 150), (249, 151), (249, 152), (249, 153), (249, 154), (249, 155), (249, 156), (249, 157), (249, 158), (249, 159), (249, 160), (249, 161), (249, 162), (249, 163), (249, 164), (249, 165), (249, 166), (249, 167), (249, 168), (249, 169), (249, 170), (249, 171), (249, 172), (249, 173), (249, 174), (249, 175), (249, 176), (249, 177), (249, 178), (249, 179), (249, 180), (249, 181), (249, 182), (249, 183), (249, 184), (249, 185), (249, 186), (249, 187), (249, 189), (250, 111), (250, 113), (250, 114), (250, 115), (250, 116), (250, 117), (250, 118), (250, 119), (250, 120), (250, 121), (250, 122), (250, 123), (250, 124), (250, 125), (250, 126), (250, 127), (250, 128), (250, 129), (250, 130), (250, 131), (250, 132), (250, 133), (250, 134), (250, 135), (250, 136), (250, 137),
(250, 138), (250, 139), (250, 140), (250, 141), (250, 142), (250, 143), (250, 144), (250, 145), (250, 146), (250, 147), (250, 148), (250, 149), (250, 150), (250, 151), (250, 152), (250, 153), (250, 154), (250, 155), (250, 156), (250, 157), (250, 158), (250, 159), (250, 160), (250, 161), (250, 162), (250, 163), (250, 164), (250, 165), (250, 166), (250, 167), (250, 168), (250, 169), (250, 170), (250, 171), (250, 172), (250, 173), (250, 174), (250, 175), (250, 176), (250, 177), (250, 178), (250, 179), (250, 180), (250, 181), (250, 182), (250, 183), (250, 184), (250, 185), (250, 186), (250, 188), (251, 111), (251, 112), (251, 113), (251, 114), (251, 115), (251, 116), (251, 117), (251, 118), (251, 119), (251, 120), (251, 121), (251, 122), (251, 123), (251, 124), (251, 125), (251, 126), (251, 127), (251, 128), (251, 129), (251, 130), (251, 131), (251, 132),
(251, 133), (251, 134), (251, 135), (251, 136), (251, 137), (251, 138), (251, 139), (251, 140), (251, 141), (251, 142), (251, 143), (251, 144), (251, 145), (251, 146), (251, 147), (251, 148), (251, 149), (251, 150), (251, 151), (251, 152), (251, 153), (251, 154), (251, 155), (251, 156), (251, 157), (251, 158), (251, 159), (251, 160), (251, 161), (251, 162), (251, 163), (251, 164), (251, 165), (251, 166), (251, 167), (251, 168), (251, 169), (251, 170), (251, 171), (251, 172), (251, 173), (251, 174), (251, 175), (251, 176), (251, 177), (251, 178), (251, 179), (251, 180), (251, 181), (251, 182), (251, 183), (251, 184), (251, 185), (251, 187), (252, 112), (252, 114), (252, 115), (252, 116), (252, 117), (252, 118), (252, 119), (252, 120), (252, 121), (252, 122), (252, 123), (252, 124), (252, 125), (252, 126), (252, 127), (252, 128), (252, 129), (252, 130),
(252, 131), (252, 132), (252, 133), (252, 134), (252, 135), (252, 136), (252, 137), (252, 138), (252, 139), (252, 140), (252, 141), (252, 142), (252, 143), (252, 144), (252, 145), (252, 146), (252, 147), (252, 148), (252, 149), (252, 150), (252, 151), (252, 152), (252, 153), (252, 154), (252, 155), (252, 156), (252, 157), (252, 158), (252, 159), (252, 160), (252, 161), (252, 162), (252, 163), (252, 164), (252, 165), (252, 166), (252, 167), (252, 168), (252, 169), (252, 170), (252, 171), (252, 172), (252, 173), (252, 174), (252, 175), (252, 176), (252, 177), (252, 178), (252, 179), (252, 180), (252, 181), (252, 182), (252, 183), (252, 184), (252, 185), (252, 187), (253, 112), (253, 114), (253, 115), (253, 116), (253, 117), (253, 118), (253, 119), (253, 120), (253, 121), (253, 122), (253, 123), (253, 124), (253, 125), (253, 126), (253, 127), (253, 128),
(253, 129), (253, 130), (253, 131), (253, 132), (253, 133), (253, 134), (253, 135), (253, 136), (253, 137), (253, 138), (253, 139), (253, 140), (253, 141), (253, 142), (253, 143), (253, 144), (253, 145), (253, 146), (253, 147), (253, 148), (253, 149), (253, 150), (253, 151), (253, 152), (253, 153), (253, 154), (253, 155), (253, 156), (253, 157), (253, 158), (253, 159), (253, 160), (253, 161), (253, 162), (253, 163), (253, 164), (253, 165), (253, 166), (253, 167), (253, 168), (253, 169), (253, 170), (253, 171), (253, 172), (253, 173), (253, 174), (253, 175), (253, 176), (253, 177), (253, 178), (253, 179), (253, 180), (253, 181), (253, 182), (253, 183), (253, 184), (253, 186), (254, 112), (254, 114), (254, 115), (254, 116), (254, 117), (254, 118), (254, 119), (254, 120), (254, 121), (254, 122), (254, 123), (254, 124), (254, 125), (254, 126), (254, 127),
(254, 128), (254, 129), (254, 130), (254, 131), (254, 132), (254, 133), (254, 134), (254, 135), (254, 136), (254, 137), (254, 138), (254, 139), (254, 140), (254, 141), (254, 142), (254, 143), (254, 144), (254, 145), (254, 146), (254, 147), (254, 148), (254, 149), (254, 150), (254, 151), (254, 152), (254, 153), (254, 154), (254, 155), (254, 156), (254, 157), (254, 158), (254, 159), (254, 160), (254, 161), (254, 162), (254, 163), (254, 164), (254, 165), (254, 166), (254, 167), (254, 168), (254, 169), (254, 170), (254, 171), (254, 172), (254, 173), (254, 174), (254, 175), (254, 176), (254, 177), (254, 178), (254, 179), (254, 180), (254, 181), (254, 182), (254, 183), (254, 184), (254, 186), (255, 113), (255, 115), (255, 116), (255, 117), (255, 118), (255, 119), (255, 120), (255, 121), (255, 122), (255, 123), (255, 124), (255, 125), (255, 126), (255, 127),
(255, 128), (255, 129), (255, 130), (255, 131), (255, 132), (255, 133), (255, 134), (255, 135), (255, 136), (255, 137), (255, 138), (255, 139), (255, 140), (255, 141), (255, 142), (255, 143), (255, 144), (255, 145), (255, 146), (255, 147), (255, 148), (255, 149), (255, 150), (255, 151), (255, 152), (255, 153), (255, 154), (255, 155), (255, 156), (255, 157), (255, 158), (255, 159), (255, 160), (255, 161), (255, 162), (255, 163), (255, 164), (255, 165), (255, 166), (255, 167), (255, 168), (255, 169), (255, 170), (255, 171), (255, 172), (255, 173), (255, 174), (255, 175), (255, 176), (255, 177), (255, 178), (255, 179), (255, 180), (255, 181), (255, 182), (255, 183), (255, 184), (255, 186), (256, 114), (256, 117), (256, 118), (256, 119), (256, 120), (256, 121), (256, 122), (256, 123), (256, 124), (256, 125), (256, 126), (256, 127), (256, 128), (256, 129),
(256, 130), (256, 131), (256, 132), (256, 133), (256, 134), (256, 135), (256, 136), (256, 137), (256, 138), (256, 139), (256, 140), (256, 141), (256, 142), (256, 143), (256, 144), (256, 145), (256, 146), (256, 147), (256, 148), (256, 149), (256, 150), (256, 151), (256, 152), (256, 153), (256, 154), (256, 155), (256, 156), (256, 157), (256, 158), (256, 159), (256, 160), (256, 161), (256, 162), (256, 163), (256, 164), (256, 165), (256, 166), (256, 167), (256, 168), (256, 169), (256, 170), (256, 171), (256, 172), (256, 173), (256, 174), (256, 175), (256, 176), (256, 177), (256, 178), (256, 179), (256, 180), (256, 181), (256, 182), (256, 183), (256, 184), (256, 186), (257, 115), (257, 120), (257, 121), (257, 122), (257, 123), (257, 124), (257, 125), (257, 126), (257, 127), (257, 128), (257, 129), (257, 130), (257, 131), (257, 132), (257, 133), (257, 134),
(257, 135), (257, 136), (257, 137), (257, 138), (257, 139), (257, 140), (257, 141), (257, 142), (257, 143), (257, 144), (257, 145), (257, 146), (257, 147), (257, 148), (257, 149), (257, 150), (257, 151), (257, 152), (257, 153), (257, 154), (257, 155), (257, 156), (257, 157), (257, 158), (257, 159), (257, 160), (257, 161), (257, 162), (257, 163), (257, 164), (257, 165), (257, 166), (257, 167), (257, 168), (257, 169), (257, 170), (257, 171), (257, 172), (257, 173), (257, 174), (257, 175), (257, 176), (257, 177), (257, 178), (257, 179), (257, 180), (257, 181), (257, 182), (257, 183), (257, 184), (257, 186), (258, 117), (258, 119), (258, 123), (258, 124), (258, 125), (258, 126), (258, 127), (258, 128), (258, 129), (258, 130), (258, 131), (258, 132), (258, 133), (258, 134), (258, 135), (258, 136), (258, 137), (258, 138), (258, 139), (258, 140), (258, 141),
(258, 142), (258, 143), (258, 144), (258, 145), (258, 146), (258, 147), (258, 148), (258, 149), (258, 150), (258, 151), (258, 152), (258, 153), (258, 154), (258, 155), (258, 156), (258, 157), (258, 158), (258, 159), (258, 160), (258, 161), (258, 162), (258, 163), (258, 164), (258, 165), (258, 166), (258, 167), (258, 168), (258, 169), (258, 170), (258, 171), (258, 172), (258, 173), (258, 174), (258, 175), (258, 176), (258, 177), (258, 178), (258, 179), (258, 180), (258, 181), (258, 182), (258, 183), (258, 184), (258, 186), (259, 120), (259, 122), (259, 123), (259, 127), (259, 128), (259, 129), (259, 130), (259, 131), (259, 132), (259, 133), (259, 134), (259, 135), (259, 136), (259, 137), (259, 138), (259, 139), (259, 140), (259, 141), (259, 142), (259, 143), (259, 144), (259, 145), (259, 146), (259, 147), (259, 148), (259, 149), (259, 150), (259, 151),
(259, 152), (259, 153), (259, 154), (259, 155), (259, 156), (259, 157), (259, 158), (259, 159), (259, 160), (259, 161), (259, 162), (259, 163), (259, 164), (259, 165), (259, 166), (259, 167), (259, 168), (259, 169), (259, 170), (259, 171), (259, 172), (259, 173), (259, 174), (259, 175), (259, 176), (259, 177), (259, 178), (259, 179), (259, 180), (259, 181), (259, 182), (259, 183), (260, 124), (260, 125), (260, 126), (260, 130), (260, 131), (260, 132), (260, 133), (260, 134), (260, 135), (260, 136), (260, 137), (260, 138), (260, 139), (260, 140), (260, 141), (260, 142), (260, 143), (260, 144), (260, 145), (260, 146), (260, 147), (260, 148), (260, 149), (260, 150), (260, 151), (260, 152), (260, 153), (260, 154), (260, 155), (260, 156), (260, 157), (260, 158), (260, 159), (260, 160), (260, 161), (260, 162), (260, 163), (260, 164), (260, 165), (260, 166),
(260, 167), (260, 168), (260, 169), (260, 170), (260, 171), (260, 172), (260, 173), (260, 174), (260, 175), (260, 176), (260, 177), (260, 178), (260, 179), (260, 180), (260, 181), (260, 185), (261, 127), (261, 128), (261, 129), (261, 132), (261, 133), (261, 134), (261, 135), (261, 136), (261, 137), (261, 138), (261, 139), (261, 140), (261, 141), (261, 142), (261, 143), (261, 144), (261, 145), (261, 146), (261, 147), (261, 148), (261, 149), (261, 150), (261, 151), (261, 152), (261, 153), (261, 154), (261, 155), (261, 156), (261, 157), (261, 158), (261, 159), (261, 160), (261, 161), (261, 162), (261, 163), (261, 164), (261, 165), (261, 166), (261, 167), (261, 168), (261, 169), (261, 170), (261, 171), (261, 172), (261, 173), (261, 174), (261, 175), (261, 176), (261, 177), (261, 178), (261, 179), (261, 183), (262, 130), (262, 133), (262, 134), (262, 135),
(262, 136), (262, 137), (262, 138), (262, 139), (262, 140), (262, 141), (262, 142), (262, 143), (262, 144), (262, 145), (262, 146), (262, 147), (262, 148), (262, 149), (262, 150), (262, 151), (262, 152), (262, 153), (262, 154), (262, 155), (262, 156), (262, 157), (262, 158), (262, 159), (262, 160), (262, 161), (262, 162), (262, 163), (262, 164), (262, 165), (262, 166), (262, 167), (262, 168), (262, 169), (262, 170), (262, 171), (262, 172), (262, 173), (262, 174), (262, 175), (262, 176), (262, 177), (262, 181), (263, 134), (263, 135), (263, 136), (263, 137), (263, 138), (263, 139), (263, 140), (263, 141), (263, 142), (263, 143), (263, 144), (263, 145), (263, 146), (263, 147), (263, 148), (263, 149), (263, 150), (263, 151), (263, 152), (263, 153), (263, 154), (263, 155), (263, 156), (263, 157), (263, 158), (263, 159), (263, 160), (263, 161), (263, 162),
(263, 163), (263, 164), (263, 165), (263, 166), (263, 167), (263, 168), (263, 169), (263, 170), (263, 171), (263, 172), (263, 173), (263, 174), (263, 175), (263, 179), (264, 135), (264, 136), (264, 137), (264, 138), (264, 139), (264, 140), (264, 141), (264, 142), (264, 143), (264, 144), (264, 145), (264, 146), (264, 147), (264, 148), (264, 149), (264, 150), (264, 151), (264, 152), (264, 153), (264, 154), (264, 155), (264, 156), (264, 157), (264, 158), (264, 159), (264, 160), (264, 161), (264, 162), (264, 163), (264, 164), (264, 165), (264, 166), (264, 167), (264, 168), (264, 169), (264, 170), (264, 171), (264, 172), (264, 173), (264, 177), (265, 136), (265, 137), (265, 138), (265, 139), (265, 140), (265, 141), (265, 142), (265, 143), (265, 144), (265, 145), (265, 146), (265, 147), (265, 148), (265, 149), (265, 150), (265, 151), (265, 152), (265, 153),
(265, 154), (265, 155), (265, 156), (265, 157), (265, 158), (265, 159), (265, 160), (265, 161), (265, 162), (265, 163), (265, 164), (265, 165), (265, 166), (265, 167), (265, 168), (265, 169), (265, 170), (265, 175), (266, 134), (266, 137), (266, 138), (266, 139), (266, 140), (266, 141), (266, 142), (266, 143), (266, 144), (266, 145), (266, 146), (266, 147), (266, 148), (266, 149), (266, 150), (266, 151), (266, 152), (266, 153), (266, 154), (266, 155), (266, 156), (266, 157), (266, 158), (266, 159), (266, 160), (266, 161), (266, 162), (266, 163), (266, 164), (266, 165), (266, 166), (266, 167), (266, 171), (266, 172), (267, 136), (267, 143), (267, 144), (267, 145), (267, 146), (267, 147), (267, 148), (267, 149), (267, 150), (267, 151), (267, 152), (267, 153), (267, 154), (267, 155), (267, 156), (267, 157), (267, 158), (267, 159), (267, 160), (267, 161),
(267, 162), (267, 163), (267, 164), (267, 168), (267, 170), (268, 137), (268, 139), (268, 140), (268, 141), (268, 142), (268, 146), (268, 147), (268, 148), (268, 149), (268, 150), (268, 151), (268, 152), (268, 153), (268, 154), (268, 155), (268, 156), (268, 157), (268, 158), (268, 159), (268, 160), (268, 165), (268, 167), (269, 143), (269, 145), (269, 161), (269, 162), (269, 163), (269, 164), (270, 146), (270, 147), (270, 148), (270, 149), (270, 150), (270, 151), (270, 152), (270, 153), (270, 154), (270, 155), (270, 156), (270, 157), (270, 158), (270, 159), (270, 160), )
coordinates_FF00FF = ((66, 101),
(66, 103), (66, 104), (66, 105), (66, 106), (66, 108), (67, 99), (67, 109), (68, 97), (68, 98), (68, 101), (68, 102), (68, 103), (68, 104), (68, 105), (68, 106), (68, 107), (68, 108), (68, 110), (69, 96), (69, 99), (69, 100), (69, 101), (69, 102), (69, 103), (69, 104), (69, 105), (69, 106), (69, 107), (69, 108), (69, 110), (70, 94), (70, 97), (70, 98), (70, 99), (70, 100), (70, 101), (70, 102), (70, 103), (70, 104), (70, 105), (70, 106), (70, 107), (70, 108), (70, 109), (70, 111), (71, 93), (71, 96), (71, 97), (71, 98), (71, 99), (71, 100), (71, 101), (71, 102), (71, 103), (71, 104), (71, 105), (71, 106), (71, 107), (71, 108), (71, 109), (71, 110), (71, 112), (72, 91), (72, 94), (72, 95), (72, 96), (72, 97), (72, 98), (72, 99), (72, 100), (72, 101), (72, 102),
(72, 103), (72, 104), (72, 105), (72, 106), (72, 107), (72, 108), (72, 109), (72, 110), (72, 111), (72, 113), (73, 89), (73, 93), (73, 94), (73, 95), (73, 96), (73, 97), (73, 98), (73, 99), (73, 100), (73, 101), (73, 102), (73, 103), (73, 104), (73, 105), (73, 106), (73, 107), (73, 108), (73, 109), (73, 110), (73, 111), (73, 113), (74, 88), (74, 91), (74, 92), (74, 93), (74, 94), (74, 95), (74, 96), (74, 97), (74, 98), (74, 99), (74, 100), (74, 101), (74, 102), (74, 103), (74, 104), (74, 105), (74, 106), (74, 107), (74, 108), (74, 109), (74, 110), (74, 111), (74, 112), (74, 114), (75, 87), (75, 89), (75, 90), (75, 91), (75, 92), (75, 93), (75, 94), (75, 95), (75, 96), (75, 97), (75, 98), (75, 99), (75, 100), (75, 101), (75, 102), (75, 103), (75, 104),
(75, 105), (75, 106), (75, 107), (75, 108), (75, 109), (75, 110), (75, 111), (75, 112), (75, 113), (75, 115), (76, 85), (76, 88), (76, 89), (76, 90), (76, 91), (76, 92), (76, 93), (76, 94), (76, 95), (76, 96), (76, 97), (76, 98), (76, 99), (76, 100), (76, 101), (76, 102), (76, 103), (76, 104), (76, 105), (76, 106), (76, 107), (76, 108), (76, 109), (76, 110), (76, 111), (76, 112), (76, 113), (76, 114), (76, 116), (77, 84), (77, 87), (77, 88), (77, 89), (77, 90), (77, 91), (77, 92), (77, 93), (77, 94), (77, 95), (77, 96), (77, 97), (77, 98), (77, 99), (77, 100), (77, 101), (77, 102), (77, 103), (77, 104), (77, 105), (77, 106), (77, 107), (77, 108), (77, 109), (77, 110), (77, 111), (77, 112), (77, 113), (77, 114), (77, 115), (77, 117), (78, 82), (78, 85),
(78, 86), (78, 87), (78, 88), (78, 89), (78, 90), (78, 91), (78, 92), (78, 93), (78, 94), (78, 95), (78, 96), (78, 97), (78, 98), (78, 99), (78, 100), (78, 101), (78, 102), (78, 103), (78, 104), (78, 105), (78, 106), (78, 107), (78, 108), (78, 109), (78, 110), (78, 111), (78, 112), (78, 113), (78, 114), (78, 115), (78, 117), (79, 81), (79, 84), (79, 85), (79, 86), (79, 87), (79, 88), (79, 89), (79, 90), (79, 91), (79, 92), (79, 93), (79, 94), (79, 95), (79, 96), (79, 97), (79, 98), (79, 99), (79, 100), (79, 101), (79, 102), (79, 103), (79, 104), (79, 105), (79, 106), (79, 107), (79, 108), (79, 109), (79, 110), (79, 111), (79, 112), (79, 113), (79, 114), (79, 115), (79, 116), (79, 118), (80, 80), (80, 83), (80, 84), (80, 85), (80, 86), (80, 87),
(80, 88), (80, 89), (80, 90), (80, 91), (80, 92), (80, 93), (80, 94), (80, 95), (80, 96), (80, 97), (80, 98), (80, 99), (80, 100), (80, 101), (80, 102), (80, 103), (80, 104), (80, 105), (80, 106), (80, 107), (80, 108), (80, 109), (80, 110), (80, 111), (80, 112), (80, 113), (80, 114), (80, 115), (80, 116), (80, 117), (80, 119), (81, 79), (81, 81), (81, 82), (81, 83), (81, 84), (81, 85), (81, 86), (81, 87), (81, 88), (81, 89), (81, 90), (81, 91), (81, 92), (81, 93), (81, 94), (81, 95), (81, 96), (81, 97), (81, 98), (81, 99), (81, 100), (81, 101), (81, 102), (81, 103), (81, 104), (81, 105), (81, 106), (81, 107), (81, 108), (81, 109), (81, 110), (81, 111), (81, 112), (81, 113), (81, 114), (81, 115), (81, 116), (81, 117), (81, 118), (81, 120), (82, 78),
(82, 80), (82, 81), (82, 82), (82, 83), (82, 84), (82, 85), (82, 86), (82, 87), (82, 88), (82, 89), (82, 90), (82, 91), (82, 92), (82, 93), (82, 94), (82, 95), (82, 96), (82, 97), (82, 98), (82, 99), (82, 100), (82, 101), (82, 102), (82, 103), (82, 104), (82, 105), (82, 106), (82, 107), (82, 108), (82, 109), (82, 110), (82, 111), (82, 112), (82, 113), (82, 114), (82, 115), (82, 116), (82, 117), (82, 118), (82, 120), (83, 77), (83, 79), (83, 80), (83, 81), (83, 82), (83, 83), (83, 84), (83, 85), (83, 86), (83, 87), (83, 88), (83, 89), (83, 90), (83, 91), (83, 92), (83, 93), (83, 94), (83, 95), (83, 96), (83, 97), (83, 98), (83, 99), (83, 100), (83, 101), (83, 102), (83, 103), (83, 104), (83, 105), (83, 106), (83, 107), (83, 108), (83, 109),
(83, 110), (83, 111), (83, 112), (83, 113), (83, 114), (83, 115), (83, 116), (83, 117), (83, 118), (83, 121), (84, 76), (84, 78), (84, 79), (84, 80), (84, 81), (84, 82), (84, 83), (84, 84), (84, 85), (84, 86), (84, 87), (84, 88), (84, 89), (84, 90), (84, 91), (84, 92), (84, 93), (84, 94), (84, 95), (84, 96), (84, 97), (84, 98), (84, 99), (84, 100), (84, 101), (84, 102), (84, 103), (84, 104), (84, 105), (84, 106), (84, 107), (84, 108), (84, 109), (84, 110), (84, 111), (84, 112), (84, 113), (84, 114), (84, 115), (84, 116), (84, 117), (84, 119), (84, 121), (85, 75), (85, 77), (85, 78), (85, 79), (85, 80), (85, 81), (85, 82), (85, 83), (85, 84), (85, 85), (85, 86), (85, 87), (85, 88), (85, 89), (85, 90), (85, 91), (85, 92), (85, 93), (85, 94),
(85, 95), (85, 96), (85, 97), (85, 98), (85, 99), (85, 100), (85, 101), (85, 102), (85, 103), (85, 104), (85, 105), (85, 106), (85, 107), (85, 108), (85, 109), (85, 110), (85, 111), (85, 112), (85, 113), (85, 114), (85, 115), (85, 118), (86, 74), (86, 76), (86, 77), (86, 78), (86, 79), (86, 80), (86, 81), (86, 82), (86, 83), (86, 84), (86, 85), (86, 86), (86, 87), (86, 88), (86, 89), (86, 90), (86, 91), (86, 92), (86, 93), (86, 94), (86, 95), (86, 96), (86, 97), (86, 98), (86, 99), (86, 100), (86, 101), (86, 102), (86, 103), (86, 104), (86, 111), (86, 112), (86, 113), (86, 114), (86, 117), (87, 75), (87, 76), (87, 77), (87, 78), (87, 79), (87, 80), (87, 81), (87, 82), (87, 83), (87, 84), (87, 85), (87, 86), (87, 87), (87, 88), (87, 89),
(87, 90), (87, 91), (87, 92), (87, 93), (87, 94), (87, 95), (87, 96), (87, 97), (87, 98), (87, 99), (87, 100), (87, 101), (87, 102), (87, 105), (87, 106), (87, 107), (87, 108), (87, 109), (87, 110), (87, 115), (88, 72), (88, 74), (88, 75), (88, 76), (88, 77), (88, 78), (88, 79), (88, 80), (88, 81), (88, 82), (88, 83), (88, 84), (88, 85), (88, 86), (88, 87), (88, 88), (88, 89), (88, 90), (88, 91), (88, 92), (88, 93), (88, 94), (88, 95), (88, 96), (88, 97), (88, 98), (88, 99), (88, 100), (88, 101), (88, 104), (88, 111), (88, 112), (88, 114), (89, 70), (89, 74), (89, 75), (89, 76), (89, 77), (89, 78), (89, 79), (89, 80), (89, 81), (89, 82), (89, 83), (89, 84), (89, 85), (89, 86), (89, 87), (89, 88), (89, 89), (89, 90), (89, 91),
(89, 92), (89, 93), (89, 94), (89, 95), (89, 96), (89, 97), (89, 98), (89, 99), (89, 102), (90, 69), (90, 72), (90, 73), (90, 74), (90, 75), (90, 76), (90, 77), (90, 78), (90, 79), (90, 80), (90, 81), (90, 82), (90, 83), (90, 84), (90, 85), (90, 86), (90, 87), (90, 88), (90, 89), (90, 90), (90, 91), (90, 92), (90, 93), (90, 94), (90, 95), (90, 96), (90, 97), (90, 98), (90, 101), (91, 69), (91, 71), (91, 72), (91, 73), (91, 74), (91, 75), (91, 76), (91, 77), (91, 78), (91, 79), (91, 80), (91, 81), (91, 82), (91, 83), (91, 84), (91, 85), (91, 86), (91, 87), (91, 88), (91, 89), (91, 90), (91, 91), (91, 92), (91, 93), (91, 94), (91, 99), (92, 68), (92, 70), (92, 71), (92, 72), (92, 73), (92, 74), (92, 75), (92, 76),
(92, 77), (92, 78), (92, 79), (92, 80), (92, 81), (92, 82), (92, 83), (92, 84), (92, 85), (92, 86), (92, 87), (92, 88), (92, 89), (92, 90), (92, 91), (92, 92), (92, 93), (92, 95), (92, 96), (92, 97), (93, 68), (93, 70), (93, 71), (93, 72), (93, 73), (93, 74), (93, 75), (93, 76), (93, 77), (93, 78), (93, 79), (93, 80), (93, 81), (93, 82), (93, 83), (93, 84), (93, 85), (93, 86), (93, 87), (93, 88), (93, 89), (93, 90), (93, 91), (93, 92), (94, 67), (94, 69), (94, 70), (94, 71), (94, 72), (94, 73), (94, 74), (94, 75), (94, 76), (94, 77), (94, 78), (94, 79), (94, 80), (94, 81), (94, 82), (94, 83), (94, 84), (94, 85), (94, 86), (94, 87), (94, 88), (94, 89), (94, 90), (94, 91), (94, 93), (95, 67), (95, 69), (95, 70),
(95, 71), (95, 72), (95, 73), (95, 74), (95, 75), (95, 76), (95, 77), (95, 78), (95, 79), (95, 80), (95, 81), (95, 82), (95, 83), (95, 84), (95, 85), (95, 86), (95, 87), (95, 88), (95, 89), (95, 90), (95, 92), (96, 66), (96, 68), (96, 69), (96, 70), (96, 71), (96, 72), (96, 73), (96, 74), (96, 75), (96, 76), (96, 77), (96, 78), (96, 79), (96, 80), (96, 81), (96, 82), (96, 83), (96, 84), (96, 85), (96, 86), (96, 87), (96, 88), (96, 89), (96, 90), (96, 92), (97, 66), (97, 68), (97, 69), (97, 70), (97, 71), (97, 72), (97, 73), (97, 74), (97, 75), (97, 76), (97, 77), (97, 78), (97, 79), (97, 80), (97, 81), (97, 82), (97, 83), (97, 84), (97, 85), (97, 86), (97, 87), (97, 88), (97, 89), (97, 91), (98, 65), (98, 67),
(98, 68), (98, 69), (98, 70), (98, 71), (98, 72), (98, 73), (98, 74), (98, 75), (98, 76), (98, 77), (98, 78), (98, 79), (98, 80), (98, 81), (98, 82), (98, 83), (98, 84), (98, 85), (98, 86), (98, 87), (98, 88), (98, 89), (98, 91), (99, 65), (99, 67), (99, 68), (99, 69), (99, 70), (99, 71), (99, 72), (99, 73), (99, 74), (99, 75), (99, 76), (99, 77), (99, 78), (99, 79), (99, 80), (99, 81), (99, 82), (99, 83), (99, 84), (99, 85), (99, 86), (99, 87), (99, 88), (99, 89), (99, 91), (100, 64), (100, 65), (100, 66), (100, 67), (100, 68), (100, 69), (100, 70), (100, 71), (100, 72), (100, 73), (100, 74), (100, 75), (100, 76), (100, 77), (100, 78), (100, 79), (100, 80), (100, 81), (100, 82), (100, 83), (100, 84), (100, 85), (100, 86), (100, 87),
(100, 88), (100, 90), (101, 64), (101, 66), (101, 67), (101, 68), (101, 69), (101, 70), (101, 71), (101, 72), (101, 73), (101, 74), (101, 75), (101, 76), (101, 77), (101, 78), (101, 79), (101, 80), (101, 81), (101, 82), (101, 83), (101, 84), (101, 85), (101, 86), (101, 89), (102, 64), (102, 66), (102, 67), (102, 68), (102, 69), (102, 70), (102, 71), (102, 72), (102, 73), (102, 74), (102, 75), (102, 76), (102, 77), (102, 78), (102, 79), (102, 80), (102, 81), (102, 82), (102, 83), (102, 84), (102, 85), (102, 88), (103, 63), (103, 65), (103, 66), (103, 67), (103, 68), (103, 69), (103, 70), (103, 71), (103, 72), (103, 73), (103, 74), (103, 75), (103, 76), (103, 77), (103, 78), (103, 79), (103, 80), (103, 81), (103, 82), (103, 83), (103, 84), (103, 86), (104, 63), (104, 65), (104, 66),
(104, 67), (104, 68), (104, 69), (104, 70), (104, 71), (104, 72), (104, 73), (104, 74), (104, 75), (104, 76), (104, 77), (104, 78), (104, 79), (104, 80), (104, 81), (104, 82), (104, 85), (105, 62), (105, 64), (105, 65), (105, 66), (105, 67), (105, 68), (105, 69), (105, 70), (105, 71), (105, 72), (105, 73), (105, 74), (105, 75), (105, 76), (105, 77), (105, 78), (105, 79), (105, 80), (105, 81), (105, 84), (106, 62), (106, 64), (106, 65), (106, 66), (106, 67), (106, 68), (106, 69), (106, 70), (106, 71), (106, 72), (106, 73), (106, 74), (106, 75), (106, 76), (106, 77), (106, 78), (106, 79), (106, 80), (107, 61), (107, 63), (107, 64), (107, 65), (107, 66), (107, 67), (107, 68), (107, 69), (107, 70), (107, 71), (107, 72), (107, 73), (107, 74), (107, 75), (107, 76), (107, 77), (107, 78),
(107, 79), (107, 81), (108, 59), (108, 62), (108, 63), (108, 64), (108, 65), (108, 66), (108, 67), (108, 68), (108, 69), (108, 70), (108, 71), (108, 72), (108, 73), (108, 74), (108, 75), (108, 76), (108, 77), (108, 78), (108, 80), (109, 58), (109, 61), (109, 62), (109, 63), (109, 64), (109, 65), (109, 66), (109, 67), (109, 68), (109, 69), (109, 70), (109, 71), (109, 72), (109, 73), (109, 74), (109, 75), (109, 76), (109, 77), (109, 79), (110, 58), (110, 60), (110, 61), (110, 62), (110, 63), (110, 64), (110, 65), (110, 66), (110, 67), (110, 68), (110, 69), (110, 70), (110, 71), (110, 72), (110, 73), (110, 74), (110, 75), (110, 76), (110, 78), (111, 57), (111, 59), (111, 60), (111, 61), (111, 62), (111, 63), (111, 64), (111, 65), (111, 66), (111, 67), (111, 68), (111, 69), (111, 70),
(111, 71), (111, 72), (111, 73), (111, 74), (111, 75), (111, 76), (111, 78), (112, 57), (112, 59), (112, 60), (112, 61), (112, 62), (112, 63), (112, 64), (112, 65), (112, 66), (112, 67), (112, 68), (112, 69), (112, 70), (112, 71), (112, 72), (112, 73), (112, 74), (112, 75), (112, 77), (113, 57), (113, 58), (113, 59), (113, 60), (113, 61), (113, 62), (113, 63), (113, 64), (113, 65), (113, 66), (113, 67), (113, 68), (113, 69), (113, 70), (113, 71), (113, 72), (113, 73), (113, 74), (113, 75), (113, 77), (114, 56), (114, 58), (114, 59), (114, 60), (114, 61), (114, 62), (114, 63), (114, 64), (114, 65), (114, 66), (114, 67), (114, 68), (114, 69), (114, 70), (114, 71), (114, 72), (114, 73), (114, 74), (114, 76), (115, 56), (115, 58), (115, 59), (115, 60), (115, 61), (115, 62), (115, 63),
(115, 64), (115, 65), (115, 66), (115, 67), (115, 68), (115, 69), (115, 70), (115, 71), (115, 72), (115, 73), (115, 74), (115, 76), (116, 55), (116, 57), (116, 58), (116, 59), (116, 60), (116, 61), (116, 62), (116, 63), (116, 64), (116, 65), (116, 66), (116, 67), (116, 68), (116, 69), (116, 70), (116, 71), (116, 72), (116, 73), (116, 75), (117, 55), (117, 57), (117, 58), (117, 59), (117, 60), (117, 61), (117, 62), (117, 63), (117, 64), (117, 65), (117, 66), (117, 67), (117, 68), (117, 69), (117, 70), (117, 71), (117, 72), (117, 73), (118, 55), (118, 57), (118, 58), (118, 59), (118, 60), (118, 61), (118, 62), (118, 63), (118, 64), (118, 65), (118, 66), (118, 67), (118, 68), (118, 69), (118, 70), (118, 71), (118, 72), (118, 74), (119, 54), (119, 56), (119, 57), (119, 58), (119, 59),
(119, 60), (119, 61), (119, 62), (119, 63), (119, 64), (119, 65), (119, 66), (119, 67), (119, 68), (119, 69), (119, 70), (119, 71), (119, 73), (120, 54), (120, 56), (120, 57), (120, 58), (120, 59), (120, 60), (120, 61), (120, 62), (120, 63), (120, 64), (120, 65), (120, 66), (120, 67), (120, 68), (120, 69), (120, 70), (120, 71), (120, 73), (121, 54), (121, 56), (121, 57), (121, 58), (121, 59), (121, 60), (121, 61), (121, 62), (121, 63), (121, 64), (121, 65), (121, 66), (121, 67), (121, 68), (121, 69), (121, 70), (121, 72), (122, 53), (122, 55), (122, 56), (122, 57), (122, 58), (122, 59), (122, 60), (122, 61), (122, 62), (122, 63), (122, 64), (122, 65), (122, 66), (122, 67), (122, 68), (122, 69), (122, 70), (122, 72), (123, 53), (123, 55), (123, 56), (123, 57), (123, 58), (123, 59),
(123, 60), (123, 61), (123, 62), (123, 63), (123, 64), (123, 65), (123, 66), (123, 67), (123, 68), (123, 69), (123, 71), (124, 53), (124, 55), (124, 56), (124, 57), (124, 58), (124, 59), (124, 60), (124, 61), (124, 62), (124, 63), (124, 64), (124, 65), (124, 66), (124, 67), (124, 68), (124, 69), (124, 71), (125, 52), (125, 54), (125, 55), (125, 56), (125, 57), (125, 58), (125, 59), (125, 60), (125, 61), (125, 62), (125, 63), (125, 64), (125, 65), (125, 66), (125, 67), (125, 68), (125, 70), (126, 52), (126, 54), (126, 55), (126, 56), (126, 57), (126, 58), (126, 59), (126, 60), (126, 61), (126, 62), (126, 63), (126, 64), (126, 65), (126, 66), (126, 67), (126, 69), (127, 52), (127, 54), (127, 55), (127, 56), (127, 57), (127, 58), (127, 59), (127, 60), (127, 61), (127, 62), (127, 63),
(127, 64), (127, 65), (127, 66), (127, 67), (127, 69), (128, 51), (128, 53), (128, 54), (128, 55), (128, 56), (128, 57), (128, 58), (128, 59), (128, 60), (128, 61), (128, 62), (128, 63), (128, 64), (128, 65), (128, 66), (128, 67), (128, 69), (129, 51), (129, 53), (129, 54), (129, 55), (129, 56), (129, 57), (129, 58), (129, 59), (129, 60), (129, 61), (129, 62), (129, 63), (129, 64), (129, 65), (129, 66), (129, 68), (130, 51), (130, 53), (130, 54), (130, 55), (130, 56), (130, 57), (130, 58), (130, 59), (130, 60), (130, 61), (130, 62), (130, 63), (130, 64), (130, 65), (130, 66), (130, 68), (131, 50), (131, 52), (131, 53), (131, 54), (131, 55), (131, 56), (131, 57), (131, 58), (131, 59), (131, 60), (131, 61), (131, 62), (131, 63), (131, 64), (131, 65), (131, 66), (131, 68), (132, 50),
(132, 52), (132, 53), (132, 54), (132, 55), (132, 56), (132, 57), (132, 58), (132, 59), (132, 60), (132, 61), (132, 62), (132, 63), (132, 64), (132, 65), (132, 66), (132, 68), (133, 49), (133, 51), (133, 52), (133, 53), (133, 54), (133, 55), (133, 56), (133, 57), (133, 58), (133, 59), (133, 60), (133, 61), (133, 62), (133, 63), (133, 64), (133, 65), (133, 66), (133, 68), (134, 49), (134, 51), (134, 52), (134, 53), (134, 54), (134, 55), (134, 56), (134, 57), (134, 58), (134, 59), (134, 60), (134, 61), (134, 62), (134, 63), (134, 64), (134, 65), (134, 67), (135, 48), (135, 50), (135, 51), (135, 52), (135, 53), (135, 54), (135, 55), (135, 56), (135, 57), (135, 58), (135, 59), (135, 60), (135, 61), (135, 62), (135, 63), (135, 64), (135, 65), (135, 67), (136, 48), (136, 50), (136, 51),
(136, 52), (136, 53), (136, 54), (136, 55), (136, 56), (136, 57), (136, 58), (136, 59), (136, 60), (136, 61), (136, 62), (136, 63), (136, 64), (136, 65), (136, 67), (137, 48), (137, 50), (137, 51), (137, 52), (137, 53), (137, 54), (137, 55), (137, 56), (137, 57), (137, 58), (137, 59), (137, 60), (137, 61), (137, 62), (137, 63), (137, 64), (137, 65), (137, 67), (138, 48), (138, 50), (138, 51), (138, 52), (138, 53), (138, 54), (138, 55), (138, 56), (138, 57), (138, 58), (138, 59), (138, 60), (138, 61), (138, 62), (138, 63), (138, 64), (138, 66), (139, 48), (139, 50), (139, 51), (139, 52), (139, 53), (139, 54), (139, 55), (139, 56), (139, 57), (139, 58), (139, 59), (139, 60), (139, 61), (139, 62), (139, 63), (139, 64), (139, 66), (140, 48), (140, 50), (140, 51), (140, 52), (140, 53),
(140, 54), (140, 55), (140, 56), (140, 57), (140, 58), (140, 59), (140, 60), (140, 61), (140, 62), (140, 63), (140, 64), (140, 65), (140, 67), (141, 49), (141, 51), (141, 52), (141, 53), (141, 54), (141, 55), (141, 56), (141, 57), (141, 58), (141, 59), (141, 60), (141, 61), (141, 62), (141, 63), (141, 64), (141, 65), (141, 67), (142, 49), (142, 51), (142, 52), (142, 53), (142, 54), (142, 55), (142, 56), (142, 57), (142, 58), (142, 59), (142, 60), (142, 61), (142, 62), (142, 63), (142, 64), (142, 65), (142, 67), (143, 49), (143, 51), (143, 52), (143, 53), (143, 54), (143, 55), (143, 56), (143, 57), (143, 58), (143, 59), (143, 60), (143, 61), (143, 62), (143, 63), (143, 64), (143, 65), (143, 66), (143, 68), (143, 74), (143, 75), (144, 49), (144, 51), (144, 52), (144, 53), (144, 54),
(144, 55), (144, 56), (144, 57), (144, 58), (144, 59), (144, 60), (144, 61), (144, 62), (144, 63), (144, 64), (144, 65), (144, 66), (144, 67), (144, 69), (144, 73), (144, 77), (145, 49), (145, 51), (145, 52), (145, 53), (145, 54), (145, 55), (145, 56), (145, 57), (145, 58), (145, 59), (145, 60), (145, 61), (145, 62), (145, 63), (145, 64), (145, 65), (145, 66), (145, 67), (145, 68), (145, 71), (145, 74), (145, 75), (145, 79), (146, 50), (146, 52), (146, 53), (146, 54), (146, 55), (146, 56), (146, 57), (146, 58), (146, 59), (146, 60), (146, 61), (146, 62), (146, 63), (146, 64), (146, 65), (146, 66), (146, 67), (146, 68), (146, 69), (146, 73), (146, 74), (146, 75), (146, 76), (146, 77), (146, 81), (147, 50), (147, 52), (147, 53), (147, 54), (147, 55), (147, 56), (147, 57), (147, 58),
(147, 59), (147, 60), (147, 61), (147, 62), (147, 63), (147, 64), (147, 65), (147, 66), (147, 67), (147, 68), (147, 69), (147, 70), (147, 71), (147, 72), (147, 73), (147, 74), (147, 75), (147, 76), (147, 77), (147, 78), (147, 79), (147, 82), (147, 83), (147, 85), (148, 49), (148, 50), (148, 51), (148, 52), (148, 53), (148, 54), (148, 55), (148, 56), (148, 57), (148, 58), (148, 59), (148, 60), (148, 61), (148, 62), (148, 63), (148, 64), (148, 65), (148, 66), (148, 67), (148, 68), (148, 69), (148, 70), (148, 71), (148, 72), (148, 73), (148, 74), (148, 75), (148, 76), (148, 77), (148, 78), (148, 79), (148, 80), (148, 81), (149, 49), (149, 51), (149, 52), (149, 53), (149, 54), (149, 55), (149, 56), (149, 57), (149, 58), (149, 59), (149, 60), (149, 61), (149, 62), (149, 63), (149, 64),
(149, 65), (149, 66), (149, 67), (149, 68), (149, 69), (149, 70), (149, 71), (149, 72), (149, 73), (149, 74), (149, 75), (149, 76), (149, 77), (149, 78), (149, 79), (149, 80), (149, 81), (149, 82), (149, 83), (149, 84), (149, 86), (150, 49), (150, 51), (150, 52), (150, 53), (150, 54), (150, 55), (150, 56), (150, 57), (150, 58), (150, 59), (150, 60), (150, 61), (150, 62), (150, 63), (150, 64), (150, 65), (150, 66), (150, 67), (150, 68), (150, 69), (150, 70), (150, 71), (150, 72), (150, 73), (150, 74), (150, 75), (150, 76), (150, 77), (150, 78), (150, 79), (150, 80), (150, 81), (150, 82), (150, 83), (150, 84), (150, 86), (151, 49), (151, 51), (151, 52), (151, 53), (151, 54), (151, 55), (151, 56), (151, 57), (151, 58), (151, 59), (151, 60), (151, 61), (151, 62), (151, 63), (151, 64),
(151, 65), (151, 66), (151, 67), (151, 68), (151, 69), (151, 70), (151, 71), (151, 72), (151, 73), (151, 74), (151, 75), (151, 76), (151, 77), (151, 78), (151, 79), (151, 80), (151, 81), (151, 82), (151, 83), (151, 84), (151, 85), (151, 87), (152, 49), (152, 51), (152, 52), (152, 53), (152, 54), (152, 55), (152, 56), (152, 57), (152, 58), (152, 59), (152, 60), (152, 61), (152, 62), (152, 63), (152, 64), (152, 65), (152, 66), (152, 67), (152, 68), (152, 69), (152, 70), (152, 71), (152, 72), (152, 73), (152, 74), (152, 75), (152, 76), (152, 77), (152, 78), (152, 79), (152, 80), (152, 81), (152, 82), (152, 83), (152, 84), (152, 85), (152, 87), (153, 49), (153, 51), (153, 52), (153, 53), (153, 54), (153, 55), (153, 56), (153, 57), (153, 58), (153, 59), (153, 60), (153, 61), (153, 62),
(153, 63), (153, 64), (153, 65), (153, 66), (153, 67), (153, 68), (153, 69), (153, 70), (153, 71), (153, 72), (153, 73), (153, 74), (153, 75), (153, 76), (153, 77), (153, 78), (153, 79), (153, 80), (153, 81), (153, 82), (153, 83), (153, 84), (153, 85), (153, 87), (154, 50), (154, 52), (154, 53), (154, 54), (154, 55), (154, 56), (154, 57), (154, 58), (154, 59), (154, 60), (154, 61), (154, 62), (154, 63), (154, 64), (154, 65), (154, 66), (154, 67), (154, 68), (154, 69), (154, 70), (154, 71), (154, 72), (154, 73), (154, 74), (154, 75), (154, 76), (154, 77), (154, 78), (154, 79), (154, 80), (154, 81), (154, 82), (154, 83), (154, 84), (154, 85), (154, 86), (154, 88), (155, 50), (155, 53), (155, 54), (155, 55), (155, 56), (155, 57), (155, 58), (155, 59), (155, 60), (155, 61), (155, 62),
(155, 63), (155, 64), (155, 65), (155, 66), (155, 67), (155, 68), (155, 69), (155, 70), (155, 71), (155, 72), (155, 73), (155, 74), (155, 75), (155, 76), (155, 77), (155, 78), (155, 79), (155, 80), (155, 81), (155, 82), (155, 83), (155, 84), (155, 85), (155, 86), (155, 88), (156, 51), (156, 54), (156, 55), (156, 56), (156, 57), (156, 58), (156, 59), (156, 60), (156, 61), (156, 62), (156, 63), (156, 64), (156, 65), (156, 66), (156, 67), (156, 68), (156, 69), (156, 70), (156, 71), (156, 72), (156, 73), (156, 74), (156, 75), (156, 76), (156, 77), (156, 78), (156, 79), (156, 80), (156, 81), (156, 82), (156, 83), (156, 84), (156, 85), (156, 86), (156, 88), (157, 52), (157, 54), (157, 55), (157, 56), (157, 57), (157, 58), (157, 59), (157, 60), (157, 61), (157, 62), (157, 63), (157, 64),
(157, 65), (157, 66), (157, 67), (157, 68), (157, 69), (157, 70), (157, 71), (157, 72), (157, 73), (157, 74), (157, 75), (157, 76), (157, 77), (157, 78), (157, 79), (157, 80), (157, 81), (157, 82), (157, 83), (157, 84), (157, 85), (157, 87), (158, 53), (158, 55), (158, 56), (158, 57), (158, 58), (158, 59), (158, 60), (158, 61), (158, 62), (158, 63), (158, 64), (158, 65), (158, 66), (158, 67), (158, 68), (158, 69), (158, 70), (158, 71), (158, 72), (158, 73), (158, 74), (158, 75), (158, 76), (158, 77), (158, 78), (158, 79), (158, 80), (158, 81), (158, 82), (158, 83), (158, 84), (158, 85), (158, 87), (159, 54), (159, 56), (159, 57), (159, 58), (159, 59), (159, 60), (159, 61), (159, 62), (159, 63), (159, 64), (159, 65), (159, 66), (159, 67), (159, 68), (159, 69), (159, 70), (159, 71),
(159, 72), (159, 73), (159, 74), (159, 75), (159, 76), (159, 77), (159, 78), (159, 79), (159, 80), (159, 81), (159, 82), (159, 83), (159, 84), (159, 86), (160, 55), (160, 57), (160, 58), (160, 59), (160, 60), (160, 61), (160, 62), (160, 63), (160, 64), (160, 65), (160, 66), (160, 67), (160, 68), (160, 69), (160, 70), (160, 71), (160, 72), (160, 73), (160, 74), (160, 75), (160, 76), (160, 77), (160, 78), (160, 79), (160, 80), (160, 81), (160, 82), (160, 83), (160, 84), (160, 86), (161, 55), (161, 56), (161, 57), (161, 58), (161, 59), (161, 60), (161, 61), (161, 62), (161, 63), (161, 64), (161, 65), (161, 66), (161, 67), (161, 68), (161, 69), (161, 70), (161, 71), (161, 72), (161, 73), (161, 74), (161, 75), (161, 76), (161, 77), (161, 78), (161, 79), (161, 80), (161, 81), (161, 82),
(161, 83), (161, 84), (161, 85), (161, 87), (162, 53), (162, 55), (162, 56), (162, 57), (162, 58), (162, 59), (162, 60), (162, 61), (162, 62), (162, 63), (162, 64), (162, 65), (162, 66), (162, 67), (162, 68), (162, 69), (162, 70), (162, 71), (162, 72), (162, 73), (162, 74), (162, 75), (162, 76), (162, 77), (162, 78), (162, 79), (162, 80), (162, 81), (162, 82), (162, 83), (162, 84), (162, 85), (162, 87), (163, 51), (163, 54), (163, 55), (163, 56), (163, 57), (163, 58), (163, 59), (163, 60), (163, 61), (163, 62), (163, 63), (163, 64), (163, 65), (163, 66), (163, 67), (163, 68), (163, 69), (163, 70), (163, 71), (163, 72), (163, 73), (163, 74), (163, 75), (163, 76), (163, 77), (163, 78), (163, 79), (163, 80), (163, 81), (163, 82), (163, 83), (163, 84), (163, 85), (163, 87), (164, 50),
(164, 53), (164, 54), (164, 55), (164, 56), (164, 57), (164, 58), (164, 59), (164, 60), (164, 61), (164, 62), (164, 63), (164, 64), (164, 65), (164, 66), (164, 67), (164, 68), (164, 69), (164, 70), (164, 71), (164, 72), (164, 73), (164, 74), (164, 75), (164, 76), (164, 77), (164, 78), (164, 79), (164, 80), (164, 81), (164, 82), (164, 83), (164, 84), (164, 86), (165, 50), (165, 52), (165, 53), (165, 54), (165, 55), (165, 56), (165, 57), (165, 58), (165, 59), (165, 60), (165, 61), (165, 62), (165, 63), (165, 64), (165, 65), (165, 66), (165, 67), (165, 68), (165, 69), (165, 70), (165, 71), (165, 72), (165, 73), (165, 74), (165, 75), (165, 76), (165, 77), (165, 78), (165, 79), (165, 80), (165, 81), (165, 82), (165, 83), (165, 84), (165, 86), (166, 49), (166, 50), (166, 51), (166, 52),
(166, 53), (166, 54), (166, 55), (166, 56), (166, 57), (166, 58), (166, 59), (166, 60), (166, 61), (166, 62), (166, 63), (166, 64), (166, 65), (166, 66), (166, 67), (166, 68), (166, 69), (166, 70), (166, 71), (166, 72), (166, 73), (166, 74), (166, 75), (166, 76), (166, 77), (166, 78), (166, 79), (166, 80), (166, 81), (166, 82), (166, 83), (166, 84), (166, 86), (167, 49), (167, 51), (167, 52), (167, 53), (167, 54), (167, 55), (167, 56), (167, 57), (167, 58), (167, 59), (167, 60), (167, 61), (167, 62), (167, 63), (167, 64), (167, 65), (167, 66), (167, 67), (167, 68), (167, 69), (167, 70), (167, 71), (167, 72), (167, 73), (167, 74), (167, 75), (167, 76), (167, 77), (167, 78), (167, 79), (167, 80), (167, 81), (167, 82), (167, 85), (168, 49), (168, 51), (168, 52), (168, 53), (168, 54),
(168, 55), (168, 56), (168, 57), (168, 58), (168, 59), (168, 60), (168, 61), (168, 62), (168, 63), (168, 64), (168, 65), (168, 66), (168, 67), (168, 68), (168, 69), (168, 70), (168, 71), (168, 72), (168, 73), (168, 74), (168, 75), (168, 76), (168, 77), (168, 78), (168, 79), (168, 80), (168, 83), (168, 85), (169, 49), (169, 51), (169, 52), (169, 53), (169, 54), (169, 55), (169, 56), (169, 57), (169, 58), (169, 59), (169, 60), (169, 61), (169, 62), (169, 63), (169, 64), (169, 65), (169, 66), (169, 67), (169, 68), (169, 69), (169, 70), (169, 71), (169, 72), (169, 73), (169, 74), (169, 75), (169, 76), (169, 77), (169, 78), (169, 82), (170, 48), (170, 50), (170, 51), (170, 52), (170, 53), (170, 54), (170, 55), (170, 56), (170, 57), (170, 58), (170, 59), (170, 60), (170, 61), (170, 62),
(170, 63), (170, 64), (170, 65), (170, 66), (170, 67), (170, 68), (170, 69), (170, 70), (170, 71), (170, 72), (170, 73), (170, 74), (170, 75), (170, 76), (170, 77), (170, 80), (171, 48), (171, 50), (171, 51), (171, 52), (171, 53), (171, 54), (171, 55), (171, 56), (171, 57), (171, 58), (171, 59), (171, 60), (171, 61), (171, 62), (171, 63), (171, 64), (171, 65), (171, 66), (171, 67), (171, 68), (171, 69), (171, 70), (171, 71), (171, 72), (171, 73), (171, 74), (171, 75), (171, 76), (171, 78), (172, 48), (172, 50), (172, 51), (172, 52), (172, 53), (172, 54), (172, 55), (172, 56), (172, 57), (172, 58), (172, 59), (172, 60), (172, 61), (172, 62), (172, 63), (172, 64), (172, 65), (172, 66), (172, 67), (172, 68), (172, 69), (172, 70), (172, 71), (172, 72), (172, 73), (172, 74), (172, 75),
(172, 77), (173, 48), (173, 50), (173, 51), (173, 52), (173, 53), (173, 54), (173, 55), (173, 56), (173, 57), (173, 58), (173, 59), (173, 60), (173, 61), (173, 62), (173, 63), (173, 64), (173, 65), (173, 66), (173, 67), (173, 68), (173, 69), (173, 70), (173, 71), (173, 72), (173, 73), (173, 74), (173, 76), (174, 48), (174, 50), (174, 51), (174, 52), (174, 53), (174, 54), (174, 55), (174, 56), (174, 57), (174, 58), (174, 59), (174, 60), (174, 61), (174, 62), (174, 63), (174, 64), (174, 65), (174, 66), (174, 67), (174, 68), (174, 69), (174, 70), (174, 75), (175, 48), (175, 50), (175, 51), (175, 52), (175, 53), (175, 54), (175, 55), (175, 56), (175, 57), (175, 58), (175, 59), (175, 60), (175, 61), (175, 62), (175, 63), (175, 64), (175, 65), (175, 66), (175, 67), (175, 68), (175, 69),
(175, 71), (175, 72), (175, 74), (176, 48), (176, 50), (176, 51), (176, 52), (176, 53), (176, 54), (176, 55), (176, 56), (176, 57), (176, 58), (176, 59), (176, 60), (176, 61), (176, 62), (176, 63), (176, 64), (176, 65), (176, 66), (176, 67), (176, 68), (176, 69), (177, 48), (177, 50), (177, 51), (177, 52), (177, 53), (177, 54), (177, 55), (177, 56), (177, 57), (177, 58), (177, 59), (177, 60), (177, 61), (177, 62), (177, 63), (177, 64), (177, 65), (177, 66), (177, 67), (177, 69), (178, 48), (178, 50), (178, 51), (178, 52), (178, 53), (178, 54), (178, 55), (178, 56), (178, 57), (178, 58), (178, 59), (178, 60), (178, 61), (178, 62), (178, 63), (178, 64), (178, 65), (178, 66), (178, 68), (179, 48), (179, 50), (179, 51), (179, 52), (179, 53), (179, 54), (179, 55), (179, 56), (179, 57),
(179, 58), (179, 59), (179, 60), (179, 61), (179, 62), (179, 63), (179, 64), (179, 65), (179, 66), (179, 68), (180, 48), (180, 50), (180, 51), (180, 52), (180, 53), (180, 54), (180, 55), (180, 56), (180, 57), (180, 58), (180, 59), (180, 60), (180, 61), (180, 62), (180, 63), (180, 64), (180, 65), (180, 66), (180, 68), (181, 48), (181, 50), (181, 51), (181, 52), (181, 53), (181, 54), (181, 55), (181, 56), (181, 57), (181, 58), (181, 59), (181, 60), (181, 61), (181, 62), (181, 63), (181, 64), (181, 65), (181, 67), (182, 49), (182, 51), (182, 52), (182, 53), (182, 54), (182, 55), (182, 56), (182, 57), (182, 58), (182, 59), (182, 60), (182, 61), (182, 62), (182, 63), (182, 64), (182, 65), (182, 67), (183, 49), (183, 51), (183, 52), (183, 53), (183, 54), (183, 55), (183, 56), (183, 57),
(183, 58), (183, 59), (183, 60), (183, 61), (183, 62), (183, 63), (183, 64), (183, 66), (184, 49), (184, 51), (184, 52), (184, 53), (184, 54), (184, 55), (184, 56), (184, 57), (184, 58), (184, 59), (184, 60), (184, 61), (184, 62), (184, 63), (184, 64), (184, 65), (184, 67), (185, 49), (185, 51), (185, 52), (185, 53), (185, 54), (185, 55), (185, 56), (185, 57), (185, 58), (185, 59), (185, 60), (185, 61), (185, 62), (185, 63), (185, 64), (185, 65), (185, 67), (186, 49), (186, 51), (186, 52), (186, 53), (186, 54), (186, 55), (186, 56), (186, 57), (186, 58), (186, 59), (186, 60), (186, 61), (186, 62), (186, 63), (186, 64), (186, 65), (186, 66), (186, 68), (187, 49), (187, 51), (187, 52), (187, 53), (187, 54), (187, 55), (187, 56), (187, 57), (187, 58), (187, 59), (187, 60), (187, 61),
(187, 62), (187, 63), (187, 64), (187, 65), (187, 66), (187, 68), (188, 50), (188, 52), (188, 53), (188, 54), (188, 55), (188, 56), (188, 57), (188, 58), (188, 59), (188, 60), (188, 61), (188, 62), (188, 63), (188, 64), (188, 65), (188, 66), (188, 67), (188, 69), (189, 50), (189, 52), (189, 53), (189, 54), (189, 55), (189, 56), (189, 57), (189, 58), (189, 59), (189, 60), (189, 61), (189, 62), (189, 63), (189, 64), (189, 65), (189, 66), (189, 67), (189, 69), (190, 50), (190, 52), (190, 53), (190, 54), (190, 55), (190, 56), (190, 57), (190, 58), (190, 59), (190, 60), (190, 61), (190, 62), (190, 63), (190, 64), (190, 65), (190, 66), (190, 67), (190, 68), (190, 70), (191, 50), (191, 52), (191, 53), (191, 54), (191, 55), (191, 56), (191, 57), (191, 58), (191, 59), (191, 60), (191, 61),
(191, 62), (191, 63), (191, 64), (191, 65), (191, 66), (191, 67), (191, 68), (191, 69), (191, 70), (192, 51), (192, 53), (192, 54), (192, 55), (192, 56), (192, 57), (192, 58), (192, 59), (192, 60), (192, 61), (192, 62), (192, 63), (192, 64), (192, 65), (192, 66), (192, 67), (192, 68), (192, 69), (192, 71), (193, 51), (193, 53), (193, 54), (193, 55), (193, 56), (193, 57), (193, 58), (193, 59), (193, 60), (193, 61), (193, 62), (193, 63), (193, 64), (193, 65), (193, 66), (193, 67), (193, 68), (193, 69), (193, 70), (193, 72), (194, 52), (194, 54), (194, 55), (194, 56), (194, 57), (194, 58), (194, 59), (194, 60), (194, 61), (194, 62), (194, 63), (194, 64), (194, 65), (194, 66), (194, 67), (194, 68), (194, 69), (194, 70), (194, 72), (195, 52), (195, 54), (195, 55), (195, 56), (195, 57),
(195, 58), (195, 59), (195, 60), (195, 61), (195, 62), (195, 63), (195, 64), (195, 65), (195, 66), (195, 67), (195, 68), (195, 69), (195, 70), (195, 71), (195, 73), (196, 53), (196, 55), (196, 56), (196, 57), (196, 58), (196, 59), (196, 60), (196, 61), (196, 62), (196, 63), (196, 64), (196, 65), (196, 66), (196, 67), (196, 68), (196, 69), (196, 70), (196, 71), (196, 72), (196, 73), (197, 53), (197, 55), (197, 56), (197, 57), (197, 58), (197, 59), (197, 60), (197, 61), (197, 62), (197, 63), (197, 64), (197, 65), (197, 66), (197, 67), (197, 68), (197, 69), (197, 70), (197, 71), (197, 72), (197, 74), (198, 54), (198, 56), (198, 57), (198, 58), (198, 59), (198, 60), (198, 61), (198, 62), (198, 63), (198, 64), (198, 65), (198, 66), (198, 67), (198, 68), (198, 69), (198, 70), (198, 71),
(198, 72), (198, 73), (198, 75), (199, 54), (199, 56), (199, 57), (199, 58), (199, 59), (199, 60), (199, 61), (199, 62), (199, 63), (199, 64), (199, 65), (199, 66), (199, 67), (199, 68), (199, 69), (199, 70), (199, 71), (199, 72), (199, 73), (199, 75), (200, 55), (200, 57), (200, 58), (200, 59), (200, 60), (200, 61), (200, 62), (200, 63), (200, 64), (200, 65), (200, 66), (200, 67), (200, 68), (200, 69), (200, 70), (200, 71), (200, 72), (200, 73), (200, 74), (200, 76), (201, 55), (201, 57), (201, 58), (201, 59), (201, 60), (201, 61), (201, 62), (201, 63), (201, 64), (201, 65), (201, 66), (201, 67), (201, 68), (201, 69), (201, 70), (201, 71), (201, 72), (201, 73), (201, 74), (201, 75), (201, 77), (202, 56), (202, 58), (202, 59), (202, 60), (202, 61), (202, 62), (202, 63), (202, 64),
(202, 65), (202, 66), (202, 67), (202, 68), (202, 69), (202, 70), (202, 71), (202, 72), (202, 73), (202, 74), (202, 75), (202, 77), (203, 56), (203, 58), (203, 59), (203, 60), (203, 61), (203, 62), (203, 63), (203, 64), (203, 65), (203, 66), (203, 67), (203, 68), (203, 69), (203, 70), (203, 71), (203, 72), (203, 73), (203, 74), (203, 75), (203, 76), (203, 78), (204, 57), (204, 59), (204, 60), (204, 61), (204, 62), (204, 63), (204, 64), (204, 65), (204, 66), (204, 67), (204, 68), (204, 69), (204, 70), (204, 71), (204, 72), (204, 73), (204, 74), (204, 75), (204, 76), (204, 77), (204, 79), (205, 57), (205, 59), (205, 60), (205, 61), (205, 62), (205, 63), (205, 64), (205, 65), (205, 66), (205, 67), (205, 68), (205, 69), (205, 70), (205, 71), (205, 72), (205, 73), (205, 74), (205, 75),
(205, 76), (205, 77), (205, 79), (206, 58), (206, 60), (206, 61), (206, 62), (206, 63), (206, 64), (206, 65), (206, 66), (206, 67), (206, 68), (206, 69), (206, 70), (206, 71), (206, 72), (206, 73), (206, 74), (206, 75), (206, 76), (206, 77), (206, 78), (206, 80), (207, 58), (207, 60), (207, 61), (207, 62), (207, 63), (207, 64), (207, 65), (207, 66), (207, 67), (207, 68), (207, 69), (207, 70), (207, 71), (207, 72), (207, 73), (207, 74), (207, 75), (207, 76), (207, 77), (207, 78), (207, 79), (207, 81), (208, 59), (208, 61), (208, 62), (208, 63), (208, 64), (208, 65), (208, 66), (208, 67), (208, 68), (208, 69), (208, 70), (208, 71), (208, 72), (208, 73), (208, 74), (208, 75), (208, 76), (208, 77), (208, 78), (208, 79), (208, 81), (209, 59), (209, 61), (209, 62), (209, 63), (209, 64),
(209, 65), (209, 66), (209, 67), (209, 68), (209, 69), (209, 70), (209, 71), (209, 72), (209, 73), (209, 74), (209, 75), (209, 76), (209, 77), (209, 78), (209, 79), (209, 80), (209, 82), (210, 60), (210, 62), (210, 63), (210, 64), (210, 65), (210, 66), (210, 67), (210, 68), (210, 69), (210, 70), (210, 71), (210, 72), (210, 73), (210, 74), (210, 75), (210, 76), (210, 77), (210, 78), (210, 79), (210, 80), (210, 81), (210, 83), (211, 61), (211, 62), (211, 63), (211, 64), (211, 65), (211, 66), (211, 67), (211, 68), (211, 69), (211, 70), (211, 71), (211, 72), (211, 73), (211, 74), (211, 75), (211, 76), (211, 77), (211, 78), (211, 79), (211, 80), (211, 81), (211, 82), (211, 84), (212, 61), (212, 63), (212, 64), (212, 65), (212, 66), (212, 67), (212, 68), (212, 69), (212, 70), (212, 71),
(212, 72), (212, 73), (212, 74), (212, 75), (212, 76), (212, 77), (212, 78), (212, 79), (212, 80), (212, 81), (212, 82), (212, 84), (213, 62), (213, 64), (213, 65), (213, 66), (213, 67), (213, 68), (213, 69), (213, 70), (213, 71), (213, 72), (213, 73), (213, 74), (213, 75), (213, 76), (213, 77), (213, 78), (213, 79), (213, 80), (213, 81), (213, 82), (213, 83), (213, 85), (214, 62), (214, 64), (214, 65), (214, 66), (214, 67), (214, 68), (214, 69), (214, 70), (214, 71), (214, 72), (214, 73), (214, 74), (214, 75), (214, 76), (214, 77), (214, 78), (214, 79), (214, 80), (214, 81), (214, 82), (214, 83), (214, 84), (214, 86), (215, 63), (215, 65), (215, 66), (215, 67), (215, 68), (215, 69), (215, 70), (215, 71), (215, 72), (215, 73), (215, 74), (215, 75), (215, 76), (215, 77), (215, 78),
(215, 79), (215, 80), (215, 81), (215, 82), (215, 83), (215, 84), (215, 85), (215, 87), (216, 63), (216, 65), (216, 66), (216, 67), (216, 68), (216, 69), (216, 70), (216, 71), (216, 72), (216, 73), (216, 74), (216, 75), (216, 76), (216, 77), (216, 78), (216, 79), (216, 80), (216, 81), (216, 82), (216, 83), (216, 84), (216, 85), (216, 86), (216, 88), (217, 64), (217, 66), (217, 67), (217, 68), (217, 69), (217, 70), (217, 71), (217, 72), (217, 73), (217, 74), (217, 75), (217, 76), (217, 77), (217, 78), (217, 79), (217, 80), (217, 81), (217, 82), (217, 83), (217, 84), (217, 85), (217, 86), (217, 89), (218, 65), (218, 67), (218, 68), (218, 69), (218, 70), (218, 71), (218, 72), (218, 73), (218, 74), (218, 75), (218, 76), (218, 77), (218, 78), (218, 79), (218, 80), (218, 81), (218, 82),
(218, 83), (218, 84), (218, 85), (218, 86), (218, 87), (218, 90), (219, 65), (219, 67), (219, 68), (219, 69), (219, 70), (219, 71), (219, 72), (219, 73), (219, 74), (219, 75), (219, 76), (219, 77), (219, 78), (219, 79), (219, 80), (219, 81), (219, 82), (219, 83), (219, 84), (219, 85), (219, 86), (219, 87), (219, 88), (219, 91), (220, 66), (220, 68), (220, 69), (220, 70), (220, 71), (220, 72), (220, 73), (220, 74), (220, 75), (220, 76), (220, 77), (220, 78), (220, 79), (220, 80), (220, 81), (220, 82), (220, 83), (220, 84), (220, 85), (220, 86), (220, 87), (220, 88), (220, 89), (220, 92), (221, 67), (221, 69), (221, 70), (221, 71), (221, 72), (221, 73), (221, 74), (221, 75), (221, 76), (221, 77), (221, 78), (221, 79), (221, 80), (221, 81), (221, 82), (221, 83), (221, 84), (221, 85),
(221, 86), (221, 87), (221, 88), (221, 89), (221, 90), (221, 91), (221, 93), (222, 67), (222, 69), (222, 70), (222, 71), (222, 72), (222, 73), (222, 74), (222, 75), (222, 76), (222, 77), (222, 78), (222, 79), (222, 80), (222, 81), (222, 82), (222, 83), (222, 84), (222, 85), (222, 86), (222, 87), (222, 88), (222, 89), (222, 90), (222, 91), (222, 92), (222, 95), (223, 68), (223, 70), (223, 71), (223, 72), (223, 73), (223, 74), (223, 75), (223, 76), (223, 77), (223, 78), (223, 79), (223, 80), (223, 81), (223, 82), (223, 83), (223, 84), (223, 85), (223, 86), (223, 87), (223, 88), (223, 89), (223, 90), (223, 91), (223, 92), (223, 93), (223, 94), (223, 97), (224, 69), (224, 71), (224, 72), (224, 73), (224, 74), (224, 75), (224, 76), (224, 77), (224, 78), (224, 79), (224, 80), (224, 81),
(224, 82), (224, 83), (224, 84), (224, 85), (224, 86), (224, 87), (224, 88), (224, 89), (224, 90), (224, 91), (224, 92), (224, 93), (224, 94), (224, 95), (224, 99), (225, 69), (225, 71), (225, 72), (225, 73), (225, 74), (225, 75), (225, 76), (225, 77), (225, 78), (225, 79), (225, 80), (225, 81), (225, 82), (225, 83), (225, 84), (225, 85), (225, 86), (225, 87), (225, 88), (225, 89), (225, 90), (225, 91), (225, 92), (225, 93), (225, 94), (225, 95), (225, 96), (225, 97), (225, 101), (226, 70), (226, 72), (226, 73), (226, 74), (226, 75), (226, 76), (226, 77), (226, 78), (226, 79), (226, 80), (226, 81), (226, 82), (226, 83), (226, 84), (226, 85), (226, 86), (226, 87), (226, 88), (226, 89), (226, 90), (226, 91), (226, 92), (226, 93), (226, 94), (226, 95), (226, 96), (226, 97), (226, 98),
(226, 99), (226, 103), (227, 71), (227, 73), (227, 74), (227, 75), (227, 76), (227, 77), (227, 78), (227, 79), (227, 80), (227, 81), (227, 82), (227, 83), (227, 84), (227, 85), (227, 86), (227, 87), (227, 88), (227, 89), (227, 90), (227, 91), (227, 92), (227, 93), (227, 94), (227, 95), (227, 96), (227, 97), (227, 98), (227, 99), (227, 100), (227, 101), (227, 104), (228, 72), (228, 74), (228, 75), (228, 76), (228, 77), (228, 78), (228, 79), (228, 80), (228, 81), (228, 82), (228, 83), (228, 84), (228, 85), (228, 86), (228, 87), (228, 88), (228, 89), (228, 90), (228, 91), (228, 92), (228, 93), (228, 94), (228, 95), (228, 96), (228, 97), (228, 98), (228, 99), (228, 100), (228, 101), (228, 102), (228, 103), (228, 106), (228, 111), (229, 72), (229, 74), (229, 75), (229, 76), (229, 77), (229, 78),
(229, 79), (229, 80), (229, 81), (229, 82), (229, 83), (229, 84), (229, 85), (229, 86), (229, 87), (229, 88), (229, 89), (229, 90), (229, 91), (229, 92), (229, 93), (229, 94), (229, 95), (229, 96), (229, 97), (229, 98), (229, 99), (229, 100), (229, 101), (229, 102), (229, 103), (229, 104), (229, 105), (229, 108), (229, 109), (229, 110), (229, 111), (229, 112), (229, 113), (230, 73), (230, 75), (230, 76), (230, 77), (230, 78), (230, 79), (230, 80), (230, 81), (230, 82), (230, 83), (230, 84), (230, 85), (230, 86), (230, 87), (230, 88), (230, 89), (230, 90), (230, 91), (230, 92), (230, 93), (230, 94), (230, 95), (230, 96), (230, 97), (230, 98), (230, 99), (230, 100), (230, 101), (230, 102), (230, 103), (230, 104), (230, 105), (230, 106), (230, 111), (230, 115), (230, 116), (231, 74), (231, 76), (231, 77),
(231, 78), (231, 79), (231, 80), (231, 81), (231, 82), (231, 83), (231, 84), (231, 85), (231, 86), (231, 87), (231, 88), (231, 89), (231, 90), (231, 91), (231, 92), (231, 93), (231, 94), (231, 95), (231, 96), (231, 97), (231, 98), (231, 99), (231, 100), (231, 101), (231, 102), (231, 103), (231, 104), (231, 105), (231, 106), (231, 107), (231, 108), (231, 109), (231, 110), (231, 111), (231, 112), (231, 113), (231, 114), (231, 119), (232, 75), (232, 77), (232, 78), (232, 79), (232, 80), (232, 81), (232, 82), (232, 83), (232, 84), (232, 85), (232, 86), (232, 87), (232, 88), (232, 89), (232, 90), (232, 91), (232, 92), (232, 93), (232, 94), (232, 95), (232, 96), (232, 97), (232, 98), (232, 99), (232, 100), (232, 101), (232, 102), (232, 103), (232, 104), (232, 105), (232, 106), (232, 107), (232, 108), (232, 109),
(232, 110), (232, 111), (232, 112), (232, 113), (232, 114), (232, 115), (232, 116), (232, 120), (233, 76), (233, 78), (233, 79), (233, 80), (233, 81), (233, 82), (233, 83), (233, 84), (233, 85), (233, 86), (233, 87), (233, 88), (233, 89), (233, 90), (233, 91), (233, 92), (233, 93), (233, 94), (233, 95), (233, 96), (233, 97), (233, 98), (233, 99), (233, 100), (233, 101), (233, 102), (233, 103), (233, 104), (233, 105), (233, 106), (233, 107), (233, 108), (233, 109), (233, 110), (233, 111), (233, 112), (233, 113), (233, 114), (233, 115), (233, 116), (233, 117), (233, 118), (233, 120), (234, 77), (234, 79), (234, 80), (234, 81), (234, 82), (234, 83), (234, 84), (234, 85), (234, 86), (234, 87), (234, 88), (234, 89), (234, 90), (234, 91), (234, 92), (234, 93), (234, 94), (234, 95), (234, 96), (234, 97), (234, 98),
(234, 99), (234, 100), (234, 101), (234, 102), (234, 103), (234, 104), (234, 105), (234, 106), (234, 107), (234, 108), (234, 109), (234, 110), (234, 111), (234, 112), (234, 113), (234, 114), (234, 115), (234, 116), (234, 117), (234, 118), (234, 120), (235, 78), (235, 80), (235, 81), (235, 82), (235, 83), (235, 84), (235, 85), (235, 86), (235, 87), (235, 88), (235, 89), (235, 90), (235, 91), (235, 92), (235, 93), (235, 94), (235, 95), (235, 96), (235, 97), (235, 98), (235, 99), (235, 100), (235, 101), (235, 102), (235, 103), (235, 104), (235, 105), (235, 106), (235, 107), (235, 108), (235, 109), (235, 110), (235, 111), (235, 112), (235, 113), (235, 114), (235, 115), (235, 116), (235, 117), (235, 119), (236, 79), (236, 81), (236, 82), (236, 83), (236, 84), (236, 85), (236, 86), (236, 87), (236, 88), (236, 89), (236, 90),
(236, 91), (236, 92), (236, 93), (236, 94), (236, 95), (236, 96), (236, 97), (236, 98), (236, 99), (236, 100), (236, 101), (236, 102), (236, 103), (236, 104), (236, 105), (236, 106), (236, 107), (236, 108), (236, 109), (236, 110), (236, 111), (236, 112), (236, 113), (236, 114), (236, 115), (236, 116), (236, 117), (236, 119), (237, 80), (237, 82), (237, 83), (237, 84), (237, 85), (237, 86), (237, 87), (237, 88), (237, 89), (237, 90), (237, 91), (237, 92), (237, 93), (237, 94), (237, 95), (237, 96), (237, 97), (237, 98), (237, 99), (237, 100), (237, 101), (237, 102), (237, 103), (237, 104), (237, 105), (237, 106), (237, 107), (237, 108), (237, 109), (237, 110), (237, 111), (237, 112), (237, 113), (237, 114), (237, 115), (237, 116), (237, 118), (238, 81), (238, 83), (238, 84), (238, 85), (238, 86), (238, 87), (238, 88),
(238, 89), (238, 90), (238, 91), (238, 92), (238, 93), (238, 94), (238, 95), (238, 96), (238, 97), (238, 98), (238, 99), (238, 100), (238, 101), (238, 102), (238, 103), (238, 104), (238, 105), (238, 106), (238, 107), (238, 108), (238, 109), (238, 110), (238, 111), (238, 112), (238, 113), (238, 114), (238, 115), (238, 117), (239, 82), (239, 84), (239, 85), (239, 86), (239, 87), (239, 88), (239, 89), (239, 90), (239, 91), (239, 92), (239, 93), (239, 94), (239, 95), (239, 96), (239, 97), (239, 98), (239, 99), (239, 100), (239, 101), (239, 102), (239, 103), (239, 104), (239, 105), (239, 106), (239, 107), (239, 108), (239, 109), (239, 110), (239, 111), (239, 112), (239, 113), (239, 114), (239, 116), (240, 83), (240, 85), (240, 86), (240, 87), (240, 88), (240, 89), (240, 90), (240, 91), (240, 92), (240, 93), (240, 94),
(240, 95), (240, 96), (240, 97), (240, 98), (240, 99), (240, 100), (240, 101), (240, 102), (240, 103), (240, 104), (240, 105), (240, 106), (240, 107), (240, 108), (240, 109), (240, 110), (240, 111), (240, 112), (240, 113), (241, 84), (241, 87), (241, 88), (241, 89), (241, 90), (241, 91), (241, 92), (241, 93), (241, 94), (241, 95), (241, 96), (241, 97), (241, 98), (241, 99), (241, 100), (241, 101), (241, 102), (241, 103), (241, 104), (241, 105), (241, 106), (241, 107), (241, 108), (241, 109), (241, 110), (241, 111), (241, 112), (241, 115), (242, 85), (242, 88), (242, 89), (242, 90), (242, 91), (242, 92), (242, 93), (242, 94), (242, 95), (242, 96), (242, 97), (242, 98), (242, 99), (242, 100), (242, 101), (242, 102), (242, 103), (242, 104), (242, 105), (242, 106), (242, 107), (242, 108), (242, 109), (242, 110), (242, 111),
(242, 112), (242, 114), (243, 87), (243, 89), (243, 90), (243, 91), (243, 92), (243, 93), (243, 94), (243, 95), (243, 96), (243, 97), (243, 98), (243, 99), (243, 100), (243, 101), (243, 102), (243, 103), (243, 104), (243, 105), (243, 106), (243, 107), (243, 108), (243, 109), (243, 110), (243, 111), (243, 113), (244, 88), (244, 91), (244, 92), (244, 93), (244, 94), (244, 95), (244, 96), (244, 97), (244, 98), (244, 99), (244, 100), (244, 101), (244, 102), (244, 103), (244, 104), (244, 105), (244, 106), (244, 107), (244, 108), (244, 109), (244, 110), (244, 112), (245, 89), (245, 92), (245, 93), (245, 94), (245, 95), (245, 96), (245, 97), (245, 98), (245, 99), (245, 100), (245, 101), (245, 102), (245, 103), (245, 104), (245, 105), (245, 106), (245, 107), (245, 108), (245, 109), (245, 111), (246, 91), (246, 94), (246, 95),
(246, 96), (246, 97), (246, 98), (246, 99), (246, 100), (246, 101), (246, 102), (246, 103), (246, 104), (246, 105), (246, 106), (246, 107), (246, 108), (246, 110), (247, 92), (247, 96), (247, 97), (247, 98), (247, 99), (247, 100), (247, 101), (247, 102), (247, 103), (247, 104), (247, 105), (247, 106), (247, 107), (247, 108), (247, 110), (248, 94), (248, 97), (248, 98), (248, 99), (248, 100), (248, 101), (248, 102), (248, 103), (248, 104), (248, 105), (248, 106), (248, 107), (248, 109), (249, 96), (249, 100), (249, 101), (249, 102), (249, 103), (249, 104), (249, 105), (249, 106), (249, 107), (249, 109), (250, 98), (250, 99), (250, 104), (250, 105), (250, 106), (250, 109), (251, 100), (251, 102), (251, 103), (251, 107), (252, 104), (252, 106), )
coordinates_FF0000 = ((108, 161),
(109, 160), (109, 162), (110, 160), (110, 163), (111, 160), (111, 163), (112, 159), (112, 161), (112, 162), (112, 164), (113, 159), (113, 161), (113, 162), (113, 164), (114, 158), (114, 160), (114, 161), (114, 162), (114, 163), (114, 165), (115, 159), (115, 160), (115, 161), (115, 162), (115, 163), (115, 165), (116, 156), (116, 158), (116, 159), (116, 160), (116, 161), (116, 162), (116, 163), (116, 164), (116, 166), (117, 157), (117, 159), (117, 160), (117, 161), (117, 162), (117, 163), (117, 164), (117, 166), (118, 156), (118, 158), (118, 159), (118, 160), (118, 161), (118, 162), (118, 163), (118, 164), (118, 165), (118, 167), (119, 156), (119, 158), (119, 159), (119, 160), (119, 161), (119, 162), (119, 163), (119, 164), (119, 165), (119, 166), (119, 168), (120, 154), (120, 156), (120, 157), (120, 158), (120, 159), (120, 160), (120, 161), (120, 162),
(120, 163), (120, 164), (120, 165), (120, 166), (120, 167), (120, 169), (121, 154), (121, 156), (121, 157), (121, 158), (121, 159), (121, 160), (121, 161), (121, 162), (121, 163), (121, 164), (121, 165), (121, 166), (121, 167), (121, 168), (122, 154), (122, 156), (122, 157), (122, 158), (122, 159), (122, 160), (122, 161), (122, 162), (122, 163), (122, 164), (122, 165), (122, 166), (122, 167), (122, 168), (122, 169), (122, 172), (123, 154), (123, 156), (123, 157), (123, 158), (123, 159), (123, 160), (123, 161), (123, 162), (123, 163), (123, 164), (123, 165), (123, 166), (123, 167), (123, 168), (123, 169), (123, 170), (123, 173), (124, 154), (124, 156), (124, 157), (124, 158), (124, 159), (124, 160), (124, 161), (124, 162), (124, 163), (124, 164), (124, 165), (124, 166), (124, 167), (124, 168), (124, 169), (124, 173), (125, 142), (125, 143), (125, 144),
(125, 146), (125, 153), (125, 155), (125, 156), (125, 157), (125, 158), (125, 159), (125, 160), (125, 161), (125, 162), (125, 163), (125, 164), (125, 165), (125, 171), (126, 136), (126, 138), (126, 139), (126, 140), (126, 141), (126, 147), (126, 148), (126, 149), (126, 152), (126, 154), (126, 155), (126, 156), (126, 157), (126, 158), (126, 159), (126, 160), (126, 161), (126, 162), (126, 163), (126, 166), (126, 167), (126, 168), (126, 169), (127, 136), (127, 142), (127, 143), (127, 144), (127, 145), (127, 146), (127, 150), (127, 153), (127, 154), (127, 155), (127, 156), (127, 157), (127, 158), (127, 159), (127, 160), (127, 161), (127, 162), (127, 165), (128, 136), (128, 138), (128, 139), (128, 140), (128, 141), (128, 142), (128, 143), (128, 144), (128, 145), (128, 146), (128, 147), (128, 148), (128, 149), (128, 151), (128, 152), (128, 153), (128, 154),
(128, 155), (128, 156), (128, 157), (128, 158), (128, 159), (128, 160), (128, 161), (128, 163), (129, 136), (129, 138), (129, 139), (129, 140), (129, 141), (129, 142), (129, 143), (129, 144), (129, 145), (129, 146), (129, 147), (129, 148), (129, 149), (129, 150), (129, 151), (129, 152), (129, 153), (129, 154), (129, 155), (129, 156), (129, 157), (129, 158), (129, 159), (129, 160), (129, 162), (130, 135), (130, 137), (130, 138), (130, 139), (130, 140), (130, 141), (130, 142), (130, 143), (130, 144), (130, 145), (130, 146), (130, 147), (130, 148), (130, 149), (130, 150), (130, 151), (130, 152), (130, 153), (130, 154), (130, 155), (130, 156), (130, 157), (130, 158), (130, 159), (130, 161), (131, 135), (131, 137), (131, 138), (131, 139), (131, 140), (131, 141), (131, 142), (131, 143), (131, 144), (131, 145), (131, 146), (131, 147), (131, 148), (131, 149),
(131, 150), (131, 151), (131, 152), (131, 153), (131, 154), (131, 155), (131, 156), (131, 157), (131, 158), (131, 160), (132, 135), (132, 137), (132, 138), (132, 139), (132, 140), (132, 141), (132, 142), (132, 143), (132, 144), (132, 145), (132, 146), (132, 147), (132, 148), (132, 149), (132, 150), (132, 151), (132, 152), (132, 153), (132, 154), (132, 155), (132, 156), (132, 157), (132, 159), (133, 134), (133, 136), (133, 137), (133, 138), (133, 139), (133, 140), (133, 141), (133, 142), (133, 143), (133, 144), (133, 145), (133, 146), (133, 147), (133, 148), (133, 149), (133, 150), (133, 151), (133, 152), (133, 153), (133, 154), (133, 155), (133, 156), (133, 158), (134, 133), (134, 135), (134, 136), (134, 137), (134, 138), (134, 139), (134, 140), (134, 141), (134, 142), (134, 143), (134, 144), (134, 145), (134, 146), (134, 147), (134, 148), (134, 149),
(134, 150), (134, 151), (134, 152), (134, 153), (134, 154), (134, 155), (134, 156), (134, 158), (135, 133), (135, 135), (135, 136), (135, 137), (135, 138), (135, 139), (135, 140), (135, 141), (135, 142), (135, 143), (135, 144), (135, 145), (135, 146), (135, 147), (135, 148), (135, 149), (135, 150), (135, 151), (135, 152), (135, 153), (135, 154), (135, 155), (135, 157), (136, 132), (136, 134), (136, 135), (136, 136), (136, 137), (136, 138), (136, 139), (136, 140), (136, 141), (136, 142), (136, 143), (136, 144), (136, 145), (136, 146), (136, 147), (136, 148), (136, 149), (136, 150), (136, 151), (136, 152), (136, 153), (136, 154), (136, 156), (137, 130), (137, 133), (137, 134), (137, 135), (137, 136), (137, 137), (137, 138), (137, 139), (137, 140), (137, 141), (137, 142), (137, 143), (137, 144), (137, 145), (137, 146), (137, 147), (137, 148), (137, 149),
(137, 150), (137, 151), (137, 152), (137, 153), (137, 154), (137, 156), (138, 130), (138, 132), (138, 133), (138, 134), (138, 135), (138, 136), (138, 137), (138, 138), (138, 139), (138, 140), (138, 141), (138, 142), (138, 143), (138, 144), (138, 145), (138, 146), (138, 147), (138, 148), (138, 149), (138, 150), (138, 151), (138, 152), (138, 153), (138, 155), (139, 131), (139, 133), (139, 134), (139, 135), (139, 136), (139, 137), (139, 138), (139, 139), (139, 140), (139, 141), (139, 142), (139, 143), (139, 144), (139, 145), (139, 146), (139, 147), (139, 148), (139, 149), (139, 150), (139, 151), (139, 152), (139, 153), (139, 155), (140, 132), (140, 134), (140, 135), (140, 136), (140, 137), (140, 138), (140, 139), (140, 140), (140, 141), (140, 142), (140, 143), (140, 144), (140, 145), (140, 146), (140, 147), (140, 148), (140, 149), (140, 150), (140, 151),
(140, 152), (140, 154), (141, 133), (141, 135), (141, 136), (141, 137), (141, 138), (141, 139), (141, 140), (141, 141), (141, 142), (141, 143), (141, 144), (141, 145), (141, 146), (141, 147), (141, 148), (141, 149), (141, 150), (141, 151), (141, 152), (141, 154), (142, 133), (142, 135), (142, 136), (142, 137), (142, 138), (142, 139), (142, 140), (142, 141), (142, 142), (142, 143), (142, 144), (142, 145), (142, 146), (142, 147), (142, 148), (142, 149), (142, 150), (142, 151), (142, 153), (143, 133), (143, 135), (143, 136), (143, 137), (143, 138), (143, 139), (143, 140), (143, 141), (143, 142), (143, 143), (143, 144), (143, 145), (143, 146), (143, 147), (143, 148), (143, 149), (143, 150), (143, 151), (143, 153), (144, 134), (144, 136), (144, 137), (144, 138), (144, 139), (144, 140), (144, 141), (144, 142), (144, 143), (144, 144), (144, 145), (144, 146),
(144, 147), (144, 148), (144, 149), (144, 150), (144, 152), (145, 134), (145, 139), (145, 140), (145, 141), (145, 142), (145, 143), (145, 144), (145, 145), (145, 146), (145, 147), (145, 148), (145, 149), (145, 150), (145, 152), (146, 134), (146, 136), (146, 137), (146, 138), (146, 145), (146, 146), (146, 147), (146, 148), (146, 149), (146, 150), (146, 152), (147, 139), (147, 140), (147, 141), (147, 142), (147, 143), (147, 144), (147, 145), (147, 151), (148, 146), (148, 147), (148, 148), (148, 149), (148, 151), (170, 132), (170, 141), (170, 142), (170, 143), (170, 144), (170, 145), (170, 146), (170, 147), (170, 148), (170, 150), (171, 132), (171, 135), (171, 136), (171, 139), (171, 140), (171, 151), (172, 132), (172, 141), (172, 142), (172, 143), (172, 144), (172, 145), (172, 146), (172, 147), (172, 148), (172, 149), (172, 151), (173, 132), (173, 134),
(173, 135), (173, 136), (173, 137), (173, 139), (173, 140), (173, 141), (173, 142), (173, 143), (173, 144), (173, 145), (173, 146), (173, 147), (173, 148), (173, 149), (173, 151), (174, 132), (174, 134), (174, 135), (174, 136), (174, 137), (174, 138), (174, 139), (174, 140), (174, 141), (174, 142), (174, 143), (174, 144), (174, 145), (174, 146), (174, 147), (174, 148), (174, 149), (174, 151), (175, 132), (175, 134), (175, 135), (175, 136), (175, 137), (175, 138), (175, 139), (175, 140), (175, 141), (175, 142), (175, 143), (175, 144), (175, 145), (175, 146), (175, 147), (175, 148), (175, 149), (175, 151), (176, 132), (176, 134), (176, 135), (176, 136), (176, 137), (176, 138), (176, 139), (176, 140), (176, 141), (176, 142), (176, 143), (176, 144), (176, 145), (176, 146), (176, 147), (176, 148), (176, 149), (176, 151), (177, 132), (177, 134), (177, 135),
(177, 136), (177, 137), (177, 138), (177, 139), (177, 140), (177, 141), (177, 142), (177, 143), (177, 144), (177, 145), (177, 146), (177, 147), (177, 148), (177, 149), (177, 151), (178, 132), (178, 134), (178, 135), (178, 136), (178, 137), (178, 138), (178, 139), (178, 140), (178, 141), (178, 142), (178, 143), (178, 144), (178, 145), (178, 146), (178, 147), (178, 148), (178, 149), (178, 150), (178, 152), (179, 132), (179, 134), (179, 135), (179, 136), (179, 137), (179, 138), (179, 139), (179, 140), (179, 141), (179, 142), (179, 143), (179, 144), (179, 145), (179, 146), (179, 147), (179, 148), (179, 149), (179, 150), (179, 152), (180, 131), (180, 133), (180, 134), (180, 135), (180, 136), (180, 137), (180, 138), (180, 139), (180, 140), (180, 141), (180, 142), (180, 143), (180, 144), (180, 145), (180, 146), (180, 147), (180, 148), (180, 149), (180, 150),
(180, 152), (181, 132), (181, 133), (181, 134), (181, 135), (181, 136), (181, 137), (181, 138), (181, 139), (181, 140), (181, 141), (181, 142), (181, 143), (181, 144), (181, 145), (181, 146), (181, 147), (181, 148), (181, 149), (181, 150), (181, 152), (182, 131), (182, 133), (182, 134), (182, 135), (182, 136), (182, 137), (182, 138), (182, 139), (182, 140), (182, 141), (182, 142), (182, 143), (182, 144), (182, 145), (182, 146), (182, 147), (182, 148), (182, 149), (182, 150), (182, 151), (182, 153), (183, 131), (183, 133), (183, 134), (183, 135), (183, 136), (183, 137), (183, 138), (183, 139), (183, 140), (183, 141), (183, 142), (183, 143), (183, 144), (183, 145), (183, 146), (183, 147), (183, 148), (183, 149), (183, 150), (183, 151), (183, 152), (183, 154), (184, 132), (184, 134), (184, 135), (184, 136), (184, 137), (184, 138), (184, 139), (184, 140),
(184, 141), (184, 142), (184, 143), (184, 144), (184, 145), (184, 146), (184, 147), (184, 148), (184, 149), (184, 150), (184, 151), (184, 152), (184, 153), (184, 155), (185, 132), (185, 134), (185, 135), (185, 136), (185, 137), (185, 138), (185, 139), (185, 140), (185, 141), (185, 142), (185, 143), (185, 144), (185, 145), (185, 146), (185, 147), (185, 148), (185, 149), (185, 150), (185, 151), (185, 152), (185, 153), (185, 156), (186, 133), (186, 135), (186, 136), (186, 137), (186, 138), (186, 139), (186, 140), (186, 141), (186, 142), (186, 143), (186, 144), (186, 145), (186, 146), (186, 147), (186, 148), (186, 149), (186, 150), (186, 151), (186, 152), (186, 153), (186, 154), (186, 155), (186, 157), (187, 134), (187, 136), (187, 137), (187, 138), (187, 139), (187, 140), (187, 141), (187, 142), (187, 143), (187, 144), (187, 145), (187, 146), (187, 147),
(187, 148), (187, 149), (187, 150), (187, 151), (187, 152), (187, 153), (187, 154), (187, 155), (187, 156), (187, 158), (188, 135), (188, 137), (188, 138), (188, 139), (188, 140), (188, 141), (188, 142), (188, 143), (188, 144), (188, 145), (188, 146), (188, 147), (188, 148), (188, 149), (188, 150), (188, 151), (188, 152), (188, 153), (188, 154), (188, 155), (188, 156), (188, 157), (188, 159), (189, 136), (189, 138), (189, 139), (189, 140), (189, 141), (189, 142), (189, 143), (189, 144), (189, 145), (189, 146), (189, 147), (189, 148), (189, 149), (189, 150), (189, 151), (189, 152), (189, 153), (189, 154), (189, 155), (189, 156), (189, 157), (189, 159), (190, 136), (190, 138), (190, 139), (190, 143), (190, 145), (190, 148), (190, 149), (190, 150), (190, 151), (190, 152), (190, 153), (190, 154), (190, 155), (190, 156), (190, 158), (191, 137), (191, 140),
(191, 141), (191, 142), (191, 144), (191, 148), (191, 149), (191, 150), (191, 151), (191, 152), (191, 153), (191, 154), (191, 155), (191, 156), (191, 158), (192, 137), (192, 139), (192, 145), (192, 148), (192, 153), (192, 154), (192, 155), (192, 156), (192, 157), (192, 159), (192, 163), (192, 164), (192, 165), (192, 167), (193, 137), (193, 148), (193, 151), (193, 154), (193, 155), (193, 156), (193, 157), (193, 158), (193, 159), (193, 160), (193, 161), (193, 162), (193, 168), (193, 169), (194, 149), (194, 153), (194, 155), (194, 156), (194, 157), (194, 158), (194, 159), (194, 163), (194, 164), (194, 165), (194, 166), (194, 167), (194, 171), (194, 172), (195, 154), (195, 156), (195, 157), (195, 158), (195, 159), (195, 160), (195, 161), (195, 162), (195, 163), (195, 164), (195, 165), (195, 166), (195, 167), (195, 168), (195, 169), (195, 173), (195, 174),
(195, 175), (195, 177), (196, 154), (196, 156), (196, 157), (196, 158), (196, 159), (196, 160), (196, 161), (196, 162), (196, 163), (196, 164), (196, 165), (196, 166), (196, 167), (196, 168), (196, 169), (196, 170), (196, 178), (197, 155), (197, 157), (197, 158), (197, 159), (197, 160), (197, 161), (197, 162), (197, 163), (197, 164), (197, 165), (197, 166), (197, 167), (197, 171), (197, 172), (197, 173), (197, 174), (197, 175), (197, 176), (197, 177), (197, 179), (198, 155), (198, 157), (198, 158), (198, 159), (198, 160), (198, 161), (198, 162), (198, 163), (198, 164), (198, 165), (198, 168), (198, 169), (199, 155), (199, 157), (199, 158), (199, 159), (199, 160), (199, 161), (199, 162), (199, 163), (199, 164), (199, 167), (200, 156), (200, 158), (200, 159), (200, 160), (200, 161), (200, 162), (200, 163), (200, 165), (201, 156), (201, 158), (201, 159),
(201, 160), (201, 161), (201, 162), (201, 164), (202, 156), (202, 158), (202, 159), (202, 160), (202, 161), (202, 163), (203, 156), (203, 158), (203, 159), (203, 160), (204, 157), (204, 159), (204, 162), (205, 157), (205, 159), (205, 161), (206, 158), (206, 160), (207, 159), )
coordinates_00540A = ((114, 92),
(115, 91), (115, 92), (116, 90), (116, 93), (117, 89), (117, 91), (117, 93), (118, 94), (119, 95), (120, 96), (127, 105), (128, 106), (129, 109), (130, 111), (130, 113), (131, 114), (131, 115), (131, 116), (131, 117), (131, 118), (132, 120), (135, 126), (136, 127), (137, 128), (138, 128), (181, 126), (181, 127), (182, 125), (182, 127), (183, 123), (185, 110), (185, 112), (185, 113), (185, 114), (185, 115), (185, 116), (185, 117), (186, 109), (187, 107), (189, 104), (190, 102), (191, 101), (192, 100), (193, 99), (194, 97), (195, 96), (196, 88), (196, 90), (196, 91), (196, 92), (196, 93), (196, 95), (197, 89), (198, 90), (198, 94), (199, 91), (199, 93), (200, 92), (200, 93), (201, 93), )
coordinates_FFDA00 = ((76, 143),
(76, 145), (76, 146), (76, 147), (76, 148), (76, 149), (76, 150), (76, 151), (76, 152), (76, 153), (76, 154), (76, 155), (76, 156), (76, 157), (76, 158), (76, 159), (76, 162), (76, 163), (76, 165), (77, 141), (77, 146), (77, 147), (77, 148), (77, 149), (77, 150), (77, 151), (77, 152), (77, 153), (77, 157), (77, 166), (78, 139), (78, 140), (78, 144), (78, 155), (79, 138), (79, 142), (79, 143), (80, 136), (80, 140), (80, 141), (81, 134), (81, 138), (81, 139), (82, 132), (82, 136), (82, 137), (83, 129), (83, 130), (83, 134), (83, 135), (84, 127), (84, 128), (84, 132), (84, 133), (85, 124), (85, 125), (85, 129), (85, 131), (86, 120), (86, 122), (86, 123), (86, 126), (86, 127), (87, 118), (87, 124), (88, 117), (89, 106), (89, 108), (89, 116), (89, 120), (89, 121), (89, 123), (90, 104),
(90, 109), (90, 110), (90, 111), (90, 112), (90, 113), (90, 114), (90, 118), (90, 119), (91, 102), (91, 103), (91, 106), (91, 107), (91, 108), (91, 117), (92, 101), (92, 104), (92, 105), (92, 106), (92, 107), (92, 108), (92, 109), (92, 110), (92, 111), (92, 112), (92, 113), (92, 114), (92, 116), (93, 99), (93, 103), (93, 104), (93, 105), (93, 106), (93, 107), (93, 108), (93, 109), (93, 110), (93, 111), (93, 115), (94, 97), (94, 98), (94, 101), (94, 102), (94, 103), (94, 104), (94, 105), (94, 106), (94, 107), (94, 108), (94, 109), (94, 113), (94, 115), (95, 95), (95, 99), (95, 100), (95, 101), (95, 102), (95, 103), (95, 104), (95, 105), (95, 106), (95, 107), (95, 108), (95, 111), (96, 94), (96, 97), (96, 98), (96, 99), (96, 100), (96, 101), (96, 102), (96, 103), (96, 104),
(96, 105), (96, 106), (96, 109), (97, 94), (97, 96), (97, 97), (97, 98), (97, 99), (97, 100), (97, 101), (97, 102), (97, 103), (97, 104), (97, 105), (97, 108), (98, 93), (98, 95), (98, 96), (98, 97), (98, 98), (98, 99), (98, 100), (98, 101), (98, 102), (98, 103), (98, 104), (98, 106), (99, 93), (99, 95), (99, 96), (99, 97), (99, 98), (99, 99), (99, 100), (99, 101), (99, 102), (99, 103), (99, 105), (100, 92), (100, 94), (100, 95), (100, 96), (100, 97), (100, 98), (100, 99), (100, 100), (100, 101), (100, 102), (100, 104), (101, 92), (101, 94), (101, 95), (101, 96), (101, 97), (101, 98), (101, 99), (101, 100), (101, 101), (101, 103), (102, 91), (102, 93), (102, 94), (102, 95), (102, 96), (102, 97), (102, 98), (102, 99), (102, 100), (102, 102), (103, 89), (103, 92), (103, 93),
(103, 94), (103, 95), (103, 96), (103, 97), (103, 98), (103, 99), (103, 101), (104, 88), (104, 91), (104, 92), (104, 93), (104, 94), (104, 95), (104, 96), (104, 97), (104, 98), (104, 99), (104, 101), (105, 87), (105, 89), (105, 90), (105, 91), (105, 92), (105, 93), (105, 94), (105, 95), (105, 96), (105, 97), (106, 85), (106, 88), (106, 89), (106, 90), (106, 91), (106, 92), (106, 93), (106, 94), (106, 95), (106, 98), (106, 100), (107, 84), (107, 87), (107, 88), (107, 89), (107, 90), (107, 91), (107, 92), (107, 93), (107, 94), (107, 97), (108, 83), (108, 85), (108, 86), (108, 87), (108, 88), (108, 89), (108, 90), (108, 91), (108, 92), (108, 95), (109, 82), (109, 84), (109, 85), (109, 86), (109, 87), (109, 88), (109, 89), (109, 90), (109, 91), (109, 92), (109, 94), (110, 81), (110, 83),
(110, 84), (110, 85), (110, 86), (110, 87), (110, 88), (110, 89), (110, 90), (110, 91), (110, 93), (111, 80), (111, 82), (111, 83), (111, 84), (111, 85), (111, 86), (111, 87), (111, 88), (111, 89), (111, 90), (111, 92), (112, 80), (112, 82), (112, 83), (112, 84), (112, 85), (112, 86), (112, 87), (112, 88), (112, 89), (112, 91), (113, 79), (113, 81), (113, 82), (113, 83), (113, 84), (113, 85), (113, 86), (113, 87), (113, 88), (113, 91), (114, 79), (114, 81), (114, 82), (114, 83), (114, 84), (114, 85), (114, 86), (114, 87), (114, 90), (115, 78), (115, 80), (115, 81), (115, 82), (115, 83), (115, 84), (115, 85), (115, 86), (116, 77), (116, 79), (116, 80), (116, 81), (116, 82), (116, 83), (116, 84), (116, 85), (116, 87), (117, 77), (117, 79), (117, 80), (117, 81), (117, 82), (117, 86),
(118, 76), (118, 78), (118, 79), (118, 80), (118, 81), (118, 83), (118, 85), (119, 76), (119, 78), (119, 79), (119, 82), (120, 75), (120, 77), (120, 78), (120, 81), (121, 75), (121, 77), (121, 80), (122, 74), (122, 76), (122, 77), (122, 79), (123, 73), (123, 75), (123, 76), (123, 78), (124, 73), (124, 75), (124, 77), (125, 72), (125, 74), (125, 76), (126, 72), (126, 74), (126, 76), (127, 71), (127, 73), (127, 75), (128, 71), (128, 73), (128, 75), (129, 71), (129, 74), (130, 70), (130, 72), (130, 74), (131, 70), (131, 73), (132, 70), (132, 73), (133, 70), (133, 72), (134, 70), (134, 72), (135, 70), (135, 71), (136, 69), (136, 71), (137, 69), (137, 71), (138, 68), (138, 70), (139, 70), (140, 69), (140, 71), (141, 69), (141, 72), (142, 70), (142, 72), (143, 71), (177, 71), (177, 74),
(178, 71), (178, 74), (179, 70), (179, 73), (180, 70), (181, 70), (181, 71), (182, 69), (182, 72), (183, 69), (183, 72), (184, 69), (184, 71), (184, 73), (185, 69), (185, 71), (185, 72), (185, 74), (186, 70), (186, 72), (186, 74), (187, 71), (187, 73), (187, 75), (188, 71), (188, 73), (188, 75), (189, 72), (189, 74), (189, 76), (190, 72), (190, 74), (190, 75), (190, 77), (191, 73), (191, 75), (191, 76), (191, 78), (192, 73), (192, 75), (192, 76), (192, 77), (192, 79), (193, 74), (193, 76), (193, 77), (193, 78), (193, 80), (194, 75), (194, 77), (194, 78), (194, 79), (194, 81), (195, 75), (195, 77), (195, 78), (195, 79), (195, 80), (196, 76), (196, 78), (196, 79), (196, 80), (196, 81), (196, 84), (197, 76), (197, 78), (197, 79), (197, 80), (197, 81), (197, 82), (197, 83), (197, 86),
(198, 77), (198, 79), (198, 80), (198, 81), (198, 82), (198, 83), (198, 84), (198, 87), (199, 78), (199, 80), (199, 81), (199, 82), (199, 83), (199, 84), (199, 85), (199, 86), (199, 89), (200, 78), (200, 80), (200, 81), (200, 82), (200, 83), (200, 84), (200, 85), (200, 86), (200, 87), (200, 90), (201, 79), (201, 81), (201, 82), (201, 83), (201, 84), (201, 85), (201, 86), (201, 87), (201, 88), (201, 91), (202, 80), (202, 82), (202, 83), (202, 84), (202, 85), (202, 86), (202, 87), (202, 88), (202, 89), (202, 90), (202, 92), (203, 80), (203, 82), (203, 83), (203, 84), (203, 85), (203, 86), (203, 87), (203, 88), (203, 89), (203, 90), (203, 91), (203, 93), (204, 81), (204, 83), (204, 84), (204, 85), (204, 86), (204, 87), (204, 88), (204, 89), (204, 90), (204, 91), (204, 92), (204, 94),
(205, 82), (205, 84), (205, 85), (205, 86), (205, 87), (205, 88), (205, 89), (205, 90), (205, 91), (205, 92), (205, 93), (205, 96), (206, 82), (206, 84), (206, 85), (206, 86), (206, 87), (206, 88), (206, 89), (206, 90), (206, 91), (206, 92), (206, 93), (206, 94), (206, 97), (207, 83), (207, 85), (207, 86), (207, 87), (207, 88), (207, 89), (207, 90), (207, 91), (207, 92), (207, 93), (207, 94), (207, 95), (207, 98), (208, 84), (208, 86), (208, 87), (208, 88), (208, 89), (208, 90), (208, 91), (208, 92), (208, 93), (208, 94), (208, 95), (208, 96), (208, 98), (209, 87), (209, 88), (209, 89), (209, 90), (209, 91), (209, 92), (209, 93), (209, 94), (209, 95), (209, 96), (209, 97), (209, 99), (210, 85), (210, 87), (210, 88), (210, 89), (210, 90), (210, 91), (210, 92), (210, 93), (210, 94),
(210, 95), (210, 96), (210, 97), (210, 99), (211, 86), (211, 88), (211, 89), (211, 90), (211, 91), (211, 92), (211, 93), (211, 94), (211, 95), (211, 96), (211, 97), (211, 98), (211, 100), (212, 87), (212, 89), (212, 90), (212, 91), (212, 92), (212, 93), (212, 94), (212, 95), (212, 96), (212, 97), (212, 98), (212, 99), (212, 101), (213, 88), (213, 90), (213, 91), (213, 92), (213, 93), (213, 94), (213, 95), (213, 96), (213, 97), (213, 98), (213, 99), (213, 101), (214, 88), (214, 91), (214, 92), (214, 93), (214, 94), (214, 95), (214, 96), (214, 97), (214, 98), (214, 99), (214, 100), (214, 102), (215, 89), (215, 91), (215, 92), (215, 93), (215, 94), (215, 95), (215, 96), (215, 97), (215, 98), (215, 99), (215, 100), (215, 101), (215, 103), (216, 90), (216, 92), (216, 93), (216, 94), (216, 95),
(216, 96), (216, 97), (216, 98), (216, 99), (216, 100), (216, 101), (216, 102), (216, 104), (217, 91), (217, 94), (217, 95), (217, 96), (217, 97), (217, 98), (217, 99), (217, 100), (217, 101), (217, 102), (217, 103), (217, 105), (218, 92), (218, 95), (218, 96), (218, 97), (218, 98), (218, 99), (218, 100), (218, 101), (218, 102), (218, 103), (218, 104), (218, 106), (219, 93), (219, 94), (219, 97), (219, 98), (219, 99), (219, 100), (219, 101), (219, 102), (219, 103), (219, 104), (219, 105), (219, 107), (220, 95), (220, 99), (220, 100), (220, 101), (220, 102), (220, 103), (220, 104), (220, 105), (220, 106), (220, 108), (221, 97), (221, 101), (221, 102), (221, 103), (221, 104), (221, 105), (221, 106), (221, 107), (221, 110), (222, 99), (222, 103), (222, 104), (222, 105), (222, 106), (222, 107), (222, 108), (222, 111), (223, 101),
(223, 104), (223, 105), (223, 106), (223, 107), (223, 108), (223, 109), (223, 110), (223, 113), (224, 103), (224, 106), (224, 107), (224, 108), (224, 109), (224, 110), (224, 111), (224, 115), (225, 104), (225, 108), (225, 109), (225, 110), (225, 112), (225, 113), (225, 116), (226, 106), (226, 111), (226, 115), (226, 117), (227, 108), (227, 109), (227, 113), (227, 114), (227, 118), (227, 119), (227, 120), (227, 121), (227, 123), (228, 116), (228, 117), (228, 124), (229, 118), (229, 119), (229, 125), (230, 121), (230, 122), (230, 125), (231, 124), (231, 126), (232, 126), (232, 127), (233, 128), (233, 129), (234, 129), (234, 131), (235, 131), (235, 133), (236, 132), (236, 135), (237, 133), (237, 136), (237, 137), (238, 134), (238, 139), (238, 140), (239, 136), (239, 141), (239, 142), (239, 143), (240, 138), (240, 140), (240, 141), (240, 144), (240, 145),
(241, 143), (241, 144), (241, 145), (241, 146), (241, 147), (241, 148), (242, 147), (242, 148), (242, 149), (242, 150), (242, 151), (242, 157), (242, 160), (242, 162), (243, 150), (243, 151), (243, 152), (243, 153), (243, 154), (243, 156), (243, 160), (244, 155), )
coordinates_4682B4 = ((108, 99),
(108, 101), (108, 102), (108, 103), (108, 105), (109, 97), (109, 106), (109, 108), (110, 95), (110, 99), (110, 100), (110, 101), (110, 102), (110, 103), (110, 104), (110, 105), (110, 109), (110, 110), (111, 94), (111, 97), (111, 98), (111, 99), (111, 100), (111, 101), (111, 102), (111, 103), (111, 104), (111, 105), (111, 106), (111, 107), (111, 108), (111, 111), (112, 94), (112, 96), (112, 97), (112, 98), (112, 99), (112, 100), (112, 101), (112, 102), (112, 103), (112, 104), (112, 105), (112, 106), (112, 107), (112, 108), (112, 109), (112, 110), (112, 113), (113, 94), (113, 96), (113, 97), (113, 98), (113, 99), (113, 100), (113, 101), (113, 102), (113, 103), (113, 104), (113, 105), (113, 106), (113, 107), (113, 108), (113, 109), (113, 110), (113, 111), (113, 114), (114, 94), (114, 96), (114, 97), (114, 98), (114, 99), (114, 100),
(114, 101), (114, 102), (114, 103), (114, 104), (114, 105), (114, 106), (114, 107), (114, 108), (114, 109), (114, 110), (114, 111), (114, 112), (114, 113), (114, 115), (115, 95), (115, 97), (115, 98), (115, 99), (115, 100), (115, 101), (115, 102), (115, 103), (115, 104), (115, 105), (115, 106), (115, 107), (115, 108), (115, 109), (115, 110), (115, 111), (115, 112), (115, 113), (115, 114), (115, 117), (116, 95), (116, 97), (116, 98), (116, 99), (116, 100), (116, 101), (116, 102), (116, 103), (116, 104), (116, 105), (116, 106), (116, 107), (116, 108), (116, 109), (116, 110), (116, 111), (116, 112), (116, 113), (116, 114), (116, 115), (116, 118), (117, 98), (117, 99), (117, 100), (117, 101), (117, 102), (117, 103), (117, 104), (117, 105), (117, 106), (117, 107), (117, 108), (117, 109), (117, 110), (117, 111), (117, 112), (117, 113), (117, 114),
(117, 115), (117, 116), (117, 117), (117, 119), (118, 96), (118, 99), (118, 100), (118, 101), (118, 102), (118, 103), (118, 104), (118, 105), (118, 106), (118, 107), (118, 108), (118, 109), (118, 110), (118, 111), (118, 112), (118, 113), (118, 114), (118, 115), (118, 116), (118, 117), (118, 118), (118, 121), (119, 97), (119, 100), (119, 101), (119, 102), (119, 103), (119, 104), (119, 105), (119, 106), (119, 107), (119, 108), (119, 109), (119, 110), (119, 111), (119, 112), (119, 113), (119, 114), (119, 115), (119, 116), (119, 117), (119, 118), (119, 119), (119, 122), (120, 99), (120, 101), (120, 102), (120, 103), (120, 104), (120, 105), (120, 106), (120, 107), (120, 108), (120, 109), (120, 110), (120, 111), (120, 112), (120, 113), (120, 114), (120, 115), (120, 116), (120, 117), (120, 118), (120, 119), (120, 120), (120, 121), (120, 124), (121, 100),
(121, 102), (121, 103), (121, 104), (121, 105), (121, 106), (121, 107), (121, 108), (121, 109), (121, 110), (121, 111), (121, 112), (121, 113), (121, 114), (121, 115), (121, 116), (121, 117), (121, 118), (121, 119), (121, 120), (121, 121), (121, 122), (121, 126), (122, 101), (122, 104), (122, 105), (122, 106), (122, 107), (122, 108), (122, 109), (122, 110), (122, 111), (122, 112), (122, 113), (122, 114), (122, 115), (122, 116), (122, 117), (122, 118), (122, 119), (122, 120), (122, 121), (122, 122), (122, 123), (122, 124), (122, 128), (123, 102), (123, 105), (123, 106), (123, 107), (123, 108), (123, 109), (123, 110), (123, 111), (123, 112), (123, 113), (123, 114), (123, 115), (123, 116), (123, 117), (123, 118), (123, 119), (123, 120), (123, 121), (123, 122), (123, 123), (123, 124), (123, 125), (123, 126), (123, 130), (124, 103), (124, 106), (124, 107),
(124, 108), (124, 109), (124, 110), (124, 111), (124, 112), (124, 113), (124, 114), (124, 115), (124, 116), (124, 117), (124, 118), (124, 119), (124, 120), (124, 121), (124, 122), (124, 123), (124, 124), (124, 125), (124, 126), (124, 127), (124, 128), (124, 131), (124, 132), (125, 105), (125, 108), (125, 109), (125, 110), (125, 111), (125, 112), (125, 113), (125, 114), (125, 115), (125, 116), (125, 117), (125, 118), (125, 119), (125, 120), (125, 121), (125, 122), (125, 123), (125, 124), (125, 125), (125, 126), (125, 127), (125, 128), (125, 129), (125, 130), (125, 134), (126, 106), (126, 111), (126, 112), (126, 113), (126, 114), (126, 115), (126, 116), (126, 117), (126, 118), (126, 119), (126, 120), (126, 121), (126, 122), (126, 123), (126, 124), (126, 125), (126, 126), (126, 127), (126, 128), (126, 129), (126, 130), (126, 131), (126, 132), (126, 134),
(127, 108), (127, 110), (127, 115), (127, 116), (127, 117), (127, 118), (127, 119), (127, 120), (127, 121), (127, 122), (127, 123), (127, 124), (127, 125), (127, 126), (127, 127), (127, 128), (127, 129), (127, 130), (127, 131), (127, 132), (127, 134), (128, 111), (128, 112), (128, 113), (128, 114), (128, 120), (128, 121), (128, 122), (128, 123), (128, 124), (128, 125), (128, 126), (128, 127), (128, 128), (128, 129), (128, 130), (128, 131), (128, 132), (128, 134), (129, 115), (129, 116), (129, 117), (129, 118), (129, 119), (129, 122), (129, 123), (129, 124), (129, 125), (129, 126), (129, 127), (129, 128), (129, 129), (129, 130), (129, 131), (129, 133), (130, 120), (130, 123), (130, 124), (130, 125), (130, 126), (130, 127), (130, 128), (130, 129), (130, 130), (130, 131), (130, 133), (131, 122), (131, 126), (131, 127), (131, 128), (131, 129), (131, 130),
(131, 131), (131, 133), (132, 123), (132, 127), (132, 128), (132, 129), (132, 130), (132, 132), (133, 126), (133, 129), (133, 130), (133, 132), (134, 127), (134, 131), (135, 129), (135, 130), (183, 128), (183, 129), (184, 126), (184, 129), (185, 122), (185, 124), (185, 125), (185, 128), (185, 130), (186, 119), (186, 120), (186, 126), (186, 127), (186, 128), (186, 129), (186, 131), (187, 111), (187, 112), (187, 113), (187, 114), (187, 115), (187, 116), (187, 117), (187, 118), (187, 122), (187, 123), (187, 124), (187, 125), (187, 126), (187, 127), (187, 128), (187, 129), (188, 108), (188, 110), (188, 119), (188, 120), (188, 121), (188, 122), (188, 123), (188, 124), (188, 125), (188, 126), (188, 127), (188, 128), (188, 129), (188, 130), (188, 132), (189, 107), (189, 111), (189, 112), (189, 113), (189, 114), (189, 115), (189, 116), (189, 117), (189, 118),
(189, 119), (189, 120), (189, 121), (189, 122), (189, 123), (189, 124), (189, 125), (189, 126), (189, 127), (189, 128), (189, 129), (189, 130), (189, 131), (189, 133), (190, 105), (190, 108), (190, 109), (190, 110), (190, 111), (190, 112), (190, 113), (190, 114), (190, 115), (190, 116), (190, 117), (190, 118), (190, 119), (190, 120), (190, 121), (190, 122), (190, 123), (190, 124), (190, 125), (190, 126), (190, 127), (190, 128), (190, 129), (190, 130), (190, 131), (190, 132), (190, 134), (191, 104), (191, 107), (191, 108), (191, 109), (191, 110), (191, 111), (191, 112), (191, 113), (191, 114), (191, 115), (191, 116), (191, 117), (191, 118), (191, 119), (191, 120), (191, 121), (191, 122), (191, 123), (191, 124), (191, 125), (191, 126), (191, 127), (191, 128), (191, 129), (191, 130), (191, 131), (191, 132), (191, 134), (192, 103), (192, 105), (192, 106),
(192, 107), (192, 108), (192, 109), (192, 110), (192, 111), (192, 112), (192, 113), (192, 114), (192, 115), (192, 116), (192, 117), (192, 118), (192, 119), (192, 120), (192, 121), (192, 122), (192, 123), (192, 124), (192, 125), (192, 126), (192, 127), (192, 128), (192, 135), (193, 101), (193, 104), (193, 105), (193, 106), (193, 107), (193, 108), (193, 109), (193, 110), (193, 111), (193, 112), (193, 113), (193, 114), (193, 115), (193, 116), (193, 117), (193, 118), (193, 119), (193, 120), (193, 121), (193, 122), (193, 123), (193, 124), (193, 125), (193, 129), (193, 130), (193, 131), (193, 132), (193, 133), (193, 135), (194, 100), (194, 103), (194, 104), (194, 105), (194, 106), (194, 107), (194, 108), (194, 109), (194, 110), (194, 111), (194, 112), (194, 113), (194, 114), (194, 115), (194, 116), (194, 117), (194, 118), (194, 119), (194, 120), (194, 121),
(194, 122), (194, 123), (194, 126), (194, 127), (194, 128), (195, 99), (195, 101), (195, 102), (195, 103), (195, 104), (195, 105), (195, 106), (195, 107), (195, 108), (195, 109), (195, 110), (195, 111), (195, 112), (195, 113), (195, 114), (195, 115), (195, 116), (195, 117), (195, 118), (195, 119), (195, 120), (195, 121), (195, 122), (195, 125), (196, 98), (196, 100), (196, 101), (196, 102), (196, 103), (196, 104), (196, 105), (196, 106), (196, 107), (196, 108), (196, 109), (196, 110), (196, 111), (196, 112), (196, 113), (196, 114), (196, 115), (196, 116), (196, 117), (196, 118), (196, 119), (196, 120), (196, 121), (196, 123), (197, 97), (197, 99), (197, 100), (197, 101), (197, 102), (197, 103), (197, 104), (197, 105), (197, 106), (197, 107), (197, 108), (197, 109), (197, 110), (197, 111), (197, 112), (197, 113), (197, 114), (197, 115), (197, 116),
(197, 117), (197, 118), (197, 119), (197, 120), (197, 122), (198, 96), (198, 98), (198, 99), (198, 100), (198, 101), (198, 102), (198, 103), (198, 104), (198, 105), (198, 106), (198, 107), (198, 108), (198, 109), (198, 110), (198, 111), (198, 112), (198, 113), (198, 114), (198, 115), (198, 116), (198, 117), (198, 118), (198, 119), (198, 121), (199, 96), (199, 98), (199, 99), (199, 100), (199, 101), (199, 102), (199, 103), (199, 104), (199, 105), (199, 106), (199, 107), (199, 108), (199, 109), (199, 110), (199, 111), (199, 112), (199, 113), (199, 114), (199, 115), (199, 116), (199, 117), (199, 118), (199, 120), (200, 95), (200, 97), (200, 98), (200, 99), (200, 100), (200, 101), (200, 102), (200, 103), (200, 104), (200, 105), (200, 106), (200, 107), (200, 108), (200, 109), (200, 110), (200, 111), (200, 112), (200, 113), (200, 120), (201, 95),
(201, 97), (201, 98), (201, 99), (201, 100), (201, 101), (201, 102), (201, 103), (201, 104), (201, 105), (201, 106), (201, 107), (201, 108), (201, 109), (201, 110), (201, 111), (201, 114), (201, 115), (201, 116), (201, 117), (201, 118), (201, 120), (202, 95), (202, 97), (202, 98), (202, 99), (202, 100), (202, 101), (202, 102), (202, 103), (202, 104), (202, 105), (202, 106), (202, 107), (202, 108), (202, 109), (202, 113), (203, 96), (203, 98), (203, 99), (203, 100), (203, 101), (203, 102), (203, 103), (203, 104), (203, 105), (203, 106), (203, 107), (203, 111), (204, 97), (204, 100), (204, 101), (204, 102), (204, 103), (204, 109), (205, 98), (205, 104), (205, 105), (205, 106), (205, 107), (206, 99), (206, 101), (206, 102), (206, 103), )
coordinates_E60086 = ((87, 138),
(88, 139), (89, 140), (90, 140), (91, 126), (92, 127), (93, 128), (93, 129), (94, 130), (96, 113), (96, 118), (97, 111), (97, 112), (97, 121), (98, 109), (98, 112), (98, 115), (99, 108), (99, 111), (99, 112), (99, 117), (100, 106), (100, 109), (100, 110), (100, 111), (100, 112), (100, 113), (100, 114), (100, 115), (100, 118), (101, 105), (101, 108), (101, 109), (101, 110), (101, 111), (101, 112), (101, 113), (101, 114), (101, 115), (101, 116), (101, 117), (101, 142), (102, 104), (102, 107), (102, 108), (102, 109), (102, 110), (102, 111), (102, 112), (102, 113), (102, 114), (102, 115), (102, 116), (102, 117), (102, 118), (102, 121), (103, 104), (103, 106), (103, 107), (103, 108), (103, 109), (103, 110), (103, 111), (103, 112), (103, 113), (103, 114), (103, 115), (103, 116), (103, 117), (103, 118), (103, 119), (103, 122), (104, 103),
(104, 105), (104, 106), (104, 107), (104, 108), (104, 109), (104, 110), (104, 111), (104, 112), (104, 113), (104, 114), (104, 115), (104, 116), (104, 118), (104, 124), (105, 103), (105, 107), (105, 108), (105, 109), (105, 110), (105, 111), (105, 112), (105, 113), (105, 114), (105, 115), (105, 116), (105, 118), (105, 127), (106, 102), (106, 103), (106, 104), (106, 105), (106, 106), (106, 109), (106, 110), (106, 111), (106, 112), (106, 113), (106, 114), (106, 115), (106, 116), (106, 118), (106, 133), (106, 136), (107, 107), (107, 108), (107, 111), (107, 112), (107, 113), (107, 114), (107, 115), (107, 117), (107, 131), (107, 136), (108, 109), (108, 110), (108, 113), (108, 114), (108, 115), (108, 117), (108, 129), (108, 133), (108, 134), (108, 136), (109, 111), (109, 114), (109, 115), (109, 116), (109, 126), (109, 127), (109, 128), (109, 131), (109, 132),
(109, 133), (109, 134), (109, 135), (109, 137), (110, 113), (110, 118), (110, 119), (110, 120), (110, 121), (110, 122), (110, 123), (110, 124), (110, 125), (110, 129), (110, 130), (110, 131), (110, 132), (110, 133), (110, 134), (110, 135), (110, 137), (111, 114), (111, 117), (111, 126), (111, 127), (111, 128), (111, 129), (111, 130), (111, 131), (111, 132), (111, 133), (111, 134), (111, 135), (111, 137), (111, 144), (112, 116), (112, 118), (112, 119), (112, 120), (112, 121), (112, 122), (112, 123), (112, 124), (112, 125), (112, 126), (112, 127), (112, 128), (112, 129), (112, 130), (112, 131), (112, 132), (112, 133), (112, 134), (112, 135), (112, 136), (112, 137), (112, 143), (113, 117), (113, 120), (113, 121), (113, 122), (113, 123), (113, 124), (113, 125), (113, 126), (113, 127), (113, 128), (113, 129), (113, 130), (113, 131), (113, 132), (113, 133),
(113, 134), (113, 135), (113, 136), (113, 137), (113, 139), (113, 141), (113, 149), (113, 150), (113, 151), (113, 153), (114, 118), (114, 121), (114, 122), (114, 123), (114, 124), (114, 125), (114, 126), (114, 127), (114, 128), (114, 129), (114, 130), (114, 131), (114, 132), (114, 133), (114, 134), (114, 135), (114, 136), (114, 137), (114, 138), (114, 141), (114, 145), (114, 147), (114, 148), (114, 154), (115, 120), (115, 123), (115, 124), (115, 125), (115, 126), (115, 127), (115, 128), (115, 129), (115, 130), (115, 131), (115, 132), (115, 133), (115, 134), (115, 135), (115, 136), (115, 137), (115, 138), (115, 139), (115, 140), (115, 141), (115, 142), (115, 149), (115, 150), (115, 151), (115, 152), (115, 154), (116, 121), (116, 124), (116, 125), (116, 126), (116, 127), (116, 128), (116, 129), (116, 130), (116, 131), (116, 132), (116, 133), (116, 134),
(116, 135), (116, 136), (116, 137), (116, 138), (116, 139), (116, 140), (116, 141), (116, 143), (116, 145), (116, 146), (116, 147), (116, 148), (116, 149), (116, 150), (116, 151), (116, 152), (116, 154), (117, 123), (117, 126), (117, 127), (117, 128), (117, 129), (117, 130), (117, 131), (117, 132), (117, 133), (117, 134), (117, 135), (117, 136), (117, 137), (117, 138), (117, 139), (117, 140), (117, 141), (117, 142), (117, 144), (117, 145), (117, 146), (117, 147), (117, 148), (117, 149), (117, 150), (117, 151), (117, 152), (117, 154), (118, 124), (118, 128), (118, 129), (118, 130), (118, 131), (118, 132), (118, 133), (118, 134), (118, 135), (118, 136), (118, 137), (118, 138), (118, 139), (118, 140), (118, 141), (118, 142), (118, 143), (118, 144), (118, 145), (118, 146), (118, 147), (118, 148), (118, 149), (118, 150), (118, 151), (118, 152), (118, 154),
(119, 126), (119, 130), (119, 131), (119, 132), (119, 133), (119, 134), (119, 135), (119, 136), (119, 137), (119, 138), (119, 139), (119, 140), (119, 141), (119, 142), (119, 143), (119, 144), (119, 145), (119, 146), (119, 147), (119, 148), (119, 149), (119, 150), (119, 152), (120, 128), (120, 131), (120, 132), (120, 133), (120, 134), (120, 135), (120, 136), (120, 137), (120, 138), (120, 139), (120, 140), (120, 141), (120, 142), (120, 143), (120, 144), (120, 145), (120, 146), (120, 147), (120, 148), (120, 149), (120, 150), (120, 152), (121, 130), (121, 133), (121, 134), (121, 135), (121, 136), (121, 137), (121, 138), (121, 139), (121, 140), (121, 141), (121, 142), (121, 143), (121, 144), (121, 145), (121, 146), (121, 147), (121, 148), (121, 149), (121, 150), (121, 152), (122, 132), (122, 135), (122, 136), (122, 137), (122, 138), (122, 148), (122, 149),
(122, 150), (122, 152), (123, 133), (123, 134), (123, 139), (123, 140), (123, 141), (123, 142), (123, 143), (123, 144), (123, 145), (123, 146), (123, 152), (124, 135), (124, 137), (124, 138), (124, 148), (124, 151), (193, 141), (193, 143), (193, 144), (194, 139), (194, 144), (194, 147), (195, 130), (195, 131), (195, 132), (195, 133), (195, 134), (195, 135), (195, 136), (195, 137), (195, 138), (195, 141), (195, 142), (195, 143), (195, 147), (195, 151), (196, 127), (196, 129), (196, 139), (196, 140), (196, 141), (196, 142), (196, 143), (196, 144), (196, 147), (196, 150), (196, 152), (197, 125), (197, 130), (197, 131), (197, 132), (197, 133), (197, 134), (197, 135), (197, 136), (197, 137), (197, 138), (197, 139), (197, 140), (197, 141), (197, 142), (197, 143), (197, 144), (197, 145), (197, 146), (197, 147), (197, 152), (198, 123), (198, 127), (198, 128),
(198, 129), (198, 130), (198, 131), (198, 132), (198, 133), (198, 134), (198, 135), (198, 136), (198, 137), (198, 138), (198, 139), (198, 140), (198, 141), (198, 143), (198, 144), (198, 145), (198, 146), (198, 147), (198, 150), (198, 151), (198, 153), (199, 123), (199, 125), (199, 126), (199, 127), (199, 128), (199, 129), (199, 130), (199, 131), (199, 132), (199, 133), (199, 134), (199, 135), (199, 136), (199, 137), (199, 138), (199, 139), (199, 142), (199, 143), (199, 144), (199, 146), (199, 147), (199, 148), (199, 149), (199, 150), (199, 151), (199, 153), (200, 122), (200, 124), (200, 125), (200, 126), (200, 127), (200, 128), (200, 129), (200, 130), (200, 131), (200, 132), (200, 133), (200, 134), (200, 135), (200, 136), (200, 137), (200, 138), (200, 139), (200, 140), (200, 141), (200, 143), (200, 144), (200, 145), (200, 148), (200, 149), (200, 150),
(200, 151), (201, 122), (201, 124), (201, 125), (201, 126), (201, 127), (201, 128), (201, 129), (201, 130), (201, 131), (201, 132), (201, 133), (201, 134), (201, 135), (201, 136), (201, 137), (201, 139), (201, 146), (201, 151), (201, 154), (202, 122), (202, 124), (202, 125), (202, 126), (202, 127), (202, 128), (202, 129), (202, 130), (202, 131), (202, 132), (202, 133), (202, 134), (202, 135), (202, 136), (202, 138), (202, 148), (202, 151), (202, 154), (203, 115), (203, 117), (203, 118), (203, 119), (203, 120), (203, 122), (203, 123), (203, 124), (203, 125), (203, 126), (203, 127), (203, 128), (203, 129), (203, 130), (203, 131), (203, 132), (203, 133), (203, 134), (203, 135), (203, 136), (203, 137), (203, 139), (203, 143), (204, 113), (204, 122), (204, 123), (204, 124), (204, 125), (204, 126), (204, 127), (204, 128), (204, 129), (204, 130), (204, 131),
(204, 132), (204, 133), (204, 134), (204, 135), (204, 136), (204, 137), (204, 139), (204, 143), (205, 111), (205, 115), (205, 116), (205, 117), (205, 118), (205, 119), (205, 120), (205, 121), (205, 122), (205, 123), (205, 124), (205, 125), (205, 126), (205, 127), (205, 128), (205, 129), (205, 130), (205, 131), (205, 132), (205, 133), (205, 134), (205, 135), (205, 136), (205, 143), (206, 109), (206, 113), (206, 114), (206, 115), (206, 116), (206, 117), (206, 118), (206, 119), (206, 120), (206, 121), (206, 122), (206, 123), (206, 124), (206, 125), (206, 126), (206, 127), (206, 128), (206, 129), (206, 130), (206, 131), (206, 132), (206, 133), (207, 106), (207, 107), (207, 111), (207, 112), (207, 113), (207, 114), (207, 115), (207, 116), (207, 117), (207, 118), (207, 119), (207, 120), (207, 121), (207, 122), (207, 123), (207, 124), (207, 125), (207, 126),
(207, 127), (207, 128), (207, 129), (207, 130), (207, 131), (207, 132), (207, 133), (207, 135), (207, 136), (207, 138), (208, 101), (208, 103), (208, 104), (208, 108), (208, 109), (208, 110), (208, 111), (208, 112), (208, 113), (208, 114), (208, 115), (208, 116), (208, 117), (208, 118), (208, 119), (208, 120), (208, 121), (208, 122), (208, 123), (208, 124), (208, 125), (208, 126), (208, 127), (208, 128), (208, 129), (208, 130), (208, 131), (208, 133), (208, 138), (209, 101), (209, 105), (209, 106), (209, 107), (209, 108), (209, 109), (209, 110), (209, 111), (209, 112), (209, 113), (209, 114), (209, 115), (209, 116), (209, 117), (209, 118), (209, 119), (209, 120), (209, 121), (209, 122), (209, 123), (209, 124), (209, 125), (209, 126), (209, 127), (209, 128), (209, 129), (209, 130), (209, 131), (209, 133), (210, 102), (210, 104), (210, 105), (210, 106),
(210, 107), (210, 108), (210, 109), (210, 110), (210, 111), (210, 112), (210, 113), (210, 114), (210, 115), (210, 116), (210, 117), (210, 118), (210, 119), (210, 120), (210, 121), (210, 122), (210, 123), (210, 124), (210, 125), (210, 126), (210, 127), (210, 130), (210, 132), (211, 102), (211, 104), (211, 105), (211, 106), (211, 107), (211, 108), (211, 109), (211, 110), (211, 111), (211, 112), (211, 113), (211, 114), (211, 115), (211, 116), (211, 117), (211, 118), (211, 119), (211, 120), (211, 121), (211, 122), (211, 123), (211, 124), (211, 125), (211, 126), (211, 127), (211, 132), (212, 103), (212, 105), (212, 106), (212, 107), (212, 108), (212, 109), (212, 110), (212, 111), (212, 112), (212, 113), (212, 114), (212, 115), (212, 116), (212, 117), (212, 118), (212, 119), (212, 120), (212, 121), (212, 122), (212, 123), (212, 124), (212, 125), (212, 127),
(212, 130), (212, 132), (213, 104), (213, 106), (213, 107), (213, 108), (213, 109), (213, 110), (213, 111), (213, 112), (213, 113), (213, 114), (213, 115), (213, 116), (213, 117), (213, 118), (213, 119), (213, 120), (213, 127), (214, 105), (214, 107), (214, 108), (214, 109), (214, 110), (214, 111), (214, 112), (214, 113), (214, 114), (214, 115), (214, 116), (214, 121), (214, 122), (214, 123), (214, 124), (214, 125), (214, 126), (214, 127), (215, 108), (215, 109), (215, 110), (215, 117), (215, 118), (215, 119), (215, 120), (215, 130), (216, 106), (216, 109), (216, 112), (216, 113), (216, 114), (216, 115), (216, 116), (216, 137), (216, 139), (217, 110), (217, 122), (217, 135), (217, 138), (218, 109), (218, 119), (218, 122), (219, 115), (219, 117), (219, 122), (219, 129), (220, 114), (220, 118), (220, 119), (220, 120), (220, 122), (220, 129), (220, 131),
(221, 112), (221, 117), (221, 129), (222, 114), (222, 116), (223, 139), (224, 138), (224, 139), (225, 137), (225, 138), )
coordinates_708090 = ((124, 175),
(124, 177), (124, 178), (124, 180), (125, 174), (125, 177), (126, 173), (126, 176), (127, 171), (127, 175), (128, 167), (128, 169), (128, 173), (128, 175), (129, 165), (129, 170), (129, 171), (129, 172), (129, 173), (129, 175), (130, 163), (130, 167), (130, 168), (130, 169), (130, 170), (130, 171), (130, 172), (130, 173), (130, 175), (131, 162), (131, 165), (131, 166), (131, 167), (131, 168), (131, 169), (131, 170), (131, 171), (131, 172), (131, 173), (131, 175), (132, 162), (132, 164), (132, 165), (132, 166), (132, 167), (132, 168), (132, 169), (132, 170), (132, 171), (132, 172), (132, 173), (132, 175), (133, 161), (133, 163), (133, 164), (133, 165), (133, 166), (133, 167), (133, 168), (133, 169), (133, 170), (133, 171), (133, 172), (133, 173), (133, 174), (133, 176), (134, 160), (134, 162), (134, 163), (134, 164), (134, 165), (134, 166), (134, 167),
(134, 168), (134, 169), (134, 170), (134, 171), (134, 172), (134, 173), (134, 174), (134, 176), (135, 159), (135, 161), (135, 162), (135, 163), (135, 164), (135, 165), (135, 166), (135, 167), (135, 168), (135, 169), (135, 170), (135, 171), (135, 172), (135, 173), (135, 174), (135, 175), (135, 177), (136, 159), (136, 161), (136, 162), (136, 163), (136, 164), (136, 165), (136, 166), (136, 167), (136, 168), (136, 169), (136, 170), (136, 171), (136, 172), (136, 173), (136, 174), (136, 175), (136, 177), (137, 158), (137, 160), (137, 161), (137, 162), (137, 163), (137, 164), (137, 165), (137, 166), (137, 167), (137, 168), (137, 169), (137, 170), (137, 171), (137, 172), (137, 173), (137, 174), (137, 175), (137, 176), (137, 178), (138, 158), (138, 160), (138, 161), (138, 162), (138, 163), (138, 164), (138, 165), (138, 166), (138, 167), (138, 168), (138, 169),
(138, 170), (138, 171), (138, 172), (138, 173), (138, 174), (138, 175), (138, 176), (138, 178), (139, 157), (139, 159), (139, 160), (139, 161), (139, 162), (139, 163), (139, 164), (139, 165), (139, 166), (139, 167), (139, 168), (139, 169), (139, 170), (139, 171), (139, 172), (139, 173), (139, 174), (139, 175), (139, 176), (139, 177), (139, 179), (140, 156), (140, 158), (140, 159), (140, 160), (140, 161), (140, 162), (140, 163), (140, 164), (140, 165), (140, 166), (140, 167), (140, 168), (140, 169), (140, 170), (140, 171), (140, 172), (140, 173), (140, 174), (140, 175), (140, 176), (140, 177), (140, 179), (141, 156), (141, 158), (141, 159), (141, 160), (141, 161), (141, 162), (141, 163), (141, 164), (141, 165), (141, 166), (141, 167), (141, 168), (141, 169), (141, 170), (141, 171), (141, 172), (141, 173), (141, 174), (141, 175), (141, 176), (141, 177),
(141, 178), (141, 180), (142, 156), (142, 157), (142, 158), (142, 159), (142, 160), (142, 161), (142, 162), (142, 163), (142, 164), (142, 165), (142, 166), (142, 167), (142, 168), (142, 169), (142, 170), (142, 171), (142, 172), (142, 173), (142, 174), (142, 175), (142, 176), (142, 177), (142, 178), (142, 179), (142, 181), (143, 155), (143, 157), (143, 158), (143, 159), (143, 160), (143, 161), (143, 162), (143, 163), (143, 164), (143, 165), (143, 166), (143, 167), (143, 168), (143, 169), (143, 170), (143, 171), (143, 172), (143, 173), (143, 174), (143, 175), (143, 176), (143, 177), (143, 178), (143, 179), (143, 181), (144, 155), (144, 159), (144, 160), (144, 161), (144, 162), (144, 163), (144, 164), (144, 165), (144, 166), (144, 167), (144, 168), (144, 169), (144, 170), (144, 171), (144, 172), (144, 173), (144, 174), (144, 175), (144, 176), (144, 177),
(144, 178), (144, 179), (144, 180), (144, 182), (145, 154), (145, 157), (145, 159), (145, 160), (145, 161), (145, 162), (145, 163), (145, 164), (145, 165), (145, 166), (145, 167), (145, 168), (145, 169), (145, 170), (145, 171), (145, 172), (145, 173), (145, 174), (145, 175), (145, 176), (145, 177), (145, 178), (145, 179), (145, 180), (145, 181), (145, 183), (146, 154), (146, 159), (146, 161), (146, 162), (146, 163), (146, 164), (146, 165), (146, 166), (146, 167), (146, 168), (146, 169), (146, 170), (146, 171), (146, 172), (146, 173), (146, 174), (146, 175), (146, 176), (146, 177), (146, 178), (146, 179), (146, 180), (146, 181), (146, 183), (147, 154), (147, 159), (147, 161), (147, 162), (147, 163), (147, 164), (147, 165), (147, 166), (147, 167), (147, 168), (147, 169), (147, 170), (147, 171), (147, 172), (147, 173), (147, 174), (147, 175), (147, 176),
(147, 177), (147, 178), (147, 179), (147, 180), (147, 181), (147, 182), (147, 184), (148, 153), (148, 159), (148, 161), (148, 162), (148, 163), (148, 164), (148, 165), (148, 166), (148, 167), (148, 168), (148, 169), (148, 170), (148, 171), (148, 172), (148, 173), (148, 174), (148, 175), (148, 176), (148, 177), (148, 178), (148, 179), (148, 180), (148, 181), (148, 182), (148, 184), (149, 159), (149, 161), (149, 162), (149, 163), (149, 164), (149, 165), (149, 166), (149, 167), (149, 168), (149, 169), (149, 170), (149, 171), (149, 172), (149, 173), (149, 174), (149, 175), (149, 176), (149, 177), (149, 178), (149, 179), (149, 180), (149, 181), (149, 182), (149, 184), (150, 158), (150, 160), (150, 161), (150, 162), (150, 163), (150, 164), (150, 165), (150, 166), (150, 167), (150, 168), (150, 169), (150, 170), (150, 171), (150, 172), (150, 173), (150, 174),
(150, 175), (150, 176), (150, 177), (150, 178), (150, 179), (150, 180), (150, 181), (150, 182), (150, 184), (151, 156), (151, 159), (151, 160), (151, 161), (151, 162), (151, 163), (151, 164), (151, 165), (151, 166), (151, 167), (151, 168), (151, 169), (151, 170), (151, 171), (151, 172), (151, 173), (151, 174), (151, 175), (151, 176), (151, 177), (151, 178), (151, 179), (151, 180), (151, 181), (151, 182), (151, 184), (152, 153), (152, 154), (152, 158), (152, 159), (152, 160), (152, 161), (152, 162), (152, 163), (152, 164), (152, 165), (152, 166), (152, 167), (152, 168), (152, 169), (152, 170), (152, 171), (152, 172), (152, 173), (152, 174), (152, 175), (152, 176), (152, 177), (152, 178), (152, 179), (152, 180), (152, 181), (152, 182), (152, 183), (152, 185), (153, 151), (153, 152), (153, 155), (153, 156), (153, 157), (153, 158), (153, 159), (153, 160),
(153, 161), (153, 162), (153, 163), (153, 164), (153, 165), (153, 166), (153, 167), (153, 168), (153, 169), (153, 170), (153, 171), (153, 172), (153, 173), (153, 174), (153, 175), (153, 176), (153, 177), (153, 178), (153, 179), (153, 180), (153, 181), (153, 182), (153, 183), (153, 184), (153, 186), (154, 149), (154, 153), (154, 154), (154, 155), (154, 156), (154, 157), (154, 158), (154, 159), (154, 160), (154, 161), (154, 162), (154, 163), (154, 164), (154, 165), (154, 166), (154, 167), (154, 168), (154, 169), (154, 170), (154, 171), (154, 172), (154, 173), (154, 174), (154, 175), (154, 176), (154, 177), (154, 178), (154, 179), (154, 180), (154, 181), (154, 182), (154, 183), (154, 184), (154, 186), (155, 147), (155, 154), (155, 155), (155, 156), (155, 157), (155, 158), (155, 159), (155, 160), (155, 161), (155, 162), (155, 163), (155, 164), (155, 165),
(155, 166), (155, 167), (155, 168), (155, 169), (155, 170), (155, 171), (155, 172), (155, 173), (155, 174), (155, 175), (155, 176), (155, 177), (155, 178), (155, 179), (155, 180), (155, 181), (155, 182), (155, 183), (155, 184), (155, 186), (156, 146), (156, 148), (156, 149), (156, 150), (156, 151), (156, 152), (156, 153), (156, 178), (156, 179), (156, 180), (156, 181), (156, 182), (156, 183), (156, 184), (156, 186), (157, 154), (157, 155), (157, 156), (157, 157), (157, 158), (157, 159), (157, 160), (157, 161), (157, 162), (157, 163), (157, 164), (157, 165), (157, 166), (157, 167), (157, 168), (157, 169), (157, 170), (157, 171), (157, 172), (157, 173), (157, 174), (157, 175), (157, 176), (157, 177), (157, 178), (157, 184), (157, 186), (158, 178), (158, 179), (158, 180), (158, 181), (158, 182), (158, 183), (158, 184), (158, 185), (158, 186), (159, 184),
(160, 184), (160, 185), (161, 157), (161, 158), (161, 159), (161, 160), (161, 161), (161, 162), (161, 163), (161, 164), (161, 165), (161, 166), (161, 167), (161, 168), (161, 169), (161, 170), (161, 171), (161, 172), (161, 182), (161, 185), (162, 146), (162, 149), (162, 150), (162, 151), (162, 152), (162, 153), (162, 154), (162, 155), (162, 156), (162, 173), (162, 174), (162, 175), (162, 179), (162, 180), (162, 181), (162, 185), (163, 147), (163, 157), (163, 158), (163, 159), (163, 160), (163, 161), (163, 162), (163, 163), (163, 164), (163, 165), (163, 166), (163, 167), (163, 168), (163, 169), (163, 170), (163, 171), (163, 172), (163, 176), (163, 177), (163, 178), (163, 182), (163, 183), (163, 185), (164, 148), (164, 149), (164, 151), (164, 152), (164, 153), (164, 154), (164, 155), (164, 156), (164, 157), (164, 158), (164, 159), (164, 160), (164, 161),
(164, 162), (164, 163), (164, 164), (164, 165), (164, 166), (164, 167), (164, 168), (164, 169), (164, 170), (164, 171), (164, 172), (164, 173), (164, 174), (164, 175), (164, 179), (164, 180), (164, 181), (164, 182), (164, 183), (164, 185), (165, 150), (165, 153), (165, 154), (165, 155), (165, 156), (165, 157), (165, 158), (165, 159), (165, 160), (165, 161), (165, 162), (165, 163), (165, 164), (165, 165), (165, 166), (165, 167), (165, 168), (165, 169), (165, 170), (165, 171), (165, 172), (165, 173), (165, 174), (165, 175), (165, 176), (165, 177), (165, 178), (165, 179), (165, 180), (165, 181), (165, 182), (165, 183), (165, 185), (166, 151), (166, 152), (166, 154), (166, 155), (166, 156), (166, 157), (166, 158), (166, 159), (166, 160), (166, 161), (166, 162), (166, 163), (166, 164), (166, 165), (166, 166), (166, 167), (166, 168), (166, 169), (166, 170),
(166, 171), (166, 172), (166, 173), (166, 174), (166, 175), (166, 176), (166, 177), (166, 178), (166, 179), (166, 180), (166, 181), (166, 182), (166, 183), (166, 185), (167, 153), (167, 156), (167, 157), (167, 158), (167, 159), (167, 160), (167, 161), (167, 162), (167, 163), (167, 164), (167, 165), (167, 166), (167, 167), (167, 168), (167, 169), (167, 170), (167, 171), (167, 172), (167, 173), (167, 174), (167, 175), (167, 176), (167, 177), (167, 178), (167, 179), (167, 180), (167, 181), (167, 182), (167, 183), (167, 185), (168, 154), (168, 155), (168, 156), (168, 157), (168, 158), (168, 159), (168, 160), (168, 161), (168, 162), (168, 163), (168, 164), (168, 165), (168, 166), (168, 167), (168, 168), (168, 169), (168, 170), (168, 171), (168, 172), (168, 173), (168, 174), (168, 175), (168, 176), (168, 177), (168, 178), (168, 179), (168, 180), (168, 181),
(168, 182), (168, 183), (168, 185), (169, 156), (169, 158), (169, 159), (169, 160), (169, 161), (169, 162), (169, 163), (169, 164), (169, 165), (169, 166), (169, 167), (169, 168), (169, 169), (169, 170), (169, 171), (169, 172), (169, 173), (169, 174), (169, 175), (169, 176), (169, 177), (169, 178), (169, 179), (169, 180), (169, 181), (169, 182), (169, 183), (169, 185), (170, 156), (170, 158), (170, 159), (170, 160), (170, 161), (170, 162), (170, 163), (170, 164), (170, 165), (170, 166), (170, 167), (170, 168), (170, 169), (170, 170), (170, 171), (170, 172), (170, 173), (170, 174), (170, 175), (170, 176), (170, 177), (170, 178), (170, 179), (170, 180), (170, 181), (170, 182), (170, 183), (170, 184), (170, 185), (171, 155), (171, 156), (171, 157), (171, 158), (171, 159), (171, 160), (171, 161), (171, 162), (171, 163), (171, 164), (171, 165), (171, 166),
(171, 167), (171, 168), (171, 169), (171, 170), (171, 171), (171, 172), (171, 173), (171, 174), (171, 175), (171, 176), (171, 177), (171, 178), (171, 179), (171, 180), (171, 181), (171, 182), (171, 184), (172, 153), (172, 156), (172, 157), (172, 158), (172, 159), (172, 160), (172, 161), (172, 162), (172, 163), (172, 164), (172, 165), (172, 166), (172, 167), (172, 168), (172, 169), (172, 170), (172, 171), (172, 172), (172, 173), (172, 174), (172, 175), (172, 176), (172, 177), (172, 178), (172, 179), (172, 180), (172, 181), (172, 182), (172, 184), (173, 153), (173, 155), (173, 156), (173, 157), (173, 158), (173, 159), (173, 160), (173, 161), (173, 162), (173, 163), (173, 164), (173, 165), (173, 166), (173, 167), (173, 168), (173, 169), (173, 170), (173, 171), (173, 172), (173, 173), (173, 174), (173, 175), (173, 176), (173, 177), (173, 178), (173, 179),
(173, 180), (173, 181), (173, 183), (174, 153), (174, 155), (174, 156), (174, 157), (174, 158), (174, 159), (174, 160), (174, 161), (174, 162), (174, 163), (174, 164), (174, 165), (174, 166), (174, 167), (174, 168), (174, 169), (174, 170), (174, 171), (174, 172), (174, 173), (174, 174), (174, 175), (174, 176), (174, 177), (174, 178), (174, 179), (174, 180), (174, 181), (174, 183), (175, 153), (175, 154), (175, 155), (175, 156), (175, 157), (175, 158), (175, 159), (175, 160), (175, 161), (175, 162), (175, 163), (175, 164), (175, 165), (175, 166), (175, 167), (175, 168), (175, 169), (175, 170), (175, 171), (175, 172), (175, 173), (175, 174), (175, 175), (175, 176), (175, 177), (175, 178), (175, 179), (175, 180), (175, 182), (176, 154), (176, 156), (176, 157), (176, 158), (176, 159), (176, 160), (176, 161), (176, 162), (176, 163), (176, 164), (176, 165),
(176, 166), (176, 167), (176, 168), (176, 169), (176, 170), (176, 171), (176, 172), (176, 173), (176, 174), (176, 175), (176, 176), (176, 177), (176, 178), (176, 179), (176, 180), (176, 181), (177, 154), (177, 156), (177, 157), (177, 158), (177, 159), (177, 160), (177, 161), (177, 162), (177, 163), (177, 164), (177, 165), (177, 166), (177, 167), (177, 168), (177, 169), (177, 170), (177, 171), (177, 172), (177, 173), (177, 174), (177, 175), (177, 176), (177, 177), (177, 178), (177, 179), (177, 181), (178, 154), (178, 156), (178, 157), (178, 158), (178, 159), (178, 160), (178, 161), (178, 162), (178, 163), (178, 164), (178, 165), (178, 166), (178, 167), (178, 168), (178, 169), (178, 170), (178, 171), (178, 172), (178, 173), (178, 174), (178, 175), (178, 176), (178, 177), (178, 178), (178, 180), (179, 154), (179, 156), (179, 157), (179, 158), (179, 159),
(179, 160), (179, 161), (179, 162), (179, 163), (179, 164), (179, 165), (179, 166), (179, 167), (179, 168), (179, 169), (179, 170), (179, 171), (179, 172), (179, 173), (179, 174), (179, 175), (179, 176), (179, 177), (179, 178), (179, 180), (180, 154), (180, 156), (180, 157), (180, 158), (180, 159), (180, 160), (180, 161), (180, 162), (180, 163), (180, 164), (180, 165), (180, 166), (180, 167), (180, 168), (180, 169), (180, 170), (180, 171), (180, 172), (180, 173), (180, 174), (180, 175), (180, 176), (180, 177), (180, 179), (181, 155), (181, 157), (181, 158), (181, 159), (181, 160), (181, 161), (181, 162), (181, 163), (181, 164), (181, 165), (181, 166), (181, 167), (181, 168), (181, 169), (181, 170), (181, 171), (181, 172), (181, 173), (181, 174), (181, 175), (181, 176), (181, 177), (181, 178), (182, 155), (182, 157), (182, 158), (182, 159), (182, 160),
(182, 161), (182, 162), (182, 163), (182, 164), (182, 165), (182, 166), (182, 167), (182, 168), (182, 169), (182, 170), (182, 171), (182, 172), (182, 173), (182, 174), (182, 175), (182, 176), (182, 178), (183, 156), (183, 159), (183, 160), (183, 161), (183, 162), (183, 163), (183, 164), (183, 165), (183, 166), (183, 167), (183, 168), (183, 169), (183, 170), (183, 171), (183, 172), (183, 173), (183, 174), (183, 175), (183, 177), (184, 157), (184, 160), (184, 161), (184, 162), (184, 163), (184, 164), (184, 165), (184, 166), (184, 167), (184, 168), (184, 169), (184, 170), (184, 171), (184, 172), (184, 173), (184, 174), (184, 175), (184, 177), (185, 158), (185, 161), (185, 162), (185, 163), (185, 164), (185, 165), (185, 166), (185, 167), (185, 168), (185, 169), (185, 170), (185, 171), (185, 172), (185, 173), (185, 174), (185, 175), (185, 177), (186, 159),
(186, 162), (186, 163), (186, 164), (186, 165), (186, 166), (186, 167), (186, 168), (186, 169), (186, 170), (186, 171), (186, 172), (186, 173), (186, 174), (186, 176), (187, 163), (187, 164), (187, 165), (187, 166), (187, 167), (187, 168), (187, 169), (187, 170), (187, 171), (187, 172), (187, 173), (187, 174), (187, 176), (188, 162), (188, 165), (188, 166), (188, 167), (188, 168), (188, 169), (188, 170), (188, 171), (188, 172), (188, 173), (188, 174), (188, 176), (189, 163), (189, 164), (189, 168), (189, 169), (189, 170), (189, 171), (189, 172), (189, 173), (189, 175), (190, 165), (190, 167), (190, 171), (190, 172), (190, 173), (190, 175), (191, 169), (191, 170), (191, 175), (192, 171), (192, 173), (192, 175), (193, 175), (195, 179), )
coordinates_9F2DEB = ((78, 159),
(78, 161), (78, 163), (79, 146), (79, 148), (79, 149), (79, 150), (79, 151), (79, 152), (79, 157), (79, 158), (79, 165), (80, 144), (80, 153), (80, 154), (80, 155), (80, 156), (80, 159), (80, 160), (80, 161), (80, 162), (80, 163), (80, 165), (81, 142), (81, 146), (81, 147), (81, 148), (81, 149), (81, 150), (81, 151), (81, 152), (81, 157), (81, 158), (81, 159), (81, 160), (81, 161), (81, 162), (81, 163), (81, 165), (82, 140), (82, 141), (82, 144), (82, 145), (82, 146), (82, 147), (82, 148), (82, 149), (82, 150), (82, 151), (82, 152), (82, 153), (82, 154), (82, 155), (82, 156), (82, 157), (82, 158), (82, 159), (82, 160), (82, 161), (82, 162), (82, 164), (83, 138), (83, 139), (83, 142), (83, 143), (83, 144), (83, 145), (83, 146), (83, 147), (83, 148), (83, 149), (83, 150), (83, 151),
(83, 152), (83, 153), (83, 154), (83, 155), (83, 156), (83, 157), (83, 158), (83, 159), (83, 160), (83, 161), (83, 162), (83, 164), (84, 136), (84, 137), (84, 140), (84, 141), (84, 142), (84, 143), (84, 144), (84, 145), (84, 146), (84, 147), (84, 148), (84, 149), (84, 150), (84, 151), (84, 152), (84, 153), (84, 154), (84, 155), (84, 156), (84, 157), (84, 158), (84, 159), (84, 160), (84, 161), (84, 163), (85, 134), (85, 135), (85, 138), (85, 141), (85, 142), (85, 143), (85, 144), (85, 145), (85, 146), (85, 147), (85, 148), (85, 149), (85, 150), (85, 151), (85, 152), (85, 153), (85, 154), (85, 155), (85, 156), (85, 157), (85, 158), (85, 159), (85, 160), (85, 162), (86, 133), (86, 135), (86, 140), (86, 142), (86, 143), (86, 144), (86, 145), (86, 146), (86, 147), (86, 148), (86, 149),
(86, 150), (86, 151), (86, 152), (86, 153), (86, 154), (86, 155), (86, 156), (86, 157), (86, 158), (86, 159), (86, 160), (86, 162), (87, 130), (87, 131), (87, 135), (87, 136), (87, 141), (87, 143), (87, 144), (87, 145), (87, 146), (87, 147), (87, 148), (87, 149), (87, 150), (87, 151), (87, 152), (87, 153), (87, 154), (87, 155), (87, 156), (87, 157), (87, 158), (87, 159), (87, 160), (87, 162), (88, 126), (88, 128), (88, 129), (88, 132), (88, 133), (88, 134), (88, 136), (88, 141), (88, 143), (88, 144), (88, 145), (88, 146), (88, 147), (88, 148), (88, 149), (88, 150), (88, 151), (88, 152), (88, 153), (88, 154), (88, 155), (88, 156), (88, 157), (88, 158), (88, 159), (88, 160), (88, 162), (89, 125), (89, 129), (89, 130), (89, 131), (89, 132), (89, 133), (89, 134), (89, 135), (89, 138),
(89, 142), (89, 144), (89, 145), (89, 146), (89, 147), (89, 148), (89, 149), (89, 150), (89, 151), (89, 152), (89, 153), (89, 154), (89, 155), (89, 156), (89, 157), (89, 158), (89, 159), (89, 160), (89, 162), (89, 163), (90, 128), (90, 131), (90, 132), (90, 133), (90, 134), (90, 135), (90, 136), (90, 138), (90, 142), (90, 144), (90, 145), (90, 146), (90, 147), (90, 148), (90, 149), (90, 150), (90, 151), (90, 152), (90, 153), (90, 154), (90, 155), (90, 156), (90, 157), (90, 158), (90, 159), (90, 160), (90, 161), (90, 163), (91, 129), (91, 132), (91, 133), (91, 134), (91, 135), (91, 136), (91, 138), (91, 142), (91, 144), (91, 145), (91, 146), (91, 147), (91, 148), (91, 149), (91, 150), (91, 151), (91, 152), (91, 153), (91, 154), (91, 155), (91, 156), (91, 157), (91, 158), (91, 159),
(91, 160), (91, 161), (91, 163), (92, 118), (92, 120), (92, 121), (92, 122), (92, 124), (92, 131), (92, 133), (92, 134), (92, 135), (92, 136), (92, 137), (92, 139), (92, 142), (92, 144), (92, 145), (92, 146), (92, 147), (92, 148), (92, 149), (92, 150), (92, 151), (92, 152), (92, 153), (92, 154), (92, 155), (92, 156), (92, 157), (92, 158), (92, 159), (92, 160), (92, 161), (92, 163), (93, 122), (93, 123), (93, 125), (93, 132), (93, 134), (93, 135), (93, 136), (93, 137), (93, 138), (93, 140), (93, 141), (93, 142), (93, 143), (93, 144), (93, 145), (93, 146), (93, 147), (93, 148), (93, 149), (93, 150), (93, 151), (93, 152), (93, 153), (93, 154), (93, 155), (93, 156), (93, 157), (93, 158), (93, 159), (93, 160), (93, 161), (93, 162), (94, 117), (94, 119), (94, 122), (94, 124), (94, 133),
(94, 135), (94, 136), (94, 137), (94, 138), (94, 139), (94, 142), (94, 143), (94, 144), (94, 145), (94, 146), (94, 147), (94, 148), (94, 149), (94, 150), (94, 151), (94, 152), (94, 153), (94, 154), (94, 155), (94, 156), (94, 157), (94, 158), (94, 159), (94, 160), (94, 162), (95, 120), (95, 121), (95, 125), (95, 128), (95, 133), (95, 135), (95, 136), (95, 137), (95, 139), (95, 142), (95, 144), (95, 145), (95, 146), (95, 147), (95, 148), (95, 149), (95, 150), (95, 151), (95, 152), (95, 153), (95, 154), (95, 155), (95, 156), (95, 157), (95, 158), (95, 159), (95, 160), (95, 162), (96, 124), (96, 129), (96, 130), (96, 131), (96, 132), (96, 133), (96, 134), (96, 135), (96, 136), (96, 137), (96, 138), (96, 139), (96, 142), (96, 143), (96, 146), (96, 147), (96, 148), (96, 149), (96, 150),
(96, 151), (96, 152), (96, 153), (96, 154), (96, 155), (96, 156), (96, 157), (96, 158), (96, 159), (96, 161), (97, 117), (97, 126), (97, 127), (97, 128), (97, 132), (97, 133), (97, 134), (97, 135), (97, 136), (97, 137), (97, 138), (97, 139), (97, 140), (97, 141), (97, 142), (97, 144), (97, 145), (97, 146), (97, 147), (97, 148), (97, 149), (97, 150), (97, 151), (97, 152), (97, 153), (97, 154), (97, 155), (97, 156), (97, 157), (97, 158), (97, 159), (97, 161), (98, 118), (98, 120), (98, 127), (98, 129), (98, 130), (98, 131), (98, 132), (98, 133), (98, 134), (98, 135), (98, 136), (98, 137), (98, 138), (98, 139), (98, 140), (98, 142), (98, 146), (98, 148), (98, 149), (98, 150), (98, 151), (98, 152), (98, 153), (98, 154), (98, 155), (98, 156), (98, 157), (98, 158), (98, 159), (98, 161),
(99, 120), (99, 122), (99, 123), (99, 127), (99, 131), (99, 132), (99, 133), (99, 134), (99, 135), (99, 136), (99, 137), (99, 138), (99, 139), (99, 142), (99, 146), (99, 149), (99, 150), (99, 151), (99, 152), (99, 153), (99, 154), (99, 155), (99, 156), (99, 157), (99, 158), (99, 159), (99, 160), (99, 161), (100, 121), (100, 124), (100, 125), (100, 126), (100, 127), (100, 129), (100, 130), (100, 131), (100, 132), (100, 133), (100, 134), (100, 135), (100, 136), (100, 137), (100, 139), (100, 144), (100, 146), (100, 149), (100, 151), (100, 152), (100, 153), (100, 154), (100, 155), (100, 156), (100, 157), (100, 158), (100, 160), (101, 122), (101, 127), (101, 131), (101, 133), (101, 134), (101, 135), (101, 136), (101, 137), (101, 139), (101, 144), (101, 145), (101, 148), (101, 150), (101, 151), (101, 152), (101, 153), (101, 154),
(101, 155), (101, 156), (101, 157), (101, 158), (101, 160), (102, 124), (102, 127), (102, 131), (102, 133), (102, 134), (102, 135), (102, 136), (102, 137), (102, 140), (102, 144), (102, 145), (102, 148), (102, 149), (102, 150), (102, 151), (102, 152), (102, 153), (102, 154), (102, 155), (102, 156), (102, 157), (102, 158), (102, 160), (103, 125), (103, 127), (103, 131), (103, 137), (103, 141), (103, 143), (103, 144), (103, 145), (103, 146), (103, 147), (103, 148), (103, 149), (103, 150), (103, 151), (103, 152), (103, 153), (103, 154), (103, 155), (103, 156), (103, 157), (103, 158), (103, 160), (104, 129), (104, 133), (104, 135), (104, 136), (104, 138), (104, 139), (104, 140), (104, 143), (104, 144), (104, 145), (104, 146), (104, 147), (104, 148), (104, 149), (104, 150), (104, 151), (104, 152), (104, 153), (104, 154), (104, 155), (104, 156), (104, 157),
(104, 158), (104, 160), (105, 120), (105, 122), (105, 137), (105, 140), (105, 141), (105, 142), (105, 143), (105, 144), (105, 145), (105, 146), (105, 147), (105, 148), (105, 149), (105, 150), (105, 151), (105, 152), (105, 153), (105, 154), (105, 155), (105, 156), (105, 157), (105, 158), (105, 160), (106, 120), (106, 124), (106, 129), (106, 138), (106, 140), (106, 141), (106, 142), (106, 143), (106, 144), (106, 145), (106, 146), (106, 147), (106, 148), (106, 149), (106, 150), (106, 151), (106, 152), (106, 153), (106, 154), (106, 155), (106, 156), (106, 157), (106, 158), (106, 160), (107, 120), (107, 125), (107, 126), (107, 127), (107, 138), (107, 140), (107, 141), (107, 142), (107, 143), (107, 144), (107, 145), (107, 146), (107, 147), (107, 148), (107, 149), (107, 150), (107, 151), (107, 152), (107, 153), (107, 154), (107, 155), (107, 156), (107, 157),
(107, 159), (108, 119), (108, 120), (108, 121), (108, 122), (108, 123), (108, 124), (108, 139), (108, 141), (108, 142), (108, 145), (108, 146), (108, 147), (108, 148), (108, 149), (108, 150), (108, 151), (108, 152), (108, 153), (108, 154), (108, 155), (108, 156), (108, 157), (108, 159), (109, 139), (109, 141), (109, 144), (109, 145), (109, 146), (109, 147), (109, 148), (109, 149), (109, 150), (109, 151), (109, 152), (109, 153), (109, 154), (109, 155), (109, 156), (109, 158), (110, 139), (110, 142), (110, 146), (110, 155), (110, 156), (110, 158), (111, 139), (111, 141), (111, 146), (111, 148), (111, 149), (111, 150), (111, 151), (111, 152), (111, 153), (111, 155), (111, 156), (111, 157), (112, 146), (112, 155), (112, 157), (113, 155), (113, 157), (114, 143), (114, 156), (202, 141), (203, 141), (203, 146), (203, 153), (204, 141), (204, 146), (204, 148),
(204, 149), (204, 150), (204, 151), (204, 152), (204, 154), (205, 141), (205, 145), (205, 148), (205, 152), (205, 153), (206, 139), (206, 141), (206, 145), (206, 148), (206, 152), (206, 153), (207, 140), (207, 142), (207, 143), (207, 145), (207, 146), (207, 147), (207, 148), (207, 149), (207, 150), (207, 151), (207, 152), (207, 153), (207, 154), (207, 156), (208, 141), (208, 145), (208, 146), (208, 147), (208, 148), (208, 152), (208, 153), (208, 157), (209, 135), (209, 140), (209, 142), (209, 143), (209, 144), (209, 145), (209, 146), (209, 147), (209, 148), (209, 149), (209, 150), (209, 151), (209, 152), (209, 153), (209, 154), (209, 155), (209, 156), (209, 158), (210, 135), (210, 137), (210, 138), (210, 139), (210, 140), (210, 141), (210, 142), (210, 143), (210, 144), (210, 145), (210, 146), (210, 147), (210, 148), (210, 149), (210, 150), (210, 151),
(210, 152), (210, 153), (210, 154), (210, 155), (210, 156), (210, 158), (211, 135), (211, 138), (211, 141), (211, 143), (211, 144), (211, 145), (211, 146), (211, 147), (211, 148), (211, 149), (211, 150), (211, 151), (211, 152), (211, 153), (211, 154), (211, 155), (211, 156), (211, 158), (212, 134), (212, 136), (212, 138), (212, 141), (212, 143), (212, 144), (212, 145), (212, 146), (212, 147), (212, 148), (212, 149), (212, 150), (212, 151), (212, 152), (212, 153), (212, 154), (212, 155), (212, 156), (212, 158), (213, 129), (213, 134), (213, 135), (213, 138), (213, 141), (213, 143), (213, 144), (213, 145), (213, 146), (213, 147), (213, 148), (213, 149), (213, 150), (213, 151), (213, 152), (213, 153), (213, 154), (213, 155), (213, 156), (213, 158), (214, 110), (214, 132), (214, 134), (214, 137), (214, 141), (214, 142), (214, 143), (214, 144), (214, 145),
(214, 146), (214, 147), (214, 148), (214, 149), (214, 150), (214, 151), (214, 152), (214, 153), (214, 154), (214, 155), (214, 156), (214, 158), (215, 132), (215, 135), (215, 141), (215, 143), (215, 144), (215, 145), (215, 146), (215, 147), (215, 148), (215, 149), (215, 150), (215, 151), (215, 152), (215, 153), (215, 154), (215, 155), (215, 156), (215, 158), (216, 124), (216, 126), (216, 127), (216, 128), (216, 132), (216, 134), (216, 141), (216, 143), (216, 144), (216, 145), (216, 146), (216, 147), (216, 148), (216, 149), (216, 150), (216, 151), (216, 152), (216, 153), (216, 154), (216, 155), (216, 156), (216, 157), (216, 159), (217, 117), (217, 124), (217, 129), (217, 130), (217, 132), (217, 141), (217, 143), (217, 144), (217, 145), (217, 146), (217, 147), (217, 148), (217, 149), (217, 150), (217, 151), (217, 152), (217, 153), (217, 154), (217, 155),
(217, 156), (217, 157), (217, 159), (218, 112), (218, 113), (218, 132), (218, 140), (218, 141), (218, 142), (218, 143), (218, 144), (218, 145), (218, 146), (218, 147), (218, 148), (218, 149), (218, 150), (218, 151), (218, 152), (218, 153), (218, 154), (218, 155), (218, 156), (218, 157), (218, 159), (219, 111), (219, 112), (219, 127), (219, 133), (219, 135), (219, 136), (219, 137), (219, 138), (219, 141), (219, 142), (219, 143), (219, 144), (219, 145), (219, 146), (219, 147), (219, 148), (219, 149), (219, 150), (219, 151), (219, 152), (219, 153), (219, 154), (219, 155), (219, 156), (219, 157), (219, 159), (220, 124), (220, 127), (220, 133), (220, 141), (220, 142), (220, 143), (220, 144), (220, 145), (220, 146), (220, 147), (220, 148), (220, 149), (220, 150), (220, 151), (220, 152), (220, 153), (220, 154), (220, 155), (220, 156), (220, 157), (220, 158),
(220, 160), (221, 124), (221, 126), (221, 132), (221, 134), (221, 135), (221, 136), (221, 139), (221, 140), (221, 141), (221, 142), (221, 143), (221, 144), (221, 145), (221, 146), (221, 147), (221, 148), (221, 149), (221, 150), (221, 151), (221, 152), (221, 153), (221, 154), (221, 155), (221, 156), (221, 157), (221, 158), (221, 160), (222, 118), (222, 120), (222, 121), (222, 122), (222, 124), (222, 126), (222, 131), (222, 133), (222, 134), (222, 135), (222, 138), (222, 142), (222, 146), (222, 147), (222, 148), (222, 149), (222, 150), (222, 151), (222, 152), (222, 153), (222, 154), (222, 155), (222, 156), (222, 157), (222, 158), (222, 160), (223, 118), (223, 124), (223, 125), (223, 126), (223, 127), (223, 128), (223, 129), (223, 132), (223, 133), (223, 134), (223, 136), (223, 141), (223, 142), (223, 143), (223, 144), (223, 145), (223, 146), (223, 147),
(223, 148), (223, 149), (223, 150), (223, 151), (223, 152), (223, 153), (223, 154), (223, 155), (223, 156), (223, 157), (223, 158), (223, 159), (223, 161), (224, 118), (224, 126), (224, 131), (224, 132), (224, 133), (224, 135), (224, 141), (224, 147), (224, 148), (224, 149), (224, 150), (224, 151), (224, 152), (224, 153), (224, 154), (224, 155), (224, 156), (224, 157), (224, 158), (224, 159), (224, 161), (225, 119), (225, 121), (225, 122), (225, 123), (225, 124), (225, 125), (225, 126), (225, 127), (225, 128), (225, 129), (225, 130), (225, 131), (225, 132), (225, 133), (225, 135), (225, 140), (225, 142), (225, 146), (225, 147), (225, 148), (225, 149), (225, 150), (225, 151), (225, 152), (225, 153), (225, 154), (225, 155), (225, 156), (225, 157), (225, 158), (225, 159), (225, 160), (225, 162), (226, 126), (226, 128), (226, 129), (226, 130), (226, 131),
(226, 132), (226, 133), (226, 135), (226, 141), (226, 142), (226, 143), (226, 144), (226, 145), (226, 146), (226, 147), (226, 148), (226, 149), (226, 150), (226, 151), (226, 152), (226, 153), (226, 154), (226, 155), (226, 156), (226, 157), (226, 158), (226, 159), (226, 160), (226, 162), (227, 126), (227, 128), (227, 129), (227, 130), (227, 131), (227, 132), (227, 133), (227, 134), (227, 135), (227, 136), (227, 137), (227, 138), (227, 140), (227, 141), (227, 142), (227, 143), (227, 146), (227, 147), (227, 148), (227, 149), (227, 150), (227, 151), (227, 152), (227, 153), (227, 154), (227, 155), (227, 156), (227, 157), (227, 158), (227, 159), (227, 160), (227, 161), (227, 163), (228, 126), (228, 128), (228, 129), (228, 130), (228, 131), (228, 132), (228, 133), (228, 134), (228, 135), (228, 140), (228, 141), (228, 142), (228, 143), (228, 144), (228, 145),
(228, 146), (228, 147), (228, 148), (228, 149), (228, 150), (228, 151), (228, 152), (228, 153), (228, 154), (228, 155), (228, 156), (228, 157), (228, 158), (228, 159), (228, 160), (228, 161), (228, 163), (229, 127), (229, 129), (229, 130), (229, 131), (229, 132), (229, 133), (229, 134), (229, 135), (229, 136), (229, 137), (229, 138), (229, 139), (229, 140), (229, 141), (229, 142), (229, 143), (229, 144), (229, 145), (229, 146), (229, 147), (229, 148), (229, 149), (229, 150), (229, 151), (229, 152), (229, 153), (229, 154), (229, 155), (229, 156), (229, 157), (229, 158), (229, 159), (229, 160), (229, 161), (229, 162), (229, 164), (230, 128), (230, 131), (230, 132), (230, 133), (230, 134), (230, 135), (230, 136), (230, 137), (230, 138), (230, 139), (230, 140), (230, 141), (230, 142), (230, 143), (230, 144), (230, 145), (230, 146), (230, 147), (230, 148),
(230, 149), (230, 150), (230, 151), (230, 152), (230, 153), (230, 154), (230, 155), (230, 156), (230, 157), (230, 158), (230, 159), (230, 160), (230, 161), (230, 162), (230, 163), (230, 165), (231, 129), (231, 132), (231, 133), (231, 134), (231, 135), (231, 136), (231, 137), (231, 138), (231, 139), (231, 140), (231, 141), (231, 142), (231, 143), (231, 144), (231, 145), (231, 146), (231, 147), (231, 148), (231, 149), (231, 150), (231, 151), (231, 152), (231, 153), (231, 154), (231, 155), (231, 156), (231, 157), (231, 158), (231, 159), (231, 160), (231, 161), (231, 162), (231, 163), (231, 164), (231, 166), (232, 131), (232, 134), (232, 135), (232, 136), (232, 137), (232, 138), (232, 139), (232, 140), (232, 141), (232, 142), (232, 143), (232, 144), (232, 145), (232, 146), (232, 147), (232, 148), (232, 149), (232, 150), (232, 151), (232, 152), (232, 153),
(232, 154), (232, 155), (232, 156), (232, 157), (232, 158), (232, 159), (232, 160), (232, 161), (232, 162), (232, 163), (232, 164), (232, 165), (232, 167), (233, 133), (233, 137), (233, 138), (233, 139), (233, 140), (233, 141), (233, 142), (233, 143), (233, 144), (233, 145), (233, 146), (233, 147), (233, 148), (233, 149), (233, 150), (233, 151), (233, 152), (233, 153), (233, 154), (233, 155), (233, 156), (233, 157), (233, 158), (233, 159), (233, 160), (233, 161), (233, 162), (233, 163), (233, 164), (233, 165), (233, 166), (233, 168), (234, 135), (234, 139), (234, 140), (234, 141), (234, 142), (234, 143), (234, 144), (234, 145), (234, 146), (234, 147), (234, 148), (234, 149), (234, 150), (234, 151), (234, 152), (234, 153), (234, 154), (234, 155), (234, 156), (234, 157), (234, 158), (234, 159), (234, 160), (234, 161), (234, 162), (234, 163), (234, 164),
(234, 165), (234, 166), (234, 167), (234, 169), (235, 137), (235, 138), (235, 142), (235, 143), (235, 144), (235, 145), (235, 146), (235, 147), (235, 148), (235, 149), (235, 150), (235, 151), (235, 152), (235, 153), (235, 154), (235, 155), (235, 156), (235, 157), (235, 158), (235, 159), (235, 160), (235, 161), (235, 162), (235, 163), (235, 164), (235, 165), (235, 166), (235, 167), (235, 168), (235, 170), (236, 139), (236, 140), (236, 141), (236, 145), (236, 146), (236, 147), (236, 148), (236, 149), (236, 150), (236, 151), (236, 152), (236, 153), (236, 154), (236, 155), (236, 156), (236, 157), (236, 158), (236, 159), (236, 160), (236, 161), (236, 162), (236, 163), (236, 164), (236, 165), (236, 166), (236, 167), (236, 168), (236, 170), (237, 142), (237, 144), (237, 147), (237, 148), (237, 149), (237, 150), (237, 151), (237, 152), (237, 153), (237, 154),
(237, 155), (237, 156), (237, 157), (237, 158), (237, 159), (237, 160), (237, 161), (237, 162), (237, 163), (237, 164), (237, 165), (237, 166), (237, 167), (237, 168), (237, 169), (237, 171), (238, 145), (238, 146), (238, 150), (238, 151), (238, 152), (238, 153), (238, 154), (238, 155), (238, 156), (238, 157), (238, 158), (238, 159), (238, 160), (238, 161), (238, 162), (238, 163), (238, 164), (238, 165), (238, 166), (238, 167), (238, 171), (239, 148), (239, 149), (239, 152), (239, 153), (239, 154), (239, 155), (239, 165), (239, 166), (239, 169), (240, 150), (240, 151), (240, 156), (240, 157), (240, 158), (240, 159), (240, 160), (240, 161), (240, 162), (240, 163), (240, 167), (241, 153), (241, 155), (241, 166), )
coordinates_0000FF = ((119, 89),
(119, 90), (119, 92), (120, 84), (120, 86), (120, 87), (120, 93), (121, 82), (121, 85), (121, 86), (121, 87), (121, 88), (121, 89), (121, 90), (121, 91), (121, 95), (122, 81), (122, 83), (122, 92), (122, 93), (122, 96), (123, 80), (123, 81), (123, 95), (123, 98), (124, 80), (124, 96), (124, 99), (125, 79), (125, 97), (125, 101), (126, 78), (126, 98), (126, 101), (127, 99), (127, 102), (128, 77), (128, 100), (128, 102), (129, 76), (129, 101), (129, 103), (130, 76), (130, 102), (130, 104), (131, 75), (131, 102), (131, 104), (132, 75), (132, 103), (132, 105), (133, 103), (133, 106), (134, 74), (134, 104), (134, 107), (135, 74), (135, 104), (135, 107), (136, 73), (136, 74), (136, 104), (136, 107), (137, 73), (137, 74), (137, 105), (137, 107), (138, 72), (138, 105), (138, 107), (139, 72), (139, 73), (139, 105),
(139, 107), (140, 73), (140, 105), (140, 107), (141, 105), (141, 107), (142, 105), (142, 107), (143, 105), (143, 107), (144, 105), (144, 108), (145, 106), (145, 109), (146, 107), (146, 110), (147, 107), (148, 108), (148, 111), (149, 108), (149, 110), (149, 112), (150, 108), (150, 110), (150, 112), (151, 108), (151, 110), (151, 112), (152, 108), (152, 110), (152, 112), (153, 108), (153, 110), (153, 112), (154, 108), (154, 110), (154, 112), (155, 107), (155, 108), (155, 109), (155, 110), (155, 112), (156, 107), (156, 109), (156, 110), (156, 112), (157, 107), (157, 109), (157, 110), (157, 112), (158, 107), (158, 109), (158, 110), (158, 112), (159, 107), (159, 109), (159, 110), (159, 112), (160, 108), (160, 110), (160, 112), (161, 108), (161, 110), (161, 112), (162, 108), (162, 110), (162, 112), (163, 108), (163, 110), (163, 112), (164, 108), (164, 111),
(165, 108), (165, 111), (166, 108), (166, 111), (167, 108), (167, 111), (168, 108), (168, 111), (169, 108), (169, 111), (170, 108), (170, 110), (171, 107), (171, 110), (172, 108), (172, 110), (173, 105), (173, 107), (173, 108), (173, 110), (174, 105), (174, 107), (174, 108), (174, 110), (175, 105), (175, 107), (175, 109), (176, 105), (176, 107), (176, 109), (177, 105), (177, 107), (177, 109), (178, 105), (178, 107), (178, 109), (179, 104), (179, 106), (179, 107), (179, 108), (180, 104), (180, 106), (180, 108), (181, 75), (181, 104), (181, 108), (182, 75), (182, 103), (182, 107), (183, 75), (183, 103), (184, 102), (184, 104), (185, 76), (185, 102), (186, 76), (186, 101), (186, 103), (187, 77), (187, 100), (188, 78), (188, 99), (188, 102), (189, 79), (189, 98), (189, 101), (190, 80), (190, 96), (190, 99), (191, 81), (191, 82), (191, 94),
(191, 95), (191, 98), (192, 82), (192, 84), (192, 85), (192, 91), (192, 92), (192, 93), (192, 97), (193, 83), (193, 86), (193, 87), (193, 88), (193, 89), (193, 96), (194, 84), (194, 87), (194, 88), (194, 89), (194, 90), (194, 91), (194, 92), (194, 93), (194, 95), (195, 86), )
coordinates_27EED9 = ((123, 85),
(123, 86), (123, 87), (123, 88), (123, 89), (123, 90), (124, 83), (124, 91), (124, 93), (125, 81), (125, 85), (125, 86), (125, 87), (125, 88), (125, 89), (125, 90), (125, 94), (126, 80), (126, 83), (126, 84), (126, 85), (126, 86), (126, 87), (126, 88), (126, 89), (126, 90), (126, 91), (126, 92), (126, 93), (126, 96), (127, 79), (127, 81), (127, 82), (127, 83), (127, 84), (127, 85), (127, 86), (127, 87), (127, 88), (127, 89), (127, 90), (127, 91), (127, 92), (127, 93), (127, 94), (127, 97), (128, 79), (128, 81), (128, 82), (128, 83), (128, 84), (128, 85), (128, 86), (128, 87), (128, 88), (128, 89), (128, 90), (128, 91), (128, 92), (128, 93), (128, 94), (128, 95), (128, 96), (128, 98), (129, 78), (129, 80), (129, 81), (129, 82), (129, 83), (129, 84), (129, 85), (129, 86), (129, 87),
(129, 88), (129, 89), (129, 90), (129, 91), (129, 92), (129, 93), (129, 94), (129, 95), (129, 96), (129, 99), (130, 78), (130, 80), (130, 81), (130, 82), (130, 83), (130, 84), (130, 85), (130, 86), (130, 87), (130, 88), (130, 89), (130, 90), (130, 91), (130, 92), (130, 93), (130, 94), (130, 95), (130, 96), (130, 97), (130, 99), (131, 77), (131, 79), (131, 80), (131, 81), (131, 82), (131, 83), (131, 84), (131, 85), (131, 86), (131, 87), (131, 88), (131, 89), (131, 90), (131, 91), (131, 92), (131, 93), (131, 94), (131, 95), (131, 96), (131, 97), (131, 98), (131, 100), (132, 77), (132, 79), (132, 80), (132, 81), (132, 82), (132, 83), (132, 84), (132, 85), (132, 86), (132, 87), (132, 88), (132, 89), (132, 90), (132, 91), (132, 92), (132, 93), (132, 94), (132, 95), (132, 96), (132, 97),
(132, 98), (132, 100), (133, 77), (133, 79), (133, 80), (133, 81), (133, 82), (133, 83), (133, 84), (133, 85), (133, 86), (133, 87), (133, 88), (133, 89), (133, 90), (133, 91), (133, 92), (133, 93), (133, 94), (133, 95), (133, 96), (133, 97), (133, 98), (133, 99), (133, 101), (134, 76), (134, 78), (134, 79), (134, 80), (134, 81), (134, 82), (134, 83), (134, 84), (134, 85), (134, 86), (134, 87), (134, 88), (134, 89), (134, 90), (134, 91), (134, 92), (134, 93), (134, 94), (134, 95), (134, 96), (134, 97), (134, 98), (134, 99), (134, 101), (135, 76), (135, 78), (135, 79), (135, 80), (135, 81), (135, 82), (135, 83), (135, 84), (135, 85), (135, 86), (135, 87), (135, 88), (135, 89), (135, 90), (135, 91), (135, 92), (135, 93), (135, 94), (135, 95), (135, 96), (135, 97), (135, 98), (135, 99),
(135, 100), (135, 102), (136, 76), (136, 78), (136, 79), (136, 80), (136, 81), (136, 82), (136, 83), (136, 84), (136, 85), (136, 86), (136, 87), (136, 88), (136, 89), (136, 90), (136, 91), (136, 92), (136, 93), (136, 94), (136, 95), (136, 96), (136, 97), (136, 98), (136, 99), (136, 100), (136, 102), (137, 76), (137, 78), (137, 79), (137, 80), (137, 81), (137, 82), (137, 83), (137, 84), (137, 85), (137, 86), (137, 87), (137, 88), (137, 89), (137, 90), (137, 91), (137, 92), (137, 93), (137, 94), (137, 95), (137, 96), (137, 97), (137, 98), (137, 99), (137, 100), (137, 102), (138, 76), (138, 78), (138, 79), (138, 80), (138, 81), (138, 82), (138, 83), (138, 84), (138, 85), (138, 86), (138, 87), (138, 88), (138, 89), (138, 90), (138, 91), (138, 92), (138, 93), (138, 94), (138, 95), (138, 96),
(138, 97), (138, 98), (138, 99), (138, 100), (138, 102), (139, 76), (139, 78), (139, 79), (139, 80), (139, 81), (139, 82), (139, 83), (139, 84), (139, 85), (139, 86), (139, 87), (139, 88), (139, 89), (139, 90), (139, 91), (139, 92), (139, 93), (139, 94), (139, 95), (139, 96), (139, 97), (139, 98), (139, 99), (139, 100), (139, 101), (139, 103), (140, 76), (140, 78), (140, 79), (140, 80), (140, 81), (140, 82), (140, 83), (140, 84), (140, 85), (140, 86), (140, 87), (140, 88), (140, 89), (140, 90), (140, 91), (140, 92), (140, 93), (140, 94), (140, 95), (140, 96), (140, 97), (140, 98), (140, 99), (140, 100), (140, 101), (140, 103), (141, 76), (141, 79), (141, 80), (141, 81), (141, 82), (141, 83), (141, 84), (141, 85), (141, 86), (141, 87), (141, 88), (141, 89), (141, 90), (141, 91), (141, 92),
(141, 93), (141, 94), (141, 95), (141, 96), (141, 97), (141, 98), (141, 99), (141, 100), (141, 101), (141, 103), (142, 77), (142, 81), (142, 82), (142, 83), (142, 84), (142, 85), (142, 86), (142, 87), (142, 96), (142, 97), (142, 98), (142, 99), (142, 100), (142, 101), (142, 103), (143, 79), (143, 83), (143, 84), (143, 85), (143, 88), (143, 89), (143, 90), (143, 91), (143, 92), (143, 93), (143, 94), (143, 95), (143, 96), (143, 97), (143, 98), (143, 99), (143, 103), (144, 81), (144, 87), (144, 96), (144, 98), (144, 99), (144, 101), (145, 83), (145, 85), (145, 96), (145, 98), (145, 99), (146, 96), (146, 98), (147, 96), (147, 98), (148, 96), (148, 99), (149, 96), (149, 99), (150, 97), (150, 98), (165, 97), (165, 99), (166, 95), (166, 100), (167, 95), (167, 97), (167, 98), (167, 100), (168, 96),
(168, 98), (168, 100), (169, 96), (169, 98), (169, 100), (170, 84), (170, 86), (170, 88), (170, 96), (170, 97), (170, 100), (171, 82), (171, 90), (171, 96), (171, 98), (171, 99), (171, 101), (172, 80), (172, 84), (172, 85), (172, 86), (172, 87), (172, 88), (172, 91), (172, 92), (172, 93), (172, 94), (172, 96), (172, 97), (172, 98), (172, 99), (172, 100), (172, 103), (173, 79), (173, 82), (173, 83), (173, 84), (173, 85), (173, 86), (173, 87), (173, 88), (173, 89), (173, 90), (173, 96), (173, 97), (173, 98), (173, 99), (173, 100), (173, 101), (173, 103), (174, 77), (174, 80), (174, 81), (174, 82), (174, 83), (174, 84), (174, 85), (174, 86), (174, 87), (174, 88), (174, 89), (174, 90), (174, 91), (174, 92), (174, 93), (174, 94), (174, 95), (174, 96), (174, 97), (174, 98), (174, 99), (174, 100),
(174, 101), (174, 103), (175, 77), (175, 79), (175, 80), (175, 81), (175, 82), (175, 83), (175, 84), (175, 85), (175, 86), (175, 87), (175, 88), (175, 89), (175, 90), (175, 91), (175, 92), (175, 93), (175, 94), (175, 95), (175, 96), (175, 97), (175, 98), (175, 99), (175, 100), (175, 101), (175, 103), (176, 77), (176, 78), (176, 79), (176, 80), (176, 81), (176, 82), (176, 83), (176, 84), (176, 85), (176, 86), (176, 87), (176, 88), (176, 89), (176, 90), (176, 91), (176, 92), (176, 93), (176, 94), (176, 95), (176, 96), (176, 97), (176, 98), (176, 99), (176, 100), (176, 101), (176, 103), (177, 77), (177, 79), (177, 80), (177, 81), (177, 82), (177, 83), (177, 84), (177, 85), (177, 86), (177, 87), (177, 88), (177, 89), (177, 90), (177, 91), (177, 92), (177, 93), (177, 94), (177, 95), (177, 96),
(177, 97), (177, 98), (177, 99), (177, 100), (177, 101), (177, 103), (178, 77), (178, 79), (178, 80), (178, 81), (178, 82), (178, 83), (178, 84), (178, 85), (178, 86), (178, 87), (178, 88), (178, 89), (178, 90), (178, 91), (178, 92), (178, 93), (178, 94), (178, 95), (178, 96), (178, 97), (178, 98), (178, 99), (178, 100), (178, 102), (179, 77), (179, 79), (179, 80), (179, 81), (179, 82), (179, 83), (179, 84), (179, 85), (179, 86), (179, 87), (179, 88), (179, 89), (179, 90), (179, 91), (179, 92), (179, 93), (179, 94), (179, 95), (179, 96), (179, 97), (179, 98), (179, 99), (179, 100), (179, 102), (180, 77), (180, 79), (180, 80), (180, 81), (180, 82), (180, 83), (180, 84), (180, 85), (180, 86), (180, 87), (180, 88), (180, 89), (180, 90), (180, 91), (180, 92), (180, 93), (180, 94), (180, 95),
(180, 96), (180, 97), (180, 98), (180, 99), (180, 100), (180, 102), (181, 77), (181, 79), (181, 80), (181, 81), (181, 82), (181, 83), (181, 84), (181, 85), (181, 86), (181, 87), (181, 88), (181, 89), (181, 90), (181, 91), (181, 92), (181, 93), (181, 94), (181, 95), (181, 96), (181, 97), (181, 98), (181, 99), (181, 101), (182, 77), (182, 79), (182, 80), (182, 81), (182, 82), (182, 83), (182, 84), (182, 85), (182, 86), (182, 87), (182, 88), (182, 89), (182, 90), (182, 91), (182, 92), (182, 93), (182, 94), (182, 95), (182, 96), (182, 97), (182, 98), (182, 99), (182, 101), (183, 77), (183, 79), (183, 80), (183, 81), (183, 82), (183, 83), (183, 84), (183, 85), (183, 86), (183, 87), (183, 88), (183, 89), (183, 90), (183, 91), (183, 92), (183, 93), (183, 94), (183, 95), (183, 96), (183, 97),
(183, 98), (183, 99), (183, 100), (184, 78), (184, 80), (184, 81), (184, 82), (184, 83), (184, 84), (184, 85), (184, 86), (184, 87), (184, 88), (184, 89), (184, 90), (184, 91), (184, 92), (184, 93), (184, 94), (184, 95), (184, 96), (184, 97), (184, 98), (184, 100), (185, 78), (185, 80), (185, 81), (185, 82), (185, 83), (185, 84), (185, 85), (185, 86), (185, 87), (185, 88), (185, 89), (185, 90), (185, 91), (185, 92), (185, 93), (185, 94), (185, 95), (185, 96), (185, 97), (185, 99), (186, 79), (186, 81), (186, 82), (186, 83), (186, 84), (186, 85), (186, 86), (186, 87), (186, 88), (186, 89), (186, 90), (186, 91), (186, 92), (186, 93), (186, 94), (186, 95), (186, 96), (186, 98), (187, 82), (187, 83), (187, 84), (187, 85), (187, 86), (187, 87), (187, 88), (187, 89), (187, 90), (187, 91),
(187, 92), (187, 93), (187, 94), (187, 95), (187, 97), (188, 80), (188, 84), (188, 85), (188, 86), (188, 87), (188, 88), (188, 89), (188, 90), (188, 91), (188, 92), (188, 93), (188, 96), (189, 82), (189, 95), (190, 84), (190, 86), (190, 87), (190, 88), (190, 89), (190, 90), (190, 91), (190, 92), )
coordinates_07004B = ((128, 104),
(129, 105), (130, 106), (130, 107), (131, 107), (131, 109), (132, 108), (132, 111), (132, 112), (133, 108), (133, 109), (133, 114), (133, 115), (133, 116), (134, 109), (134, 111), (134, 112), (134, 117), (134, 118), (134, 119), (134, 120), (134, 121), (134, 123), (135, 109), (135, 111), (135, 112), (135, 113), (135, 114), (135, 115), (135, 116), (135, 124), (136, 109), (136, 111), (136, 112), (136, 113), (136, 114), (136, 115), (136, 116), (136, 117), (136, 118), (136, 119), (136, 120), (136, 121), (136, 122), (136, 124), (137, 109), (137, 111), (137, 112), (137, 113), (137, 114), (137, 115), (137, 116), (137, 117), (137, 118), (137, 119), (137, 120), (137, 121), (137, 122), (137, 123), (137, 125), (138, 109), (138, 111), (138, 112), (138, 113), (138, 114), (138, 115), (138, 116), (138, 117), (138, 118), (138, 119), (138, 120), (138, 121), (138, 122),
(138, 123), (138, 124), (138, 126), (139, 109), (139, 111), (139, 112), (139, 113), (139, 114), (139, 115), (139, 116), (139, 117), (139, 118), (139, 119), (139, 120), (139, 121), (139, 122), (139, 123), (139, 124), (139, 125), (139, 127), (140, 109), (140, 111), (140, 112), (140, 113), (140, 114), (140, 115), (140, 116), (140, 117), (140, 118), (140, 119), (140, 120), (140, 121), (140, 122), (140, 123), (140, 124), (140, 125), (140, 127), (141, 109), (141, 111), (141, 112), (141, 113), (141, 114), (141, 115), (141, 116), (141, 117), (141, 118), (141, 119), (141, 120), (141, 121), (141, 122), (141, 123), (141, 124), (141, 125), (141, 127), (142, 109), (142, 111), (142, 112), (142, 113), (142, 114), (142, 115), (142, 116), (142, 117), (142, 118), (142, 119), (142, 120), (142, 121), (142, 122), (142, 123), (142, 124), (142, 125), (142, 127), (143, 110),
(143, 112), (143, 113), (143, 114), (143, 115), (143, 116), (143, 117), (143, 118), (143, 119), (143, 120), (143, 121), (143, 122), (143, 123), (143, 124), (143, 125), (143, 127), (144, 113), (144, 114), (144, 115), (144, 116), (144, 117), (144, 118), (144, 119), (144, 120), (144, 121), (144, 122), (144, 123), (144, 124), (144, 125), (144, 127), (145, 111), (145, 113), (145, 114), (145, 115), (145, 116), (145, 117), (145, 118), (145, 119), (145, 120), (145, 121), (145, 122), (145, 123), (145, 124), (145, 125), (145, 127), (146, 112), (146, 114), (146, 115), (146, 116), (146, 117), (146, 118), (146, 119), (146, 120), (146, 121), (146, 122), (146, 123), (146, 124), (146, 125), (146, 127), (147, 113), (147, 115), (147, 116), (147, 117), (147, 118), (147, 119), (147, 120), (147, 121), (147, 122), (147, 123), (147, 124), (147, 125), (147, 126), (147, 127),
(148, 113), (148, 114), (148, 116), (148, 117), (148, 118), (148, 119), (148, 120), (148, 121), (148, 122), (148, 123), (148, 124), (148, 126), (149, 114), (149, 116), (149, 117), (149, 118), (149, 119), (149, 120), (149, 121), (149, 122), (149, 123), (149, 124), (149, 126), (150, 114), (150, 116), (150, 117), (150, 118), (150, 119), (150, 120), (150, 121), (150, 122), (150, 123), (150, 124), (150, 125), (150, 126), (150, 127), (151, 114), (151, 116), (151, 117), (151, 118), (151, 119), (151, 120), (151, 121), (151, 122), (151, 123), (151, 124), (151, 125), (151, 127), (152, 114), (152, 116), (152, 117), (152, 118), (152, 119), (152, 120), (152, 121), (152, 122), (152, 123), (152, 124), (152, 125), (152, 127), (153, 114), (153, 116), (153, 117), (153, 118), (153, 119), (153, 120), (153, 121), (153, 122), (153, 123), (153, 124), (153, 125), (153, 127),
(154, 114), (154, 116), (154, 117), (154, 118), (154, 119), (154, 120), (154, 121), (154, 122), (154, 123), (154, 124), (154, 125), (154, 126), (154, 128), (155, 114), (155, 116), (155, 117), (155, 118), (155, 119), (155, 120), (155, 121), (155, 122), (155, 123), (155, 124), (155, 125), (155, 126), (155, 128), (156, 114), (156, 116), (156, 117), (156, 118), (156, 119), (156, 120), (156, 121), (156, 122), (156, 123), (156, 124), (156, 125), (156, 126), (156, 128), (157, 114), (157, 116), (157, 117), (157, 118), (157, 119), (157, 120), (157, 121), (157, 122), (157, 123), (157, 124), (157, 125), (157, 126), (157, 128), (158, 114), (158, 116), (158, 117), (158, 118), (158, 119), (158, 120), (158, 121), (158, 122), (158, 123), (158, 124), (158, 125), (158, 126), (158, 128), (159, 114), (159, 116), (159, 117), (159, 118), (159, 119), (159, 120), (159, 121),
(159, 122), (159, 123), (159, 124), (159, 125), (159, 126), (159, 128), (160, 114), (160, 116), (160, 117), (160, 118), (160, 119), (160, 120), (160, 121), (160, 122), (160, 123), (160, 124), (160, 125), (160, 126), (160, 128), (161, 114), (161, 116), (161, 117), (161, 118), (161, 119), (161, 120), (161, 121), (161, 122), (161, 123), (161, 124), (161, 125), (161, 126), (161, 128), (162, 114), (162, 116), (162, 117), (162, 118), (162, 119), (162, 120), (162, 121), (162, 122), (162, 123), (162, 124), (162, 125), (162, 126), (162, 128), (163, 114), (163, 116), (163, 117), (163, 118), (163, 119), (163, 120), (163, 121), (163, 122), (163, 123), (163, 124), (163, 125), (163, 126), (163, 128), (164, 114), (164, 116), (164, 117), (164, 118), (164, 119), (164, 120), (164, 121), (164, 122), (164, 123), (164, 124), (164, 125), (164, 126), (164, 128), (165, 113),
(165, 114), (165, 115), (165, 116), (165, 117), (165, 118), (165, 119), (165, 120), (165, 121), (165, 122), (165, 123), (165, 124), (165, 125), (165, 126), (165, 128), (166, 113), (166, 115), (166, 116), (166, 117), (166, 118), (166, 119), (166, 120), (166, 121), (166, 122), (166, 123), (166, 124), (166, 125), (166, 127), (167, 113), (167, 115), (167, 116), (167, 117), (167, 118), (167, 119), (167, 120), (167, 121), (167, 122), (167, 123), (167, 124), (167, 126), (168, 113), (168, 115), (168, 116), (168, 117), (168, 118), (168, 119), (168, 120), (168, 121), (168, 122), (168, 123), (168, 124), (168, 126), (169, 113), (169, 115), (169, 116), (169, 117), (169, 118), (169, 119), (169, 120), (169, 121), (169, 122), (169, 123), (169, 124), (169, 126), (170, 113), (170, 115), (170, 116), (170, 117), (170, 118), (170, 119), (170, 120), (170, 121), (170, 122),
(170, 123), (170, 124), (170, 126), (171, 112), (171, 114), (171, 115), (171, 116), (171, 117), (171, 118), (171, 119), (171, 120), (171, 121), (171, 122), (171, 123), (171, 124), (171, 126), (172, 112), (172, 114), (172, 115), (172, 116), (172, 117), (172, 118), (172, 119), (172, 120), (172, 121), (172, 122), (172, 123), (172, 124), (172, 125), (172, 127), (173, 112), (173, 114), (173, 115), (173, 116), (173, 117), (173, 118), (173, 119), (173, 120), (173, 121), (173, 122), (173, 123), (173, 124), (173, 125), (173, 127), (174, 112), (174, 114), (174, 115), (174, 116), (174, 117), (174, 118), (174, 119), (174, 120), (174, 121), (174, 122), (174, 123), (174, 124), (174, 125), (174, 127), (175, 112), (175, 114), (175, 115), (175, 116), (175, 117), (175, 118), (175, 119), (175, 120), (175, 121), (175, 122), (175, 123), (175, 124), (175, 125), (175, 127),
(176, 111), (176, 113), (176, 114), (176, 115), (176, 116), (176, 117), (176, 118), (176, 119), (176, 120), (176, 121), (176, 122), (176, 123), (176, 124), (176, 125), (176, 127), (177, 111), (177, 113), (177, 114), (177, 115), (177, 116), (177, 117), (177, 118), (177, 119), (177, 120), (177, 121), (177, 122), (177, 123), (177, 124), (177, 125), (177, 127), (178, 111), (178, 113), (178, 114), (178, 115), (178, 116), (178, 117), (178, 118), (178, 119), (178, 120), (178, 121), (178, 122), (178, 123), (178, 124), (178, 126), (179, 111), (179, 113), (179, 114), (179, 115), (179, 116), (179, 117), (179, 118), (179, 119), (179, 120), (179, 121), (179, 122), (179, 123), (179, 126), (180, 110), (180, 111), (180, 112), (180, 113), (180, 114), (180, 115), (180, 116), (180, 117), (180, 118), (180, 119), (180, 120), (180, 121), (180, 122), (180, 124), (181, 110),
(181, 112), (181, 113), (181, 114), (181, 115), (181, 116), (181, 117), (181, 118), (181, 119), (181, 120), (181, 121), (181, 123), (182, 109), (182, 122), (183, 108), (183, 110), (183, 111), (183, 112), (183, 113), (183, 114), (183, 115), (183, 116), (183, 117), (183, 118), (183, 119), (183, 121), (184, 107), (184, 109), (185, 106), (186, 105), (186, 106), (187, 105), )
coordinates_9F00E2 = ((147, 157),
(148, 134), (148, 157), (149, 133), (149, 136), (149, 137), (149, 138), (149, 139), (149, 140), (149, 141), (149, 142), (149, 143), (149, 154), (149, 156), (150, 133), (150, 135), (150, 144), (150, 145), (150, 146), (150, 147), (150, 148), (150, 149), (150, 150), (150, 151), (150, 152), (150, 153), (151, 133), (151, 134), (151, 135), (151, 136), (151, 137), (151, 138), (151, 139), (151, 140), (151, 141), (151, 142), (151, 143), (151, 151), (152, 132), (152, 134), (152, 135), (152, 136), (152, 137), (152, 138), (152, 139), (152, 140), (152, 141), (152, 142), (152, 143), (152, 144), (152, 145), (152, 149), (153, 133), (153, 134), (153, 135), (153, 136), (153, 137), (153, 138), (153, 139), (153, 140), (153, 141), (153, 142), (153, 143), (153, 144), (153, 147), (154, 132), (154, 134), (154, 135), (154, 136), (154, 137), (154, 138), (154, 139), (154, 140),
(154, 141), (154, 142), (154, 143), (154, 145), (155, 133), (155, 135), (155, 136), (155, 137), (155, 138), (155, 139), (155, 140), (155, 141), (155, 144), (156, 134), (156, 143), (157, 135), (157, 137), (157, 138), (157, 139), (157, 140), (157, 141), (161, 133), (161, 135), (161, 136), (161, 137), (161, 138), (161, 139), (162, 132), (162, 140), (162, 141), (162, 142), (162, 144), (163, 132), (163, 134), (163, 135), (163, 136), (163, 137), (163, 138), (163, 139), (163, 145), (164, 131), (164, 133), (164, 134), (164, 135), (164, 136), (164, 137), (164, 138), (164, 139), (164, 140), (164, 141), (164, 142), (164, 143), (164, 146), (165, 131), (165, 133), (165, 134), (165, 135), (165, 136), (165, 137), (165, 138), (165, 139), (165, 140), (165, 141), (165, 142), (165, 143), (165, 144), (165, 145), (165, 147), (166, 132), (166, 134), (166, 135), (166, 136),
(166, 137), (166, 138), (166, 139), (166, 140), (166, 141), (166, 142), (166, 143), (166, 144), (166, 145), (166, 148), (167, 132), (167, 135), (167, 136), (167, 137), (167, 138), (167, 139), (167, 140), (167, 146), (167, 147), (167, 150), (168, 132), (168, 134), (168, 141), (168, 142), (168, 143), (168, 144), (168, 145), (168, 149), (168, 151), (169, 135), (169, 136), (169, 137), (169, 138), (169, 139), (169, 151), (169, 153), )
coordinates_0734C9 = ((76, 190),
(77, 188), (77, 190), (78, 186), (78, 191), (79, 167), (79, 185), (79, 188), (79, 189), (79, 191), (80, 167), (80, 168), (80, 183), (80, 186), (80, 187), (80, 188), (80, 189), (80, 190), (80, 192), (81, 167), (81, 169), (81, 182), (81, 185), (81, 186), (81, 187), (81, 188), (81, 189), (81, 190), (81, 192), (82, 166), (82, 168), (82, 170), (82, 180), (82, 183), (82, 184), (82, 185), (82, 186), (82, 187), (82, 188), (82, 189), (82, 190), (82, 191), (82, 193), (83, 166), (83, 168), (83, 169), (83, 172), (83, 178), (83, 179), (83, 182), (83, 183), (83, 184), (83, 185), (83, 186), (83, 187), (83, 188), (83, 189), (83, 190), (83, 191), (83, 193), (84, 165), (84, 167), (84, 168), (84, 169), (84, 170), (84, 173), (84, 174), (84, 175), (84, 176), (84, 177), (84, 180), (84, 181), (84, 182),
(84, 183), (84, 184), (84, 185), (84, 186), (84, 187), (84, 188), (84, 189), (84, 190), (84, 191), (84, 192), (84, 194), (85, 165), (85, 167), (85, 168), (85, 169), (85, 170), (85, 171), (85, 172), (85, 178), (85, 179), (85, 180), (85, 181), (85, 182), (85, 183), (85, 184), (85, 185), (85, 186), (85, 187), (85, 188), (85, 189), (85, 190), (85, 191), (85, 192), (85, 194), (86, 164), (86, 166), (86, 167), (86, 168), (86, 169), (86, 170), (86, 171), (86, 172), (86, 173), (86, 174), (86, 175), (86, 176), (86, 177), (86, 178), (86, 179), (86, 180), (86, 181), (86, 182), (86, 183), (86, 184), (86, 185), (86, 186), (86, 187), (86, 188), (86, 189), (86, 190), (86, 191), (86, 192), (86, 193), (86, 195), (87, 164), (87, 166), (87, 167), (87, 168), (87, 169), (87, 170), (87, 171), (87, 172),
(87, 173), (87, 174), (87, 175), (87, 176), (87, 177), (87, 178), (87, 179), (87, 180), (87, 181), (87, 182), (87, 183), (87, 184), (87, 185), (87, 186), (87, 187), (87, 188), (87, 189), (87, 190), (87, 191), (87, 192), (87, 193), (87, 195), (88, 165), (88, 167), (88, 168), (88, 169), (88, 170), (88, 171), (88, 172), (88, 173), (88, 174), (88, 175), (88, 176), (88, 177), (88, 178), (88, 179), (88, 180), (88, 181), (88, 182), (88, 183), (88, 184), (88, 185), (88, 186), (88, 187), (88, 188), (88, 189), (88, 190), (88, 191), (88, 192), (88, 193), (88, 194), (88, 195), (89, 165), (89, 167), (89, 168), (89, 169), (89, 170), (89, 171), (89, 172), (89, 173), (89, 174), (89, 175), (89, 176), (89, 177), (89, 178), (89, 179), (89, 180), (89, 181), (89, 182), (89, 183), (89, 184), (89, 185),
(89, 186), (89, 187), (89, 188), (89, 189), (89, 190), (89, 191), (89, 192), (89, 193), (89, 194), (89, 195), (90, 165), (90, 167), (90, 168), (90, 169), (90, 170), (90, 171), (90, 172), (90, 173), (90, 174), (90, 175), (90, 176), (90, 177), (90, 178), (90, 179), (90, 180), (90, 181), (90, 182), (90, 183), (90, 184), (90, 185), (90, 186), (90, 187), (90, 188), (90, 189), (90, 190), (90, 191), (90, 192), (90, 193), (90, 194), (90, 195), (90, 196), (91, 165), (91, 167), (91, 168), (91, 169), (91, 170), (91, 171), (91, 172), (91, 173), (91, 174), (91, 175), (91, 176), (91, 177), (91, 178), (91, 179), (91, 180), (91, 181), (91, 182), (91, 183), (91, 184), (91, 185), (91, 186), (91, 187), (91, 188), (91, 189), (91, 190), (91, 191), (91, 192), (91, 193), (91, 194), (91, 195), (91, 196),
(92, 165), (92, 167), (92, 168), (92, 169), (92, 170), (92, 171), (92, 172), (92, 173), (92, 174), (92, 175), (92, 176), (92, 177), (92, 178), (92, 179), (92, 180), (92, 181), (92, 182), (92, 183), (92, 184), (92, 185), (92, 186), (92, 187), (92, 188), (92, 189), (92, 190), (92, 191), (92, 192), (92, 193), (92, 194), (92, 195), (92, 196), (93, 165), (93, 167), (93, 168), (93, 169), (93, 170), (93, 171), (93, 172), (93, 173), (93, 174), (93, 175), (93, 176), (93, 177), (93, 178), (93, 179), (93, 180), (93, 181), (93, 182), (93, 183), (93, 184), (93, 185), (93, 186), (93, 187), (93, 188), (93, 189), (93, 190), (93, 191), (93, 192), (93, 193), (93, 194), (93, 195), (93, 196), (94, 164), (94, 166), (94, 167), (94, 168), (94, 169), (94, 170), (94, 171), (94, 172), (94, 173), (94, 174),
(94, 175), (94, 176), (94, 177), (94, 178), (94, 179), (94, 180), (94, 181), (94, 182), (94, 183), (94, 184), (94, 185), (94, 186), (94, 187), (94, 188), (94, 189), (94, 190), (94, 191), (94, 192), (94, 193), (94, 194), (94, 195), (94, 196), (95, 164), (95, 166), (95, 167), (95, 168), (95, 169), (95, 170), (95, 171), (95, 172), (95, 173), (95, 174), (95, 175), (95, 176), (95, 177), (95, 178), (95, 179), (95, 180), (95, 181), (95, 182), (95, 183), (95, 184), (95, 185), (95, 186), (95, 187), (95, 188), (95, 189), (95, 190), (95, 191), (95, 192), (95, 193), (95, 194), (95, 195), (95, 196), (96, 164), (96, 166), (96, 167), (96, 168), (96, 169), (96, 170), (96, 171), (96, 172), (96, 173), (96, 174), (96, 175), (96, 176), (96, 177), (96, 178), (96, 179), (96, 180), (96, 181), (96, 182),
(96, 183), (96, 184), (96, 185), (96, 186), (96, 187), (96, 188), (96, 189), (96, 190), (96, 191), (96, 192), (96, 193), (96, 194), (96, 195), (96, 196), (97, 163), (97, 165), (97, 166), (97, 167), (97, 168), (97, 169), (97, 170), (97, 171), (97, 172), (97, 173), (97, 174), (97, 175), (97, 176), (97, 177), (97, 178), (97, 179), (97, 180), (97, 181), (97, 182), (97, 183), (97, 184), (97, 185), (97, 186), (97, 187), (97, 188), (97, 189), (97, 190), (97, 191), (97, 192), (97, 193), (97, 194), (97, 195), (97, 196), (98, 163), (98, 165), (98, 166), (98, 167), (98, 168), (98, 169), (98, 170), (98, 171), (98, 172), (98, 173), (98, 174), (98, 175), (98, 176), (98, 177), (98, 178), (98, 179), (98, 180), (98, 181), (98, 182), (98, 183), (98, 184), (98, 185), (98, 186), (98, 187), (98, 188),
(98, 189), (98, 190), (98, 191), (98, 192), (98, 193), (98, 194), (98, 195), (98, 196), (99, 163), (99, 165), (99, 166), (99, 167), (99, 168), (99, 169), (99, 170), (99, 171), (99, 172), (99, 173), (99, 174), (99, 175), (99, 176), (99, 177), (99, 178), (99, 179), (99, 180), (99, 181), (99, 182), (99, 183), (99, 184), (99, 185), (99, 186), (99, 187), (99, 188), (99, 189), (99, 190), (99, 191), (99, 192), (99, 193), (99, 194), (99, 195), (99, 196), (100, 163), (100, 165), (100, 166), (100, 167), (100, 168), (100, 169), (100, 170), (100, 171), (100, 172), (100, 173), (100, 174), (100, 175), (100, 176), (100, 177), (100, 178), (100, 179), (100, 180), (100, 181), (100, 182), (100, 183), (100, 184), (100, 185), (100, 186), (100, 187), (100, 188), (100, 189), (100, 190), (100, 191), (100, 192), (100, 193), (100, 194),
(100, 195), (100, 196), (101, 162), (101, 163), (101, 164), (101, 165), (101, 166), (101, 167), (101, 168), (101, 169), (101, 170), (101, 171), (101, 172), (101, 173), (101, 174), (101, 175), (101, 176), (101, 177), (101, 178), (101, 179), (101, 180), (101, 181), (101, 182), (101, 183), (101, 184), (101, 185), (101, 186), (101, 187), (101, 188), (101, 189), (101, 190), (101, 191), (101, 192), (101, 193), (101, 194), (101, 195), (101, 196), (102, 162), (102, 164), (102, 165), (102, 166), (102, 167), (102, 168), (102, 169), (102, 170), (102, 171), (102, 172), (102, 173), (102, 174), (102, 175), (102, 176), (102, 177), (102, 178), (102, 179), (102, 180), (102, 181), (102, 182), (102, 183), (102, 184), (102, 185), (102, 186), (102, 187), (102, 188), (102, 189), (102, 190), (102, 191), (102, 192), (102, 193), (102, 194), (102, 195), (102, 196), (103, 162),
(103, 164), (103, 165), (103, 166), (103, 167), (103, 168), (103, 169), (103, 170), (103, 171), (103, 172), (103, 173), (103, 174), (103, 175), (103, 176), (103, 177), (103, 178), (103, 179), (103, 180), (103, 181), (103, 182), (103, 183), (103, 184), (103, 185), (103, 186), (103, 187), (103, 188), (103, 189), (103, 190), (103, 191), (103, 192), (103, 193), (103, 194), (103, 195), (104, 162), (104, 164), (104, 165), (104, 166), (104, 167), (104, 168), (104, 169), (104, 170), (104, 171), (104, 172), (104, 173), (104, 174), (104, 175), (104, 176), (104, 177), (104, 178), (104, 179), (104, 180), (104, 181), (104, 182), (104, 183), (104, 184), (104, 185), (104, 186), (104, 187), (104, 188), (104, 189), (104, 190), (104, 191), (104, 192), (104, 193), (104, 194), (105, 162), (105, 164), (105, 165), (105, 166), (105, 167), (105, 168), (105, 169), (105, 170),
(105, 171), (105, 172), (105, 173), (105, 174), (105, 175), (105, 176), (105, 177), (105, 178), (105, 179), (105, 180), (105, 181), (105, 182), (105, 183), (105, 184), (105, 185), (105, 186), (105, 187), (105, 188), (105, 189), (105, 190), (105, 191), (105, 192), (105, 193), (105, 195), (106, 162), (106, 164), (106, 165), (106, 166), (106, 167), (106, 168), (106, 169), (106, 170), (106, 171), (106, 172), (106, 173), (106, 174), (106, 175), (106, 176), (106, 177), (106, 178), (106, 179), (106, 180), (106, 181), (106, 182), (106, 183), (106, 184), (106, 185), (106, 186), (106, 187), (106, 188), (106, 189), (106, 190), (106, 191), (106, 192), (106, 193), (107, 163), (107, 165), (107, 166), (107, 167), (107, 168), (107, 169), (107, 170), (107, 171), (107, 172), (107, 173), (107, 174), (107, 175), (107, 176), (107, 177), (107, 178), (107, 179), (107, 180),
(107, 181), (107, 182), (107, 183), (107, 184), (107, 185), (107, 186), (107, 187), (107, 188), (107, 189), (107, 190), (107, 191), (107, 192), (107, 194), (108, 164), (108, 166), (108, 167), (108, 168), (108, 169), (108, 170), (108, 171), (108, 172), (108, 173), (108, 174), (108, 175), (108, 176), (108, 177), (108, 178), (108, 179), (108, 180), (108, 181), (108, 182), (108, 183), (108, 184), (108, 185), (108, 186), (108, 187), (108, 188), (108, 189), (108, 190), (108, 191), (108, 193), (109, 164), (109, 166), (109, 167), (109, 168), (109, 169), (109, 170), (109, 171), (109, 172), (109, 173), (109, 174), (109, 175), (109, 176), (109, 177), (109, 178), (109, 179), (109, 180), (109, 181), (109, 182), (109, 183), (109, 184), (109, 185), (109, 186), (109, 187), (109, 188), (109, 189), (109, 190), (109, 192), (110, 165), (110, 167), (110, 168), (110, 169),
(110, 170), (110, 171), (110, 172), (110, 173), (110, 174), (110, 175), (110, 176), (110, 177), (110, 178), (110, 179), (110, 180), (110, 181), (110, 182), (110, 183), (110, 184), (110, 185), (110, 186), (110, 187), (110, 188), (110, 189), (110, 191), (111, 165), (111, 167), (111, 168), (111, 169), (111, 170), (111, 171), (111, 172), (111, 173), (111, 174), (111, 175), (111, 176), (111, 177), (111, 178), (111, 179), (111, 180), (111, 181), (111, 182), (111, 183), (111, 184), (111, 185), (111, 186), (111, 187), (111, 188), (111, 190), (112, 166), (112, 168), (112, 169), (112, 170), (112, 171), (112, 172), (112, 173), (112, 174), (112, 175), (112, 176), (112, 177), (112, 178), (112, 179), (112, 180), (112, 181), (112, 182), (112, 183), (112, 184), (112, 185), (112, 186), (112, 187), (112, 189), (113, 167), (113, 169), (113, 170), (113, 171), (113, 172),
(113, 173), (113, 174), (113, 175), (113, 176), (113, 177), (113, 178), (113, 179), (113, 180), (113, 181), (113, 182), (113, 183), (113, 184), (113, 185), (113, 186), (113, 187), (113, 189), (114, 167), (114, 169), (114, 170), (114, 171), (114, 172), (114, 173), (114, 174), (114, 175), (114, 176), (114, 177), (114, 178), (114, 179), (114, 180), (114, 181), (114, 182), (114, 183), (114, 184), (114, 185), (114, 186), (114, 188), (115, 168), (115, 170), (115, 171), (115, 172), (115, 173), (115, 174), (115, 175), (115, 176), (115, 177), (115, 178), (115, 179), (115, 180), (115, 181), (115, 182), (115, 183), (115, 184), (115, 185), (115, 186), (115, 187), (116, 168), (116, 170), (116, 171), (116, 172), (116, 173), (116, 174), (116, 175), (116, 176), (116, 177), (116, 178), (116, 179), (116, 180), (116, 181), (116, 182), (116, 183), (116, 184), (116, 185),
(116, 187), (117, 169), (117, 171), (117, 172), (117, 173), (117, 174), (117, 175), (117, 176), (117, 177), (117, 178), (117, 179), (117, 180), (117, 181), (117, 182), (117, 183), (117, 184), (117, 186), (118, 170), (118, 172), (118, 173), (118, 174), (118, 175), (118, 176), (118, 177), (118, 178), (118, 179), (118, 180), (118, 181), (118, 182), (118, 183), (118, 184), (118, 186), (119, 171), (119, 173), (119, 174), (119, 175), (119, 176), (119, 177), (119, 178), (119, 179), (119, 180), (119, 181), (119, 182), (119, 183), (119, 185), (120, 172), (120, 175), (120, 176), (120, 177), (120, 178), (120, 179), (120, 180), (120, 181), (120, 182), (120, 185), (121, 173), (121, 182), (121, 184), (122, 174), (122, 175), (122, 176), (122, 177), (122, 178), (122, 179), (122, 180), (122, 183), (123, 182), (199, 172), (199, 173), (199, 174), (199, 175), (199, 176),
(199, 177), (199, 178), (199, 179), (199, 180), (199, 181), (199, 182), (199, 184), (200, 168), (200, 169), (200, 170), (200, 171), (200, 185), (201, 167), (201, 171), (201, 172), (201, 173), (201, 174), (201, 175), (201, 176), (201, 177), (201, 178), (201, 179), (201, 180), (201, 181), (201, 182), (201, 183), (201, 185), (202, 166), (202, 168), (202, 169), (202, 170), (202, 171), (202, 172), (202, 173), (202, 174), (202, 175), (202, 176), (202, 177), (202, 178), (202, 179), (202, 180), (202, 181), (202, 182), (202, 183), (202, 184), (202, 186), (203, 165), (203, 167), (203, 168), (203, 169), (203, 170), (203, 171), (203, 172), (203, 173), (203, 174), (203, 175), (203, 176), (203, 177), (203, 178), (203, 179), (203, 180), (203, 181), (203, 182), (203, 183), (203, 184), (203, 186), (204, 164), (204, 166), (204, 167), (204, 168), (204, 169), (204, 170),
(204, 171), (204, 172), (204, 173), (204, 174), (204, 175), (204, 176), (204, 177), (204, 178), (204, 179), (204, 180), (204, 181), (204, 182), (204, 183), (204, 184), (204, 185), (204, 187), (205, 163), (205, 165), (205, 166), (205, 167), (205, 168), (205, 169), (205, 170), (205, 171), (205, 172), (205, 173), (205, 174), (205, 175), (205, 176), (205, 177), (205, 178), (205, 179), (205, 180), (205, 181), (205, 182), (205, 183), (205, 184), (205, 185), (205, 186), (205, 188), (206, 162), (206, 164), (206, 165), (206, 166), (206, 167), (206, 168), (206, 169), (206, 170), (206, 171), (206, 172), (206, 173), (206, 174), (206, 175), (206, 176), (206, 177), (206, 178), (206, 179), (206, 180), (206, 181), (206, 182), (206, 183), (206, 184), (206, 185), (206, 186), (206, 188), (207, 161), (207, 163), (207, 164), (207, 165), (207, 166), (207, 167), (207, 168),
(207, 169), (207, 170), (207, 171), (207, 172), (207, 173), (207, 174), (207, 175), (207, 176), (207, 177), (207, 178), (207, 179), (207, 180), (207, 181), (207, 182), (207, 183), (207, 184), (207, 185), (207, 186), (207, 187), (207, 189), (208, 160), (208, 163), (208, 164), (208, 165), (208, 166), (208, 167), (208, 168), (208, 169), (208, 170), (208, 171), (208, 172), (208, 173), (208, 174), (208, 175), (208, 176), (208, 177), (208, 178), (208, 179), (208, 180), (208, 181), (208, 182), (208, 183), (208, 184), (208, 185), (208, 186), (208, 187), (208, 189), (209, 160), (209, 162), (209, 163), (209, 164), (209, 165), (209, 166), (209, 167), (209, 168), (209, 169), (209, 170), (209, 171), (209, 172), (209, 173), (209, 174), (209, 175), (209, 176), (209, 177), (209, 178), (209, 179), (209, 180), (209, 181), (209, 182), (209, 183), (209, 184), (209, 185),
(209, 186), (209, 187), (209, 188), (209, 190), (210, 160), (210, 162), (210, 163), (210, 164), (210, 165), (210, 166), (210, 167), (210, 168), (210, 169), (210, 170), (210, 171), (210, 172), (210, 173), (210, 174), (210, 175), (210, 176), (210, 177), (210, 178), (210, 179), (210, 180), (210, 181), (210, 182), (210, 183), (210, 184), (210, 185), (210, 186), (210, 187), (210, 188), (210, 189), (210, 191), (211, 160), (211, 162), (211, 163), (211, 164), (211, 165), (211, 166), (211, 167), (211, 168), (211, 169), (211, 170), (211, 171), (211, 172), (211, 173), (211, 174), (211, 175), (211, 176), (211, 177), (211, 178), (211, 179), (211, 180), (211, 181), (211, 182), (211, 183), (211, 184), (211, 185), (211, 186), (211, 187), (211, 188), (211, 189), (211, 191), (212, 160), (212, 162), (212, 163), (212, 164), (212, 165), (212, 166), (212, 167), (212, 168),
(212, 169), (212, 170), (212, 171), (212, 172), (212, 173), (212, 174), (212, 175), (212, 176), (212, 177), (212, 178), (212, 179), (212, 180), (212, 181), (212, 182), (212, 183), (212, 184), (212, 185), (212, 186), (212, 187), (212, 188), (212, 189), (212, 190), (212, 192), (213, 160), (213, 162), (213, 163), (213, 164), (213, 165), (213, 166), (213, 167), (213, 168), (213, 169), (213, 170), (213, 171), (213, 172), (213, 173), (213, 174), (213, 175), (213, 176), (213, 177), (213, 178), (213, 179), (213, 180), (213, 181), (213, 182), (213, 183), (213, 184), (213, 185), (213, 186), (213, 187), (213, 188), (213, 189), (213, 190), (213, 192), (214, 161), (214, 163), (214, 164), (214, 165), (214, 166), (214, 167), (214, 168), (214, 169), (214, 170), (214, 171), (214, 172), (214, 173), (214, 174), (214, 175), (214, 176), (214, 177), (214, 178), (214, 179),
(214, 180), (214, 181), (214, 182), (214, 183), (214, 184), (214, 185), (214, 186), (214, 187), (214, 188), (214, 189), (214, 190), (214, 191), (214, 193), (215, 161), (215, 163), (215, 164), (215, 165), (215, 166), (215, 167), (215, 168), (215, 169), (215, 170), (215, 171), (215, 172), (215, 173), (215, 174), (215, 175), (215, 176), (215, 177), (215, 178), (215, 179), (215, 180), (215, 181), (215, 182), (215, 183), (215, 184), (215, 185), (215, 186), (215, 187), (215, 188), (215, 189), (215, 190), (215, 191), (215, 192), (215, 194), (216, 161), (216, 163), (216, 164), (216, 165), (216, 166), (216, 167), (216, 168), (216, 169), (216, 170), (216, 171), (216, 172), (216, 173), (216, 174), (216, 175), (216, 176), (216, 177), (216, 178), (216, 179), (216, 180), (216, 181), (216, 182), (216, 183), (216, 184), (216, 185), (216, 186), (216, 187), (216, 188),
(216, 189), (216, 190), (216, 191), (216, 192), (216, 194), (217, 161), (217, 163), (217, 164), (217, 165), (217, 166), (217, 167), (217, 168), (217, 169), (217, 170), (217, 171), (217, 172), (217, 173), (217, 174), (217, 175), (217, 176), (217, 177), (217, 178), (217, 179), (217, 180), (217, 181), (217, 182), (217, 183), (217, 184), (217, 185), (217, 186), (217, 187), (217, 188), (217, 189), (217, 190), (217, 191), (217, 192), (217, 193), (217, 194), (218, 161), (218, 163), (218, 164), (218, 165), (218, 166), (218, 167), (218, 168), (218, 169), (218, 170), (218, 171), (218, 172), (218, 173), (218, 174), (218, 175), (218, 176), (218, 177), (218, 178), (218, 179), (218, 180), (218, 181), (218, 182), (218, 183), (218, 184), (218, 185), (218, 186), (218, 187), (218, 188), (218, 189), (218, 190), (218, 191), (218, 192), (218, 193), (218, 195), (219, 162),
(219, 164), (219, 165), (219, 166), (219, 167), (219, 168), (219, 169), (219, 170), (219, 171), (219, 172), (219, 173), (219, 174), (219, 175), (219, 176), (219, 177), (219, 178), (219, 179), (219, 180), (219, 181), (219, 182), (219, 183), (219, 184), (219, 185), (219, 186), (219, 187), (219, 188), (219, 189), (219, 190), (219, 191), (219, 192), (219, 193), (219, 194), (219, 195), (220, 162), (220, 164), (220, 165), (220, 166), (220, 167), (220, 168), (220, 169), (220, 170), (220, 171), (220, 172), (220, 173), (220, 174), (220, 175), (220, 176), (220, 177), (220, 178), (220, 179), (220, 180), (220, 181), (220, 182), (220, 183), (220, 184), (220, 185), (220, 186), (220, 187), (220, 188), (220, 189), (220, 190), (220, 191), (220, 192), (220, 193), (220, 194), (220, 195), (221, 162), (221, 164), (221, 165), (221, 166), (221, 167), (221, 168), (221, 169),
(221, 170), (221, 171), (221, 172), (221, 173), (221, 174), (221, 175), (221, 176), (221, 177), (221, 178), (221, 179), (221, 180), (221, 181), (221, 182), (221, 183), (221, 184), (221, 185), (221, 186), (221, 187), (221, 188), (221, 189), (221, 190), (221, 191), (221, 192), (221, 193), (221, 194), (221, 195), (221, 196), (222, 163), (222, 165), (222, 166), (222, 167), (222, 168), (222, 169), (222, 170), (222, 171), (222, 172), (222, 173), (222, 174), (222, 175), (222, 176), (222, 177), (222, 178), (222, 179), (222, 180), (222, 181), (222, 182), (222, 183), (222, 184), (222, 185), (222, 186), (222, 187), (222, 188), (222, 189), (222, 190), (222, 191), (222, 192), (222, 193), (222, 194), (222, 195), (222, 196), (223, 163), (223, 165), (223, 166), (223, 167), (223, 168), (223, 169), (223, 170), (223, 171), (223, 172), (223, 173), (223, 174), (223, 175),
(223, 176), (223, 177), (223, 178), (223, 179), (223, 180), (223, 181), (223, 182), (223, 183), (223, 184), (223, 185), (223, 186), (223, 187), (223, 188), (223, 189), (223, 190), (223, 191), (223, 192), (223, 193), (223, 194), (223, 195), (223, 196), (224, 163), (224, 165), (224, 166), (224, 167), (224, 168), (224, 169), (224, 170), (224, 171), (224, 172), (224, 173), (224, 174), (224, 175), (224, 176), (224, 177), (224, 178), (224, 179), (224, 180), (224, 181), (224, 182), (224, 183), (224, 184), (224, 185), (224, 186), (224, 187), (224, 188), (224, 189), (224, 190), (224, 191), (224, 192), (224, 193), (224, 194), (224, 195), (224, 196), (225, 164), (225, 166), (225, 167), (225, 168), (225, 169), (225, 170), (225, 171), (225, 172), (225, 173), (225, 174), (225, 175), (225, 176), (225, 177), (225, 178), (225, 179), (225, 180), (225, 181), (225, 182),
(225, 183), (225, 184), (225, 185), (225, 186), (225, 187), (225, 188), (225, 189), (225, 190), (225, 191), (225, 192), (225, 193), (225, 194), (225, 195), (225, 196), (226, 164), (226, 166), (226, 167), (226, 168), (226, 169), (226, 170), (226, 171), (226, 172), (226, 173), (226, 174), (226, 175), (226, 176), (226, 177), (226, 178), (226, 179), (226, 180), (226, 181), (226, 182), (226, 183), (226, 184), (226, 185), (226, 186), (226, 187), (226, 188), (226, 189), (226, 190), (226, 191), (226, 192), (226, 193), (226, 194), (226, 195), (226, 196), (227, 165), (227, 167), (227, 168), (227, 169), (227, 170), (227, 171), (227, 172), (227, 173), (227, 174), (227, 175), (227, 176), (227, 177), (227, 178), (227, 179), (227, 180), (227, 181), (227, 182), (227, 183), (227, 184), (227, 185), (227, 186), (227, 187), (227, 188), (227, 189), (227, 190), (227, 191),
(227, 192), (227, 193), (227, 194), (227, 195), (227, 196), (228, 166), (228, 168), (228, 169), (228, 170), (228, 171), (228, 172), (228, 173), (228, 174), (228, 175), (228, 176), (228, 177), (228, 178), (228, 179), (228, 180), (228, 181), (228, 182), (228, 183), (228, 184), (228, 185), (228, 186), (228, 187), (228, 188), (228, 189), (228, 190), (228, 191), (228, 192), (228, 193), (228, 194), (228, 195), (229, 169), (229, 170), (229, 171), (229, 172), (229, 173), (229, 174), (229, 175), (229, 176), (229, 177), (229, 178), (229, 179), (229, 180), (229, 181), (229, 182), (229, 183), (229, 184), (229, 185), (229, 186), (229, 187), (229, 188), (229, 189), (229, 190), (229, 191), (229, 192), (229, 193), (229, 194), (229, 195), (230, 170), (230, 171), (230, 172), (230, 173), (230, 174), (230, 175), (230, 176), (230, 177), (230, 178), (230, 179), (230, 180),
(230, 181), (230, 182), (230, 183), (230, 184), (230, 185), (230, 186), (230, 187), (230, 188), (230, 189), (230, 190), (230, 191), (230, 192), (230, 193), (230, 195), (231, 171), (231, 172), (231, 173), (231, 174), (231, 175), (231, 176), (231, 177), (231, 178), (231, 179), (231, 180), (231, 181), (231, 182), (231, 183), (231, 184), (231, 185), (231, 186), (231, 187), (231, 188), (231, 189), (231, 190), (231, 191), (231, 192), (231, 193), (231, 195), (232, 170), (232, 172), (232, 173), (232, 174), (232, 175), (232, 176), (232, 177), (232, 178), (232, 179), (232, 180), (232, 181), (232, 182), (232, 183), (232, 184), (232, 185), (232, 186), (232, 187), (232, 188), (232, 189), (232, 190), (232, 191), (232, 192), (232, 194), (233, 171), (233, 173), (233, 174), (233, 175), (233, 176), (233, 177), (233, 178), (233, 179), (233, 180), (233, 181), (233, 182),
(233, 183), (233, 184), (233, 185), (233, 186), (233, 187), (233, 188), (233, 189), (233, 190), (233, 191), (233, 192), (233, 194), (234, 171), (234, 173), (234, 174), (234, 175), (234, 176), (234, 177), (234, 178), (234, 179), (234, 180), (234, 181), (234, 182), (234, 183), (234, 184), (234, 185), (234, 186), (234, 187), (234, 188), (234, 189), (234, 190), (234, 191), (234, 193), (235, 172), (235, 174), (235, 175), (235, 176), (235, 177), (235, 178), (235, 179), (235, 180), (235, 181), (235, 182), (235, 183), (235, 184), (235, 185), (235, 186), (235, 187), (235, 188), (235, 189), (235, 190), (235, 191), (235, 193), (236, 173), (236, 180), (236, 181), (236, 182), (236, 183), (236, 184), (236, 185), (236, 186), (236, 187), (236, 188), (236, 189), (236, 190), (236, 192), (237, 173), (237, 175), (237, 176), (237, 177), (237, 178), (237, 179), (237, 185),
(237, 186), (237, 187), (237, 188), (237, 189), (237, 190), (237, 192), (238, 180), (238, 181), (238, 182), (238, 183), (238, 184), (238, 187), (238, 188), (238, 189), (238, 190), (238, 192), (239, 185), (239, 188), (239, 189), (239, 191), (240, 187), (240, 189), (240, 191), (241, 188), (241, 191), (242, 189), (242, 190), (243, 190), )
coordinates_FFFF00 = ((145, 192),
(146, 191), (147, 191), (147, 193), (148, 191), (148, 193), (149, 191), (149, 193), (150, 190), (150, 193), (151, 190), (151, 193), (152, 189), (152, 191), (152, 193), (153, 188), (153, 190), (153, 191), (153, 193), (154, 188), (154, 190), (154, 192), (155, 188), (155, 191), (156, 188), (156, 191), (157, 188), (157, 191), (158, 188), (158, 191), (159, 188), (159, 191), (160, 188), (160, 191), (161, 188), (161, 191), (162, 188), (162, 191), (163, 187), (163, 189), (163, 191), (164, 187), (164, 189), (164, 191), (165, 187), (165, 189), (165, 191), (166, 187), (166, 189), (166, 191), (167, 187), (167, 189), (167, 191), (168, 188), (168, 191), (169, 189), (169, 191), (170, 189), (170, 191), (171, 190), (171, 191), (172, 190), (172, 191), (173, 191), (174, 191), (175, 191), )
coordinates_228B22 = ((145, 89),
(145, 90), (145, 91), (145, 92), (145, 94), (145, 103), (145, 104), (146, 87), (146, 94), (146, 101), (146, 104), (147, 87), (147, 89), (147, 90), (147, 91), (147, 92), (147, 94), (147, 101), (147, 103), (147, 105), (148, 88), (148, 90), (148, 91), (148, 92), (148, 94), (148, 101), (148, 103), (148, 104), (148, 106), (149, 88), (149, 90), (149, 91), (149, 92), (149, 94), (149, 101), (149, 103), (149, 104), (149, 106), (150, 89), (150, 91), (150, 92), (150, 94), (150, 101), (150, 103), (150, 104), (150, 106), (151, 89), (151, 91), (151, 92), (151, 93), (151, 95), (151, 99), (151, 101), (151, 102), (151, 103), (151, 104), (151, 106), (152, 89), (152, 91), (152, 92), (152, 93), (152, 94), (152, 97), (152, 98), (152, 101), (152, 102), (152, 103), (152, 104), (152, 106), (153, 90), (153, 92), (153, 93), (153, 94),
(153, 95), (153, 99), (153, 100), (153, 101), (153, 102), (153, 103), (153, 104), (153, 105), (154, 90), (154, 92), (154, 93), (154, 94), (154, 95), (154, 96), (154, 97), (154, 98), (154, 99), (154, 100), (154, 101), (154, 102), (154, 103), (154, 105), (155, 90), (155, 92), (155, 93), (155, 94), (155, 95), (155, 96), (155, 97), (155, 98), (155, 99), (155, 100), (155, 101), (155, 102), (155, 103), (155, 105), (156, 92), (156, 93), (156, 94), (156, 95), (156, 96), (156, 97), (156, 98), (156, 99), (156, 100), (156, 101), (156, 103), (157, 92), (157, 94), (157, 95), (157, 96), (157, 97), (157, 98), (157, 99), (157, 100), (157, 101), (157, 103), (158, 92), (158, 94), (158, 95), (158, 96), (158, 97), (158, 98), (158, 99), (158, 100), (158, 101), (158, 103), (159, 91), (159, 93), (159, 94), (159, 95), (159, 96),
(159, 97), (159, 98), (159, 99), (159, 100), (159, 101), (159, 102), (159, 103), (159, 105), (160, 90), (160, 92), (160, 93), (160, 94), (160, 95), (160, 96), (160, 97), (160, 98), (160, 99), (160, 100), (160, 101), (160, 102), (160, 103), (160, 105), (161, 89), (161, 91), (161, 92), (161, 93), (161, 94), (161, 95), (161, 96), (161, 97), (161, 98), (161, 99), (161, 100), (161, 101), (161, 102), (161, 103), (161, 105), (162, 89), (162, 91), (162, 92), (162, 93), (162, 94), (162, 95), (162, 101), (162, 102), (162, 103), (162, 104), (162, 106), (163, 89), (163, 91), (163, 92), (163, 93), (163, 96), (163, 97), (163, 98), (163, 99), (163, 102), (163, 103), (163, 104), (163, 106), (164, 89), (164, 91), (164, 92), (164, 94), (164, 95), (164, 101), (164, 102), (164, 103), (164, 104), (164, 106), (165, 88), (165, 90),
(165, 91), (165, 93), (165, 102), (165, 104), (165, 106), (166, 88), (166, 90), (166, 92), (166, 102), (166, 104), (166, 106), (167, 88), (167, 90), (167, 92), (167, 102), (167, 104), (167, 106), (168, 88), (168, 93), (168, 102), (168, 104), (168, 106), (169, 90), (169, 94), (169, 102), (169, 106), (170, 92), (170, 94), (170, 103), (170, 105), )
coordinates_18EEC3 = ((126, 179),
(126, 180), (127, 178), (127, 181), (128, 177), (128, 179), (128, 180), (128, 182), (129, 177), (129, 179), (129, 180), (129, 181), (129, 183), (130, 177), (130, 179), (130, 180), (130, 181), (130, 182), (130, 184), (131, 177), (131, 179), (131, 180), (131, 181), (131, 182), (131, 184), (132, 178), (132, 180), (132, 181), (132, 182), (132, 183), (132, 185), (133, 178), (133, 180), (133, 181), (133, 182), (133, 183), (133, 184), (133, 186), (134, 179), (134, 180), (134, 181), (134, 182), (134, 183), (134, 184), (134, 185), (134, 187), (135, 179), (135, 181), (135, 182), (135, 183), (135, 184), (135, 185), (135, 187), (136, 179), (136, 181), (136, 182), (136, 183), (136, 184), (136, 185), (136, 186), (136, 188), (137, 180), (137, 182), (137, 183), (137, 184), (137, 185), (137, 186), (137, 187), (137, 188), (138, 181), (138, 182), (138, 183), (138, 184),
(138, 185), (138, 186), (138, 187), (138, 189), (139, 181), (139, 183), (139, 184), (139, 185), (139, 186), (139, 187), (139, 188), (139, 189), (140, 182), (140, 184), (140, 185), (140, 186), (140, 187), (140, 188), (140, 190), (141, 182), (141, 184), (141, 185), (141, 186), (141, 187), (141, 188), (141, 190), (142, 183), (142, 185), (142, 186), (142, 187), (142, 188), (142, 189), (142, 191), (143, 184), (143, 186), (143, 187), (143, 188), (143, 189), (143, 191), (144, 184), (144, 186), (144, 187), (144, 188), (145, 185), (145, 187), (145, 189), (146, 186), (146, 189), (147, 186), (147, 189), (148, 186), (148, 188), (149, 186), (149, 188), (150, 188), (151, 187), (151, 188), (169, 187), (170, 187), (171, 186), (171, 187), (172, 186), (172, 188), (173, 186), (173, 188), (174, 185), (174, 187), (174, 189), (175, 184), (175, 186), (175, 187), (175, 189),
(176, 184), (176, 186), (176, 187), (176, 189), (177, 183), (177, 185), (177, 186), (177, 187), (177, 188), (177, 190), (178, 183), (178, 185), (178, 186), (178, 187), (178, 188), (178, 189), (178, 191), (179, 182), (179, 184), (179, 185), (179, 186), (179, 187), (179, 188), (179, 189), (179, 191), (180, 181), (180, 183), (180, 184), (180, 185), (180, 186), (180, 187), (180, 188), (180, 190), (181, 181), (181, 183), (181, 184), (181, 185), (181, 186), (181, 187), (181, 188), (181, 190), (182, 180), (182, 182), (182, 183), (182, 184), (182, 185), (182, 186), (182, 187), (182, 188), (182, 189), (183, 180), (183, 182), (183, 183), (183, 184), (183, 185), (183, 186), (183, 187), (183, 189), (184, 179), (184, 181), (184, 182), (184, 183), (184, 184), (184, 185), (184, 186), (184, 188), (185, 179), (185, 181), (185, 182), (185, 183), (185, 184), (185, 185),
(185, 186), (185, 188), (186, 179), (186, 181), (186, 182), (186, 183), (186, 184), (186, 185), (186, 187), (187, 178), (187, 180), (187, 181), (187, 182), (187, 183), (187, 184), (187, 185), (187, 187), (188, 178), (188, 180), (188, 181), (188, 182), (188, 183), (188, 184), (188, 186), (189, 178), (189, 180), (189, 181), (189, 182), (189, 183), (189, 185), (190, 178), (190, 180), (190, 181), (190, 182), (190, 184), (191, 177), (191, 179), (191, 180), (191, 181), (191, 183), (192, 177), (192, 182), (193, 177), (193, 179), (193, 181), (194, 178), )
coordinates_00B2D2 = ((154, 130),
(155, 130), (156, 130), (156, 131), (157, 130), (157, 132), (158, 130), (158, 133), (159, 130), (159, 133), (159, 134), (159, 135), (159, 136), (159, 137), (159, 138), (159, 139), (159, 140), (159, 141), (159, 142), (159, 147), (159, 148), (159, 149), (159, 150), (159, 151), (159, 152), (159, 153), (159, 154), (159, 155), (159, 156), (159, 157), (159, 158), (159, 159), (159, 160), (159, 161), (159, 162), (159, 163), (159, 164), (159, 165), (159, 166), (159, 167), (159, 168), (159, 169), (159, 170), (159, 171), (159, 172), (159, 173), (160, 130), (160, 141), (160, 142), (160, 143), (160, 144), (160, 145), (160, 146), (160, 147), (160, 148), (160, 149), (160, 150), (160, 151), (160, 152), (160, 153), (160, 174), (160, 175), (160, 176), (160, 177), (160, 178), (160, 179), (160, 180), (160, 181), (161, 130), (162, 130), )
coordinates_6C5CD2 = ((141, 129),
(141, 130), (142, 129), (142, 131), (143, 129), (143, 131), (144, 129), (144, 131), (145, 129), (145, 132), (146, 129), (146, 132), (147, 129), (147, 132), (148, 129), (148, 132), (149, 129), (149, 131), (150, 129), (150, 131), (151, 129), (151, 130), (152, 129), (152, 130), (167, 129), (167, 130), (168, 130), (169, 130), (170, 129), (170, 130), (171, 129), (171, 130), (172, 129), (172, 130), (173, 129), (173, 130), (174, 129), (174, 130), (175, 129), (175, 130), (176, 130), (177, 129), (177, 130), (178, 129), (178, 130), (179, 128), (179, 130), (180, 129), )
coordinates_0C00B2 = ((190, 161),
(190, 162), (191, 161), (193, 164), (194, 165), )
| 751.652174
| 865
| 0.478553
|
4a085d90360d5d5ddc294c0da3d5e2a9917583fa
| 13,833
|
py
|
Python
|
pycrc/crc_parser.py
|
dshumko/PhpEPG
|
8484b7abc2060c768c0832ee1f50e44c5010ea50
|
[
"MIT"
] | 3
|
2017-01-17T12:37:42.000Z
|
2021-09-19T19:31:45.000Z
|
pycrc/crc_parser.py
|
dshumko/PhpEPG
|
8484b7abc2060c768c0832ee1f50e44c5010ea50
|
[
"MIT"
] | null | null | null |
pycrc/crc_parser.py
|
dshumko/PhpEPG
|
8484b7abc2060c768c0832ee1f50e44c5010ea50
|
[
"MIT"
] | null | null | null |
# -*- coding: Latin-1 -*-
# pycrc -- parametrisable CRC calculation utility and C source code generator
#
# Copyright (c) 2006-2011 Thomas Pircher <tehpeh@gmx.net>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""
Macro Language parser for pycrc.
Use it as follows:
import sys
from crc_opt import Options
from crc_parser import MacroParser
opt = Options()
opt.parse(sys.argv[1:])
mp = MacroParser(opt)
if mp.parse("Test 1 2 3"):
print(mp.out_str)
This file is part of pycrc.
"""
from crc_symtable import SymbolTable
from crc_lexer import Lexer
import re
import sys
# Class ParseError
###############################################################################
class ParseError(Exception):
    """Raised when the macro parser encounters malformed input."""

    def __init__(self, reason):
        # Keep the message on the instance so callers can inspect it directly.
        self.reason = reason

    def __str__(self):
        return self.reason
# Class MacroParser
###############################################################################
class MacroParser(object):
    """
    The macro language parser and code generator class.

    A recursive-descent parser over tokens produced by Lexer; terminal
    values are looked up through SymbolTable.  parse() appends the
    generated text to self.out_str.
    """

    # a decimal integer with optional sign
    re_is_int = re.compile("^[-+]?[0-9]+$")
    #re_is_hex = re.compile("^(0[xX])?[0-9a-fA-F]+$")
    # a hexadecimal integer with mandatory 0x/0X prefix
    re_is_hex = re.compile("^0[xX][0-9a-fA-F]+$")
    opt = None
    sym = None
    # NOTE(review): class-level attribute, so the same Lexer instance is
    # shared by every MacroParser — fine for sequential use, not reentrant.
    lex = Lexer()

    def __init__(self, opt):
        """Store the options and build the symbol table from them."""
        self.opt = opt
        self.sym = SymbolTable(opt)
        # output accumulator; initialised to "" at the start of parse()
        self.out_str = None

    # The used grammar is:
    #     term: LITERAL
    #         | IDENTIFIER
    #         | '(' or_exp ')'
    #         ;
    #
    #     comp_exp: term OP_COMPARISON term
    #         ;
    #
    #     and_exp: term
    #         | and_exp OP_AND comp_exp
    #         ;
    #
    #     or_exp: and_exp
    #         | or_exp OP_OR and_exp
    #         ;
    #
    #     else_block: /* empty */
    #         | ELSE '{:' data ':}'
    #         ;
    #
    #     elif_blocks: /* empty */
    #         | elif_blocks ELIF '(' or_exp ')' '{:' data ':}'
    #         ;
    #
    #     if_block: IF '(' or_exp ')' '{:' data ':}' elif_blocks else_block
    #         ;
    #
    #     data: /* empty */
    #         | data GIBBERISH
    #         | data IDENTIFIER
    #         | data '{:' data ':}'
    #         | data if_block
    #         ;
    def parse(self, in_str):
        """
        Parse a macro string.

        Raises ParseError if the input ends with an unmatched closing block.
        """
        self.lex.set_str(in_str)
        self.out_str = ""
        self._parse_data(do_print = True)
        tok = self.lex.peek()
        if tok != self.lex.tok_EOF:
            raise ParseError("%s: error: misaligned closing block '%s'" % (sys.argv[0], self.lex.text))

    def _parse_data(self, do_print):
        """
        Private top-level parsing function.

        Dispatches on the next token until EOF or a closing block;
        do_print controls whether output is emitted.
        """
        tok = self.lex.peek()
        while tok != self.lex.tok_EOF:
            if tok == self.lex.tok_gibberish:
                self._parse_gibberish(do_print)
            elif tok == self.lex.tok_block_open:
                self._parse_data_block(do_print)
            elif tok == self.lex.tok_identifier and self.lex.text == "if":
                self._parse_if_block(do_print)
            elif tok == self.lex.tok_identifier:
                self._parse_identifier(do_print)
            elif tok == self.lex.tok_block_close:
                # let the enclosing _parse_data_block consume the closer
                return
            else:
                raise ParseError("%s: error: wrong token '%s'" % (sys.argv[0], self.lex.text))
            tok = self.lex.peek()

    def _parse_gibberish(self, do_print):
        """
        Parse gibberish.
        Actually, just print the characters in 'text' if do_print is True.
        """
        if do_print:
            self.out_str = self.out_str + self.lex.text
        self.lex.advance()

    def _parse_identifier(self, do_print):
        """
        Parse an identifier and splice its expansion back into the input.
        """
        try:
            sym_value = self.sym.getTerminal(self.lex.text)
        except LookupError:
            raise ParseError("%s: error: unknown terminal '%s'" % (sys.argv[0], self.lex.text))
        # if sym_value == None:
        #     sym_value = 'Undefined'
        self.lex.advance()
        if do_print:
            # re-feed the expansion so it is parsed recursively
            self.lex.prepend(sym_value)

    def _parse_if_block(self, do_print):
        """
        Parse an if block (if / elif* / else?).
        """
        # parse the expression following the 'if' and the associated block.
        exp_res = self._parse_conditional_block(do_print)
        # once a branch has printed, all following branches are suppressed
        do_print = do_print and not exp_res
        # try $elif
        tok = self.lex.peek()
        while tok == self.lex.tok_identifier and self.lex.text == "elif":
            exp_res = self._parse_conditional_block(do_print)
            do_print = do_print and not exp_res
            tok = self.lex.peek()
        # try $else
        if tok == self.lex.tok_identifier and self.lex.text == "else":
            # get rid of the tok_identifier, 'else' and following spaces
            self.lex.advance()
            self.lex.delete_spaces()
            # expect a data block
            self._parse_data_block(do_print)

    def _parse_conditional_block(self, do_print):
        """
        Parse a conditional block (such as $if or $elif).
        Return the truth value of the expression.
        """
        # get rid of the tok_identifier, 'if' or 'elif'
        self.lex.advance()
        self.lex.set_state(self.lex.state_expr)
        # expect an open parenthesis
        tok = self.lex.peek()
        if tok != self.lex.tok_par_open:
            raise ParseError("%s: error: open parenthesis expected: '%s'" % (sys.argv[0], self.lex.text))
        self.lex.advance()
        # parse the boolean expression
        exp_res = self._parse_exp_or()
        # expect a closed parenthesis
        tok = self.lex.peek()
        if tok != self.lex.tok_par_close:
            raise ParseError("%s: error: closed parenthesis expected: '%s'" % (sys.argv[0], self.lex.text))
        self.lex.advance()
        # get rid of eventual spaces, and switch back to gibberish.
        self.lex.delete_spaces()
        self.lex.set_state(self.lex.state_gibberish)
        # expect a data block; only print it when the expression was true
        self._parse_data_block(do_print and exp_res)
        # get rid of eventual spaces
        # but only if followed by $if, $else or $elif
        self.lex.delete_spaces(skip_unconditional = False)
        return exp_res

    def _parse_data_block(self, do_print):
        """
        Parse a '{:' data ':}' block.
        """
        # expect an open block
        tok = self.lex.peek()
        if tok != self.lex.tok_block_open:
            raise ParseError("%s: error: open block expected: '%s'" % (sys.argv[0], self.lex.text))
        self.lex.advance(skip_nl = True)
        # more data follows...
        self._parse_data(do_print)
        # expect a closed block
        tok = self.lex.peek()
        if tok != self.lex.tok_block_close:
            raise ParseError("%s: error: closed block expected: '%s'" % (sys.argv[0], self.lex.text))
        self.lex.advance(skip_nl = True)

    def _parse_exp_or(self):
        """
        Parse a boolean 'or' expression.
        """
        ret = False
        while True:
            ret = self._parse_exp_and() or ret
            # is the expression terminated?
            tok = self.lex.peek()
            if tok == self.lex.tok_par_close:
                return ret
            # expect an 'or' token.
            elif tok == self.lex.tok_or:
                self.lex.advance()
            # everything else is the end of the expression.
            # Let the calling function worry about error reporting.
            else:
                return ret
        # NOTE(review): unreachable — the while True above always returns
        return False

    def _parse_exp_and(self):
        """
        Parse a boolean 'and' expression.
        """
        ret = True
        while True:
            ret = self._parse_exp_comparison() and ret
            # is the expression terminated?
            tok = self.lex.peek()
            if tok == self.lex.tok_par_close:
                return ret
            # expect an 'and' token.
            elif tok == self.lex.tok_and:
                self.lex.advance()
            # everything else is a parse error.
            else:
                return ret
        #       raise ParseError("Unexpected token '%s'" % self.lex.text)
        # NOTE(review): unreachable — the while True above always returns
        return False

    def _parse_exp_comparison(self):
        """
        Parse a boolean comparison (term OP term) and return its truth value.
        """
        # left hand side of the comparison
        lhs = self._parse_exp_term()
        # expect a comparison
        tok = self.lex.peek()
        if tok != self.lex.tok_op:
            raise ParseError("%s: error: operator expected: '%s'" % (sys.argv[0], self.lex.text))
        operator = self.lex.text
        self.lex.advance()
        # right hand side of the comparison
        rhs = self._parse_exp_term()
        # if both operands are numbers, convert them so the comparison
        # is numeric rather than lexicographic
        num_l = self._get_num(lhs)
        num_r = self._get_num(rhs)
        if num_l != None and num_r != None:
            lhs = num_l
            rhs = num_r
        # now calculate the result of the comparison, whatever that means
        if operator == "<=":
            ret = lhs <= rhs
        elif operator == "<":
            ret = lhs < rhs
        elif operator == "==":
            ret = lhs == rhs
        elif operator == "!=":
            ret = lhs != rhs
        elif operator == ">=":
            ret = lhs >= rhs
        elif operator == ">":
            ret = lhs > rhs
        else:
            raise ParseError("%s: error: unknow operator: '%s'" % (sys.argv[0], self.lex.text))
        return ret

    def _parse_exp_term(self):
        """
        Parse a terminal: identifier, string, number, or parenthesised
        sub-expression.
        """
        tok = self.lex.peek()
        # identifier
        if tok == self.lex.tok_identifier:
            try:
                ret = self.sym.getTerminal(self.lex.text)
            except LookupError:
                raise ParseError("%s: error: unknown terminal '%s'" % (sys.argv[0], self.lex.text))
            if ret == None:
                ret = "Undefined"
        # string
        elif tok == self.lex.tok_str:
            ret = self.lex.text
        # number
        elif tok == self.lex.tok_num:
            ret = self.lex.text
        # parenthesised expression
        elif tok == self.lex.tok_par_open:
            self.lex.advance()
            ret = self._parse_exp_or()
            tok = self.lex.peek()
            if tok != self.lex.tok_par_close:
                raise ParseError("%s: error: closed parenthesis expected: '%s'" % (sys.argv[0], self.lex.text))
        # consume the terminal (or the closing parenthesis)
        self.lex.advance()
        return ret

    def _get_num(self, in_str):
        """
        Check if in_str is a number and return the numeric value,
        or None if it is not numeric.
        """
        ret = None
        if in_str != None:
            m = self.re_is_int.match(in_str)
            if m != None:
                ret = int(in_str)
            m = self.re_is_hex.match(in_str)
            if m != None:
                ret = int(in_str, 16)
        return ret
| 32.471831
| 111
| 0.495265
|
4a085dfa7df2752075b212a72482ac05ddeb9faa
| 3,707
|
py
|
Python
|
contrib/macdeploy/custom_dsstore.py
|
Liquid369/ksoc
|
e5db7b5ee042372e0f5a5cc5a913714d1eb82a51
|
[
"MIT"
] | null | null | null |
contrib/macdeploy/custom_dsstore.py
|
Liquid369/ksoc
|
e5db7b5ee042372e0f5a5cc5a913714d1eb82a51
|
[
"MIT"
] | null | null | null |
contrib/macdeploy/custom_dsstore.py
|
Liquid369/ksoc
|
e5db7b5ee042372e0f5a5cc5a913714d1eb82a51
|
[
"MIT"
] | 1
|
2022-02-01T15:10:19.000Z
|
2022-02-01T15:10:19.000Z
|
#!/usr/bin/env python3
# Copyright (c) 2013-2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
import biplist
from ds_store import DSStore
from mac_alias import Alias
import sys
# Usage: custom_dsstore.py <output .DS_Store path> <volume/package name>
output_file = sys.argv[1]
package_name_ns = sys.argv[2]

# Create (or truncate) the .DS_Store and configure the Finder window
# shown when the DMG is opened.
ds = DSStore.open(output_file, 'w+')
ds['.']['bwsp'] = {
    'ShowStatusBar': False,
    'WindowBounds': '{{300, 280}, {500, 343}}',
    'ContainerShowSidebar': False,
    'SidebarWidth': 0,
    'ShowTabView': False,
    'PreviewPaneVisibility': False,
    'ShowToolbar': False,
    'ShowSidebar': False,
    'ShowPathbar': True
}

# Icon-view options. backgroundImageAlias is a pre-baked macOS Alias
# record pointing at .background/background.tiff inside the image;
# its embedded paths are rewritten below for this package.
icvp = {
    'gridOffsetX': 0.0,
    'textSize': 12.0,
    'viewOptionsVersion': 1,
    'backgroundImageAlias': b'\x00\x00\x00\x00\x02\x1e\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xd1\x94\\\xb0H+\x00\x05\x00\x00\x00\x98\x0fbackground.tiff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x99\xd19\xb0\xf8\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\r\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0b.background\x00\x00\x10\x00\x08\x00\x00\xd1\x94\\\xb0\x00\x00\x00\x11\x00\x08\x00\x00\xd19\xb0\xf8\x00\x00\x00\x01\x00\x04\x00\x00\x00\x98\x00\x0e\x00 \x00\x0f\x00b\x00a\x00c\x00k\x00g\x00r\x00o\x00u\x00n\x00d\x00.\x00t\x00i\x00f\x00f\x00\x0f\x00\x02\x00\x00\x00\x12\x00\x1c/.background/background.tiff\x00\x14\x01\x06\x00\x00\x00\x00\x01\x06\x00\x02\x00\x00\x0cMacintosh HD\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xce\x97\xab\xc3H+\x00\x00\x01\x88[\x88\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02u\xab\x8d\xd1\x94\\\xb0devrddsk\xff\xff\xff\xff\x00\x00\t \x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07bitcoin\x00\x00\x10\x00\x08\x00\x00\xce\x97\xab\xc3\x00\x00\x00\x11\x00\x08\x00\x00\xd1\x94\\\xb0\x00\x00\x00\x01\x00\x14\x01\x88[\x88\x00\x16\xa9\t\x00\x08\xfaR\x00\x08\xfaQ\x00\x02d\x8e\x00\x0e\x00\x02\x00\x00\x00\x0f\x00\x1a\x00\x0c\x00M\x00a\x00c\x00i\x00n\x00t\x00o\x00s\x00h\x00 \x00H\x00D\x00\x13\x00\x01/\x00\x00\x15\x00\x02\x00\x14\xff\xff\x00\x00\xff\xff\x00\x00',
    'backgroundColorBlue': 1.0,
    'iconSize': 96.0,
    'backgroundColorGreen': 1.0,
    'arrangeBy': 'none',
    'showIconPreview': True,
    'gridSpacing': 100.0,
    'gridOffsetY': 0.0,
    'showItemInfo': False,
    'labelOnBottom': True,
    'backgroundType': 2,
    'backgroundColorRed': 1.0
}
# Patch the alias so every path refers to this package's volume and
# temporary DMG instead of the original build machine's paths.
alias = Alias.from_bytes(icvp['backgroundImageAlias'])
alias.volume.name = package_name_ns
alias.volume.posix_path = '/Volumes/' + package_name_ns
alias.volume.disk_image_alias.target.filename = package_name_ns + '.temp.dmg'
alias.volume.disk_image_alias.target.carbon_path = 'Macintosh HD:Users:\x00bitcoinuser:\x00Documents:\x00bitcoin:\x00bitcoin:\x00' + package_name_ns + '.temp.dmg'
alias.volume.disk_image_alias.target.posix_path = 'Users/bitcoinuser/Documents/bitcoin/bitcoin/' + package_name_ns + '.temp.dmg'
alias.target.carbon_path = package_name_ns + ':.background:\x00background.tiff'
icvp['backgroundImageAlias'] = biplist.Data(alias.to_bytes())
ds['.']['icvp'] = icvp

# NOTE(review): 'vSrn' appears to be a Finder view-settings marker — confirm
# against the ds_store documentation before changing.
ds['.']['vSrn'] = ('long', 1)
# Fixed icon positions inside the DMG window.
ds['Applications']['Iloc'] = (370, 156)
ds['KSOC-Qt.app']['Iloc'] = (128, 156)
ds.flush()
ds.close()
| 61.783333
| 1,817
| 0.724036
|
4a085e64f0e5abe6717aef9e89d187e43ecdfbf8
| 182
|
py
|
Python
|
backend/authentication/serializers.py
|
vieirafrancisco/ProjExt-web
|
761a60a1842c815cbd00e0faabf3f6af32fcb005
|
[
"MIT"
] | null | null | null |
backend/authentication/serializers.py
|
vieirafrancisco/ProjExt-web
|
761a60a1842c815cbd00e0faabf3f6af32fcb005
|
[
"MIT"
] | null | null | null |
backend/authentication/serializers.py
|
vieirafrancisco/ProjExt-web
|
761a60a1842c815cbd00e0faabf3f6af32fcb005
|
[
"MIT"
] | null | null | null |
from rest_framework import serializers
from .models import User
class UserSerializer(serializers.ModelSerializer):
    """Serialize every field of the User model for the REST API.

    NOTE(review): "__all__" also exposes sensitive fields such as the
    password hash if User has them — confirm this is intended.
    """

    class Meta:
        model = User
        fields = "__all__"
| 18.2
| 50
| 0.71978
|
4a085eaf3d1b5b30a66fedcecbd6e6bca5f97465
| 17,390
|
py
|
Python
|
src/reader/_sqlite_utils.py
|
lemon24/reader
|
d226baa5d320bfedee786163730fe23871414ede
|
[
"BSD-3-Clause"
] | 205
|
2018-07-14T12:54:21.000Z
|
2022-03-29T06:47:13.000Z
|
src/reader/_sqlite_utils.py
|
lemon24/reader
|
d226baa5d320bfedee786163730fe23871414ede
|
[
"BSD-3-Clause"
] | 275
|
2018-01-28T20:57:13.000Z
|
2022-03-29T21:45:11.000Z
|
src/reader/_sqlite_utils.py
|
lemon24/reader
|
d226baa5d320bfedee786163730fe23871414ede
|
[
"BSD-3-Clause"
] | 12
|
2021-01-01T17:15:53.000Z
|
2022-03-22T09:38:12.000Z
|
"""
sqlite3 utilities. Contains no business logic.
"""
import functools
import sqlite3
import time
import traceback
from contextlib import closing
from contextlib import contextmanager
from dataclasses import dataclass
from datetime import datetime
from typing import Any
from typing import Callable
from typing import cast
from typing import Dict
from typing import Iterator
from typing import no_type_check
from typing import Optional
from typing import Sequence
from typing import Tuple
from typing import TypeVar
# Constrained TypeVar over the Python types stored in SQLite columns here.
SQLiteType = TypeVar('SQLiteType', None, int, float, str, bytes, datetime)
@contextmanager
def ddl_transaction(db: sqlite3.Connection) -> Iterator[sqlite3.Connection]:
    """Run a block of DDL statements as one atomic transaction.

    Usage:

        with ddl_transaction(db):
            db.execute(...)
            db.execute(...)

    sqlite3's implicit transaction handling historically committed before
    a DDL statement (https://bugs.python.org/issue10740); that was fixed
    in Python 3.6, but some statement sequences still auto-commit, so DDL
    run under a plain ``with db:`` block can survive a rollback:

        db = sqlite3.connect(':memory:')
        try:
            with db:
                db.execute("create table t (a, b);")
                1 / 0
        except ZeroDivisionError:
            pass
        # table t exists even if it shouldn't

    Taking manual control (isolation_level = None plus explicit
    BEGIN/COMMIT/ROLLBACK) sidesteps all of that.

    Note: ddl_transaction() does not work with executescript().

    https://docs.python.org/3.5/library/sqlite3.html#controlling-transactions
    """
    # originally adapted from
    # https://github.com/lemon24/boomtime/blob/master/boomtime/db.py
    saved_isolation_level = db.isolation_level
    try:
        db.isolation_level = None
        db.execute("BEGIN;")
        yield db
        db.execute("COMMIT;")
    except Exception:
        # any failure in the block (or in COMMIT itself) rolls back
        db.execute("ROLLBACK;")
        raise
    finally:
        db.isolation_level = saved_isolation_level
@contextmanager
def wrap_exceptions(
    exc_type: Callable[[str], Exception], message: str = "unexpected error"
) -> Iterator[None]:
    """Re-raise selected sqlite3 exceptions as *exc_type*.

    Only exceptions that look like environment/user problems (rather than
    programming bugs or domain errors, which have their own exceptions)
    are translated; everything else propagates unchanged.  The DB-API
    exception hierarchy is fuzzy and the SQLite result code is not
    available, so the matching is heuristic.

    Full discussion at https://github.com/lemon24/reader/issues/21
    """
    try:
        yield
    except sqlite3.OperationalError as err:
        raise exc_type(message) from err
    except sqlite3.ProgrammingError as err:
        if "cannot operate on a closed database" in str(err).lower():
            raise exc_type("operation on closed database") from None
        raise
    except sqlite3.DatabaseError as err:
        # most sqlite3 exceptions subclass DatabaseError; only translate
        # the bare class itself (SQLITE_CORRUPT surfaces this way, either
        # on connect() or later).
        # test_database_error_other should test both branches of this, but doesn't for some reason
        if type(err) is sqlite3.DatabaseError:  # pragma: no cover
            if "file is not a database" in str(err).lower():
                raise exc_type(message) from err
        raise
# Generic callable aliases used to type the decorator below.
FuncType = Callable[..., Any]
F = TypeVar('F', bound=FuncType)
def wrap_exceptions_iter(exc_type: Callable[[str], Exception]) -> Callable[[F], F]:
    """Like wrap_exceptions(), but for generators.

    Returns a decorator; the wrapped generator yields through
    wrap_exceptions(exc_type), so sqlite3 errors raised while iterating
    are translated the same way.
    """
    def decorator(fn: F) -> F:
        @functools.wraps(fn)
        def wrapper(*args, **kwargs):  # type: ignore
            # keep the context manager active for the whole iteration
            with wrap_exceptions(exc_type):
                yield from fn(*args, **kwargs)
        return cast(F, wrapper)
    return decorator
@contextmanager
def foreign_keys_off(db: sqlite3.Connection) -> Iterator[sqlite3.Connection]:
    """Temporarily disable foreign key enforcement.

    Needed for schema changes ALTER TABLE cannot express
    (https://sqlite.org/lang_altertable.html#otheralter); afterwards run
    foreign_key_check(), preferably inside a transaction.

    Must be used outside transactions: per
    https://sqlite.org/foreignkeys.html#fk_enable, toggling the pragma in
    the middle of a multi-statement transaction is silently ignored.
    """
    # TODO: this assert should fail with DBError
    assert not db.in_transaction, "foreign_keys_off must be used outside transactions"
    # TODO: this assignment should fail with DBError
    (previous,) = db.execute("PRAGMA foreign_keys;").fetchone()
    try:
        db.execute("PRAGMA foreign_keys = OFF;")
        yield db
    finally:
        # restore whatever was in effect before
        db.execute(f"PRAGMA foreign_keys = {'ON' if previous else 'OFF'};")
def foreign_key_check(db: sqlite3.Connection) -> None:
    """Check foreign key constraint violations.

    Raises:
        IntegrityError: If there were any violations.
    """
    violations = db.execute("PRAGMA foreign_key_check;").fetchall()
    if violations:
        # TODO: More details regarding what failed.
        raise IntegrityError("FOREIGN KEY constraint failed")
class DBError(Exception):
    """Base class for database errors; subclasses override display_name."""

    display_name = "database error"

    def __str__(self) -> str:
        # prefix the underlying message with a human-readable category
        return f"{self.display_name}: {super().__str__()}"
# Raised when the stored schema version is newer than, or unreachable
# from, the version this code expects.
class SchemaVersionError(DBError):
    display_name = "schema version error"


# Raised on constraint violations (e.g. failed foreign key checks).
class IntegrityError(DBError):
    display_name = "integrity error"


# Raised when the SQLite library/runtime does not meet requirements.
class RequirementError(DBError):
    display_name = "database requirement error"


# Raised when PRAGMA application_id does not match the expected value.
class IdError(DBError):
    display_name = "application id error"


# Public list of the error classes callers may want to catch wholesale.
db_errors = [DBError, SchemaVersionError, IntegrityError, RequirementError]
# Signature shared by the schema-creation callable and migration steps.
_DBFunction = Callable[[sqlite3.Connection], None]


@dataclass
class HeavyMigration:
    """Create-or-migrate logic keyed on PRAGMA user_version / application_id."""

    create: _DBFunction  # creates the full schema from scratch
    version: int  # must be positive
    migrations: Dict[int, _DBFunction]  # from_version -> one migration step
    id: int = 0  # expected application_id; 0 disables the id check

    def migrate(self, db: sqlite3.Connection) -> None:
        """Create the schema, or migrate it one version at a time.

        Raises IdError, DBError, SchemaVersionError, or IntegrityError
        when the database does not match expectations.
        """
        # pseudo-code for how the application_id is handled:
        # https://github.com/lemon24/reader/issues/211#issuecomment-778392468
        # unlike there, we allow bypassing it for testing
        with foreign_keys_off(db), ddl_transaction(db):
            if self.id:
                id = self.get_id(db)
                if id and id != self.id:
                    raise IdError(f"invalid id: 0x{id:x}")
            version = self.get_version(db)
            if not version:
                # avoid clobbering a database with application_id
                if table_count(db) != 0:
                    # TODO: maybe use a custom exception here?
                    raise DBError("database with no version already has tables")
                self.create(db)
                self.set_version(db, self.version)
                self.set_id(db, self.id)
                return
            if version == self.version:
                if self.id:
                    if not id:
                        raise IdError("database with version has missing id")
                return
            if version > self.version:
                raise SchemaVersionError(f"invalid version: {version}")
            # version < self.version
            # the actual migration code;
            #
            # might clobber a database if all of the below are true:
            #
            # * an application_id was not used from the start
            # * the database has a non-zero version which predates
            #   the migration which set application_id
            # * all of the migrations succeed
            for from_version in range(version, self.version):
                to_version = from_version + 1
                migration = self.migrations.get(from_version)
                if migration is None:
                    raise SchemaVersionError(
                        f"no migration from {from_version} to {to_version}; "
                        f"expected migrations for all versions "
                        f"later than {version}"
                    )
                # bump the version first, so a failed step leaves an
                # obviously in-between state inside the rolled-back txn
                self.set_version(db, to_version)
                migration(db)
                try:
                    foreign_key_check(db)
                except IntegrityError as e:
                    raise IntegrityError(
                        f"after migrating to version {to_version}: {e}"
                    ) from None
            if self.id:
                id = self.get_id(db)
                if id != self.id:
                    raise IdError(f"missing or invalid id after migration: 0x{id:x}")

    @staticmethod
    def get_version(db: sqlite3.Connection) -> int:
        # stored in PRAGMA user_version
        return get_int_pragma(db, 'user_version')

    @staticmethod
    def set_version(db: sqlite3.Connection, version: int) -> None:
        set_int_pragma(db, 'user_version', version)

    @staticmethod
    def get_id(db: sqlite3.Connection) -> int:
        # stored in PRAGMA application_id
        return get_int_pragma(db, 'application_id')

    @staticmethod
    def set_id(db: sqlite3.Connection, id: int) -> None:
        set_int_pragma(db, 'application_id', id)
def get_int_pragma(db: sqlite3.Connection, pragma: str) -> int:
    """Return the integer value of *pragma* for this connection."""
    (result,) = db.execute(f"PRAGMA {pragma};").fetchone()
    assert isinstance(result, int), result  # for mypy
    return result
def set_int_pragma(
    db: sqlite3.Connection, pragma: str, value: int, lower_bound: Optional[int] = 0
) -> None:
    """Set an integer pragma on *db*, with validation.

    Args:
        db: The connection to change.
        pragma: Pragma name (trusted input; interpolated into the statement).
        value: The new value; must be an int.
        lower_bound: Smallest accepted value, or None to skip the bound check.

    Raises:
        ValueError: If value is not an int or is below lower_bound.
    """
    if not isinstance(value, int):
        raise ValueError(f"{pragma} must be an integer, got {value!r}")
    # lower_bound was annotated plain `int`, but None is explicitly handled
    # below; Optional[int] reflects the actual contract.
    if lower_bound is not None and value < lower_bound:
        raise ValueError(f"{pragma} must be >={lower_bound}, got {value!r}")
    # value is a validated int, so the f-string interpolation cannot inject SQL
    db.execute(f"PRAGMA {pragma} = {value};")
def table_count(db: sqlite3.Connection) -> int:
    """Return the number of rows in sqlite_master (schema objects)."""
    (count,) = db.execute("select count(*) from sqlite_master;").fetchone()
    assert isinstance(count, int), count  # for mypy
    return count
def require_version(db: sqlite3.Connection, version_info: Tuple[int, ...]) -> None:
    """Check that the SQLite library backing *db* is new enough.

    Args:
        db: Connection whose runtime library version is checked.
        version_info: Minimum required version, e.g. (3, 15).

    Raises:
        RequirementError: If the runtime library is older than version_info.
    """
    with closing(db.cursor()) as cursor:
        # TODO: this assignment should fail with DBError
        ((version,),) = cursor.execute("SELECT sqlite_version();")
    version_ints = tuple(int(i) for i in version.split('.'))
    if version_info > version_ints:
        raise RequirementError(
            "at least SQLite version {} required, {} installed".format(
                ".".join(str(i) for i in version_info),
                # report the version actually queried from this connection;
                # the original used the module-level sqlite3.sqlite_version_info,
                # which could in principle describe a different library than
                # the one the check was performed against
                ".".join(str(i) for i in version_ints),
            )
        )
def require_compile_options(db: sqlite3.Connection, options: Sequence[str]) -> None:
    """Check that SQLite was built with all of *options*.

    Raises:
        RequirementError: If any required compile option is absent.
    """
    with closing(db.cursor()) as cursor:
        available = {row[0] for row in cursor.execute("PRAGMA compile_options;")}
    absent = set(options) - available
    if absent:
        raise RequirementError(
            f"required SQLite compile options missing: {sorted(absent)}"
        )
def setup_db(
    db: sqlite3.Connection,
    *,
    create: _DBFunction,
    version: int,
    migrations: Dict[int, _DBFunction],
    id: int,
    minimum_sqlite_version: Tuple[int, ...],
    required_sqlite_compile_options: Sequence[str] = (),
    wal_enabled: Optional[bool] = None,
) -> None:
    """Check requirements, configure the connection, create/migrate the schema.

    wal_enabled: True forces WAL journaling, False forces DELETE,
    None leaves the current journal mode untouched.
    """
    require_version(db, minimum_sqlite_version)
    require_compile_options(db, required_sqlite_compile_options)
    with closing(db.cursor()) as cursor:
        cursor.execute("PRAGMA foreign_keys = ON;")
        # Can't do this in a transaction, so we just do it all the time.
        #
        # Also, every cursor up to here must be closed explicitly, otherwise
        # we get an "cannot commit transaction - SQL statements in progress"
        # on PyPy.
        #
        # https://github.com/lemon24/reader/issues/169
        #
        if wal_enabled is not None:
            if wal_enabled:
                cursor.execute("PRAGMA journal_mode = WAL;")
            else:
                cursor.execute("PRAGMA journal_mode = DELETE;")
    migration = HeavyMigration(create, version, migrations, id)
    migration.migrate(db)
def rowcount_exactly_one(
    cursor: sqlite3.Cursor, make_exc: Callable[[], Exception]
) -> None:
    """Assert the last statement affected exactly one row.

    Raises the exception built by *make_exc* when zero rows were affected.
    """
    affected = cursor.rowcount
    if not affected:
        raise make_exc()
    assert affected == 1, "shouldn't have more than 1 row"
# BEGIN DebugConnection
# No type annotations or coverage for this;
# its only used for debugging and not exposed publicly.
@no_type_check
def _make_debug_method_wrapper(method, stmt=False):  # pragma: no cover
    """Wrap *method* so each call is logged via self._log with timing info.

    method: a bound-style callable, or a plain string for pseudo-events
    (e.g. '__del__', '~trace') that are logged but not invoked.
    stmt: if True, record the first positional argument as the SQL statement.
    """
    @functools.wraps(method)
    def wrapper(self, *args):
        data = {
            'method': method if isinstance(method, str) else method.__name__,
            'start': time.time(),
        }
        if stmt:
            data['stmt'] = args[0] if args else None
        try:
            # record who called us (the frame just above this wrapper)
            tb = traceback.extract_stack()
            frame = tb[-2]
            data['caller'] = frame.filename, frame.name
        except IndexError:
            pass
        # cursors reach their connection's flag; connections use their own
        try:
            io_counters = self.connection._io_counters
        except AttributeError:
            io_counters = self._io_counters
        if io_counters:
            fields = ['read_count', 'write_count', 'read_bytes', 'write_bytes']
            # psutil is optional; without it (or on platforms lacking
            # io_counters) the counters are silently skipped
            try:
                import psutil  # type: ignore
                process = psutil.Process()
            except ImportError:
                process = None
            try:
                start_io_counters = process.io_counters()
            except AttributeError:
                pass
        start = time.perf_counter()
        try:
            if callable(method):
                return method(self, *args)
        except Exception as e:
            data['exception'] = f"{type(e).__module__}.{type(e).__qualname__}: {e}"
            raise
        finally:
            end = time.perf_counter()
            data['duration'] = end - start
            if io_counters:
                try:
                    end_io_counters = process.io_counters()
                    data['io_counters'] = {
                        f: getattr(end_io_counters, f) - getattr(start_io_counters, f)
                        for f in fields
                    }
                except AttributeError:
                    pass
            # log even when the call raised
            self._log(data)
    return wrapper
@no_type_check
def _make_debug_connection_cls():  # pragma: no cover
    """Build and return the DebugConnection class.

    The classes are created inside a function to work around
    typing.no_type_check not supporting classes (yet);
    https://github.com/python/mypy/issues/607
    """
    class DebugCursor(sqlite3.Cursor):
        def _log(self, data):
            # can't rely on id(self) as it's likely to be reused
            data['cursor'] = self._id
            self.connection._log(data)

        execute = _make_debug_method_wrapper(sqlite3.Cursor.execute, stmt=True)
        executemany = _make_debug_method_wrapper(sqlite3.Cursor.executemany, stmt=True)
        close = _make_debug_method_wrapper(sqlite3.Cursor.close)
        __del__ = _make_debug_method_wrapper('__del__')

    class DebugConnection(sqlite3.Connection):
        """sqlite3 connection subclass for debugging stuff.

        >>> debug = logging.getLogger('whatever').debug
        >>> class MyDebugConnection(DebugConnection):
        ...     _log_method = staticmethod(lambda data: debug(json.dumps(data)))
        ...     _set_trace = True
        ...
        >>> db = sqlite3.connect('', factory=MyDebugConnection)
        """

        _set_trace = False      # install a statement trace callback?
        _io_counters = False    # collect process I/O counters per call?

        @staticmethod
        def _log_method(data):
            # subclasses must provide the actual sink
            raise NotImplementedError

        _cursor_factory = DebugCursor

        def __init__(self, *args, **kwargs):
            super().__init__(*args, **kwargs)
            self._next_cursor_id = 0
            if self._set_trace:
                trace_wrapper = _make_debug_method_wrapper('~trace', stmt=True)

                def trace(stmt):
                    return trace_wrapper(self, stmt)

                self.set_trace_callback(trace)

        def _log(self, data):
            # less likely for this to be the same address
            data['connection'] = id(self)
            self._log_method(data)

        def cursor(self, factory=None):
            if factory:
                raise NotImplementedError("cursor(factory=...) not supported")
            cursor = super().cursor(factory=self._cursor_factory)
            # sequential ids survive address reuse, unlike id(cursor)
            cursor._id = self._next_cursor_id
            self._next_cursor_id += 1
            return cursor

        close = _make_debug_method_wrapper(sqlite3.Connection.close)
        __enter__ = _make_debug_method_wrapper(sqlite3.Connection.__enter__)
        __exit__ = _make_debug_method_wrapper(sqlite3.Connection.__exit__)
        # the sqlite3 objects don't have a __del__
        __del__ = _make_debug_method_wrapper('__del__')

    return DebugConnection


# Public handle; built via the factory to keep no_type_check applied.
DebugConnection = _make_debug_connection_cls()
# END DebugConnection
| 31.79159
| 102
| 0.624382
|
4a085ebf7599f21bafb21a0dd0c5bbb5e93536e3
| 8,236
|
py
|
Python
|
smartcab/smartcab/agent.py
|
man007yadav/Udacity-Machine-Learning-Nanodegree
|
1a82161809a837c38b1bfead6a8c05d074b1d85b
|
[
"MIT"
] | null | null | null |
smartcab/smartcab/agent.py
|
man007yadav/Udacity-Machine-Learning-Nanodegree
|
1a82161809a837c38b1bfead6a8c05d074b1d85b
|
[
"MIT"
] | null | null | null |
smartcab/smartcab/agent.py
|
man007yadav/Udacity-Machine-Learning-Nanodegree
|
1a82161809a837c38b1bfead6a8c05d074b1d85b
|
[
"MIT"
] | null | null | null |
import random
import math
import itertools
from environment import Agent, Environment
from planner import RoutePlanner
from simulator import Simulator
class LearningAgent(Agent):
""" An agent that learns to drive in the Smartcab world.
This is the object you will be modifying. """
def __init__(self, env, learning=False, epsilon=1.0, alpha=0.5):
    """Initialize the agent and pre-build a Q-table over all states.

    env -- the Environment the agent acts in
    learning -- whether Q-learning updates are performed
    epsilon -- initial random-exploration probability
    alpha -- learning rate (also used as the epsilon decay rate in reset())
    """
    super(LearningAgent, self).__init__(env)     # Set the agent in the environment
    self.planner = RoutePlanner(self.env, self)  # Create a route planner
    self.valid_actions = self.env.valid_actions  # The set of valid actions

    # Set parameters of the learning agent
    self.learning = learning  # Whether the agent is expected to learn
    self.Q = dict()           # Create a Q-table which will be a dictionary of tuples
    self.epsilon = epsilon    # Random exploration factor
    self.alpha = alpha        # Learning factor

    ###########
    ## TO DO ##
    ###########
    # Set any additional class parameters as needed
    self.t = 0  # completed-trial counter driving the epsilon decay in reset()
    # per-dimension value sets: light, left, waypoint, oncoming
    self.state_def = [
        ['red', 'green'],
        ['left', 'right', 'forward', None],
        ['left', 'right', 'forward'],
        ['left', 'right', 'forward', None]
    ]
    # build Q table: every possible state tuple starts with zeroed actions
    self.template_q = dict((k, 0) for k in self.valid_actions)
    for state_tuple in itertools.product(*self.state_def):
        self.Q[state_tuple] = self.template_q.copy()
def reset(self, destination=None, testing=False):
    """ The reset function is called at the beginning of each trial.
        'testing' is set to True if testing trials are being used
        once training trials have completed. """

    # Select the destination as the new location to route to
    self.planner.route_to(destination)

    ###########
    ## TO DO ##
    ###########
    # Update epsilon using a decay function of your choice
    # Update additional class parameters as needed
    # If 'testing' is True, set epsilon and alpha to 0
    if testing is True:
        # no exploration and no learning during the testing phase
        self.epsilon = 0
        self.alpha = 0
    else:
        # negative exponential decay: epsilon = e^(-alpha * t)
        self.epsilon = math.exp(-self.alpha * self.t)
        self.t += 1
    return None
def build_state(self):
    """ The build_state function is called when the agent requests data from the
        environment. The next waypoint, the intersection inputs, and the deadline
        are all features available to the agent. """

    # Collect data about the environment
    waypoint = self.planner.next_waypoint()  # The next waypoint
    inputs = self.env.sense(self)            # Visual input - intersection light and traffic
    deadline = self.env.get_deadline(self)   # Remaining deadline (not part of the state)

    ###########
    ## TO DO ##
    ###########
    # Set 'state' as a tuple of relevant data for the agent
    # order matches self.state_def: light, left, waypoint, oncoming
    state = (inputs['light'], inputs['left'], waypoint, inputs['oncoming'])
    return state
def get_maxQ(self, state):
    """Return the highest Q-value for *state* together with the list of
    actions that attain it (in Q-table insertion order)."""
    action_values = self.Q[state]
    maxQ = max(action_values.values())
    # Collect every action tied for the maximum so the caller can
    # break ties however it likes.
    maxQ_actions = [action for action, q in action_values.items() if q == maxQ]
    return maxQ, maxQ_actions
def createQ(self, state):
    """Ensure *state* has an entry in the Q-table (learning mode only).

    New states receive a fresh copy of the zero-initialised action
    template; existing entries are left untouched.
    """
    if self.learning is False:
        # Not learning: the Q-table is never grown.
        return
    if state not in self.Q:
        self.Q[state] = self.template_q.copy()
    return
def choose_action(self, state):
    """Pick the next action for *state*.

    When not learning, or with probability epsilon when learning, a
    random valid action is chosen (exploration); otherwise one of the
    actions with the highest Q-value is chosen at random (exploitation).
    Also records the current state and next waypoint on the agent.
    """
    self.state = state
    self.next_waypoint = self.planner.next_waypoint()
    # Short-circuit: random.random() is only drawn when learning is on.
    if not self.learning or random.random() <= self.epsilon:
        return random.choice(self.valid_actions)
    _, maxQ_actions = self.get_maxQ(state)
    # Break ties among the best-valued actions at random.
    return random.choice(maxQ_actions)
def learn(self, state, action, reward):
    """Blend *reward* into Q[state][action] at learning rate alpha.

    Implements the discount-free value update
    Q <- alpha * reward + (1 - alpha) * Q; future rewards are
    deliberately ignored. No-op when learning is disabled.
    """
    if not self.learning:
        return
    current = self.Q[state][action]
    self.Q[state][action] = reward * self.alpha + current * (1 - self.alpha)
    return
def update(self):
""" The update function is called when a time step is completed in the
environment for a given trial. This function will build the agent
state, choose an action, receive a reward, and learn if enabled. """
# One full sense -> (maybe extend Q-table) -> decide -> act -> learn
# cycle per simulation step.
state = self.build_state() # Get current state
self.createQ(state) # Create 'state' in Q-table
action = self.choose_action(state) # Choose an action
reward = self.env.act(self, action) # Receive a reward
self.learn(state, action, reward) # Q-learn
return
def run():
""" Driving function for running the simulation.
Press ESC to close the simulation, or [SPACE] to pause the simulation. """
##############
# Create the environment
# Flags:
# verbose - set to True to display additional output from the simulation
# num_dummies - discrete number of dummy agents in the environment, default is 100
# grid_size - discrete number of intersections (columns, rows), default is (8, 6)
env = Environment()
##############
# Create the driving agent
# Flags:
# learning - set to True to force the driving agent to use Q-learning
# * epsilon - continuous value for the exploration factor, default is 1
# * alpha - continuous value for the learning rate, default is 0.5
# NOTE: with the exp(-alpha * t) decay used in reset(), alpha=0.002 keeps
# epsilon above the 0.01 tolerance for roughly ln(100)/0.002 ~= 2300
# training trials before testing begins.
agent = env.create_agent(LearningAgent, learning=True, alpha=0.002, epsilon=1)
##############
# Follow the driving agent
# Flags:
# enforce_deadline - set to True to enforce a deadline metric
env.set_primary_agent(agent, enforce_deadline=True)
##############
# Create the simulation
# Flags:
# update_delay - continuous time (in seconds) between actions, default is 2.0 seconds
# display - set to False to disable the GUI if PyGame is enabled
# log_metrics - set to True to log trial and simulation results to /logs
# optimized - set to True to change the default log file name
sim = Simulator(env, update_delay=0.01, log_metrics=True, optimized=True, display=False)
##############
# Run the simulator
# Flags:
# tolerance - epsilon tolerance before beginning testing, default is 0.05
# n_test - discrete number of testing trials to perform, default is 0
sim.run(n_test=50, tolerance=0.01)
if __name__ == '__main__':
run()
| 36.122807
| 98
| 0.594828
|
4a085f499f7fcb5a77a82379bf755ea01c767ecf
| 8,403
|
py
|
Python
|
bokeh/pivot_table.py
|
tswicegood/bokeh
|
2e74be5c9288306896e8c76af2e14a8c7513e0e3
|
[
"BSD-3-Clause"
] | 2
|
2015-07-23T21:19:52.000Z
|
2016-01-25T17:00:15.000Z
|
bokeh/pivot_table.py
|
csaid/bokeh
|
4312b2de1a15fb24884fcd97eaf6442bf8b4bd7b
|
[
"BSD-3-Clause"
] | null | null | null |
bokeh/pivot_table.py
|
csaid/bokeh
|
4312b2de1a15fb24884fcd97eaf6442bf8b4bd7b
|
[
"BSD-3-Clause"
] | 2
|
2015-12-22T04:13:10.000Z
|
2021-07-06T21:18:04.000Z
|
from pandas import Series, DataFrame
from pandas.core.index import MultiIndex
from pandas.tools.merge import concat
from pandas.tools.util import cartesian_product
from pandas.compat import range, lrange, zip
from pandas import compat
import numpy as np
from six import string_types, iteritems
# Mapping from spreadsheet-style aggregate names to the callables that
# implement them. pivot_table() accepts any of these names as its
# `aggfunc` string argument.
_aggregates = {
"count": len,
"counta": np.count_nonzero,
"countunique": lambda arr: len(np.unique(arr)),
"average": np.average,
"max": np.max,
"min": np.min,
"median": np.median,
"sum": np.sum,
"product": np.product,
"stdev": np.std,
"var": np.var,
}
def pivot_table(data, values=None, rows=None, cols=None, aggfunc=None, fill_value=0):
    """
    Create a spreadsheet-style pivot table as a DataFrame. The levels in the
    pivot table will be stored in MultiIndex objects (hierarchical indexes) on
    the index and columns of the result DataFrame, and an 'All' margin
    row/column is always appended.

    Parameters
    ----------
    data : DataFrame
    values : list of at most one column name to aggregate, optional
    rows : list of column names or arrays to group on
        Keys to group on the x-axis of the pivot table
    cols : list of column names or arrays to group on
        Keys to group on the y-axis of the pivot table
    aggfunc : callable or str, default ``len``
        Either a function applied to each group, or one of the
        spreadsheet-style names defined in ``_aggregates``
        (e.g. "sum", "average", "countunique").
    fill_value : scalar, default 0
        Value to replace missing values with; pass None to keep NaN.

    Examples
    --------
    >>> df
        A   B   C      D
    0   foo one small  1
    1   foo one large  2
    2   foo one large  2
    3   foo two small  3
    4   foo two small  3
    5   bar one large  4
    6   bar one small  5
    7   bar two small  6
    8   bar two large  7

    >>> table = pivot_table(df, values=['D'], rows=['A', 'B'],
    ...                     cols=['C'], aggfunc=np.sum)

    Returns
    -------
    table : DataFrame
    """
    # Normalize arguments; only zero or one value column is supported.
    values = [] if values is None else values
    if len(values) > 1:
        raise ValueError("pivot_table supports at most one 'values' column")
    rows = _convert_by(rows)
    cols = _convert_by(cols)
    keys = rows + cols

    # Resolve the aggregation function: default is a plain group count, and
    # string names are looked up in the _aggregates table.
    if aggfunc is None:
        aggfunc = len
    elif isinstance(aggfunc, string_types):
        aggfunc = _aggregates[aggfunc]

    # Restrict the frame to the columns actually used (group keys + values).
    to_filter = []
    for x in keys + values:
        try:
            if x in data:
                to_filter.append(x)
        except TypeError:
            # Unhashable grouper (e.g. an array passed directly) - keep going.
            pass
    if len(to_filter) < len(data.columns):
        data = data[to_filter]

    grouped = data.groupby(keys)
    agged = grouped.agg(aggfunc)

    # Move the 'cols' index levels from the row index into the columns.
    if agged.index.nlevels > 1:
        to_unstack = [agged.index.names[i] for i in range(len(rows), len(keys))]
        table = agged.unstack(to_unstack)
    else:
        table = agged

    if isinstance(table, DataFrame):
        if isinstance(table.columns, MultiIndex):
            table = table.sortlevel(axis=1)
        else:
            table = table.sort_index(axis=1)

    if fill_value is not None:
        table = table.fillna(value=fill_value, downcast='infer')

    # Always append grand-total ('All') margins.
    table = _add_margins(table, data, values, rows=rows, cols=cols, aggfunc=aggfunc)

    # With no row groupers, transpose so the column groupers end up on rows.
    if len(rows) == 0 and len(cols) > 0:
        table = table.T

    return table
def _add_margins(table, data, values, rows, cols, aggfunc):
# Append the grand-total ('All') row/column margins to a pivoted table.
# Delegates the per-group margin construction to the _generate_marginal_*
# helpers and then attaches the single grand-total row at the bottom.
grand_margin = _compute_grand_margin(data, values, aggfunc)
if not values and isinstance(table, Series):
# If there are no values and the table is a series, then there is only
# one column in the data. Compute grand margin and return it.
row_key = ('All',) + ('',) * (len(rows) - 1) if len(rows) > 1 else 'All'
return table.append(Series({row_key: grand_margin['All']}))
if values:
marginal_result_set = _generate_marginal_results(table, data, values, rows, cols, aggfunc, grand_margin)
if not isinstance(marginal_result_set, tuple):
# Helper returned a finished table (no row margin needed).
return marginal_result_set
result, margin_keys, row_margin = marginal_result_set
else:
marginal_result_set = _generate_marginal_results_without_values(table, data, rows, cols, aggfunc)
if not isinstance(marginal_result_set, tuple):
return marginal_result_set
result, margin_keys, row_margin = marginal_result_set
# Pad the 'All' key with empty strings so it matches the depth of a
# MultiIndex row index.
key = ('All',) + ('',) * (len(rows) - 1) if len(rows) > 1 else 'All'
row_margin = row_margin.reindex(result.columns)
# populate grand margin
for k in margin_keys:
if isinstance(k, compat.string_types):
row_margin[k] = grand_margin[k]
else:
row_margin[k] = grand_margin[k[0]]
# NOTE(review): Series/DataFrame.append and pandas.compat are legacy APIs
# (removed in modern pandas) - this module targets an old pandas version.
margin_dummy = DataFrame(row_margin, columns=[key]).T
row_names = result.index.names
result = result.append(margin_dummy)
# append() drops the index names; restore them.
result.index.names = row_names
return result
def _compute_grand_margin(data, values, aggfunc):
    """Aggregate each value column (or, with no values, the index itself)
    to produce the grand-total entries for the 'All' margin."""
    if not values:
        # No value columns: the margin is a single aggregate of the index.
        return {'All': aggfunc(data.index)}
    grand_margin = {}
    for name, col in iteritems(data[values]):
        try:
            if isinstance(aggfunc, compat.string_types):
                # String aggfunc names a Series method, e.g. 'sum'.
                grand_margin[name] = getattr(col, aggfunc)()
            else:
                grand_margin[name] = aggfunc(col)
        except TypeError:
            # Column type the aggregate cannot handle - omit from margin.
            pass
    return grand_margin
def _generate_marginal_results(table, data, values, rows, cols, aggfunc, grand_margin):
# Build the per-group 'All' margin entries for a pivot table that has value
# columns. Returns either a finished table (when there are cols but no
# rows) or a (result, margin_keys, row_margin) tuple for _add_margins.
if len(cols) > 0:
# need to "interleave" the margins
table_pieces = []
margin_keys = []
def _all_key(key):
# Pad with '' so the key matches the depth of the column MultiIndex.
return (key, 'All') + ('',) * (len(cols) - 1)
if len(rows) > 0:
margin = data[rows + values].groupby(rows).agg(aggfunc)
cat_axis = 1
for key, piece in table.groupby(level=0, axis=cat_axis):
all_key = _all_key(key)
piece[all_key] = margin[key]
table_pieces.append(piece)
margin_keys.append(all_key)
else:
margin = grand_margin
cat_axis = 0
for key, piece in table.groupby(level=0, axis=cat_axis):
all_key = _all_key(key)
table_pieces.append(piece)
table_pieces.append(Series(margin[key], index=[all_key]))
margin_keys.append(all_key)
result = concat(table_pieces, axis=cat_axis)
if len(rows) == 0:
# No rows means no row margin is needed; the table is complete.
return result
else:
result = table
margin_keys = table.columns
if len(cols) > 0:
row_margin = data[cols + values].groupby(cols).agg(aggfunc)
row_margin = row_margin.stack()
# slight hack
# Reorder so the stacked values level comes first, matching the
# result's column MultiIndex ordering -- presumably (values, *cols).
new_order = [len(cols)] + lrange(len(cols))
row_margin.index = row_margin.index.reorder_levels(new_order)
else:
row_margin = Series(np.nan, index=result.columns)
return result, margin_keys, row_margin
def _generate_marginal_results_without_values(table, data, rows, cols, aggfunc):
# Counterpart of _generate_marginal_results for pivots with no value
# columns. Same return convention: a finished table, or a
# (result, margin_keys, row_margin) tuple for _add_margins to complete.
if len(cols) > 0:
# need to "interleave" the margins
margin_keys = []
def _all_key():
if len(cols) == 1:
return 'All'
# Pad with '' to match the depth of the column MultiIndex.
return ('All', ) + ('', ) * (len(cols) - 1)
if len(rows) > 0:
margin = data[rows].groupby(rows).apply(aggfunc)
all_key = _all_key()
table[all_key] = margin
result = table
margin_keys.append(all_key)
else:
margin = data.groupby(level=0, axis=0).apply(aggfunc)
all_key = _all_key()
table[all_key] = margin
result = table
margin_keys.append(all_key)
# With no rows, the table with its 'All' column is already complete.
return result
else:
result = table
margin_keys = table.columns
if len(cols):
row_margin = data[cols].groupby(cols).apply(aggfunc)
else:
row_margin = Series(np.nan, index=result.columns)
return result, margin_keys, row_margin
def _convert_by(by):
if by is None:
by = []
elif (np.isscalar(by) or isinstance(by, (np.ndarray, Series))
or hasattr(by, '__call__')):
by = [by]
else:
by = list(by)
return by
| 30.667883
| 112
| 0.593002
|
4a086138151ddafe282d8c3f7cff813ca5c46c00
| 45,288
|
py
|
Python
|
controllers/plotwindow_ctrl.py
|
endymecy/NDIToolbox
|
f7a0a642b4a778d9d0c131871f4bfb9822ecb3da
|
[
"BSD-4-Clause"
] | 5
|
2017-02-28T16:16:06.000Z
|
2020-07-13T06:49:34.000Z
|
controllers/plotwindow_ctrl.py
|
endymecy/NDIToolbox
|
f7a0a642b4a778d9d0c131871f4bfb9822ecb3da
|
[
"BSD-4-Clause"
] | 1
|
2018-08-19T19:08:14.000Z
|
2018-08-19T19:08:14.000Z
|
controllers/plotwindow_ctrl.py
|
endymecy/NDIToolbox
|
f7a0a642b4a778d9d0c131871f4bfb9822ecb3da
|
[
"BSD-4-Clause"
] | 4
|
2017-10-25T20:17:15.000Z
|
2021-07-26T11:39:50.000Z
|
"""plotwindow_ctrl.py - defines the controller for plotwindow.py
Chris R. Coughlin (TRI/Austin, Inc.)
"""
__author__ = 'Chris R. Coughlin'
from views import dialogs
from views import fetchplugin_dialog
from views import colormapcreator
from models import mainmodel
from models import dataio
from models import ndescanhandler
import models.plotwindow_model as model
import matplotlib
import matplotlib.axes
import wx
import wx.lib.dialogs
from functools import wraps
import os.path
import Queue
module_logger = mainmodel.get_logger(__name__)
def replace_plot(fn):
"""Decorator function - runs the specified function and updates the plot.
Designed to work with PlotWindowController instances.
"""
@wraps(fn)
def wrapped(self, *args, **kwargs):
if self.model.data is not None:
# hold() with no argument toggles the axes' hold state in classic
# matplotlib; toggled off here and back on after the replot -- the
# same toggle is applied to every axes when the view has several.
if isinstance(self.view.axes, matplotlib.axes.Subplot):
self.view.axes.hold()
else:
for ax in self.view.axes:
ax.hold()
# The wrapped function's return value is discarded; the plot is
# rebuilt from whatever it left in self.model.data.
fn(self, *args, **kwargs)
self.plot(self.model.data)
self.refresh_plot()
if isinstance(self.view.axes, matplotlib.axes.Subplot):
self.view.axes.hold()
else:
for ax in self.view.axes:
ax.hold()
return wrapped
class BasicPlotWindowController(object):
"""Base class for PlotWindows"""
# MVC controller: `view` is the wx plot window, `model` wraps the data file.
def __init__(self, view, data_file):
self.view = view
self.axes_grid = True
self.model = model.BasicPlotWindowModel(self, data_file)
self.init_plot_defaults()
module_logger.info("Successfully initialized BasicPlotWindowController.")
@property
def available_plugins(self):
"""Returns a list of available plugins suitable for
inclusion in a wxMenu"""
return self.generate_plugin_dict()
def init_plot_defaults(self):
"""Sets some basic matplotlib configuration parameters
to sane defaults."""
mainmodel.init_matplotlib_defaults()
# Read-only passthroughs to the model's current and as-loaded data sets.
@property
def data(self):
return self.model.data
@property
def original_data(self):
return self.model.original_data
def refresh_plot(self):
"""Forces plot to redraw itself"""
self.view.canvas.draw()
def on_install_plugin(self, evt):
"""Handles request to install a local plugin"""
# Two-step flow: pick a ZIP archive, then confirm/install via the
# FetchPluginDialog; the plugins menu is rebuilt on success.
file_dlg = wx.FileDialog(parent=self.view,
message="Please select a plugin archive to install.",
wildcard="ZIP files (*.zip)|*.zip|All files (*.*)|*.*")
if file_dlg.ShowModal() == wx.ID_OK:
dlg = fetchplugin_dialog.FetchPluginDialog(parent=self.view,
plugin_path=file_dlg.GetPath())
if dlg.ShowModal() == wx.ID_OK:
try:
dlg.install_plugin()
self.view.init_plugins_menu()
except Exception as err:
# Installation failures are reported to the user, not re-raised.
module_logger.error("Unable to install plugin: {0}".format(err))
err_msg = "{0}".format(err)
err_dlg = wx.MessageDialog(self.view, message=err_msg,
caption="Unable To Install Plugin",
style=wx.ICON_ERROR)
err_dlg.ShowModal()
err_dlg.Destroy()
dlg.Destroy()
file_dlg.Destroy()
def on_download_plugin(self, evt):
"""Handles request to download and install a plugin"""
# Same flow as on_install_plugin but the archive is fetched remotely
# by the FetchRemotePluginDialog.
dlg = fetchplugin_dialog.FetchRemotePluginDialog(parent=self.view)
if dlg.ShowModal() == wx.ID_OK:
try:
dlg.install_plugin()
self.view.init_plugins_menu()
except Exception as err:
module_logger.error("Unable to install plugin: {0}".format(err))
err_msg = "{0}".format(err)
err_dlg = wx.MessageDialog(self.view, message=err_msg,
caption="Unable To Install Plugin", style=wx.ICON_ERROR)
err_dlg.ShowModal()
err_dlg.Destroy()
dlg.Destroy()
def on_run_toolkit(self, evt):
"""Handles request to run a plugin"""
# The wx menu item ID doubles as the toolkit ID (see generate_plugin_dict).
self.run_toolkit(evt.GetId())
@replace_plot
def run_toolkit(self, requested_toolkit_id):
"""Runs toolkit with specified ID on current data set,
replaces current data and refreshes plot"""
for toolkit_id, toolkit in self.available_plugins.items():
if requested_toolkit_id == toolkit_id:
# toolkit[0] appears to be the plugin's identifying name passed to
# model.get_plugin -- TODO confirm the plugin record structure.
plugin_class = self.model.get_plugin(toolkit[0])
module_logger.info("Attempt to run plugin {0}".format(plugin_class))
self.run_plugin(plugin_class)
@replace_plot
def run_plugin(self, plugin_cls, **kwargs):
    """Runs plugin of specified class plugin_cls on current data set,
    replaces current data and refreshes plot.

    If the plugin instance exposes a `config` dict, the user is prompted
    to configure it first (a cancelled dialog aborts the run). The plugin
    executes in a separate process and is polled with a cancellable
    progress dialog; results/errors arrive through queues.
    """
    cfg = None
    # Instantiate the plugin to see if it has a self.config dict
    # that should be configured by the user prior to execution
    plugin_instance = plugin_cls()
    if hasattr(plugin_instance, "config"):
        cfg = self.configure_plugin_dlg(plugin_instance)
        if cfg is None:
            # User cancelled configuration - abort without running.
            return
    try:
        plugin_process, plugin_queue, exception_queue = mainmodel.run_plugin(plugin_cls, self.data, cfg, **kwargs)
    except MemoryError:  # Insufficient memory to run plugin with current data
        err_dlg = wx.MessageDialog(self.view, message="Insufficient memory to run plugin.",
                                   caption="Unable To Run Plugin",
                                   style=wx.ICON_ERROR)
        err_dlg.ShowModal()
        err_dlg.Destroy()
        return
    keepGoing = True
    # BUGFIX: create the dialog *before* the try block - the original built it
    # inside the try, so a failed construction raised NameError in `finally`.
    progress_dlg = wx.ProgressDialog("Running Plugin",
                                     "Please wait, executing plugin...",
                                     parent=self.view,
                                     style=wx.PD_CAN_ABORT)
    try:
        while keepGoing:
            wx.MilliSleep(125)
            (keepGoing, skip) = progress_dlg.UpdatePulse()
            try:
                if not plugin_process.is_alive():
                    # Catch low-level exceptions thrown by multiprocessing, such as MemoryError
                    # exceptions raised when attempting to send data through the queue
                    module_logger.error("Unknown error occurred during plugin execution, plugin terminated")
                    err_msg = ' '.join(["An unknown error has occurred running the plugin.",
                                        "Please ensure your system has sufficient memory and disk space to process this data.",
                                        "If the problem persists, please contact the plugin's author."])
                    err_dlg = wx.MessageDialog(self.view, message=err_msg,
                                               caption="Unable To Run Plugin",
                                               style=wx.ICON_ERROR)
                    err_dlg.ShowModal()
                    err_dlg.Destroy()
                    break
                # Non-blocking check for an exception reported by the plugin.
                exc_type, exc = exception_queue.get(block=False)
                err_str = str(exc)
                if len(err_str) == 0:
                    err_str = exc_type.__name__
                module_logger.error("Error occurred running plugin: {0}".format(err_str))
                err_msg = "An error occurred while running the plugin:\n{0}".format(err_str)
                err_dlg = wx.MessageDialog(self.view, message=err_msg,
                                           caption="Unable To Run Plugin",
                                           style=wx.ICON_ERROR)
                err_dlg.ShowModal()
                err_dlg.Destroy()
                break
            except Queue.Empty:
                # No exception reported - keep polling.
                pass
            try:
                returned_data = plugin_queue.get(False)
            except Queue.Empty:
                continue
            if returned_data is not None:
                self.model.data = returned_data
                break
            if not keepGoing:
                break
            # BUGFIX: the wx accessor is GetApp(); the original wx.getApp()
            # raised AttributeError at runtime.
            wx.GetApp().Yield()
    finally:
        plugin_process.join()
        progress_dlg.Destroy()
def on_close(self, evt):
"""Handles request to close plot window"""
self.view.Destroy()
def on_save_data(self, evt):
"""Handles request to save current data set to disk"""
# Default to the directory/name of the currently-loaded data file.
default_path, default_file = os.path.split(self.model.data_file)
wild_card = "NDIToolbox data files (*.hdf5)|*.hdf5|All files (*.*)|*.*"
save_dlg = wx.FileDialog(self.view, message="Save File As...",
defaultDir=default_path,
defaultFile=default_file,
wildcard=wild_card,
style=wx.SAVE | wx.OVERWRITE_PROMPT)
if save_dlg.ShowModal() == wx.ID_OK:
dataio.save_data(save_dlg.GetPath(), self.data)
# Refresh the parent window, presumably so its file listing shows
# the newly-saved file -- confirm against the parent's API.
self.view.parent.refresh()
save_dlg.Destroy()
def on_revert(self, evt):
"""Handles request to revert to original data set"""
self.revert()
def on_toggle_grid(self, evt):
"""Toggles the plot's grid on or off"""
# axes.grid() with no argument toggles; axes_grid mirrors the state so
# replots can restore it.
self.view.axes.grid()
self.axes_grid = not self.axes_grid
self.refresh_plot()
# NOTE(review): the TextEntryDialogs below are never Destroy()ed.
def on_set_xlabel(self, evt):
"""Handles the set x-axis label event"""
label_dlg = wx.TextEntryDialog(parent=self.view,
message="Enter a new label for the X-Axis",
caption="Set X Axis Label",
defaultValue=self.get_titles()['x'])
if label_dlg.ShowModal() == wx.ID_OK:
self.set_titles(x=label_dlg.GetValue())
def on_set_ylabel(self, evt):
"""Handles the set y-axis label event"""
label_dlg = wx.TextEntryDialog(parent=self.view,
message="Enter a new label for the Y-Axis",
caption="Set Y Axis Label",
defaultValue=self.get_titles()['y'])
if label_dlg.ShowModal() == wx.ID_OK:
self.set_titles(y=label_dlg.GetValue())
def on_set_plottitle(self, evt):
"""Handles the set plot title event"""
label_dlg = wx.TextEntryDialog(parent=self.view,
message="Enter a new title for the plot",
caption="Set Plot Title",
defaultValue=self.get_titles()['plot'])
if label_dlg.ShowModal() == wx.ID_OK:
self.set_titles(plot=label_dlg.GetValue())
def get_titles(self):
"""Returns the current titles for the plot, x and y axes as a dict with
keys 'plot', 'x', 'y'."""
titles = {'plot': self.view.axes.get_title(),
'x': self.view.axes.get_xlabel(),
'y': self.view.axes.get_ylabel()}
return titles
def set_titles(self, plot=None, x=None, y=None):
"""Sets one or more of plot, x, or y axis titles to specified
string. If not specified, title is left unchanged."""
# Note: truthiness tests mean an empty string also leaves a title unchanged.
if plot:
self.view.axes.set_title(plot)
if x:
self.view.axes.set_xlabel(x)
if y:
self.view.axes.set_ylabel(y)
self.refresh_plot()
def OnPaint(self, evt):
"""Handles wxPython paint event"""
self.refresh_plot()
evt.Skip()
@replace_plot
def revert(self):
"""Reverts data to original"""
self.model.revert_data()
def load_data(self):
"""Loads the data from the specified file name"""
try:
self.model.load_data()
except MemoryError as err: # out of memory
# Log the low-level error but surface a concise message to callers.
module_logger.exception("Insufficient memory - {0}".format(err))
raise MemoryError("Insufficient memory to load data")
def get_plugins(self):
"""Returns a list of the available NDIToolbox plugins"""
return self.model.get_plugins()
def generate_plugin_dict(self):
"""Returns a dict (key = wx ID, val = plugin) suitable
for inclusion in a Menu."""
plugin_id = 1000
plugins = {}
for plugin in self.get_plugins():
plugins[plugin_id] = plugin
plugin_id += 1
return plugins
def configure_plugin_dlg(self, plugin_instance):
"""Produces a ConfigurePlugin dialog to configure the
selected plugin"""
# Returns the user's configuration dict, or None if the dialog was
# cancelled (callers treat None as "abort the plugin run").
cfg = None
cfg_dlg = dialogs.ConfigurePluginDialog(self.view, plugin_instance)
if cfg_dlg.ShowModal() == wx.ID_OK:
cfg = cfg_dlg.get_config()
cfg_dlg.Destroy()
return cfg
class PlotWindowController(BasicPlotWindowController):
"""Controller for PlotWindow class"""
# Note: deliberately does not call the base __init__; it builds its own
# PlotWindowModel and adds the ultrasonic gate table.
def __init__(self, view, data_file):
self.view = view
self.axes_grid = True
self.model = model.PlotWindowModel(self, data_file)
self.gates = {}
self.get_gates()
self.init_plot_defaults()
module_logger.info("PlotWindowController successfully initialized.")
def plot(self, data):
"""Plots the dataset"""
if data is not None:
try:
# matplotlib forgets settings with replots -
# save current values to reset after the replot
titles = self.get_titles()
if data.ndim == 1:
self.view.axes.plot(data)
elif data.ndim == 2:
# Heuristic: a 2-row/2-column array is assumed to be paired
# X, Y series -- TODO confirm this matches the data producers.
if 2 in data.shape: # Assume data is X, Y
self.view.axes.plot(data[0], data[1])
else:
slice_dlg = dialogs.LinearSliceDialog(parent=self.view, data_shape=data.shape,
title="Select Axis To Plot")
if slice_dlg.ShowModal() == wx.ID_OK:
# Reload a 1D slice and recurse to plot it.
self.model.load_data(slice_idx=slice_dlg.get_data_slice())
self.plot(self.data)
slice_dlg.Destroy()
elif data.ndim == 3:
# 3D data; offer to take a slice in X, Y, or Z to plot
slice_dlg = dialogs.LinearSliceDialog(parent=self.view, data_shape=data.shape,
title="Select Axis To Plot")
if slice_dlg.ShowModal() == wx.ID_OK:
self.model.load_data(slice_idx=slice_dlg.get_data_slice())
self.plot(self.data)
slice_dlg.Destroy()
self.set_titles(plot=titles['plot'], x=titles['x'], y=titles['y'])
self.view.axes.grid(self.axes_grid)
except OverflowError as err: # Data too large to plot
module_logger.error("Data too large to plot: {0}".format(OverflowError))
err_msg = "{0}".format(err)
err_dlg = wx.MessageDialog(self.view, message=err_msg,
caption="Unable To Plot Data", style=wx.ICON_ERROR)
err_dlg.ShowModal()
err_dlg.Destroy()
@replace_plot
def rectify_full(self):
    """Applies full rectification to the current data set."""
    self.model.rectify_full()

def generate_gate_id(self):
    """Generates an ID number for the specified gate name.
    Used to identify gates in wxPython menu events."""
    # IDs start at 1011 and grow with the gate table; local renamed to
    # avoid shadowing the builtin `id`.
    gate_id = 1011 + len(self.gates)
    return gate_id
def get_gates(self):
"""Returns a dict listing available window functions"""
# NOTE(review): despite the docstring this returns None - it populates
# self.gates in place, mapping a generated wx menu ID to each gate name
# from the model.
for gate_name in self.model.gates:
self.gates[self.generate_gate_id()] = gate_name
def on_apply_gate(self, evt):
"""Handles request to apply window function ('gate' in UT)
to data"""
self.run_gate(evt.GetId())
@replace_plot
def run_gate(self, gate_id):
"""Runs toolkit with specified ID on current data set,
replaces current data and refreshes plot"""
if self.model.data is not None:
rng_dlg = dialogs.FloatRangeDialog("Please specify the gate region.")
if rng_dlg.ShowModal() == wx.ID_OK:
try:
start_pos, end_pos = rng_dlg.GetValue()
# NOTE(review): get_gates() stores id -> gate_name, so unpacking
# the looked-up value into (gate_name, gate_cls) only works if
# model.gates yields (name, class) pairs -- verify; a missing id
# also returns None here (TypeError), not the IndexError handled
# below.
gate_name, gate_cls = self.gates.get(gate_id)
self.run_plugin(gate_cls, start_pos=start_pos, end_pos=end_pos)
except ValueError as err: # negative dimensions
# NOTE(review): if GetValue() itself raised, start_pos/end_pos are
# unbound and this log line would raise NameError.
module_logger.error("Unable to apply gate, user provided negative dimensions: {0}, {1}".format(
start_pos, end_pos
))
err_msg = "{0}".format(err)
err_dlg = wx.MessageDialog(self.view, message=err_msg,
caption="Unable To Apply Gate", style=wx.ICON_ERROR)
err_dlg.ShowModal()
err_dlg.Destroy()
except IndexError: # specified nonexistent gate id
module_logger.error("Unable to apply gate, couldn't find specified gate function.")
err_msg = "Unable to locate specified gate function."
err_dlg = wx.MessageDialog(self.view, message=err_msg,
caption="Unable To Apply Gate", style=wx.ICON_ERROR)
err_dlg.ShowModal()
err_dlg.Destroy()
finally:
rng_dlg.Destroy()
def on_rectify(self, evt):
"""Handles request to apply rectification"""
self.rectify_full()
class BasicImgPlotWindowController(BasicPlotWindowController):
"""Base class for ImgPlotWindow Controllers"""
def __init__(self, view, data_file):
self.view = view
self.axes_grid = True
self.model = model.ImgPlotWindowModel(self, data_file)
# Colorbar handle, created lazily on first image plot.
self.colorbar = None
self.init_plot_defaults()
module_logger.info("Successfully initialized BasicImgPlotWindowController.")
def init_plot_defaults(self):
super(BasicImgPlotWindowController, self).init_plot_defaults()
# Restore the user's colormap choice from config, defaulting to Spectral.
cfg = mainmodel.get_config()
if cfg.has_option("ImgPlot", "colormap"):
self.colormap = self.model.get_cmap(cfg.get("ImgPlot", "colormap"))
else:
self.colormap = self.model.get_cmap('Spectral')
def on_set_cbarlbl(self, evt):
"""Sets the label for the imgplot's colorbar"""
# No-op until a colorbar exists (i.e. before the first image plot).
if self.colorbar is not None:
label_dlg = wx.TextEntryDialog(parent=self.view,
message="Enter a new label for the colorbar",
caption="Set Colorbar Label",
defaultValue=self.get_titles()['colorbar'])
if label_dlg.ShowModal() == wx.ID_OK:
self.set_titles(colorbar=label_dlg.GetValue())
def get_titles(self):
"""Returns the current titles for the plot, x & y axes, and colorbar as a dict
with keys 'plot', 'x', 'y', 'colorbar'."""
if self.colorbar is not None:
# matplotlib has a set_label function but not a get - ??
# (reaches into the private _label attribute as a workaround)
colorbar_lbl = self.colorbar._label
else:
colorbar_lbl = ''
titles = {'plot': self.view.axes.get_title(),
'x': self.view.axes.get_xlabel(),
'y': self.view.axes.get_ylabel(),
'colorbar': colorbar_lbl}
return titles
def set_titles(self, plot=None, x=None, y=None, colorbar=None):
"""Sets one or more of plot, x/y axis, or colorbar labels to
specified string. If not specified, label is unchanged."""
# As in the base class, truthiness tests mean an empty string leaves
# the corresponding label unchanged.
if plot:
self.view.axes.set_title(plot)
if x:
self.view.axes.set_xlabel(x)
if y:
self.view.axes.set_ylabel(y)
if colorbar:
self.colorbar.set_label(colorbar)
self.refresh_plot()
def on_preview_cmaps(self, evt):
"""Generates a new dialog displaying all the built-in matplotlib
colormaps and their reverse colormaps. Original code courtesy
SciPy Cookbook http://www.scipy.org/Cookbook/Matplotlib/Show_colormaps"""
wx.BeginBusyCursor()
import matplotlib.pyplot as plt
colormaps = self.model.get_colormap_choices()
colormap_strip = self.model.generate_colormap_strip()
num_maps = len(colormaps) + 1
figure = plt.figure(figsize=(5, 8))
figure.subplots_adjust(top=0.99, bottom=0.01, left=0.2, right=0.99)
# NOTE(review): the enumeration counts reversed ("_r") maps too but only
# draws the non-reversed ones, so subplot rows for "_r" entries are blank.
for i, m in enumerate(colormaps):
if not m.endswith("_r"):
ax = plt.subplot(num_maps, 1, i + 1)
plt.axis('off')
plt.imshow(colormap_strip, aspect='equal', cmap=self.model.get_cmap(m), origin='lower')
pos = list(ax.get_position().bounds)
figure.text(pos[0] - 0.01, pos[1], m, fontsize=10, horizontalalignment='right')
plt.show()
wx.EndBusyCursor()
def on_select_cmap(self, evt):
"""Generates a list of available matplotlib colormaps and sets the plot's
colormap to the user's choice."""
colormaps = self.model.get_colormap_choices()
cmap_dlg = wx.lib.dialogs.singleChoiceDialog(self.view, "Select Colormap",
"Please select a colormap for this plot.",
colormaps)
if cmap_dlg.accepted is True:
cfg = mainmodel.get_config()
colormap = cmap_dlg.selection
if colormap == '':
self.colormap = self.model.get_cmap('Spectral')
# NOTE(review): persists lowercase "spectral" while using
# 'Spectral' here; confirm get_cmap accepts both casings on reload.
cfg.set("ImgPlot", {"colormap":"spectral"})
else:
self.colormap = self.model.get_cmap(colormap)
cfg.set("ImgPlot", {"colormap":colormap})
if self.view.img is not None:
self.view.img.set_cmap(self.colormap)
self.refresh_plot()
def on_create_cmap(self, evt):
"""Handles request to create a new matplotlib colormap"""
cmapcreator_ui = colormapcreator.ColormapCreatorUI(parent=self.view)
cmapcreator_ui.Show()
class ImgPlotWindowController(BasicImgPlotWindowController):
"""Controller for ImgPlotWindow class"""
def __init__(self, view, data_file):
super(ImgPlotWindowController, self).__init__(view, data_file)
module_logger.info("Successfully initialized ImgPlotWindowController.")
def check_data_dims(self):
"""If the data is a 3D array, set the data to a single 2D
slice."""
# Lazily loads the data first if it hasn't been loaded yet.
if self.data is None:
self.load_data()
if self.data.ndim == 3:
slice_dlg = dialogs.PlanarSliceDialog(parent=self.view, data_shape=self.data.shape,
title="Specify 2D Plane")
if slice_dlg.ShowModal() == wx.ID_OK:
self.model.load_data(slice_idx=slice_dlg.get_data_slice())
slice_dlg.Destroy()
def plot(self, data):
"""Plots the dataset"""
if data is not None:
try:
# matplotlib forgets settings with replots -
# save current values to reapply after plot
titles = self.get_titles()
self.view.axes.cla()
self.view.img = self.view.axes.imshow(data, aspect="equal", origin="lower", cmap=self.colormap)
if self.colorbar:
# Drop the previous colorbar's axes before creating a new one,
# otherwise each replot would add another colorbar.
self.view.figure.delaxes(self.view.figure.axes[1])
self.view.figure.subplots_adjust(right=0.90)
self.colorbar = self.view.figure.colorbar(self.view.img)
self.set_titles(plot=titles['plot'], x=titles['x'], y=titles['y'])
self.view.axes.grid(self.axes_grid)
except TypeError as err: # Tried to imgplot 1D array
module_logger.error("Unable to plot data, user attempted to imgplot 1D array: {0}".format(err))
err_msg = "{0}".format(err)
err_dlg = wx.MessageDialog(self.view, message=err_msg,
caption="Unable To Plot Data", style=wx.ICON_ERROR)
err_dlg.ShowModal()
err_dlg.Destroy()
except OverflowError as err: # Data too large to plot
module_logger.error("Unable to plot data, data too large: {0}".format(err))
err_msg = "{0}".format(err)
err_dlg = wx.MessageDialog(self.view, message=err_msg,
caption="Unable To Plot Data", style=wx.ICON_ERROR)
err_dlg.ShowModal()
err_dlg.Destroy()
def on_detrend_meanx(self, evt):
"""Applies constant (mean) detrend in X"""
self.detrend(axis=0, type='constant')
def on_detrend_meany(self, evt):
"""Applies constant (mean) detrend in Y"""
self.detrend(axis=1, type='constant')
def on_detrend_linearx(self, evt):
"""Applies linear detrend in X"""
self.detrend(axis=0, type='linear')
def on_detrend_lineary(self, evt):
"""Applies linear detrend in Y"""
self.detrend(axis=1, type='linear')
@replace_plot
def detrend(self, axis, type):
"""Applies detrend along specified axis of specified type.
Refreshes the plot."""
# Note: the `type` parameter shadows the builtin of the same name;
# values used are 'constant' and 'linear' (see the handlers above).
self.model.detrend_data(axis, type)
# Geometric transforms: each handler delegates to the model and the
# @replace_plot decorator replots the transformed data.
@replace_plot
def on_flipud(self, evt):
"""Handles request to flip the data vertically"""
self.model.flipud_data()
@replace_plot
def on_fliplr(self, evt):
"""Handles request to flip the data horizontally"""
self.model.fliplr_data()
@replace_plot
def on_rot90ccw(self, evt):
"""Handles request to rotate data 90 degrees counterclockwise"""
self.model.rotate_data(1)
@replace_plot
def on_rot90cw(self, evt):
"""Handles request to rotate data 90 degrees clockwise"""
self.model.rotate_data(3)
@replace_plot
def on_rot180(self, evt):
"""Handles request to rotate data 180 degrees"""
self.model.rotate_data(2)
@replace_plot
def on_transpose(self, evt):
"""Handles request to transpose data"""
self.model.transpose_data()
class MegaPlotWindowController(BasicImgPlotWindowController, PlotWindowController):
    """Controller for MegaPlotWindows.

    A MegaPlot shows four linked views of one 3D NDE dataset: an A-scan
    (1D trace at x,y), horizontal/vertical B-scans (cross-sections), and a
    C-scan (2D slice at z).  The controller keeps the current (x, y, z)
    selection and refreshes all four axes together.
    """

    def __init__(self, view, data_file):
        self.view = view
        self.slice_idx = 0   # current C-scan slice index (z)
        self.xpos = 0        # last selected x position
        self.ypos = 0        # last selected y position
        self.axes_grid = True
        self.model = model.MegaPlotWindowModel(self, data_file)
        self.colorbar = None
        # Two x-coordinates gating a region of the A-scan ([None, None] = no gate).
        self.gate_coords = [None, None]
        self.gates = {}
        self.get_gates()
        self.init_plot_defaults()
        module_logger.info("Successfully initialized MegaPlotWindowController.")

    def init_plot_defaults(self):
        """Initializes the defaults for the Megaplot presentation."""
        super(MegaPlotWindowController, self).init_plot_defaults()
        cfg = mainmodel.get_config()
        if cfg.has_option("MegaPlot", "conventional bscans"):
            self.conventional_bscans = cfg.get_boolean("MegaPlot", "conventional bscans")
        else:
            self.conventional_bscans = False
        self.use_colorbar = self.get_colorbar_config()

    def plot(self, data):
        """Plots the dataset.

        NOTE(review): the guard checks the *data* argument but the scan
        handler is built from *self.data* -- confirm the two are expected to
        be the same object here.
        """
        if data is not None:
            self.scnr = ndescanhandler.NDEScanHandler(self.data)
            try:
                if self.view.slice_cb.IsChecked():
                    self.plot_cscan(self.scnr.cscan_data(self.slice_idx), self.slice_idx)
            except TypeError as err:  # Tried to imgplot 1D array
                module_logger.error("Unable to plot data, user attempted to imgplot 1D array: {0}".format(err))
                err_msg = "{0}".format(err)
                err_dlg = wx.MessageDialog(self.view, message=err_msg,
                                           caption="Unable To Plot Data", style=wx.ICON_ERROR)
                err_dlg.ShowModal()
                err_dlg.Destroy()
            except OverflowError as err:  # Data too large to plot
                module_logger.error("Unable to plot data, data too large to plot: {0}".format(err))
                err_msg = "{0}".format(err)
                err_dlg = wx.MessageDialog(self.view, message=err_msg,
                                           caption="Unable To Plot Data", style=wx.ICON_ERROR)
                err_dlg.ShowModal()
                err_dlg.Destroy()

    def plot_ascan(self, ascan_data, xpos, ypos):
        """Plots the provided A-scan data"""
        self.view.ascan_axes.cla()
        self.view.ascan_plt = self.view.ascan_axes.plot(ascan_data)
        self.view.ascan_axes.autoscale_view(tight=True)
        self.view.ascan_axes.set_title("A Scan x={0} y={1}".format(xpos, ypos))

    def plot_hbscan(self, hbscan_data, ypos, slice_idx=None):
        """Plots the provided horizontal B-scan data. If plotting a conventional Bscan, the slice_idx parameter
        can be omitted."""
        self.view.hbscan_axes.cla()
        # 1D data = cross-section through the current C-scan; 2D = conventional B-scan image.
        if hbscan_data.ndim == 1:
            self.view.hbscan_plt = self.view.hbscan_axes.plot(hbscan_data)
            self.view.hbscan_axes.set_title("Horizontal B Scan y={0} z={1}".format(ypos, slice_idx))
        else:
            self.view.hbscan_plt = self.view.hbscan_axes.imshow(hbscan_data, aspect='auto',
                                                                origin='lower', cmap=self.colormap,
                                                                interpolation='nearest')
            self.view.hbscan_axes.set_title("Horizontal B Scan y={0}".format(ypos))
        self.view.hbscan_axes.autoscale_view(tight=True)

    def plot_vbscan(self, vbscan_data, xpos, slice_idx=None):
        """Plots the provided vertical B-scan data. If plotting a conventional Bscan, the slice_idx parameter
        can be omitted."""
        self.view.vbscan_axes.cla()
        if vbscan_data.ndim == 1:
            self.view.vbscan_plt = self.view.vbscan_axes.plot(vbscan_data)
            self.view.vbscan_axes.set_title("Vertical B Scan x={0} z={1}".format(xpos, slice_idx))
        else:
            self.view.vbscan_plt = self.view.vbscan_axes.imshow(vbscan_data, aspect='auto',
                                                                origin='lower', cmap=self.colormap,
                                                                interpolation='nearest')
            self.view.vbscan_axes.set_title("Vertical B Scan x={0}".format(xpos))
        self.view.vbscan_axes.autoscale_view(tight=True)

    def plot_cscan(self, cscan_data, slice_idx):
        """Plots the supplied C-scan data"""
        self.view.cscan_axes.cla()
        self.view.cscan_img = self.view.cscan_axes.imshow(cscan_data, aspect='auto',
                                                          origin='lower', cmap=self.colormap,
                                                          interpolation='nearest')
        if self.use_colorbar:
            if self.colorbar:
                # In MegaPlot the colorbar is the fifth AxesSubplot if present -
                # need to delete to avoid cascading colorbars in replots
                if len(self.view.figure.axes) == 5:
                    self.view.figure.delaxes(self.view.figure.axes[4])
                    self.view.figure.subplots_adjust(right=0.90)
            self.colorbar = self.view.figure.colorbar(self.view.cscan_img)
        self.view.cscan_axes.set_title("C Scan z={0}".format(slice_idx))

    def get_plot_choice(self):
        """Presents single choice dialog to the user to select an Axes to modify.

        Returns the chosen AxesSubplot, or None if the dialog was cancelled.
        """
        plot_choices = ["A-Scan", "Horizontal B-Scan", "Vertical B-Scan", "C-Scan"]
        choice_dlg = wx.SingleChoiceDialog(parent=self.view, message="Please select a plot to modify.",
                                           caption="Available Plots", choices=plot_choices)
        if choice_dlg.ShowModal() == wx.ID_OK:
            # Choice index maps onto self.view.axes ordering.
            return self.view.axes[choice_dlg.GetSelection()]
        choice_dlg.Destroy()
        return None

    def on_set_xlabel(self, evt):
        """Handles the set x-axis label event"""
        axis = self.get_plot_choice()
        if axis is not None:
            label_dlg = wx.TextEntryDialog(parent=self.view,
                                           message="Enter a new label for the X-Axis",
                                           caption="Set X Axis Label",
                                           defaultValue=self.get_titles(axis)['x'])
            if label_dlg.ShowModal() == wx.ID_OK:
                self.set_titles(axis, x=label_dlg.GetValue())
            label_dlg.Destroy()

    def on_set_ylabel(self, evt):
        """Handles the set y-axis label event"""
        axis = self.get_plot_choice()
        if axis is not None:
            label_dlg = wx.TextEntryDialog(parent=self.view,
                                           message="Enter a new label for the Y-Axis",
                                           caption="Set Y Axis Label",
                                           defaultValue=self.get_titles(axis)['y'])
            if label_dlg.ShowModal() == wx.ID_OK:
                self.set_titles(axis, y=label_dlg.GetValue())
            label_dlg.Destroy()

    def on_set_plottitle(self, evt):
        """Handles the set plot title event"""
        axis = self.get_plot_choice()
        if axis is not None:
            label_dlg = wx.TextEntryDialog(parent=self.view,
                                           message="Enter a new title for the plot",
                                           caption="Set Plot Title",
                                           defaultValue=self.get_titles(axis)['plot'])
            if label_dlg.ShowModal() == wx.ID_OK:
                self.set_titles(axis, plot=label_dlg.GetValue())
            label_dlg.Destroy()

    def on_set_cbarlbl(self, evt):
        """Sets the label for the imgplot's colorbar"""
        if self.use_colorbar and self.colorbar is not None:
            label_dlg = wx.TextEntryDialog(parent=self.view,
                                           message="Enter a new label for the colorbar",
                                           caption="Set Colorbar Label",
                                           defaultValue=self.colorbar._label)
            if label_dlg.ShowModal() == wx.ID_OK:
                # Parent class handles applying the colorbar label.
                super(MegaPlotWindowController, self).set_titles(colorbar=label_dlg.GetValue())

    def get_titles(self, axes_inst):
        """Returns the current titles for the specified AxesSubplot instance's
        plot, x and y axes as a dict with keys 'plot', 'x', 'y'.
        Returns None if axes_inst is not a matplotlib Subplot."""
        if isinstance(axes_inst, matplotlib.axes.Subplot):
            titles = {'plot': axes_inst.get_title(),
                      'x': axes_inst.get_xlabel(),
                      'y': axes_inst.get_ylabel()}
            return titles
        return None

    def set_titles(self, axes_inst, plot=None, x=None, y=None):
        """Sets one or more of plot, x, or y axis titles to specified
        string for the specified AxesSubplot instance.
        If not specified, title is left unchanged."""
        if isinstance(axes_inst, matplotlib.axes.Subplot):
            if plot:
                axes_inst.set_title(plot)
            if x:
                axes_inst.set_xlabel(x)
            if y:
                axes_inst.set_ylabel(y)
            self.refresh_plot()

    def on_click(self, evt):
        """Handles mouse click in the C Scan - update other plots.

        Clicks in the A-scan axes set/clear the two gate lines: first click
        sets gate start, second sets gate end, third clears both.
        """
        # Ignore clicks while the matplotlib navigation toolbar is active.
        if not self.view.navtools_enabled():
            if evt.inaxes == self.view.cscan_axes:
                xpos = int(evt.xdata)
                ypos = int(evt.ydata)
                self.update_plot(xpos, ypos)
            if evt.inaxes == self.view.ascan_axes:
                xpos = int(evt.xdata)
                if self.gate_coords[0] is None:
                    self.gate_coords[0] = xpos
                    self.view.ascan_axes.axvline(x=xpos, color='r', linestyle='--')
                elif self.gate_coords[1] is None:
                    self.gate_coords[1] = xpos
                    self.view.ascan_axes.axvline(x=xpos, color='r', linestyle='--')
                    self.gate_coords.sort()
                else:
                    self.gate_coords[0] = None
                    self.gate_coords[1] = None
                    # Remove the gate marker lines, keeping the A-scan trace (line 0).
                    while len(self.view.ascan_axes.lines) > 1:
                        self.view.ascan_axes.lines.pop(-1)
                self.view.canvas.draw()

    def on_check_navtools(self, evt):
        """Handles toggle of enable/disable navigation toolbar checkbox"""
        self.view.toggle_toolbar()

    def set_navtools_config(self, navtools_enabled):
        """Sets the enable navtools option in the config"""
        cfg = mainmodel.get_config()
        cfg.set("MegaPlot", {"enable navtools":navtools_enabled})

    def get_navtools_config(self):
        """Returns the enable navtools setting from config (default True)."""
        cfg = mainmodel.get_config()
        if cfg.has_option("MegaPlot", "enable navtools"):
            return cfg.get_boolean("MegaPlot", "enable navtools")
        return True

    def on_toggle_colorbar(self, evt):
        """Handles toggle of enable/disable colorbar display"""
        use_colorbar = not self.get_colorbar_config()
        self.set_colorbar_config(use_colorbar)
        self.update_plot()

    def set_colorbar_config(self, colorbar_enabled):
        """Sets the enable colorbar option in the config"""
        cfg = mainmodel.get_config()
        cfg.set("MegaPlot", {"show_colorbar":colorbar_enabled})
        self.use_colorbar = colorbar_enabled

    def get_colorbar_config(self):
        """Returns the enable colorbar setting from config (default False)."""
        cfg = mainmodel.get_config()
        if cfg.has_option("MegaPlot", "show_colorbar"):
            return cfg.get_boolean("MegaPlot", "show_colorbar")
        return False

    def on_sliceidx_change(self, evt):
        """Responds to changes in the z position spin control"""
        self.update_plot(self.view.xpos_sc.GetValue(), self.view.ypos_sc.GetValue(),
                         self.view.slice_sc.GetValue())

    def on_xy_change(self, evt):
        """Responds to changes in the x position and y position spin controls"""
        self.update_plot(self.view.xpos_sc.GetValue(), self.view.ypos_sc.GetValue())

    @replace_plot
    def update_plot(self, xpos=None, ypos=None, slice_idx=None):
        """Updates the A and B scans based on the provided (x,y) position in the data. If xpos and/or ypos
        are None (default), A and B scans are updated on the last (x,y) position selected by the user.
        If slice_idx is provided the C scan plot is updated to that position, default is to leave unchanged if
        slice_idx is None."""
        # Fall back to / remember the last selected position.
        if xpos is None:
            xpos = self.xpos
        else:
            self.xpos = xpos
        if ypos is None:
            ypos = self.ypos
        else:
            self.ypos = ypos
        self.view.xpos_sc.SetValue(xpos)
        self.view.ypos_sc.SetValue(ypos)
        self.plot_ascan(self.scnr.ascan_data(xpos, ypos), xpos, ypos)
        if self.conventional_bscans is False:
            # B-scans as 1D cross-sections through the currently-displayed C-scan.
            self.plot_hbscan(self.view.cscan_img.get_array()[ypos, :], slice_idx=self.slice_idx, ypos=ypos)
            self.plot_vbscan(self.view.cscan_img.get_array()[:, xpos], slice_idx=self.slice_idx, xpos=xpos)
        else:
            # Conventional 2D B-scan images from the scan handler.
            self.plot_hbscan(self.scnr.hbscan_data(ypos).T, ypos)
            self.plot_vbscan(self.scnr.vbscan_data(xpos), xpos)
        if slice_idx is not None:
            self.slice_idx = slice_idx
            if self.view.slice_cb.IsChecked():
                self.plot_cscan(self.scnr.cscan_data(self.slice_idx), self.slice_idx)
        if not self.use_colorbar:
            if self.colorbar:
                # In MegaPlot the colorbar is the fifth AxesSubplot if present -
                # need to delete to avoid cascading colorbars in replots
                if len(self.view.figure.axes) == 5:
                    self.view.figure.delaxes(self.view.figure.axes[4])
                    self.view.figure.subplots_adjust(right=0.90)
        # Redraw any active gate markers after the A-scan was cleared.
        if self.gate_coords != [None, None]:
            self.view.ascan_axes.axvline(x=self.gate_coords[0], color='r', linestyle='--')
            self.view.ascan_axes.axvline(x=self.gate_coords[1], color='r', linestyle='--')
        self.refresh_plot()

    def on_select_cmap(self, evt):
        """Generates a list of available matplotlib colormaps and sets the plot's
        colormap to the user's choice."""
        colormaps = self.model.get_colormap_choices()
        cmap_dlg = wx.lib.dialogs.singleChoiceDialog(self.view, "Select Colormap",
                                                     "Please select a colormap for this plot.",
                                                     colormaps)
        if cmap_dlg.accepted is True:
            cfg = mainmodel.get_config()
            colormap = cmap_dlg.selection
            if colormap == '':
                # Nothing chosen -- fall back to the 'Spectral' default.
                self.colormap = self.model.get_cmap('Spectral')
                cfg.set("ImgPlot", {"colormap":"spectral"})
            else:
                self.colormap = self.model.get_cmap(colormap)
                cfg.set("ImgPlot", {"colormap":colormap})
            if self.view.cscan_img is not None:
                self.view.cscan_img.set_cmap(self.colormap)
                self.update_plot()

    @replace_plot
    def on_toggle_grid(self, evt):
        """Toggles the plot's grid on or off"""
        for ax in self.view.axes:
            ax.grid()
        self.axes_grid = not self.axes_grid
        self.refresh_plot()

    @replace_plot
    def on_change_bscans(self, evt):
        """Toggles using conventional Bscan imgplots or 1D cross-sections through the current Cscan"""
        self.conventional_bscans = self.view.plot_conventional_bscans
        cfg = mainmodel.get_config()
        cfg.set("MegaPlot", {"conventional bscans":self.conventional_bscans})
        self.update_plot()

    @replace_plot
    def on_rectify(self, evt):
        """Handles request to apply rectification to A-scan plot"""
        xpos = self.view.xpos_sc.GetValue()
        ypos = self.view.ypos_sc.GetValue()
        self.plot_ascan(self.model.rectify_full(self.scnr.ascan_data(xpos, ypos)), xpos, ypos)

    def on_define_cscan(self, evt):
        """Handles request to define the data used
        to produce the C Scan imgplot"""
        # Uncheck "slice" mode first: the generated C-scan replaces the slice view.
        self.view.slice_cb.SetValue(False)
        self.define_cscan()

    @replace_plot
    def define_cscan(self):
        """Specify a range of data and a function
        to generate a C Scan plot"""
        if self.model.data is not None:
            rng_dlg = dialogs.FloatRangeDialog("Please specify the index range in Z.")
            if rng_dlg.ShowModal() == wx.ID_OK:
                try:
                    start_pos, end_pos = rng_dlg.GetValue()
                    fn_dlg = wx.SingleChoiceDialog(parent=self.view, caption="Choose C Scan Function",
                                                   message="Please choose a function to generate the C Scan data.",
                                                   choices=self.scnr.available_cscan_function_names)
                    if fn_dlg.ShowModal() == wx.ID_OK:
                        wx.BeginBusyCursor()
                        cscan_data = self.scnr.gen_cscan(start_pos, end_pos,
                                                         fn=self.scnr.available_cscan_functions[fn_dlg.GetSelection()])
                        self.plot_cscan(cscan_data, self.slice_idx)
                        plot_title = "C Scan {0} z={1}:{2}".format(
                            self.scnr.available_cscan_function_names[fn_dlg.GetSelection()], start_pos, end_pos)
                        self.set_titles(self.view.cscan_axes, plot=plot_title)
                        wx.EndBusyCursor()
                except ValueError as err:
                    module_logger.error("Unable to generate C-scan: {0}".format(err))
                    err_msg = "{0}".format(err)
                    err_dlg = wx.MessageDialog(self.view, message=err_msg,
                                               caption="Unable To Generate C Scan", style=wx.ICON_ERROR)
                    err_dlg.ShowModal()
                    err_dlg.Destroy()
                finally:
                    rng_dlg.Destroy()
| 45.288
| 131
| 0.56505
|
4a0862e08cf08c47cdb1d81211e1711c2f0b4148
| 557
|
py
|
Python
|
data_store.py
|
Rin-The-QT-Bunny/Glaurung
|
4751c89b7e821ab9c7307312aa928cb1323e5c73
|
[
"CC0-1.0"
] | null | null | null |
data_store.py
|
Rin-The-QT-Bunny/Glaurung
|
4751c89b7e821ab9c7307312aa928cb1323e5c73
|
[
"CC0-1.0"
] | null | null | null |
data_store.py
|
Rin-The-QT-Bunny/Glaurung
|
4751c89b7e821ab9c7307312aa928cb1323e5c73
|
[
"CC0-1.0"
] | null | null | null |
import json
# Default program settings keyed by program id ("p1".."p5").
# The numeric values' semantics are not shown here -- TODO confirm meaning.
programs = { "p1" : 64,
"p2" : 64,
"p3" : 30,
"p4" : 4,
"p5" : 5 }
def save_setup(system_setup, path='data/programs/program_base.json'):
    """Serialize *system_setup* to JSON and write it to *path*.

    Args:
        system_setup: Any JSON-serializable object (here: the program dict).
        path: Destination file. Defaults to the original hard-coded location,
            so existing callers are unaffected.
    """
    # 'with' guarantees the file is closed even if serialization/IO fails.
    with open(path, 'w') as f2:
        json.dump(system_setup, f2)
def load_setup(path='data/programs/program_base.json'):
    """Read the JSON settings file at *path* and return the parsed object.

    Args:
        path: Settings file to read. Defaults to the original hard-coded
            location, keeping existing zero-argument callers working.

    Returns:
        The deserialized settings (a dict for the program base file).
    """
    # 'with' closes the file even if parsing raises.
    with open(path, 'r') as f:
        return json.load(f)
# NOTE(review): these run at import time -- writing the settings file and
# printing it. Consider guarding with `if __name__ == "__main__":`.
save_setup(programs)
print(load_setup())
| 23.208333
| 53
| 0.574506
|
4a086333f8aa67e232dcc0dc0d76b2206b385902
| 5,618
|
py
|
Python
|
code/imagecomp.py
|
jkfids/cross-correlation
|
003c43c1089bacf407ea735ea4b2befca10acd1f
|
[
"MIT"
] | 1
|
2021-04-09T04:02:00.000Z
|
2021-04-09T04:02:00.000Z
|
code/imagecomp.py
|
jkfids/stereo-vision
|
003c43c1089bacf407ea735ea4b2befca10acd1f
|
[
"MIT"
] | null | null | null |
code/imagecomp.py
|
jkfids/stereo-vision
|
003c43c1089bacf407ea735ea4b2befca10acd1f
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Stereo-vision image comparison experiments: computes displacement arrays
(dparrays) for several stereo image pairs, comparing overlap and multipass
settings, and saves the resulting figures to output/.

Created on Tue May 18 16:37:40 2021
@author: Fidel
"""
# Standard libraries
from time import time
import numpy as np
from matplotlib import pyplot as plt
from PIL import Image
import matplotlib.image as mpimg
import matplotlib.ticker as plticker
# Local modules
from stereovision import StereoVision
#%% Desert pair: axis-by-axis displacement components (dx, dy, dr)
left_desert = Image.open('data/stereo/left_desert.png')
right_desert = Image.open('data/stereo/right_desert.png')
start = time()
desert = StereoVision(left_desert, right_desert, resize=0.5)
dparray, _ = desert.calc_dparray(16, (7,3))
end = time()
print(f'Time elapsed (axis comparison): {round(end - start, 3)}s')
dparray = desert.filter_dparray(passes=0, edge_cutoff=(0,-1,0,-2))
X, Y = dparray
# Outlier suppression: replace implausible displacements with the mean.
# NOTE(review): chained assignment gives both masked regions the same mean
# value, and the thresholds 10/19 look empirically tuned -- confirm.
X[X<10] = np.mean(X)
Y[Y>19] = Y[Y<0] = np.mean(Y)
R = np.sqrt(X*X + Y*Y)
fig1, axes1 = plt.subplots(1, 3, dpi=144, figsize=(8,6))
im = axes1[0].imshow(X, vmin=0, vmax=50)
im = axes1[1].imshow(Y, vmin=0, vmax=50)
im = axes1[2].imshow(R, vmin=0, vmax=50)
axes1[0].set_title('Δx')
axes1[1].set_title('Δy')
axes1[2].set_title('Δr')
fig1.tight_layout(rect=[0, 0, 0.95, 1])
fig1.colorbar(im, ax=axes1.ravel().tolist(), fraction=0.018)
fig1.savefig('output/desert_dparray')
#%% Desert pair: effect of window overlap (0.25 / 0.5 / 0.75) on |d| and runtime
left_desert = Image.open('data/stereo/left_desert.png')
right_desert = Image.open('data/stereo/right_desert.png')
desert1 = StereoVision(left_desert, right_desert, resize=0.5)
desert2 = StereoVision(left_desert, right_desert, resize=0.5)
desert3 = StereoVision(left_desert, right_desert, resize=0.5)
time1 = time()
dparray1, _ = desert1.calc_dparray(16, (7,3), overlap=.25)
dparray1 = desert1.filter_dparray(passes=2, edge_cutoff=(0,-1,0,-3))
time2 = time()
dparray2, _ = desert2.calc_dparray(16, (7,3), overlap=.5)
dparray2 = desert2.filter_dparray(passes=2, edge_cutoff=(0,-2,0,-4))
time3 = time()
dparray3, _ = desert3.calc_dparray(16, (7,3), overlap=.75)
dparray3 = desert3.filter_dparray(passes=2, edge_cutoff=(0,-4,0,-7))
time4 = time()
print(f'Time elapsed (overlap = 0.25): {round(time2 - time1, 3)}s')
print(f'Time elapsed (overlap = 0.5): {round(time3 - time2, 3)}s')
print(f'Time elapsed (overlap = 0.75): {round(time4 - time3, 3)}s')
X1, Y1 = dparray1
X2, Y2 = dparray2
X3, Y3 = dparray3
R1 = np.sqrt(X1*X1 + Y1*Y1)
R2 = np.sqrt(X2*X2 + Y2*Y2)
R3 = np.sqrt(X3*X3 + Y3*Y3)
fig2, axes2 = plt.subplots(1, 3, dpi=144, figsize=(8,6))
im = axes2[0].imshow(R1, vmin=0, vmax=50)
im = axes2[1].imshow(R2, vmin=0, vmax=50)
im = axes2[2].imshow(R3, vmin=0, vmax=50)
axes2[0].set_title('overlap = 0.25', fontsize=9)
axes2[1].set_title('overlap = 0.5', fontsize=9)
axes2[2].set_title('overlap = 0.75', fontsize=9)
fig2.tight_layout(rect=[0, 0, 0.95, 1])
fig2.colorbar(im, ax=axes2.ravel().tolist(), fraction=0.018)
fig2.savefig('output/overlap_dparray')
#%% Desert pair: effect of multipass level (2 / 3 / 4) with matching window sizes
left_desert = Image.open('data/stereo/left_desert.png')
right_desert = Image.open('data/stereo/right_desert.png')
desert1 = StereoVision(left_desert, right_desert, resize=0.5)
desert2 = StereoVision(left_desert, right_desert, resize=0.5)
desert3 = StereoVision(left_desert, right_desert, resize=0.5)
time1 = time()
dparray1, _ = desert1.calc_dparray(16, (12,8), multipass=2)
dparray1 = desert1.filter_dparray(edge_cutoff=(0,-4,0,-6))
time2 = time()
dparray2, _ = desert2.calc_dparray(32, (6,4), multipass=3)
dparray2 = desert2.filter_dparray(edge_cutoff=(0,-4,0,-4))
time3 = time()
dparray3, _ = desert3.calc_dparray(64, (3,2), multipass=4)
dparray3 = desert3.filter_dparray(edge_cutoff=(0,-4,0,None))
time4 = time()
print(f'Time elapsed (multipass = 2): {round(time2 - time1, 3)}s')
print(f'Time elapsed (multipass = 3): {round(time3 - time2, 3)}s')
print(f'Time elapsed (multipass = 4): {round(time4 - time3, 3)}s')
X1, Y1 = dparray1
X2, Y2 = dparray2
X3, Y3 = dparray3
R1 = np.sqrt(X1*X1 + Y1*Y1)
R2 = np.sqrt(X2*X2 + Y2*Y2)
R3 = np.sqrt(X3*X3 + Y3*Y3)
end = time()
fig3, axes3 = plt.subplots(1, 3, dpi=144, figsize=(8,6))
im = axes3[0].imshow(R1, vmin=0, vmax=50)
im = axes3[1].imshow(R2, vmin=0, vmax=50)
im = axes3[2].imshow(R3, vmin=0, vmax=50)
axes3[0].set_title('multipass level = 2', fontsize=8)
axes3[1].set_title('multipass level = 3', fontsize=8)
axes3[2].set_title('multipass level = 4', fontsize=8)
fig3.tight_layout(rect=[0, 0, 0.95, 1])
fig3.colorbar(im, ax=axes3.ravel().tolist(), fraction=0.018)
fig3.savefig('output/multipass_dparray')
#%% Portal pair: single high-overlap run
left_portal = Image.open('data/stereo/left_portal.tiff')
right_portal = Image.open('data/stereo/right_portal.tiff')
portal = StereoVision(left_portal, right_portal)
start = time()
dparray, _ = portal.calc_dparray(32, (3,1), overlap=0.75, multipass=1)
dparray = portal.filter_dparray(stds=4, edge_cutoff=(2,None,0,None))
end = time()
print(f'Time elapsed (portal images): {round(end - start, 3)}s')
X, Y = dparray
R = np.sqrt(X*X + Y*Y)
fig4, ax4 = plt.subplots(dpi=144, figsize=(8,6))
im = ax4.imshow(R)
ax4.set_title('wsize = 32, ssize = (3,1), overlap = 0.75, multipass = 1')
fig4.colorbar(im, ax=ax4, fraction=0.03, pad=0.04)
fig4.savefig('output/portal_dparray')
#%% Cone pair: deep multipass run
left_cone = Image.open('data/stereo/left_cone.tiff')
right_cone = Image.open('data/stereo/right_cone.tiff')
cone = StereoVision(left_cone, right_cone)
start = time()
dparray, _ = cone.calc_dparray(64, (2,2), multipass=4)
dparray = cone.filter_dparray(stds=2)
end = time()
print(f'Time elapsed (cone images): {round(end - start, 3)}s')
X, Y = dparray
R = np.sqrt(X*X + Y*Y)
fig5, ax5 = plt.subplots(dpi=144, figsize=(6,8))
im = ax5.imshow(R)
ax5.set_title('wsize = 64, ssize = (2,2), overlap = 0, multipass = 4', fontsize=12)
fig5.colorbar(im, ax=ax5, fraction=0.08)
fig5.savefig('output/cone_dparray')
| 33.242604
| 83
| 0.699359
|
4a08645f1af53165260ea523ee96bc6e48432184
| 8,887
|
py
|
Python
|
robocute/scene.py
|
kfields/robocute
|
f6f15ab74266053da5fe4ede3cc81310a62146e5
|
[
"MIT"
] | 1
|
2015-08-24T21:58:34.000Z
|
2015-08-24T21:58:34.000Z
|
robocute/scene.py
|
kfields/robocute
|
f6f15ab74266053da5fe4ede3cc81310a62146e5
|
[
"MIT"
] | null | null | null |
robocute/scene.py
|
kfields/robocute
|
f6f15ab74266053da5fe4ede3cc81310a62146e5
|
[
"MIT"
] | null | null | null |
from pyglet.gl import *
import robocute.graphics
import robocute.camera
from robocute.node import *
from robocute.world import *
from robocute.pane import *
from robocute.dash import *
from robocute.mouse import Mouse
class Clip(robocute.graphics.Clip):
    """Clip window over the world: lazily caches a rowCount x colCount page
    of world grids around the current scroll position."""

    def __init__(self, world, rowCount = 3, colCount = 3):
        super().__init__()
        self.world = world
        self.data = []        # cached grid rows; entries filled via cache_miss
        self.gridX = 0        # cache-page origin in grid-page units
        self.gridY = 0
        self.rowCount = rowCount
        self.colCount = colCount
        self.clear_cache()

    def clear_cache(self):
        """Reset the cache to rowCount rows of colCount empty (None) slots."""
        self.data = []
        i = 0
        while i < self.rowCount:
            self.data.append([None] * self.colCount)
            i += 1

    def cache_miss(self, colNdx, rowNdx):
        """Fetch the grid for (colNdx, rowNdx) from the world into the cache."""
        self.data[rowNdx][colNdx] = self.world.get_grid(self.gridX + colNdx, self.gridY + rowNdx)

    def validate(self):
        """Recompute the cache-page origin from the clip's pixel position;
        clears the cache whenever the page changed.

        BLOCK_WIDTH / BLOCK_ROW_HEIGHT come from the robocute.world star
        import at the top of this module.
        """
        gridColMax = self.world.gridColMax
        gridRowMax = self.world.gridRowMax
        gridWidth = gridColMax * BLOCK_WIDTH
        invGridWidth = 1. / gridWidth
        gridHeight = gridRowMax * BLOCK_ROW_HEIGHT
        invGridHeight = 1. / gridHeight
        # Page indices from pixel position, clamped to >= 0.
        gridX = int(self.x * invGridWidth)
        if gridX < 0:
            gridX = 0
        gridY = int(self.y * invGridHeight)
        if gridY < 0:
            gridY = 0
        # Page moved -> cached grids are stale.
        if self.gridX != gridX or self.gridY != gridY:
            self.clear_cache()
            self.gridX = gridX
            self.gridY = gridY
class Camera(robocute.camera.Camera):
    """Scene camera that pairs the base camera with a world Clip cache."""

    def __init__(self, scene, rowCount = 3, colCount = 3):
        super().__init__(scene.window)
        self.world = scene.node
        self.graphics.camera = self
        clip = Clip(self.world, rowCount, colCount)
        self.clip = clip
        self.graphics.clip = clip

    def validate(self):
        """Revalidate the camera, then the clip (updates its cache page)."""
        super().validate()
        self.clip.validate()
class BubbleLayer(NodeLayer):
    """Layer holding speech/thought bubble nodes; drawn in world space."""
    def __init__(self, parent, name, order):
        super().__init__(parent, name, order)
class WidgetLayer(NodeLayer):
    """Layer holding UI widget nodes; drawn in screen space."""
    def __init__(self, parent, name, order):
        super().__init__(parent, name, order)
class MouseLayer(NodeLayer):
    """Layer holding mouse-cursor nodes; draws each cursor's vu at its position."""

    def __init__(self, parent, name, order):
        super().__init__(parent, name, order)

    def draw(self, graphics):
        # FIXME: is the graphics copy necessary?
        g = graphics.copy()
        for node in self.nodes:
            vu = node.vu
            g.x = node.x
            # FIXME: should account for the mouse hotspot (hotx/hoty), not
            # just the vu height.
            g.y = node.y - vu.height
            vu.draw(g)
class SceneLayer(RootLayer):
    """Root layer of a Scene; builds named sub-layers in creation order."""

    def __init__(self):
        super().__init__('scene')

    def create_layer(self, name):
        """Create, register and return the sub-layer for *name*.

        Supported names: 'bubbles', 'dash', 'widgets', 'mice'. The layer's
        draw order is its position in self.layers at creation time.

        Raises:
            ValueError: if *name* is not a known layer name. (Previously an
                unknown name crashed with an UnboundLocalError on `layer`.)
        """
        order = len(self.layers)
        if name == 'bubbles':
            layer = BubbleLayer(self, name, order)
        elif name == 'dash':
            layer = Dash(self, name, order)
        elif name == 'widgets':
            layer = WidgetLayer(self, name, order)
        elif name == 'mice':
            layer = MouseLayer(self, name, order)
        else:
            raise ValueError("unknown scene layer name: %r" % (name,))
        self.layers.append(layer)
        return layer
class Scene(Pane):
    """Top-level scene pane compositing the world with overlay layers.

    Draw order (back to front): tiled background image, world grids plus
    bubbles (camera-transformed), dash, widgets, mouse cursors.
    """

    def __init__(self, world, app, win):
        super().__init__(world)
        self.layer = SceneLayer()
        self.app = app
        self.window = win
        # Tiled background image.
        self.bgImg = image.load(data.filepath('image/clouds.png'))
        # Overlay sub-layers, created in draw order.
        self.bubbles = self.layer.create_layer('bubbles')
        self.dash = self.layer.create_layer('dash')
        self.widgets = self.layer.create_layer('widgets')
        self.mice = self.layer.create_layer('mice')
        # Pending pick/hit-test query, processed during the next draw().
        self.query = None

    def create_camera(self):
        """Create a camera sized to the window's device dimensions."""
        camera = Camera(self)
        camera.deviceWidth = self.window.width
        camera.deviceHeight = self.window.height
        return camera

    # --- Rendering ---

    def draw(self, layerGraphics, worldGraphics):
        """Draw the full scene, running any pending query afterwards."""
        query = self.query
        if query:
            worldGraphics.query = query
            layerGraphics.query = query
        glEnable(GL_BLEND)
        glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)
        self.draw_background(layerGraphics)
        self.draw_world(worldGraphics)
        self.dash.draw(layerGraphics)
        self.widgets.draw(layerGraphics)
        self.mice.draw(layerGraphics)
        if query:
            query.process()
            self.query = None
            worldGraphics.query = None
            layerGraphics.query = None

    def draw_background(self, graphics):
        """Tile the background image to cover the whole window."""
        bgWidth = self.bgImg.width
        bgHeight = self.bgImg.height
        blitY = 0
        while blitY < self.window.height:
            blitX = 0
            while blitX < self.window.width:
                self.bgImg.blit(blitX, blitY, 0)
                blitX = blitX + bgWidth
            blitY = blitY + bgHeight

    def draw_world(self, graphics):
        """Draw the camera-transformed world: visible grids plus bubbles."""
        glPushMatrix()
        glScalef(graphics.scaleX, graphics.scaleY, graphics.scaleZ)
        glTranslatef(-graphics.camera.x, -graphics.camera.y, -graphics.camera.z)
        self.draw_grids(graphics)
        self.bubbles.draw(graphics)
        glPopMatrix()

    def draw_grids(self, graphics):
        """Draw the world grids intersecting the camera's clip rectangle.

        Grids are fetched lazily: when a cached cell is None the clip's
        cache is filled and the same cell is retried on the next iteration.

        Fix: the empty-row branch previously did `c += 1; continue`, which
        referenced `c` before assignment and never advanced `r` (NameError
        or an infinite loop if ever hit). It now advances to the next row.
        """
        clip = graphics.clip
        g = graphics
        gridColMax = self.node.gridColMax
        gridRowMax = self.node.gridRowMax
        gridWidth = gridColMax * BLOCK_WIDTH
        invGridWidth = 1. / gridWidth
        gridHeight = gridRowMax * BLOCK_ROW_HEIGHT
        invGridHeight = 1. / gridHeight
        # Clip rectangle relative to the cache page origin.
        posX = clip.gridX * gridWidth
        posY = clip.gridY * gridHeight
        bottom = clip.bottom - posY
        top = clip.top - posY
        left = clip.left - posX
        right = clip.right - posX
        rowCount = clip.rowCount
        rowMax = rowCount - 1
        colCount = clip.colCount
        colMax = colCount - 1
        # Visible row/column window, clamped to the cache extents.
        r1 = int(top * invGridHeight)
        if r1 < 0:
            r1 = 0
        if r1 > rowMax:
            r1 = rowMax
        r2 = int(bottom * invGridHeight)
        if r2 < 0:
            r2 = 0
        if r2 > rowMax:
            r2 = rowMax
        c1 = int(left * invGridWidth)
        if c1 < 0:
            c1 = 0
        if c1 > colMax:
            c1 = colMax
        c2 = int(right * invGridWidth)
        if c2 < 0:
            c2 = 0
        if c2 > colMax:
            c2 = colMax
        r = r1
        while r >= r2:  # rows in sheet, top to bottom
            row = clip.data[r]
            if len(row) == 0:
                r -= 1  # fixed: skip to the next row (was `c += 1`)
                continue
            c = c1
            blitY = posY + (r * gridHeight)
            while c <= c2:  # cells in row
                blitX = posX + (c * gridWidth)
                grid = row[c]
                if not grid:
                    # Fill the cache and retry the same cell.
                    clip.cache_miss(c, r)
                    continue
                self.draw_grid(grid, g, blitX, blitY, g.z)
                c += 1
            r -= 1

    def draw_grid(self, grid, graphics, tX, tY, tZ = 1.):
        """Draw a single grid, translated to (tX, tY, tZ)."""
        g = graphics.copy()
        glPushMatrix()
        glTranslatef(tX, tY, tZ)
        g.translate(tX, tY, tZ)
        grid.vu.draw(g)
        glPopMatrix()

    # --- Bubbles ---

    def add_bubble(self, bubble):
        self.bubbles.add_node(bubble)

    def remove_bubble(self, bubble):
        self.bubbles.remove_node(bubble)

    # --- Widgets ---

    def add_widget(self, widget):
        self.widgets.add_node(widget)

    def remove_widget(self, widget):
        self.widgets.remove_node(widget)

    # --- Mouse support ---

    def add_mouse(self, mouse):
        self.mice.add_node(mouse)

    def remove_mouse(self, mouse):
        self.mice.remove_node(mouse)
| 29.137705
| 98
| 0.497581
|
4a08684a34de8461ee4dccdf18d0821521a65095
| 530
|
py
|
Python
|
altdeutsch/reader.py
|
clemsciences/old_high_german_texts
|
1fe458613da5f13760d743cee99fc2eaceb59298
|
[
"MIT"
] | null | null | null |
altdeutsch/reader.py
|
clemsciences/old_high_german_texts
|
1fe458613da5f13760d743cee99fc2eaceb59298
|
[
"MIT"
] | null | null | null |
altdeutsch/reader.py
|
clemsciences/old_high_german_texts
|
1fe458613da5f13760d743cee99fc2eaceb59298
|
[
"MIT"
] | null | null | null |
"""
"""
import csv
import codecs
__author__ = ["Clément Besnier <clemsciences@aol.com>", ]
def read_export(filename):
    """Read a tab-separated export file into a text dictionary.

    Rows with fewer than three columns are skipped. Column 1 of each row is
    used as the key; column 2 is split into sentences (on the "·" delimiter)
    and each sentence into tokens.

    Args:
        filename: Path to the UTF-8, tab-delimited export file.

    Returns:
        dict mapping row[1] -> list of tokenized sentences (lists of str).
    """
    # Builtin open with newline="" is the csv-module-recommended way to read;
    # replaces the legacy codecs.open, with identical decoding behavior.
    with open(filename, "r", encoding="utf-8", newline="") as f:
        reader = csv.reader(f, delimiter="\t")
        return {
            row[1]: [tokenize(sent) for sent in sentence_delimit(row[2])]
            for row in reader
            if len(row) > 2
        }
def sentence_delimit(text):
    """Split *text* into sentence chunks on the "·" delimiter."""
    return text.split("·")
def tokenize(text):
    """Split *text* on single spaces, dropping empty tokens."""
    # filter(None, ...) removes the empty strings produced by runs of spaces.
    return list(filter(None, text.split(" ")))
def render_text():
    """Placeholder -- rendering is not implemented yet."""
    pass
| 19.62963
| 112
| 0.65283
|
4a0868ec4a2fad5a3c032e4e9e8a0caa9a0453a6
| 2,986
|
py
|
Python
|
couchbase_v2/tests/cases/excextra_t.py
|
couchbase/couchbase-python-client
|
99ec055835f5aef0cd07905497b3ab4bb3cbbc32
|
[
"Apache-2.0"
] | 189
|
2015-01-07T18:34:31.000Z
|
2022-03-21T17:41:56.000Z
|
couchbase_v2/tests/cases/excextra_t.py
|
couchbase/couchbase-python-client
|
99ec055835f5aef0cd07905497b3ab4bb3cbbc32
|
[
"Apache-2.0"
] | 24
|
2015-05-19T14:00:16.000Z
|
2022-03-16T22:01:30.000Z
|
couchbase_v2/tests/cases/excextra_t.py
|
couchbase/couchbase-python-client
|
99ec055835f5aef0cd07905497b3ab4bb3cbbc32
|
[
"Apache-2.0"
] | 60
|
2015-03-10T22:12:50.000Z
|
2022-03-07T21:57:40.000Z
|
#
# Copyright 2013, Couchbase, Inc.
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import couchbase_v2.exceptions as E
from couchbase_tests.base import ConnectionTestCase
# These tests try to see if the 'result' and 'all_results' appear properly
# also verify that other documented exception fields are present
class ExceptionsTest(ConnectionTestCase):
    """Verifies documented fields on Couchbase exceptions.

    Checks that 'all_results' and per-key results appear on raised
    exceptions, for both single-key and multi-key operations.
    """

    def test_simple_excextra(self):
        """A get() on a missing key raises DocumentNotFoundException with
        key, message, rc and an all_results MultiResult populated."""
        exc = None
        key = self.gen_key("simple_excextra")
        # Ensure the key is absent so the get below must fail.
        self.cb.remove(key, quiet=True)
        try:
            self.cb.get(key, quiet=False)
        except E.CouchbaseException as e:
            exc = e
        self.assertTrue(exc)
        self.assertIsInstance(exc, E.CouchbaseException)
        self.assertTrue(exc.message)
        self.assertIsInstance(exc, E.DocumentNotFoundException)
        self.assertEqual(exc.key, key)
        self.assertIsInstance(exc.all_results, self.cls_MultiResult)
        self.assertTrue(key in exc.all_results)
        self.assertIsInstance(exc.all_results[key], self.cls_ValueResult)
        self.assertEqual(exc.all_results[key].rc, exc.rc)
        # str/repr must not raise.
        str(exc)
        repr(exc)
        del exc

    def test_multi_exc(self):
        """A multi-get over missing + existing keys raises, and
        split_results() partitions all_results into successes/failures."""
        kv_missing = self.gen_kv_dict(prefix="multi_exc_missing")
        kv_existing = self.gen_kv_dict(prefix="multi_exc_existing")
        self.cb.upsert_multi(kv_existing)
        exc = None
        try:
            self.cb.get_multi(list(kv_missing.keys()) + list(kv_existing.keys()),
                              quiet=False)
        except E.CouchbaseException as e:
            exc = e
        self.assertTrue(exc)
        self.assertIsInstance(exc, E.DocumentNotFoundException)
        self.assertEqual(len(exc.all_results),
                         len(kv_missing) + len(kv_existing))
        res_ok, res_fail = exc.split_results()
        all_results = exc.all_results
        # Missing keys: present, failed, not in the OK partition.
        for k, v in kv_missing.items():
            self.assertTrue(k in all_results)
            self.assertTrue(k in res_fail)
            self.assertFalse(k in res_ok)
            self.assertFalse(all_results[k].success)
        # Existing keys: present, succeeded, values round-trip.
        for k, v in kv_existing.items():
            self.assertTrue(k in all_results)
            self.assertTrue(k in res_ok)
            self.assertFalse(k in res_fail)
            self.assertTrue(all_results[k].success)
            self.assertTrue(all_results[k].value)
            self.assertEqual(v, all_results[k].value)
        # str/repr must not raise.
        str(exc)
        repr(exc)
        del exc
| 34.72093
| 81
| 0.658071
|
4a0869dc255a849c056292f8ff9c764e3254da03
| 6,495
|
py
|
Python
|
migration/20170713-19-move-third-party-config-to-external-integrations.py
|
tdilauro/simplified-circulation
|
f52d333616f63e2bff0cf1de98ef301bf152fba1
|
[
"Apache-2.0"
] | null | null | null |
migration/20170713-19-move-third-party-config-to-external-integrations.py
|
tdilauro/simplified-circulation
|
f52d333616f63e2bff0cf1de98ef301bf152fba1
|
[
"Apache-2.0"
] | null | null | null |
migration/20170713-19-move-third-party-config-to-external-integrations.py
|
tdilauro/simplified-circulation
|
f52d333616f63e2bff0cf1de98ef301bf152fba1
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
"""Move integration details from the Configuration file into the
database as ExternalIntegrations
"""
import os
import sys
import json
import logging
# Make the package root importable when this migration script is run
# directly from the migration/ directory.
bin_dir = os.path.split(__file__)[0]
package_dir = os.path.join(bin_dir, "..")
sys.path.append(os.path.abspath(package_dir))
from core.model import (
ConfigurationSetting,
ExternalIntegration as EI,
Library,
get_one_or_create,
production_session,
)
from api.adobe_vendor_id import AuthdataUtility
from api.config import Configuration
log = logging.getLogger(name="Circulation manager configuration import")
def log_import(integration_or_setting):
log.info("CREATED: %r" % integration_or_setting)
try:
Configuration.load()
_db = production_session()
LIBRARIES = _db.query(Library).all()
# Import Circulation Manager base url.
circ_manager_conf = Configuration.integration('Circulation Manager')
if circ_manager_conf:
url = circ_manager_conf.get('url')
if url:
setting = ConfigurationSetting.sitewide(_db, Configuration.BASE_URL_KEY)
setting.value = unicode(url)
log_import(setting)
# Import Metadata Wrangler configuration.
metadata_wrangler_conf = Configuration.integration('Metadata Wrangler')
if metadata_wrangler_conf:
integration = EI(protocol=EI.METADATA_WRANGLER, goal=EI.METADATA_GOAL)
_db.add(integration)
integration.url = metadata_wrangler_conf.get('url')
integration.username = metadata_wrangler_conf.get('client_id')
integration.password = metadata_wrangler_conf.get('client_secret')
log_import(integration)
# Import NoveList Select configuration.
novelist = Configuration.integration('NoveList Select')
if novelist:
integration = EI(protocol=EI.NOVELIST, goal=EI.METADATA_GOAL)
_db.add(integration)
integration.username = novelist.get('profile')
integration.password = novelist.get('password')
integration.libraries.extend(LIBRARIES)
log_import(integration)
# Import NYT configuration.
nyt_conf = Configuration.integration(u'New York Times')
if nyt_conf:
integration = EI(protocol=EI.NYT, goal=EI.METADATA_GOAL)
_db.add(integration)
integration.password = nyt_conf.get('best_sellers_api_key')
log_import(integration)
# Import Adobe Vendor ID configuration.
adobe_conf = Configuration.integration('Adobe Vendor ID')
if adobe_conf:
vendor_id = adobe_conf.get('vendor_id')
node_value = adobe_conf.get('node_value')
other_libraries = adobe_conf.get('other_libraries')
if node_value:
node_library = Library.default(_db)
integration = EI(protocol=EI.ADOBE_VENDOR_ID, goal=EI.DRM_GOAL)
_db.add(integration)
integration.username = vendor_id
integration.password = node_value
if other_libraries:
other_libraries = unicode(json.dumps(other_libraries))
integration.set_setting(u'other_libraries', other_libraries)
integration.libraries.append(node_library)
log_import(integration)
# Import short client token configuration.
integration = EI(protocol=u'Short Client Token', goal=EI.DRM_GOAL)
_db.add(integration)
integration.set_setting(
AuthdataUtility.VENDOR_ID_KEY, vendor_id
)
for library in LIBRARIES:
short_name = library.library_registry_short_name
short_name = short_name or adobe_conf.get('library_short_name')
if short_name:
ConfigurationSetting.for_library_and_externalintegration(
_db, EI.USERNAME, library, integration
).value = short_name
shared_secret = library.library_registry_shared_secret
shared_secret = shared_secret or adobe_conf.get('authdata_secret')
ConfigurationSetting.for_library_and_externalintegration(
_db, EI.PASSWORD, library, integration
).value = shared_secret
library_url = adobe_conf.get('library_uri')
ConfigurationSetting.for_library(
Configuration.WEBSITE_URL, library).value = library_url
integration.libraries.append(library)
# Import Google OAuth configuration.
google_oauth_conf = Configuration.integration('Google OAuth')
if google_oauth_conf:
integration = EI(protocol=EI.GOOGLE_OAUTH, goal=EI.ADMIN_AUTH_GOAL)
_db.add(integration)
integration.url = google_oauth_conf.get("web", {}).get("auth_uri")
integration.username = google_oauth_conf.get("web", {}).get("client_id")
integration.password = google_oauth_conf.get("web", {}).get("client_secret")
auth_domain = Configuration.policy('admin_authentication_domain')
if auth_domain:
integration.set_setting(u'domains', json.dumps([auth_domain]))
log_import(integration)
# Import Patron Web Client configuration.
patron_web_client_conf = Configuration.integration(u'Patron Web Client', {})
patron_web_client_url = patron_web_client_conf.get('url')
if patron_web_client_url:
setting = ConfigurationSetting.sitewide(
_db, Configuration.PATRON_WEB_CLIENT_URL)
setting.value = patron_web_client_url
log_import(setting)
# Import analytics configuration.
policies = Configuration.get(u"policies", {})
analytics_modules = policies.get(u"analytics", ["core.local_analytics_provider"])
if "api.google_analytics_provider" in analytics_modules:
google_analytics_conf = Configuration.integration(u"Google Analytics Provider", {})
tracking_id = google_analytics_conf.get(u"tracking_id")
integration = EI(protocol=u"api.google_analytics_provider", goal=EI.ANALYTICS_GOAL)
_db.add(integration)
integration.url = "http://www.google-analytics.com/collect"
for library in LIBRARIES:
ConfigurationSetting.for_library_and_externalintegration(
_db, u"tracking_id", library, integration).value = tracking_id
library.integrations += [integration]
if "core.local_analytics_provider" in analytics_modules:
integration = EI(protocol=u"core.local_analytics_provider", goal=EI.ANALYTICS_GOAL)
_db.add(integration)
finally:
_db.commit()
_db.close()
| 36.903409
| 91
| 0.694842
|
4a086a0b6c72825dbd1aad7318b545c4b7120d90
| 15,380
|
py
|
Python
|
ncsnv3/models/ncsnv3.py
|
deepneuralmachine/google-research
|
d2ce2cf0f5c004f8d78bfeddf6e88e88f4840231
|
[
"Apache-2.0"
] | 23,901
|
2018-10-04T19:48:53.000Z
|
2022-03-31T21:27:42.000Z
|
ncsnv3/models/ncsnv3.py
|
deepneuralmachine/google-research
|
d2ce2cf0f5c004f8d78bfeddf6e88e88f4840231
|
[
"Apache-2.0"
] | 891
|
2018-11-10T06:16:13.000Z
|
2022-03-31T10:42:34.000Z
|
ncsnv3/models/ncsnv3.py
|
deepneuralmachine/google-research
|
d2ce2cf0f5c004f8d78bfeddf6e88e88f4840231
|
[
"Apache-2.0"
] | 6,047
|
2018-10-12T06:31:02.000Z
|
2022-03-31T13:59:28.000Z
|
# coding=utf-8
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: skip-file
"""The NCSNv3 model."""
from . import utils, layers, layersv3, normalization
import flax.nn as nn
import jax.numpy as jnp
import numpy as np
# Short local aliases for the building blocks used throughout this module.
ResnetBlockDDPM = layersv3.ResnetBlockDDPMv3
ResnetBlockBigGAN = layersv3.ResnetBlockBigGANv3
Combine = layersv3.Combine
conv3x3 = layersv3.conv3x3
conv1x1 = layersv3.conv1x1
get_act = layers.get_act
get_normalization = normalization.get_normalization
default_initializer = layers.default_init
@utils.register_model(name='ncsnv3')
class NCSNv3(nn.Module):
  """NCSNv3 model without continuous noise levels."""

  def apply(self, x, labels, y=None, config=None, train=True):
    """Estimate the score of `x` at the noise level selected by `labels`.

    Args:
      x: batch of images (presumably NHWC — axis 1 is compared against
        `attn_resolutions`; TODO confirm).
      labels: integer indices into the discrete `sigmas` table.
      y: optional integer class labels for class-conditional generation.
      config: config object providing `model.*` and `data.*` fields.
      train: whether dropout runs in training mode.

    Returns:
      Tensor with the same shape as `x`.
    """
    # config parsing
    nf = config.model.nf  # base channel count (duplicate assignment removed)
    act = get_act(config)
    normalize = get_normalization(config)
    sigmas = utils.get_sigmas(config)
    ch_mult = config.model.ch_mult
    num_res_blocks = config.model.num_res_blocks
    attn_resolutions = config.model.attn_resolutions
    dropout = config.model.dropout
    resamp_with_conv = config.model.resamp_with_conv
    num_resolutions = len(ch_mult)
    conditional = config.model.conditional  # noise-conditional
    fir = config.model.fir
    fir_kernel = config.model.fir_kernel
    skip_rescale = config.model.skip_rescale
    resblock_type = config.model.resblock_type
    progressive = config.model.progressive
    progressive_input = config.model.progressive_input
    init_scale = config.model.init_scale
    # NOTE: the asserts compare lowercased values but the branches below
    # compare the raw strings, so configs are expected to be lowercase.
    assert progressive.lower() in ['none', 'output_skip', 'residual']
    assert config.model.embedding_type.lower() in ['gaussian', 'positional']
    combine_method = config.model.progressive_combine
    combiner = Combine.partial(method=combine_method)

    # timestep/noise_level embedding
    if config.model.embedding_type == 'gaussian':
      # Gaussian Fourier features embeddings.
      used_sigmas = sigmas[labels]
      temb = layersv3.GaussianFourierProjection(
          jnp.log(used_sigmas),
          embedding_size=nf,
          scale=config.model.fourier_scale)
    elif config.model.embedding_type == 'positional':
      # Sinusoidal positional embeddings.
      timesteps = labels
      temb = layers.get_timestep_embedding(timesteps, nf)
    else:
      raise ValueError(f'embedding type {config.model.embedding_type} unknown.')
    # Two-layer MLP projects the raw embedding to the conditioning vector.
    temb = nn.Dense(temb, nf * 4, kernel_init=default_initializer())
    temb = nn.Dense(act(temb), nf * 4, kernel_init=default_initializer())
    if y is not None:  # class-conditional image generation
      class_embed = nn.Embed(y, config.data.num_classes, nf * 4)
      class_embed = nn.Dense(
          class_embed, nf * 4, kernel_init=default_initializer())
      class_embed = nn.Dense(
          act(class_embed), nf * 4, kernel_init=default_initializer())
      temb += class_embed
    # Pre-bind the shared hyperparameters of the building blocks.
    AttnBlock = layersv3.AttnBlockv3.partial(
        normalize=normalize,
        init_scale=init_scale,
        skip_rescale=skip_rescale)
    Upsample = layersv3.Upsample.partial(
        with_conv=resamp_with_conv, fir=fir, fir_kernel=fir_kernel)
    if progressive == 'output_skip':
      pyramid_upsample = layersv3.Upsample.partial(
          fir=fir, fir_kernel=fir_kernel, with_conv=False)
    elif progressive == 'residual':
      pyramid_upsample = layersv3.Upsample.partial(
          fir=fir, fir_kernel=fir_kernel, with_conv=True)
    Downsample = layersv3.Downsample.partial(
        with_conv=resamp_with_conv, fir=fir, fir_kernel=fir_kernel)
    if progressive_input == 'input_skip':
      pyramid_downsample = layersv3.Downsample.partial(
          fir=fir, fir_kernel=fir_kernel, with_conv=False)
    elif progressive_input == 'residual':
      pyramid_downsample = layersv3.Downsample.partial(
          fir=fir, fir_kernel=fir_kernel, with_conv=True)
    if resblock_type == 'ddpm':
      ResnetBlock = ResnetBlockDDPM.partial(
          act=act,
          normalize=normalize,
          dropout=dropout,
          temb=temb if conditional else None,
          train=train,
          init_scale=init_scale,
          skip_rescale=skip_rescale)
    elif resblock_type == 'biggan':
      ResnetBlock = ResnetBlockBigGAN.partial(
          act=act,
          normalize=normalize,
          temb=temb if conditional else None,
          train=train,
          dropout=dropout,
          fir=fir,
          fir_kernel=fir_kernel,
          init_scale=init_scale,
          skip_rescale=skip_rescale)
    else:
      raise ValueError(f'resblock_type {resblock_type} unrecognized.')
    if not config.data.centered:
      # If input data is in [0, 1], rescale to [-1, 1].
      x = 2 * x - 1.

    # Downsampling block
    input_pyramid = None
    if progressive_input != 'none':
      input_pyramid = x
    hs = [conv3x3(x, nf)]
    for i_level in range(num_resolutions):
      # Residual blocks for this resolution
      for i_block in range(num_res_blocks):
        h = ResnetBlock(hs[-1], out_ch=nf * ch_mult[i_level])
        if h.shape[1] in attn_resolutions:
          h = AttnBlock(h)
        hs.append(h)
      if i_level != num_resolutions - 1:
        if resblock_type == 'ddpm':
          h = Downsample(hs[-1])
        else:
          h = ResnetBlock(hs[-1], down=True)
        # Feed a downsampled copy of the input at every resolution when a
        # progressive-input scheme is enabled.
        if progressive_input == 'input_skip':
          input_pyramid = pyramid_downsample(input_pyramid)
          h = combiner(input_pyramid, h)
        elif progressive_input == 'residual':
          input_pyramid = pyramid_downsample(
              input_pyramid, out_ch=h.shape[-1])
          if skip_rescale:
            input_pyramid = (input_pyramid + h) / np.sqrt(2.)
          else:
            input_pyramid = input_pyramid + h
          h = input_pyramid
        hs.append(h)
    # Bottleneck: ResNet -> attention -> ResNet.
    h = hs[-1]
    h = ResnetBlock(h)
    h = AttnBlock(h)
    h = ResnetBlock(h)
    pyramid = None

    # Upsampling block
    for i_level in reversed(range(num_resolutions)):
      for i_block in range(num_res_blocks + 1):
        # Concatenate the matching skip connection from the down path.
        h = ResnetBlock(
            jnp.concatenate([h, hs.pop()], axis=-1),
            out_ch=nf * ch_mult[i_level])
      if h.shape[1] in attn_resolutions:
        h = AttnBlock(h)
      if progressive != 'none':
        if i_level == num_resolutions - 1:
          if progressive == 'output_skip':
            pyramid = conv3x3(
                act(normalize(h, num_groups=min(h.shape[-1] // 4, 32))),
                x.shape[-1],
                bias=True,
                init_scale=init_scale)
          elif progressive == 'residual':
            pyramid = conv3x3(
                act(normalize(h, num_groups=min(h.shape[-1] // 4, 32))),
                h.shape[-1],
                bias=True)
          else:
            raise ValueError(f'{progressive} is not a valid name.')
        else:
          if progressive == 'output_skip':
            pyramid = pyramid_upsample(pyramid)
            pyramid = pyramid + conv3x3(
                act(normalize(h, num_groups=min(h.shape[-1] // 4, 32))),
                x.shape[-1],
                bias=True,
                init_scale=init_scale)
          elif progressive == 'residual':
            pyramid = pyramid_upsample(pyramid, out_ch=h.shape[-1])
            if skip_rescale:
              pyramid = (pyramid + h) / np.sqrt(2.)
            else:
              pyramid = pyramid + h
            h = pyramid
          else:
            raise ValueError(f'{progressive} is not a valid name')
      if i_level != 0:
        if resblock_type == 'ddpm':
          h = Upsample(h)
        else:
          h = ResnetBlock(h, up=True)
    assert not hs  # every skip connection must have been consumed
    if progressive == 'output_skip':
      h = pyramid
    else:
      h = act(normalize(h, num_groups=min(h.shape[-1] // 4, 32)))
      h = conv3x3(h, x.shape[-1], init_scale=init_scale)
    if config.model.scale_by_sigma:
      # Divide the output by sigma, broadcasting over all non-batch axes.
      used_sigmas = sigmas[labels].reshape((x.shape[0],
                                            *([1] * len(x.shape[1:]))))
      h = h / used_sigmas
    return h
@utils.register_model(name='ncsnv3_fourier')
class NCSNv3Fourier(nn.Module):
  """NCSNv3 model with continuous noise levels."""

  def apply(self, x, sigmas, y=None, config=None, train=True):
    """Estimate the score of `x` at continuous noise levels `sigmas`.

    Args:
      x: batch of images (presumably NHWC — axis 1 is compared against
        `attn_resolutions`; TODO confirm).
      sigmas: per-example noise levels (one per batch element).
      y: optional integer class labels for class-conditional generation.
      config: config object providing `model.*` and `data.*` fields.
      train: whether dropout runs in training mode.

    Returns:
      Tensor with the same shape as `x`.
    """
    # config parsing
    nf = config.model.nf  # base channel count (duplicate assignment removed)
    act = get_act(config)
    normalize = get_normalization(config)
    ch_mult = config.model.ch_mult
    num_res_blocks = config.model.num_res_blocks
    attn_resolutions = config.model.attn_resolutions
    dropout = config.model.dropout
    resamp_with_conv = config.model.resamp_with_conv
    num_resolutions = len(ch_mult)
    conditional = config.model.conditional  # noise-conditional
    fir = config.model.fir
    fir_kernel = config.model.fir_kernel
    skip_rescale = config.model.skip_rescale
    resblock_type = config.model.resblock_type
    progressive = config.model.progressive
    progressive_input = config.model.progressive_input
    init_scale = config.model.init_scale
    assert progressive in ['none', 'output_skip', 'residual']
    combine_method = config.model.progressive_combine
    combiner = Combine.partial(method=combine_method)
    fourier_scale = config.model.fourier_scale

    # timestep/scale embedding: Gaussian Fourier features of log(sigma).
    temb = layersv3.GaussianFourierProjection(jnp.log(sigmas), embedding_size=nf,
                                              scale=fourier_scale)
    temb = nn.Dense(temb, nf * 4, kernel_init=default_initializer())
    temb = nn.Dense(act(temb), nf * 4, kernel_init=default_initializer())
    if y is not None:  # class-conditional image generation.
      class_embed = nn.Embed(y, config.data.num_classes, nf * 4)
      class_embed = nn.Dense(
          class_embed, nf * 4, kernel_init=default_initializer())
      class_embed = nn.Dense(
          act(class_embed), nf * 4, kernel_init=default_initializer())
      temb += class_embed
    # Pre-bind the shared hyperparameters of the building blocks.
    AttnBlock = layersv3.AttnBlockv3.partial(
        normalize=normalize,
        init_scale=init_scale,
        skip_rescale=skip_rescale)
    Upsample = layersv3.Upsample.partial(
        with_conv=resamp_with_conv, fir=fir, fir_kernel=fir_kernel)
    if progressive == 'output_skip':
      pyramid_upsample = layersv3.Upsample.partial(
          fir=fir, fir_kernel=fir_kernel, with_conv=False)
    elif progressive == 'residual':
      pyramid_upsample = layersv3.Upsample.partial(
          fir=fir, fir_kernel=fir_kernel, with_conv=True)
    Downsample = layersv3.Downsample.partial(
        with_conv=resamp_with_conv, fir=fir, fir_kernel=fir_kernel)
    if progressive_input == 'input_skip':
      pyramid_downsample = layersv3.Downsample.partial(
          fir=fir, fir_kernel=fir_kernel, with_conv=False)
    elif progressive_input == 'residual':
      pyramid_downsample = layersv3.Downsample.partial(
          fir=fir, fir_kernel=fir_kernel, with_conv=True)
    if resblock_type == 'ddpm':
      ResnetBlock = ResnetBlockDDPM.partial(
          act=act,
          normalize=normalize,
          dropout=dropout,
          temb=temb if conditional else None,
          train=train,
          init_scale=init_scale,
          skip_rescale=skip_rescale)
    elif resblock_type == 'biggan':
      ResnetBlock = ResnetBlockBigGAN.partial(
          act=act,
          normalize=normalize,
          temb=temb if conditional else None,
          train=train,
          dropout=dropout,
          fir=fir,
          fir_kernel=fir_kernel,
          init_scale=init_scale,
          skip_rescale=skip_rescale)
    else:
      raise ValueError(f'resblock_type {resblock_type} unrecognized.')
    if not config.data.centered:
      # If input data is in [0, 1], rescale to [-1, 1].
      x = 2 * x - 1.

    # Downsampling block
    input_pyramid = None
    if progressive_input != 'none':
      input_pyramid = x
    hs = [conv3x3(x, nf)]
    for i_level in range(num_resolutions):
      # Residual blocks for this resolution
      for i_block in range(num_res_blocks):
        h = ResnetBlock(hs[-1], out_ch=nf * ch_mult[i_level])
        if h.shape[1] in attn_resolutions:
          h = AttnBlock(h)
        hs.append(h)
      if i_level != num_resolutions - 1:
        if resblock_type == 'ddpm':
          h = Downsample(hs[-1])
        else:
          h = ResnetBlock(hs[-1], down=True)
        # Feed a downsampled copy of the input at every resolution when a
        # progressive-input scheme is enabled.
        if progressive_input == 'input_skip':
          input_pyramid = pyramid_downsample(input_pyramid)
          h = combiner(input_pyramid, h)
        elif progressive_input == 'residual':
          input_pyramid = pyramid_downsample(
              input_pyramid, out_ch=h.shape[-1])
          if skip_rescale:
            input_pyramid = (input_pyramid + h) / np.sqrt(2.)
          else:
            input_pyramid = input_pyramid + h
          h = input_pyramid
        hs.append(h)
    # Bottleneck: ResNet -> attention -> ResNet.
    h = hs[-1]
    h = ResnetBlock(h)
    h = AttnBlock(h)
    h = ResnetBlock(h)
    pyramid = None

    # Upsampling block
    for i_level in reversed(range(num_resolutions)):
      for i_block in range(num_res_blocks + 1):
        # Concatenate the matching skip connection from the down path.
        h = ResnetBlock(
            jnp.concatenate([h, hs.pop()], axis=-1),
            out_ch=nf * ch_mult[i_level])
      if h.shape[1] in attn_resolutions:
        h = AttnBlock(h)
      if progressive != 'none':
        if i_level == num_resolutions - 1:
          if progressive == 'output_skip':
            pyramid = conv3x3(
                act(normalize(h, num_groups=min(h.shape[-1] // 4, 32))),
                x.shape[-1],
                bias=True,
                init_scale=init_scale)
          elif progressive == 'residual':
            pyramid = conv3x3(
                act(normalize(h, num_groups=min(h.shape[-1] // 4, 32))),
                h.shape[-1],
                bias=True)
          else:
            raise ValueError(f'{progressive} is not a valid name.')
        else:
          if progressive == 'output_skip':
            pyramid = pyramid_upsample(pyramid)
            pyramid = pyramid + conv3x3(
                act(normalize(h, num_groups=min(h.shape[-1] // 4, 32))),
                x.shape[-1],
                bias=True,
                init_scale=init_scale)
          elif progressive == 'residual':
            pyramid = pyramid_upsample(pyramid, out_ch=h.shape[-1])
            if skip_rescale:
              pyramid = (pyramid + h) / np.sqrt(2.)
            else:
              pyramid = pyramid + h
            h = pyramid
          else:
            raise ValueError(f'{progressive} is not a valid name')
      if i_level != 0:
        if resblock_type == 'ddpm':
          h = Upsample(h)
        else:
          h = ResnetBlock(h, up=True)
    assert not hs  # every skip connection must have been consumed
    if progressive == 'output_skip':
      h = pyramid
    else:
      h = act(normalize(h, num_groups=min(h.shape[-1] // 4, 32)))
      h = conv3x3(h, x.shape[-1], init_scale=init_scale)
    if config.model.scale_by_sigma:
      # Divide the output by sigma, broadcasting over all non-batch axes.
      used_sigmas = sigmas.reshape((x.shape[0], *([1] * len(x.shape[1:]))))
      h = h / used_sigmas
    return h
| 33.654267
| 81
| 0.625748
|
4a086b31802a0362a6855390dd0d5b431b3a9551
| 4,494
|
py
|
Python
|
src/main/python/apache/aurora/config/schema/base.py
|
wickman/incubator-aurora
|
9906d217093568ed4c9cfe620862818f15ce4150
|
[
"Apache-2.0"
] | null | null | null |
src/main/python/apache/aurora/config/schema/base.py
|
wickman/incubator-aurora
|
9906d217093568ed4c9cfe620862818f15ce4150
|
[
"Apache-2.0"
] | null | null | null |
src/main/python/apache/aurora/config/schema/base.py
|
wickman/incubator-aurora
|
9906d217093568ed4c9cfe620862818f15ce4150
|
[
"Apache-2.0"
] | null | null | null |
#
# Copyright 2013 Apache Software Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from apache.thermos.config.schema import *
from gen.apache.aurora.constants import DEFAULT_ENVIRONMENT
# TODO(wickman) Bind {{mesos.instance}} to %shard_id%
class MesosContext(Struct):
  # Template context bound into task configs at runtime (e.g. {{mesos.instance}}).
  # The instance id (i.e. replica id, shard id) in the context of a task
  instance = Required(Integer)
# The object bound into the {{packer}} namespace.
# Referenced by
# {{packer[role][name][version]}}
#
# Where version =
# number (integer)
# 'live' (live package)
# 'latest' (highest version number)
#
# For example if you'd like to create a copy process for a particular
# package,
# copy_latest = Process(
# name = 'copy-{{package_name}}',
# cmdline = '{{packer[{{role}}][{{package_name}}][latest].copy_command}}')
# processes = [
# copy_latest.bind(package_name = 'labrat'),
# copy_latest.bind(package_name = 'packer')
# ]
class PackerObject(Struct):
  # Fields exposed for a package bound under the {{packer}} namespace
  # (see the usage example in the comment block above this class).
  package = String        # package name
  package_uri = String    # URI the package can be fetched from
  copy_command = String   # shell command that copies the package into the sandbox
class UpdateConfig(Struct):
  # Knobs controlling how a rolling job update proceeds.
  batch_size = Default(Integer, 1)                # shards updated per batch
  restart_threshold = Default(Integer, 60)        # seconds to wait for a shard to restart
  watch_secs = Default(Integer, 30)               # seconds a shard must stay healthy after update
  max_per_shard_failures = Default(Integer, 0)    # tolerated failures per shard
  max_total_failures = Default(Integer, 0)        # tolerated failures across the whole update
  rollback_on_failure = Default(Boolean, True)    # roll back automatically when the update fails
class HealthCheckConfig(Struct):
  # Timing/tolerance parameters for task health checking.
  initial_interval_secs = Default(Float, 60.0)    # grace period before the first check
  interval_secs = Default(Float, 30.0)            # delay between checks
  timeout_secs = Default(Float, 1.0)              # per-check timeout
  max_consecutive_failures = Default(Integer, 0)  # failures tolerated before the task is deemed unhealthy
class Announcer(Struct):
  # Service-announcement configuration: which port is primary and how other
  # ports map onto it.
  primary_port = Default(String, 'http')
  # Portmap can either alias two ports together, e.g.
  # aurora <= http
  # Or it can be used to alias static ports to endpoints, e.g.
  # http <= 80
  # https <= 443
  # aurora <= https
  portmap = Default(Map(String, String), {
    'aurora': '{{primary_port}}'
  })
# The executorConfig populated inside of TaskConfig.
class MesosTaskInstance(Struct):
  # A single scheduled instance of a task; this is the executorConfig
  # payload populated inside TaskConfig.
  task = Required(Task)
  instance = Required(Integer)          # shard/replica index of this instance
  role = Required(String)
  announce = Announcer
  environment = Default(String, DEFAULT_ENVIRONMENT)
  health_check_interval_secs = Default(Integer, 30) # DEPRECATED (MESOS-2649)
  health_check_config = Default(HealthCheckConfig, HealthCheckConfig())
class MesosJob(Struct):
  # Top-level schema for an Aurora job definition (exported below as `Job`).
  name = Default(String, '{{task.name}}')   # defaults to the task's name
  role = Required(String)
  contact = String
  cluster = Required(String)
  environment = Required(String)
  instances = Default(Integer, 1)           # number of shards/replicas
  task = Required(Task)
  recipes = List(String)
  announce = Announcer
  cron_schedule = String
  cron_policy = String # these two are aliases of each other. default is KILL_EXISTING
  cron_collision_policy = String # if unspecified.
  # cron_policy is DEPRECATED (MESOS-2491) in favor of
  # cron_collision_policy.
  update_config = Default(UpdateConfig, UpdateConfig())
  constraints = Map(String, String)
  daemon = Boolean # daemon and service are aliased together.
  service = Boolean # daemon is DEPRECATED (MESOS-2492) in favor of
  # service. by default, service is False.
  max_task_failures = Default(Integer, 1)
  production = Default(Boolean, False)
  priority = Default(Integer, 0)
  health_check_interval_secs = Integer # DEPRECATED in favor of health_check_config (MESOS-2649).
  health_check_config = HealthCheckConfig
  task_links = Map(String, String)
  enable_hooks = Default(Boolean, False) # enable client API hooks; from env python-list 'hooks'
# Public aliases: `Job` is the canonical name for MesosJob, and `Service`
# is a Job pre-bound with service = True.
Job = MesosJob
Service = Job(service = True)
| 35.109375
| 98
| 0.647085
|
4a086c1314061ae757cdb3ed865715855ba47de0
| 2,495
|
py
|
Python
|
src/sync.py
|
IronCountySchoolDistrict/naviance-sync
|
868e47d2850e751644f909da1157e3226638a38b
|
[
"MIT"
] | null | null | null |
src/sync.py
|
IronCountySchoolDistrict/naviance-sync
|
868e47d2850e751644f909da1157e3226638a38b
|
[
"MIT"
] | null | null | null |
src/sync.py
|
IronCountySchoolDistrict/naviance-sync
|
868e47d2850e751644f909da1157e3226638a38b
|
[
"MIT"
] | null | null | null |
from naviance import Naviance
from db import create_cx_oracle_conn
import argparse
import dotenv
dotenv.load()
def results_to_csv_str(results, cursor):
    """Render a fetched result set as CSV text.

    The first line holds the column names taken from ``cursor.description``;
    each subsequent line is one row, with ``None`` rendered as an empty cell.
    """
    header = ','.join(column[0] for column in cursor.description)
    body = '\n'.join(
        ','.join('' if cell is None else str(cell) for cell in row)
        for row in results
    )
    return header + '\n' + body
def import_students(client):
    """Run the student extract query and upload the results to Naviance.

    :param client: configured Naviance API client
    :return: raw response from the Naviance student import endpoint
    """
    # `with` closes the SQL file handle (the previous open().read() leaked it).
    with open('sql/student.sql') as sql_file:
        student_sql = sql_file.read()
    cursor = create_cx_oracle_conn().cursor()
    cursor.execute(student_sql)
    student_results = cursor.fetchall()
    csv_results = results_to_csv_str(student_results, cursor)
    naviance_response = client.import_students(csv_results)
    return naviance_response
def import_parents(client):
    """Run the parent extract query and upload the results to Naviance.

    :param client: configured Naviance API client
    :return: raw response from the Naviance parent import endpoint
    """
    # `with` closes the SQL file handle (the previous open().read() leaked it).
    with open('sql/parent.sql') as sql_file:
        parent_sql = sql_file.read()
    cursor = create_cx_oracle_conn().cursor()
    cursor.execute(parent_sql)
    parent_results = cursor.fetchall()
    csv_results = results_to_csv_str(parent_results, cursor)
    naviance_response = client.import_parents(csv_results)
    return naviance_response
def import_course_data(client):
    """Run the student-course extract query and upload the results to Naviance.

    :param client: configured Naviance API client
    :return: raw response from the Naviance student-course import endpoint
    """
    # `with` closes the SQL file handle (the previous open().read() leaked it).
    with open('sql/student_course.sql') as sql_file:
        course_sql = sql_file.read()
    cursor = create_cx_oracle_conn().cursor()
    cursor.execute(course_sql)
    course_data_results = cursor.fetchall()
    csv_results = results_to_csv_str(course_data_results, cursor)
    naviance_response = client.import_student_course(csv_results)
    return naviance_response
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Perform Naviance Sync process')
    parser.add_argument('import_type', metavar='student|parent|student_course', nargs=1)
    args = parser.parse_args()
    # Credentials and options come from the environment (.env) via dotenv.
    naviance_client = Naviance(account=dotenv.get('NAVIANCE_ACCOUNT'),
                               username=dotenv.get('NAVIANCE_USERNAME'),
                               email=dotenv.get('NAVIANCE_EMAIL'),
                               data_import_key=dotenv.get('NAVIANCE_DATA_IMPORT_KEY'),
                               has_header=dotenv.get('NAVIANCE_HAS_HEADER'))
    import_type = args.import_type[0]
    # Dispatch on the requested import; previously an unrecognized value
    # silently did nothing — now it exits with a usage error.
    if import_type == 'student':
        response = import_students(naviance_client)
    elif import_type == 'parent':
        response = import_parents(naviance_client)
    elif import_type == 'student_course':
        response = import_course_data(naviance_client)
    else:
        parser.error("import_type must be one of: student, parent, student_course")
| 34.652778
| 88
| 0.697796
|
4a086d4f3ef96f955e2c1f5d26f3808018be384e
| 33,260
|
py
|
Python
|
deepctr/layers/sequence.py
|
BradyBromley/DeepCTR
|
3d12ffc0e0a5e893dce8bd315824c180445b772e
|
[
"Apache-2.0"
] | 2
|
2019-11-07T10:17:40.000Z
|
2020-04-13T14:25:14.000Z
|
deepctr/layers/sequence.py
|
BradyBromley/DeepCTR
|
3d12ffc0e0a5e893dce8bd315824c180445b772e
|
[
"Apache-2.0"
] | 7
|
2019-12-16T22:22:25.000Z
|
2022-02-10T00:37:34.000Z
|
deepctr/layers/sequence.py
|
BradyBromley/DeepCTR
|
3d12ffc0e0a5e893dce8bd315824c180445b772e
|
[
"Apache-2.0"
] | 1
|
2020-01-07T09:12:21.000Z
|
2020-01-07T09:12:21.000Z
|
# -*- coding:utf-8 -*-
"""
Author:
Weichen Shen,wcshen1994@163.com
"""
import numpy as np
import tensorflow as tf
from tensorflow.python.keras import backend as K
from tensorflow.python.keras.initializers import TruncatedNormal
from tensorflow.python.keras.layers import LSTM, Lambda, Layer
from .core import LocalActivationUnit
from .normalization import LayerNormalization
if tf.__version__ >= '2.0.0':
from ..contrib.rnn_v2 import dynamic_rnn
else:
from ..contrib.rnn import dynamic_rnn
from ..contrib.utils import QAAttGRUCell, VecAttGRUCell
from .utils import reduce_sum,reduce_max,div,softmax,reduce_mean
class SequencePoolingLayer(Layer):
    """The SequencePoolingLayer is used to apply pooling operation(sum,mean,max) on variable-length sequence feature/multi-value feature.

      Input shape
        - A list of two tensor [seq_value,seq_len]

        - seq_value is a 3D tensor with shape: ``(batch_size, T, embedding_size)``

        - seq_len is a 2D tensor with shape : ``(batch_size, 1)``,indicate valid length of each sequence.

      Output shape
        - 3D tensor with shape: ``(batch_size, 1, embedding_size)``.

      Arguments
        - **mode**:str.Pooling operation to be used,can be sum,mean or max.

        - **supports_masking**:If True,the input need to support masking.
    """

    def __init__(self, mode='mean', supports_masking=False, **kwargs):
        if mode not in ['sum', 'mean', 'max']:
            # Bug fix: the old message omitted 'max' even though it is accepted.
            raise ValueError("mode must be sum, mean or max")
        self.mode = mode
        # Small epsilon keeps the mean-pooling division finite for
        # zero-length sequences.
        self.eps = tf.constant(1e-8, tf.float32)
        super(SequencePoolingLayer, self).__init__(**kwargs)
        self.supports_masking = supports_masking

    def build(self, input_shape):
        if not self.supports_masking:
            # Without masking, the max sequence length comes from the static
            # shape of the value tensor.
            self.seq_len_max = int(input_shape[0][1])
        super(SequencePoolingLayer, self).build(
            input_shape)  # Be sure to call this somewhere!

    def call(self, seq_value_len_list, mask=None, **kwargs):
        if self.supports_masking:
            if mask is None:
                raise ValueError(
                    "When supports_masking=True,input must support masking")
            uiseq_embed_list = seq_value_len_list
            mask = tf.cast(mask, tf.float32)  # tf.to_float(mask)
            # Valid length per example is the number of unmasked positions.
            user_behavior_length = reduce_sum(mask, axis=-1, keep_dims=True)
            mask = tf.expand_dims(mask, axis=2)
        else:
            uiseq_embed_list, user_behavior_length = seq_value_len_list
            mask = tf.sequence_mask(user_behavior_length,
                                    self.seq_len_max, dtype=tf.float32)
            mask = tf.transpose(mask, (0, 2, 1))
        embedding_size = uiseq_embed_list.shape[-1]
        # Broadcast the (batch, T, 1) mask across the embedding dimension and
        # zero out the padded positions.
        mask = tf.tile(mask, [1, 1, embedding_size])
        uiseq_embed_list *= mask
        hist = uiseq_embed_list
        if self.mode == "max":
            # NOTE(review): masked positions were zeroed, so 'max' pooling
            # assumes non-negative embeddings give the intended result.
            return reduce_max(hist, 1, keep_dims=True)
        hist = reduce_sum(hist, 1, keep_dims=False)
        if self.mode == "mean":
            hist = div(hist, tf.cast(user_behavior_length, tf.float32) + self.eps)
        hist = tf.expand_dims(hist, axis=1)
        return hist

    def compute_output_shape(self, input_shape):
        if self.supports_masking:
            return (None, 1, input_shape[-1])
        else:
            return (None, 1, input_shape[0][-1])

    def compute_mask(self, inputs, mask):
        # Pooling collapses the time axis, so no mask is propagated.
        return None

    def get_config(self, ):
        config = {'mode': self.mode, 'supports_masking': self.supports_masking}
        base_config = super(SequencePoolingLayer, self).get_config()
        return dict(list(base_config.items()) + list(config.items()))
class WeightedSequenceLayer(Layer):
    """The WeightedSequenceLayer is used to apply weight score on variable-length sequence feature/multi-value feature.

      Input shape
        - A list of two tensor [seq_value,seq_len,seq_weight]

        - seq_value is a 3D tensor with shape: ``(batch_size, T, embedding_size)``

        - seq_len is a 2D tensor with shape : ``(batch_size, 1)``,indicate valid length of each sequence.

        - seq_weight is a 3D tensor with shape: ``(batch_size, T, 1)``

      Output shape
        - 3D tensor with shape: ``(batch_size, T, embedding_size)``.

      Arguments
        - **weight_normalization**: bool.Whether normalize the weight score before applying to sequence.

        - **supports_masking**:If True,the input need to support masking.
    """

    def __init__(self, weight_normalization=False, supports_masking=False, **kwargs):
        super(WeightedSequenceLayer, self).__init__(**kwargs)
        self.weight_normalization = weight_normalization
        self.supports_masking = supports_masking

    def build(self, input_shape):
        if not self.supports_masking:
            # Without masking, the max sequence length comes from the static
            # shape of the key tensor.
            self.seq_len_max = int(input_shape[0][1])
        super(WeightedSequenceLayer, self).build(
            input_shape)  # Be sure to call this somewhere!

    def call(self, input_list, mask=None, **kwargs):
        if self.supports_masking:
            if mask is None:
                raise ValueError(
                    "When supports_masking=True,input must support masking")
            key_input, value_input = input_list
            mask = tf.expand_dims(mask[0], axis=2)
        else:
            key_input, key_length_input, value_input = input_list
            mask = tf.sequence_mask(key_length_input,
                                    self.seq_len_max, dtype=tf.bool)
            mask = tf.transpose(mask, (0, 2, 1))
        embedding_size = key_input.shape[-1]
        # Padded positions get -inf-like scores before softmax (so they end up
        # with ~0 weight) or plain zeros when no normalization is applied.
        if self.weight_normalization:
            paddings = tf.ones_like(value_input) * (-2 ** 32 + 1)
        else:
            paddings = tf.zeros_like(value_input)
        value_input = tf.where(mask, value_input, paddings)
        if self.weight_normalization:
            value_input = softmax(value_input, dim=1)
        if len(value_input.shape) == 2:
            value_input = tf.expand_dims(value_input, axis=2)
            value_input = tf.tile(value_input, [1, 1, embedding_size])
        return tf.multiply(key_input, value_input)

    def compute_output_shape(self, input_shape):
        return input_shape[0]

    def compute_mask(self, inputs, mask):
        if self.supports_masking:
            return mask[0]
        else:
            return None

    def get_config(self, ):
        # Bug fix: weight_normalization was previously omitted here, so a
        # serialized layer lost that setting on round-trip.
        config = {'weight_normalization': self.weight_normalization,
                  'supports_masking': self.supports_masking}
        base_config = super(WeightedSequenceLayer, self).get_config()
        return dict(list(base_config.items()) + list(config.items()))
class AttentionSequencePoolingLayer(Layer):
    """The attentional sequence pooling operation used in DIN.

    Scores each key against the query with a local activation unit (a small
    MLP), masks positions beyond the true sequence length, and returns either
    the attention-weighted pooling of the keys or the raw attention scores.

      Input shape
        - A list of three tensors: [query, keys, keys_length]
        - query is a 3D tensor with shape: ``(batch_size, 1, embedding_size)``
        - keys is a 3D tensor with shape: ``(batch_size, T, embedding_size)``
        - keys_length is a 2D tensor with shape: ``(batch_size, 1)``

      Output shape
        - 3D tensor with shape: ``(batch_size, 1, embedding_size)``
          (``(batch_size, 1, T)`` attention scores when ``return_score=True``).

      Arguments
        - **att_hidden_units**: list of positive integer, the attention net layer number and units in each layer.
        - **att_activation**: Activation function to use in attention net.
        - **weight_normalization**: bool. Whether normalize the attention score of local activation unit (softmax over keys).
        - **return_score**: bool. Whether to return the (masked) attention scores instead of the pooled keys.
        - **supports_masking**: If True, the input need to support masking.

      References
        - [Zhou G, Zhu X, Song C, et al. Deep interest network for click-through rate prediction[C]//Proceedings of the 24th ACM SIGKDD International Conference on Knowledge Discovery & Data Mining. ACM, 2018: 1059-1068.](https://arxiv.org/pdf/1706.06978.pdf)
    """

    def __init__(self, att_hidden_units=(80, 40), att_activation='sigmoid', weight_normalization=False,
                 return_score=False,
                 supports_masking=False, **kwargs):
        self.att_hidden_units = att_hidden_units
        self.att_activation = att_activation
        self.weight_normalization = weight_normalization
        self.return_score = return_score
        # super().__init__ resets supports_masking, so set the flag afterwards.
        super(AttentionSequencePoolingLayer, self).__init__(**kwargs)
        self.supports_masking = supports_masking

    def build(self, input_shape):
        # Without Keras masking, validate the [query, keys, keys_length]
        # shapes up front so errors surface at graph-construction time.
        if not self.supports_masking:
            if not isinstance(input_shape, list) or len(input_shape) != 3:
                raise ValueError('A `AttentionSequencePoolingLayer` layer should be called '
                                 'on a list of 3 inputs')

            if len(input_shape[0]) != 3 or len(input_shape[1]) != 3 or len(input_shape[2]) != 2:
                raise ValueError(
                    "Unexpected inputs dimensions,the 3 tensor dimensions are %d,%d and %d , expect to be 3,3 and 2" % (
                        len(input_shape[0]), len(input_shape[1]), len(input_shape[2])))

            if input_shape[0][-1] != input_shape[1][-1] or input_shape[0][1] != 1 or input_shape[2][1] != 1:
                raise ValueError('A `AttentionSequencePoolingLayer` layer requires '
                                 'inputs of a 3 tensor with shape (None,1,embedding_size),(None,T,embedding_size) and (None,1)'
                                 'Got different shapes: %s' % (input_shape))
        else:
            pass
        # Small MLP that produces one attention logit per (query, key) pair.
        self.local_att = LocalActivationUnit(
            self.att_hidden_units, self.att_activation, l2_reg=0, dropout_rate=0, use_bn=False, seed=1024, )
        super(AttentionSequencePoolingLayer, self).build(
            input_shape)  # Be sure to call this somewhere!

    def call(self, inputs, mask=None, training=None, **kwargs):
        if self.supports_masking:
            if mask is None:
                raise ValueError(
                    "When supports_masking=True,input must support masking")
            queries, keys = inputs
            # mask[-1] is the keys' boolean mask: (batch, T) -> (batch, 1, T).
            key_masks = tf.expand_dims(mask[-1], axis=1)
        else:
            queries, keys, keys_length = inputs
            hist_len = keys.get_shape()[1]
            # keys_length is (batch, 1), so the mask comes out (batch, 1, T).
            key_masks = tf.sequence_mask(keys_length, hist_len)

        # attention_score: (batch, T, 1) -> transpose to (batch, 1, T).
        attention_score = self.local_att([queries, keys], training=training)

        outputs = tf.transpose(attention_score, (0, 2, 1))

        # When normalizing with softmax, pad invalid positions with a huge
        # negative value so they vanish after softmax; otherwise zero them.
        if self.weight_normalization:
            paddings = tf.ones_like(outputs) * (-2 ** 32 + 1)
        else:
            paddings = tf.zeros_like(outputs)

        outputs = tf.where(key_masks, outputs, paddings)

        if self.weight_normalization:
            outputs = softmax(outputs)

        if not self.return_score:
            # Weighted sum over keys: (batch, 1, T) x (batch, T, E) -> (batch, 1, E).
            outputs = tf.matmul(outputs, keys)

        # NOTE(review): lexicographic string comparison of tf.__version__ is
        # unreliable (e.g. '1.4.0' < '1.13.0' is False) — confirm intent.
        if tf.__version__ < '1.13.0':
            outputs._uses_learning_phase = attention_score._uses_learning_phase
        else:
            outputs._uses_learning_phase = training is not None

        return outputs

    def compute_output_shape(self, input_shape):
        # Scores: one weight per key position; pooled: one embedding vector.
        if self.return_score:
            return (None, 1, input_shape[1][1])
        else:
            return (None, 1, input_shape[0][-1])

    def compute_mask(self, inputs, mask):
        # Pooling collapses the time axis, so no mask is propagated.
        return None

    def get_config(self, ):
        """Return the serializable layer config."""
        config = {'att_hidden_units': self.att_hidden_units, 'att_activation': self.att_activation,
                  'weight_normalization': self.weight_normalization, 'return_score': self.return_score,
                  'supports_masking': self.supports_masking}
        base_config = super(AttentionSequencePoolingLayer, self).get_config()
        return dict(list(base_config.items()) + list(config.items()))
class BiLSTM(Layer):
    """A multiple layer Bidirectional Residual LSTM Layer.

      Input shape
        - 3D tensor with shape ``(batch_size, timesteps, input_dim)``.

      Output shape
        - 3D tensor with shape: ``(batch_size, timesteps, units)``
          (``2 * units`` on the last axis when ``merge_mode='concat'``;
          a list of two tensors when ``merge_mode`` is ``None``).

      Arguments
        - **units**: Positive integer, dimensionality of the output space.
        - **layers**: Positive integer, number of LSTM layers to stacked.
        - **res_layers**: Positive integer, number of residual connection to used in last ``res_layers``.
        - **dropout_rate**: Float between 0 and 1. Fraction of the units to drop for the linear transformation of the inputs.
        - **merge_mode**: Mode by which outputs of the forward and backward RNNs will be combined. One of { ``'fw'`` , ``'bw'`` , ``'sum'`` , ``'mul'`` , ``'concat'`` , ``'ave'`` , ``None`` }. If None, the outputs will not be combined, they will be returned as a list.
    """

    def __init__(self, units, layers=2, res_layers=0, dropout_rate=0.2, merge_mode='ave', **kwargs):
        if merge_mode not in ['fw', 'bw', 'sum', 'mul', 'ave', 'concat', None]:
            raise ValueError('Invalid merge mode. '
                             'Merge mode should be one of '
                             '{"fw","bw","sum", "mul", "ave", "concat", None}')
        self.units = units
        self.layers = layers
        self.res_layers = res_layers
        self.dropout_rate = dropout_rate
        self.merge_mode = merge_mode

        super(BiLSTM, self).__init__(**kwargs)
        self.supports_masking = True

    def build(self, input_shape):
        if len(input_shape) != 3:
            raise ValueError(
                "Unexpected inputs dimensions %d, expect to be 3 dimensions" % (len(input_shape)))
        # One forward and one backward LSTM per stacked layer.
        self.fw_lstm = []
        self.bw_lstm = []
        for _ in range(self.layers):
            self.fw_lstm.append(
                LSTM(self.units, dropout=self.dropout_rate, bias_initializer='ones', return_sequences=True,
                     unroll=True))
            self.bw_lstm.append(
                LSTM(self.units, dropout=self.dropout_rate, bias_initializer='ones', return_sequences=True,
                     go_backwards=True, unroll=True))

        super(BiLSTM, self).build(
            input_shape)  # Be sure to call this somewhere!

    def call(self, inputs, mask=None, **kwargs):
        input_fw = inputs
        input_bw = inputs
        for i in range(self.layers):
            output_fw = self.fw_lstm[i](input_fw)
            output_bw = self.bw_lstm[i](input_bw)
            # go_backwards=True emits the sequence reversed; flip it back to
            # forward time order so the two directions stay aligned.
            output_bw = Lambda(lambda x: K.reverse(
                x, 1), mask=lambda inputs, mask: mask)(output_bw)

            # Residual connections only on the last `res_layers` layers.
            if i >= self.layers - self.res_layers:
                output_fw += input_fw
                output_bw += input_bw
            input_fw = output_fw
            input_bw = output_bw

        output_fw = input_fw
        output_bw = input_bw

        # Combine the two directions according to merge_mode.
        if self.merge_mode == "fw":
            output = output_fw
        elif self.merge_mode == "bw":
            output = output_bw
        elif self.merge_mode == 'concat':
            output = K.concatenate([output_fw, output_bw])
        elif self.merge_mode == 'sum':
            output = output_fw + output_bw
        elif self.merge_mode == 'ave':
            output = (output_fw + output_bw) / 2
        elif self.merge_mode == 'mul':
            output = output_fw * output_bw
        elif self.merge_mode is None:
            output = [output_fw, output_bw]

        return output

    def compute_output_shape(self, input_shape):
        # BUGFIX: removed leftover debug `print(self.merge_mode)`.
        if self.merge_mode is None:
            return [input_shape, input_shape]
        elif self.merge_mode == 'concat':
            return input_shape[:-1] + (input_shape[-1] * 2,)
        else:
            return input_shape

    def compute_mask(self, inputs, mask):
        # Timestep axis is preserved, so the incoming mask remains valid.
        return mask

    def get_config(self, ):
        """Return the serializable layer config."""
        config = {'units': self.units, 'layers': self.layers,
                  'res_layers': self.res_layers, 'dropout_rate': self.dropout_rate, 'merge_mode': self.merge_mode}
        base_config = super(BiLSTM, self).get_config()
        return dict(list(base_config.items()) + list(config.items()))
class Transformer(Layer):
    """Simplified version of Transformer proposed in 《Attention is all you need》

      Input shape
        - a list of two 3D tensor with shape ``(batch_size, timesteps, input_dim)`` if supports_masking=True.
        - a list of two 4 tensors, first two tensors with shape ``(batch_size, timesteps, input_dim)``,last two tensors with shape ``(batch_size, 1)`` if supports_masking=False.

      Output shape
        - 3D tensor with shape: ``(batch_size, 1, input_dim)``.

      Arguments
        - **att_embedding_size**: int.The embedding size in multi-head self-attention network.
        - **head_num**: int.The head number in multi-head self-attention network.
        - **dropout_rate**: float between 0 and 1. Fraction of the units to drop.
        - **use_positional_encoding**: bool. Whether or not use positional_encoding
        - **use_res**: bool. Whether or not use standard residual connections before output.
        - **use_feed_forward**: bool. Whether or not use pointwise feed foward network.
        - **use_layer_norm**: bool. Whether or not use Layer Normalization.
        - **blinding**: bool. Whether or not use blinding (mask the diagonal so a position cannot attend to itself).
        - **seed**: A Python integer to use as random seed.
        - **supports_masking**: bool. Whether or not support masking.

      References
        - [Vaswani, Ashish, et al. "Attention is all you need." Advances in Neural Information Processing Systems. 2017.](https://papers.nips.cc/paper/7181-attention-is-all-you-need.pdf)
    """

    def __init__(self, att_embedding_size=1, head_num=8, dropout_rate=0.0, use_positional_encoding=True, use_res=True,
                 use_feed_forward=True, use_layer_norm=False, blinding=True, seed=1024, supports_masking=False,
                 **kwargs):
        if head_num <= 0:
            raise ValueError('head_num must be a int > 0')
        self.att_embedding_size = att_embedding_size
        self.head_num = head_num
        self.num_units = att_embedding_size * head_num
        self.use_res = use_res
        self.use_feed_forward = use_feed_forward
        self.seed = seed
        self.use_positional_encoding = use_positional_encoding
        self.dropout_rate = dropout_rate
        self.use_layer_norm = use_layer_norm
        self.blinding = blinding
        super(Transformer, self).__init__(**kwargs)
        self.supports_masking = supports_masking

    def build(self, input_shape):
        embedding_size = int(input_shape[0][-1])
        # Multi-head output must match the input embedding for the residual add.
        if self.num_units != embedding_size:
            raise ValueError(
                "att_embedding_size * head_num must equal the last dimension size of inputs,got %d * %d != %d" % (
                    self.att_embedding_size, self.head_num, embedding_size))
        self.seq_len_max = int(input_shape[0][-2])
        # Q/K/V projection matrices, all (embedding_size, D * head_num).
        self.W_Query = self.add_weight(name='query', shape=[embedding_size, self.att_embedding_size * self.head_num],
                                       dtype=tf.float32,
                                       initializer=tf.keras.initializers.TruncatedNormal(seed=self.seed))
        self.W_key = self.add_weight(name='key', shape=[embedding_size, self.att_embedding_size * self.head_num],
                                     dtype=tf.float32,
                                     initializer=tf.keras.initializers.TruncatedNormal(seed=self.seed + 1))
        self.W_Value = self.add_weight(name='value', shape=[embedding_size, self.att_embedding_size * self.head_num],
                                       dtype=tf.float32,
                                       initializer=tf.keras.initializers.TruncatedNormal(seed=self.seed + 2))
        if self.use_feed_forward:
            # Pointwise FFN: expand to 4x width, then project back.
            self.fw1 = self.add_weight('fw1', shape=[self.num_units, 4 * self.num_units], dtype=tf.float32,
                                       initializer=tf.keras.initializers.glorot_uniform(seed=self.seed))
            self.fw2 = self.add_weight('fw2', shape=[4 * self.num_units, self.num_units], dtype=tf.float32,
                                       initializer=tf.keras.initializers.glorot_uniform(seed=self.seed))

        self.dropout = tf.keras.layers.Dropout(
            self.dropout_rate, seed=self.seed)
        self.ln = LayerNormalization()
        # Be sure to call this somewhere!
        super(Transformer, self).build(input_shape)

    def call(self, inputs, mask=None, training=None, **kwargs):
        if self.supports_masking:
            queries, keys = inputs
            query_masks, key_masks = mask
            query_masks = tf.cast(query_masks, tf.float32)
            key_masks = tf.cast(key_masks, tf.float32)
        else:
            queries, keys, query_masks, key_masks = inputs

            query_masks = tf.sequence_mask(
                query_masks, self.seq_len_max, dtype=tf.float32)
            key_masks = tf.sequence_mask(
                key_masks, self.seq_len_max, dtype=tf.float32)
            query_masks = tf.squeeze(query_masks, axis=1)
            key_masks = tf.squeeze(key_masks, axis=1)

        if self.use_positional_encoding:
            queries = positional_encoding(queries)
            # BUGFIX: was `positional_encoding(queries)`, which silently
            # replaced the keys with the query sequence's encoding.
            keys = positional_encoding(keys)

        querys = tf.tensordot(queries, self.W_Query,
                              axes=(-1, 0))  # None T_q D*head_num
        keys = tf.tensordot(keys, self.W_key, axes=(-1, 0))
        # NOTE(review): values are projected from the already-projected keys,
        # not from the raw key input. This looks unintentional but is kept
        # for weight/behavior compatibility — confirm against the reference.
        values = tf.tensordot(keys, self.W_Value, axes=(-1, 0))

        # Split the heads onto the batch axis: (head_num*N, T, D).
        querys = tf.concat(tf.split(querys, self.head_num, axis=2), axis=0)
        keys = tf.concat(tf.split(keys, self.head_num, axis=2), axis=0)
        values = tf.concat(tf.split(values, self.head_num, axis=2), axis=0)

        # Scaled dot-product attention logits: (head_num*N, T_q, T_k).
        outputs = tf.matmul(querys, keys, transpose_b=True)

        outputs = outputs / (keys.get_shape().as_list()[-1] ** 0.5)

        key_masks = tf.tile(key_masks, [self.head_num, 1])

        # (h*N, T_q, T_k)
        key_masks = tf.tile(tf.expand_dims(key_masks, 1),
                            [1, tf.shape(queries)[1], 1])

        # Push padded key positions to -inf so softmax zeroes them out.
        paddings = tf.ones_like(outputs) * (-2 ** 32 + 1)

        # (h*N, T_q, T_k)
        outputs = tf.where(tf.equal(key_masks, 1), outputs, paddings, )
        if self.blinding:
            # Mask the diagonal so each position cannot attend to itself.
            try:
                outputs = tf.matrix_set_diag(outputs, tf.ones_like(outputs)[
                    :, :, 0] * (-2 ** 32 + 1))
            except AttributeError:
                # tf.matrix_set_diag was removed in TF 2.x.
                outputs = tf.compat.v1.matrix_set_diag(outputs, tf.ones_like(outputs)[
                    :, :, 0] * (-2 ** 32 + 1))

        # Subtract the row max for numerically stable softmax.
        outputs -= reduce_max(outputs, axis=-1, keep_dims=True)
        outputs = softmax(outputs)
        query_masks = tf.tile(query_masks, [self.head_num, 1])  # (h*N, T_q)
        # (h*N, T_q, T_k)
        query_masks = tf.tile(tf.expand_dims(
            query_masks, -1), [1, 1, tf.shape(keys)[1]])

        # Zero out rows that correspond to padded query positions.
        outputs *= query_masks

        outputs = self.dropout(outputs, training=training)
        # Weighted sum
        # ( h*N, T_q, C/h)
        result = tf.matmul(outputs, values)
        # Re-assemble the heads: (N, T_q, C).
        result = tf.concat(tf.split(result, self.head_num, axis=0), axis=2)

        if self.use_res:
            result += queries
        if self.use_layer_norm:
            result = self.ln(result)

        if self.use_feed_forward:
            fw1 = tf.nn.relu(tf.tensordot(result, self.fw1, axes=[-1, 0]))
            fw1 = self.dropout(fw1, training=training)
            fw2 = tf.tensordot(fw1, self.fw2, axes=[-1, 0])
            if self.use_res:
                result += fw2
            if self.use_layer_norm:
                result = self.ln(result)

        # Average-pool over the time axis: (N, 1, C).
        return reduce_mean(result, axis=1, keep_dims=True)

    def compute_output_shape(self, input_shape):
        return (None, 1, self.att_embedding_size * self.head_num)

    def compute_mask(self, inputs, mask=None):
        # Time axis is pooled away, so no mask is propagated.
        return None

    def get_config(self, ):
        """Return the serializable layer config."""
        config = {'att_embedding_size': self.att_embedding_size, 'head_num': self.head_num,
                  'dropout_rate': self.dropout_rate, 'use_res': self.use_res,
                  'use_positional_encoding': self.use_positional_encoding, 'use_feed_forward': self.use_feed_forward,
                  'use_layer_norm': self.use_layer_norm, 'seed': self.seed, 'supports_masking': self.supports_masking,
                  'blinding': self.blinding}
        base_config = super(Transformer, self).get_config()
        return dict(list(base_config.items()) + list(config.items()))
def positional_encoding(inputs,
                        pos_embedding_trainable=True,
                        zero_pad=False,
                        scale=True,
                        ):
    '''Add sinusoidal positional encodings to ``inputs``.

    Args:
      - inputs: A 3d Tensor with shape of (N, T, C). (Docstring previously
        said 2d (N, T), but the code unpacks three static dimensions.)
      - pos_embedding_trainable: Boolean. If True, the table is a trainable
        variable initialized with the sinusoid values; otherwise a constant.
      - zero_pad: Boolean. If True, all the values of the first row (id = 0) should be constant zero
      - scale: Boolean. If True, the output will be multiplied by sqrt num_units(check details from paper)

    Returns:
      - A Tensor of the same shape as ``inputs`` with the positional
        encoding added elementwise.
    '''

    _, T, num_units = inputs.get_shape().as_list()
    # Position indices 0..T-1, with a leading batch axis for the lookup.
    position_ind = tf.expand_dims(tf.range(T), 0)

    # First part of the PE function: sin and cos argument
    position_enc = np.array([
        [pos / np.power(10000, 2. * i / num_units)
         for i in range(num_units)]
        for pos in range(T)])

    # Second part, apply the cosine to even columns and sin to odds.
    position_enc[:, 0::2] = np.sin(position_enc[:, 0::2])  # dim 2i
    position_enc[:, 1::2] = np.cos(position_enc[:, 1::2])  # dim 2i+1

    # Convert to a tensor
    if pos_embedding_trainable:
        lookup_table = K.variable(position_enc, dtype=tf.float32)
    else:
        # BUGFIX: previously `lookup_table` was never assigned in this
        # branch, raising NameError when pos_embedding_trainable=False.
        lookup_table = tf.constant(position_enc, dtype=tf.float32)

    if zero_pad:
        lookup_table = tf.concat((tf.zeros(shape=[1, num_units]),
                                  lookup_table[1:, :]), 0)

    outputs = tf.nn.embedding_lookup(lookup_table, position_ind)

    if scale:
        outputs = outputs * num_units ** 0.5
    return outputs + inputs
class BiasEncoding(Layer):
    """Session-interest bias encoding (used in DSIN).

    Adds three learned bias embeddings — per-session, per-position, and
    per-embedding-dimension — to each of the ``sess_max_count`` session
    tensors.

      Input shape
        - a list of ``sess_max_count`` 3D tensors with shape
          ``(batch_size, seq_len_max, embed_size)`` (a single tensor when
          ``sess_max_count == 1``).

      Output shape
        - a list of tensors with the same shapes as the inputs.

      Arguments
        - **sess_max_count**: positive int, number of session tensors.
        - **seed**: A Python integer to use as random seed.
    """

    def __init__(self, sess_max_count, seed=1024, **kwargs):
        self.sess_max_count = sess_max_count
        self.seed = seed
        super(BiasEncoding, self).__init__(**kwargs)

    def build(self, input_shape):
        # Create a trainable weight variable for this layer.
        # BUGFIX: `Dimension.value` is TF1-only; `int(...)` works on both
        # TF1 Dimension objects and the plain ints TF2 shapes contain.
        if self.sess_max_count == 1:
            embed_size = int(input_shape[2])
            seq_len_max = int(input_shape[1])
        else:
            embed_size = int(input_shape[0][2])
            seq_len_max = int(input_shape[0][1])

        self.sess_bias_embedding = self.add_weight('sess_bias_embedding', shape=(self.sess_max_count, 1, 1),
                                                   initializer=TruncatedNormal(
                                                       mean=0.0, stddev=0.0001, seed=self.seed))
        self.seq_bias_embedding = self.add_weight('seq_bias_embedding', shape=(1, seq_len_max, 1),
                                                  initializer=TruncatedNormal(
                                                      mean=0.0, stddev=0.0001, seed=self.seed))
        self.item_bias_embedding = self.add_weight('item_bias_embedding', shape=(1, 1, embed_size),
                                                   initializer=TruncatedNormal(
                                                       mean=0.0, stddev=0.0001, seed=self.seed))

        # Be sure to call this somewhere!
        super(BiasEncoding, self).build(input_shape)

    def call(self, inputs, mask=None):
        """
        :param concated_embeds_value: None * field_size * embedding_size
        :return: None*1
        """
        # Broadcast-add all three bias terms onto each session tensor.
        transformer_out = []
        for i in range(self.sess_max_count):
            transformer_out.append(
                inputs[i] + self.item_bias_embedding + self.seq_bias_embedding + self.sess_bias_embedding[i])

        return transformer_out

    def compute_output_shape(self, input_shape):
        return input_shape

    def compute_mask(self, inputs, mask=None):
        return mask

    def get_config(self, ):
        """Return the serializable layer config."""
        config = {'sess_max_count': self.sess_max_count, 'seed': self.seed, }
        base_config = super(BiasEncoding, self).get_config()
        return dict(list(base_config.items()) + list(config.items()))
class DynamicGRU(Layer):
    """Variable-length GRU layer used in DIEN.

      Input shape
        - ``[rnn_input, sequence_length]`` for ``gru_type`` 'GRU'/'AIGRU',
          or ``[rnn_input, sequence_length, att_score]`` for 'AGRU'/'AUGRU'.

      Output shape
        - the full output sequence when ``return_sequence=True``, otherwise
          the final hidden state with shape ``(batch_size, 1, num_units)``.

      Arguments
        - **num_units**: int, hidden size; defaults to the input's last dim.
        - **gru_type**: one of 'GRU', 'AIGRU', 'AGRU', 'AUGRU'.
        - **return_sequence**: bool, return the whole sequence or the state.
    """

    def __init__(self, num_units=None, gru_type='GRU', return_sequence=True, **kwargs):
        self.num_units = num_units
        self.return_sequence = return_sequence
        self.gru_type = gru_type
        super(DynamicGRU, self).__init__(**kwargs)

    def build(self, input_shape):
        # Create a trainable weight variable for this layer.
        input_seq_shape = input_shape[0]
        if self.num_units is None:
            self.num_units = input_seq_shape.as_list()[-1]
        # AGRU/AUGRU use attention-modulated GRU cells; otherwise plain GRU.
        if self.gru_type == "AGRU":
            self.gru_cell = QAAttGRUCell(self.num_units)
        elif self.gru_type == "AUGRU":
            self.gru_cell = VecAttGRUCell(self.num_units)
        else:
            # BUGFIX: narrowed bare `except:` — only the TF2 removal of
            # tf.nn.rnn_cell (AttributeError) should trigger the fallback.
            try:
                self.gru_cell = tf.nn.rnn_cell.GRUCell(self.num_units)
            except AttributeError:
                self.gru_cell = tf.compat.v1.nn.rnn_cell.GRUCell(self.num_units)

        # Be sure to call this somewhere!
        super(DynamicGRU, self).build(input_shape)

    def call(self, input_list):
        """
        :param concated_embeds_value: None * field_size * embedding_size
        :return: None*1
        """
        if self.gru_type == "GRU" or self.gru_type == "AIGRU":
            rnn_input, sequence_length = input_list
            att_score = None
        else:
            rnn_input, sequence_length, att_score = input_list

        rnn_output, hidden_state = dynamic_rnn(self.gru_cell, inputs=rnn_input, att_scores=att_score,
                                               sequence_length=tf.squeeze(sequence_length,
                                                                          ), dtype=tf.float32, scope=self.name)
        if self.return_sequence:
            return rnn_output
        else:
            return tf.expand_dims(hidden_state, axis=1)

    def compute_output_shape(self, input_shape):
        rnn_input_shape = input_shape[0]
        if self.return_sequence:
            return rnn_input_shape
        else:
            return (None, 1, rnn_input_shape[2])

    def get_config(self, ):
        """Return the serializable layer config."""
        config = {'num_units': self.num_units, 'gru_type': self.gru_type, 'return_sequence': self.return_sequence}
        base_config = super(DynamicGRU, self).get_config()
        return dict(list(base_config.items()) + list(config.items()))
class KMaxPooling(Layer):
    """K Max pooling that selects the k biggest value along the specific axis.

      Input shape
        - nD tensor with shape: ``(batch_size, ..., input_dim)``.

      Output shape
        - nD tensor with shape: ``(batch_size, ..., output_dim)``.

      Arguments
        - **k**: positive integer, number of top elements to look for along the ``axis`` dimension.
        - **axis**: positive integer, the dimension to look for elements.
          Note: must be given explicitly as a positive index; the default
          ``-1`` is rejected by the validation in ``build``.
    """

    def __init__(self, k=1, axis=-1, **kwargs):
        self.k = k
        self.axis = axis
        super(KMaxPooling, self).__init__(**kwargs)

    def build(self, input_shape):
        # BUGFIX: upper bound was len(input_shape), which passed validation
        # for axis == rank and then crashed with IndexError on
        # input_shape[self.axis] below. Valid axes are 1 .. rank-1.
        if self.axis < 1 or self.axis > len(input_shape) - 1:
            raise ValueError("axis must be 1~%d,now is %d" %
                             (len(input_shape) - 1, self.axis))

        if self.k < 1 or self.k > input_shape[self.axis]:
            raise ValueError("k must be in 1 ~ %d,now k is %d" %
                             (input_shape[self.axis], self.k))
        self.dims = len(input_shape)
        # Be sure to call this somewhere!
        super(KMaxPooling, self).build(input_shape)

    def call(self, inputs):
        # swap the last and the axis dimensions since top_k will be applied along the last dimension
        perm = list(range(self.dims))
        perm[-1], perm[self.axis] = perm[self.axis], perm[-1]
        shifted_input = tf.transpose(inputs, perm)

        # extract top_k, returns two tensors [values, indices]
        top_k = tf.nn.top_k(shifted_input, k=self.k, sorted=True, name=None)[0]
        # Swap the axes back so only `axis` has changed size (to k).
        output = tf.transpose(top_k, perm)

        return output

    def compute_output_shape(self, input_shape):
        output_shape = list(input_shape)
        output_shape[self.axis] = self.k
        return tuple(output_shape)

    def get_config(self, ):
        """Return the serializable layer config."""
        config = {'k': self.k, 'axis': self.axis}
        base_config = super(KMaxPooling, self).get_config()
        return dict(list(base_config.items()) + list(config.items()))
| 40.511571
| 280
| 0.608539
|
4a086d604514375a340bbb6e9d0dca501999c674
| 1,654
|
py
|
Python
|
Disease/urls.py
|
11pawan11/E-Health-Care
|
53385ca85c40829a68f21190d0d5dc351221158c
|
[
"MIT"
] | null | null | null |
Disease/urls.py
|
11pawan11/E-Health-Care
|
53385ca85c40829a68f21190d0d5dc351221158c
|
[
"MIT"
] | null | null | null |
Disease/urls.py
|
11pawan11/E-Health-Care
|
53385ca85c40829a68f21190d0d5dc351221158c
|
[
"MIT"
] | null | null | null |
"""Disease URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
from django.views.generic.base import RedirectView
from django.conf import settings
from django.conf.urls.static import static
# Route each application under its own URL prefix.
urlpatterns = [
    path('chat/', include('chat.urls')),
    path('appointment/', include('appointment.urls')),
    path('admin/', admin.site.urls),
    path('api/', include('api.urls')),
    path('doctor/', include('doctor.urls')),
    path('roleadmin/', include('roleadmin.urls')),
    path('patient/', include('patient.urls')),
    path('health/', include('Health.urls')),
    # The bare domain redirects to the health app's landing page.
    path('', RedirectView.as_view(url="health/")),
]

# Serve user-uploaded media files (development convenience; no-op when DEBUG is off).
urlpatterns = urlpatterns + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)

# Admin site branding.
admin.site.site_header = 'Smart Health'  # default: "Django Administration"
admin.site.index_title = 'Features area'  # default: "Site administration"
# BUGFIX: corrected user-visible typo "adminsitration" -> "administration".
admin.site.site_title = 'HTML title from administration'  # default: "Django site admin"
| 41.35
| 94
| 0.685611
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.