hexsha stringlengths 40 40 | size int64 2 1.02M | ext stringclasses 10
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 4 245 | max_stars_repo_name stringlengths 6 130 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 245 | max_issues_repo_name stringlengths 6 130 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 245 | max_forks_repo_name stringlengths 6 130 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 1.02M | avg_line_length float64 1 417k | max_line_length int64 1 987k | alphanum_fraction float64 0 1 | content_no_comment stringlengths 0 1.01M | is_comment_constant_removed bool 1
class | is_sharp_comment_removed bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f72ac5724f4c0949289c5827a02bc25b216cc4ef | 687 | py | Python | setup.py | DocNow/twarc-hashtags | 2a8ab84c9585b6efe9696194b6030ce5486a9e7e | [
"MIT"
] | 3 | 2021-09-09T06:22:39.000Z | 2022-02-25T13:51:29.000Z | setup.py | DocNow/twarc-hashtags | 2a8ab84c9585b6efe9696194b6030ce5486a9e7e | [
"MIT"
] | 1 | 2022-01-25T11:07:05.000Z | 2022-01-27T01:33:00.000Z | setup.py | DocNow/twarc-hashtags | 2a8ab84c9585b6efe9696194b6030ce5486a9e7e | [
"MIT"
] | null | null | null | import setuptools
# Read the README so PyPI can render it as the long description.
with open("README.md") as f:
    long_description = f.read()

setuptools.setup(
    name='twarc-hashtags',
    version='0.0.5',
    url='https://github.com/docnow/twarc-hashtags',
    author='Ed Summers',
    author_email='ehs@pobox.com',
    py_modules=['twarc_hashtags'],
    description='A twarc plugin to extract hashtags from Twitter data',
    long_description=long_description,
    long_description_content_type="text/markdown",
    python_requires='>=3.3',
    install_requires=['twarc>=2.1.1'],
    setup_requires=['pytest-runner'],
    tests_require=['pytest'],
    # Register the module under twarc's plugin entry-point group so the
    # `hashtags` subcommand is discoverable at runtime.
    entry_points='''
        [twarc.plugins]
        hashtags=twarc_hashtags:hashtags
    '''
)
| 27.48 | 71 | 0.6754 | import setuptools
with open("README.md") as f:
long_description = f.read()
setuptools.setup(
name='twarc-hashtags',
version='0.0.5',
url='https://github.com/docnow/twarc-hashtags',
author='Ed Summers',
author_email='ehs@pobox.com',
py_modules=['twarc_hashtags'],
description='A twarc plugin to extract hashtags from Twitter data',
long_description=long_description,
long_description_content_type="text/markdown",
python_requires='>=3.3',
install_requires=['twarc>=2.1.1'],
setup_requires=['pytest-runner'],
tests_require=['pytest'],
entry_points='''
[twarc.plugins]
hashtags=twarc_hashtags:hashtags
'''
)
| true | true |
f72ac585b2ba49e680b69313a2fa0d0a5d6a749c | 137 | py | Python | Python/Regex and Parsing/Validating Roman Numerals/Solution.py | PawarAditi/HackerRank | fcd9d1450ee293372ce5f1d4a3b7284ecf472657 | [
"MIT"
] | 219 | 2018-06-17T19:47:22.000Z | 2022-03-27T15:28:56.000Z | Python/Regex and Parsing/Validating Roman Numerals/Solution.py | PawarAditi/HackerRank | fcd9d1450ee293372ce5f1d4a3b7284ecf472657 | [
"MIT"
] | 2 | 2020-08-12T16:47:41.000Z | 2020-12-15T17:05:57.000Z | Python/Regex and Parsing/Validating Roman Numerals/Solution.py | PawarAditi/HackerRank | fcd9d1450ee293372ce5f1d4a3b7284ecf472657 | [
"MIT"
] | 182 | 2018-12-12T21:36:50.000Z | 2022-03-26T17:49:51.000Z | import re
# Valid Roman numerals 1-3999: optional thousands (M{0,3}), then the
# hundreds, tens and units groups, each allowing the subtractive forms
# (CM/CD, XC/XL, IX/IV). re.match anchors at the start; `$` pins the end.
regex_pattern = r'M{0,3}(C[MD]|D?C{0,3})(X[CL]|L?X{0,3})(I[VX]|V?I{0,3})$'
print(str(bool(re.match(regex_pattern, input()))))
regex_pattern = r'M{0,3}(C[MD]|D?C{0,3})(X[CL]|L?X{0,3})(I[VX]|V?I{0,3})$'
print(str(bool(re.match(regex_pattern, input())))) | true | true |
f72ac6556032482e4ba83a528d58e88c2de8f5b6 | 3,955 | py | Python | SimpleServer.py | wanzhiguo/mininero | 7dd71b02a4613478b59b2670ccf7c74a22cc2ffd | [
"BSD-3-Clause"
] | 64 | 2015-06-12T19:29:51.000Z | 2022-01-03T17:14:56.000Z | SimpleServer.py | wanzhiguo/mininero | 7dd71b02a4613478b59b2670ccf7c74a22cc2ffd | [
"BSD-3-Clause"
] | 4 | 2015-11-27T18:49:40.000Z | 2017-12-14T21:32:48.000Z | SimpleServer.py | wanzhiguo/mininero | 7dd71b02a4613478b59b2670ccf7c74a22cc2ffd | [
"BSD-3-Clause"
] | 39 | 2016-02-07T08:47:02.000Z | 2022-03-07T06:07:10.000Z | import MiniNero
import ed25519
import binascii
import PaperWallet
import cherrypy
import os
import time
import bitmonerod
import SimpleXMR2
lasttime = 0
def HexSigningPubKey(s):
    """Derive the hex-encoded ed25519 public key for the hex-encoded
    secret scalar *s*."""
    secret_bytes = ed25519.encodeint(MiniNero.hexToInt(s))
    return binascii.hexlify(ed25519.publickey(secret_bytes))
def Signature(m, sk):
    # Sign message m with the hex-encoded secret scalar sk; returns the
    # signature hex-encoded.
    # note this seems to return nicely sized version of the signature
    # contrast with, i.e. tweetnacl..
    sk2 = ed25519.encodeint(MiniNero.hexToInt(sk))
    pk = ed25519.publickey(sk2)
    return binascii.hexlify(ed25519.signature(m, sk2, pk))


def Verify(sig, m, pk):
    # sig and pk arrive hex-encoded.
    # NOTE(review): reference ed25519 `checkvalid` typically raises on a
    # bad signature rather than returning False -- confirm callers
    # handle that (the POST handler below treats this as a boolean).
    return ed25519.checkvalid(binascii.unhexlify(sig), m, binascii.unhexlify(pk))
class MiniNeroServer:
    # CherryPy resource (MethodDispatcher style) exposing a small
    # signature-authenticated wallet API: address / balance / send /
    # sendXMR.
    exposed = True

    def GET(self, id=None):
        # Hand back the current unix time as a string; clients embed a
        # fresh timestamp in the message they sign for POST requests.
        times = str(int(time.time()))
        return (times)

    def POST(self, signature, Type, timestamp, amount=None, destination=None, pid=None, mixin=None):
        times= int(time.time())
        pubkey = MiniNeroPk
        global lasttime
        if (abs(times - int(timestamp)) > 30):
            # Stale or future-dated request: refuse before doing any
            # signature verification work.
            ver = False
            return ('fail based on timestamp too old')
        else:
            if Type == 'address':
                # The signed message is the request type concatenated
                # with the timestamp.
                message = Type+timestamp
                ver = Verify(signature.encode("utf8"), message.encode("utf8"), pubkey)
                if (ver):
                    print("getting address")
                    address = bitmonerod.myAddress()
                    return (str(address))
                # NOTE(review): when verification fails this falls
                # through and the handler returns None -- confirm that
                # is the intended failure response.
            if Type == 'balance':
                message = Type+timestamp
                ver = Verify(signature.encode("utf8"), message.encode("utf8"), pubkey)
                if (ver):
                    print("getting balance")
                    balance = bitmonerod.balance()
                    # Divide by 1e12 -- presumably atomic units to whole
                    # XMR; verify against bitmonerod.balance() units.
                    return (str(float(balance)/1000000000000))
            if Type == 'send':
                # The decimal point in `amount` is replaced with 'd' in
                # the signed message -- presumably to match what the
                # client signs; verify against the client code.
                message = Type+amount.replace('.', 'd')+timestamp+destination
                ver = Verify(signature.encode("utf8"), message.encode("utf8"), pubkey)
                # NOTE(review): `abs(times - lasttime >30)` applies
                # abs() to a boolean, so this is effectively
                # `times - lasttime > 30`; the intended rate limit was
                # probably `abs(times - lasttime) > 30`.
                if (ver) and (abs(times - lasttime >30 )):
                    #create xmr2 order async, return uuid
                    uuid, xmr_amount, xmr_addr, xmr_pid = SimpleXMR2.btc2xmr(destination, amount)
                    bitmonerod.send(xmr_addr, float(xmr_amount), xmr_pid, 3)
                    lasttime = times
                    return ('order uuid: '+uuid)
            if Type == 'sendXMR':
                message = Type+amount.replace('.', 'd')+timestamp+destination
                ver = Verify(signature.encode("utf8"), message.encode("utf8"), pubkey)
                # Same precedence caveat as the 'send' branch above.
                if (ver) and (abs(times - lasttime >30 )):
                    #create xmr2 order async, return uuid
                    #uuid, xmr_amount, xmr_addr, xmr_pid = SimpleXMR2.btc2xmr(destination, amount)
                    lasttime = times
                    # Direct XMR send: amount/destination/payment id
                    # come straight from the verified request.
                    xmr_amount = amount
                    xmr_addr = destination
                    xmr_pid = pid
                    bitmonerod.send(xmr_addr, float(xmr_amount), xmr_pid, 3)
                    return ('sent')
if __name__ == '__main__':
    #check if api pubkey is created, if not create it:
    if(os.path.isfile('MiniNeroPubKey.py')):
        from MiniNeroPubKey import *
    try:
        MiniNeroPk
    except NameError:
        # No stored pubkey: generate a fresh signing keypair, show the
        # secret to the operator once, and persist only the public key.
        MiniNeroSk= PaperWallet.skGen()
        MiniNeroPk= HexSigningPubKey(MiniNeroSk)
        print("Your new api secret key is:")
        print(MiniNeroSk)
        print("You should save this in a password manager")
        print("Your pubkey will be stored in MiniNeroPubKey.py")
        f = open('MiniNeroPubKey.py', 'w')
        f.write("MiniNeroPk = \'"+MiniNeroPk+"\'")
        # NOTE(review): the file handle is never closed explicitly --
        # consider a `with` block so the key is flushed deterministically.
    print("Your MiniNeroServer PubKey is:")
    print(MiniNeroPk)
    lasttime = 0
    #Launch Cherry Server
    cherrypy.tree.mount(
        MiniNeroServer(), '/api/mininero',
        {'/':
            {'request.dispatch': cherrypy.dispatch.MethodDispatcher()}
        }
    )
    cherrypy.server.socket_host = '0.0.0.0' #run on metal
    cherrypy.engine.start()
    cherrypy.engine.block()
| 35.3125 | 100 | 0.588369 | import MiniNero
import ed25519
import binascii
import PaperWallet
import cherrypy
import os
import time
import bitmonerod
import SimpleXMR2
lasttime = 0
def HexSigningPubKey(s):
return binascii.hexlify(ed25519.publickey(ed25519.encodeint(MiniNero.hexToInt(s))))
def Signature(m, sk):
sk2 = ed25519.encodeint(MiniNero.hexToInt(sk))
pk = ed25519.publickey(sk2)
return binascii.hexlify(ed25519.signature(m, sk2, pk))
def Verify(sig, m, pk):
return ed25519.checkvalid(binascii.unhexlify(sig), m, binascii.unhexlify(pk))
class MiniNeroServer:
exposed = True
def GET(self, id=None):
times = str(int(time.time()))
return (times)
def POST(self, signature, Type, timestamp, amount=None, destination=None, pid=None, mixin=None):
times= int(time.time())
pubkey = MiniNeroPk
global lasttime
if (abs(times - int(timestamp)) > 30):
ver = False
return ('fail based on timestamp too old')
else:
if Type == 'address':
message = Type+timestamp
ver = Verify(signature.encode("utf8"), message.encode("utf8"), pubkey)
if (ver):
print("getting address")
address = bitmonerod.myAddress()
return (str(address))
if Type == 'balance':
message = Type+timestamp
ver = Verify(signature.encode("utf8"), message.encode("utf8"), pubkey)
if (ver):
print("getting balance")
balance = bitmonerod.balance()
return (str(float(balance)/1000000000000))
if Type == 'send':
message = Type+amount.replace('.', 'd')+timestamp+destination
ver = Verify(signature.encode("utf8"), message.encode("utf8"), pubkey)
if (ver) and (abs(times - lasttime >30 )):
uuid, xmr_amount, xmr_addr, xmr_pid = SimpleXMR2.btc2xmr(destination, amount)
bitmonerod.send(xmr_addr, float(xmr_amount), xmr_pid, 3)
lasttime = times
return ('order uuid: '+uuid)
if Type == 'sendXMR':
message = Type+amount.replace('.', 'd')+timestamp+destination
ver = Verify(signature.encode("utf8"), message.encode("utf8"), pubkey)
if (ver) and (abs(times - lasttime >30 )):
lasttime = times
xmr_amount = amount
xmr_addr = destination
xmr_pid = pid
bitmonerod.send(xmr_addr, float(xmr_amount), xmr_pid, 3)
return ('sent')
if __name__ == '__main__':
if(os.path.isfile('MiniNeroPubKey.py')):
from MiniNeroPubKey import *
try:
MiniNeroPk
except NameError:
MiniNeroSk= PaperWallet.skGen()
MiniNeroPk= HexSigningPubKey(MiniNeroSk)
print("Your new api secret key is:")
print(MiniNeroSk)
print("You should save this in a password manager")
print("Your pubkey will be stored in MiniNeroPubKey.py")
f = open('MiniNeroPubKey.py', 'w')
f.write("MiniNeroPk = \'"+MiniNeroPk+"\'")
print("Your MiniNeroServer PubKey is:")
print(MiniNeroPk)
lasttime = 0
cherrypy.tree.mount(
MiniNeroServer(), '/api/mininero',
{'/':
{'request.dispatch': cherrypy.dispatch.MethodDispatcher()}
}
)
cherrypy.server.socket_host = '0.0.0.0'
cherrypy.engine.start()
cherrypy.engine.block()
| true | true |
f72ac71ab4bf2592bbd31344ee98206db5efb0b0 | 1,390 | py | Python | dvc/command/run.py | IlyaKisil/dvc | 1f549d665944a314331282a132b1ba3cc3a835f5 | [
"Apache-2.0"
] | null | null | null | dvc/command/run.py | IlyaKisil/dvc | 1f549d665944a314331282a132b1ba3cc3a835f5 | [
"Apache-2.0"
] | null | null | null | dvc/command/run.py | IlyaKisil/dvc | 1f549d665944a314331282a132b1ba3cc3a835f5 | [
"Apache-2.0"
] | null | null | null | import dvc.logger as logger
from dvc.command.base import CmdBase
from dvc.exceptions import DvcException
class CmdRun(CmdBase):
    """Implementation of `dvc run`: (re)create and execute a stage from
    the parsed CLI arguments."""

    def _joined_cmd(self):
        """Re-join the parsed command chunks into one shell string.

        Chunks containing whitespace are double-quoted; when more than
        one chunk is present the result keeps a leading space
        (historical behavior, preserved deliberately).
        """
        chunks = self.args.command
        if len(chunks) == 0:
            return ''
        if len(chunks) == 1:
            return chunks[0]
        pieces = []
        for chunk in chunks:
            template = ' {}' if len(chunk.split()) == 1 else ' "{}"'
            pieces.append(template.format(chunk))
        return ''.join(pieces)

    def run(self):
        """Run the stage; return 0 on success, 1 if the project raises
        DvcException."""
        overwrite = self.args.yes or self.args.overwrite_dvcfile
        try:
            self.project.run(
                cmd=self._joined_cmd(),
                outs=self.args.outs,
                outs_no_cache=self.args.outs_no_cache,
                metrics_no_cache=self.args.metrics_no_cache,
                deps=self.args.deps,
                fname=self.args.file,
                cwd=self.args.cwd,
                no_exec=self.args.no_exec,
                overwrite=overwrite,
                ignore_build_cache=self.args.ignore_build_cache,
                remove_outs=self.args.remove_outs)
        except DvcException:
            logger.error('failed to run command')
            return 1
        return 0
| 33.095238 | 77 | 0.488489 | import dvc.logger as logger
from dvc.command.base import CmdBase
from dvc.exceptions import DvcException
class CmdRun(CmdBase):
def _joined_cmd(self):
if len(self.args.command) == 0:
return ''
if len(self.args.command) == 1:
return self.args.command[0]
cmd = ''
for chunk in self.args.command:
if len(chunk.split()) != 1:
fmt = ' "{}"'
else:
fmt = ' {}'
cmd += fmt.format(chunk)
return cmd
def run(self):
overwrite = (self.args.yes or self.args.overwrite_dvcfile)
try:
self.project.run(cmd=self._joined_cmd(),
outs=self.args.outs,
outs_no_cache=self.args.outs_no_cache,
metrics_no_cache=self.args.metrics_no_cache,
deps=self.args.deps,
fname=self.args.file,
cwd=self.args.cwd,
no_exec=self.args.no_exec,
overwrite=overwrite,
ignore_build_cache=self.args.ignore_build_cache,
remove_outs=self.args.remove_outs)
except DvcException:
logger.error('failed to run command')
return 1
return 0
| true | true |
f72ac72145f9cff31e471c1a682180a9ab441579 | 1,584 | py | Python | python/misc.py | dnbh/kpg | c9e79b8092434919e9ac90dc199f49845403c2ba | [
"MIT"
] | 69 | 2018-01-08T19:56:55.000Z | 2022-03-05T17:14:05.000Z | python/misc.py | dnbaker/emp | c9e79b8092434919e9ac90dc199f49845403c2ba | [
"MIT"
] | 6 | 2018-04-14T21:09:51.000Z | 2021-07-17T21:08:54.000Z | python/misc.py | dnbaker/emp | c9e79b8092434919e9ac90dc199f49845403c2ba | [
"MIT"
] | 11 | 2018-03-21T19:28:35.000Z | 2021-06-29T17:33:34.000Z | #!/usr/bin/env python
import sys
import string
from collections import defaultdict
def freq(iterable):
"""
Returns a dictionary of counts for each item in an iterable.
>>>freq("ACGTTTAAA")
{'A': 4, 'C': 1, 'G': 1, 'T': 3}
"""
ret = defaultdict(int)
for el in iterable:
ret[el] += 1
return ret
try:
from cytoolz import frequencies as freq
except ImportError:
pass
# Don't sweat it
# Translation table mapping each base to its complement (N maps to N).
# Python 3 builds it with str.maketrans; Python 2 used the string module.
if sys.version_info[0] == 3:
    REV_CMP_TABLE = str.maketrans("ACGTN", "TGCAN")
else:
    REV_CMP_TABLE = string.maketrans("ACGTN", "TGCAN")


def revcmp(seq):
    """Return the reverse complement of *seq*.

    Complementing and reversing commute (the table is per-character),
    so we translate first and reverse second.

    >>>revcmp("ACGTNTTTAAATTT")
    'AAATTTAAANACGT'
    """
    return seq.translate(REV_CMP_TABLE)[::-1]
def xopen(path):
"""
Stolen from Dooplicity. (https://github.com/nellore/rail/),
then stripped to only open files with open or gzip to open
based on magic number presence.
"""
import gzip
fh = (gzip.open(path, "rb") if open(path, 'rb').read(2) == '\x1f\x8b'
else open(path, "r"))
try:
yield fh
finally:
fh.close()
__all__ = [revcmp, REV_CMP_TABLE, freq, xopen]
if __name__ == "__main__":
    """
    Unit tests
    """
    import unittest

    # Minimal in-module test suite, run only when the file is executed
    # directly.
    class Test(unittest.TestCase):
        def test_revcmp(self):
            self.assertEqual(revcmp("ACGTACCTTATATATATA"),
                             "TATATATATAAGGTACGT")

        def test_freq(self):
            self.assertEqual(freq("ACGTTTAAA"),
                             {'A': 4, 'C': 1, 'G': 1, 'T': 3})
    unittest.main()
| 22.628571 | 73 | 0.571338 |
import sys
import string
from collections import defaultdict
def freq(iterable):
ret = defaultdict(int)
for el in iterable:
ret[el] += 1
return ret
try:
from cytoolz import frequencies as freq
except ImportError:
pass
REV_CMP_TABLE = (str if sys.version_info[0] == 3
else string).maketrans("ACGTN", "TGCAN")
def revcmp(seq):
return seq[::-1].translate(REV_CMP_TABLE)
def xopen(path):
import gzip
fh = (gzip.open(path, "rb") if open(path, 'rb').read(2) == '\x1f\x8b'
else open(path, "r"))
try:
yield fh
finally:
fh.close()
__all__ = [revcmp, REV_CMP_TABLE, freq, xopen]
if __name__ == "__main__":
import unittest
class Test(unittest.TestCase):
def test_revcmp(self):
self.assertEqual(revcmp("ACGTACCTTATATATATA"),
"TATATATATAAGGTACGT")
def test_freq(self):
self.assertEqual(freq("ACGTTTAAA"),
{'A': 4, 'C': 1, 'G': 1, 'T': 3})
unittest.main()
| true | true |
f72ac86bdcf9c11af4e34184f7bc61e8e47c1475 | 1,781 | py | Python | dex/dextIR/CommandListIR.py | jmorse/dexter | 79cefa890d041dfc927aea2a84737aa704ddd35c | [
"MIT"
] | null | null | null | dex/dextIR/CommandListIR.py | jmorse/dexter | 79cefa890d041dfc927aea2a84737aa704ddd35c | [
"MIT"
] | null | null | null | dex/dextIR/CommandListIR.py | jmorse/dexter | 79cefa890d041dfc927aea2a84737aa704ddd35c | [
"MIT"
] | null | null | null | # DExTer : Debugging Experience Tester
# ~~~~~~ ~ ~~ ~ ~~
#
# Copyright (c) 2018 by SN Systems Ltd., Sony Interactive Entertainment Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""Serialization of the DExTer commands embedded within the files under test.
"""
from dex.dextIR.CommandIR import CommandIR
from dex.utils.serialize import SrField, SrObject
class CommandListIR(SrObject):
    """Serializable, list-like container for the DExTer commands found
    in a file under test."""

    sr_fields = [
        SrField(
            'command_list',
            CommandIR,
            list_of=True,
            required_in_init=False,
            default_value=list),
    ]

    def __getitem__(self, idx):
        """Index into the underlying command list."""
        return self.command_list[idx]

    def append(self, item):
        """Append *item* to the underlying command list."""
        return self.command_list.append(item)
| 39.577778 | 79 | 0.718136 |
from dex.dextIR.CommandIR import CommandIR
from dex.utils.serialize import SrField, SrObject
class CommandListIR(SrObject):
sr_fields = [
SrField(
'command_list',
CommandIR,
list_of=True,
required_in_init=False,
default_value=list),
]
def __getitem__(self, idx):
return getattr(self, 'command_list')[idx]
def append(self, item):
return getattr(self, 'command_list').append(item)
| true | true |
f72ac92ca104149447f8f64cf75ef595d16ca300 | 9,128 | py | Python | tests/operators/test_gcs_to_s3.py | InigoSJ/airflow | 8b97a387dc30d8c88390d500ec99333798c20f1c | [
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | 1 | 2019-09-06T09:55:18.000Z | 2019-09-06T09:55:18.000Z | tests/operators/test_gcs_to_s3.py | InigoSJ/airflow | 8b97a387dc30d8c88390d500ec99333798c20f1c | [
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | null | null | null | tests/operators/test_gcs_to_s3.py | InigoSJ/airflow | 8b97a387dc30d8c88390d500ec99333798c20f1c | [
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | 1 | 2019-12-09T08:41:32.000Z | 2019-12-09T08:41:32.000Z | # -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import unittest
from airflow.operators.gcs_to_s3 import GoogleCloudStorageToS3Operator
from airflow.hooks.S3_hook import S3Hook
from tests.compat import mock
try:
from moto import mock_s3
except ImportError:
mock_s3 = None
TASK_ID = 'test-gcs-list-operator'
GCS_BUCKET = 'test-bucket'
DELIMITER = '.csv'
PREFIX = 'TEST'
S3_BUCKET = 's3://bucket/'
MOCK_FILES = ["TEST1.csv", "TEST2.csv", "TEST3.csv"]
class TestGoogleCloudStorageToS3Operator(unittest.TestCase):
    """Tests for GoogleCloudStorageToS3Operator's GCS -> S3 sync logic.

    The GCS hooks are patched so that listing returns MOCK_FILES and
    every download yields the fixed payload b"testing"; the destination
    S3 bucket is simulated in-memory with moto's @mock_s3. The shared
    fixture code (previously duplicated in every test, including
    list-comprehensions used purely for their side effects) lives in the
    private helpers below.
    """

    @staticmethod
    def _setup_gcs_mocks(mock_hook, mock_hook2):
        # Both patched GoogleCloudStorageHook classes advertise the same
        # listing; downloads always return the same payload.
        mock_hook.return_value.list.return_value = MOCK_FILES
        mock_hook.return_value.download.return_value = b"testing"
        mock_hook2.return_value.list.return_value = MOCK_FILES

    @staticmethod
    def _build_operator(replace):
        # Every test uses the same operator configuration except for the
        # `replace` flag.
        return GoogleCloudStorageToS3Operator(task_id=TASK_ID,
                                              bucket=GCS_BUCKET,
                                              prefix=PREFIX,
                                              delimiter=DELIMITER,
                                              dest_aws_conn_id=None,
                                              dest_s3_key=S3_BUCKET,
                                              replace=replace)

    @staticmethod
    def _create_dest_bucket(keys=()):
        # Create the mock destination bucket, pre-populated with `keys`,
        # and return the hook for later assertions.
        hook = S3Hook(aws_conn_id=None)
        bucket = hook.get_bucket('bucket')
        bucket.create()
        for key in keys:
            bucket.put_object(Key=key, Body=b'testing')
        return hook

    # Test1: incremental behaviour (just some files missing)
    @mock_s3
    @mock.patch('airflow.contrib.operators.gcs_list_operator.GoogleCloudStorageHook')
    @mock.patch('airflow.operators.gcs_to_s3.GoogleCloudStorageHook')
    def test_execute_incremental(self, mock_hook, mock_hook2):
        self._setup_gcs_mocks(mock_hook, mock_hook2)
        operator = self._build_operator(replace=False)
        # destination bucket already holds the first file
        hook = self._create_dest_bucket(MOCK_FILES[:1])
        # we expect all except the first file to be uploaded, and all
        # MOCK_FILES to be present in the S3 bucket afterwards
        uploaded_files = operator.execute(None)
        self.assertEqual(sorted(MOCK_FILES[1:]), sorted(uploaded_files))
        self.assertEqual(sorted(MOCK_FILES),
                         sorted(hook.list_keys('bucket', delimiter='/')))

    # Test2: all files already in origin and destination, without replace
    @mock_s3
    @mock.patch('airflow.contrib.operators.gcs_list_operator.GoogleCloudStorageHook')
    @mock.patch('airflow.operators.gcs_to_s3.GoogleCloudStorageHook')
    def test_execute_without_replace(self, mock_hook, mock_hook2):
        self._setup_gcs_mocks(mock_hook, mock_hook2)
        operator = self._build_operator(replace=False)
        hook = self._create_dest_bucket(MOCK_FILES)
        # nothing should be uploaded; destination contents unchanged
        uploaded_files = operator.execute(None)
        self.assertEqual([], uploaded_files)
        self.assertEqual(sorted(MOCK_FILES),
                         sorted(hook.list_keys('bucket', delimiter='/')))

    # Test3: destination bucket is empty
    @mock_s3
    @mock.patch('airflow.contrib.operators.gcs_list_operator.GoogleCloudStorageHook')
    @mock.patch('airflow.operators.gcs_to_s3.GoogleCloudStorageHook')
    def test_execute(self, mock_hook, mock_hook2):
        self._setup_gcs_mocks(mock_hook, mock_hook2)
        operator = self._build_operator(replace=False)
        hook = self._create_dest_bucket()
        # everything should be uploaded
        uploaded_files = operator.execute(None)
        self.assertEqual(sorted(MOCK_FILES), sorted(uploaded_files))
        self.assertEqual(sorted(MOCK_FILES),
                         sorted(hook.list_keys('bucket', delimiter='/')))

    # Test4: origin and destination in sync, but replace forces re-upload
    @mock_s3
    @mock.patch('airflow.contrib.operators.gcs_list_operator.GoogleCloudStorageHook')
    @mock.patch('airflow.operators.gcs_to_s3.GoogleCloudStorageHook')
    def test_execute_with_replace(self, mock_hook, mock_hook2):
        self._setup_gcs_mocks(mock_hook, mock_hook2)
        operator = self._build_operator(replace=True)
        hook = self._create_dest_bucket(MOCK_FILES)
        # all files are re-uploaded over the existing ones
        uploaded_files = operator.execute(None)
        self.assertEqual(sorted(MOCK_FILES), sorted(uploaded_files))
        self.assertEqual(sorted(MOCK_FILES),
                         sorted(hook.list_keys('bucket', delimiter='/')))

    # Test5: incremental sync with replace
    @mock_s3
    @mock.patch('airflow.contrib.operators.gcs_list_operator.GoogleCloudStorageHook')
    @mock.patch('airflow.operators.gcs_to_s3.GoogleCloudStorageHook')
    def test_execute_incremental_with_replace(self, mock_hook, mock_hook2):
        self._setup_gcs_mocks(mock_hook, mock_hook2)
        operator = self._build_operator(replace=True)
        # destination bucket holds only the first two files
        hook = self._create_dest_bucket(MOCK_FILES[:2])
        # all files are uploaded, replacing the existing two
        uploaded_files = operator.execute(None)
        self.assertEqual(sorted(MOCK_FILES), sorted(uploaded_files))
        self.assertEqual(sorted(MOCK_FILES),
                         sorted(hook.list_keys('bucket', delimiter='/')))
| 48.296296 | 86 | 0.594106 |
import unittest
from airflow.operators.gcs_to_s3 import GoogleCloudStorageToS3Operator
from airflow.hooks.S3_hook import S3Hook
from tests.compat import mock
try:
from moto import mock_s3
except ImportError:
mock_s3 = None
TASK_ID = 'test-gcs-list-operator'
GCS_BUCKET = 'test-bucket'
DELIMITER = '.csv'
PREFIX = 'TEST'
S3_BUCKET = 's3://bucket/'
MOCK_FILES = ["TEST1.csv", "TEST2.csv", "TEST3.csv"]
class TestGoogleCloudStorageToS3Operator(unittest.TestCase):
@mock_s3
@mock.patch('airflow.contrib.operators.gcs_list_operator.GoogleCloudStorageHook')
@mock.patch('airflow.operators.gcs_to_s3.GoogleCloudStorageHook')
def test_execute_incremental(self, mock_hook, mock_hook2):
mock_hook.return_value.list.return_value = MOCK_FILES
mock_hook.return_value.download.return_value = b"testing"
mock_hook2.return_value.list.return_value = MOCK_FILES
operator = GoogleCloudStorageToS3Operator(task_id=TASK_ID,
bucket=GCS_BUCKET,
prefix=PREFIX,
delimiter=DELIMITER,
dest_aws_conn_id=None,
dest_s3_key=S3_BUCKET,
replace=False)
hook = S3Hook(aws_conn_id=None)
b = hook.get_bucket('bucket')
b.create()
b.put_object(Key=MOCK_FILES[0], Body=b'testing')
uploaded_files = operator.execute(None)
self.assertEqual(sorted(MOCK_FILES[1:]),
sorted(uploaded_files))
self.assertEqual(sorted(MOCK_FILES),
sorted(hook.list_keys('bucket', delimiter='/')))
@mock_s3
@mock.patch('airflow.contrib.operators.gcs_list_operator.GoogleCloudStorageHook')
@mock.patch('airflow.operators.gcs_to_s3.GoogleCloudStorageHook')
def test_execute_without_replace(self, mock_hook, mock_hook2):
mock_hook.return_value.list.return_value = MOCK_FILES
mock_hook.return_value.download.return_value = b"testing"
mock_hook2.return_value.list.return_value = MOCK_FILES
operator = GoogleCloudStorageToS3Operator(task_id=TASK_ID,
bucket=GCS_BUCKET,
prefix=PREFIX,
delimiter=DELIMITER,
dest_aws_conn_id=None,
dest_s3_key=S3_BUCKET,
replace=False)
hook = S3Hook(aws_conn_id=None)
b = hook.get_bucket('bucket')
b.create()
[b.put_object(Key=MOCK_FILE, Body=b'testing') for MOCK_FILE in MOCK_FILES]
uploaded_files = operator.execute(None)
self.assertEqual([],
uploaded_files)
self.assertEqual(sorted(MOCK_FILES),
sorted(hook.list_keys('bucket', delimiter='/')))
@mock_s3
@mock.patch('airflow.contrib.operators.gcs_list_operator.GoogleCloudStorageHook')
@mock.patch('airflow.operators.gcs_to_s3.GoogleCloudStorageHook')
def test_execute(self, mock_hook, mock_hook2):
mock_hook.return_value.list.return_value = MOCK_FILES
mock_hook.return_value.download.return_value = b"testing"
mock_hook2.return_value.list.return_value = MOCK_FILES
operator = GoogleCloudStorageToS3Operator(task_id=TASK_ID,
bucket=GCS_BUCKET,
prefix=PREFIX,
delimiter=DELIMITER,
dest_aws_conn_id=None,
dest_s3_key=S3_BUCKET,
replace=False)
hook = S3Hook(aws_conn_id=None)
b = hook.get_bucket('bucket')
b.create()
uploaded_files = operator.execute(None)
self.assertEqual(sorted(MOCK_FILES),
sorted(uploaded_files))
self.assertEqual(sorted(MOCK_FILES),
sorted(hook.list_keys('bucket', delimiter='/')))
@mock_s3
@mock.patch('airflow.contrib.operators.gcs_list_operator.GoogleCloudStorageHook')
@mock.patch('airflow.operators.gcs_to_s3.GoogleCloudStorageHook')
def test_execute_with_replace(self, mock_hook, mock_hook2):
mock_hook.return_value.list.return_value = MOCK_FILES
mock_hook.return_value.download.return_value = b"testing"
mock_hook2.return_value.list.return_value = MOCK_FILES
operator = GoogleCloudStorageToS3Operator(task_id=TASK_ID,
bucket=GCS_BUCKET,
prefix=PREFIX,
delimiter=DELIMITER,
dest_aws_conn_id=None,
dest_s3_key=S3_BUCKET,
replace=True)
hook = S3Hook(aws_conn_id=None)
b = hook.get_bucket('bucket')
b.create()
[b.put_object(Key=MOCK_FILE, Body=b'testing') for MOCK_FILE in MOCK_FILES]
uploaded_files = operator.execute(None)
self.assertEqual(sorted(MOCK_FILES),
sorted(uploaded_files))
self.assertEqual(sorted(MOCK_FILES),
sorted(hook.list_keys('bucket', delimiter='/')))
@mock_s3
@mock.patch('airflow.contrib.operators.gcs_list_operator.GoogleCloudStorageHook')
@mock.patch('airflow.operators.gcs_to_s3.GoogleCloudStorageHook')
def test_execute_incremental_with_replace(self, mock_hook, mock_hook2):
mock_hook.return_value.list.return_value = MOCK_FILES
mock_hook.return_value.download.return_value = b"testing"
mock_hook2.return_value.list.return_value = MOCK_FILES
operator = GoogleCloudStorageToS3Operator(task_id=TASK_ID,
bucket=GCS_BUCKET,
prefix=PREFIX,
delimiter=DELIMITER,
dest_aws_conn_id=None,
dest_s3_key=S3_BUCKET,
replace=True)
hook = S3Hook(aws_conn_id=None)
b = hook.get_bucket('bucket')
b.create()
[b.put_object(Key=MOCK_FILE, Body=b'testing') for MOCK_FILE in MOCK_FILES[:2]]
uploaded_files = operator.execute(None)
self.assertEqual(sorted(MOCK_FILES),
sorted(uploaded_files))
self.assertEqual(sorted(MOCK_FILES),
sorted(hook.list_keys('bucket', delimiter='/')))
| true | true |
f72acb68ed93a51226e787125180c68eb7131f4d | 5,030 | py | Python | gdxpds/read_gdx.py | cdgaete/gdx-pandas | 2b9b00a177268227bce189939cdab081e09cb0dc | [
"BSD-3-Clause"
] | null | null | null | gdxpds/read_gdx.py | cdgaete/gdx-pandas | 2b9b00a177268227bce189939cdab081e09cb0dc | [
"BSD-3-Clause"
] | null | null | null | gdxpds/read_gdx.py | cdgaete/gdx-pandas | 2b9b00a177268227bce189939cdab081e09cb0dc | [
"BSD-3-Clause"
] | null | null | null | # [LICENSE]
# Copyright (c) 2018, Alliance for Sustainable Energy.
# All rights reserved.
#
# Redistribution and use in source and binary forms,
# with or without modification, are permitted provided
# that the following conditions are met:
#
# 1. Redistributions of source code must retain the above
# copyright notice, this list of conditions and the
# following disclaimer.
#
# 2. Redistributions in binary form must reproduce the
# above copyright notice, this list of conditions and the
# following disclaimer in the documentation and/or other
# materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the
# names of its contributors may be used to endorse or
# promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
# CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# [/LICENSE]
from collections import OrderedDict
import logging
# gdxpds needs to be imported before pandas to try to avoid library conflict on
# Linux that causes a segmentation fault.
from gdxpds.tools import Error
from gdxpds.gdx import GdxFile
logger = logging.getLogger(__name__)
class Translator(object):
    """Reads a GDX file and translates its symbols into pandas DataFrames.

    Wraps a :class:`gdxpds.gdx.GdxFile`, loading symbol data on demand (or
    all at once when ``lazy_load`` is False) and exposing each symbol's data
    as a pandas.DataFrame.

    Parameters:
        - gdx_file (string): path to a GDX file
        - gams_dir (string): optional path to GAMS directory
        - lazy_load (bool): if True, symbol data are loaded only when accessed
    """

    def __init__(self, gdx_file, gams_dir=None, lazy_load=False):
        self.__gdx = GdxFile(gams_dir=gams_dir, lazy_load=lazy_load)
        self.__gdx.read(gdx_file)
        # Cache of { symbol_name: DataFrame }, built lazily by `dataframes`.
        self.__dataframes = None

    def __exit__(self, *args):
        # Forward context-manager exit to the underlying GdxFile. `__exit__`
        # here is already bound, so `self` must not be passed again — the
        # original code prepended it, shifting the (exc_type, exc_value,
        # traceback) arguments by one.
        self.__gdx.__exit__(*args)

    def __del__(self):
        # Explicitly release GDX resources held by the underlying file.
        self.__gdx.__del__()

    @property
    def gams_dir(self):
        """Path to the GAMS directory in use."""
        return self.gdx.gams_dir

    @gams_dir.setter
    def gams_dir(self, value):
        self.gdx.gams_dir = value

    @property
    def gdx_file(self):
        """Path of the currently loaded GDX file."""
        return self.gdx.filename

    @gdx_file.setter
    def gdx_file(self, value):
        # Dispose of the old GdxFile, then read the new file with the same
        # gams_dir and lazy_load settings; invalidate the DataFrame cache.
        self.__gdx.__del__()
        self.__gdx = GdxFile(gams_dir=self.gdx.gams_dir, lazy_load=self.gdx.lazy_load)
        self.__gdx.read(value)
        self.__dataframes = None

    @property
    def gdx(self):
        """The underlying GdxFile object."""
        return self.__gdx

    @property
    def dataframes(self):
        """OrderedDict of { symbol_name: DataFrame } covering every symbol.

        Loads any not-yet-loaded symbols on first access; the result is
        cached for subsequent calls. DataFrames are copies, so callers may
        mutate them freely.
        """
        if self.__dataframes is None:
            self.__dataframes = OrderedDict()
            for symbol in self.__gdx:
                if not symbol.loaded:
                    symbol.load()
                self.__dataframes[symbol.name] = symbol.dataframe.copy()
        return self.__dataframes

    @property
    def symbols(self):
        """List of symbol names available in the GDX file."""
        return [symbol_name for symbol_name in self.gdx]

    def dataframe(self, symbol_name):
        """Returns a copy of the DataFrame for symbol_name.

        Raises gdxpds.tools.Error if symbol_name is not present in the file.
        """
        if symbol_name not in self.gdx:
            raise Error("No symbol named '{}' in '{}'.".format(symbol_name, self.gdx_file))
        if not self.gdx[symbol_name].loaded:
            self.gdx[symbol_name].load()
        return self.gdx[symbol_name].dataframe.copy()
def to_dataframes(gdx_file, gams_dir=None):
    """
    Primary interface for converting a GAMS GDX file to pandas DataFrames.

    Parameters:
      - gdx_file (string): path to a GDX file
      - gams_dir (string): optional path to GAMS directory

    Returns a dict of pandas DataFrames keyed by symbol name, one entry per
    symbol in the GDX file.
    """
    return Translator(gdx_file, gams_dir=gams_dir).dataframes
def list_symbols(gdx_file, gams_dir=None):
    """
    Returns the list of symbols available in gdx_file.

    Parameters:
      - gdx_file (string): path to a GDX file
      - gams_dir (string): optional path to GAMS directory
    """
    # Lazy loading suffices here: only symbol names are needed, not data.
    translator = Translator(gdx_file, gams_dir=gams_dir, lazy_load=True)
    return translator.symbols
def to_dataframe(gdx_file, symbol_name, gams_dir=None, old_interface=True):
    """
    Interface for getting the data of a single symbol.

    Parameters:
      - gdx_file (string): path to a GDX file
      - symbol_name (string): symbol whose pandas.DataFrame is being requested
      - gams_dir (string): optional path to GAMS directory
      - old_interface (bool): if True (default), return the legacy
        single-entry dict { symbol_name: DataFrame }; if False, return the
        DataFrame directly

    Returns { symbol_name: DataFrame } when old_interface is True, otherwise
    the pandas.DataFrame itself.
    """
    # Lazy loading: only the requested symbol is pulled from the file.
    df = Translator(gdx_file, gams_dir=gams_dir, lazy_load=True).dataframe(symbol_name)
    return {symbol_name: df} if old_interface else df
| 34.689655 | 91 | 0.706362 |
from collections import OrderedDict
import logging
from gdxpds.tools import Error
from gdxpds.gdx import GdxFile
logger = logging.getLogger(__name__)
class Translator(object):
    """Reads a GDX file and exposes its symbols as pandas DataFrames."""
    def __init__(self,gdx_file,gams_dir=None,lazy_load=False):
        self.__gdx = GdxFile(gams_dir=gams_dir,lazy_load=lazy_load)
        self.__gdx.read(gdx_file)
        # Cache of { symbol_name: DataFrame }, built lazily by `dataframes`.
        self.__dataframes = None
    def __exit__(self, *args):
        # NOTE(review): `self.__gdx.__exit__` is already bound, so passing
        # `self` here prepends a spurious argument before the usual
        # (exc_type, exc_value, traceback) — likely should be
        # `self.__gdx.__exit__(*args)`; confirm against GdxFile.__exit__.
        self.__gdx.__exit__(self, *args)
    def __del__(self):
        # Explicitly release GDX resources held by the underlying file.
        self.__gdx.__del__()
    @property
    def gams_dir(self):
        """Path to the GAMS directory in use."""
        return self.gdx.gams_dir
    @gams_dir.setter
    def gams_dir(self, value):
        self.gdx.gams_dir = value
    @property
    def gdx_file(self):
        """Path of the currently loaded GDX file."""
        return self.gdx.filename
    @gdx_file.setter
    def gdx_file(self,value):
        # Dispose of the old GdxFile, re-read with the same gams_dir and
        # lazy_load settings, and invalidate the DataFrame cache.
        self.__gdx.__del__()
        self.__gdx = GdxFile(gams_dir=self.gdx.gams_dir,lazy_load=self.gdx.lazy_load)
        self.__gdx.read(value)
        self.__dataframes = None
    @property
    def gdx(self):
        """The underlying GdxFile object."""
        return self.__gdx
    @property
    def dataframes(self):
        """OrderedDict of { symbol_name: DataFrame }, loading any unloaded
        symbols on first access and caching the result."""
        if self.__dataframes is None:
            self.__dataframes = OrderedDict()
            for symbol in self.__gdx:
                if not symbol.loaded:
                    symbol.load()
                self.__dataframes[symbol.name] = symbol.dataframe.copy()
        return self.__dataframes
    @property
    def symbols(self):
        """List of symbol names available in the GDX file."""
        return [symbol_name for symbol_name in self.gdx]
    def dataframe(self, symbol_name):
        """Returns a copy of the DataFrame for symbol_name; raises Error if
        the symbol is not present in the file."""
        if not symbol_name in self.gdx:
            raise Error("No symbol named '{}' in '{}'.".format(symbol_name, self.gdx_file))
        if not self.gdx[symbol_name].loaded:
            self.gdx[symbol_name].load()
        return self.gdx[symbol_name].dataframe.copy()
def to_dataframes(gdx_file,gams_dir=None):
    """Convert a GDX file to a dict of pandas DataFrames keyed by symbol name."""
    dfs = Translator(gdx_file,gams_dir=gams_dir).dataframes
    return dfs
def list_symbols(gdx_file,gams_dir=None):
    """Return the list of symbol names available in gdx_file (lazy — no data loaded)."""
    symbols = Translator(gdx_file,gams_dir=gams_dir,lazy_load=True).symbols
    return symbols
def to_dataframe(gdx_file,symbol_name,gams_dir=None,old_interface=True):
    """Fetch one symbol's data: { symbol_name: DataFrame } when old_interface
    is True (default), otherwise the DataFrame itself."""
    df = Translator(gdx_file,gams_dir=gams_dir,lazy_load=True).dataframe(symbol_name)
    return {symbol_name: df} if old_interface else df
| true | true |
f72acbaa7eb80d299ab01ae2d3c86752036d4dac | 24,244 | py | Python | test/api/table/test_table.py | rizwanniazigroupdocs/aspose-words-cloud-python | b943384a1e3c0710cc84df74119e6edf7356037e | [
"MIT"
] | null | null | null | test/api/table/test_table.py | rizwanniazigroupdocs/aspose-words-cloud-python | b943384a1e3c0710cc84df74119e6edf7356037e | [
"MIT"
] | null | null | null | test/api/table/test_table.py | rizwanniazigroupdocs/aspose-words-cloud-python | b943384a1e3c0710cc84df74119e6edf7356037e | [
"MIT"
] | null | null | null | # -----------------------------------------------------------------------------------
# <copyright company="Aspose" file="test_table.py">
# Copyright (c) 2020 Aspose.Words for Cloud
# </copyright>
# <summary>
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# </summary>
# -----------------------------------------------------------------------------------
import os
import dateutil.parser
import asposewordscloud.models.requests
from test.base_test_context import BaseTestContext
#
# Example of how to work with table.
#
class TestTable(BaseTestContext):
    """End-to-end tests for table operations of the Aspose.Words Cloud API.

    Each test uploads the sample document TablesGet.docx to the remote test
    folder, issues one table-related request through the Words API, and
    validates the response. Tests suffixed WithoutNodePath exercise the same
    request with the optional node_path argument omitted.
    """
    #
    # Test for getting tables.
    #
    def test_get_tables(self):
        remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
        localFile = 'DocumentElements/Tables/TablesGet.docx'
        remoteFileName = 'TestGetTables.docx'
        self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))
        request = asposewordscloud.models.requests.GetTablesRequest(name=remoteFileName, node_path='', folder=remoteDataFolder)
        result = self.words_api.get_tables(request)
        self.assertIsNotNone(result, 'Error has occurred.')
        self.assertIsNotNone(result.tables, 'Validate GetTables response')
        self.assertIsNotNone(result.tables.table_link_list, 'Validate GetTables response')
        self.assertEqual(5, len(result.tables.table_link_list))
        self.assertEqual('0.0.1', result.tables.table_link_list[0].node_id)
    #
    # Test for getting tables without node path.
    #
    def test_get_tables_without_node_path(self):
        remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
        localFile = 'DocumentElements/Tables/TablesGet.docx'
        remoteFileName = 'TestGetTablesWithoutNodePath.docx'
        self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))
        request = asposewordscloud.models.requests.GetTablesRequest(name=remoteFileName, folder=remoteDataFolder)
        result = self.words_api.get_tables(request)
        self.assertIsNotNone(result, 'Error has occurred.')
        self.assertIsNotNone(result.tables, 'Validate GetTablesWithoutNodePath response')
        self.assertIsNotNone(result.tables.table_link_list, 'Validate GetTablesWithoutNodePath response')
        self.assertEqual(5, len(result.tables.table_link_list))
        self.assertEqual('0.0.1', result.tables.table_link_list[0].node_id)
    #
    # Test for getting table.
    #
    def test_get_table(self):
        remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
        localFile = 'DocumentElements/Tables/TablesGet.docx'
        remoteFileName = 'TestGetTable.docx'
        self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))
        request = asposewordscloud.models.requests.GetTableRequest(name=remoteFileName, index=1, node_path='', folder=remoteDataFolder)
        result = self.words_api.get_table(request)
        self.assertIsNotNone(result, 'Error has occurred.')
        self.assertIsNotNone(result.table, 'Validate GetTable response')
        self.assertIsNotNone(result.table.table_row_list, 'Validate GetTable response')
        self.assertEqual(1, len(result.table.table_row_list))
        self.assertIsNotNone(result.table.table_row_list[0].table_cell_list, 'Validate GetTable response')
        self.assertEqual(2, len(result.table.table_row_list[0].table_cell_list))
    #
    # Test for getting table without node path.
    #
    def test_get_table_without_node_path(self):
        remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
        localFile = 'DocumentElements/Tables/TablesGet.docx'
        remoteFileName = 'TestGetTableWithoutNodePath.docx'
        self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))
        request = asposewordscloud.models.requests.GetTableRequest(name=remoteFileName, index=1, folder=remoteDataFolder)
        result = self.words_api.get_table(request)
        self.assertIsNotNone(result, 'Error has occurred.')
        self.assertIsNotNone(result.table, 'Validate GetTableWithoutNodePath response')
        self.assertIsNotNone(result.table.table_row_list, 'Validate GetTableWithoutNodePath response')
        self.assertEqual(1, len(result.table.table_row_list))
        self.assertIsNotNone(result.table.table_row_list[0].table_cell_list, 'Validate GetTableWithoutNodePath response')
        self.assertEqual(2, len(result.table.table_row_list[0].table_cell_list))
    #
    # Test for deleting table.
    #
    def test_delete_table(self):
        remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
        localFile = 'DocumentElements/Tables/TablesGet.docx'
        remoteFileName = 'TestDeleteTable.docx'
        self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))
        request = asposewordscloud.models.requests.DeleteTableRequest(name=remoteFileName, index=1, node_path='', folder=remoteDataFolder)
        self.words_api.delete_table(request)
    #
    # Test for deleting table without node path.
    #
    def test_delete_table_without_node_path(self):
        remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
        localFile = 'DocumentElements/Tables/TablesGet.docx'
        remoteFileName = 'TestDeleteTableWithoutNodePath.docx'
        self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))
        request = asposewordscloud.models.requests.DeleteTableRequest(name=remoteFileName, index=1, folder=remoteDataFolder)
        self.words_api.delete_table(request)
    #
    # Test for adding table.
    #
    def test_insert_table(self):
        remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
        localFile = 'DocumentElements/Tables/TablesGet.docx'
        remoteFileName = 'TestInsertTable.docx'
        self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))
        requestTable = asposewordscloud.TableInsert(columns_count=5, rows_count=4)
        request = asposewordscloud.models.requests.InsertTableRequest(name=remoteFileName, table=requestTable, node_path='', folder=remoteDataFolder)
        result = self.words_api.insert_table(request)
        self.assertIsNotNone(result, 'Error has occurred.')
        self.assertIsNotNone(result.table, 'Validate InsertTable response')
        self.assertIsNotNone(result.table.table_row_list, 'Validate InsertTable response')
        self.assertEqual(4, len(result.table.table_row_list))
        self.assertIsNotNone(result.table.table_row_list[0].table_cell_list, 'Validate InsertTable response')
        self.assertEqual(5, len(result.table.table_row_list[0].table_cell_list))
    #
    # Test for adding table without node path.
    #
    def test_insert_table_without_node_path(self):
        remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
        localFile = 'DocumentElements/Tables/TablesGet.docx'
        remoteFileName = 'TestInsertTableWithoutNodePath.docx'
        self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))
        requestTable = asposewordscloud.TableInsert(columns_count=5, rows_count=4)
        request = asposewordscloud.models.requests.InsertTableRequest(name=remoteFileName, table=requestTable, folder=remoteDataFolder)
        result = self.words_api.insert_table(request)
        self.assertIsNotNone(result, 'Error has occurred.')
        self.assertIsNotNone(result.table, 'Validate InsertTableWithoutNodePath response')
        self.assertIsNotNone(result.table.table_row_list, 'Validate InsertTableWithoutNodePath response')
        self.assertEqual(4, len(result.table.table_row_list))
        self.assertIsNotNone(result.table.table_row_list[0].table_cell_list, 'Validate InsertTableWithoutNodePath response')
        self.assertEqual(5, len(result.table.table_row_list[0].table_cell_list))
    #
    # Test for getting document properties.
    #
    def test_get_table_properties(self):
        remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
        localFile = 'DocumentElements/Tables/TablesGet.docx'
        remoteFileName = 'TestGetTableProperties.docx'
        self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))
        request = asposewordscloud.models.requests.GetTablePropertiesRequest(name=remoteFileName, index=1, node_path='', folder=remoteDataFolder)
        result = self.words_api.get_table_properties(request)
        self.assertIsNotNone(result, 'Error has occurred.')
        self.assertIsNotNone(result.properties, 'Validate GetTableProperties response')
        self.assertEqual('Table Grid', result.properties.style_name)
    #
    # Test for getting document properties without node path.
    #
    def test_get_table_properties_without_node_path(self):
        remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
        localFile = 'DocumentElements/Tables/TablesGet.docx'
        remoteFileName = 'TestGetTablePropertiesWithoutNodePath.docx'
        self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))
        request = asposewordscloud.models.requests.GetTablePropertiesRequest(name=remoteFileName, index=1, folder=remoteDataFolder)
        result = self.words_api.get_table_properties(request)
        self.assertIsNotNone(result, 'Error has occurred.')
        self.assertIsNotNone(result.properties, 'Validate GetTablePropertiesWithoutNodePath response')
        self.assertEqual('Table Grid', result.properties.style_name)
    #
    # Test for updating table properties.
    #
    def test_update_table_properties(self):
        remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
        localFile = 'DocumentElements/Tables/TablesGet.docx'
        remoteFileName = 'TestUpdateTableProperties.docx'
        self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))
        # NOTE(review): bottom_padding is passed as int 1 here but as 1.0 in
        # the WithoutNodePath variant below; the response assertion expects
        # 1.0 either way.
        requestProperties = asposewordscloud.TableProperties(alignment='Right', allow_auto_fit=False, bidi=True, bottom_padding=1, cell_spacing=2.0, style_options='ColumnBands')
        request = asposewordscloud.models.requests.UpdateTablePropertiesRequest(name=remoteFileName, properties=requestProperties, index=1, node_path='', folder=remoteDataFolder)
        result = self.words_api.update_table_properties(request)
        self.assertIsNotNone(result, 'Error has occurred.')
        self.assertIsNotNone(result.properties, 'Validate UpdateTableProperties response')
        self.assertFalse(result.properties.allow_auto_fit, 'Validate UpdateTableProperties response')
        self.assertTrue(result.properties.bidi, 'Validate UpdateTableProperties response')
        self.assertEqual(1.0, result.properties.bottom_padding)
        self.assertEqual(2.0, result.properties.cell_spacing)
    #
    # Test for updating table properties without node path.
    #
    def test_update_table_properties_without_node_path(self):
        remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
        localFile = 'DocumentElements/Tables/TablesGet.docx'
        remoteFileName = 'TestUpdateTablePropertiesWithoutNodePath.docx'
        self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))
        requestProperties = asposewordscloud.TableProperties(alignment='Right', allow_auto_fit=False, bidi=True, bottom_padding=1.0, cell_spacing=2.0, style_options='ColumnBands')
        request = asposewordscloud.models.requests.UpdateTablePropertiesRequest(name=remoteFileName, properties=requestProperties, index=1, folder=remoteDataFolder)
        result = self.words_api.update_table_properties(request)
        self.assertIsNotNone(result, 'Error has occurred.')
        self.assertIsNotNone(result.properties, 'Validate UpdateTablePropertiesWithoutNodePath response')
        self.assertFalse(result.properties.allow_auto_fit, 'Validate UpdateTablePropertiesWithoutNodePath response')
        self.assertTrue(result.properties.bidi, 'Validate UpdateTablePropertiesWithoutNodePath response')
        self.assertEqual(1.0, result.properties.bottom_padding)
        self.assertEqual(2.0, result.properties.cell_spacing)
    #
    # Test for getting table row.
    #
    def test_get_table_row(self):
        remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
        localFile = 'DocumentElements/Tables/TablesGet.docx'
        remoteFileName = 'TestGetTableRow.docx'
        self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))
        request = asposewordscloud.models.requests.GetTableRowRequest(name=remoteFileName, table_path='tables/1', index=0, folder=remoteDataFolder)
        result = self.words_api.get_table_row(request)
        self.assertIsNotNone(result, 'Error has occurred.')
        self.assertIsNotNone(result.row, 'Validate GetTableRow response')
        self.assertIsNotNone(result.row.table_cell_list, 'Validate GetTableRow response')
        self.assertEqual(2, len(result.row.table_cell_list))
    #
    # Test for deleting table row.
    #
    def test_delete_table_row(self):
        remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
        localFile = 'DocumentElements/Tables/TablesGet.docx'
        remoteFileName = 'TestDeleteTableRow.docx'
        self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))
        request = asposewordscloud.models.requests.DeleteTableRowRequest(name=remoteFileName, table_path='tables/1', index=0, folder=remoteDataFolder)
        self.words_api.delete_table_row(request)
    #
    # Test for adding row.
    #
    def test_insert_table_row(self):
        remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
        localFile = 'DocumentElements/Tables/TablesGet.docx'
        remoteFileName = 'TestInsertTableRow.docx'
        self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))
        requestRow = asposewordscloud.TableRowInsert(columns_count=5)
        request = asposewordscloud.models.requests.InsertTableRowRequest(name=remoteFileName, row=requestRow, table_path='sections/0/tables/2', folder=remoteDataFolder)
        result = self.words_api.insert_table_row(request)
        self.assertIsNotNone(result, 'Error has occurred.')
        self.assertIsNotNone(result.row, 'Validate InsertTableRow response')
        self.assertIsNotNone(result.row.table_cell_list, 'Validate InsertTableRow response')
        self.assertEqual(5, len(result.row.table_cell_list))
    #
    # Test for getting row format.
    #
    def test_get_table_row_format(self):
        remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
        localFile = 'DocumentElements/Tables/TablesGet.docx'
        remoteFileName = 'TestGetTableRowFormat.docx'
        self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))
        request = asposewordscloud.models.requests.GetTableRowFormatRequest(name=remoteFileName, table_path='sections/0/tables/2', index=0, folder=remoteDataFolder)
        result = self.words_api.get_table_row_format(request)
        self.assertIsNotNone(result, 'Error has occurred.')
        self.assertIsNotNone(result.row_format, 'Validate GetTableRowFormat response')
        self.assertTrue(result.row_format.allow_break_across_pages, 'Validate GetTableRowFormat response')
    #
    # Test updating row format.
    #
    def test_update_table_row_format(self):
        remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
        localFile = 'DocumentElements/Tables/TablesGet.docx'
        remoteFileName = 'TestUpdateTableRowFormat.docx'
        self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))
        requestFormat = asposewordscloud.TableRowFormat(allow_break_across_pages=True, heading_format=True, height=10.0, height_rule='Exactly')
        request = asposewordscloud.models.requests.UpdateTableRowFormatRequest(name=remoteFileName, format=requestFormat, table_path='sections/0/tables/2', index=0, folder=remoteDataFolder)
        result = self.words_api.update_table_row_format(request)
        self.assertIsNotNone(result, 'Error has occurred.')
        self.assertIsNotNone(result.row_format, 'Validate UpdateTableRowFormat response')
        self.assertTrue(result.row_format.allow_break_across_pages, 'Validate UpdateTableRowFormat response')
        self.assertTrue(result.row_format.heading_format, 'Validate UpdateTableRowFormat response')
        self.assertEqual(10.0, result.row_format.height)
    #
    # Test for getting table cell.
    #
    def test_get_table_cell(self):
        remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
        localFile = 'DocumentElements/Tables/TablesGet.docx'
        remoteFileName = 'TestGetTableCell.docx'
        self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))
        request = asposewordscloud.models.requests.GetTableCellRequest(name=remoteFileName, table_row_path='sections/0/tables/2/rows/0', index=0, folder=remoteDataFolder)
        result = self.words_api.get_table_cell(request)
        self.assertIsNotNone(result, 'Error has occurred.')
        self.assertIsNotNone(result.cell, 'Validate GetTableCell response')
        self.assertEqual('0.0.5.0.0', result.cell.node_id)
    #
    # Test for deleting cell.
    #
    def test_delete_table_cell(self):
        remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
        localFile = 'DocumentElements/Tables/TablesGet.docx'
        remoteFileName = 'TestDeleteTableCell.docx'
        self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))
        request = asposewordscloud.models.requests.DeleteTableCellRequest(name=remoteFileName, table_row_path='sections/0/tables/2/rows/0', index=0, folder=remoteDataFolder)
        self.words_api.delete_table_cell(request)
    #
    # Test for adding cell.
    #
    def test_insert_table_cell(self):
        remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
        localFile = 'DocumentElements/Tables/TablesGet.docx'
        remoteFileName = 'TestInsertTableCell.docx'
        self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))
        requestCell = asposewordscloud.TableCellInsert()
        request = asposewordscloud.models.requests.InsertTableCellRequest(name=remoteFileName, cell=requestCell, table_row_path='sections/0/tables/2/rows/0', folder=remoteDataFolder)
        result = self.words_api.insert_table_cell(request)
        self.assertIsNotNone(result, 'Error has occurred.')
        self.assertIsNotNone(result.cell, 'Validate InsertTableCell response')
        self.assertEqual('0.0.5.0.3', result.cell.node_id)
    #
    # Test for getting cell format.
    #
    def test_get_table_cell_format(self):
        remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
        localFile = 'DocumentElements/Tables/TablesGet.docx'
        remoteFileName = 'TestGetTableCellFormat.docx'
        self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))
        request = asposewordscloud.models.requests.GetTableCellFormatRequest(name=remoteFileName, table_row_path='sections/0/tables/2/rows/0', index=0, folder=remoteDataFolder)
        result = self.words_api.get_table_cell_format(request)
        self.assertIsNotNone(result, 'Error has occurred.')
        self.assertIsNotNone(result.cell_format, 'Validate GetTableCellFormat response')
        self.assertTrue(result.cell_format.wrap_text, 'Validate GetTableCellFormat response')
    #
    # Test for updating cell format.
    #
    def test_update_table_cell_format(self):
        remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
        localFile = 'DocumentElements/Tables/TablesGet.docx'
        remoteFileName = 'TestUpdateTableCellFormat.docx'
        self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))
        requestFormat = asposewordscloud.TableCellFormat(bottom_padding=5.0, fit_text=True, horizontal_merge='First', wrap_text=True)
        request = asposewordscloud.models.requests.UpdateTableCellFormatRequest(name=remoteFileName, format=requestFormat, table_row_path='sections/0/tables/2/rows/0', index=0, folder=remoteDataFolder)
        result = self.words_api.update_table_cell_format(request)
        self.assertIsNotNone(result, 'Error has occurred.')
        self.assertIsNotNone(result.cell_format, 'Validate UpdateTableCellFormat response')
        self.assertEqual(5.0, result.cell_format.bottom_padding)
        self.assertTrue(result.cell_format.fit_text, 'Validate UpdateTableCellFormat response')
        self.assertTrue(result.cell_format.wrap_text, 'Validate UpdateTableCellFormat response')
    #
    # Test for table rendering.
    #
    def test_render_table(self):
        remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
        localFile = 'DocumentElements/Tables/TablesGet.docx'
        remoteFileName = 'TestRenderTable.docx'
        self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))
        request = asposewordscloud.models.requests.RenderTableRequest(name=remoteFileName, format='png', index=0, node_path='', folder=remoteDataFolder)
        result = self.words_api.render_table(request)
        self.assertIsNotNone(result, 'Error has occurred.')
    #
    # Test for table rendering without node path.
    #
    def test_render_table_without_node_path(self):
        remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
        localFile = 'DocumentElements/Tables/TablesGet.docx'
        remoteFileName = 'TestRenderTableWithoutNodePath.docx'
        self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))
        request = asposewordscloud.models.requests.RenderTableRequest(name=remoteFileName, format='png', index=0, folder=remoteDataFolder)
        result = self.words_api.render_table(request)
        self.assertIsNotNone(result, 'Error has occurred.')
| 51.803419 | 201 | 0.735068 |
import os
import dateutil.parser
import asposewordscloud.models.requests
from test.base_test_context import BaseTestContext
class TestTable(BaseTestContext):
def test_get_tables(self):
remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
localFile = 'DocumentElements/Tables/TablesGet.docx'
remoteFileName = 'TestGetTables.docx'
self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))
request = asposewordscloud.models.requests.GetTablesRequest(name=remoteFileName, node_path='', folder=remoteDataFolder)
result = self.words_api.get_tables(request)
self.assertIsNotNone(result, 'Error has occurred.')
self.assertIsNotNone(result.tables, 'Validate GetTables response')
self.assertIsNotNone(result.tables.table_link_list, 'Validate GetTables response')
self.assertEqual(5, len(result.tables.table_link_list))
self.assertEqual('0.0.1', result.tables.table_link_list[0].node_id)
def test_get_tables_without_node_path(self):
remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
localFile = 'DocumentElements/Tables/TablesGet.docx'
remoteFileName = 'TestGetTablesWithoutNodePath.docx'
self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))
request = asposewordscloud.models.requests.GetTablesRequest(name=remoteFileName, folder=remoteDataFolder)
result = self.words_api.get_tables(request)
self.assertIsNotNone(result, 'Error has occurred.')
self.assertIsNotNone(result.tables, 'Validate GetTablesWithoutNodePath response')
self.assertIsNotNone(result.tables.table_link_list, 'Validate GetTablesWithoutNodePath response')
self.assertEqual(5, len(result.tables.table_link_list))
self.assertEqual('0.0.1', result.tables.table_link_list[0].node_id)
def test_get_table(self):
remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
localFile = 'DocumentElements/Tables/TablesGet.docx'
remoteFileName = 'TestGetTable.docx'
self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))
request = asposewordscloud.models.requests.GetTableRequest(name=remoteFileName, index=1, node_path='', folder=remoteDataFolder)
result = self.words_api.get_table(request)
self.assertIsNotNone(result, 'Error has occurred.')
self.assertIsNotNone(result.table, 'Validate GetTable response')
self.assertIsNotNone(result.table.table_row_list, 'Validate GetTable response')
self.assertEqual(1, len(result.table.table_row_list))
self.assertIsNotNone(result.table.table_row_list[0].table_cell_list, 'Validate GetTable response')
self.assertEqual(2, len(result.table.table_row_list[0].table_cell_list))
def test_get_table_without_node_path(self):
remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
localFile = 'DocumentElements/Tables/TablesGet.docx'
remoteFileName = 'TestGetTableWithoutNodePath.docx'
self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))
request = asposewordscloud.models.requests.GetTableRequest(name=remoteFileName, index=1, folder=remoteDataFolder)
result = self.words_api.get_table(request)
self.assertIsNotNone(result, 'Error has occurred.')
self.assertIsNotNone(result.table, 'Validate GetTableWithoutNodePath response')
self.assertIsNotNone(result.table.table_row_list, 'Validate GetTableWithoutNodePath response')
self.assertEqual(1, len(result.table.table_row_list))
self.assertIsNotNone(result.table.table_row_list[0].table_cell_list, 'Validate GetTableWithoutNodePath response')
self.assertEqual(2, len(result.table.table_row_list[0].table_cell_list))
def test_delete_table(self):
remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
localFile = 'DocumentElements/Tables/TablesGet.docx'
remoteFileName = 'TestDeleteTable.docx'
self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))
request = asposewordscloud.models.requests.DeleteTableRequest(name=remoteFileName, index=1, node_path='', folder=remoteDataFolder)
self.words_api.delete_table(request)
def test_delete_table_without_node_path(self):
remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
localFile = 'DocumentElements/Tables/TablesGet.docx'
remoteFileName = 'TestDeleteTableWithoutNodePath.docx'
self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))
request = asposewordscloud.models.requests.DeleteTableRequest(name=remoteFileName, index=1, folder=remoteDataFolder)
self.words_api.delete_table(request)
def test_insert_table(self):
remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
localFile = 'DocumentElements/Tables/TablesGet.docx'
remoteFileName = 'TestInsertTable.docx'
self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))
requestTable = asposewordscloud.TableInsert(columns_count=5, rows_count=4)
request = asposewordscloud.models.requests.InsertTableRequest(name=remoteFileName, table=requestTable, node_path='', folder=remoteDataFolder)
result = self.words_api.insert_table(request)
self.assertIsNotNone(result, 'Error has occurred.')
self.assertIsNotNone(result.table, 'Validate InsertTable response')
self.assertIsNotNone(result.table.table_row_list, 'Validate InsertTable response')
self.assertEqual(4, len(result.table.table_row_list))
self.assertIsNotNone(result.table.table_row_list[0].table_cell_list, 'Validate InsertTable response')
self.assertEqual(5, len(result.table.table_row_list[0].table_cell_list))
def test_insert_table_without_node_path(self):
remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
localFile = 'DocumentElements/Tables/TablesGet.docx'
remoteFileName = 'TestInsertTableWithoutNodePath.docx'
self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))
requestTable = asposewordscloud.TableInsert(columns_count=5, rows_count=4)
request = asposewordscloud.models.requests.InsertTableRequest(name=remoteFileName, table=requestTable, folder=remoteDataFolder)
result = self.words_api.insert_table(request)
self.assertIsNotNone(result, 'Error has occurred.')
self.assertIsNotNone(result.table, 'Validate InsertTableWithoutNodePath response')
self.assertIsNotNone(result.table.table_row_list, 'Validate InsertTableWithoutNodePath response')
self.assertEqual(4, len(result.table.table_row_list))
self.assertIsNotNone(result.table.table_row_list[0].table_cell_list, 'Validate InsertTableWithoutNodePath response')
self.assertEqual(5, len(result.table.table_row_list[0].table_cell_list))
def test_get_table_properties(self):
remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
localFile = 'DocumentElements/Tables/TablesGet.docx'
remoteFileName = 'TestGetTableProperties.docx'
self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))
request = asposewordscloud.models.requests.GetTablePropertiesRequest(name=remoteFileName, index=1, node_path='', folder=remoteDataFolder)
result = self.words_api.get_table_properties(request)
self.assertIsNotNone(result, 'Error has occurred.')
self.assertIsNotNone(result.properties, 'Validate GetTableProperties response')
self.assertEqual('Table Grid', result.properties.style_name)
def test_get_table_properties_without_node_path(self):
remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
localFile = 'DocumentElements/Tables/TablesGet.docx'
remoteFileName = 'TestGetTablePropertiesWithoutNodePath.docx'
self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))
request = asposewordscloud.models.requests.GetTablePropertiesRequest(name=remoteFileName, index=1, folder=remoteDataFolder)
result = self.words_api.get_table_properties(request)
self.assertIsNotNone(result, 'Error has occurred.')
self.assertIsNotNone(result.properties, 'Validate GetTablePropertiesWithoutNodePath response')
self.assertEqual('Table Grid', result.properties.style_name)
def test_update_table_properties(self):
remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
localFile = 'DocumentElements/Tables/TablesGet.docx'
remoteFileName = 'TestUpdateTableProperties.docx'
self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))
requestProperties = asposewordscloud.TableProperties(alignment='Right', allow_auto_fit=False, bidi=True, bottom_padding=1, cell_spacing=2.0, style_options='ColumnBands')
request = asposewordscloud.models.requests.UpdateTablePropertiesRequest(name=remoteFileName, properties=requestProperties, index=1, node_path='', folder=remoteDataFolder)
result = self.words_api.update_table_properties(request)
self.assertIsNotNone(result, 'Error has occurred.')
self.assertIsNotNone(result.properties, 'Validate UpdateTableProperties response')
self.assertFalse(result.properties.allow_auto_fit, 'Validate UpdateTableProperties response')
self.assertTrue(result.properties.bidi, 'Validate UpdateTableProperties response')
self.assertEqual(1.0, result.properties.bottom_padding)
self.assertEqual(2.0, result.properties.cell_spacing)
def test_update_table_properties_without_node_path(self):
remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
localFile = 'DocumentElements/Tables/TablesGet.docx'
remoteFileName = 'TestUpdateTablePropertiesWithoutNodePath.docx'
self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))
requestProperties = asposewordscloud.TableProperties(alignment='Right', allow_auto_fit=False, bidi=True, bottom_padding=1.0, cell_spacing=2.0, style_options='ColumnBands')
request = asposewordscloud.models.requests.UpdateTablePropertiesRequest(name=remoteFileName, properties=requestProperties, index=1, folder=remoteDataFolder)
result = self.words_api.update_table_properties(request)
self.assertIsNotNone(result, 'Error has occurred.')
self.assertIsNotNone(result.properties, 'Validate UpdateTablePropertiesWithoutNodePath response')
self.assertFalse(result.properties.allow_auto_fit, 'Validate UpdateTablePropertiesWithoutNodePath response')
self.assertTrue(result.properties.bidi, 'Validate UpdateTablePropertiesWithoutNodePath response')
self.assertEqual(1.0, result.properties.bottom_padding)
self.assertEqual(2.0, result.properties.cell_spacing)
def test_get_table_row(self):
remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
localFile = 'DocumentElements/Tables/TablesGet.docx'
remoteFileName = 'TestGetTableRow.docx'
self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))
request = asposewordscloud.models.requests.GetTableRowRequest(name=remoteFileName, table_path='tables/1', index=0, folder=remoteDataFolder)
result = self.words_api.get_table_row(request)
self.assertIsNotNone(result, 'Error has occurred.')
self.assertIsNotNone(result.row, 'Validate GetTableRow response')
self.assertIsNotNone(result.row.table_cell_list, 'Validate GetTableRow response')
self.assertEqual(2, len(result.row.table_cell_list))
def test_delete_table_row(self):
remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
localFile = 'DocumentElements/Tables/TablesGet.docx'
remoteFileName = 'TestDeleteTableRow.docx'
self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))
request = asposewordscloud.models.requests.DeleteTableRowRequest(name=remoteFileName, table_path='tables/1', index=0, folder=remoteDataFolder)
self.words_api.delete_table_row(request)
def test_insert_table_row(self):
remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
localFile = 'DocumentElements/Tables/TablesGet.docx'
remoteFileName = 'TestInsertTableRow.docx'
self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))
requestRow = asposewordscloud.TableRowInsert(columns_count=5)
request = asposewordscloud.models.requests.InsertTableRowRequest(name=remoteFileName, row=requestRow, table_path='sections/0/tables/2', folder=remoteDataFolder)
result = self.words_api.insert_table_row(request)
self.assertIsNotNone(result, 'Error has occurred.')
self.assertIsNotNone(result.row, 'Validate InsertTableRow response')
self.assertIsNotNone(result.row.table_cell_list, 'Validate InsertTableRow response')
self.assertEqual(5, len(result.row.table_cell_list))
def test_get_table_row_format(self):
remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
localFile = 'DocumentElements/Tables/TablesGet.docx'
remoteFileName = 'TestGetTableRowFormat.docx'
self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))
request = asposewordscloud.models.requests.GetTableRowFormatRequest(name=remoteFileName, table_path='sections/0/tables/2', index=0, folder=remoteDataFolder)
result = self.words_api.get_table_row_format(request)
self.assertIsNotNone(result, 'Error has occurred.')
self.assertIsNotNone(result.row_format, 'Validate GetTableRowFormat response')
self.assertTrue(result.row_format.allow_break_across_pages, 'Validate GetTableRowFormat response')
def test_update_table_row_format(self):
remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
localFile = 'DocumentElements/Tables/TablesGet.docx'
remoteFileName = 'TestUpdateTableRowFormat.docx'
self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))
requestFormat = asposewordscloud.TableRowFormat(allow_break_across_pages=True, heading_format=True, height=10.0, height_rule='Exactly')
request = asposewordscloud.models.requests.UpdateTableRowFormatRequest(name=remoteFileName, format=requestFormat, table_path='sections/0/tables/2', index=0, folder=remoteDataFolder)
result = self.words_api.update_table_row_format(request)
self.assertIsNotNone(result, 'Error has occurred.')
self.assertIsNotNone(result.row_format, 'Validate UpdateTableRowFormat response')
self.assertTrue(result.row_format.allow_break_across_pages, 'Validate UpdateTableRowFormat response')
self.assertTrue(result.row_format.heading_format, 'Validate UpdateTableRowFormat response')
self.assertEqual(10.0, result.row_format.height)
def test_get_table_cell(self):
remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
localFile = 'DocumentElements/Tables/TablesGet.docx'
remoteFileName = 'TestGetTableCell.docx'
self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))
request = asposewordscloud.models.requests.GetTableCellRequest(name=remoteFileName, table_row_path='sections/0/tables/2/rows/0', index=0, folder=remoteDataFolder)
result = self.words_api.get_table_cell(request)
self.assertIsNotNone(result, 'Error has occurred.')
self.assertIsNotNone(result.cell, 'Validate GetTableCell response')
self.assertEqual('0.0.5.0.0', result.cell.node_id)
def test_delete_table_cell(self):
remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
localFile = 'DocumentElements/Tables/TablesGet.docx'
remoteFileName = 'TestDeleteTableCell.docx'
self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))
request = asposewordscloud.models.requests.DeleteTableCellRequest(name=remoteFileName, table_row_path='sections/0/tables/2/rows/0', index=0, folder=remoteDataFolder)
self.words_api.delete_table_cell(request)
def test_insert_table_cell(self):
remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
localFile = 'DocumentElements/Tables/TablesGet.docx'
remoteFileName = 'TestInsertTableCell.docx'
self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))
requestCell = asposewordscloud.TableCellInsert()
request = asposewordscloud.models.requests.InsertTableCellRequest(name=remoteFileName, cell=requestCell, table_row_path='sections/0/tables/2/rows/0', folder=remoteDataFolder)
result = self.words_api.insert_table_cell(request)
self.assertIsNotNone(result, 'Error has occurred.')
self.assertIsNotNone(result.cell, 'Validate InsertTableCell response')
self.assertEqual('0.0.5.0.3', result.cell.node_id)
def test_get_table_cell_format(self):
remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
localFile = 'DocumentElements/Tables/TablesGet.docx'
remoteFileName = 'TestGetTableCellFormat.docx'
self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))
request = asposewordscloud.models.requests.GetTableCellFormatRequest(name=remoteFileName, table_row_path='sections/0/tables/2/rows/0', index=0, folder=remoteDataFolder)
result = self.words_api.get_table_cell_format(request)
self.assertIsNotNone(result, 'Error has occurred.')
self.assertIsNotNone(result.cell_format, 'Validate GetTableCellFormat response')
self.assertTrue(result.cell_format.wrap_text, 'Validate GetTableCellFormat response')
def test_update_table_cell_format(self):
remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
localFile = 'DocumentElements/Tables/TablesGet.docx'
remoteFileName = 'TestUpdateTableCellFormat.docx'
self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))
requestFormat = asposewordscloud.TableCellFormat(bottom_padding=5.0, fit_text=True, horizontal_merge='First', wrap_text=True)
request = asposewordscloud.models.requests.UpdateTableCellFormatRequest(name=remoteFileName, format=requestFormat, table_row_path='sections/0/tables/2/rows/0', index=0, folder=remoteDataFolder)
result = self.words_api.update_table_cell_format(request)
self.assertIsNotNone(result, 'Error has occurred.')
self.assertIsNotNone(result.cell_format, 'Validate UpdateTableCellFormat response')
self.assertEqual(5.0, result.cell_format.bottom_padding)
self.assertTrue(result.cell_format.fit_text, 'Validate UpdateTableCellFormat response')
self.assertTrue(result.cell_format.wrap_text, 'Validate UpdateTableCellFormat response')
def test_render_table(self):
remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
localFile = 'DocumentElements/Tables/TablesGet.docx'
remoteFileName = 'TestRenderTable.docx'
self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))
request = asposewordscloud.models.requests.RenderTableRequest(name=remoteFileName, format='png', index=0, node_path='', folder=remoteDataFolder)
result = self.words_api.render_table(request)
self.assertIsNotNone(result, 'Error has occurred.')
def test_render_table_without_node_path(self):
remoteDataFolder = self.remote_test_folder + '/DocumentElements/Tables'
localFile = 'DocumentElements/Tables/TablesGet.docx'
remoteFileName = 'TestRenderTableWithoutNodePath.docx'
self.upload_file(remoteDataFolder + '/' + remoteFileName, open(os.path.join(self.local_test_folder, localFile), 'rb'))
request = asposewordscloud.models.requests.RenderTableRequest(name=remoteFileName, format='png', index=0, folder=remoteDataFolder)
result = self.words_api.render_table(request)
self.assertIsNotNone(result, 'Error has occurred.')
| true | true |
f72accd8900bf752d4868f03ba6ce4c1c4210e08 | 7,851 | py | Python | kaggle/ghouls-goblins-and-ghosts-boo/script_3.py | josepablocam/janus-public | 4713092b27d02386bdb408213d8edc0dc5859eec | [
"MIT"
] | null | null | null | kaggle/ghouls-goblins-and-ghosts-boo/script_3.py | josepablocam/janus-public | 4713092b27d02386bdb408213d8edc0dc5859eec | [
"MIT"
] | null | null | null | kaggle/ghouls-goblins-and-ghosts-boo/script_3.py | josepablocam/janus-public | 4713092b27d02386bdb408213d8edc0dc5859eec | [
"MIT"
] | null | null | null | #Libraries
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
sns.set_style('whitegrid')
from sklearn.preprocessing import LabelEncoder
from sklearn.model_selection import train_test_split
from sklearn.ensemble import RandomForestClassifier
from sklearn.calibration import CalibratedClassifierCV
import xgboost as xgb
from sklearn.model_selection import GridSearchCV
from sklearn.model_selection import StratifiedKFold
from sklearn.feature_selection import SelectFromModel
from sklearn.linear_model import LogisticRegression
from sklearn import svm
from sklearn.ensemble import VotingClassifier
from sklearn.naive_bayes import GaussianNB
train = pd.read_csv('../input/train.csv')
test = pd.read_csv('../input/test.csv')
train.info()
train.describe(include='all')
train.head()
plt.subplot(1,4,1)
train.groupby('type').mean()['rotting_flesh'].plot(kind='bar',figsize=(7,4), color='r')
plt.subplot(1,4,2)
train.groupby('type').mean()['bone_length'].plot(kind='bar',figsize=(7,4), color='g')
plt.subplot(1,4,3)
train.groupby('type').mean()['hair_length'].plot(kind='bar',figsize=(7,4), color='y')
plt.subplot(1,4,4)
train.groupby('type').mean()['has_soul'].plot(kind='bar',figsize=(7,4), color='teal')
sns.factorplot("type", col="color", col_wrap=4, data=train, kind="count", size=2.4, aspect=.8)
#The graphs look much better with higher figsize.
fig, ax = plt.subplots(2, 2, figsize = (16, 12))
sns.pointplot(x="color", y="rotting_flesh", hue="type", data=train, ax = ax[0, 0])
sns.pointplot(x="color", y="bone_length", hue="type", data=train, ax = ax[0, 1])
sns.pointplot(x="color", y="hair_length", hue="type", data=train, ax = ax[1, 0])
sns.pointplot(x="color", y="has_soul", hue="type", data=train, ax = ax[1, 1])
sns.pairplot(train, hue='type')
train['hair_soul'] = train['hair_length'] * train['has_soul']
train['hair_bone'] = train['hair_length'] * train['bone_length']
test['hair_soul'] = test['hair_length'] * test['has_soul']
test['hair_bone'] = test['hair_length'] * test['bone_length']
train['hair_soul_bone'] = train['hair_length'] * train['has_soul'] * train['bone_length']
test['hair_soul_bone'] = test['hair_length'] * test['has_soul'] * test['bone_length']
#test_id will be used later, so save it
test_id = test['id']
train.drop(['id'], axis=1, inplace=True)
test.drop(['id'], axis=1, inplace=True)
#Deal with 'color' column
col = 'color'
dummies = pd.get_dummies(train[col], drop_first=False)
dummies = dummies.add_prefix("{}#".format(col))
train.drop(col, axis=1, inplace=True)
train = train.join(dummies)
dummies = pd.get_dummies(test[col], drop_first=False)
dummies = dummies.add_prefix("{}#".format(col))
test.drop(col, axis=1, inplace=True)
test = test.join(dummies)
X_train = train.drop('type', axis=1)
le = LabelEncoder()
Y_train = le.fit_transform(train.type.values)
X_test = test
clf = RandomForestClassifier(n_estimators=200)
clf = clf.fit(X_train, Y_train)
indices = np.argsort(clf.feature_importances_)[::-1]
# Print the feature ranking
print('Feature ranking:')
for f in range(X_train.shape[1]):
print('%d. feature %d %s (%f)' % (f + 1, indices[f], X_train.columns[indices[f]],
clf.feature_importances_[indices[f]]))
best_features=X_train.columns[indices[0:7]]
X = X_train[best_features]
Xt = X_test[best_features]
#Splitting data for validation
Xtrain, Xtest, ytrain, ytest = train_test_split(X, Y_train, test_size=0.20, random_state=36)
forest = RandomForestClassifier(max_depth = 100,
min_samples_split =7,
min_weight_fraction_leaf = 0.0,
max_leaf_nodes = 60)
parameter_grid = {'n_estimators' : [10, 20, 100, 150],
'criterion' : ['gini', 'entropy'],
'max_features' : ['auto', 'sqrt', 'log2', None]
}
grid_search = GridSearchCV(forest, param_grid=parameter_grid, scoring='accuracy', cv=StratifiedKFold(5))
grid_search.fit(Xtrain, ytrain)
print('Best score: {}'.format(grid_search.best_score_))
print('Best parameters: {}'.format(grid_search.best_params_))
forest = RandomForestClassifier(n_estimators = 150,
criterion = 'entropy',
max_features = 'auto')
parameter_grid = {
'max_depth' : [None, 5, 20, 100],
'min_samples_split' : [2, 5, 7],
'min_weight_fraction_leaf' : [0.0, 0.1],
'max_leaf_nodes' : [40, 60, 80],
}
grid_search = GridSearchCV(forest, param_grid=parameter_grid, scoring='accuracy', cv=StratifiedKFold(5))
grid_search.fit(Xtrain, ytrain)
print('Best score: {}'.format(grid_search.best_score_))
print('Best parameters: {}'.format(grid_search.best_params_))
#Optimal parameters
clf = RandomForestClassifier(n_estimators=150, n_jobs=-1, criterion = 'entropy', max_features = 'auto',
min_samples_split=7, min_weight_fraction_leaf=0.0,
max_leaf_nodes=40, max_depth=20)
#Calibration improves probability predictions
calibrated_clf = CalibratedClassifierCV(clf, method='sigmoid', cv=5)
calibrated_clf.fit(Xtrain, ytrain)
y_val = calibrated_clf.predict_proba(Xtest)
print("Validation accuracy: ", sum(pd.DataFrame(y_val, columns=le.classes_).idxmax(axis=1).values
== le.inverse_transform(ytest))/len(ytest))
svc = svm.SVC(kernel='linear')
svc.fit(Xtrain, ytrain)
y_val_s = svc.predict(Xtest)
print("Validation accuracy: ", sum(le.inverse_transform(y_val_s)
== le.inverse_transform(ytest))/len(ytest))
#The last model is logistic regression
logreg = LogisticRegression()
parameter_grid = {'solver' : ['newton-cg', 'lbfgs'],
'multi_class' : ['ovr', 'multinomial'],
'C' : [0.005, 0.01, 1, 10, 100, 1000],
'tol': [0.0001, 0.001, 0.005]
}
grid_search = GridSearchCV(logreg, param_grid=parameter_grid, cv=StratifiedKFold(5))
grid_search.fit(Xtrain, ytrain)
print('Best score: {}'.format(grid_search.best_score_))
print('Best parameters: {}'.format(grid_search.best_params_))
log_reg = LogisticRegression(C = 1, tol = 0.0001, solver='newton-cg', multi_class='multinomial')
log_reg.fit(Xtrain, ytrain)
y_val_l = log_reg.predict_proba(Xtest)
print("Validation accuracy: ", sum(pd.DataFrame(y_val_l, columns=le.classes_).idxmax(axis=1).values
== le.inverse_transform(ytest))/len(ytest))
clf = RandomForestClassifier(n_estimators=20, n_jobs=-1, criterion = 'gini', max_features = 'sqrt',
min_samples_split=2, min_weight_fraction_leaf=0.0,
max_leaf_nodes=40, max_depth=100)
calibrated_clf = CalibratedClassifierCV(clf, method='sigmoid', cv=5)
log_reg = LogisticRegression(C = 1, tol = 0.0001, solver='newton-cg', multi_class='multinomial')
gnb = GaussianNB()
calibrated_clf1 = CalibratedClassifierCV(RandomForestClassifier())
log_reg1 = LogisticRegression()
gnb1 = GaussianNB()
Vclf1 = VotingClassifier(estimators=[('LR', log_reg1), ('CRF', calibrated_clf1),
('GNB', gnb1)], voting='hard')
Vclf = VotingClassifier(estimators=[('LR', log_reg), ('CRF', calibrated_clf),
('GNB', gnb)], voting='soft', weights=[1,1,1])
hard_predict = le.inverse_transform(Vclf1.fit(X, Y_train).predict(Xt))
soft_predict = le.inverse_transform(Vclf.fit(X, Y_train).predict(Xt))
#Let's see the differences:
for i in range(len(hard_predict)):
if hard_predict[i] != soft_predict[i]:
print(i, hard_predict[i], soft_predict[i])
submission = pd.DataFrame({'id':test_id, 'type':hard_predict})
submission.to_csv('GGG_submission.csv', index=False)
| 47.011976 | 104 | 0.672271 |
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
sns.set_style('whitegrid')
from sklearn.preprocessing import LabelEncoder
from sklearn.model_selection import train_test_split
from sklearn.ensemble import RandomForestClassifier
from sklearn.calibration import CalibratedClassifierCV
import xgboost as xgb
from sklearn.model_selection import GridSearchCV
from sklearn.model_selection import StratifiedKFold
from sklearn.feature_selection import SelectFromModel
from sklearn.linear_model import LogisticRegression
from sklearn import svm
from sklearn.ensemble import VotingClassifier
from sklearn.naive_bayes import GaussianNB
train = pd.read_csv('../input/train.csv')
test = pd.read_csv('../input/test.csv')
train.info()
train.describe(include='all')
train.head()
plt.subplot(1,4,1)
train.groupby('type').mean()['rotting_flesh'].plot(kind='bar',figsize=(7,4), color='r')
plt.subplot(1,4,2)
train.groupby('type').mean()['bone_length'].plot(kind='bar',figsize=(7,4), color='g')
plt.subplot(1,4,3)
train.groupby('type').mean()['hair_length'].plot(kind='bar',figsize=(7,4), color='y')
plt.subplot(1,4,4)
train.groupby('type').mean()['has_soul'].plot(kind='bar',figsize=(7,4), color='teal')
sns.factorplot("type", col="color", col_wrap=4, data=train, kind="count", size=2.4, aspect=.8)
fig, ax = plt.subplots(2, 2, figsize = (16, 12))
sns.pointplot(x="color", y="rotting_flesh", hue="type", data=train, ax = ax[0, 0])
sns.pointplot(x="color", y="bone_length", hue="type", data=train, ax = ax[0, 1])
sns.pointplot(x="color", y="hair_length", hue="type", data=train, ax = ax[1, 0])
sns.pointplot(x="color", y="has_soul", hue="type", data=train, ax = ax[1, 1])
sns.pairplot(train, hue='type')
train['hair_soul'] = train['hair_length'] * train['has_soul']
train['hair_bone'] = train['hair_length'] * train['bone_length']
test['hair_soul'] = test['hair_length'] * test['has_soul']
test['hair_bone'] = test['hair_length'] * test['bone_length']
train['hair_soul_bone'] = train['hair_length'] * train['has_soul'] * train['bone_length']
test['hair_soul_bone'] = test['hair_length'] * test['has_soul'] * test['bone_length']
test_id = test['id']
train.drop(['id'], axis=1, inplace=True)
test.drop(['id'], axis=1, inplace=True)
col = 'color'
dummies = pd.get_dummies(train[col], drop_first=False)
dummies = dummies.add_prefix("{}#".format(col))
train.drop(col, axis=1, inplace=True)
train = train.join(dummies)
dummies = pd.get_dummies(test[col], drop_first=False)
dummies = dummies.add_prefix("{}#".format(col))
test.drop(col, axis=1, inplace=True)
test = test.join(dummies)
X_train = train.drop('type', axis=1)
le = LabelEncoder()
Y_train = le.fit_transform(train.type.values)
X_test = test
clf = RandomForestClassifier(n_estimators=200)
clf = clf.fit(X_train, Y_train)
indices = np.argsort(clf.feature_importances_)[::-1]
print('Feature ranking:')
for f in range(X_train.shape[1]):
print('%d. feature %d %s (%f)' % (f + 1, indices[f], X_train.columns[indices[f]],
clf.feature_importances_[indices[f]]))
best_features=X_train.columns[indices[0:7]]
X = X_train[best_features]
Xt = X_test[best_features]
Xtrain, Xtest, ytrain, ytest = train_test_split(X, Y_train, test_size=0.20, random_state=36)
forest = RandomForestClassifier(max_depth = 100,
min_samples_split =7,
min_weight_fraction_leaf = 0.0,
max_leaf_nodes = 60)
parameter_grid = {'n_estimators' : [10, 20, 100, 150],
'criterion' : ['gini', 'entropy'],
'max_features' : ['auto', 'sqrt', 'log2', None]
}
grid_search = GridSearchCV(forest, param_grid=parameter_grid, scoring='accuracy', cv=StratifiedKFold(5))
grid_search.fit(Xtrain, ytrain)
print('Best score: {}'.format(grid_search.best_score_))
print('Best parameters: {}'.format(grid_search.best_params_))
forest = RandomForestClassifier(n_estimators = 150,
criterion = 'entropy',
max_features = 'auto')
parameter_grid = {
'max_depth' : [None, 5, 20, 100],
'min_samples_split' : [2, 5, 7],
'min_weight_fraction_leaf' : [0.0, 0.1],
'max_leaf_nodes' : [40, 60, 80],
}
grid_search = GridSearchCV(forest, param_grid=parameter_grid, scoring='accuracy', cv=StratifiedKFold(5))
grid_search.fit(Xtrain, ytrain)
print('Best score: {}'.format(grid_search.best_score_))
print('Best parameters: {}'.format(grid_search.best_params_))
clf = RandomForestClassifier(n_estimators=150, n_jobs=-1, criterion = 'entropy', max_features = 'auto',
min_samples_split=7, min_weight_fraction_leaf=0.0,
max_leaf_nodes=40, max_depth=20)
calibrated_clf = CalibratedClassifierCV(clf, method='sigmoid', cv=5)
calibrated_clf.fit(Xtrain, ytrain)
y_val = calibrated_clf.predict_proba(Xtest)
print("Validation accuracy: ", sum(pd.DataFrame(y_val, columns=le.classes_).idxmax(axis=1).values
== le.inverse_transform(ytest))/len(ytest))
svc = svm.SVC(kernel='linear')
svc.fit(Xtrain, ytrain)
y_val_s = svc.predict(Xtest)
print("Validation accuracy: ", sum(le.inverse_transform(y_val_s)
== le.inverse_transform(ytest))/len(ytest))
logreg = LogisticRegression()
parameter_grid = {'solver' : ['newton-cg', 'lbfgs'],
'multi_class' : ['ovr', 'multinomial'],
'C' : [0.005, 0.01, 1, 10, 100, 1000],
'tol': [0.0001, 0.001, 0.005]
}
grid_search = GridSearchCV(logreg, param_grid=parameter_grid, cv=StratifiedKFold(5))
grid_search.fit(Xtrain, ytrain)
print('Best score: {}'.format(grid_search.best_score_))
print('Best parameters: {}'.format(grid_search.best_params_))
log_reg = LogisticRegression(C = 1, tol = 0.0001, solver='newton-cg', multi_class='multinomial')
log_reg.fit(Xtrain, ytrain)
y_val_l = log_reg.predict_proba(Xtest)
print("Validation accuracy: ", sum(pd.DataFrame(y_val_l, columns=le.classes_).idxmax(axis=1).values
== le.inverse_transform(ytest))/len(ytest))
clf = RandomForestClassifier(n_estimators=20, n_jobs=-1, criterion = 'gini', max_features = 'sqrt',
min_samples_split=2, min_weight_fraction_leaf=0.0,
max_leaf_nodes=40, max_depth=100)
calibrated_clf = CalibratedClassifierCV(clf, method='sigmoid', cv=5)
log_reg = LogisticRegression(C = 1, tol = 0.0001, solver='newton-cg', multi_class='multinomial')
gnb = GaussianNB()
calibrated_clf1 = CalibratedClassifierCV(RandomForestClassifier())
log_reg1 = LogisticRegression()
gnb1 = GaussianNB()
Vclf1 = VotingClassifier(estimators=[('LR', log_reg1), ('CRF', calibrated_clf1),
('GNB', gnb1)], voting='hard')
Vclf = VotingClassifier(estimators=[('LR', log_reg), ('CRF', calibrated_clf),
('GNB', gnb)], voting='soft', weights=[1,1,1])
hard_predict = le.inverse_transform(Vclf1.fit(X, Y_train).predict(Xt))
soft_predict = le.inverse_transform(Vclf.fit(X, Y_train).predict(Xt))
for i in range(len(hard_predict)):
if hard_predict[i] != soft_predict[i]:
print(i, hard_predict[i], soft_predict[i])
submission = pd.DataFrame({'id':test_id, 'type':hard_predict})
submission.to_csv('GGG_submission.csv', index=False)
| true | true |
f72acedfe31ef0d6425a9d5e280c234bf012eb1c | 2,456 | py | Python | example.py | macky168/gaopt | bf2785325d3cb4489513f47ed06f745a059262f8 | [
"MIT"
] | null | null | null | example.py | macky168/gaopt | bf2785325d3cb4489513f47ed06f745a059262f8 | [
"MIT"
] | null | null | null | example.py | macky168/gaopt | bf2785325d3cb4489513f47ed06f745a059262f8 | [
"MIT"
] | null | null | null | import gaopt
from gaopt import search_space
import pandas as pd
import numpy as np
import lightgbm as lgb
from sklearn.model_selection import train_test_split
from sklearn.metrics import r2_score
from sklearn.datasets import load_diabetes
params_range={
'lambda_l1': search_space.discrete_int(-8, 2),
'lambda_l2': search_space.discrete_int(-8, 2),
'num_leaves': search_space.discrete(2, 100, 4),
'feature_fraction': search_space.discrete(0.1, 1.0, 0.02),
'bagging_fraction': search_space.discrete(0.1, 1.0, 0.02),
'bagging_freq': search_space.discrete_int(0,1),
'min_child_samples': search_space.discrete_int(1,30),
}
cal_time_lst = []
date_start = None
def objective1(params):
diabetes = load_diabetes()
X = diabetes.data
y = diabetes.target
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size = 0.3, random_state = 0)
X_train, X_valid, y_train, y_valid = train_test_split(X_train, y_train, test_size = 0.3, random_state = 0)
lgb_train = lgb.Dataset(data=X_train, label=y_train)
lgb_valid = lgb.Dataset(data=X_valid, label=y_valid)
params ={
'lambda_l1': 10**params.lambda_l1,
'lambda_l2': 10**params.lambda_l2,
'num_leaves': params.num_leaves,
'feature_fraction': params.feature_fraction,
'bagging_fraction': params.bagging_fraction,
'bagging_freq': params.bagging_freq,
'min_child_samples': params.min_child_samples,
'objective': 'regression',
'metric': 'rmse',
"verbosity": -1,
"seed": 0
}
model = lgb.train(params,
train_set=lgb_train,
valid_sets=lgb_valid,
verbose_eval=False
)
y_pred_lgb = model.predict(X_test)
fitness = r2_score(y_test, y_pred_lgb)
return fitness
def main():
p_m = 0.10
p_c = 0.7
population = 30
generation = 50
instance = gaopt.GAOpt(params_range, objective=objective1, generation=generation, population=population,
p_m=p_m, p_c=p_c, elitism=True,
history=2, verbose=2, maximizing=True)
best_params, best_fitness, best_fitness_lst, worst_fitness_lst, mean_fitness_lst, median_fitness_lst, sd_fitness_lst, search_history_lst = instance.fit()
print("best params: ", best_params)
print("best fitness: ", best_fitness)
if __name__ == '__main__':
main()
| 31.487179 | 157 | 0.664088 | import gaopt
from gaopt import search_space
import pandas as pd
import numpy as np
import lightgbm as lgb
from sklearn.model_selection import train_test_split
from sklearn.metrics import r2_score
from sklearn.datasets import load_diabetes
params_range={
'lambda_l1': search_space.discrete_int(-8, 2),
'lambda_l2': search_space.discrete_int(-8, 2),
'num_leaves': search_space.discrete(2, 100, 4),
'feature_fraction': search_space.discrete(0.1, 1.0, 0.02),
'bagging_fraction': search_space.discrete(0.1, 1.0, 0.02),
'bagging_freq': search_space.discrete_int(0,1),
'min_child_samples': search_space.discrete_int(1,30),
}
cal_time_lst = []
date_start = None
def objective1(params):
diabetes = load_diabetes()
X = diabetes.data
y = diabetes.target
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size = 0.3, random_state = 0)
X_train, X_valid, y_train, y_valid = train_test_split(X_train, y_train, test_size = 0.3, random_state = 0)
lgb_train = lgb.Dataset(data=X_train, label=y_train)
lgb_valid = lgb.Dataset(data=X_valid, label=y_valid)
params ={
'lambda_l1': 10**params.lambda_l1,
'lambda_l2': 10**params.lambda_l2,
'num_leaves': params.num_leaves,
'feature_fraction': params.feature_fraction,
'bagging_fraction': params.bagging_fraction,
'bagging_freq': params.bagging_freq,
'min_child_samples': params.min_child_samples,
'objective': 'regression',
'metric': 'rmse',
"verbosity": -1,
"seed": 0
}
model = lgb.train(params,
train_set=lgb_train,
valid_sets=lgb_valid,
verbose_eval=False
)
y_pred_lgb = model.predict(X_test)
fitness = r2_score(y_test, y_pred_lgb)
return fitness
def main():
p_m = 0.10
p_c = 0.7
population = 30
generation = 50
instance = gaopt.GAOpt(params_range, objective=objective1, generation=generation, population=population,
p_m=p_m, p_c=p_c, elitism=True,
history=2, verbose=2, maximizing=True)
best_params, best_fitness, best_fitness_lst, worst_fitness_lst, mean_fitness_lst, median_fitness_lst, sd_fitness_lst, search_history_lst = instance.fit()
print("best params: ", best_params)
print("best fitness: ", best_fitness)
if __name__ == '__main__':
main()
| true | true |
f72acf6685fa304f560b7aba21b3cc59df08af86 | 1,407 | py | Python | plotly/validators/contour/colorbar/_tickfont.py | faezs/plotly.py | 6009b5b9c746e5d2a2849ad255a4eb234b551ed7 | [
"MIT"
] | 1 | 2018-07-16T01:51:47.000Z | 2018-07-16T01:51:47.000Z | plotly/validators/contour/colorbar/_tickfont.py | faezs/plotly.py | 6009b5b9c746e5d2a2849ad255a4eb234b551ed7 | [
"MIT"
] | null | null | null | plotly/validators/contour/colorbar/_tickfont.py | faezs/plotly.py | 6009b5b9c746e5d2a2849ad255a4eb234b551ed7 | [
"MIT"
] | 1 | 2019-02-18T04:12:56.000Z | 2019-02-18T04:12:56.000Z | import _plotly_utils.basevalidators
class TickfontValidator(_plotly_utils.basevalidators.CompoundValidator):
def __init__(
self, plotly_name='tickfont', parent_name='contour.colorbar', **kwargs
):
super(TickfontValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
data_class_str='Tickfont',
data_docs="""
color
family
HTML font family - the typeface that will be
applied by the web browser. The web browser
will only be able to apply a font if it is
available on the system which it operates.
Provide multiple font families, separated by
commas, to indicate the preference in which to
apply fonts if they aren't available on the
system. The plotly service (at https://plot.ly
or on-premise) generates images on a server,
where only a select number of fonts are
installed and supported. These include *Arial*,
*Balto*, *Courier New*, *Droid Sans*,, *Droid
Serif*, *Droid Sans Mono*, *Gravitas One*, *Old
Standard TT*, *Open Sans*, *Overpass*, *PT Sans
Narrow*, *Raleway*, *Times New Roman*.
size
""",
**kwargs
)
| 39.083333 | 78 | 0.570007 | import _plotly_utils.basevalidators
class TickfontValidator(_plotly_utils.basevalidators.CompoundValidator):
def __init__(
self, plotly_name='tickfont', parent_name='contour.colorbar', **kwargs
):
super(TickfontValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
data_class_str='Tickfont',
data_docs="""
color
family
HTML font family - the typeface that will be
applied by the web browser. The web browser
will only be able to apply a font if it is
available on the system which it operates.
Provide multiple font families, separated by
commas, to indicate the preference in which to
apply fonts if they aren't available on the
system. The plotly service (at https://plot.ly
or on-premise) generates images on a server,
where only a select number of fonts are
installed and supported. These include *Arial*,
*Balto*, *Courier New*, *Droid Sans*,, *Droid
Serif*, *Droid Sans Mono*, *Gravitas One*, *Old
Standard TT*, *Open Sans*, *Overpass*, *PT Sans
Narrow*, *Raleway*, *Times New Roman*.
size
""",
**kwargs
)
| true | true |
f72acf916cc7270f998cfd07db89c1ac93ca5b18 | 1,812 | py | Python | src/scripts/extract_syscall.py | Manouchehri/Triton-docker | ce49ce9ba49965a5e7f814f2b46e50cc74b704de | [
"BSD-3-Clause"
] | 1 | 2020-11-15T15:21:12.000Z | 2020-11-15T15:21:12.000Z | src/scripts/extract_syscall.py | Manouchehri/Triton-docker | ce49ce9ba49965a5e7f814f2b46e50cc74b704de | [
"BSD-3-Clause"
] | null | null | null | src/scripts/extract_syscall.py | Manouchehri/Triton-docker | ce49ce9ba49965a5e7f814f2b46e50cc74b704de | [
"BSD-3-Clause"
] | null | null | null | #! /usr/bin/env python
#
# This script is used to generate the files src/utils/syscalls{32,64}.cpp.
# As the list of syscalls depends of your Kernel version. We must
# generate the list at the compile time.
#
from __future__ import print_function
import argparse
import sys
import re
import platform
HEADER = """
/*! \\file */
#if defined(__unix__) || defined(__APPLE__)
#include <syscalls.hpp>
namespace triton {
namespace os {
namespace unix {
"""
FOOTER = """
}; /* unix namespace */
}; /* os namespace */
}; /* triton namespace */
#endif
"""
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("file", help="this file must contains the syscalls definitions", type=str)
parser.add_argument("arch", help="syscall architecture - 32 or 64", type=str)
args = parser.parse_args()
if platform.system() == 'Linux':
regex = re.compile(r"#define\s+(__NR_)(\w+)\s+(\d+)")
elif platform.system() == 'Darwin':
regex = re.compile(r"#define\s+(SYS_)(\w+)\s+(\d+)")
else:
sys.exit(0)
with open(args.file) as hfile:
print(HEADER)
print(" const char* syscallmap%s[] = {" % args.arch)
counter = 0
for match in regex.finditer(hfile.read()):
prefix = str(match.groups()[0])
name = str(match.groups()[1])
sysid = int(match.groups()[2])
if counter != sysid:
for i in range(sysid - counter):
print(' "UNDEF", // undefined')
counter += 1
print(' "%s", // %s%s' % (name.upper(), prefix, name))
counter += 1
print(" };")
print()
print(" const unsigned int NB_SYSCALL%s = %d;" % (args.arch, counter))
print(FOOTER)
| 25.885714 | 98 | 0.570088 |
from __future__ import print_function
import argparse
import sys
import re
import platform
HEADER = """
/*! \\file */
#if defined(__unix__) || defined(__APPLE__)
#include <syscalls.hpp>
namespace triton {
namespace os {
namespace unix {
"""
FOOTER = """
}; /* unix namespace */
}; /* os namespace */
}; /* triton namespace */
#endif
"""
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("file", help="this file must contains the syscalls definitions", type=str)
parser.add_argument("arch", help="syscall architecture - 32 or 64", type=str)
args = parser.parse_args()
if platform.system() == 'Linux':
regex = re.compile(r"#define\s+(__NR_)(\w+)\s+(\d+)")
elif platform.system() == 'Darwin':
regex = re.compile(r"#define\s+(SYS_)(\w+)\s+(\d+)")
else:
sys.exit(0)
with open(args.file) as hfile:
print(HEADER)
print(" const char* syscallmap%s[] = {" % args.arch)
counter = 0
for match in regex.finditer(hfile.read()):
prefix = str(match.groups()[0])
name = str(match.groups()[1])
sysid = int(match.groups()[2])
if counter != sysid:
for i in range(sysid - counter):
print(' "UNDEF", // undefined')
counter += 1
print(' "%s", // %s%s' % (name.upper(), prefix, name))
counter += 1
print(" };")
print()
print(" const unsigned int NB_SYSCALL%s = %d;" % (args.arch, counter))
print(FOOTER)
| true | true |
f72ad055c9ca2d52827b7e4aa011c2370f6292dc | 15,695 | py | Python | electrum_ltc/tests/test_lnpeer.py | SynchrotronCoinDev/electrum-ltc | 178589f30ce57ca84e4d8bc7587f39522e9d17b3 | [
"MIT"
] | null | null | null | electrum_ltc/tests/test_lnpeer.py | SynchrotronCoinDev/electrum-ltc | 178589f30ce57ca84e4d8bc7587f39522e9d17b3 | [
"MIT"
] | null | null | null | electrum_ltc/tests/test_lnpeer.py | SynchrotronCoinDev/electrum-ltc | 178589f30ce57ca84e4d8bc7587f39522e9d17b3 | [
"MIT"
] | null | null | null | import asyncio
import tempfile
from decimal import Decimal
import os
from contextlib import contextmanager
from collections import defaultdict
import logging
import concurrent
from concurrent import futures
import unittest
from aiorpcx import TaskGroup
from electrum_ltc import constants
from electrum_ltc.network import Network
from electrum_ltc.ecc import ECPrivkey
from electrum_ltc import simple_config, lnutil
from electrum_ltc.lnaddr import lnencode, LnAddr, lndecode
from electrum_ltc.bitcoin import COIN, sha256
from electrum_ltc.util import bh2u, create_and_start_event_loop
from electrum_ltc.lnpeer import Peer
from electrum_ltc.lnutil import LNPeerAddr, Keypair, privkey_to_pubkey
from electrum_ltc.lnutil import LightningPeerConnectionClosed, RemoteMisbehaving
from electrum_ltc.lnutil import PaymentFailure, LnLocalFeatures, HTLCOwner
from electrum_ltc.lnchannel import channel_states, peer_states, Channel
from electrum_ltc.lnrouter import LNPathFinder
from electrum_ltc.channel_db import ChannelDB
from electrum_ltc.lnworker import LNWallet, NoPathFound
from electrum_ltc.lnmsg import encode_msg, decode_msg
from electrum_ltc.logging import console_stderr_handler, Logger
from electrum_ltc.lnworker import PaymentInfo, RECEIVED, PR_UNPAID
from .test_lnchannel import create_test_channels
from .test_bitcoin import needs_test_with_all_chacha20_implementations
from . import ElectrumTestCase
def keypair():
priv = ECPrivkey.generate_random_key().get_secret_bytes()
k1 = Keypair(
pubkey=privkey_to_pubkey(priv),
privkey=priv)
return k1
@contextmanager
def noop_lock():
yield
class MockNetwork:
def __init__(self, tx_queue):
self.callbacks = defaultdict(list)
self.lnwatcher = None
self.interface = None
user_config = {}
user_dir = tempfile.mkdtemp(prefix="electrum-lnpeer-test-")
self.config = simple_config.SimpleConfig(user_config, read_user_dir_function=lambda: user_dir)
self.asyncio_loop = asyncio.get_event_loop()
self.channel_db = ChannelDB(self)
self.channel_db.data_loaded.set()
self.path_finder = LNPathFinder(self.channel_db)
self.tx_queue = tx_queue
@property
def callback_lock(self):
return noop_lock()
register_callback = Network.register_callback
unregister_callback = Network.unregister_callback
trigger_callback = Network.trigger_callback
def get_local_height(self):
return 0
async def broadcast_transaction(self, tx):
if self.tx_queue:
await self.tx_queue.put(tx)
async def try_broadcasting(self, tx, name):
self.broadcast_transaction(tx)
class MockWallet:
def set_label(self, x, y):
pass
def save_db(self):
pass
def is_lightning_backup(self):
return False
class MockLNWallet(Logger):
def __init__(self, remote_keypair, local_keypair, chan: 'Channel', tx_queue):
Logger.__init__(self)
self.remote_keypair = remote_keypair
self.node_keypair = local_keypair
self.network = MockNetwork(tx_queue)
self.channels = {chan.channel_id: chan}
self.payments = {}
self.logs = defaultdict(list)
self.wallet = MockWallet()
self.localfeatures = LnLocalFeatures(0)
self.localfeatures |= LnLocalFeatures.OPTION_DATA_LOSS_PROTECT_OPT
self.pending_payments = defaultdict(asyncio.Future)
chan.lnworker = self
chan.node_id = remote_keypair.pubkey
# used in tests
self.enable_htlc_settle = asyncio.Event()
self.enable_htlc_settle.set()
def get_invoice_status(self, key):
pass
@property
def lock(self):
return noop_lock()
@property
def peers(self):
return {self.remote_keypair.pubkey: self.peer}
def channels_for_peer(self, pubkey):
return self.channels
def get_channel_by_short_id(self, short_channel_id):
with self.lock:
for chan in self.channels.values():
if chan.short_channel_id == short_channel_id:
return chan
def save_channel(self, chan):
print("Ignoring channel save")
is_routing = set()
preimages = {}
get_payment_info = LNWallet.get_payment_info
save_payment_info = LNWallet.save_payment_info
set_invoice_status = LNWallet.set_invoice_status
set_payment_status = LNWallet.set_payment_status
get_payment_status = LNWallet.get_payment_status
await_payment = LNWallet.await_payment
payment_received = LNWallet.payment_received
payment_sent = LNWallet.payment_sent
payment_failed = LNWallet.payment_failed
save_preimage = LNWallet.save_preimage
get_preimage = LNWallet.get_preimage
_create_route_from_invoice = LNWallet._create_route_from_invoice
_check_invoice = staticmethod(LNWallet._check_invoice)
_pay_to_route = LNWallet._pay_to_route
_pay = LNWallet._pay
force_close_channel = LNWallet.force_close_channel
try_force_closing = LNWallet.try_force_closing
get_first_timestamp = lambda self: 0
class MockTransport:
def __init__(self, name):
self.queue = asyncio.Queue()
self._name = name
def name(self):
return self._name
async def read_messages(self):
while True:
yield await self.queue.get()
class NoFeaturesTransport(MockTransport):
"""
This answers the init message with a init that doesn't signal any features.
Used for testing that we require DATA_LOSS_PROTECT.
"""
def send_bytes(self, data):
decoded = decode_msg(data)
print(decoded)
if decoded[0] == 'init':
self.queue.put_nowait(encode_msg('init', lflen=1, gflen=1, localfeatures=b"\x00", globalfeatures=b"\x00"))
class PutIntoOthersQueueTransport(MockTransport):
def __init__(self, name):
super().__init__(name)
self.other_mock_transport = None
def send_bytes(self, data):
self.other_mock_transport.queue.put_nowait(data)
def transport_pair(name1, name2):
t1 = PutIntoOthersQueueTransport(name1)
t2 = PutIntoOthersQueueTransport(name2)
t1.other_mock_transport = t2
t2.other_mock_transport = t1
return t1, t2
class TestPeer(ElectrumTestCase):
@classmethod
def setUpClass(cls):
super().setUpClass()
console_stderr_handler.setLevel(logging.DEBUG)
def setUp(self):
super().setUp()
self.asyncio_loop, self._stop_loop, self._loop_thread = create_and_start_event_loop()
def tearDown(self):
super().tearDown()
self.asyncio_loop.call_soon_threadsafe(self._stop_loop.set_result, 1)
self._loop_thread.join(timeout=1)
def prepare_peers(self, alice_channel, bob_channel):
k1, k2 = keypair(), keypair()
t1, t2 = transport_pair(alice_channel.name, bob_channel.name)
q1, q2 = asyncio.Queue(), asyncio.Queue()
w1 = MockLNWallet(k1, k2, alice_channel, tx_queue=q1)
w2 = MockLNWallet(k2, k1, bob_channel, tx_queue=q2)
p1 = Peer(w1, k1.pubkey, t1)
p2 = Peer(w2, k2.pubkey, t2)
w1.peer = p1
w2.peer = p2
# mark_open won't work if state is already OPEN.
# so set it to FUNDED
alice_channel._state = channel_states.FUNDED
bob_channel._state = channel_states.FUNDED
# this populates the channel graph:
p1.mark_open(alice_channel)
p2.mark_open(bob_channel)
return p1, p2, w1, w2, q1, q2
@staticmethod
def prepare_invoice(
w2, # receiver
*,
amount_sat=100_000,
):
amount_btc = amount_sat/Decimal(COIN)
payment_preimage = os.urandom(32)
RHASH = sha256(payment_preimage)
info = PaymentInfo(RHASH, amount_sat, RECEIVED, PR_UNPAID)
w2.save_preimage(RHASH, payment_preimage)
w2.save_payment_info(info)
lnaddr = LnAddr(
RHASH,
amount_btc,
tags=[('c', lnutil.MIN_FINAL_CLTV_EXPIRY_FOR_INVOICE),
('d', 'coffee')
])
return lnencode(lnaddr, w2.node_keypair.privkey)
def test_reestablish(self):
alice_channel, bob_channel = create_test_channels()
p1, p2, w1, w2, _q1, _q2 = self.prepare_peers(alice_channel, bob_channel)
for chan in (alice_channel, bob_channel):
chan.peer_state = peer_states.DISCONNECTED
async def reestablish():
await asyncio.gather(
p1.reestablish_channel(alice_channel),
p2.reestablish_channel(bob_channel))
self.assertEqual(alice_channel.peer_state, peer_states.GOOD)
self.assertEqual(bob_channel.peer_state, peer_states.GOOD)
gath.cancel()
gath = asyncio.gather(reestablish(), p1._message_loop(), p2._message_loop(), p1.htlc_switch(), p1.htlc_switch())
async def f():
await gath
with self.assertRaises(concurrent.futures.CancelledError):
run(f())
@needs_test_with_all_chacha20_implementations
def test_reestablish_with_old_state(self):
alice_channel, bob_channel = create_test_channels()
alice_channel_0, bob_channel_0 = create_test_channels() # these are identical
p1, p2, w1, w2, _q1, _q2 = self.prepare_peers(alice_channel, bob_channel)
pay_req = self.prepare_invoice(w2)
async def pay():
result = await w1._pay(pay_req)
self.assertEqual(result, True)
gath.cancel()
gath = asyncio.gather(pay(), p1._message_loop(), p2._message_loop(), p1.htlc_switch(), p2.htlc_switch())
async def f():
await gath
with self.assertRaises(concurrent.futures.CancelledError):
run(f())
p1, p2, w1, w2, _q1, _q2 = self.prepare_peers(alice_channel_0, bob_channel)
for chan in (alice_channel_0, bob_channel):
chan.peer_state = peer_states.DISCONNECTED
async def reestablish():
await asyncio.gather(
p1.reestablish_channel(alice_channel_0),
p2.reestablish_channel(bob_channel))
self.assertEqual(alice_channel_0.peer_state, peer_states.BAD)
self.assertEqual(bob_channel._state, channel_states.FORCE_CLOSING)
# wait so that pending messages are processed
#await asyncio.sleep(1)
gath.cancel()
gath = asyncio.gather(reestablish(), p1._message_loop(), p2._message_loop(), p1.htlc_switch(), p2.htlc_switch())
async def f():
await gath
with self.assertRaises(concurrent.futures.CancelledError):
run(f())
@needs_test_with_all_chacha20_implementations
def test_payment(self):
alice_channel, bob_channel = create_test_channels()
p1, p2, w1, w2, _q1, _q2 = self.prepare_peers(alice_channel, bob_channel)
pay_req = self.prepare_invoice(w2)
async def pay():
result = await w1._pay(pay_req)
self.assertTrue(result)
gath.cancel()
gath = asyncio.gather(pay(), p1._message_loop(), p2._message_loop(), p1.htlc_switch(), p2.htlc_switch())
async def f():
await gath
with self.assertRaises(concurrent.futures.CancelledError):
run(f())
#@unittest.skip("too expensive")
#@needs_test_with_all_chacha20_implementations
def test_payments_stresstest(self):
alice_channel, bob_channel = create_test_channels()
p1, p2, w1, w2, _q1, _q2 = self.prepare_peers(alice_channel, bob_channel)
alice_init_balance_msat = alice_channel.balance(HTLCOwner.LOCAL)
bob_init_balance_msat = bob_channel.balance(HTLCOwner.LOCAL)
num_payments = 50
#pay_reqs1 = [self.prepare_invoice(w1, amount_sat=1) for i in range(num_payments)]
pay_reqs2 = [self.prepare_invoice(w2, amount_sat=1) for i in range(num_payments)]
max_htlcs_in_flight = asyncio.Semaphore(5)
async def single_payment(pay_req):
async with max_htlcs_in_flight:
await w1._pay(pay_req)
async def many_payments():
async with TaskGroup() as group:
for pay_req in pay_reqs2:
await group.spawn(single_payment(pay_req))
gath.cancel()
gath = asyncio.gather(many_payments(), p1._message_loop(), p2._message_loop(), p1.htlc_switch(), p2.htlc_switch())
async def f():
await gath
with self.assertRaises(concurrent.futures.CancelledError):
run(f())
self.assertEqual(alice_init_balance_msat - num_payments * 1000, alice_channel.balance(HTLCOwner.LOCAL))
self.assertEqual(alice_init_balance_msat - num_payments * 1000, bob_channel.balance(HTLCOwner.REMOTE))
self.assertEqual(bob_init_balance_msat + num_payments * 1000, bob_channel.balance(HTLCOwner.LOCAL))
self.assertEqual(bob_init_balance_msat + num_payments * 1000, alice_channel.balance(HTLCOwner.REMOTE))
@needs_test_with_all_chacha20_implementations
def test_close(self):
alice_channel, bob_channel = create_test_channels()
p1, p2, w1, w2, _q1, _q2 = self.prepare_peers(alice_channel, bob_channel)
w1.network.config.set_key('dynamic_fees', False)
w2.network.config.set_key('dynamic_fees', False)
w1.network.config.set_key('fee_per_kb', 5000)
w2.network.config.set_key('fee_per_kb', 1000)
w2.enable_htlc_settle.clear()
pay_req = self.prepare_invoice(w2)
lnaddr = lndecode(pay_req, expected_hrp=constants.net.SEGWIT_HRP)
async def pay():
await asyncio.wait_for(p1.initialized, 1)
await asyncio.wait_for(p2.initialized, 1)
# alice sends htlc
route = w1._create_route_from_invoice(decoded_invoice=lnaddr)
htlc = p1.pay(route, alice_channel, int(lnaddr.amount * COIN * 1000), lnaddr.paymenthash, lnaddr.get_min_final_cltv_expiry())
# alice closes
await p1.close_channel(alice_channel.channel_id)
gath.cancel()
async def set_settle():
await asyncio.sleep(0.1)
w2.enable_htlc_settle.set()
gath = asyncio.gather(pay(), set_settle(), p1._message_loop(), p2._message_loop(), p1.htlc_switch(), p2.htlc_switch())
async def f():
await gath
with self.assertRaises(concurrent.futures.CancelledError):
run(f())
def test_channel_usage_after_closing(self):
alice_channel, bob_channel = create_test_channels()
p1, p2, w1, w2, q1, q2 = self.prepare_peers(alice_channel, bob_channel)
pay_req = self.prepare_invoice(w2)
addr = w1._check_invoice(pay_req)
route = w1._create_route_from_invoice(decoded_invoice=addr)
run(w1.force_close_channel(alice_channel.channel_id))
# check if a tx (commitment transaction) was broadcasted:
assert q1.qsize() == 1
with self.assertRaises(NoPathFound) as e:
w1._create_route_from_invoice(decoded_invoice=addr)
peer = w1.peers[route[0].node_id]
# AssertionError is ok since we shouldn't use old routes, and the
# route finding should fail when channel is closed
async def f():
await asyncio.gather(w1._pay_to_route(route, addr), p1._message_loop(), p2._message_loop(), p1.htlc_switch(), p2.htlc_switch())
with self.assertRaises(PaymentFailure):
run(f())
def run(coro):
return asyncio.run_coroutine_threadsafe(coro, loop=asyncio.get_event_loop()).result()
| 39.633838 | 139 | 0.680663 | import asyncio
import tempfile
from decimal import Decimal
import os
from contextlib import contextmanager
from collections import defaultdict
import logging
import concurrent
from concurrent import futures
import unittest
from aiorpcx import TaskGroup
from electrum_ltc import constants
from electrum_ltc.network import Network
from electrum_ltc.ecc import ECPrivkey
from electrum_ltc import simple_config, lnutil
from electrum_ltc.lnaddr import lnencode, LnAddr, lndecode
from electrum_ltc.bitcoin import COIN, sha256
from electrum_ltc.util import bh2u, create_and_start_event_loop
from electrum_ltc.lnpeer import Peer
from electrum_ltc.lnutil import LNPeerAddr, Keypair, privkey_to_pubkey
from electrum_ltc.lnutil import LightningPeerConnectionClosed, RemoteMisbehaving
from electrum_ltc.lnutil import PaymentFailure, LnLocalFeatures, HTLCOwner
from electrum_ltc.lnchannel import channel_states, peer_states, Channel
from electrum_ltc.lnrouter import LNPathFinder
from electrum_ltc.channel_db import ChannelDB
from electrum_ltc.lnworker import LNWallet, NoPathFound
from electrum_ltc.lnmsg import encode_msg, decode_msg
from electrum_ltc.logging import console_stderr_handler, Logger
from electrum_ltc.lnworker import PaymentInfo, RECEIVED, PR_UNPAID
from .test_lnchannel import create_test_channels
from .test_bitcoin import needs_test_with_all_chacha20_implementations
from . import ElectrumTestCase
def keypair():
priv = ECPrivkey.generate_random_key().get_secret_bytes()
k1 = Keypair(
pubkey=privkey_to_pubkey(priv),
privkey=priv)
return k1
@contextmanager
def noop_lock():
yield
class MockNetwork:
def __init__(self, tx_queue):
self.callbacks = defaultdict(list)
self.lnwatcher = None
self.interface = None
user_config = {}
user_dir = tempfile.mkdtemp(prefix="electrum-lnpeer-test-")
self.config = simple_config.SimpleConfig(user_config, read_user_dir_function=lambda: user_dir)
self.asyncio_loop = asyncio.get_event_loop()
self.channel_db = ChannelDB(self)
self.channel_db.data_loaded.set()
self.path_finder = LNPathFinder(self.channel_db)
self.tx_queue = tx_queue
@property
def callback_lock(self):
return noop_lock()
register_callback = Network.register_callback
unregister_callback = Network.unregister_callback
trigger_callback = Network.trigger_callback
def get_local_height(self):
return 0
async def broadcast_transaction(self, tx):
if self.tx_queue:
await self.tx_queue.put(tx)
async def try_broadcasting(self, tx, name):
self.broadcast_transaction(tx)
class MockWallet:
def set_label(self, x, y):
pass
def save_db(self):
pass
def is_lightning_backup(self):
return False
class MockLNWallet(Logger):
def __init__(self, remote_keypair, local_keypair, chan: 'Channel', tx_queue):
Logger.__init__(self)
self.remote_keypair = remote_keypair
self.node_keypair = local_keypair
self.network = MockNetwork(tx_queue)
self.channels = {chan.channel_id: chan}
self.payments = {}
self.logs = defaultdict(list)
self.wallet = MockWallet()
self.localfeatures = LnLocalFeatures(0)
self.localfeatures |= LnLocalFeatures.OPTION_DATA_LOSS_PROTECT_OPT
self.pending_payments = defaultdict(asyncio.Future)
chan.lnworker = self
chan.node_id = remote_keypair.pubkey
self.enable_htlc_settle = asyncio.Event()
self.enable_htlc_settle.set()
def get_invoice_status(self, key):
pass
@property
def lock(self):
return noop_lock()
@property
def peers(self):
return {self.remote_keypair.pubkey: self.peer}
def channels_for_peer(self, pubkey):
return self.channels
def get_channel_by_short_id(self, short_channel_id):
with self.lock:
for chan in self.channels.values():
if chan.short_channel_id == short_channel_id:
return chan
def save_channel(self, chan):
print("Ignoring channel save")
is_routing = set()
preimages = {}
get_payment_info = LNWallet.get_payment_info
save_payment_info = LNWallet.save_payment_info
set_invoice_status = LNWallet.set_invoice_status
set_payment_status = LNWallet.set_payment_status
get_payment_status = LNWallet.get_payment_status
await_payment = LNWallet.await_payment
payment_received = LNWallet.payment_received
payment_sent = LNWallet.payment_sent
payment_failed = LNWallet.payment_failed
save_preimage = LNWallet.save_preimage
get_preimage = LNWallet.get_preimage
_create_route_from_invoice = LNWallet._create_route_from_invoice
_check_invoice = staticmethod(LNWallet._check_invoice)
_pay_to_route = LNWallet._pay_to_route
_pay = LNWallet._pay
force_close_channel = LNWallet.force_close_channel
try_force_closing = LNWallet.try_force_closing
get_first_timestamp = lambda self: 0
class MockTransport:
def __init__(self, name):
self.queue = asyncio.Queue()
self._name = name
def name(self):
return self._name
async def read_messages(self):
while True:
yield await self.queue.get()
class NoFeaturesTransport(MockTransport):
def send_bytes(self, data):
decoded = decode_msg(data)
print(decoded)
if decoded[0] == 'init':
self.queue.put_nowait(encode_msg('init', lflen=1, gflen=1, localfeatures=b"\x00", globalfeatures=b"\x00"))
class PutIntoOthersQueueTransport(MockTransport):
def __init__(self, name):
super().__init__(name)
self.other_mock_transport = None
def send_bytes(self, data):
self.other_mock_transport.queue.put_nowait(data)
def transport_pair(name1, name2):
t1 = PutIntoOthersQueueTransport(name1)
t2 = PutIntoOthersQueueTransport(name2)
t1.other_mock_transport = t2
t2.other_mock_transport = t1
return t1, t2
class TestPeer(ElectrumTestCase):
@classmethod
def setUpClass(cls):
super().setUpClass()
console_stderr_handler.setLevel(logging.DEBUG)
def setUp(self):
super().setUp()
self.asyncio_loop, self._stop_loop, self._loop_thread = create_and_start_event_loop()
def tearDown(self):
super().tearDown()
self.asyncio_loop.call_soon_threadsafe(self._stop_loop.set_result, 1)
self._loop_thread.join(timeout=1)
def prepare_peers(self, alice_channel, bob_channel):
k1, k2 = keypair(), keypair()
t1, t2 = transport_pair(alice_channel.name, bob_channel.name)
q1, q2 = asyncio.Queue(), asyncio.Queue()
w1 = MockLNWallet(k1, k2, alice_channel, tx_queue=q1)
w2 = MockLNWallet(k2, k1, bob_channel, tx_queue=q2)
p1 = Peer(w1, k1.pubkey, t1)
p2 = Peer(w2, k2.pubkey, t2)
w1.peer = p1
w2.peer = p2
# so set it to FUNDED
alice_channel._state = channel_states.FUNDED
bob_channel._state = channel_states.FUNDED
# this populates the channel graph:
p1.mark_open(alice_channel)
p2.mark_open(bob_channel)
return p1, p2, w1, w2, q1, q2
@staticmethod
def prepare_invoice(
w2, # receiver
*,
amount_sat=100_000,
):
amount_btc = amount_sat/Decimal(COIN)
payment_preimage = os.urandom(32)
RHASH = sha256(payment_preimage)
info = PaymentInfo(RHASH, amount_sat, RECEIVED, PR_UNPAID)
w2.save_preimage(RHASH, payment_preimage)
w2.save_payment_info(info)
lnaddr = LnAddr(
RHASH,
amount_btc,
tags=[('c', lnutil.MIN_FINAL_CLTV_EXPIRY_FOR_INVOICE),
('d', 'coffee')
])
return lnencode(lnaddr, w2.node_keypair.privkey)
def test_reestablish(self):
alice_channel, bob_channel = create_test_channels()
p1, p2, w1, w2, _q1, _q2 = self.prepare_peers(alice_channel, bob_channel)
for chan in (alice_channel, bob_channel):
chan.peer_state = peer_states.DISCONNECTED
async def reestablish():
await asyncio.gather(
p1.reestablish_channel(alice_channel),
p2.reestablish_channel(bob_channel))
self.assertEqual(alice_channel.peer_state, peer_states.GOOD)
self.assertEqual(bob_channel.peer_state, peer_states.GOOD)
gath.cancel()
gath = asyncio.gather(reestablish(), p1._message_loop(), p2._message_loop(), p1.htlc_switch(), p1.htlc_switch())
async def f():
await gath
with self.assertRaises(concurrent.futures.CancelledError):
run(f())
@needs_test_with_all_chacha20_implementations
def test_reestablish_with_old_state(self):
alice_channel, bob_channel = create_test_channels()
alice_channel_0, bob_channel_0 = create_test_channels() # these are identical
p1, p2, w1, w2, _q1, _q2 = self.prepare_peers(alice_channel, bob_channel)
pay_req = self.prepare_invoice(w2)
async def pay():
result = await w1._pay(pay_req)
self.assertEqual(result, True)
gath.cancel()
gath = asyncio.gather(pay(), p1._message_loop(), p2._message_loop(), p1.htlc_switch(), p2.htlc_switch())
async def f():
await gath
with self.assertRaises(concurrent.futures.CancelledError):
run(f())
p1, p2, w1, w2, _q1, _q2 = self.prepare_peers(alice_channel_0, bob_channel)
for chan in (alice_channel_0, bob_channel):
chan.peer_state = peer_states.DISCONNECTED
async def reestablish():
await asyncio.gather(
p1.reestablish_channel(alice_channel_0),
p2.reestablish_channel(bob_channel))
self.assertEqual(alice_channel_0.peer_state, peer_states.BAD)
self.assertEqual(bob_channel._state, channel_states.FORCE_CLOSING)
# wait so that pending messages are processed
#await asyncio.sleep(1)
gath.cancel()
gath = asyncio.gather(reestablish(), p1._message_loop(), p2._message_loop(), p1.htlc_switch(), p2.htlc_switch())
async def f():
await gath
with self.assertRaises(concurrent.futures.CancelledError):
run(f())
@needs_test_with_all_chacha20_implementations
def test_payment(self):
alice_channel, bob_channel = create_test_channels()
p1, p2, w1, w2, _q1, _q2 = self.prepare_peers(alice_channel, bob_channel)
pay_req = self.prepare_invoice(w2)
async def pay():
result = await w1._pay(pay_req)
self.assertTrue(result)
gath.cancel()
gath = asyncio.gather(pay(), p1._message_loop(), p2._message_loop(), p1.htlc_switch(), p2.htlc_switch())
async def f():
await gath
with self.assertRaises(concurrent.futures.CancelledError):
run(f())
    #@unittest.skip("too expensive")
    #@needs_test_with_all_chacha20_implementations
    def test_payments_stresstest(self):
        """Many concurrent 1-sat payments; both parties' balances must add up afterwards."""
        alice_channel, bob_channel = create_test_channels()
        p1, p2, w1, w2, _q1, _q2 = self.prepare_peers(alice_channel, bob_channel)
        alice_init_balance_msat = alice_channel.balance(HTLCOwner.LOCAL)
        bob_init_balance_msat = bob_channel.balance(HTLCOwner.LOCAL)
        num_payments = 50
        #pay_reqs1 = [self.prepare_invoice(w1, amount_sat=1) for i in range(num_payments)]
        pay_reqs2 = [self.prepare_invoice(w2, amount_sat=1) for i in range(num_payments)]
        # cap the number of concurrently in-flight HTLCs so the channel is not exhausted
        max_htlcs_in_flight = asyncio.Semaphore(5)
        async def single_payment(pay_req):
            async with max_htlcs_in_flight:
                await w1._pay(pay_req)
        async def many_payments():
            async with TaskGroup() as group:
                for pay_req in pay_reqs2:
                    await group.spawn(single_payment(pay_req))
            # all payments settled -> stop the message loops (late-bound `gath`)
            gath.cancel()
        gath = asyncio.gather(many_payments(), p1._message_loop(), p2._message_loop(), p1.htlc_switch(), p2.htlc_switch())
        async def f():
            await gath
        with self.assertRaises(concurrent.futures.CancelledError):
            run(f())
        # 1000 msat (= 1 sat) moved per payment, as seen from both sides of the channel
        self.assertEqual(alice_init_balance_msat - num_payments * 1000, alice_channel.balance(HTLCOwner.LOCAL))
        self.assertEqual(alice_init_balance_msat - num_payments * 1000, bob_channel.balance(HTLCOwner.REMOTE))
        self.assertEqual(bob_init_balance_msat + num_payments * 1000, bob_channel.balance(HTLCOwner.LOCAL))
        self.assertEqual(bob_init_balance_msat + num_payments * 1000, alice_channel.balance(HTLCOwner.REMOTE))
    @needs_test_with_all_chacha20_implementations
    def test_close(self):
        """Channel close initiated while an HTLC is still pending settlement."""
        alice_channel, bob_channel = create_test_channels()
        p1, p2, w1, w2, _q1, _q2 = self.prepare_peers(alice_channel, bob_channel)
        w1.network.config.set_key('dynamic_fees', False)
        w2.network.config.set_key('dynamic_fees', False)
        # deliberately mismatched fee rates: closing then requires fee negotiation
        w1.network.config.set_key('fee_per_kb', 5000)
        w2.network.config.set_key('fee_per_kb', 1000)
        # hold back Bob's settlement so the HTLC is still in flight when closing starts
        w2.enable_htlc_settle.clear()
        pay_req = self.prepare_invoice(w2)
        lnaddr = lndecode(pay_req, expected_hrp=constants.net.SEGWIT_HRP)
        async def pay():
            await asyncio.wait_for(p1.initialized, 1)
            await asyncio.wait_for(p2.initialized, 1)
            # alice sends htlc
            route = w1._create_route_from_invoice(decoded_invoice=lnaddr)
            htlc = p1.pay(route, alice_channel, int(lnaddr.amount * COIN * 1000), lnaddr.paymenthash, lnaddr.get_min_final_cltv_expiry())
            # alice closes
            await p1.close_channel(alice_channel.channel_id)
            gath.cancel()
        async def set_settle():
            # re-enable settlement shortly after, while the close is in progress
            await asyncio.sleep(0.1)
            w2.enable_htlc_settle.set()
        gath = asyncio.gather(pay(), set_settle(), p1._message_loop(), p2._message_loop(), p1.htlc_switch(), p2.htlc_switch())
        async def f():
            await gath
        with self.assertRaises(concurrent.futures.CancelledError):
            run(f())
    def test_channel_usage_after_closing(self):
        """After a force-close, the channel must no longer be usable for routing or paying."""
        alice_channel, bob_channel = create_test_channels()
        p1, p2, w1, w2, q1, q2 = self.prepare_peers(alice_channel, bob_channel)
        pay_req = self.prepare_invoice(w2)
        addr = w1._check_invoice(pay_req)
        route = w1._create_route_from_invoice(decoded_invoice=addr)
        run(w1.force_close_channel(alice_channel.channel_id))
        # check if a tx (commitment transaction) was broadcasted:
        assert q1.qsize() == 1
        # route-finding must now fail: the only channel is gone
        with self.assertRaises(NoPathFound) as e:
            w1._create_route_from_invoice(decoded_invoice=addr)
        peer = w1.peers[route[0].node_id]
        # AssertionError is ok since we shouldn't use old routes, and the
        # payment over the stale route obtained above must fail as well
        async def f():
            await asyncio.gather(w1._pay_to_route(route, addr), p1._message_loop(), p2._message_loop(), p1.htlc_switch(), p2.htlc_switch())
        with self.assertRaises(PaymentFailure):
            run(f())
def run(coro):
    """Block until *coro* completes on the current thread's event loop and return its result."""
    loop = asyncio.get_event_loop()
    future = asyncio.run_coroutine_threadsafe(coro, loop=loop)
    return future.result()
| true | true |
f72ad123a16de5b88d83b7f0efe6887a58556b76 | 1,491 | py | Python | examples/map_view_simple_example.py | TomSchimansky/TkinterMapView | eb84f600e9b6bb8c60d88149e277b3abee704a70 | [
"CC0-1.0"
] | 43 | 2022-01-02T04:23:28.000Z | 2022-03-30T03:04:03.000Z | examples/map_view_simple_example.py | TomSchimansky/TkinterMapView | eb84f600e9b6bb8c60d88149e277b3abee704a70 | [
"CC0-1.0"
] | 6 | 2022-02-24T09:19:35.000Z | 2022-03-24T18:32:22.000Z | examples/map_view_simple_example.py | TomSchimansky/TkinterMapView | eb84f600e9b6bb8c60d88149e277b3abee704a70 | [
"CC0-1.0"
] | 4 | 2022-01-03T16:49:04.000Z | 2022-03-21T09:25:44.000Z | import tkinter
import tkintermapview
# create tkinter window
root_tk = tkinter.Tk()
root_tk.geometry(f"{1000}x{700}")
root_tk.title("map_view_simple_example.py")
# create map widget filling the whole window
map_widget = tkintermapview.TkinterMapView(root_tk, width=1000, height=700, corner_radius=0)
map_widget.pack(fill="both", expand=True)
# set other tile server (standard is OpenStreetMap)
# map_widget.set_tile_server("https://mt0.google.com/vt/lyrs=m&hl=en&x={x}&y={y}&z={z}&s=Ga", max_zoom=22)  # google normal
# map_widget.set_tile_server("https://mt0.google.com/vt/lyrs=s&hl=en&x={x}&y={y}&z={z}&s=Ga", max_zoom=22)  # google satellite
# set current position and zoom by coordinates
# map_widget.set_position(52.516268, 13.377695, marker=False)  # Berlin, Germany
# map_widget.set_zoom(17)
# set current position with address (geocoded)
# map_widget.set_address("Berlin Germany", marker=False)
def marker_click(marker):
    """Callback invoked when a map marker is clicked; print its label and coordinates."""
    info = f"marker clicked - text: {marker.text} position: {marker.position}"
    print(info)
# set a position marker (also with a custom color and command on click)
marker_2 = map_widget.set_marker(52.516268, 13.377695, text="Brandenburger Tor", command=marker_click)
marker_3 = map_widget.set_marker(52.55, 13.4, text="52.55, 13.4")
# markers can later be moved, relabelled or removed:
# marker_3.set_position(...)
# marker_3.set_text(...)
# marker_3.delete()
# set a path (polyline) connecting the two markers and two extra coordinates
path_1 = map_widget.set_path([marker_2.position, marker_3.position, (52.568, 13.4), (52.569, 13.35)])
# path_1.add_position(...)
# path_1.remove_position(...)
# path_1.delete()
root_tk.mainloop()
| 36.365854 | 126 | 0.739772 | import tkinter
import tkintermapview
root_tk = tkinter.Tk()
root_tk.geometry(f"{1000}x{700}")
root_tk.title("map_view_simple_example.py")
map_widget = tkintermapview.TkinterMapView(root_tk, width=1000, height=700, corner_radius=0)
map_widget.pack(fill="both", expand=True)
rker clicked - text: {marker.text} position: {marker.position}")
marker_2 = map_widget.set_marker(52.516268, 13.377695, text="Brandenburger Tor", command=marker_click)
marker_3 = map_widget.set_marker(52.55, 13.4, text="52.55, 13.4")
path_1 = map_widget.set_path([marker_2.position, marker_3.position, (52.568, 13.4), (52.569, 13.35)])
root_tk.mainloop()
| true | true |
f72ad17de09166bbcef6aaac4ff6b283c77049fa | 2,206 | py | Python | retrieve_response.py | kit-data-manager/gemma | 0ae4e64f966b389c7e7c5619c8fd09bef78c8c87 | [
"Apache-2.0"
] | null | null | null | retrieve_response.py | kit-data-manager/gemma | 0ae4e64f966b389c7e7c5619c8fd09bef78c8c87 | [
"Apache-2.0"
] | null | null | null | retrieve_response.py | kit-data-manager/gemma | 0ae4e64f966b389c7e7c5619c8fd09bef78c8c87 | [
"Apache-2.0"
] | null | null | null | import http.client
import os
import json
import wget
import mapping_functions
import pprint
import sys
HOST = 'episteme2.scc.kit.edu'
PORT = '8080'
URL = os.path.join('http://' + HOST + ':' + PORT, 'api/v1/dataresources')
output_folder = sys.argv[1]
payload = "{\n \t\"resourceType\": {\n \t\t\"typeGeneral\":\"TEXT\"\n \t}\n}"
headers = {'Content-Type': "application/json", 'cache-control': "no-cache"}
size = 20
page = 0
def http_call(TYPE, host=HOST, port=PORT, endpoint='', search='', query='', payload='', headers=None):
    """Issue an HTTP request against the data-resources API and return the parsed JSON body.

    :param TYPE: HTTP verb ('POST', 'GET', 'PUT', 'PATCH', 'DELETE')
    :param host: server host name
    :param port: server port
    :param endpoint: path appended to the base URL
    :param search: optional search segment appended to the endpoint
    :param query: optional query segment appended to the endpoint
    :param payload: request body sent to the server
    :param headers: HTTP headers; defaults to an empty dict
        (was a mutable default argument, fixed to the None-sentinel idiom)
    :return: the decoded JSON response
    """
    check_http_method(TYPE)
    if headers is None:
        headers = {}
    if search != '' or query != '':
        endpoint = os.path.join(endpoint, search + query)
    url = os.path.join(URL, endpoint)
    print('URL: ', url)
    conn = http.client.HTTPConnection(host, port)
    try:
        conn.request(TYPE, url, payload, headers)
        res = conn.getresponse()
        data = json.loads(res.read().decode('utf-8'))
    finally:
        # the original leaked the connection; always release the socket
        conn.close()
    return data
def check_http_method(method):
    """Warn (via print) when *method* is not a supported HTTP verb.

    :param method: HTTP verb to validate
    :raises TypeError: if *method* is not a string (the original used a bare
        ``assert``, which is silently stripped under ``python -O``)
    :return: None
    """
    if not isinstance(method, str):
        raise TypeError('method must be a string')
    # renamed from `list`, which shadowed the builtin
    allowed = ('POST', 'GET', 'PUT', 'PATCH', 'DELETE')
    if method not in allowed:
        print("{} not allowed. Use: 'POST', 'GET', 'PUT', 'PATCH', 'DELETE'".format(method))
    return
def download_file(file_id, extention='xml'):
    """Download the metadata document of *file_id* into the module-level ``output_folder``.

    :param file_id: identifier of the data resource on the server
    :param extention: extension of the metadata document to fetch
        (parameter name kept misspelled to preserve the keyword interface)
    """
    # server-side path of the metadata document inside the resource
    endpoint = 'data/manuscript_metadata.' + extention
    url = os.path.join(URL, file_id, endpoint)
    # local file is named after the resource id
    output_file = file_id + "." + extention
    wget.download(url, os.path.join(output_folder, output_file))
# Page through all matching data resources and download their metadata documents.
while True:
    retrieve = 'search?size=' + str(size) + '&page=' + str(page)
    data = http_call('POST', search=retrieve, payload=payload, headers=headers)
    print('{} results at page {}'.format(len(data), page))
    if len(data) == 0:
        # an empty page means we are past the last result
        break
    for resourse in data:
        manuscript_id = resourse['id']
        print("manuscript id: {}".format(manuscript_id))
        if resourse['state'] == "REVOKED":
            # revoked resources are reported and skipped
            print("Status of resource {} is {}".format(resourse, resourse['state']))
            continue
        assert(resourse['resourceType']['value'] == 'manuscriptMetadata'), "resourceType is not manuscriptMetadata"
        download_file(manuscript_id, 'json')
    if len(data) == size:
        # a full page may be followed by more results; fetch the next one
        page += 1
    else:
        break
| 30.638889 | 115 | 0.629193 | import http.client
import os
import json
import wget
import mapping_functions
import pprint
import sys
HOST = 'episteme2.scc.kit.edu'
PORT = '8080'
URL = os.path.join('http://' + HOST + ':' + PORT, 'api/v1/dataresources')
output_folder = sys.argv[1]
payload = "{\n \t\"resourceType\": {\n \t\t\"typeGeneral\":\"TEXT\"\n \t}\n}"
headers = {'Content-Type': "application/json", 'cache-control': "no-cache"}
size = 20
page = 0
def http_call(TYPE, host=HOST, port=PORT, endpoint='', search='', query='', payload='', headers={}):
check_http_method(TYPE)
conn = http.client.HTTPConnection(host, port)
if search != '' or query != '':
endpoint = os.path.join(endpoint, search + query)
url = os.path.join(URL, endpoint)
print('URL: ', url)
conn.request(TYPE, url, payload, headers)
res = conn.getresponse()
data = json.loads(res.read().decode('utf-8'))
return data
def check_http_method(method):
assert(isinstance(method, str)), 'method must be a string'
list = ['POST', 'GET', 'PUT', 'PATCH', 'DELETE']
if method not in list:
print("{} not allowed. Use: 'POST', 'GET', 'PUT', 'PATCH', 'DELETE'".format(method))
return
def download_file(file_id, extention='xml'):
endpoint = 'data/manuscript_metadata.' + extention
url = os.path.join(URL, file_id, endpoint)
output_file = file_id + "." + extention
wget.download(url, os.path.join(output_folder, output_file))
while True:
retrieve = 'search?size=' + str(size) + '&page=' + str(page)
data = http_call('POST', search=retrieve, payload=payload, headers=headers)
print('{} results at page {}'.format(len(data), page))
if len(data) == 0:
break
for resourse in data:
manuscript_id = resourse['id']
print("manuscript id: {}".format(manuscript_id))
if resourse['state'] == "REVOKED":
print("Status of resource {} is {}".format(resourse, resourse['state']))
continue
assert(resourse['resourceType']['value'] == 'manuscriptMetadata'), "resourceType is not manuscriptMetadata"
download_file(manuscript_id, 'json')
if len(data) == size:
page += 1
else:
break
| true | true |
f72ad2f82bf260bd112b090bded6d3c5ba2e8a43 | 1,180 | py | Python | profiles_api/serializers.py | Atique-7/drf-genesis | a333564d285885c7661e3324d5503488d9ced6ae | [
"MIT"
] | null | null | null | profiles_api/serializers.py | Atique-7/drf-genesis | a333564d285885c7661e3324d5503488d9ced6ae | [
"MIT"
] | null | null | null | profiles_api/serializers.py | Atique-7/drf-genesis | a333564d285885c7661e3324d5503488d9ced6ae | [
"MIT"
] | null | null | null | from rest_framework import serializers
from profiles_api import models
class UserProfileSerializer(serializers.ModelSerializer):
    """Serializes a user profile object."""
    class Meta:
        model = models.UserProfile
        fields = ('id', 'name', 'email', 'password')
        # The password must never be echoed back to clients; render it as a
        # password input in the browsable API.
        extra_kwargs = {
            'password': {'write_only': True, 'style': {'input_type': 'password'}},
        }
    # Override create() so create_user() is used (hashes the password)
    # instead of the default implementation.
    def create(self, validated_data):
        """Create and return a new user."""
        return models.UserProfile.objects.create_user(
            email=validated_data['email'],
            name=validated_data['name'],
            password=validated_data['password'],
        )
class ProfileFeedItemSerializer(serializers.ModelSerializer):
    """Serializes profile feed items."""
    class Meta:
        model = models.ProfileFeedItem
        fields = ('id', 'user_profile', 'status_text', 'created_on')
        # The owning profile is set server-side from the authenticated
        # request, so clients may read it but never write it.
        extra_kwargs = {'user_profile': {'read_only': True}}
from profiles_api import models
class UserProfileSerializer(serializers.ModelSerializer):
class Meta:
model = models.UserProfile
fields = ('id', 'name', 'email', 'password')
extra_kwargs = {
'password' : {
'write_only' : True,
'style' : {
'input_type' : 'password'
}
}
}
def create(self, validated_data):
user = models.UserProfile.objects.create_user(
email = validated_data['email'],
name = validated_data['name'],
password = validated_data['password'] )
return user
class ProfileFeedItemSerializer(serializers.ModelSerializer):
class Meta:
model = models.ProfileFeedItem
fields = ('id', 'user_profile', 'status_text', 'created_on')
extra_kwargs = {
'user_profile' : {
'read_only' : True
}
} | true | true |
f72ad439a6e7cf5dac1b087074d4ee471a260a4b | 52 | py | Python | tests/python/overload1.py | jacereda/py2nim | 56fc2699d31241c60bed726f59efea4bf46be238 | [
"MIT"
] | 10 | 2020-03-10T12:01:01.000Z | 2021-05-23T19:47:06.000Z | tests/python/overload1.py | jacereda/py2nim | 56fc2699d31241c60bed726f59efea4bf46be238 | [
"MIT"
] | null | null | null | tests/python/overload1.py | jacereda/py2nim | 56fc2699d31241c60bed726f59efea4bf46be238 | [
"MIT"
] | 1 | 2020-07-17T11:20:56.000Z | 2020-07-17T11:20:56.000Z | def a(z, b):
print(z + b)
a(0, 0.0)
a('e', '')
| 8.666667 | 16 | 0.365385 | def a(z, b):
print(z + b)
a(0, 0.0)
a('e', '')
| true | true |
f72ad5b39fcaee399cd011abf25e5fda0c0342a6 | 24,914 | py | Python | jina/flow/mixin/async_crud.py | liushuigs/jina | b3550e901b2a340924330b5ba2801603e493c933 | [
"Apache-2.0"
] | null | null | null | jina/flow/mixin/async_crud.py | liushuigs/jina | b3550e901b2a340924330b5ba2801603e493c933 | [
"Apache-2.0"
] | 2 | 2021-02-15T01:40:38.000Z | 2021-02-15T02:00:21.000Z | jina/flow/mixin/async_crud.py | liushuigs/jina | b3550e901b2a340924330b5ba2801603e493c933 | [
"Apache-2.0"
] | null | null | null | import warnings
from typing import Union, Iterable, TextIO, Dict, Optional
import numpy as np
from ...clients.base import InputType, CallbackFnType
from ...enums import DataInputType
from ...helper import deprecated_alias
class AsyncCRUDFlowMixin:
"""The asynchronous version of the Mixin for CRUD in Flow"""
@deprecated_alias(
input_fn=('inputs', 0),
buffer=('inputs', 1),
callback=('on_done', 1),
output_fn=('on_done', 1),
)
async def train(
self,
inputs: InputType,
on_done: CallbackFnType = None,
on_error: CallbackFnType = None,
on_always: CallbackFnType = None,
**kwargs,
):
"""Do training on the current Flow
:param inputs: An iterator of bytes. If not given, then you have to specify it in **kwargs**.
:param on_done: the function to be called when the :class:`Request` object is resolved.
:param on_error: the function to be called when the :class:`Request` object is rejected.
:param on_always: the function to be called when the :class:`Request` object is is either resolved or rejected.
:param kwargs: accepts all keyword arguments of `jina client` CLI
:yields: results
"""
warnings.warn(f'{self.train} is under heavy refactoring', FutureWarning)
async for r in self._get_client(**kwargs).train(
inputs, on_done, on_error, on_always, **kwargs
):
yield r
@deprecated_alias(
input_fn=('inputs', 0),
buffer=('inputs', 1),
callback=('on_done', 1),
output_fn=('on_done', 1),
)
async def index_ndarray(
self,
array: 'np.ndarray',
axis: int = 0,
size: Optional[int] = None,
shuffle: bool = False,
on_done: CallbackFnType = None,
on_error: CallbackFnType = None,
on_always: CallbackFnType = None,
**kwargs,
):
"""Using numpy ndarray as the index source for the current Flow
:param array: the numpy ndarray data source
:param axis: iterate over that axis
:param size: the maximum number of the sub arrays
:param shuffle: shuffle the the numpy data source beforehand
:param on_done: the function to be called when the :class:`Request` object is resolved.
:param on_error: the function to be called when the :class:`Request` object is rejected.
:param on_always: the function to be called when the :class:`Request` object is is either resolved or rejected.
:param kwargs: accepts all keyword arguments of `jina client` CLI
:yields: results
"""
from ...clients.sugary_io import _input_ndarray
async for r in self._get_client(**kwargs).index(
_input_ndarray(array, axis, size, shuffle),
on_done,
on_error,
on_always,
data_type=DataInputType.CONTENT,
**kwargs,
):
yield r
@deprecated_alias(
input_fn=('inputs', 0),
buffer=('inputs', 1),
callback=('on_done', 1),
output_fn=('on_done', 1),
)
async def search_ndarray(
self,
array: 'np.ndarray',
axis: int = 0,
size: Optional[int] = None,
shuffle: bool = False,
on_done: CallbackFnType = None,
on_error: CallbackFnType = None,
on_always: CallbackFnType = None,
**kwargs,
):
"""Use a numpy ndarray as the query source for searching on the current Flow
:param array: the numpy ndarray data source
:param axis: iterate over that axis
:param size: the maximum number of the sub arrays
:param shuffle: shuffle the the numpy data source beforehand
:param on_done: the function to be called when the :class:`Request` object is resolved.
:param on_error: the function to be called when the :class:`Request` object is rejected.
:param on_always: the function to be called when the :class:`Request` object is is either resolved or rejected.
:param kwargs: accepts all keyword arguments of `jina client` CLI
:yields: results
"""
from ...clients.sugary_io import _input_ndarray
async for r in self._get_client(**kwargs).search(
_input_ndarray(array, axis, size, shuffle),
on_done,
on_error,
on_always,
data_type=DataInputType.CONTENT,
**kwargs,
):
yield r
@deprecated_alias(
input_fn=('inputs', 0),
buffer=('inputs', 1),
callback=('on_done', 1),
output_fn=('on_done', 1),
)
async def index_lines(
self,
lines: Optional[Union[Iterable[str], TextIO]] = None,
filepath: Optional[str] = None,
size: Optional[int] = None,
sampling_rate: Optional[float] = None,
read_mode: str = 'r',
line_format: str = 'json',
field_resolver: Optional[Dict[str, str]] = None,
on_done: CallbackFnType = None,
on_error: CallbackFnType = None,
on_always: CallbackFnType = None,
**kwargs,
):
"""Use a list of lines as the index source for indexing on the current Flow
:param lines: a list of strings, each is considered as d document
:param filepath: a text file that each line contains a document
:param size: the maximum number of the documents
:param sampling_rate: the sampling rate between [0, 1]
:param read_mode: specifies the mode in which the file
is opened. 'r' for reading in text mode, 'rb' for reading in binary
:param line_format: the format of each line: ``json`` or ``csv``
:param field_resolver: a map from field names defined in ``document`` (JSON, dict) to the field
names defined in Protobuf. This is only used when the given ``document`` is
a JSON string or a Python dict.
:param on_done: the function to be called when the :class:`Request` object is resolved.
:param on_error: the function to be called when the :class:`Request` object is rejected.
:param on_always: the function to be called when the :class:`Request` object is is either resolved or rejected.
:param kwargs: accepts all keyword arguments of `jina client` CLI
:yields: results
"""
from ...clients.sugary_io import _input_lines
async for r in self._get_client(**kwargs).index(
_input_lines(
lines,
filepath,
size=size,
sampling_rate=sampling_rate,
read_mode=read_mode,
line_format=line_format,
field_resolver=field_resolver,
),
on_done,
on_error,
on_always,
data_type=DataInputType.AUTO,
**kwargs,
):
yield r
async def index_csv(
self,
lines: Union[Iterable[str], TextIO],
field_resolver: Dict[str, str] = None,
size: Optional[int] = None,
sampling_rate: Optional[float] = None,
on_done: CallbackFnType = None,
on_error: CallbackFnType = None,
on_always: CallbackFnType = None,
**kwargs,
):
"""Use a list of lines as the index source for indexing on the current Flow
:param lines: a list of strings, each is considered as d document
:param size: the maximum number of the documents
:param sampling_rate: the sampling rate between [0, 1]
:param field_resolver: a map from field names defined in ``document`` (JSON, dict) to the field
names defined in Protobuf. This is only used when the given ``document`` is
a JSON string or a Python dict.
:param on_done: the function to be called when the :class:`Request` object is resolved.
:param on_error: the function to be called when the :class:`Request` object is rejected.
:param on_always: the function to be called when the :class:`Request` object is is either resolved or rejected.
:param kwargs: accepts all keyword arguments of `jina client` CLI
:yields: results
"""
from ...clients.sugary_io import _input_csv
async for r in self._get_client(**kwargs).index(
_input_csv(
lines,
size=size,
sampling_rate=sampling_rate,
field_resolver=field_resolver,
),
on_done,
on_error,
on_always,
data_type=DataInputType.AUTO,
**kwargs,
):
yield r
async def index_ndjson(
self,
lines: Union[Iterable[str], TextIO],
field_resolver: Optional[Dict[str, str]] = None,
size: Optional[int] = None,
sampling_rate: Optional[float] = None,
on_done: CallbackFnType = None,
on_error: CallbackFnType = None,
on_always: CallbackFnType = None,
**kwargs,
):
"""Use a list of lines as the index source for indexing on the current Flow
:param lines: a list of strings, each is considered as d document
:param size: the maximum number of the documents
:param sampling_rate: the sampling rate between [0, 1]
:param field_resolver: a map from field names defined in ``document`` (JSON, dict) to the field
names defined in Protobuf. This is only used when the given ``document`` is
a JSON string or a Python dict.
:param on_done: the function to be called when the :class:`Request` object is resolved.
:param on_error: the function to be called when the :class:`Request` object is rejected.
:param on_always: the function to be called when the :class:`Request` object is is either resolved or rejected.
:param kwargs: accepts all keyword arguments of `jina client` CLI
:yields: results
"""
from ...clients.sugary_io import _input_ndjson
async for r in self._get_client(**kwargs).index(
_input_ndjson(
lines,
size=size,
sampling_rate=sampling_rate,
field_resolver=field_resolver,
),
on_done,
on_error,
on_always,
data_type=DataInputType.AUTO,
**kwargs,
):
yield r
@deprecated_alias(
input_fn=('inputs', 0),
buffer=('inputs', 1),
callback=('on_done', 1),
output_fn=('on_done', 1),
)
async def index_files(
self,
patterns: Union[str, Iterable[str]],
recursive: bool = True,
size: Optional[int] = None,
sampling_rate: Optional[float] = None,
read_mode: Optional[str] = None,
on_done: CallbackFnType = None,
on_error: CallbackFnType = None,
on_always: CallbackFnType = None,
**kwargs,
):
"""Use a set of files as the index source for indexing on the current Flow
:param patterns: The pattern may contain simple shell-style wildcards, e.g. '\*.py', '[\*.zip, \*.gz]'
:param recursive: If recursive is true, the pattern '**' will match any files and
zero or more directories and subdirectories.
:param size: the maximum number of the files
:param sampling_rate: the sampling rate between [0, 1]
:param read_mode: specifies the mode in which the file
is opened. 'r' for reading in text mode, 'rb' for reading in binary mode
:param on_done: the function to be called when the :class:`Request` object is resolved.
:param on_error: the function to be called when the :class:`Request` object is rejected.
:param on_always: the function to be called when the :class:`Request` object is is either resolved or rejected.
:param kwargs: accepts all keyword arguments of `jina client` CLI
:yields: results
"""
from ...clients.sugary_io import _input_files
async for r in self._get_client(**kwargs).index(
_input_files(patterns, recursive, size, sampling_rate, read_mode),
on_done,
on_error,
on_always,
data_type=DataInputType.CONTENT,
**kwargs,
):
yield r
@deprecated_alias(
input_fn=('inputs', 0),
buffer=('inputs', 1),
callback=('on_done', 1),
output_fn=('on_done', 1),
)
async def search_files(
self,
patterns: Union[str, Iterable[str]],
recursive: bool = True,
size: Optional[int] = None,
sampling_rate: Optional[float] = None,
read_mode: Optional[str] = None,
on_done: CallbackFnType = None,
on_error: CallbackFnType = None,
on_always: CallbackFnType = None,
**kwargs,
):
"""Use a set of files as the query source for searching on the current Flow
:param patterns: The pattern may contain simple shell-style wildcards, e.g. '\*.py', '[\*.zip, \*.gz]'
:param recursive: If recursive is true, the pattern '**' will match any files and
zero or more directories and subdirectories.
:param size: the maximum number of the files
:param sampling_rate: the sampling rate between [0, 1]
:param read_mode: specifies the mode in which the file
is opened. 'r' for reading in text mode, 'rb' for reading in
:param on_done: the function to be called when the :class:`Request` object is resolved.
:param on_error: the function to be called when the :class:`Request` object is rejected.
:param on_always: the function to be called when the :class:`Request` object is is either resolved or rejected.
:param kwargs: accepts all keyword arguments of `jina client` CLI
:yields: results
"""
from ...clients.sugary_io import _input_files
async for r in self._get_client(**kwargs).search(
_input_files(patterns, recursive, size, sampling_rate, read_mode),
on_done,
on_error,
on_always,
data_type=DataInputType.CONTENT,
**kwargs,
):
yield r
async def search_ndjson(
self,
lines: Union[Iterable[str], TextIO],
field_resolver: Optional[Dict[str, str]] = None,
size: Optional[int] = None,
sampling_rate: Optional[float] = None,
on_done: CallbackFnType = None,
on_error: CallbackFnType = None,
on_always: CallbackFnType = None,
**kwargs,
):
"""Use a list of files as the query source for searching on the current Flow
:param lines: a list of strings, each is considered as d document
:param size: the maximum number of the documents
:param sampling_rate: the sampling rate between [0, 1]
:param field_resolver: a map from field names defined in ``document`` (JSON, dict) to the field
names defined in Protobuf. This is only used when the given ``document`` is
a JSON string or a Python dict.
:param on_done: the function to be called when the :class:`Request` object is resolved.
:param on_error: the function to be called when the :class:`Request` object is rejected.
:param on_always: the function to be called when the :class:`Request` object is is either resolved or rejected.
:param kwargs: accepts all keyword arguments of `jina client` CLI
:yields: results
"""
from ...clients.sugary_io import _input_ndjson
async for r in self._get_client(**kwargs).search(
_input_ndjson(
lines,
size=size,
sampling_rate=sampling_rate,
field_resolver=field_resolver,
),
on_done,
on_error,
on_always,
data_type=DataInputType.AUTO,
**kwargs,
):
yield r
async def search_csv(
self,
lines: Union[Iterable[str], TextIO],
field_resolver: Optional[Dict[str, str]] = None,
size: Optional[int] = None,
sampling_rate: Optional[float] = None,
on_done: CallbackFnType = None,
on_error: CallbackFnType = None,
on_always: CallbackFnType = None,
**kwargs,
):
"""Use a list of lines as the index source for indexing on the current Flow
:param lines: a list of strings, each is considered as d document
:param size: the maximum number of the documents
:param sampling_rate: the sampling rate between [0, 1]
:param field_resolver: a map from field names defined in ``document`` (JSON, dict) to the field
names defined in Protobuf. This is only used when the given ``document`` is
a JSON string or a Python dict.
:param on_done: the function to be called when the :class:`Request` object is resolved.
:param on_error: the function to be called when the :class:`Request` object is rejected.
:param on_always: the function to be called when the :class:`Request` object is is either resolved or rejected.
:param kwargs: accepts all keyword arguments of `jina client` CLI
:yields: results
"""
from ...clients.sugary_io import _input_csv
async for r in self._get_client(**kwargs).search(
_input_csv(
lines,
size=size,
sampling_rate=sampling_rate,
field_resolver=field_resolver,
),
on_done,
on_error,
on_always,
data_type=DataInputType.AUTO,
**kwargs,
):
yield r
@deprecated_alias(
input_fn=('inputs', 0),
buffer=('inputs', 1),
callback=('on_done', 1),
output_fn=('on_done', 1),
)
async def search_lines(
self,
lines: Optional[Union[Iterable[str], TextIO]] = None,
filepath: Optional[str] = None,
size: Optional[int] = None,
sampling_rate: Optional[float] = None,
read_mode: str = 'r',
line_format: str = 'json',
field_resolver: Optional[Dict[str, str]] = None,
on_done: CallbackFnType = None,
on_error: CallbackFnType = None,
on_always: CallbackFnType = None,
**kwargs,
):
"""Use a list of files as the query source for searching on the current Flow
:param filepath: a text file that each line contains a document
:param lines: a list of strings, each is considered as d document
:param size: the maximum number of the documents
:param sampling_rate: the sampling rate between [0, 1]
:param read_mode: specifies the mode in which the file
is opened. 'r' for reading in text mode, 'rb' for reading in binary
:param line_format: the format of each line: ``json`` or ``csv``
:param field_resolver: a map from field names defined in ``document`` (JSON, dict) to the field
names defined in Protobuf. This is only used when the given ``document`` is
a JSON string or a Python dict.
:param on_done: the function to be called when the :class:`Request` object is resolved.
:param on_error: the function to be called when the :class:`Request` object is rejected.
:param on_always: the function to be called when the :class:`Request` object is is either resolved or rejected.
:param kwargs: accepts all keyword arguments of `jina client` CLI
:yields: results
"""
from ...clients.sugary_io import _input_lines
async for r in self._get_client(**kwargs).search(
_input_lines(
lines,
filepath,
size=size,
sampling_rate=sampling_rate,
read_mode=read_mode,
line_format=line_format,
field_resolver=field_resolver,
),
on_done,
on_error,
on_always,
data_type=DataInputType.CONTENT,
**kwargs,
):
yield r
@deprecated_alias(
input_fn=('inputs', 0),
buffer=('inputs', 1),
callback=('on_done', 1),
output_fn=('on_done', 1),
)
async def index(
self,
inputs: InputType,
on_done: CallbackFnType = None,
on_error: CallbackFnType = None,
on_always: CallbackFnType = None,
**kwargs,
):
"""Do indexing on the current Flow
It will start a :py:class:`CLIClient` and call :py:func:`index`.
:param inputs: An iterator of bytes. If not given, then you have to specify it in **kwargs**.
:param on_done: the function to be called when the :class:`Request` object is resolved.
:param on_error: the function to be called when the :class:`Request` object is rejected.
:param on_always: the function to be called when the :class:`Request` object is is either resolved or rejected.
:param kwargs: accepts all keyword arguments of `jina client` CLI
:yields: results
"""
async for r in self._get_client(**kwargs).index(
inputs, on_done, on_error, on_always, **kwargs
):
yield r
@deprecated_alias(
input_fn=('inputs', 0),
buffer=('inputs', 1),
callback=('on_done', 1),
output_fn=('on_done', 1),
)
async def update(
self,
inputs: InputType,
on_done: CallbackFnType = None,
on_error: CallbackFnType = None,
on_always: CallbackFnType = None,
**kwargs,
):
"""Do updates on the current Flow
It will start a :py:class:`CLIClient` and call :py:func:`index`.
:param inputs: An iterator of bytes. If not given, then you have to specify it in **kwargs**.
:param on_done: the function to be called when the :class:`Request` object is resolved.
:param on_error: the function to be called when the :class:`Request` object is rejected.
:param on_always: the function to be called when the :class:`Request` object is is either resolved or rejected.
:param kwargs: accepts all keyword arguments of `jina client` CLI
:yields: results
"""
async for r in self._get_client(**kwargs).update(
inputs, on_done, on_error, on_always, **kwargs
):
yield r
    @deprecated_alias(
        input_fn=('inputs', 0),
        buffer=('inputs', 1),
        callback=('on_done', 1),
        output_fn=('on_done', 1),
    )
    async def delete(
        self,
        ids: Iterable[str],
        on_done: CallbackFnType = None,
        on_error: CallbackFnType = None,
        on_always: CallbackFnType = None,
        **kwargs,
    ):
        """Do deletion on the current Flow
        :param ids: An iterable of ids
        :param on_done: the function to be called when the :class:`Request` object is resolved.
        :param on_error: the function to be called when the :class:`Request` object is rejected.
        :param on_always: the function to be called when the :class:`Request` object is either resolved or rejected.
        :param kwargs: accepts all keyword arguments of `jina client` CLI
        :yields: results
        """
        # Delegate to an async client bound to this Flow and re-yield its results.
        async for r in self._get_client(**kwargs).delete(
            ids, on_done, on_error, on_always, **kwargs
        ):
            yield r
    @deprecated_alias(
        input_fn=('inputs', 0),
        buffer=('inputs', 1),
        callback=('on_done', 1),
        output_fn=('on_done', 1),
    )
    async def search(
        self,
        inputs: InputType,
        on_done: CallbackFnType = None,
        on_error: CallbackFnType = None,
        on_always: CallbackFnType = None,
        **kwargs,
    ):
        """Do searching on the current Flow
        It will start a :py:class:`CLIClient` and call :py:func:`search`.
        :param inputs: An iterator of bytes. If not given, then you have to specify it in **kwargs**.
        :param on_done: the function to be called when the :class:`Request` object is resolved.
        :param on_error: the function to be called when the :class:`Request` object is rejected.
        :param on_always: the function to be called when the :class:`Request` object is either resolved or rejected.
        :param kwargs: accepts all keyword arguments of `jina client` CLI
        :yields: results
        """
        # Delegate to an async client bound to this Flow and re-yield its results.
        async for r in self._get_client(**kwargs).search(
            inputs, on_done, on_error, on_always, **kwargs
        ):
            yield r
| 40.70915 | 120 | 0.603837 | import warnings
from typing import Union, Iterable, TextIO, Dict, Optional
import numpy as np
from ...clients.base import InputType, CallbackFnType
from ...enums import DataInputType
from ...helper import deprecated_alias
class AsyncCRUDFlowMixin:
@deprecated_alias(
input_fn=('inputs', 0),
buffer=('inputs', 1),
callback=('on_done', 1),
output_fn=('on_done', 1),
)
async def train(
self,
inputs: InputType,
on_done: CallbackFnType = None,
on_error: CallbackFnType = None,
on_always: CallbackFnType = None,
**kwargs,
):
warnings.warn(f'{self.train} is under heavy refactoring', FutureWarning)
async for r in self._get_client(**kwargs).train(
inputs, on_done, on_error, on_always, **kwargs
):
yield r
@deprecated_alias(
input_fn=('inputs', 0),
buffer=('inputs', 1),
callback=('on_done', 1),
output_fn=('on_done', 1),
)
async def index_ndarray(
self,
array: 'np.ndarray',
axis: int = 0,
size: Optional[int] = None,
shuffle: bool = False,
on_done: CallbackFnType = None,
on_error: CallbackFnType = None,
on_always: CallbackFnType = None,
**kwargs,
):
from ...clients.sugary_io import _input_ndarray
async for r in self._get_client(**kwargs).index(
_input_ndarray(array, axis, size, shuffle),
on_done,
on_error,
on_always,
data_type=DataInputType.CONTENT,
**kwargs,
):
yield r
@deprecated_alias(
input_fn=('inputs', 0),
buffer=('inputs', 1),
callback=('on_done', 1),
output_fn=('on_done', 1),
)
async def search_ndarray(
self,
array: 'np.ndarray',
axis: int = 0,
size: Optional[int] = None,
shuffle: bool = False,
on_done: CallbackFnType = None,
on_error: CallbackFnType = None,
on_always: CallbackFnType = None,
**kwargs,
):
from ...clients.sugary_io import _input_ndarray
async for r in self._get_client(**kwargs).search(
_input_ndarray(array, axis, size, shuffle),
on_done,
on_error,
on_always,
data_type=DataInputType.CONTENT,
**kwargs,
):
yield r
@deprecated_alias(
input_fn=('inputs', 0),
buffer=('inputs', 1),
callback=('on_done', 1),
output_fn=('on_done', 1),
)
async def index_lines(
self,
lines: Optional[Union[Iterable[str], TextIO]] = None,
filepath: Optional[str] = None,
size: Optional[int] = None,
sampling_rate: Optional[float] = None,
read_mode: str = 'r',
line_format: str = 'json',
field_resolver: Optional[Dict[str, str]] = None,
on_done: CallbackFnType = None,
on_error: CallbackFnType = None,
on_always: CallbackFnType = None,
**kwargs,
):
from ...clients.sugary_io import _input_lines
async for r in self._get_client(**kwargs).index(
_input_lines(
lines,
filepath,
size=size,
sampling_rate=sampling_rate,
read_mode=read_mode,
line_format=line_format,
field_resolver=field_resolver,
),
on_done,
on_error,
on_always,
data_type=DataInputType.AUTO,
**kwargs,
):
yield r
async def index_csv(
self,
lines: Union[Iterable[str], TextIO],
field_resolver: Dict[str, str] = None,
size: Optional[int] = None,
sampling_rate: Optional[float] = None,
on_done: CallbackFnType = None,
on_error: CallbackFnType = None,
on_always: CallbackFnType = None,
**kwargs,
):
from ...clients.sugary_io import _input_csv
async for r in self._get_client(**kwargs).index(
_input_csv(
lines,
size=size,
sampling_rate=sampling_rate,
field_resolver=field_resolver,
),
on_done,
on_error,
on_always,
data_type=DataInputType.AUTO,
**kwargs,
):
yield r
async def index_ndjson(
self,
lines: Union[Iterable[str], TextIO],
field_resolver: Optional[Dict[str, str]] = None,
size: Optional[int] = None,
sampling_rate: Optional[float] = None,
on_done: CallbackFnType = None,
on_error: CallbackFnType = None,
on_always: CallbackFnType = None,
**kwargs,
):
from ...clients.sugary_io import _input_ndjson
async for r in self._get_client(**kwargs).index(
_input_ndjson(
lines,
size=size,
sampling_rate=sampling_rate,
field_resolver=field_resolver,
),
on_done,
on_error,
on_always,
data_type=DataInputType.AUTO,
**kwargs,
):
yield r
@deprecated_alias(
input_fn=('inputs', 0),
buffer=('inputs', 1),
callback=('on_done', 1),
output_fn=('on_done', 1),
)
async def index_files(
self,
patterns: Union[str, Iterable[str]],
recursive: bool = True,
size: Optional[int] = None,
sampling_rate: Optional[float] = None,
read_mode: Optional[str] = None,
on_done: CallbackFnType = None,
on_error: CallbackFnType = None,
on_always: CallbackFnType = None,
**kwargs,
):
from ...clients.sugary_io import _input_files
async for r in self._get_client(**kwargs).index(
_input_files(patterns, recursive, size, sampling_rate, read_mode),
on_done,
on_error,
on_always,
data_type=DataInputType.CONTENT,
**kwargs,
):
yield r
@deprecated_alias(
input_fn=('inputs', 0),
buffer=('inputs', 1),
callback=('on_done', 1),
output_fn=('on_done', 1),
)
async def search_files(
self,
patterns: Union[str, Iterable[str]],
recursive: bool = True,
size: Optional[int] = None,
sampling_rate: Optional[float] = None,
read_mode: Optional[str] = None,
on_done: CallbackFnType = None,
on_error: CallbackFnType = None,
on_always: CallbackFnType = None,
**kwargs,
):
from ...clients.sugary_io import _input_files
async for r in self._get_client(**kwargs).search(
_input_files(patterns, recursive, size, sampling_rate, read_mode),
on_done,
on_error,
on_always,
data_type=DataInputType.CONTENT,
**kwargs,
):
yield r
async def search_ndjson(
self,
lines: Union[Iterable[str], TextIO],
field_resolver: Optional[Dict[str, str]] = None,
size: Optional[int] = None,
sampling_rate: Optional[float] = None,
on_done: CallbackFnType = None,
on_error: CallbackFnType = None,
on_always: CallbackFnType = None,
**kwargs,
):
from ...clients.sugary_io import _input_ndjson
async for r in self._get_client(**kwargs).search(
_input_ndjson(
lines,
size=size,
sampling_rate=sampling_rate,
field_resolver=field_resolver,
),
on_done,
on_error,
on_always,
data_type=DataInputType.AUTO,
**kwargs,
):
yield r
async def search_csv(
self,
lines: Union[Iterable[str], TextIO],
field_resolver: Optional[Dict[str, str]] = None,
size: Optional[int] = None,
sampling_rate: Optional[float] = None,
on_done: CallbackFnType = None,
on_error: CallbackFnType = None,
on_always: CallbackFnType = None,
**kwargs,
):
from ...clients.sugary_io import _input_csv
async for r in self._get_client(**kwargs).search(
_input_csv(
lines,
size=size,
sampling_rate=sampling_rate,
field_resolver=field_resolver,
),
on_done,
on_error,
on_always,
data_type=DataInputType.AUTO,
**kwargs,
):
yield r
@deprecated_alias(
input_fn=('inputs', 0),
buffer=('inputs', 1),
callback=('on_done', 1),
output_fn=('on_done', 1),
)
async def search_lines(
self,
lines: Optional[Union[Iterable[str], TextIO]] = None,
filepath: Optional[str] = None,
size: Optional[int] = None,
sampling_rate: Optional[float] = None,
read_mode: str = 'r',
line_format: str = 'json',
field_resolver: Optional[Dict[str, str]] = None,
on_done: CallbackFnType = None,
on_error: CallbackFnType = None,
on_always: CallbackFnType = None,
**kwargs,
):
from ...clients.sugary_io import _input_lines
async for r in self._get_client(**kwargs).search(
_input_lines(
lines,
filepath,
size=size,
sampling_rate=sampling_rate,
read_mode=read_mode,
line_format=line_format,
field_resolver=field_resolver,
),
on_done,
on_error,
on_always,
data_type=DataInputType.CONTENT,
**kwargs,
):
yield r
@deprecated_alias(
input_fn=('inputs', 0),
buffer=('inputs', 1),
callback=('on_done', 1),
output_fn=('on_done', 1),
)
async def index(
self,
inputs: InputType,
on_done: CallbackFnType = None,
on_error: CallbackFnType = None,
on_always: CallbackFnType = None,
**kwargs,
):
async for r in self._get_client(**kwargs).index(
inputs, on_done, on_error, on_always, **kwargs
):
yield r
@deprecated_alias(
input_fn=('inputs', 0),
buffer=('inputs', 1),
callback=('on_done', 1),
output_fn=('on_done', 1),
)
async def update(
self,
inputs: InputType,
on_done: CallbackFnType = None,
on_error: CallbackFnType = None,
on_always: CallbackFnType = None,
**kwargs,
):
async for r in self._get_client(**kwargs).update(
inputs, on_done, on_error, on_always, **kwargs
):
yield r
@deprecated_alias(
input_fn=('inputs', 0),
buffer=('inputs', 1),
callback=('on_done', 1),
output_fn=('on_done', 1),
)
async def delete(
self,
ids: Iterable[str],
on_done: CallbackFnType = None,
on_error: CallbackFnType = None,
on_always: CallbackFnType = None,
**kwargs,
):
async for r in self._get_client(**kwargs).delete(
ids, on_done, on_error, on_always, **kwargs
):
yield r
@deprecated_alias(
input_fn=('inputs', 0),
buffer=('inputs', 1),
callback=('on_done', 1),
output_fn=('on_done', 1),
)
async def search(
self,
inputs: InputType,
on_done: CallbackFnType = None,
on_error: CallbackFnType = None,
on_always: CallbackFnType = None,
**kwargs,
):
async for r in self._get_client(**kwargs).search(
inputs, on_done, on_error, on_always, **kwargs
):
yield r
| true | true |
f72ad5bc7ad2d8fb6d61ac7005b04ae01a495d56 | 1,629 | py | Python | packages/tool_util/tests/test_tool_linters.py | lawrence14701/galaxy | 7eb2fcb708e7b63e17800c87613ddfa5497c0654 | [
"CC-BY-3.0"
] | 2 | 2017-03-28T12:11:41.000Z | 2017-04-22T02:58:25.000Z | packages/tool_util/tests/test_tool_linters.py | lawrence14701/galaxy | 7eb2fcb708e7b63e17800c87613ddfa5497c0654 | [
"CC-BY-3.0"
] | 12 | 2020-07-24T23:55:19.000Z | 2021-12-19T11:40:06.000Z | packages/tool_util/tests/test_tool_linters.py | lawrence14701/galaxy | 7eb2fcb708e7b63e17800c87613ddfa5497c0654 | [
"CC-BY-3.0"
] | 1 | 2019-01-16T22:21:54.000Z | 2019-01-16T22:21:54.000Z | import pytest
from galaxy.tool_util.lint import LintContext
from galaxy.tool_util.linters import inputs
from galaxy.util import etree
NO_SECTIONS_XML = """
<tool name="BWA Mapper" id="bwa" version="1.0.1" is_multi_byte="true" display_interface="true" require_login="true" hidden="true">
<description>The BWA Mapper</description>
<version_command interpreter="python">bwa.py --version</version_command>
</tool>
"""
NO_WHEN_IN_CONDITIONAL_XML = """
<tool name="BWA Mapper" id="bwa" version="1.0.1" is_multi_byte="true" display_interface="true" require_login="true" hidden="true">
<description>The BWA Mapper</description>
<version_command interpreter="python">bwa.py --version</version_command>
<inputs>
<conditional name="labels">
<param name="label_select" type="select" label="Points to label">
<option value="none" selected="True">None</option>
</param>
</conditional>
</inputs>
</tool>
"""
TESTS = [
(NO_SECTIONS_XML, inputs.lint_inputs, lambda x: 'Found no input parameters.' in x.warn_messages),
(NO_WHEN_IN_CONDITIONAL_XML, inputs.lint_inputs, lambda x: 'No <when /> block found for select option \'none\' inside conditional \'labels\'' in x.warn_messages),
]
@pytest.mark.parametrize('tool_xml,lint_func,assert_func', TESTS, ids=['Lint no sections', 'lint no when'])
def test_tool_xml(tool_xml, lint_func, assert_func):
    """Run ``lint_func`` over the parsed ``tool_xml`` and check the collected lint messages.

    :param tool_xml: tool XML document (string) to lint
    :param lint_func: linter callable under test
    :param assert_func: predicate applied to the populated ``LintContext``
    """
    lint_ctx = LintContext('all')
    tree = etree.ElementTree(element=etree.fromstring(tool_xml))
    lint_ctx.lint(name="test_lint", lint_func=lint_func, lint_target=tree)
    assert assert_func(lint_ctx)
| 39.731707 | 166 | 0.715163 | import pytest
from galaxy.tool_util.lint import LintContext
from galaxy.tool_util.linters import inputs
from galaxy.util import etree
NO_SECTIONS_XML = """
<tool name="BWA Mapper" id="bwa" version="1.0.1" is_multi_byte="true" display_interface="true" require_login="true" hidden="true">
<description>The BWA Mapper</description>
<version_command interpreter="python">bwa.py --version</version_command>
</tool>
"""
NO_WHEN_IN_CONDITIONAL_XML = """
<tool name="BWA Mapper" id="bwa" version="1.0.1" is_multi_byte="true" display_interface="true" require_login="true" hidden="true">
<description>The BWA Mapper</description>
<version_command interpreter="python">bwa.py --version</version_command>
<inputs>
<conditional name="labels">
<param name="label_select" type="select" label="Points to label">
<option value="none" selected="True">None</option>
</param>
</conditional>
</inputs>
</tool>
"""
TESTS = [
(NO_SECTIONS_XML, inputs.lint_inputs, lambda x: 'Found no input parameters.' in x.warn_messages),
(NO_WHEN_IN_CONDITIONAL_XML, inputs.lint_inputs, lambda x: 'No <when /> block found for select option \'none\' inside conditional \'labels\'' in x.warn_messages),
]
@pytest.mark.parametrize('tool_xml,lint_func,assert_func', TESTS, ids=['Lint no sections', 'lint no when'])
def test_tool_xml(tool_xml, lint_func, assert_func):
lint_ctx = LintContext('all')
tree = etree.ElementTree(element=etree.fromstring(tool_xml))
lint_ctx.lint(name="test_lint", lint_func=lint_func, lint_target=tree)
assert assert_func(lint_ctx)
| true | true |
f72ad5f44335464611bcb3461699a32b7602d505 | 7,802 | py | Python | virtual/lib/python3.6/site-packages/PIL/PsdImagePlugin.py | Ruterana/clone_instagram | a068587ef1d1a93ec8d1c08086bf11c0fb274b83 | [
"MIT"
] | 99 | 2019-10-09T16:14:46.000Z | 2022-03-17T02:23:47.000Z | virtual/lib/python3.6/site-packages/PIL/PsdImagePlugin.py | Ruterana/clone_instagram | a068587ef1d1a93ec8d1c08086bf11c0fb274b83 | [
"MIT"
] | 123 | 2019-09-10T14:48:01.000Z | 2019-11-28T21:24:06.000Z | virtual/lib/python3.6/site-packages/PIL/PsdImagePlugin.py | Ruterana/clone_instagram | a068587ef1d1a93ec8d1c08086bf11c0fb274b83 | [
"MIT"
] | 98 | 2019-10-17T14:48:28.000Z | 2022-01-21T03:33:38.000Z | #
# The Python Imaging Library
# $Id$
#
# Adobe PSD 2.5/3.0 file handling
#
# History:
# 1995-09-01 fl Created
# 1997-01-03 fl Read most PSD images
# 1997-01-18 fl Fixed P and CMYK support
# 2001-10-21 fl Added seek/tell support (for layers)
#
# Copyright (c) 1997-2001 by Secret Labs AB.
# Copyright (c) 1995-2001 by Fredrik Lundh
#
# See the README file for information on usage and redistribution.
#
# __version__ is deprecated and will be removed in a future version. Use
# PIL.__version__ instead.
__version__ = "0.4"
import io
from . import Image, ImageFile, ImagePalette
from ._binary import i8, i16be as i16, i32be as i32
# Map (photoshop color mode, bit depth) -> (PIL mode, required channel count).
# Mode numbers follow the PSD specification: 0=bitmap, 1=grayscale, 2=indexed,
# 3=RGB, 4=CMYK, 7=multichannel, 8=duotone, 9=Lab.
MODES = {
    # (photoshop mode, bits) -> (pil mode, required channels)
    (0, 1): ("1", 1),
    (0, 8): ("L", 1),
    (1, 8): ("L", 1),
    (2, 8): ("P", 1),
    (3, 8): ("RGB", 3),
    (4, 8): ("CMYK", 4),
    (7, 8): ("L", 1),  # FIXME: multilayer
    (8, 8): ("L", 1),  # duotone
    (9, 8): ("LAB", 3),
}
# --------------------------------------------------------------------.
# read PSD images
def _accept(prefix):
return prefix[:4] == b"8BPS"
##
# Image plugin for Photoshop images.
class PsdImageFile(ImageFile.ImageFile):
    """Image plugin for Adobe Photoshop (PSD) 2.5/3.0 files.

    The merged composite is frame 1; individual layers (if any) are exposed
    as additional "frames" through :meth:`seek`/:meth:`tell`.
    """

    format = "PSD"
    format_description = "Adobe Photoshop"
    # Keep the fp open after load(): it is still needed to read layer data.
    _close_exclusive_fp_after_loading = False

    def _open(self):
        """Parse the header and the color-mode, resource and layer sections."""
        read = self.fp.read
        #
        # header: magic, version, channels, height, width, depth, color mode
        s = read(26)
        if s[:4] != b"8BPS" or i16(s[4:]) != 1:
            raise SyntaxError("not a PSD file")
        psd_bits = i16(s[22:])      # bits per channel
        psd_channels = i16(s[12:])  # channels stored in the file
        psd_mode = i16(s[24:])      # Photoshop color mode
        mode, channels = MODES[(psd_mode, psd_bits)]
        if channels > psd_channels:
            raise IOError("not enough channels")
        self.mode = mode
        # Height (offset 14) is stored before width (offset 18) in the header.
        self._size = i32(s[18:]), i32(s[14:])
        #
        # color mode data (only used here for indexed-color palettes)
        size = i32(read(4))
        if size:
            data = read(size)
            if mode == "P" and size == 768:
                self.palette = ImagePalette.raw("RGB;L", data)
        #
        # image resources
        self.resources = []
        size = i32(read(4))
        if size:
            # load resources
            end = self.fp.tell() + size
            while self.fp.tell() < end:
                read(4)  # signature
                id = i16(read(2))
                name = read(i8(read(1)))
                if not (len(name) & 1):
                    read(1)  # padding -- name is padded to an even byte count
                data = read(i32(read(4)))
                if len(data) & 1:
                    read(1)  # padding -- data is padded to an even byte count
                self.resources.append((id, name, data))
                if id == 1039:  # ICC profile
                    self.info["icc_profile"] = data
        #
        # layer and mask information
        self.layers = []
        size = i32(read(4))
        if size:
            end = self.fp.tell() + size
            size = i32(read(4))
            if size:
                self.layers = _layerinfo(self.fp)
            self.fp.seek(end)
        #
        # image descriptor (tiles for the merged composite)
        self.tile = _maketile(self.fp, mode, (0, 0) + self.size, channels)
        # keep the file open so that seek() can read layer data later
        self.__fp = self.fp
        self.frame = 1
        self._min_frame = 1

    @property
    def n_frames(self):
        # One "frame" per layer.
        return len(self.layers)

    @property
    def is_animated(self):
        return len(self.layers) > 1

    def seek(self, layer):
        """Select layer number *layer* (1..n_frames) as the current frame."""
        if not self._seek_check(layer):
            return
        # seek to given layer (1..max)
        try:
            name, mode, bbox, tile = self.layers[layer - 1]
            self.mode = mode
            self.tile = tile
            self.frame = layer
            self.fp = self.__fp
            return name, bbox
        except IndexError:
            raise EOFError("no such layer")

    def tell(self):
        # return layer number (0=image, 1..max=layers)
        return self.frame

    def load_prepare(self):
        # create image memory if necessary
        if not self.im or self.im.mode != self.mode or self.im.size != self.size:
            self.im = Image.core.fill(self.mode, self.size, 0)
        # create palette (optional)
        if self.mode == "P":
            Image.Image.load(self)

    def _close__fp(self):
        """Close the retained file pointer when it differs from the active one."""
        try:
            if self.__fp != self.fp:
                self.__fp.close()
        except AttributeError:
            pass
        finally:
            self.__fp = None
def _layerinfo(file):
    """Read the PSD layer-info block from *file*.

    Returns a list of ``(name, mode, bbox, tile)`` tuples, one per layer,
    where ``bbox`` is ``(x0, y0, x1, y1)`` and ``tile`` is a PIL tile list
    produced by :func:`_maketile`.
    """
    # read layerinfo block
    layers = []
    read = file.read
    for i in range(abs(i16(read(2)))):
        # bounding box -- stored in top, left, bottom, right order
        y0 = i32(read(4))
        x0 = i32(read(4))
        y1 = i32(read(4))
        x1 = i32(read(4))
        # image info
        info = []
        mode = []
        types = list(range(i16(read(2))))
        if len(types) > 4:
            # more than 4 channels is unsupported; skip this layer record
            continue
        for i in types:
            type = i16(read(2))
            if type == 65535:
                m = "A"  # 65535 is channel id -1, the transparency channel
            else:
                m = "RGBA"[type]
            mode.append(m)
            size = i32(read(4))
            info.append((m, size))
        # figure out the image mode from the set of channels present
        mode.sort()
        if mode == ["R"]:
            mode = "L"
        elif mode == ["B", "G", "R"]:
            mode = "RGB"
        elif mode == ["A", "B", "G", "R"]:
            mode = "RGBA"
        else:
            mode = None  # unknown
        # skip over blend flags and extra information
        read(12)  # filler
        name = ""
        size = i32(read(4))  # length of the extra data field
        combined = 0
        if size:
            data_end = file.tell() + size
            # first extra sub-block (presumably layer mask data; the -16
            # adjustment predates this review -- verify against the PSD spec)
            length = i32(read(4))
            if length:
                file.seek(length - 16, io.SEEK_CUR)
                combined += length + 4
            # second extra sub-block (presumably layer blending ranges)
            length = i32(read(4))
            if length:
                file.seek(length, io.SEEK_CUR)
                combined += length + 4
            # layer name, stored as a Pascal string
            length = i8(read(1))
            if length:
                # Don't know the proper encoding,
                # Latin-1 should be a good guess
                name = read(length).decode("latin-1", "replace")
                combined += length + 1
            file.seek(data_end)
        layers.append((name, mode, (x0, y0, x1, y1)))
    # get tiles: channel data follows the layer records, one channel at a time
    i = 0
    for name, mode, bbox in layers:
        tile = []
        for m in mode:
            t = _maketile(file, m, bbox, 1)
            if t:
                tile.extend(t)
        layers[i] = name, mode, bbox, tile
        i += 1
    return layers
def _maketile(file, mode, bbox, channels):
    """Build a PIL tile descriptor list for *channels* channels starting at
    the current position of *file*.

    Supports compression method 0 (raw) and 1 (PackBits RLE); any other
    method returns ``None``.  The file is left positioned just past the
    channel data (plus a pad byte, if needed to reach an even offset).
    """
    tile = None
    read = file.read
    compression = i16(read(2))
    xsize = bbox[2] - bbox[0]
    ysize = bbox[3] - bbox[1]
    offset = file.tell()
    if compression == 0:
        #
        # raw compression: channels are stored back to back, xsize*ysize each
        tile = []
        for channel in range(channels):
            layer = mode[channel]
            if mode == "CMYK":
                layer += ";I"  # CMYK samples are stored inverted
            tile.append(("raw", bbox, offset, layer))
            offset = offset + xsize * ysize
    elif compression == 1:
        #
        # packbits compression: a per-row byte-count table precedes the data
        i = 0
        tile = []
        bytecount = read(channels * ysize * 2)
        offset = file.tell()
        for channel in range(channels):
            layer = mode[channel]
            if mode == "CMYK":
                layer += ";I"  # CMYK samples are stored inverted
            tile.append(("packbits", bbox, offset, layer))
            for y in range(ysize):
                # advance past this row's compressed bytes
                offset = offset + i16(bytecount[i : i + 2])
                i += 2
    file.seek(offset)
    if offset & 1:
        read(1)  # padding to an even offset
    return tile
# --------------------------------------------------------------------
# registry
Image.register_open(PsdImageFile.format, PsdImageFile, _accept)
Image.register_extension(PsdImageFile.format, ".psd")
| 24.38125 | 81 | 0.481671 |
__version__ = "0.4"
import io
from . import Image, ImageFile, ImagePalette
from ._binary import i8, i16be as i16, i32be as i32
MODES = {
(0, 1): ("1", 1),
(0, 8): ("L", 1),
(1, 8): ("L", 1),
(2, 8): ("P", 1),
(3, 8): ("RGB", 3),
(4, 8): ("CMYK", 4),
(7, 8): ("L", 1),
(8, 8): ("L", 1),
(9, 8): ("LAB", 3),
}
def _accept(prefix):
return prefix[:4] == b"8BPS"
class PsdImageFile(ImageFile.ImageFile):
format = "PSD"
format_description = "Adobe Photoshop"
_close_exclusive_fp_after_loading = False
def _open(self):
read = self.fp.read
s = read(26)
if s[:4] != b"8BPS" or i16(s[4:]) != 1:
raise SyntaxError("not a PSD file")
psd_bits = i16(s[22:])
psd_channels = i16(s[12:])
psd_mode = i16(s[24:])
mode, channels = MODES[(psd_mode, psd_bits)]
if channels > psd_channels:
raise IOError("not enough channels")
self.mode = mode
self._size = i32(s[18:]), i32(s[14:])
size = i32(read(4))
if size:
data = read(size)
if mode == "P" and size == 768:
self.palette = ImagePalette.raw("RGB;L", data)
self.resources = []
size = i32(read(4))
if size:
end = self.fp.tell() + size
while self.fp.tell() < end:
read(4)
id = i16(read(2))
name = read(i8(read(1)))
if not (len(name) & 1):
read(1)
data = read(i32(read(4)))
if len(data) & 1:
read(1)
self.resources.append((id, name, data))
if id == 1039:
self.info["icc_profile"] = data
self.layers = []
size = i32(read(4))
if size:
end = self.fp.tell() + size
size = i32(read(4))
if size:
self.layers = _layerinfo(self.fp)
self.fp.seek(end)
self.tile = _maketile(self.fp, mode, (0, 0) + self.size, channels)
self.__fp = self.fp
self.frame = 1
self._min_frame = 1
@property
def n_frames(self):
return len(self.layers)
@property
def is_animated(self):
return len(self.layers) > 1
def seek(self, layer):
if not self._seek_check(layer):
return
try:
name, mode, bbox, tile = self.layers[layer - 1]
self.mode = mode
self.tile = tile
self.frame = layer
self.fp = self.__fp
return name, bbox
except IndexError:
raise EOFError("no such layer")
def tell(self):
return self.frame
def load_prepare(self):
if not self.im or self.im.mode != self.mode or self.im.size != self.size:
self.im = Image.core.fill(self.mode, self.size, 0)
if self.mode == "P":
Image.Image.load(self)
def _close__fp(self):
try:
if self.__fp != self.fp:
self.__fp.close()
except AttributeError:
pass
finally:
self.__fp = None
def _layerinfo(file):
layers = []
read = file.read
for i in range(abs(i16(read(2)))):
y0 = i32(read(4))
x0 = i32(read(4))
y1 = i32(read(4))
x1 = i32(read(4))
info = []
mode = []
types = list(range(i16(read(2))))
if len(types) > 4:
continue
for i in types:
type = i16(read(2))
if type == 65535:
m = "A"
else:
m = "RGBA"[type]
mode.append(m)
size = i32(read(4))
info.append((m, size))
mode.sort()
if mode == ["R"]:
mode = "L"
elif mode == ["B", "G", "R"]:
mode = "RGB"
elif mode == ["A", "B", "G", "R"]:
mode = "RGBA"
else:
mode = None
read(12)
name = ""
size = i32(read(4))
combined = 0
if size:
data_end = file.tell() + size
length = i32(read(4))
if length:
file.seek(length - 16, io.SEEK_CUR)
combined += length + 4
length = i32(read(4))
if length:
file.seek(length, io.SEEK_CUR)
combined += length + 4
length = i8(read(1))
if length:
# Latin-1 should be a good guess
name = read(length).decode("latin-1", "replace")
combined += length + 1
file.seek(data_end)
layers.append((name, mode, (x0, y0, x1, y1)))
# get tiles
i = 0
for name, mode, bbox in layers:
tile = []
for m in mode:
t = _maketile(file, m, bbox, 1)
if t:
tile.extend(t)
layers[i] = name, mode, bbox, tile
i += 1
return layers
def _maketile(file, mode, bbox, channels):
tile = None
read = file.read
compression = i16(read(2))
xsize = bbox[2] - bbox[0]
ysize = bbox[3] - bbox[1]
offset = file.tell()
if compression == 0:
#
# raw compression
tile = []
for channel in range(channels):
layer = mode[channel]
if mode == "CMYK":
layer += ";I"
tile.append(("raw", bbox, offset, layer))
offset = offset + xsize * ysize
elif compression == 1:
#
# packbits compression
i = 0
tile = []
bytecount = read(channels * ysize * 2)
offset = file.tell()
for channel in range(channels):
layer = mode[channel]
if mode == "CMYK":
layer += ";I"
tile.append(("packbits", bbox, offset, layer))
for y in range(ysize):
offset = offset + i16(bytecount[i : i + 2])
i += 2
file.seek(offset)
if offset & 1:
read(1) # padding
return tile
# --------------------------------------------------------------------
# registry
Image.register_open(PsdImageFile.format, PsdImageFile, _accept)
Image.register_extension(PsdImageFile.format, ".psd")
| true | true |
f72ad8ba1938d20c873989d306f99b76c1ee53bf | 11,515 | py | Python | qiskit/tools/jupyter/backend_overview.py | t-imamichi/qiskit-core | 8d2eeeac44f97af1e10514cdae4157e5923ff2e5 | [
"Apache-2.0"
] | 92 | 2018-06-05T11:18:38.000Z | 2018-07-01T23:50:44.000Z | qiskit/tools/jupyter/backend_overview.py | t-imamichi/qiskit-core | 8d2eeeac44f97af1e10514cdae4157e5923ff2e5 | [
"Apache-2.0"
] | 107 | 2018-06-05T08:41:19.000Z | 2018-07-02T12:10:53.000Z | qiskit/tools/jupyter/backend_overview.py | t-imamichi/qiskit-core | 8d2eeeac44f97af1e10514cdae4157e5923ff2e5 | [
"Apache-2.0"
] | 39 | 2018-06-05T09:55:56.000Z | 2018-07-02T08:47:35.000Z | # This code is part of Qiskit.
#
# (C) Copyright IBM 2017, 2018.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
"""A module for monitoring backends."""
import time
import threading
import types
from IPython.display import display
from IPython.core.magic import line_magic, Magics, magics_class
from IPython.core import magic_arguments
import matplotlib.pyplot as plt
import ipywidgets as widgets
from qiskit.tools.monitor.overview import get_unique_backends
from qiskit.visualization.gate_map import plot_gate_map
@magics_class
class BackendOverview(Magics):
    """Jupyter magics that render a live overview grid of quantum backends."""

    @line_magic
    @magic_arguments.magic_arguments()
    @magic_arguments.argument(
        "-i", "--interval", type=float, default=60, help="Interval for status check."
    )
    def qiskit_backend_overview(self, line=""):
        """A Jupyter magic function to monitor backends."""
        args = magic_arguments.parse_argstring(self.qiskit_backend_overview, line)
        unique_hardware_backends = get_unique_backends()

        # Banner shown above the grid.
        title_html = (
            "<h2 style ='color:#ffffff; background-color:#000000;"
            "padding-top: 1%; padding-bottom: 1%;padding-left: 1%;"
            "margin-top: 0px'>Backend Overview</h2>"
        )
        backend_title = widgets.HTML(
            value=title_html, layout=widgets.Layout(margin="0px 0px 0px 0px")
        )

        build_back_widgets = [backend_widget(b) for b in unique_hardware_backends]

        # Operational backends are pushed to the front of the grid.
        oper_ord_backends = []
        _backends = []
        for back_widget, back in zip(build_back_widgets, unique_hardware_backends):
            if back.status().operational:
                oper_ord_backends.insert(0, back_widget)
                _backends.insert(0, back)
            else:
                oper_ord_backends.append(back_widget)
                _backends.append(back)

        # Row labels to the left of the grid; the margins line each label up
        # with the corresponding row produced by ``backend_widget``.
        label_specs = [
            ("Num. Qubits", None),
            ("Quantum Vol.", None),
            ("Pending Jobs", "5px 0px 0px 0px"),
            ("Operational", "5px 0px 0px 0px"),
            ("Least Busy", "10px 0px 0px 0px"),
            ("Avg. T1 / T2", "10px 0px 0px 0px"),
            ("Avg. CX Err.", "8px 0px 0px 0px"),
            ("Avg. Meas. Err.", "8px 0px 0px 0px"),
        ]
        side_labels = [
            widgets.Label(value=text)
            if margin is None
            else widgets.Label(value=text, layout=widgets.Layout(margin=margin))
            for text, margin in label_specs
        ]
        labels_widget = widgets.VBox(
            side_labels,
            layout=widgets.Layout(margin="295px 0px 0px 0px", min_width="100px"),
        )

        backend_grid = GridBox_with_thread(
            children=oper_ord_backends,
            layout=widgets.Layout(
                grid_template_columns="250px " * len(unique_hardware_backends),
                grid_template_rows="auto",
                grid_gap="0px 25px",
            ),
        )

        # Attach the backend list and start the polling thread that keeps
        # the grid up to date (see ``update_backend_info``).
        backend_grid._backends = _backends  # pylint: disable=attribute-defined-outside-init
        backend_grid._update = types.MethodType(  # pylint: disable=attribute-defined-outside-init
            update_backend_info, backend_grid
        )
        backend_grid._thread = threading.Thread(  # pylint: disable=attribute-defined-outside-init
            target=backend_grid._update, args=(args.interval,)
        )
        backend_grid._thread.start()

        back_box = widgets.HBox([labels_widget, backend_grid])
        back_monitor = widgets.VBox([backend_title, back_box])
        display(back_monitor)
class GridBox_with_thread(widgets.GridBox):  # pylint: disable=invalid-name
    """A GridBox that shuts down its attached updater thread on disposal."""

    def __del__(self):
        """Stop the polling thread (if one was attached) and close the widget."""
        thread = getattr(self, "_thread", None)
        if thread is not None:
            try:
                thread.do_run = False
                thread.join()
            except Exception:  # pylint: disable=broad-except
                # Best effort only: never raise out of garbage collection.
                pass
        self.close()
def backend_widget(backend):
    """Create the summary column (``VBox``) for a single backend.

    The children are ordered: name, gate map, qubit count, quantum volume,
    pending-jobs gauge, operational flag, least-busy flag, avg. T1/T2,
    avg. CX error, avg. measurement error.  ``update_backend_info`` indexes
    into this ordering (children 2, 4, 5 and 6), so do not reorder.

    Args:
        backend (BaseBackend): backend to summarize.

    Returns:
        widgets.VBox: the populated column, tagged with ``_is_alive`` which
        the monitor thread uses to track reachable backends.
    """
    config = backend.configuration().to_dict()
    props = backend.properties().to_dict()

    name = widgets.HTML(value=f"<h4>{backend.name()}</h4>", layout=widgets.Layout())

    num_qubits = config["n_qubits"]

    # A missing or falsy (None/0) quantum volume is displayed as a dash.
    qv_val = config.get("quantum_volume") or "-"

    qubit_count = widgets.HTML(
        value=f"<h5><b>{num_qubits}</b></h5>",
        layout=widgets.Layout(justify_content="center"),
    )

    qv_value = widgets.HTML(
        value=f"<h5>{qv_val}</h5>",
        layout=widgets.Layout(justify_content="center"),
    )

    cmap = widgets.Output(
        layout=widgets.Layout(
            min_width="250px",
            max_width="250px",
            max_height="250px",
            min_height="250px",
            justify_content="center",
            align_items="center",
            margin="0px 0px 0px 0px",
        )
    )

    with cmap:
        _cmap_fig = plot_gate_map(backend, plot_directed=False, label_qubits=False)
        if _cmap_fig is not None:
            display(_cmap_fig)
            # Prevents plot from showing up twice.
            plt.close(_cmap_fig)

    pending = generate_jobs_pending_widget()

    is_oper = widgets.HTML(value="<h5></h5>", layout=widgets.Layout(justify_content="center"))

    least_busy = widgets.HTML(value="<h5></h5>", layout=widgets.Layout(justify_content="center"))

    # Average decoherence times over all qubits; entry 0 of each qubit's
    # property list is T1 and entry 1 is T2 (BackendProperties ordering).
    t1_units = props["qubits"][0][0]["unit"]
    avg_t1 = round(sum(q[0]["value"] for q in props["qubits"]) / num_qubits, 1)
    avg_t2 = round(sum(q[1]["value"] for q in props["qubits"]) / num_qubits, 1)

    t12_widget = widgets.HTML(
        value=f"<h5>{avg_t1} / {avg_t2} {t1_units}</h5>",
        layout=widgets.Layout(),
    )

    # Average two-qubit (CX) gate error.  A reported gate_error of exactly
    # 1.0 marks a gate that is effectively turned off and is excluded.
    avg_cx_err = "NA"
    if config["coupling_map"]:
        cx_errors = [
            param["value"]
            for gate in props["gates"]
            if gate["gate"] == "cx"
            for param in gate["parameters"]
            if param["name"] == "gate_error" and param["value"] != 1.0
        ]
        if cx_errors:
            avg_cx_err = round(sum(cx_errors) / len(cx_errors), 4)

    cx_widget = widgets.HTML(value=f"<h5>{avg_cx_err}</h5>", layout=widgets.Layout())

    # Average readout (measurement) error over all qubits.
    avg_meas_err = round(
        sum(
            item["value"]
            for qub in props["qubits"]
            for item in qub
            if item["name"] == "readout_error"
        )
        / num_qubits,
        4,
    )

    meas_widget = widgets.HTML(value=f"<h5>{avg_meas_err}</h5>", layout=widgets.Layout())

    out = widgets.VBox(
        [
            name,
            cmap,
            qubit_count,
            qv_value,
            pending,
            is_oper,
            least_busy,
            t12_widget,
            cx_widget,
            meas_widget,
        ],
        layout=widgets.Layout(display="inline-flex", flex_flow="column", align_items="center"),
    )

    out._is_alive = True
    return out
def update_backend_info(self, interval=60):
    """Poll backend status and refresh the monitor widgets.

    Runs in a loop on a separate thread (bound to the grid widget via
    ``types.MethodType``); exits when the thread's ``do_run`` flag is
    cleared or when every backend widget is marked dead.

    Parameters
    ----------
    self : GridBox_with_thread
        The grid of per-backend widgets; must carry ``_backends``.
    interval : int, default 60
        Seconds between status refreshes (the loop wakes once per second).
    """
    my_thread = threading.current_thread()
    current_interval = 0
    started = False
    all_dead = False
    stati = [None] * len(self._backends)
    while getattr(my_thread, "do_run", True) and not all_dead:
        if current_interval == interval or started is False:
            for ind, back in enumerate(self._backends):
                # children[2] is the qubit-count HTML widget; its heading
                # color doubles as the backend's alive/dead indicator.
                _value = self.children[ind].children[2].value
                _head = _value.split("<b>")[0]
                try:
                    _status = back.status()
                    stati[ind] = _status
                except Exception:  # pylint: disable=broad-except
                    # Status call failed: tint the heading red, mark dead.
                    self.children[ind].children[2].value = _value.replace(
                        _head, "<h5 style='color:#ff5c49'>"
                    )
                    self.children[ind]._is_alive = False
                else:
                    self.children[ind]._is_alive = True
                    self.children[ind].children[2].value = _value.replace(_head, "<h5>")
            idx = list(range(len(self._backends)))
            pending = [s.pending_jobs for s in stati]
            _, least_idx = zip(*sorted(zip(pending, idx)))
            # Make sure least pending is operational
            # NOTE(review): if no backend is operational, least_pending_idx
            # is never bound and the loop below raises NameError - confirm.
            for ind in least_idx:
                if stati[ind].operational:
                    least_pending_idx = ind
                    break
            for var in idx:
                # children[6]: "Least Busy" flag, children[5]: "Operational",
                # children[4].children[1]: pending-jobs progress bar
                # (see the VBox built in backend_widget).
                if var == least_pending_idx:
                    self.children[var].children[6].value = "<h5 style='color:#34bc6e'>True</h5>"
                else:
                    self.children[var].children[6].value = "<h5 style='color:#dc267f'>False</h5>"
                self.children[var].children[4].children[1].max = max(
                    self.children[var].children[4].children[1].max, pending[var] + 10
                )
                self.children[var].children[4].children[1].value = pending[var]
                if stati[var].operational:
                    self.children[var].children[5].value = "<h5 style='color:#34bc6e'>True</h5>"
                else:
                    self.children[var].children[5].value = "<h5 style='color:#dc267f'>False</h5>"
            started = True
            current_interval = 0
        time.sleep(1)
        all_dead = not any(wid._is_alive for wid in self.children)
        current_interval += 1
def generate_jobs_pending_widget():
    """Generate a jobs-pending progress bar widget.

    Returns an HBox of [current-value label, IntProgress bar, max label];
    the labels track the bar's ``value``/``max`` traits via observers.
    """
    pbar = widgets.IntProgress(
        value=0,
        min=0,
        max=50,
        description="",
        orientation="horizontal",
        layout=widgets.Layout(max_width="180px"),
    )
    pbar.style.bar_color = "#71cddd"
    pbar_current = widgets.Label(value=str(pbar.value), layout=widgets.Layout(min_width="auto"))
    pbar_max = widgets.Label(value=str(pbar.max), layout=widgets.Layout(min_width="auto"))
    def _on_max_change(change):
        # Mirror the bar's new maximum into the right-hand label.
        pbar_max.value = str(change["new"])
    def _on_val_change(change):
        # Mirror the bar's new value into the left-hand label.
        pbar_current.value = str(change["new"])
    pbar.observe(_on_max_change, names="max")
    pbar.observe(_on_val_change, names="value")
    jobs_widget = widgets.HBox(
        [pbar_current, pbar, pbar_max],
        layout=widgets.Layout(max_width="250px", min_width="250px", justify_content="center"),
    )
    return jobs_widget
| 35.650155 | 99 | 0.590881 |
import time
import threading
import types
from IPython.display import display
from IPython.core.magic import line_magic, Magics, magics_class
from IPython.core import magic_arguments
import matplotlib.pyplot as plt
import ipywidgets as widgets
from qiskit.tools.monitor.overview import get_unique_backends
from qiskit.visualization.gate_map import plot_gate_map
@magics_class
class BackendOverview(Magics):
    """IPython magics that render a live overview grid of backends."""

    @line_magic
    @magic_arguments.magic_arguments()
    @magic_arguments.argument(
        "-i", "--interval", type=float, default=60, help="Interval for status check."
    )
    def qiskit_backend_overview(self, line=""):
        """Line magic: display a grid of per-backend summary widgets.

        Operational backends are listed first; a background thread
        (``update_backend_info``) refreshes the grid every ``--interval``
        seconds.
        """
        args = magic_arguments.parse_argstring(self.qiskit_backend_overview, line)
        unique_hardware_backends = get_unique_backends()
        _value = "<h2 style ='color:#ffffff; background-color:#000000;"
        _value += "padding-top: 1%; padding-bottom: 1%;padding-left: 1%;"
        _value += "margin-top: 0px'>Backend Overview</h2>"
        backend_title = widgets.HTML(value=_value, layout=widgets.Layout(margin="0px 0px 0px 0px"))
        build_back_widgets = [backend_widget(b) for b in unique_hardware_backends]
        _backends = []
        oper_ord_backends = []
        # Partition: operational backends are prepended, others appended,
        # so operational ones appear first in the grid.
        for n, back in enumerate(unique_hardware_backends):
            if back.status().operational:
                oper_ord_backends = [build_back_widgets[n]] + oper_ord_backends
                _backends = [back] + _backends
            else:
                oper_ord_backends = oper_ord_backends + [build_back_widgets[n]]
                _backends = _backends + [back]
        # Row labels shown in a column to the left of the widget grid.
        qubit_label = widgets.Label(value="Num. Qubits")
        qv_label = widgets.Label(value="Quantum Vol.")
        pend_label = widgets.Label(
            value="Pending Jobs", layout=widgets.Layout(margin="5px 0px 0px 0px")
        )
        least_label = widgets.Label(
            value="Least Busy", layout=widgets.Layout(margin="10px 0px 0px 0px")
        )
        oper_label = widgets.Label(
            value="Operational", layout=widgets.Layout(margin="5px 0px 0px 0px")
        )
        t12_label = widgets.Label(
            value="Avg. T1 / T2", layout=widgets.Layout(margin="10px 0px 0px 0px")
        )
        cx_label = widgets.Label(
            value="Avg. CX Err.", layout=widgets.Layout(margin="8px 0px 0px 0px")
        )
        meas_label = widgets.Label(
            value="Avg. Meas. Err.", layout=widgets.Layout(margin="8px 0px 0px 0px")
        )
        labels_widget = widgets.VBox(
            [
                qubit_label,
                qv_label,
                pend_label,
                oper_label,
                least_label,
                t12_label,
                cx_label,
                meas_label,
            ],
            layout=widgets.Layout(margin="295px 0px 0px 0px", min_width="100px"),
        )
        backend_grid = GridBox_with_thread(
            children=oper_ord_backends,
            layout=widgets.Layout(
                grid_template_columns="250px " * len(unique_hardware_backends),
                grid_template_rows="auto",
                grid_gap="0px 25px",
            ),
        )
        backend_grid._backends = _backends
        # Bind the module-level updater as a method so it can be the
        # thread target with access to the grid's children.
        backend_grid._update = types.MethodType(
            update_backend_info, backend_grid
        )
        backend_grid._thread = threading.Thread(
            target=backend_grid._update, args=(args.interval,)
        )
        backend_grid._thread.start()
        back_box = widgets.HBox([labels_widget, backend_grid])
        back_monitor = widgets.VBox([backend_title, back_box])
        display(back_monitor)
class GridBox_with_thread(widgets.GridBox):
    """A ``GridBox`` that owns a background updater thread.

    The monitor thread (see ``update_backend_info``) is attached as
    ``_thread``; it is signalled to stop and joined when the widget is
    garbage collected.
    """

    def __del__(self):
        """Stop the updater thread (if any) and close the widget."""
        if hasattr(self, "_thread"):
            try:
                # `do_run` is polled by the thread loop as its stop flag.
                self._thread.do_run = False
                self._thread.join()
            except Exception:
                # Best-effort cleanup (e.g. during interpreter shutdown).
                pass
        self.close()
def backend_widget(backend):
    """Build the summary widget column for one backend.

    The returned VBox child order matters: ``update_backend_info`` indexes
    into it (2: qubit count, 4: pending bar, 5: operational, 6: least busy).
    """
    config = backend.configuration().to_dict()
    props = backend.properties().to_dict()
    name = widgets.HTML(value=f"<h4>{backend.name()}</h4>", layout=widgets.Layout())
    num_qubits = config["n_qubits"]
    qv_val = "-"  # placeholder when quantum volume is absent/falsy
    if "quantum_volume" in config.keys():
        if config["quantum_volume"]:
            qv_val = config["quantum_volume"]
    qubit_count = widgets.HTML(
        value=f"<h5><b>{num_qubits}</b></h5>",
        layout=widgets.Layout(justify_content="center"),
    )
    qv_value = widgets.HTML(
        value=f"<h5>{qv_val}</h5>",
        layout=widgets.Layout(justify_content="center"),
    )
    cmap = widgets.Output(
        layout=widgets.Layout(
            min_width="250px",
            max_width="250px",
            max_height="250px",
            min_height="250px",
            justify_content="center",
            align_items="center",
            margin="0px 0px 0px 0px",
        )
    )
    with cmap:
        _cmap_fig = plot_gate_map(backend, plot_directed=False, label_qubits=False)
        if _cmap_fig is not None:
            display(_cmap_fig)
            # Close the figure so matplotlib does not render it twice.
            plt.close(_cmap_fig)
    pending = generate_jobs_pending_widget()
    is_oper = widgets.HTML(value="<h5></h5>", layout=widgets.Layout(justify_content="center"))
    least_busy = widgets.HTML(value="<h5></h5>", layout=widgets.Layout(justify_content="center"))
    # q[0]/q[1] are assumed to be the T1/T2 entries of each qubit's
    # property list - relies on property ordering; TODO confirm.
    t1_units = props["qubits"][0][0]["unit"]
    avg_t1 = round(sum(q[0]["value"] for q in props["qubits"]) / num_qubits, 1)
    avg_t2 = round(sum(q[1]["value"] for q in props["qubits"]) / num_qubits, 1)
    t12_widget = widgets.HTML(
        value=f"<h5>{avg_t1} / {avg_t2} {t1_units}</h5>",
        layout=widgets.Layout(),
    )
    avg_cx_err = "NA"
    if config["coupling_map"]:
        sum_cx_err = 0
        num_cx = 0
        for gate in props["gates"]:
            if gate["gate"] == "cx":
                for param in gate["parameters"]:
                    if param["name"] == "gate_error":
                        # A gate_error of exactly 1.0 marks the gate as
                        # effectively disabled; exclude it from the average.
                        if param["value"] != 1.0:
                            sum_cx_err += param["value"]
                            num_cx += 1
        if num_cx > 0:
            avg_cx_err = round(sum_cx_err / num_cx, 4)
    cx_widget = widgets.HTML(value=f"<h5>{avg_cx_err}</h5>", layout=widgets.Layout())
    # Average readout (measurement) error over all qubits.
    avg_meas_err = 0
    for qub in props["qubits"]:
        for item in qub:
            if item["name"] == "readout_error":
                avg_meas_err += item["value"]
    avg_meas_err = round(avg_meas_err / num_qubits, 4)
    meas_widget = widgets.HTML(value=f"<h5>{avg_meas_err}</h5>", layout=widgets.Layout())
    out = widgets.VBox(
        [
            name,
            cmap,
            qubit_count,
            qv_value,
            pending,
            is_oper,
            least_busy,
            t12_widget,
            cx_widget,
            meas_widget,
        ],
        layout=widgets.Layout(display="inline-flex", flex_flow="column", align_items="center"),
    )
    # Flag read by update_backend_info to decide when the monitor may stop.
    out._is_alive = True
    return out
def update_backend_info(self, interval=60):
    """Poll backend status and refresh the monitor widgets.

    Runs in a loop on a separate thread (bound to the grid widget via
    ``types.MethodType``); exits when the thread's ``do_run`` flag is
    cleared or when every backend widget is marked dead.

    Parameters
    ----------
    self : GridBox_with_thread
        The grid of per-backend widgets; must carry ``_backends``.
    interval : int, default 60
        Seconds between status refreshes (the loop wakes once per second).
    """
    my_thread = threading.current_thread()
    current_interval = 0
    started = False
    all_dead = False
    stati = [None] * len(self._backends)
    while getattr(my_thread, "do_run", True) and not all_dead:
        if current_interval == interval or started is False:
            for ind, back in enumerate(self._backends):
                # children[2] is the qubit-count HTML widget; its heading
                # color doubles as the backend's alive/dead indicator.
                _value = self.children[ind].children[2].value
                _head = _value.split("<b>")[0]
                try:
                    _status = back.status()
                    stati[ind] = _status
                except Exception:  # status call failed: tint red, mark dead
                    self.children[ind].children[2].value = _value.replace(
                        _head, "<h5 style='color:#ff5c49'>"
                    )
                    self.children[ind]._is_alive = False
                else:
                    self.children[ind]._is_alive = True
                    self.children[ind].children[2].value = _value.replace(_head, "<h5>")
            idx = list(range(len(self._backends)))
            pending = [s.pending_jobs for s in stati]
            _, least_idx = zip(*sorted(zip(pending, idx)))
            # Pick the least-pending backend that is also operational.
            # NOTE(review): if no backend is operational, least_pending_idx
            # is never bound and the loop below raises NameError - confirm.
            for ind in least_idx:
                if stati[ind].operational:
                    least_pending_idx = ind
                    break
            for var in idx:
                # children[6]: "Least Busy" flag, children[5]: "Operational",
                # children[4].children[1]: pending-jobs progress bar
                # (see the VBox built in backend_widget).
                if var == least_pending_idx:
                    self.children[var].children[6].value = "<h5 style='color:#34bc6e'>True</h5>"
                else:
                    self.children[var].children[6].value = "<h5 style='color:#dc267f'>False</h5>"
                self.children[var].children[4].children[1].max = max(
                    self.children[var].children[4].children[1].max, pending[var] + 10
                )
                self.children[var].children[4].children[1].value = pending[var]
                if stati[var].operational:
                    self.children[var].children[5].value = "<h5 style='color:#34bc6e'>True</h5>"
                else:
                    self.children[var].children[5].value = "<h5 style='color:#dc267f'>False</h5>"
            started = True
            current_interval = 0
        time.sleep(1)
        all_dead = not any(wid._is_alive for wid in self.children)
        current_interval += 1
def generate_jobs_pending_widget():
    """Generate a jobs-pending progress bar widget.

    Returns an HBox of [current-value label, IntProgress bar, max label];
    the labels track the bar's ``value``/``max`` traits via observers.
    """
    pbar = widgets.IntProgress(
        value=0,
        min=0,
        max=50,
        description="",
        orientation="horizontal",
        layout=widgets.Layout(max_width="180px"),
    )
    pbar.style.bar_color = "#71cddd"
    pbar_current = widgets.Label(value=str(pbar.value), layout=widgets.Layout(min_width="auto"))
    pbar_max = widgets.Label(value=str(pbar.max), layout=widgets.Layout(min_width="auto"))
    def _on_max_change(change):
        # Mirror the bar's new maximum into the right-hand label.
        pbar_max.value = str(change["new"])
    def _on_val_change(change):
        # Mirror the bar's new value into the left-hand label.
        pbar_current.value = str(change["new"])
    pbar.observe(_on_max_change, names="max")
    pbar.observe(_on_val_change, names="value")
    jobs_widget = widgets.HBox(
        [pbar_current, pbar, pbar_max],
        layout=widgets.Layout(max_width="250px", min_width="250px", justify_content="center"),
    )
    return jobs_widget
| true | true |
f72ada2ce523c5d4764bb97fbbec0c1d62c192e2 | 897 | py | Python | idaes/generic_models/unit_models/column_models/__init__.py | eslickj/idaes-pse | 328ed07ffb0b4d98c03e972675ea32c41dd2531a | [
"RSA-MD"
] | 112 | 2019-02-11T23:16:36.000Z | 2022-03-23T20:59:57.000Z | idaes/generic_models/unit_models/column_models/__init__.py | eslickj/idaes-pse | 328ed07ffb0b4d98c03e972675ea32c41dd2531a | [
"RSA-MD"
] | 621 | 2019-03-01T14:44:12.000Z | 2022-03-31T19:49:25.000Z | idaes/generic_models/unit_models/column_models/__init__.py | eslickj/idaes-pse | 328ed07ffb0b4d98c03e972675ea32c41dd2531a | [
"RSA-MD"
] | 154 | 2019-02-01T23:46:33.000Z | 2022-03-23T15:07:10.000Z | #################################################################################
# The Institute for the Design of Advanced Energy Systems Integrated Platform
# Framework (IDAES IP) was produced under the DOE Institute for the
# Design of Advanced Energy Systems (IDAES), and is copyright (c) 2018-2021
# by the software owners: The Regents of the University of California, through
# Lawrence Berkeley National Laboratory, National Technology & Engineering
# Solutions of Sandia, LLC, Carnegie Mellon University, West Virginia University
# Research Corporation, et al. All rights reserved.
#
# Please see the files COPYRIGHT.md and LICENSE.md for full copyright and
# license information.
#################################################################################
from .condenser import Condenser
from .reboiler import Reboiler
from .tray import Tray
from .tray_column import TrayColumn
| 52.764706 | 81 | 0.654404 | true | true | |
f72adb7883b52f3f1c6bf8306f57b1dd0008ab29 | 868 | py | Python | enviorment/colors.py | JLMadsen/TetrisAI | c6f2ef47a57e60b1ec73666406931ca46c9d1233 | [
"MIT"
] | 1 | 2020-11-23T22:11:33.000Z | 2020-11-23T22:11:33.000Z | enviorment/colors.py | JLMadsen/TetrisAI | c6f2ef47a57e60b1ec73666406931ca46c9d1233 | [
"MIT"
] | 1 | 2021-07-13T15:31:00.000Z | 2021-07-13T15:31:00.000Z | enviorment/colors.py | JLMadsen/TetrisAI | c6f2ef47a57e60b1ec73666406931ca46c9d1233 | [
"MIT"
] | 1 | 2021-02-02T14:11:57.000Z | 2021-02-02T14:11:57.000Z | class Color:
WHITE = (255, 255, 255)
BLACK = (0, 0, 0 )
GRAY = (100, 100, 100)
RED = (220, 20, 60 )
GREEN = (50, 205, 50 )
YELLOW = (255, 255, 0 )
PURPLE = (218, 112, 214)
ALL = [WHITE, BLACK, GRAY, RED, GREEN]
# ANSI escape sequences, used only for printing colored text in the terminal
class bcolors:
    """ANSI terminal escape codes; wrap text and terminate with ENDC."""
    HEADER = '\033[95m'
    OKBLUE = '\033[94m'
    OKCYAN = '\033[96m'
    OKGREEN = '\033[92m'
    WARNING = '\033[93m'
    FAIL = '\033[91m'
    ENDC = '\033[0m'   # resets all attributes
    BOLD = '\033[1m'
    UNDERLINE = '\033[4m'
def green(msg):
    """Wrap *msg* in the ANSI green color code."""
    return "".join((bcolors.OKGREEN, msg, bcolors.ENDC))
def header(msg):
    """Wrap *msg* in the ANSI header (magenta) color code."""
    return "".join((bcolors.HEADER, msg, bcolors.ENDC))
def fail(msg):
    """Wrap *msg* in the ANSI failure (red) color code."""
    return "".join((bcolors.FAIL, msg, bcolors.ENDC))
def cyan(msg):
    """Wrap *msg* in the ANSI cyan color code."""
    return "".join((bcolors.OKCYAN, msg, bcolors.ENDC))
def warning(msg):
    """Wrap *msg* in the ANSI warning (yellow) color code."""
    return "".join((bcolors.WARNING, msg, bcolors.ENDC))
WHITE = (255, 255, 255)
BLACK = (0, 0, 0 )
GRAY = (100, 100, 100)
RED = (220, 20, 60 )
GREEN = (50, 205, 50 )
YELLOW = (255, 255, 0 )
PURPLE = (218, 112, 214)
ALL = [WHITE, BLACK, GRAY, RED, GREEN]
class bcolors:
    """ANSI terminal escape codes; wrap text and terminate with ENDC."""
    HEADER = '\033[95m'
    OKBLUE = '\033[94m'
    OKCYAN = '\033[96m'
    OKGREEN = '\033[92m'
    WARNING = '\033[93m'
    FAIL = '\033[91m'
    ENDC = '\033[0m'   # resets all attributes
    BOLD = '\033[1m'
    UNDERLINE = '\033[4m'
def green(msg):
    """Wrap *msg* in the ANSI green color code."""
    return "".join((bcolors.OKGREEN, msg, bcolors.ENDC))
def header(msg):
    """Wrap *msg* in the ANSI header (magenta) color code."""
    return "".join((bcolors.HEADER, msg, bcolors.ENDC))
def fail(msg):
    """Wrap *msg* in the ANSI failure (red) color code."""
    return "".join((bcolors.FAIL, msg, bcolors.ENDC))
def cyan(msg):
    """Wrap *msg* in the ANSI cyan color code."""
    return "".join((bcolors.OKCYAN, msg, bcolors.ENDC))
def warning(msg):
    """Wrap *msg* in the ANSI warning (yellow) color code."""
    return "".join((bcolors.WARNING, msg, bcolors.ENDC))
f72adc47d855b9bd8cfb880f4445828ea9fe2109 | 9,267 | py | Python | pysot/datasets/dataset_template.py | wattanapong/DFA | c05851beca2f8739f80531eb4de2f61639715cab | [
"Apache-2.0"
] | null | null | null | pysot/datasets/dataset_template.py | wattanapong/DFA | c05851beca2f8739f80531eb4de2f61639715cab | [
"Apache-2.0"
] | null | null | null | pysot/datasets/dataset_template.py | wattanapong/DFA | c05851beca2f8739f80531eb4de2f61639715cab | [
"Apache-2.0"
] | null | null | null | # Copyright (c) SenseTime. All Rights Reserved.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import json
import logging
import sys
import os
import cv2
import numpy as np
from torch.utils.data import Dataset
from pysot.utils.bbox import center2corner, Center
from pysot.datasets.anchor_target import AnchorTarget
from pysot.datasets.augmentation import Augmentation
from pysot.core.config import cfg
logger = logging.getLogger("global")
# setting opencv
pyv = sys.version[0]  # first character of sys.version, e.g. '3'
# `pyv` is already a single character, so the original `pyv[0]` was
# redundant double-indexing; compare it directly.
if pyv == '3':
    cv2.ocl.setUseOpenCL(False)
class SubDataset(object):
    """One annotated tracking sub-dataset used by :class:`TrkDataset`.

    Loads a JSON annotation file, drops degenerate boxes and empty
    tracks/videos, and exposes (image path, bbox) pairs addressed by a
    global index range ``[start_idx, start_idx + num)``.
    """

    def __init__(self, name, root, anno, frame_range, num_use, start_idx):
        # Resolve dataset paths relative to this source file's location.
        cur_path = os.path.dirname(os.path.realpath(__file__))
        self.name = name
        self.root = os.path.join(cur_path, '../../', root)
        self.anno = os.path.join(cur_path, '../../', anno)
        self.frame_range = frame_range
        self.num_use = num_use          # -1 means "use every video"
        self.start_idx = start_idx      # global index offset of this sub-dataset
        logger.info("loading " + name)
        with open(self.anno, 'r') as f:
            meta_data = json.load(f)
            meta_data = self._filter_zero(meta_data)
        # Normalize frame keys to sorted int lists; prune empty tracks.
        for video in list(meta_data.keys()):
            for track in meta_data[video]:
                # NOTE(review): `del` below mutates meta_data[video] while
                # it is being iterated - confirm empty-frame tracks cannot
                # occur here, otherwise this raises RuntimeError.
                frames = meta_data[video][track]
                frames = list(map(int,
                              filter(lambda x: x.isdigit(), frames.keys())))
                frames.sort()
                meta_data[video][track]['frames'] = frames
                if len(frames) <= 0:
                    logger.warning("{}/{} has no frames".format(video, track))
                    del meta_data[video][track]
        for video in list(meta_data.keys()):
            if len(meta_data[video]) <= 0:
                logger.warning("{} has no tracks".format(video))
                del meta_data[video]
        self.labels = meta_data
        self.num = len(self.labels)
        self.num_use = self.num if self.num_use == -1 else self.num_use
        self.videos = list(meta_data.keys())
        logger.info("{} loaded".format(self.name))
        # Crop file name pattern: <frame>.<track>.<x|z>.jpg
        self.path_format = '{}.{}.{}.jpg'
        self.pick = self.shuffle()

    def _filter_zero(self, meta_data):
        """Drop annotations whose bounding box has non-positive width/height."""
        meta_data_new = {}
        for video, tracks in meta_data.items():
            new_tracks = {}
            for trk, frames in tracks.items():
                new_frames = {}
                for frm, bbox in frames.items():
                    if not isinstance(bbox, dict):
                        # bbox is either [x1, y1, x2, y2] or [w, h].
                        if len(bbox) == 4:
                            x1, y1, x2, y2 = bbox
                            w, h = x2 - x1, y2 - y1
                        else:
                            w, h = bbox
                        if w <= 0 or h <= 0:
                            continue
                    new_frames[frm] = bbox
                if len(new_frames) > 0:
                    new_tracks[trk] = new_frames
            if len(new_tracks) > 0:
                meta_data_new[video] = new_tracks
        return meta_data_new

    def log(self):
        """Log a one-line summary of this sub-dataset's index range."""
        logger.info("{} start-index {} select [{}/{}] path_format {}".format(
            self.name, self.start_idx, self.num_use,
            self.num, self.path_format))

    def shuffle(self):
        """Return ``num_use`` shuffled global indices (repeated if needed)."""
        lists = list(range(self.start_idx, self.start_idx + self.num))
        pick = []
        while len(pick) < self.num_use:
            np.random.shuffle(lists)
            pick += lists
        return pick[:self.num_use]

    def get_image_anno(self, video, track, frame):
        """Return ``(image_path, bbox)`` for one cropped search ('x') image."""
        frame = "{:06d}".format(frame)
        image_path = os.path.join(self.root, video,
                                  self.path_format.format(frame, track, 'x'))
        image_anno = self.labels[video][track][frame]
        return image_path, image_anno

    # `track` is one tracked object inside a video; `video` is a sequence
    # folder such as ILSVRC2015_train_00004000 under the dataset root.
    def get_positive_pair(self, index):
        """Sample a random template frame of a random track of video ``index``."""
        video_name = self.videos[index]
        video = self.labels[video_name]
        track = np.random.choice(list(video.keys()))
        track_info = video[track]
        frames = track_info['frames']
        template_frame = np.random.randint(0, len(frames))
        template_frame = frames[template_frame]
        return self.get_image_anno(video_name, track, template_frame)

    def get_random_target(self, index=-1):
        """Sample a random frame; a random video is drawn when ``index`` is -1."""
        if index == -1:
            index = np.random.randint(0, self.num)
        video_name = self.videos[index]
        video = self.labels[video_name]
        track = np.random.choice(list(video.keys()))
        track_info = video[track]
        frames = track_info['frames']
        frame = np.random.choice(frames)
        return self.get_image_anno(video_name, track, frame)

    def __len__(self):
        return self.num
class TrkDataset(Dataset):
    """Training dataset merging several :class:`SubDataset`s.

    ``__getitem__`` returns an augmented template crop and its ground-truth
    box; samples are drawn via a pre-shuffled global pick list.
    """

    def __init__(self,):
        super(TrkDataset, self).__init__()
        # Sanity check: the score-map size implied by search/exemplar sizes
        # and anchor stride must match the configured OUTPUT_SIZE.
        desired_size = (cfg.TRAIN.SEARCH_SIZE - cfg.TRAIN.EXEMPLAR_SIZE) / \
            cfg.ANCHOR.STRIDE + 1 + cfg.TRAIN.BASE_SIZE
        if desired_size != cfg.TRAIN.OUTPUT_SIZE:
            raise Exception('size not match!')
        # create anchor target
        self.anchor_target = AnchorTarget()
        # create sub datasets, each owning a disjoint global index range
        self.all_dataset = []
        start = 0
        self.num = 0
        for name in cfg.DATASET.NAMES:
            subdata_cfg = getattr(cfg.DATASET, name)
            sub_dataset = SubDataset(
                    name,
                    subdata_cfg.ROOT,
                    subdata_cfg.ANNO,
                    subdata_cfg.FRAME_RANGE,
                    subdata_cfg.NUM_USE,
                    start
                )
            start += sub_dataset.num
            self.num += sub_dataset.num_use
            sub_dataset.log()
            self.all_dataset.append(sub_dataset)
        # data augmentation pipelines for template and search crops
        self.template_aug = Augmentation(
                cfg.DATASET.TEMPLATE.SHIFT,
                cfg.DATASET.TEMPLATE.SCALE,
                cfg.DATASET.TEMPLATE.BLUR,
                cfg.DATASET.TEMPLATE.FLIP,
                cfg.DATASET.TEMPLATE.COLOR
            )
        self.search_aug = Augmentation(
                cfg.DATASET.SEARCH.SHIFT,
                cfg.DATASET.SEARCH.SCALE,
                cfg.DATASET.SEARCH.BLUR,
                cfg.DATASET.SEARCH.FLIP,
                cfg.DATASET.SEARCH.COLOR
            )
        # Optionally cap videos per epoch; total length spans all epochs.
        videos_per_epoch = cfg.DATASET.VIDEOS_PER_EPOCH
        self.num = videos_per_epoch if videos_per_epoch > 0 else self.num
        self.num *= cfg.TRAIN.EPOCH
        self.pick = self.shuffle()

    def shuffle(self):
        """Concatenate each sub-dataset's picks, shuffled, until ``num`` long."""
        pick = []
        m = 0
        while m < self.num:
            p = []
            for sub_dataset in self.all_dataset:
                sub_p = sub_dataset.pick
                p += sub_p
            np.random.shuffle(p)
            pick += p
            m = len(pick)
        logger.info("shuffle done!")
        logger.info("dataset length {}".format(self.num))
        return pick[:self.num]

    def _find_dataset(self, index):
        """Map a global index to (sub-dataset, local index)."""
        for dataset in self.all_dataset:
            if dataset.start_idx + dataset.num > index:
                return dataset, index - dataset.start_idx

    def _get_bbox(self, image, shape):
        """Convert an annotation box into a centered, exemplar-scaled corner box."""
        imh, imw = image.shape[:2]
        if len(shape) == 4:
            w, h = shape[2]-shape[0], shape[3]-shape[1]
        else:
            w, h = shape
        # SiamFC-style context padding and rescale to the exemplar size.
        context_amount = 0.5
        exemplar_size = cfg.TRAIN.EXEMPLAR_SIZE
        wc_z = w + context_amount * (w+h)
        hc_z = h + context_amount * (w+h)
        s_z = np.sqrt(wc_z * hc_z)
        scale_z = exemplar_size / s_z
        w = w*scale_z
        h = h*scale_z
        cx, cy = imw//2, imh//2
        bbox = center2corner(Center(cx, cy, w, h))
        return bbox

    def __len__(self):
        return self.num

    def __getitem__(self, index):
        index = self.pick[index]
        dataset, index = self._find_dataset(index)
        gray = cfg.DATASET.GRAY and cfg.DATASET.GRAY > np.random.random()
        neg = cfg.DATASET.NEG and cfg.DATASET.NEG > np.random.random()
        # get one sample; the negative branch is an intentional guard
        # because distractor (negative-pair) support was removed.
        if neg:
            print('please check this suspension due to it was removed negative function (distractor)')
            import pdb
            pdb.set_trace()
            template = dataset.get_random_target(index)
            # NOTE(review): `search` is assigned but never used below.
            search = np.random.choice(self.all_dataset).get_random_target()
        else:
            template = dataset.get_positive_pair(index)
        if not os.path.exists(template[0]):
            print(template[0])
        # get image
        template_image = cv2.imread(template[0])
        # get bounding box
        template_box = self._get_bbox(template_image, template[1])
        # augmentation
        template, _ = self.template_aug(template_image,
                                        template_box,
                                        cfg.TRAIN.EXEMPLAR_SIZE,
                                        gray=gray)
        template = template.transpose((2, 0, 1)).astype(np.float32)
        return {
                'template': template,
                'gt': template_box
                }
| 34.449814 | 107 | 0.555627 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import json
import logging
import sys
import os
import cv2
import numpy as np
from torch.utils.data import Dataset
from pysot.utils.bbox import center2corner, Center
from pysot.datasets.anchor_target import AnchorTarget
from pysot.datasets.augmentation import Augmentation
from pysot.core.config import cfg
logger = logging.getLogger("global")
pyv = sys.version[0]  # first character of sys.version, e.g. '3'
# `pyv` is already a single character, so the original `pyv[0]` was
# redundant double-indexing; compare it directly.
if pyv == '3':
    cv2.ocl.setUseOpenCL(False)
class SubDataset(object):
    """One annotated tracking sub-dataset used by :class:`TrkDataset`.

    Loads a JSON annotation file, drops degenerate boxes and empty
    tracks/videos, and exposes (image path, bbox) pairs addressed by a
    global index range ``[start_idx, start_idx + num)``.
    """

    def __init__(self, name, root, anno, frame_range, num_use, start_idx):
        # Resolve dataset paths relative to this source file's location.
        cur_path = os.path.dirname(os.path.realpath(__file__))
        self.name = name
        self.root = os.path.join(cur_path, '../../', root)
        self.anno = os.path.join(cur_path, '../../', anno)
        self.frame_range = frame_range
        self.num_use = num_use          # -1 means "use every video"
        self.start_idx = start_idx      # global index offset of this sub-dataset
        logger.info("loading " + name)
        with open(self.anno, 'r') as f:
            meta_data = json.load(f)
            meta_data = self._filter_zero(meta_data)
        # Normalize frame keys to sorted int lists; prune empty tracks.
        for video in list(meta_data.keys()):
            for track in meta_data[video]:
                # NOTE(review): `del` below mutates meta_data[video] while
                # it is being iterated - confirm empty-frame tracks cannot
                # occur here, otherwise this raises RuntimeError.
                frames = meta_data[video][track]
                frames = list(map(int,
                              filter(lambda x: x.isdigit(), frames.keys())))
                frames.sort()
                meta_data[video][track]['frames'] = frames
                if len(frames) <= 0:
                    logger.warning("{}/{} has no frames".format(video, track))
                    del meta_data[video][track]
        for video in list(meta_data.keys()):
            if len(meta_data[video]) <= 0:
                logger.warning("{} has no tracks".format(video))
                del meta_data[video]
        self.labels = meta_data
        self.num = len(self.labels)
        self.num_use = self.num if self.num_use == -1 else self.num_use
        self.videos = list(meta_data.keys())
        logger.info("{} loaded".format(self.name))
        # Crop file name pattern: <frame>.<track>.<x|z>.jpg
        self.path_format = '{}.{}.{}.jpg'
        self.pick = self.shuffle()

    def _filter_zero(self, meta_data):
        """Drop annotations whose bounding box has non-positive width/height."""
        meta_data_new = {}
        for video, tracks in meta_data.items():
            new_tracks = {}
            for trk, frames in tracks.items():
                new_frames = {}
                for frm, bbox in frames.items():
                    if not isinstance(bbox, dict):
                        # bbox is either [x1, y1, x2, y2] or [w, h].
                        if len(bbox) == 4:
                            x1, y1, x2, y2 = bbox
                            w, h = x2 - x1, y2 - y1
                        else:
                            w, h = bbox
                        if w <= 0 or h <= 0:
                            continue
                    new_frames[frm] = bbox
                if len(new_frames) > 0:
                    new_tracks[trk] = new_frames
            if len(new_tracks) > 0:
                meta_data_new[video] = new_tracks
        return meta_data_new

    def log(self):
        """Log a one-line summary of this sub-dataset's index range."""
        logger.info("{} start-index {} select [{}/{}] path_format {}".format(
            self.name, self.start_idx, self.num_use,
            self.num, self.path_format))

    def shuffle(self):
        """Return ``num_use`` shuffled global indices (repeated if needed)."""
        lists = list(range(self.start_idx, self.start_idx + self.num))
        pick = []
        while len(pick) < self.num_use:
            np.random.shuffle(lists)
            pick += lists
        return pick[:self.num_use]

    def get_image_anno(self, video, track, frame):
        """Return ``(image_path, bbox)`` for one cropped search ('x') image."""
        frame = "{:06d}".format(frame)
        image_path = os.path.join(self.root, video,
                                  self.path_format.format(frame, track, 'x'))
        image_anno = self.labels[video][track][frame]
        return image_path, image_anno

    def get_positive_pair(self, index):
        """Sample a random template frame of a random track of video ``index``."""
        video_name = self.videos[index]
        video = self.labels[video_name]
        track = np.random.choice(list(video.keys()))
        track_info = video[track]
        frames = track_info['frames']
        template_frame = np.random.randint(0, len(frames))
        template_frame = frames[template_frame]
        return self.get_image_anno(video_name, track, template_frame)

    def get_random_target(self, index=-1):
        """Sample a random frame; a random video is drawn when ``index`` is -1."""
        if index == -1:
            index = np.random.randint(0, self.num)
        video_name = self.videos[index]
        video = self.labels[video_name]
        track = np.random.choice(list(video.keys()))
        track_info = video[track]
        frames = track_info['frames']
        frame = np.random.choice(frames)
        return self.get_image_anno(video_name, track, frame)

    def __len__(self):
        return self.num
class TrkDataset(Dataset):
    """Training dataset merging several :class:`SubDataset`s.

    ``__getitem__`` returns an augmented template crop and its ground-truth
    box; samples are drawn via a pre-shuffled global pick list.
    """

    def __init__(self,):
        super(TrkDataset, self).__init__()
        # Sanity check: the score-map size implied by search/exemplar sizes
        # and anchor stride must match the configured OUTPUT_SIZE.
        desired_size = (cfg.TRAIN.SEARCH_SIZE - cfg.TRAIN.EXEMPLAR_SIZE) / \
            cfg.ANCHOR.STRIDE + 1 + cfg.TRAIN.BASE_SIZE
        if desired_size != cfg.TRAIN.OUTPUT_SIZE:
            raise Exception('size not match!')
        self.anchor_target = AnchorTarget()
        # Create sub datasets, each owning a disjoint global index range.
        self.all_dataset = []
        start = 0
        self.num = 0
        for name in cfg.DATASET.NAMES:
            subdata_cfg = getattr(cfg.DATASET, name)
            sub_dataset = SubDataset(
                    name,
                    subdata_cfg.ROOT,
                    subdata_cfg.ANNO,
                    subdata_cfg.FRAME_RANGE,
                    subdata_cfg.NUM_USE,
                    start
                )
            start += sub_dataset.num
            self.num += sub_dataset.num_use
            sub_dataset.log()
            self.all_dataset.append(sub_dataset)
        # Augmentation pipelines for template and search crops.
        self.template_aug = Augmentation(
                cfg.DATASET.TEMPLATE.SHIFT,
                cfg.DATASET.TEMPLATE.SCALE,
                cfg.DATASET.TEMPLATE.BLUR,
                cfg.DATASET.TEMPLATE.FLIP,
                cfg.DATASET.TEMPLATE.COLOR
            )
        self.search_aug = Augmentation(
                cfg.DATASET.SEARCH.SHIFT,
                cfg.DATASET.SEARCH.SCALE,
                cfg.DATASET.SEARCH.BLUR,
                cfg.DATASET.SEARCH.FLIP,
                cfg.DATASET.SEARCH.COLOR
            )
        # Optionally cap videos per epoch; total length spans all epochs.
        videos_per_epoch = cfg.DATASET.VIDEOS_PER_EPOCH
        self.num = videos_per_epoch if videos_per_epoch > 0 else self.num
        self.num *= cfg.TRAIN.EPOCH
        self.pick = self.shuffle()

    def shuffle(self):
        """Concatenate each sub-dataset's picks, shuffled, until ``num`` long."""
        pick = []
        m = 0
        while m < self.num:
            p = []
            for sub_dataset in self.all_dataset:
                sub_p = sub_dataset.pick
                p += sub_p
            np.random.shuffle(p)
            pick += p
            m = len(pick)
        logger.info("shuffle done!")
        logger.info("dataset length {}".format(self.num))
        return pick[:self.num]

    def _find_dataset(self, index):
        """Map a global index to (sub-dataset, local index)."""
        for dataset in self.all_dataset:
            if dataset.start_idx + dataset.num > index:
                return dataset, index - dataset.start_idx

    def _get_bbox(self, image, shape):
        """Convert an annotation box into a centered, exemplar-scaled corner box."""
        imh, imw = image.shape[:2]
        if len(shape) == 4:
            w, h = shape[2]-shape[0], shape[3]-shape[1]
        else:
            w, h = shape
        # SiamFC-style context padding and rescale to the exemplar size.
        context_amount = 0.5
        exemplar_size = cfg.TRAIN.EXEMPLAR_SIZE
        wc_z = w + context_amount * (w+h)
        hc_z = h + context_amount * (w+h)
        s_z = np.sqrt(wc_z * hc_z)
        scale_z = exemplar_size / s_z
        w = w*scale_z
        h = h*scale_z
        cx, cy = imw//2, imh//2
        bbox = center2corner(Center(cx, cy, w, h))
        return bbox

    def __len__(self):
        return self.num

    def __getitem__(self, index):
        index = self.pick[index]
        dataset, index = self._find_dataset(index)
        gray = cfg.DATASET.GRAY and cfg.DATASET.GRAY > np.random.random()
        neg = cfg.DATASET.NEG and cfg.DATASET.NEG > np.random.random()
        # The negative branch is an intentional breakpoint guard because
        # distractor (negative-pair) support was removed.
        if neg:
            print('please check this suspension due to it was removed negative function (distractor)')
            import pdb
            pdb.set_trace()
            template = dataset.get_random_target(index)
            # NOTE(review): `search` is assigned but never used below.
            search = np.random.choice(self.all_dataset).get_random_target()
        else:
            template = dataset.get_positive_pair(index)
        if not os.path.exists(template[0]):
            print(template[0])
        template_image = cv2.imread(template[0])
        template_box = self._get_bbox(template_image, template[1])
        # Augment the template crop; result is CHW float32.
        template, _ = self.template_aug(template_image,
                                        template_box,
                                        cfg.TRAIN.EXEMPLAR_SIZE,
                                        gray=gray)
        template = template.transpose((2, 0, 1)).astype(np.float32)
        return {
                'template': template,
                'gt': template_box
                }
| true | true |
f72addc1225c0aa169e2bb36069de6d370480522 | 12,614 | py | Python | src/gluonnlp/data/utils.py | yifeim/gluon-nlp | ea30d3399d87404b731d513535af9a31a5672799 | [
"Apache-2.0"
] | null | null | null | src/gluonnlp/data/utils.py | yifeim/gluon-nlp | ea30d3399d87404b731d513535af9a31a5672799 | [
"Apache-2.0"
] | 2 | 2019-02-13T09:10:26.000Z | 2019-02-20T02:59:43.000Z | src/gluonnlp/data/utils.py | yifeim/gluon-nlp | ea30d3399d87404b731d513535af9a31a5672799 | [
"Apache-2.0"
] | 1 | 2019-02-13T03:07:06.000Z | 2019-02-13T03:07:06.000Z | # coding: utf-8
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Utility classes and functions. They help organize and keep statistics of datasets."""
from __future__ import absolute_import
from __future__ import print_function
__all__ = [
'Counter', 'count_tokens', 'concat_sequence', 'slice_sequence', 'train_valid_split',
'line_splitter', 'whitespace_splitter', 'Splitter'
]
import os
import collections
import zipfile
import tarfile
import numpy as np
from mxnet.gluon.data import SimpleDataset
from mxnet.gluon.utils import _get_repo_url, download, check_sha1
from .. import _constants as C
class Counter(collections.Counter):  # pylint: disable=abstract-method
    """Token-frequency counter with vocabulary-pruning support."""

    def discard(self, min_freq, unknown_token):
        """Return a new Counter where every token counted fewer than
        `min_freq` times is removed and its count is accumulated under
        `unknown_token`.

        Parameters
        ----------
        min_freq: int
            Tokens whose frequency is below min_freq are folded into
            `unknown_token` in the returned Counter.
        unknown_token: str
            The representation for any unknown token.

        Returns
        -------
        A new Counter instance; `self` is left unmodified.

        Examples
        --------
        >>> a = gluonnlp.data.Counter({'a': 10, 'b': 1, 'c': 1})
        >>> a.discard(3, '<unk>')
        Counter({'a': 10, '<unk>': 2})
        """
        pruned = Counter({tok: cnt for tok, cnt in self.items() if cnt >= min_freq})
        dropped = sum(cnt for cnt in self.values() if cnt < min_freq)
        # Always record the unknown entry, even when nothing was dropped,
        # matching the original accumulation semantics.
        pruned[unknown_token] = pruned.get(unknown_token, 0) + dropped
        return pruned
class DefaultLookupDict(dict):
    """Dictionary whose lookups fall back to a fixed default value.

    The default is set once in the constructor and returned by
    ``__getitem__`` for any missing key.
    """

    def __init__(self, default, d=None):
        super(DefaultLookupDict, self).__init__(d if d else {})
        self._default = default

    def __getitem__(self, k):
        return self.get(k, self._default)
def count_tokens(tokens, to_lower=False, counter=None):
    r"""Count the tokens in the given token list.

    Parameters
    ----------
    tokens : list of str
        A source list of tokens.
    to_lower : bool, default False
        Whether to lower-case every token before counting.
    counter : Counter or None, default None
        The Counter instance to be updated with the counts of `tokens`. If
        None, return a new Counter instance counting tokens from `tokens`.

    Returns
    -------
    The `counter` Counter instance after being updated with the token
    counts of `tokens`. If `counter` is None, return a new Counter
    instance counting tokens from `tokens`.

    Examples
    --------
    >>> import re
    >>> source_str = ' Life is great ! \n life is good . \n'
    >>> source_str_tokens = filter(None, re.split(' |\n', source_str))
    >>> gluonnlp.data.count_tokens(source_str_tokens)
    Counter({'is': 2, 'Life': 1, 'great': 1, '!': 1, 'life': 1, 'good': 1, '.': 1})
    """
    if to_lower:
        tokens = [t.lower() for t in tokens]
    if counter is None:
        return Counter(tokens)
    counter.update(tokens)
    return counter
def concat_sequence(sequences):
    """Flatten sequences of tokens into a single list of tokens.

    Parameters
    ----------
    sequences : list of list of object
        Sequences of tokens, each an iterable of tokens.

    Returns
    -------
    list
        One flat list holding every truthy token; falsy tokens
        (e.g. empty strings) are dropped.
    """
    flat = []
    for seq in sequences:
        flat.extend(tok for tok in seq if tok)
    return flat
def slice_sequence(sequence, length, pad_last=False, pad_val=C.PAD_TOKEN, overlap=0):
    """Cut a flat token list into fixed-length, possibly overlapping samples.

    Parameters
    ----------
    sequence : list of object
        A flat list of tokens.
    length : int
        Length of every produced sample.
    pad_last : bool, default False
        Pad the tail with `pad_val` so no tokens are dropped; otherwise a
        trailing fragment shorter than `length` is discarded.
    pad_val : object, default C.PAD_TOKEN
        Padding token used when `pad_last` is True. Should be the same
        type as the tokens.
    overlap : int, default 0
        Number of tokens each sample shares with the next one.

    Returns
    -------
    list of list of object
        The sliced samples, each of length `length`.

    Raises
    ------
    ValueError
        If `length` is not strictly greater than `overlap`.
    """
    if length <= overlap:
        raise ValueError('length needs to be larger than overlap')

    if pad_last:
        padding = _slice_pad_length(len(sequence), length, overlap)
        sequence = sequence + [pad_val] * padding
    stride = length - overlap
    num_samples = (len(sequence) - length) // stride + 1
    samples = []
    for i in range(num_samples):
        start = i * stride
        samples.append(sequence[start:start + length])
    return samples
def _slice_pad_length(num_items, length, overlap=0):
"""Calculate the padding length needed for sliced samples in order not to discard data.
Parameters
----------
num_items : int
Number of items in dataset before collating.
length : int
The length of each of the samples.
overlap : int, default 0
The extra number of items in current sample that should overlap with the
next sample.
Returns
-------
Length of paddings.
"""
if length <= overlap:
raise ValueError('length needs to be larger than overlap')
step = length-overlap
span = num_items-length
residual = span % step
if residual:
return step - residual
else:
return 0
# SHA-1 digests of the pre-built, downloadable vocabulary files, keyed by
# dataset/model name. `short_hash` uses an 8-character prefix of these to
# build the download file name, and `_load_pretrained_vocab` uses the full
# digest to verify the integrity of cached/downloaded files.
_vocab_sha1 = {'wikitext-2': 'be36dc5238c2e7d69720881647ab72eb506d0131',
               'gbw': 'ebb1a287ca14d8fa6f167c3a779e5e7ed63ac69f',
               'WMT2014_src': '230ebb817b1d86950d71e2e765f192a4e4f34415',
               'WMT2014_tgt': '230ebb817b1d86950d71e2e765f192a4e4f34415',
               'book_corpus_wiki_en_cased': '2d62af22535ed51f35cc8e2abb607723c89c2636',
               'book_corpus_wiki_en_uncased': 'a66073971aa0b1a262453fe51342e57166a8abcf',
               'wiki_multilingual_cased': '71bb9e248dc75dce9227d3c8c16fde3993588b9e',
               'wiki_cn': 'a1e06f8e39ae51ab8a92b8458e6a658b8b1f72bf',
               'wiki_multilingual': '2b2514cc539047b9179e9d98a4e68c36db05c97a'}

# URL template for fetching a zipped vocabulary file from the model repo.
_url_format = '{repo_url}gluon/dataset/vocab/{file_name}.zip'
def train_valid_split(dataset, valid_ratio=0.05):
    """Split the dataset into training and validation sets.

    The split is random: indices are shuffled with numpy's global RNG
    before being divided.

    Parameters
    ----------
    dataset : list
        A list of training samples.
    valid_ratio : float, default 0.05
        Proportion of training samples to use for the validation set,
        in the range [0, 1].

    Returns
    -------
    train : SimpleDataset
    valid : SimpleDataset

    Raises
    ------
    ValueError
        If `valid_ratio` is outside [0, 1].
    """
    if not 0.0 <= valid_ratio <= 1.0:
        raise ValueError('valid_ratio should be in [0, 1]')

    num_train = len(dataset)
    num_valid = np.ceil(num_train * valid_ratio).astype('int')
    indices = np.arange(num_train)
    np.random.shuffle(indices)
    # Slice the shuffled index array directly instead of re-indexing it
    # element by element; first `num_valid` indices form the validation set.
    valid = SimpleDataset([dataset[i] for i in indices[:num_valid]])
    train = SimpleDataset([dataset[i] for i in indices[num_valid:]])
    return train, valid
def short_hash(name):
    """Return the first eight hex characters of the vocabulary SHA-1 for `name`.

    Raises ValueError when no pre-built vocabulary is registered for `name`.
    """
    sha1 = _vocab_sha1.get(name)
    if sha1 is None:
        raise ValueError('Vocabulary for {name} is not available.'.format(name=name))
    return sha1[:8]
def _load_pretrained_vocab(name, root=os.path.join('~', '.mxnet', 'models'), cls=None):
    """Load the accompanying vocabulary object for pre-trained model.

    Parameters
    ----------
    name : str
        Name of the vocabulary, usually the name of the dataset.
    root : str, default '~/.mxnet/models'
        Location for keeping the model parameters.
    cls : nlp.Vocab or nlp.vocab.BERTVocab, default nlp.Vocab

    Returns
    -------
    Vocab or nlp.bert.BERTVocab
        Loaded vocabulary object for the pre-trained model.

    Raises
    ------
    ValueError
        If the freshly downloaded file still fails the SHA-1 check.
    """
    file_name = '{name}-{short_hash}'.format(name=name,
                                             short_hash=short_hash(name))
    root = os.path.expanduser(root)
    file_path = os.path.join(root, file_name+'.vocab')
    sha1_hash = _vocab_sha1[name]
    # Fast path: reuse a cached copy when its checksum still matches.
    if os.path.exists(file_path):
        if check_sha1(file_path, sha1_hash):
            return _load_vocab_file(file_path, cls)
        else:
            print('Detected mismatch in the content of model vocab file. Downloading again.')
    else:
        print('Vocab file is not found. Downloading.')
    if not os.path.exists(root):
        os.makedirs(root)
    # Download the zipped vocab, unpack it into `root`, then delete the
    # archive; the extracted file is expected to land at `file_path`.
    zip_file_path = os.path.join(root, file_name+'.zip')
    repo_url = _get_repo_url()
    if repo_url[-1] != '/':
        repo_url = repo_url + '/'
    download(_url_format.format(repo_url=repo_url, file_name=file_name),
             path=zip_file_path,
             overwrite=True)
    with zipfile.ZipFile(zip_file_path) as zf:
        zf.extractall(root)
    os.remove(zip_file_path)
    # Verify the freshly extracted file before handing it to the caller.
    if check_sha1(file_path, sha1_hash):
        return _load_vocab_file(file_path, cls)
    else:
        raise ValueError('Downloaded file has different hash. Please try again.')
def _load_vocab_file(file_path, cls):
with open(file_path, 'r') as f:
if cls is None:
from ..vocab import Vocab
cls = Vocab
return cls.from_json(f.read())
def _get_home_dir():
"""Get home directory for storing datasets/models/pre-trained word embeddings"""
_home_dir = os.environ.get('MXNET_HOME', os.path.join('~', '.mxnet'))
# expand ~ to actual path
_home_dir = os.path.expanduser(_home_dir)
return _home_dir
def _extract_archive(file, target_dir):
"""Extract archive file
Parameters
----------
file : str
Absolute path of the archive file.
target_dir : str
Target directory of the archive to be uncompressed
"""
if file.endswith('.gz') or file.endswith('.tar') or file.endswith('.tgz'):
archive = tarfile.open(file, 'r')
elif file.endswith('.zip'):
archive = zipfile.ZipFile(file, 'r')
else:
raise Exception('Unrecognized file type: ' + file)
archive.extractall(path=target_dir)
archive.close()
def line_splitter(s):
    """Split a string into its lines.

    Parameters
    ----------
    s : str
        The string to be split.

    Returns
    --------
    List[str]
        The lines of `s`, as produced by ``str.splitlines``.
    """
    lines = s.splitlines()
    return lines
def whitespace_splitter(s):
    """Split a string on runs of whitespace (space, tab, newline, etc.).

    Parameters
    ----------
    s : str
        The string to be split.

    Returns
    --------
    List[str]
        The non-empty tokens of `s`, as produced by ``str.split()``.
    """
    tokens = s.split()
    return tokens
class Splitter(object):
    """A callable that splits strings on a configured separator.

    Parameters
    ----------
    separator : str
        Separator to split on. None means "split on runs of whitespace",
        matching the behavior of ``str.split()``.
    """

    def __init__(self, separator=None):
        self._separator = separator

    def __call__(self, s):
        """Return the pieces of `s` delimited by the configured separator.

        Parameters
        ----------
        s : str
            The string to be split.

        Returns
        --------
        List[str]
            The pieces, as returned by ``s.split(separator)``.
        """
        pieces = s.split(self._separator)
        return pieces
| 30.616505 | 98 | 0.641589 |
from __future__ import absolute_import
from __future__ import print_function
__all__ = [
'Counter', 'count_tokens', 'concat_sequence', 'slice_sequence', 'train_valid_split',
'line_splitter', 'whitespace_splitter', 'Splitter'
]
import os
import collections
import zipfile
import tarfile
import numpy as np
from mxnet.gluon.data import SimpleDataset
from mxnet.gluon.utils import _get_repo_url, download, check_sha1
from .. import _constants as C
class Counter(collections.Counter):
def discard(self, min_freq, unknown_token):
freq = 0
ret = Counter({})
for token, count in self.items():
if count < min_freq:
freq += count
else:
ret[token] = count
ret[unknown_token] = ret.get(unknown_token, 0) + freq
return ret
class DefaultLookupDict(dict):
def __init__(self, default, d=None):
if d:
super(DefaultLookupDict, self).__init__(d)
else:
super(DefaultLookupDict, self).__init__()
self._default = default
def __getitem__(self, k):
return self.get(k, self._default)
def count_tokens(tokens, to_lower=False, counter=None):
if to_lower:
tokens = [t.lower() for t in tokens]
if counter is None:
return Counter(tokens)
else:
counter.update(tokens)
return counter
def concat_sequence(sequences):
return [token for seq in sequences for token in seq if token]
def slice_sequence(sequence, length, pad_last=False, pad_val=C.PAD_TOKEN, overlap=0):
if length <= overlap:
raise ValueError('length needs to be larger than overlap')
if pad_last:
pad_len = _slice_pad_length(len(sequence), length, overlap)
sequence = sequence + [pad_val] * pad_len
num_samples = (len(sequence)-length) // (length-overlap) + 1
return [sequence[i*(length-overlap):((i+1)*length-i*overlap)] for i in range(num_samples)]
def _slice_pad_length(num_items, length, overlap=0):
if length <= overlap:
raise ValueError('length needs to be larger than overlap')
step = length-overlap
span = num_items-length
residual = span % step
if residual:
return step - residual
else:
return 0
_vocab_sha1 = {'wikitext-2': 'be36dc5238c2e7d69720881647ab72eb506d0131',
'gbw': 'ebb1a287ca14d8fa6f167c3a779e5e7ed63ac69f',
'WMT2014_src': '230ebb817b1d86950d71e2e765f192a4e4f34415',
'WMT2014_tgt': '230ebb817b1d86950d71e2e765f192a4e4f34415',
'book_corpus_wiki_en_cased': '2d62af22535ed51f35cc8e2abb607723c89c2636',
'book_corpus_wiki_en_uncased': 'a66073971aa0b1a262453fe51342e57166a8abcf',
'wiki_multilingual_cased': '71bb9e248dc75dce9227d3c8c16fde3993588b9e',
'wiki_cn': 'a1e06f8e39ae51ab8a92b8458e6a658b8b1f72bf',
'wiki_multilingual': '2b2514cc539047b9179e9d98a4e68c36db05c97a'}
_url_format = '{repo_url}gluon/dataset/vocab/{file_name}.zip'
def train_valid_split(dataset, valid_ratio=0.05):
if not 0.0 <= valid_ratio <= 1.0:
raise ValueError('valid_ratio should be in [0, 1]')
num_train = len(dataset)
num_valid = np.ceil(num_train * valid_ratio).astype('int')
indices = np.arange(num_train)
np.random.shuffle(indices)
valid = SimpleDataset([dataset[indices[i]] for i in range(num_valid)])
train = SimpleDataset([dataset[indices[i + num_valid]] for i in range(num_train - num_valid)])
return train, valid
def short_hash(name):
if name not in _vocab_sha1:
raise ValueError('Vocabulary for {name} is not available.'.format(name=name))
return _vocab_sha1[name][:8]
def _load_pretrained_vocab(name, root=os.path.join('~', '.mxnet', 'models'), cls=None):
file_name = '{name}-{short_hash}'.format(name=name,
short_hash=short_hash(name))
root = os.path.expanduser(root)
file_path = os.path.join(root, file_name+'.vocab')
sha1_hash = _vocab_sha1[name]
if os.path.exists(file_path):
if check_sha1(file_path, sha1_hash):
return _load_vocab_file(file_path, cls)
else:
print('Detected mismatch in the content of model vocab file. Downloading again.')
else:
print('Vocab file is not found. Downloading.')
if not os.path.exists(root):
os.makedirs(root)
zip_file_path = os.path.join(root, file_name+'.zip')
repo_url = _get_repo_url()
if repo_url[-1] != '/':
repo_url = repo_url + '/'
download(_url_format.format(repo_url=repo_url, file_name=file_name),
path=zip_file_path,
overwrite=True)
with zipfile.ZipFile(zip_file_path) as zf:
zf.extractall(root)
os.remove(zip_file_path)
if check_sha1(file_path, sha1_hash):
return _load_vocab_file(file_path, cls)
else:
raise ValueError('Downloaded file has different hash. Please try again.')
def _load_vocab_file(file_path, cls):
with open(file_path, 'r') as f:
if cls is None:
from ..vocab import Vocab
cls = Vocab
return cls.from_json(f.read())
def _get_home_dir():
_home_dir = os.environ.get('MXNET_HOME', os.path.join('~', '.mxnet'))
_home_dir = os.path.expanduser(_home_dir)
return _home_dir
def _extract_archive(file, target_dir):
if file.endswith('.gz') or file.endswith('.tar') or file.endswith('.tgz'):
archive = tarfile.open(file, 'r')
elif file.endswith('.zip'):
archive = zipfile.ZipFile(file, 'r')
else:
raise Exception('Unrecognized file type: ' + file)
archive.extractall(path=target_dir)
archive.close()
def line_splitter(s):
return s.splitlines()
def whitespace_splitter(s):
return s.split()
class Splitter(object):
def __init__(self, separator=None):
self._separator = separator
def __call__(self, s):
return s.split(self._separator)
| true | true |
f72addc1825c766c27b5ea9433ca8b1b439ac3e5 | 33,419 | py | Python | cirq/ops/common_gates.py | philiptmassey/Cirq | b8b457c2fc484d76bf8a82a73f6ecc11756229a6 | [
"Apache-2.0"
] | null | null | null | cirq/ops/common_gates.py | philiptmassey/Cirq | b8b457c2fc484d76bf8a82a73f6ecc11756229a6 | [
"Apache-2.0"
] | null | null | null | cirq/ops/common_gates.py | philiptmassey/Cirq | b8b457c2fc484d76bf8a82a73f6ecc11756229a6 | [
"Apache-2.0"
] | null | null | null | # Copyright 2018 The Cirq Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Quantum gates that are commonly used in the literature.
This module creates Gate instances for the following gates:
X,Y,Z: Pauli gates.
H,S: Clifford gates.
T: A non-Clifford gate.
CZ: Controlled phase gate.
CNOT: Controlled not gate.
SWAP: the swap gate.
ISWAP: a swap gate with a phase on the swapped subspace.
Each of these are implemented as EigenGates, which means that they can be
raised to a power (i.e. cirq.H**0.5). See the definition in EigenGate.
In addition MeasurementGate is defined and convenience methods for
measurements are provided
measure
measure_each
"""
from typing import (
Any, Callable, cast, Iterable, List, Optional, Tuple, Union,
)
import numpy as np
from cirq import linalg, protocols, value
from cirq.ops import gate_features, eigen_gate, raw_types, gate_operation
from cirq.type_workarounds import NotImplementedType
# Note: avoiding 'from/as' because it creates a circular dependency in python 2.
import cirq.ops.phased_x_gate
class XPowGate(eigen_gate.EigenGate,
               gate_features.SingleQubitGate):
    """A gate that rotates around the X axis of the Bloch sphere.

    The unitary matrix of ``XPowGate(exponent=t)`` is:

        [[g·c, -i·g·s],
         [-i·g·s, g·c]]

    where:

        c = cos(π·t/2)
        s = sin(π·t/2)
        g = exp(i·π·t/2).

    Note in particular that this gate has a global phase factor of
    e^{i·π·t/2} vs the traditionally defined rotation matrices
    about the Pauli X axis. See `cirq.Rx` for rotations without the global
    phase. The global phase factor can be adjusted by using the `global_shift`
    parameter when initializing.

    `cirq.X`, the Pauli X gate, is an instance of this gate at exponent=1.
    """

    def _apply_unitary_(self, args: protocols.ApplyUnitaryArgs
                        ) -> Optional[np.ndarray]:
        # Fast path only for exponent 1 (a plain bit flip); other powers
        # fall back to the eigen-decomposition machinery.
        if self._exponent != 1:
            return None
        zero = args.subspace_index(0)
        one = args.subspace_index(1)
        # Write the swapped |0>/|1> amplitudes into the scratch buffer.
        args.available_buffer[zero] = args.target_tensor[one]
        args.available_buffer[one] = args.target_tensor[zero]
        # Apply the global phase implied by `global_shift`, if any.
        p = 1j**(2 * self._exponent * self._global_shift)
        if p != 1:
            args.available_buffer *= p
        return args.available_buffer

    def _eigen_components(self):
        # Projectors onto the +1 (half-turns 0) and -1 (half-turns 1)
        # eigenspaces of the Pauli X matrix.
        return [
            (0, np.array([[0.5, 0.5], [0.5, 0.5]])),
            (1, np.array([[0.5, -0.5], [-0.5, 0.5]])),
        ]

    def _circuit_diagram_info_(self, args: protocols.CircuitDiagramInfoArgs
                               ) -> Union[str, protocols.CircuitDiagramInfo]:
        # global_shift == -0.5 corresponds to the Rx(rads) convention.
        if self._global_shift == -0.5:
            return _rads_func_symbol(
                'Rx',
                args,
                self._diagram_exponent(args, ignore_global_phase=False))
        return protocols.CircuitDiagramInfo(
            wire_symbols=('X',),
            exponent=self._diagram_exponent(args))

    def _qasm_(self,
               args: protocols.QasmArgs,
               qubits: Tuple[raw_types.QubitId, ...]) -> Optional[str]:
        args.validate_version('2.0')
        if self._exponent == 1:
            return args.format('x {0};\n', qubits[0])
        else:
            return args.format('rx({0:half_turns}) {1};\n',
                               self._exponent, qubits[0])

    def _phase_by_(self, phase_turns, qubit_index):
        """See `cirq.SupportsPhase`."""
        return cirq.ops.phased_x_gate.PhasedXPowGate(
            exponent=self._exponent,
            phase_exponent=phase_turns * 2)

    def __str__(self) -> str:
        if self._exponent == 1:
            return 'X'
        return 'X**{!r}'.format(self._exponent)

    def __repr__(self) -> str:
        if self._global_shift == -0.5 and not protocols.is_parameterized(self):
            return 'cirq.Rx(np.pi*{!r})'.format(self._exponent)
        if self._global_shift == 0:
            if self._exponent == 1:
                return 'cirq.X'
            return '(cirq.X**{!r})'.format(self._exponent)
        return (
            'cirq.XPowGate(exponent={!r}, '
            'global_shift={!r})'
        ).format(self._exponent, self._global_shift)
class YPowGate(eigen_gate.EigenGate,
               gate_features.SingleQubitGate):
    """A gate that rotates around the Y axis of the Bloch sphere.

    The unitary matrix of ``YPowGate(exponent=t)`` is:

        [[g·c, g·s],
         [-g·s, g·c]]

    where:

        c = cos(π·t/2)
        s = sin(π·t/2)
        g = exp(i·π·t/2).

    Note in particular that this gate has a global phase factor of
    e^{i·π·t/2} vs the traditionally defined rotation matrices
    about the Pauli Y axis. See `cirq.Ry` for rotations without the global
    phase. The global phase factor can be adjusted by using the `global_shift`
    parameter when initializing.

    `cirq.Y`, the Pauli Y gate, is an instance of this gate at exponent=1.
    """

    def _eigen_components(self):
        # Projectors onto the +1 (half-turns 0) and -1 (half-turns 1)
        # eigenspaces of the Pauli Y matrix.
        return [
            (0, np.array([[0.5, -0.5j], [0.5j, 0.5]])),
            (1, np.array([[0.5, 0.5j], [-0.5j, 0.5]])),
        ]

    def _circuit_diagram_info_(self, args: protocols.CircuitDiagramInfoArgs
                               ) -> Union[str, protocols.CircuitDiagramInfo]:
        # global_shift == -0.5 corresponds to the Ry(rads) convention.
        if self._global_shift == -0.5:
            return _rads_func_symbol(
                'Ry',
                args,
                self._diagram_exponent(args, ignore_global_phase=False))
        return protocols.CircuitDiagramInfo(
            wire_symbols=('Y',),
            exponent=self._diagram_exponent(args))

    def _qasm_(self,
               args: protocols.QasmArgs,
               qubits: Tuple[raw_types.QubitId, ...]) -> Optional[str]:
        args.validate_version('2.0')
        if self._exponent == 1:
            return args.format('y {0};\n', qubits[0])
        else:
            return args.format('ry({0:half_turns}) {1};\n',
                               self._exponent, qubits[0])

    def _phase_by_(self, phase_turns, qubit_index):
        """See `cirq.SupportsPhase`."""
        # Y is X phased by a quarter turn, hence the extra 0.5 offset.
        return cirq.ops.phased_x_gate.PhasedXPowGate(
            exponent=self._exponent,
            phase_exponent=0.5 + phase_turns * 2)

    def __str__(self) -> str:
        if self._exponent == 1:
            return 'Y'
        return 'Y**{!r}'.format(self._exponent)

    def __repr__(self) -> str:
        if self._global_shift == -0.5 and not protocols.is_parameterized(self):
            return 'cirq.Ry(np.pi*{!r})'.format(self._exponent)
        if self._global_shift == 0:
            if self._exponent == 1:
                return 'cirq.Y'
            return '(cirq.Y**{!r})'.format(self._exponent)
        return (
            'cirq.YPowGate(exponent={!r}, '
            'global_shift={!r})'
        ).format(self._exponent, self._global_shift)
class ZPowGate(eigen_gate.EigenGate,
               gate_features.SingleQubitGate):
    """A gate that rotates around the Z axis of the Bloch sphere.

    The unitary matrix of ``ZPowGate(exponent=t)`` is:

        [[1, 0],
         [0, g]]

    where:

        g = exp(i·π·t).

    Note in particular that this gate has a global phase factor of
    e^{i·π·t/2} vs the traditionally defined rotation matrices
    about the Pauli Z axis. See `cirq.Rz` for rotations without the global
    phase. The global phase factor can be adjusted by using the `global_shift`
    parameter when initializing.

    `cirq.Z`, the Pauli Z gate, is an instance of this gate at exponent=1.
    """

    def _apply_unitary_(self, args: protocols.ApplyUnitaryArgs
                        ) -> Optional[np.ndarray]:
        # Z**t is diagonal, so it is applied in place by scaling the |1>
        # amplitudes; parameterized exponents have no concrete matrix.
        if protocols.is_parameterized(self):
            return None
        one = args.subspace_index(1)
        c = 1j**(self._exponent * 2)
        args.target_tensor[one] *= c
        # Apply the global phase implied by `global_shift`, if any.
        p = 1j**(2 * self._exponent * self._global_shift)
        if p != 1:
            args.target_tensor *= p
        return args.target_tensor

    def _eigen_components(self):
        # Projectors onto |0> (half-turns 0) and |1> (half-turns 1).
        return [
            (0, np.diag([1, 0])),
            (1, np.diag([0, 1])),
        ]

    def _phase_by_(self, phase_turns: float, qubit_index: int):
        # Z rotations commute with phasing about the Z axis.
        return self

    def _circuit_diagram_info_(self, args: protocols.CircuitDiagramInfoArgs
                               ) -> Union[str, protocols.CircuitDiagramInfo]:
        # global_shift == -0.5 corresponds to the Rz(rads) convention.
        if self._global_shift == -0.5:
            return _rads_func_symbol(
                'Rz',
                args,
                self._diagram_exponent(args, ignore_global_phase=False))
        e = self._diagram_exponent(args)
        # Quarter and half turns get the conventional T and S symbols.
        if e in [-0.25, 0.25]:
            return protocols.CircuitDiagramInfo(
                wire_symbols=('T',),
                exponent=cast(float, e) * 4)
        if e in [-0.5, 0.5]:
            return protocols.CircuitDiagramInfo(
                wire_symbols=('S',),
                exponent=cast(float, e) * 2)
        return protocols.CircuitDiagramInfo(
            wire_symbols=('Z',),
            exponent=e)

    def _qasm_(self,
               args: protocols.QasmArgs,
               qubits: Tuple[raw_types.QubitId, ...]) -> Optional[str]:
        args.validate_version('2.0')
        if self._exponent == 1:
            return args.format('z {0};\n', qubits[0])
        else:
            return args.format('rz({0:half_turns}) {1};\n',
                               self._exponent, qubits[0])

    def __str__(self) -> str:
        if self._exponent == 0.25:
            return 'T'
        if self._exponent == -0.25:
            return 'T**-1'
        if self._exponent == 0.5:
            return 'S'
        if self._exponent == -0.5:
            return 'S**-1'
        if self._exponent == 1:
            return 'Z'
        return 'Z**{}'.format(self._exponent)

    def __repr__(self) -> str:
        if self._global_shift == -0.5 and not protocols.is_parameterized(self):
            return 'cirq.Rz(np.pi*{!r})'.format(self._exponent)
        if self._global_shift == 0:
            if self._exponent == 0.25:
                return 'cirq.T'
            if self._exponent == -0.25:
                return '(cirq.T**-1)'
            if self._exponent == 0.5:
                return 'cirq.S'
            if self._exponent == -0.5:
                return '(cirq.S**-1)'
            if self._exponent == 1:
                return 'cirq.Z'
            return '(cirq.Z**{!r})'.format(self._exponent)
        return (
            'cirq.ZPowGate(exponent={!r}, '
            'global_shift={!r})'
        ).format(self._exponent, self._global_shift)
@value.value_equality
class MeasurementGate(raw_types.Gate):
    """A gate that measures qubits in the computational basis.

    The measurement gate contains a key that is used to identify results
    of measurements.
    """

    def __init__(self,
                 key: str = '',
                 invert_mask: Tuple[bool, ...] = ()) -> None:
        """
        Args:
            key: The string key of the measurement.
            invert_mask: A list of values indicating whether the corresponding
                qubits should be flipped. The list's length must not be longer
                than the number of qubits, but it is permitted to be shorter.
                Qubits with indices past the end of the mask are not flipped.
        """
        self.key = key
        self.invert_mask = invert_mask or ()

    @staticmethod
    def is_measurement(op: Union[raw_types.Gate, raw_types.Operation]) -> bool:
        # Accepts either a bare gate or a GateOperation wrapping one.
        if isinstance(op, MeasurementGate):
            return True
        if (isinstance(op, gate_operation.GateOperation) and
                isinstance(op.gate, MeasurementGate)):
            return True
        return False

    def with_bits_flipped(self, *bit_positions: int) -> 'MeasurementGate':
        """Toggles whether or not the measurement inverts various outputs."""
        old_mask = self.invert_mask or ()
        # Extend the mask just far enough to cover the highest toggled bit.
        n = max(len(old_mask) - 1, *bit_positions) + 1
        new_mask = [k < len(old_mask) and old_mask[k] for k in range(n)]
        for b in bit_positions:
            new_mask[b] = not new_mask[b]
        return MeasurementGate(key=self.key, invert_mask=tuple(new_mask))

    def validate_args(self, qubits):
        # The mask may be shorter than the qubit list, but never longer.
        if (self.invert_mask is not None and
                len(self.invert_mask) > len(qubits)):
            raise ValueError('len(invert_mask) > len(qubits)')

    def _circuit_diagram_info_(self, args: protocols.CircuitDiagramInfoArgs
                               ) -> protocols.CircuitDiagramInfo:
        n = (max(1, len(self.invert_mask))
             if args.known_qubit_count is None
             else args.known_qubit_count)
        symbols = ['M'] * n
        # Show which output bits are negated.
        if self.invert_mask:
            for i, b in enumerate(self.invert_mask):
                if b:
                    symbols[i] = '!M'
        # Mention the measurement key, unless it is the default one.
        if (not args.known_qubits or
                self.key != _default_measurement_key(args.known_qubits)):
            symbols[0] += "('{}')".format(self.key)
        return protocols.CircuitDiagramInfo(tuple(symbols))

    def _qasm_(self,
               args: protocols.QasmArgs,
               qubits: Tuple[raw_types.QubitId, ...]) -> Optional[str]:
        args.validate_version('2.0')
        invert_mask = self.invert_mask
        # Pad the mask with False so zip covers every measured qubit.
        if len(invert_mask) < len(qubits):
            invert_mask = (invert_mask
                           + (False,) * (len(qubits) - len(invert_mask)))
        lines = []
        for i, (qubit, inv) in enumerate(zip(qubits, invert_mask)):
            if inv:
                lines.append(args.format(
                    'x {0}; // Invert the following measurement\n', qubit))
            lines.append(args.format('measure {0} -> {1:meas}[{2}];\n',
                                     qubit, self.key, i))
        return ''.join(lines)

    def __repr__(self):
        return 'cirq.MeasurementGate({}, {})'.format(repr(self.key),
                                                     repr(self.invert_mask))

    def _value_equality_values_(self):
        # Equality and hashing are derived from (key, invert_mask) via the
        # `value.value_equality` class decorator.
        return self.key, self.invert_mask
def _default_measurement_key(qubits: Iterable[raw_types.QubitId]) -> str:
    """Comma-join the string forms of the measured qubits."""
    names = [str(q) for q in qubits]
    return ','.join(names)
def measure(*qubits: raw_types.QubitId,
            key: Optional[str] = None,
            invert_mask: Tuple[bool, ...] = ()
            ) -> gate_operation.GateOperation:
    """Returns a single MeasurementGate applied to all the given qubits.

    The qubits are measured in the computational basis.

    Args:
        *qubits: The qubits that the measurement gate should measure.
        key: The string key of the measurement. If this is None, it defaults
            to a comma-separated list of the target qubits' str values.
        invert_mask: A list of Truthy or Falsey values indicating whether
            the corresponding qubits should be flipped. None indicates no
            inverting should be done.

    Returns:
        An operation targeting the given qubits with a measurement.

    Raises:
        ValueError if the qubits are not instances of QubitId.
    """
    for qubit in qubits:
        if isinstance(qubit, np.ndarray):
            # A state vector is not a qubit; fail fast with a clear message
            # (fixed the previously garbled "was called a numpy ndarray").
            raise ValueError(
                'measure() was called with a numpy ndarray. Perhaps you '
                'meant to call measure_state_vector on numpy array?'
            )
        elif not isinstance(qubit, raw_types.QubitId):
            raise ValueError(
                'measure() was called with type different than QubitId.')

    if key is None:
        key = _default_measurement_key(qubits)
    return MeasurementGate(key, invert_mask).on(*qubits)
def measure_each(*qubits: raw_types.QubitId,
                 key_func: Callable[[raw_types.QubitId], str] = str
                 ) -> List[gate_operation.GateOperation]:
    """Returns one measurement operation per given qubit.

    Each qubit is measured in the computational basis under its own key.

    Args:
        *qubits: The qubits to measure.
        key_func: Maps a qubit to the key used for its measurement.
            Defaults to str.

    Returns:
        A list with one single-qubit measurement operation per qubit.
    """
    operations = []
    for qubit in qubits:
        operations.append(MeasurementGate(key_func(qubit)).on(qubit))
    return operations
class HPowGate(eigen_gate.EigenGate, gate_features.SingleQubitGate):
    """A Gate that performs a rotation around the X+Z axis of the Bloch sphere.

    The unitary matrix of ``HPowGate(exponent=t)`` is:

        [[g·(c-i·s/sqrt(2)), -i·g·s/sqrt(2)],
         [-i·g·s/sqrt(2), g·(c+i·s/sqrt(2))]]

    where

        c = cos(π·t/2)
        s = sin(π·t/2)
        g = exp(i·π·t/2).

    Note in particular that for `t=1`, this gives the Hadamard matrix.

    `cirq.H`, the Hadamard gate, is an instance of this gate at `exponent=1`.
    """

    def _eigen_components(self):
        # Projectors onto the eigenvectors of H = (X+Z)/sqrt(2), which lie
        # along the X+Z axis of the Bloch sphere.
        s = np.sqrt(2)

        component0 = np.array([
            [3 + 2 * s, 1 + s],
            [1 + s, 1]
        ]) / (4 + 2 * s)

        component1 = np.array([
            [3 - 2 * s, 1 - s],
            [1 - s, 1]
        ]) / (4 - 2 * s)

        return [(0, component0), (1, component1)]

    def _apply_unitary_(self, args: protocols.ApplyUnitaryArgs
                        ) -> Optional[np.ndarray]:
        # Fast path only for exponent 1. The three in-place updates below
        # implement the Hadamard butterfly without a scratch buffer; their
        # order matters.
        if self._exponent != 1:
            return None

        zero = args.subspace_index(0)
        one = args.subspace_index(1)
        args.target_tensor[one] -= args.target_tensor[zero]
        args.target_tensor[one] *= -0.5
        args.target_tensor[zero] -= args.target_tensor[one]
        # Restore normalization and apply any global_shift phase.
        p = 1j**(2 * self._exponent * self._global_shift)
        args.target_tensor *= np.sqrt(2) * p
        return args.target_tensor

    def _decompose_(self, qubits):
        q = qubits[0]

        if self._exponent == 1:
            yield cirq.Y(q)**0.5
            yield cirq.XPowGate(global_shift=-0.25).on(q)
            return

        # General powers: conjugate an X rotation by Y**0.25.
        yield Y(q)**0.25
        yield X(q)**self._exponent
        yield Y(q)**-0.25

    def _circuit_diagram_info_(self, args: protocols.CircuitDiagramInfoArgs
                               ) -> protocols.CircuitDiagramInfo:
        return protocols.CircuitDiagramInfo(('H',))

    def _qasm_(self,
               args: protocols.QasmArgs,
               qubits: Tuple[raw_types.QubitId, ...]) -> Optional[str]:
        args.validate_version('2.0')
        if self._exponent == 1:
            return args.format('h {0};\n', qubits[0])
        else:
            return args.format('ry({0:half_turns}) {3};\n'
                               'rx({1:half_turns}) {3};\n'
                               'ry({2:half_turns}) {3};\n',
                               0.25, self._exponent, -0.25, qubits[0])

    def __str__(self):
        if self._exponent == 1:
            return 'H'
        return 'H^{}'.format(self._exponent)

    def __repr__(self):
        if self._global_shift == 0:
            if self._exponent == 1:
                return 'cirq.H'
            return '(cirq.H**{!r})'.format(self._exponent)
        return (
            'cirq.HPowGate(exponent={!r}, '
            'global_shift={!r})'
        ).format(self._exponent, self._global_shift)
class CZPowGate(eigen_gate.EigenGate,
                gate_features.TwoQubitGate,
                gate_features.InterchangeableQubitsGate):
    """A gate that applies a phase to the |11⟩ state of two qubits.

    The unitary matrix of `CZPowGate(exponent=t)` is:

        [[1, 0, 0, 0],
         [0, 1, 0, 0],
         [0, 0, 1, 0],
         [0, 0, 0, g]]

    where:

        g = exp(i·π·t/2).

    `cirq.CZ`, the controlled Z gate, is an instance of this gate at
    `exponent=1`.
    """

    def _eigen_components(self):
        # Only the |11> state picks up a phase.
        return [
            (0, np.diag([1, 1, 1, 0])),
            (1, np.diag([0, 0, 0, 1])),
        ]

    def _apply_unitary_(self, args: protocols.ApplyUnitaryArgs
                        ) -> Union[np.ndarray, NotImplementedType]:
        # Diagonal gate: scale the |11> amplitudes in place.
        if protocols.is_parameterized(self):
            return NotImplemented

        c = 1j**(2 * self._exponent)
        one_one = linalg.slice_for_qubits_equal_to(args.axes, 0b11)
        args.target_tensor[one_one] *= c
        # Apply any global_shift phase.
        p = 1j**(2 * self._exponent * self._global_shift)
        if p != 1:
            args.target_tensor *= p
        return args.target_tensor

    def _phase_by_(self, phase_turns, qubit_index):
        # CZ commutes with phasing about the Z axis on either qubit.
        return self

    def _circuit_diagram_info_(self, args: protocols.CircuitDiagramInfoArgs
                               ) -> protocols.CircuitDiagramInfo:
        return protocols.CircuitDiagramInfo(
            wire_symbols=('@', '@'),
            exponent=self._diagram_exponent(args))

    def _qasm_(self,
               args: protocols.QasmArgs,
               qubits: Tuple[raw_types.QubitId, ...]) -> Optional[str]:
        if self._exponent != 1:
            return None  # Don't have an equivalent gate in QASM
        args.validate_version('2.0')
        return args.format('cz {0},{1};\n', qubits[0], qubits[1])

    def __str__(self) -> str:
        if self._exponent == 1:
            return 'CZ'
        return 'CZ**{!r}'.format(self._exponent)

    def __repr__(self) -> str:
        if self._global_shift == 0:
            if self._exponent == 1:
                return 'cirq.CZ'
            return '(cirq.CZ**{!r})'.format(self._exponent)
        return (
            'cirq.CZPowGate(exponent={!r}, '
            'global_shift={!r})'
        ).format(self._exponent, self._global_shift)
def _rads_func_symbol(func_name: str,
                      args: protocols.CircuitDiagramInfoArgs,
                      half_turns: Any) -> str:
    """Format a rotation symbol like 'Rx(0.5π)' from a half-turn count.

    Whole positive/negative turns render without a coefficient, i.e.
    'Rx(π)' and 'Rx(-π)'. 'pi' is used when unicode output is disabled.
    """
    unit = 'π' if args.use_unicode_characters else 'pi'
    if half_turns == 1:
        coefficient = ''
    elif half_turns == -1:
        coefficient = '-'
    else:
        coefficient = half_turns
    return '{}({}{})'.format(func_name, coefficient, unit)
class CNotPowGate(eigen_gate.EigenGate, gate_features.TwoQubitGate):
    """A gate that applies a controlled power of an X gate.

    When applying CNOT (controlled-not) to qubits, you can either use
    positional arguments CNOT(q1, q2), where q2 is toggled when q1 is on,
    or named arguments CNOT(control=q1, target=q2).
    (Mixing the two is not permitted.)

    The unitary matrix of `CNotPowGate(exponent=t)` is:

        [[1, 0, 0, 0],
         [0, 1, 0, 0],
         [0, 0, g·c, -i·g·s],
         [0, 0, -i·g·s, g·c]]

    where:

        c = cos(π·t/2)
        s = sin(π·t/2)
        g = exp(i·π·t/2).

    `cirq.CNOT`, the controlled NOT gate, is an instance of this gate at
    `exponent=1`.
    """

    def _decompose_(self, qubits):
        # CNOT**t = (I ⊗ Y**-0.5) · CZ**t · (I ⊗ Y**0.5).
        c, t = qubits
        yield Y(t)**-0.5
        yield CZ(c, t)**self._exponent
        yield Y(t)**0.5

    def _eigen_components(self):
        # Identity on the control-off subspace; X's projectors on the
        # control-on subspace.
        return [
            (0, np.array([[1, 0, 0, 0],
                          [0, 1, 0, 0],
                          [0, 0, 0.5, 0.5],
                          [0, 0, 0.5, 0.5]])),
            (1, np.array([[0, 0, 0, 0],
                          [0, 0, 0, 0],
                          [0, 0, 0.5, -0.5],
                          [0, 0, -0.5, 0.5]])),
        ]

    def _circuit_diagram_info_(self, args: protocols.CircuitDiagramInfoArgs
                               ) -> protocols.CircuitDiagramInfo:
        return protocols.CircuitDiagramInfo(
            wire_symbols=('@', 'X'),
            exponent=self._diagram_exponent(args))

    def _apply_unitary_(self, args: protocols.ApplyUnitaryArgs
                        ) -> Optional[np.ndarray]:
        # Fast path only for exponent 1: swap the |01> and |11> amplitudes
        # (control is the low axis here), using the scratch buffer to hold
        # one side of the swap.
        if self._exponent != 1:
            return None

        oo = args.subspace_index(0b11)
        zo = args.subspace_index(0b01)
        args.available_buffer[oo] = args.target_tensor[oo]
        args.target_tensor[oo] = args.target_tensor[zo]
        args.target_tensor[zo] = args.available_buffer[oo]
        # Apply any global_shift phase.
        p = 1j**(2 * self._exponent * self._global_shift)
        if p != 1:
            args.target_tensor *= p
        return args.target_tensor

    def _qasm_(self,
               args: protocols.QasmArgs,
               qubits: Tuple[raw_types.QubitId, ...]) -> Optional[str]:
        if self._exponent != 1:
            return None  # Don't have an equivalent gate in QASM
        args.validate_version('2.0')
        return args.format('cx {0},{1};\n', qubits[0], qubits[1])

    def __str__(self) -> str:
        if self._exponent == 1:
            return 'CNOT'
        return 'CNOT**{!r}'.format(self._exponent)

    def __repr__(self):
        if self._global_shift == 0:
            if self._exponent == 1:
                return 'cirq.CNOT'
            return '(cirq.CNOT**{!r})'.format(self._exponent)
        return (
            'cirq.CNotPowGate(exponent={!r}, '
            'global_shift={!r})'
        ).format(self._exponent, self._global_shift)

    def on(self, *args: raw_types.QubitId,
           **kwargs: raw_types.QubitId) -> gate_operation.GateOperation:
        # Support either two positional qubits or the named
        # control=/target= form, but never a mixture of the two.
        if not kwargs:
            return super().on(*args)
        if not args and set(kwargs.keys()) == {'control', 'target'}:
            return super().on(kwargs['control'], kwargs['target'])
        raise ValueError(
            "Expected two positional argument or else 'target' AND 'control' "
            "keyword arguments. But got args={!r}, kwargs={!r}.".format(
                args, kwargs))
class SwapPowGate(eigen_gate.EigenGate,
                  gate_features.TwoQubitGate,
                  gate_features.InterchangeableQubitsGate):
    """The SWAP gate, possibly raised to a power. Exchanges qubits.

    SwapPowGate()**t = SwapPowGate(exponent=t) and acts on two qubits in the
    computational basis as the matrix:

        [[1, 0, 0, 0],
         [0, g·c, -i·g·s, 0],
         [0, -i·g·s, g·c, 0],
         [0, 0, 0, 1]]

    where c = cos(π·t/2), s = sin(π·t/2) and g = exp(i·π·t/2).

    `cirq.SWAP`, the swap gate, is an instance of this gate at exponent=1.
    """

    def _decompose_(self, qubits):
        """See base class."""
        first, second = qubits
        # Classic three-CNOT swap; raising the middle CNOT to this gate's
        # exponent produces the matching power of SWAP.
        yield CNOT(first, second)
        yield CNOT(second, first)**self._exponent
        yield CNOT(first, second)

    def _eigen_components(self):
        # Phase-0 projector: |00>, |11> and the symmetric middle state.
        # Phase-pi projector: the antisymmetric middle state.
        symmetric = np.array([[1, 0, 0, 0],
                              [0, 0.5, 0.5, 0],
                              [0, 0.5, 0.5, 0],
                              [0, 0, 0, 1]])
        antisymmetric = np.array([[0, 0, 0, 0],
                                  [0, 0.5, -0.5, 0],
                                  [0, -0.5, 0.5, 0],
                                  [0, 0, 0, 0]])
        return [(0, symmetric), (1, antisymmetric)]

    def _apply_unitary_(self, args: protocols.ApplyUnitaryArgs
                        ) -> Optional[np.ndarray]:
        # In-place fast path, only valid for the plain SWAP.
        if self._exponent != 1:
            return None
        idx01 = args.subspace_index(0b01)
        idx10 = args.subspace_index(0b10)
        # Exchange the |01> and |10> amplitude slices via the scratch buffer;
        # the assignment order matters because the swap happens in place.
        args.available_buffer[idx01] = args.target_tensor[idx01]
        args.target_tensor[idx01] = args.target_tensor[idx10]
        args.target_tensor[idx10] = args.available_buffer[idx01]
        phase = 1j**(2 * self._exponent * self._global_shift)
        if phase != 1:
            args.target_tensor *= phase
        return args.target_tensor

    def _circuit_diagram_info_(self, args: protocols.CircuitDiagramInfoArgs
                               ) -> protocols.CircuitDiagramInfo:
        # Unicode diagrams draw the crossing symbol; ASCII falls back to text.
        symbol = '×' if args.use_unicode_characters else 'swap'
        return protocols.CircuitDiagramInfo(
            wire_symbols=(symbol, symbol),
            exponent=self._diagram_exponent(args))

    def _qasm_(self,
               args: protocols.QasmArgs,
               qubits: Tuple[raw_types.QubitId, ...]) -> Optional[str]:
        if self._exponent != 1:
            return None  # Don't have an equivalent gate in QASM
        args.validate_version('2.0')
        return args.format('swap {0},{1};\n', qubits[0], qubits[1])

    def __str__(self) -> str:
        if self._exponent == 1:
            return 'SWAP'
        return 'SWAP**{!r}'.format(self._exponent)

    def __repr__(self):
        # Emit the canonical short form unless a global shift forces the
        # fully explicit constructor call.
        if self._global_shift != 0:
            return ('cirq.SwapPowGate(exponent={!r}, '
                    'global_shift={!r})').format(self._exponent,
                                                 self._global_shift)
        if self._exponent == 1:
            return 'cirq.SWAP'
        return '(cirq.SWAP**{!r})'.format(self._exponent)
class ISwapPowGate(eigen_gate.EigenGate,
                   gate_features.InterchangeableQubitsGate,
                   gate_features.TwoQubitGate):
    """Rotates the |01⟩-vs-|10⟩ subspace of two qubits around its Bloch X-axis.

    When exponent=1, swaps the two qubits and phases |01⟩ and |10⟩ by i. More
    generally, this gate's matrix is defined as follows:

        ISWAP**t ≡ exp(+i π t (X⊗X + Y⊗Y) / 4)

    which is given by the matrix:

        [[1, 0, 0, 0],
         [0, c, i·s, 0],
         [0, i·s, c, 0],
         [0, 0, 0, 1]]

    where c = cos(π·t/2) and s = sin(π·t/2).

    `cirq.ISWAP`, the swap gate that applies -i to the |01> and |10> states,
    is an instance of this gate at exponent=1.
    """

    def _eigen_components(self):
        # |00> and |11> are left alone (phase 0); the middle subspace splits
        # into two half-phase eigenspaces of opposite sign.
        plus_half = np.array([[0, 0, 0, 0],
                              [0, 0.5, 0.5, 0],
                              [0, 0.5, 0.5, 0],
                              [0, 0, 0, 0]])
        minus_half = np.array([[0, 0, 0, 0],
                               [0, 0.5, -0.5, 0],
                               [0, -0.5, 0.5, 0],
                               [0, 0, 0, 0]])
        return [(0, np.diag([1, 0, 0, 1])),
                (0.5, plus_half),
                (-0.5, minus_half)]

    def _decompose_(self, qubits):
        # Conjugates an S**±t phase pair so that it acts on the |01>/|10>
        # exchange subspace.  The gate order is load-bearing; do not reorder.
        a, b = qubits
        yield CNOT(a, b)
        yield H(a)
        yield CNOT(b, a)
        yield S(a)**self._exponent
        yield CNOT(b, a)
        yield S(a)**-self._exponent
        yield H(a)
        yield CNOT(a, b)

    def _apply_unitary_(self, args: protocols.ApplyUnitaryArgs
                        ) -> Optional[np.ndarray]:
        # In-place fast path, only valid at exponent == 1.
        if self._exponent != 1:
            return None
        idx01 = args.subspace_index(0b01)
        idx10 = args.subspace_index(0b10)
        # Swap the two middle amplitude slices, then phase each by i.
        args.available_buffer[idx01] = args.target_tensor[idx01]
        args.target_tensor[idx01] = args.target_tensor[idx10]
        args.target_tensor[idx10] = args.available_buffer[idx01]
        args.target_tensor[idx01] *= 1j
        args.target_tensor[idx10] *= 1j
        phase = 1j**(2 * self._exponent * self._global_shift)
        if phase != 1:
            args.target_tensor *= phase
        return args.target_tensor

    def _circuit_diagram_info_(self, args: protocols.CircuitDiagramInfoArgs
                               ) -> protocols.CircuitDiagramInfo:
        return protocols.CircuitDiagramInfo(
            wire_symbols=('iSwap', 'iSwap'),
            exponent=self._diagram_exponent(args))

    def __str__(self) -> str:
        if self._exponent == 1:
            return 'ISWAP'
        return 'ISWAP**{!r}'.format(self._exponent)

    def __repr__(self):
        # Canonical short form unless a global shift requires the explicit
        # constructor spelling.
        if self._global_shift != 0:
            return ('cirq.ISwapPowGate(exponent={!r}, '
                    'global_shift={!r})').format(self._exponent,
                                                 self._global_shift)
        if self._exponent == 1:
            return 'cirq.ISWAP'
        return '(cirq.ISWAP**{!r})'.format(self._exponent)
def Rx(rads: float) -> XPowGate:
    """Return an X-axis rotation gate with matrix e^{-i X rads / 2}."""
    # Convert radians to half-turns; the -0.5 shift cancels the global phase.
    half_turns = rads / np.pi
    return XPowGate(exponent=half_turns, global_shift=-0.5)
def Ry(rads: float) -> YPowGate:
    """Return a Y-axis rotation gate with matrix e^{-i Y rads / 2}."""
    # Convert radians to half-turns; the -0.5 shift cancels the global phase.
    half_turns = rads / np.pi
    return YPowGate(exponent=half_turns, global_shift=-0.5)
def Rz(rads: float) -> ZPowGate:
    """Return a Z-axis rotation gate with matrix e^{-i Z rads / 2}."""
    # Convert radians to half-turns; the -0.5 shift cancels the global phase.
    half_turns = rads / np.pi
    return ZPowGate(exponent=half_turns, global_shift=-0.5)
X = XPowGate()
"""The Pauli X gate.
Matrix:
[[0, 1],
[1, 0]]
"""
#: The Pauli Y gate.
#:
#: Matrix:
#:
#: [[0, -i],
#: [i, 0]]
Y = YPowGate()
# The Pauli Z gate.
#
# Matrix:
#
# [[1, 0],
# [0, -1]]
Z = ZPowGate()
# The Hadamard gate.
#
# Matrix:
#
# [[s, s],
# [s, -s]]
# where s = sqrt(0.5).
H = HPowGate()
# The Clifford S gate.
#
# Matrix:
#
# [[1, 0],
# [0, i]]
S = Z**0.5
# The T gate.
#
# Matrix:
#
# [[1, 0]
# [0, exp(i pi / 4)]]
T = Z**0.25
# The controlled Z gate.
#
# Matrix:
#
# [[1, 0, 0, 0],
# [0, 1, 0, 0],
# [0, 0, 1, 0],
# [0, 0, 0, -1]]
CZ = CZPowGate()
# The controlled NOT gate.
#
# Matrix:
#
# [[1, 0, 0, 0],
# [0, 1, 0, 0],
# [0, 0, 0, 1],
# [0, 0, 1, 0]]
CNOT = CNotPowGate()
# The swap gate.
#
# Matrix:
#
# [[1, 0, 0, 0],
# [0, 0, 1, 0],
# [0, 1, 0, 0],
# [0, 0, 0, 1]]
SWAP = SwapPowGate()
# The iswap gate.
#
# Matrix:
#
# [[1, 0, 0, 0],
# [0, 0, i, 0],
# [0, i, 0, 0],
# [0, 0, 0, 1]]
ISWAP = ISwapPowGate()
| 32.071977 | 80 | 0.552739 |
from typing import (
Any, Callable, cast, Iterable, List, Optional, Tuple, Union,
)
import numpy as np
from cirq import linalg, protocols, value
from cirq.ops import gate_features, eigen_gate, raw_types, gate_operation
from cirq.type_workarounds import NotImplementedType
import cirq.ops.phased_x_gate
class XPowGate(eigen_gate.EigenGate,
gate_features.SingleQubitGate):
def _apply_unitary_(self, args: protocols.ApplyUnitaryArgs
) -> Optional[np.ndarray]:
if self._exponent != 1:
return None
zero = args.subspace_index(0)
one = args.subspace_index(1)
args.available_buffer[zero] = args.target_tensor[one]
args.available_buffer[one] = args.target_tensor[zero]
p = 1j**(2 * self._exponent * self._global_shift)
if p != 1:
args.available_buffer *= p
return args.available_buffer
def _eigen_components(self):
return [
(0, np.array([[0.5, 0.5], [0.5, 0.5]])),
(1, np.array([[0.5, -0.5], [-0.5, 0.5]])),
]
def _circuit_diagram_info_(self, args: protocols.CircuitDiagramInfoArgs
) -> Union[str, protocols.CircuitDiagramInfo]:
if self._global_shift == -0.5:
return _rads_func_symbol(
'Rx',
args,
self._diagram_exponent(args, ignore_global_phase=False))
return protocols.CircuitDiagramInfo(
wire_symbols=('X',),
exponent=self._diagram_exponent(args))
def _qasm_(self,
args: protocols.QasmArgs,
qubits: Tuple[raw_types.QubitId, ...]) -> Optional[str]:
args.validate_version('2.0')
if self._exponent == 1:
return args.format('x {0};\n', qubits[0])
else:
return args.format('rx({0:half_turns}) {1};\n',
self._exponent, qubits[0])
def _phase_by_(self, phase_turns, qubit_index):
return cirq.ops.phased_x_gate.PhasedXPowGate(
exponent=self._exponent,
phase_exponent=phase_turns * 2)
def __str__(self) -> str:
if self._exponent == 1:
return 'X'
return 'X**{!r}'.format(self._exponent)
def __repr__(self) -> str:
if self._global_shift == -0.5 and not protocols.is_parameterized(self):
return 'cirq.Rx(np.pi*{!r})'.format(self._exponent)
if self._global_shift == 0:
if self._exponent == 1:
return 'cirq.X'
return '(cirq.X**{!r})'.format(self._exponent)
return (
'cirq.XPowGate(exponent={!r}, '
'global_shift={!r})'
).format(self._exponent, self._global_shift)
class YPowGate(eigen_gate.EigenGate,
gate_features.SingleQubitGate):
def _eigen_components(self):
return [
(0, np.array([[0.5, -0.5j], [0.5j, 0.5]])),
(1, np.array([[0.5, 0.5j], [-0.5j, 0.5]])),
]
def _circuit_diagram_info_(self, args: protocols.CircuitDiagramInfoArgs
) -> Union[str, protocols.CircuitDiagramInfo]:
if self._global_shift == -0.5:
return _rads_func_symbol(
'Ry',
args,
self._diagram_exponent(args, ignore_global_phase=False))
return protocols.CircuitDiagramInfo(
wire_symbols=('Y',),
exponent=self._diagram_exponent(args))
def _qasm_(self,
args: protocols.QasmArgs,
qubits: Tuple[raw_types.QubitId, ...]) -> Optional[str]:
args.validate_version('2.0')
if self._exponent == 1:
return args.format('y {0};\n', qubits[0])
else:
return args.format('ry({0:half_turns}) {1};\n',
self._exponent, qubits[0])
def _phase_by_(self, phase_turns, qubit_index):
return cirq.ops.phased_x_gate.PhasedXPowGate(
exponent=self._exponent,
phase_exponent=0.5 + phase_turns * 2)
def __str__(self) -> str:
if self._exponent == 1:
return 'Y'
return 'Y**{!r}'.format(self._exponent)
def __repr__(self) -> str:
if self._global_shift == -0.5 and not protocols.is_parameterized(self):
return 'cirq.Ry(np.pi*{!r})'.format(self._exponent)
if self._global_shift == 0:
if self._exponent == 1:
return 'cirq.Y'
return '(cirq.Y**{!r})'.format(self._exponent)
return (
'cirq.YPowGate(exponent={!r}, '
'global_shift={!r})'
).format(self._exponent, self._global_shift)
class ZPowGate(eigen_gate.EigenGate,
gate_features.SingleQubitGate):
def _apply_unitary_(self, args: protocols.ApplyUnitaryArgs
) -> Optional[np.ndarray]:
if protocols.is_parameterized(self):
return None
one = args.subspace_index(1)
c = 1j**(self._exponent * 2)
args.target_tensor[one] *= c
p = 1j**(2 * self._exponent * self._global_shift)
if p != 1:
args.target_tensor *= p
return args.target_tensor
def _eigen_components(self):
return [
(0, np.diag([1, 0])),
(1, np.diag([0, 1])),
]
def _phase_by_(self, phase_turns: float, qubit_index: int):
return self
def _circuit_diagram_info_(self, args: protocols.CircuitDiagramInfoArgs
) -> Union[str, protocols.CircuitDiagramInfo]:
if self._global_shift == -0.5:
return _rads_func_symbol(
'Rz',
args,
self._diagram_exponent(args, ignore_global_phase=False))
e = self._diagram_exponent(args)
if e in [-0.25, 0.25]:
return protocols.CircuitDiagramInfo(
wire_symbols=('T',),
exponent=cast(float, e) * 4)
if e in [-0.5, 0.5]:
return protocols.CircuitDiagramInfo(
wire_symbols=('S',),
exponent=cast(float, e) * 2)
return protocols.CircuitDiagramInfo(
wire_symbols=('Z',),
exponent=e)
def _qasm_(self,
args: protocols.QasmArgs,
qubits: Tuple[raw_types.QubitId, ...]) -> Optional[str]:
args.validate_version('2.0')
if self._exponent == 1:
return args.format('z {0};\n', qubits[0])
else:
return args.format('rz({0:half_turns}) {1};\n',
self._exponent, qubits[0])
def __str__(self) -> str:
if self._exponent == 0.25:
return 'T'
if self._exponent == -0.25:
return 'T**-1'
if self._exponent == 0.5:
return 'S'
if self._exponent == -0.5:
return 'S**-1'
if self._exponent == 1:
return 'Z'
return 'Z**{}'.format(self._exponent)
def __repr__(self) -> str:
if self._global_shift == -0.5 and not protocols.is_parameterized(self):
return 'cirq.Rz(np.pi*{!r})'.format(self._exponent)
if self._global_shift == 0:
if self._exponent == 0.25:
return 'cirq.T'
if self._exponent == -0.25:
return '(cirq.T**-1)'
if self._exponent == 0.5:
return 'cirq.S'
if self._exponent == -0.5:
return '(cirq.S**-1)'
if self._exponent == 1:
return 'cirq.Z'
return '(cirq.Z**{!r})'.format(self._exponent)
return (
'cirq.ZPowGate(exponent={!r}, '
'global_shift={!r})'
).format(self._exponent, self._global_shift)
@value.value_equality
class MeasurementGate(raw_types.Gate):
def __init__(self,
key: str = '',
invert_mask: Tuple[bool, ...] = ()) -> None:
self.key = key
self.invert_mask = invert_mask or ()
@staticmethod
def is_measurement(op: Union[raw_types.Gate, raw_types.Operation]) -> bool:
if isinstance(op, MeasurementGate):
return True
if (isinstance(op, gate_operation.GateOperation) and
isinstance(op.gate, MeasurementGate)):
return True
return False
def with_bits_flipped(self, *bit_positions: int) -> 'MeasurementGate':
old_mask = self.invert_mask or ()
n = max(len(old_mask) - 1, *bit_positions) + 1
new_mask = [k < len(old_mask) and old_mask[k] for k in range(n)]
for b in bit_positions:
new_mask[b] = not new_mask[b]
return MeasurementGate(key=self.key, invert_mask=tuple(new_mask))
def validate_args(self, qubits):
if (self.invert_mask is not None and
len(self.invert_mask) > len(qubits)):
raise ValueError('len(invert_mask) > len(qubits)')
def _circuit_diagram_info_(self, args: protocols.CircuitDiagramInfoArgs
) -> protocols.CircuitDiagramInfo:
n = (max(1, len(self.invert_mask))
if args.known_qubit_count is None
else args.known_qubit_count)
symbols = ['M'] * n
if self.invert_mask:
for i, b in enumerate(self.invert_mask):
if b:
symbols[i] = '!M'
if (not args.known_qubits or
self.key != _default_measurement_key(args.known_qubits)):
symbols[0] += "('{}')".format(self.key)
return protocols.CircuitDiagramInfo(tuple(symbols))
def _qasm_(self,
args: protocols.QasmArgs,
qubits: Tuple[raw_types.QubitId, ...]) -> Optional[str]:
args.validate_version('2.0')
invert_mask = self.invert_mask
if len(invert_mask) < len(qubits):
invert_mask = (invert_mask
+ (False,) * (len(qubits) - len(invert_mask)))
lines = []
for i, (qubit, inv) in enumerate(zip(qubits, invert_mask)):
if inv:
lines.append(args.format(
'x {0}; // Invert the following measurement\n', qubit))
lines.append(args.format('measure {0} -> {1:meas}[{2}];\n',
qubit, self.key, i))
return ''.join(lines)
def __repr__(self):
return 'cirq.MeasurementGate({}, {})'.format(repr(self.key),
repr(self.invert_mask))
def _value_equality_values_(self):
return self.key, self.invert_mask
def _default_measurement_key(qubits: Iterable[raw_types.QubitId]) -> str:
return ','.join(str(q) for q in qubits)
def measure(*qubits: raw_types.QubitId,
key: Optional[str] = None,
invert_mask: Tuple[bool, ...] = ()
) -> gate_operation.GateOperation:
for qubit in qubits:
if isinstance(qubit, np.ndarray):
raise ValueError(
'measure() was called a numpy ndarray. Perhaps you meant '
'to call measure_state_vector on numpy array?'
)
elif not isinstance(qubit, raw_types.QubitId):
raise ValueError(
'measure() was called with type different than QubitId.')
if key is None:
key = _default_measurement_key(qubits)
return MeasurementGate(key, invert_mask).on(*qubits)
def measure_each(*qubits: raw_types.QubitId,
key_func: Callable[[raw_types.QubitId], str] = str
) -> List[gate_operation.GateOperation]:
return [MeasurementGate(key_func(q)).on(q) for q in qubits]
class HPowGate(eigen_gate.EigenGate, gate_features.SingleQubitGate):
def _eigen_components(self):
s = np.sqrt(2)
component0 = np.array([
[3 + 2 * s, 1 + s],
[1 + s, 1]
]) / (4 + 2 * s)
component1 = np.array([
[3 - 2 * s, 1 - s],
[1 - s, 1]
]) / (4 - 2 * s)
return [(0, component0), (1, component1)]
def _apply_unitary_(self, args: protocols.ApplyUnitaryArgs
) -> Optional[np.ndarray]:
if self._exponent != 1:
return None
zero = args.subspace_index(0)
one = args.subspace_index(1)
args.target_tensor[one] -= args.target_tensor[zero]
args.target_tensor[one] *= -0.5
args.target_tensor[zero] -= args.target_tensor[one]
p = 1j**(2 * self._exponent * self._global_shift)
args.target_tensor *= np.sqrt(2) * p
return args.target_tensor
def _decompose_(self, qubits):
q = qubits[0]
if self._exponent == 1:
yield cirq.Y(q)**0.5
yield cirq.XPowGate(global_shift=-0.25).on(q)
return
yield Y(q)**0.25
yield X(q)**self._exponent
yield Y(q)**-0.25
def _circuit_diagram_info_(self, args: protocols.CircuitDiagramInfoArgs
) -> protocols.CircuitDiagramInfo:
return protocols.CircuitDiagramInfo(('H',))
def _qasm_(self,
args: protocols.QasmArgs,
qubits: Tuple[raw_types.QubitId, ...]) -> Optional[str]:
args.validate_version('2.0')
if self._exponent == 1:
return args.format('h {0};\n', qubits[0])
else:
return args.format('ry({0:half_turns}) {3};\n'
'rx({1:half_turns}) {3};\n'
'ry({2:half_turns}) {3};\n',
0.25, self._exponent, -0.25, qubits[0])
def __str__(self):
if self._exponent == 1:
return 'H'
return 'H^{}'.format(self._exponent)
def __repr__(self):
if self._global_shift == 0:
if self._exponent == 1:
return 'cirq.H'
return '(cirq.H**{!r})'.format(self._exponent)
return (
'cirq.HPowGate(exponent={!r}, '
'global_shift={!r})'
).format(self._exponent, self._global_shift)
class CZPowGate(eigen_gate.EigenGate,
gate_features.TwoQubitGate,
gate_features.InterchangeableQubitsGate):
def _eigen_components(self):
return [
(0, np.diag([1, 1, 1, 0])),
(1, np.diag([0, 0, 0, 1])),
]
def _apply_unitary_(self, args: protocols.ApplyUnitaryArgs
) -> Union[np.ndarray, NotImplementedType]:
if protocols.is_parameterized(self):
return NotImplemented
c = 1j**(2 * self._exponent)
one_one = linalg.slice_for_qubits_equal_to(args.axes, 0b11)
args.target_tensor[one_one] *= c
p = 1j**(2 * self._exponent * self._global_shift)
if p != 1:
args.target_tensor *= p
return args.target_tensor
def _phase_by_(self, phase_turns, qubit_index):
return self
def _circuit_diagram_info_(self, args: protocols.CircuitDiagramInfoArgs
) -> protocols.CircuitDiagramInfo:
return protocols.CircuitDiagramInfo(
wire_symbols=('@', '@'),
exponent=self._diagram_exponent(args))
def _qasm_(self,
args: protocols.QasmArgs,
qubits: Tuple[raw_types.QubitId, ...]) -> Optional[str]:
if self._exponent != 1:
return None
args.validate_version('2.0')
return args.format('cz {0},{1};\n', qubits[0], qubits[1])
def __str__(self) -> str:
if self._exponent == 1:
return 'CZ'
return 'CZ**{!r}'.format(self._exponent)
def __repr__(self) -> str:
if self._global_shift == 0:
if self._exponent == 1:
return 'cirq.CZ'
return '(cirq.CZ**{!r})'.format(self._exponent)
return (
'cirq.CZPowGate(exponent={!r}, '
'global_shift={!r})'
).format(self._exponent, self._global_shift)
def _rads_func_symbol(func_name: str,
args: protocols.CircuitDiagramInfoArgs,
half_turns: Any) -> str:
unit = 'π' if args.use_unicode_characters else 'pi'
if half_turns == 1:
return '{}({})'.format(func_name, unit)
if half_turns == -1:
return '{}(-{})'.format(func_name, unit)
return '{}({}{})'.format(func_name, half_turns, unit)
class CNotPowGate(eigen_gate.EigenGate, gate_features.TwoQubitGate):
def _decompose_(self, qubits):
c, t = qubits
yield Y(t)**-0.5
yield CZ(c, t)**self._exponent
yield Y(t)**0.5
def _eigen_components(self):
return [
(0, np.array([[1, 0, 0, 0],
[0, 1, 0, 0],
[0, 0, 0.5, 0.5],
[0, 0, 0.5, 0.5]])),
(1, np.array([[0, 0, 0, 0],
[0, 0, 0, 0],
[0, 0, 0.5, -0.5],
[0, 0, -0.5, 0.5]])),
]
def _circuit_diagram_info_(self, args: protocols.CircuitDiagramInfoArgs
) -> protocols.CircuitDiagramInfo:
return protocols.CircuitDiagramInfo(
wire_symbols=('@', 'X'),
exponent=self._diagram_exponent(args))
def _apply_unitary_(self, args: protocols.ApplyUnitaryArgs
) -> Optional[np.ndarray]:
if self._exponent != 1:
return None
oo = args.subspace_index(0b11)
zo = args.subspace_index(0b01)
args.available_buffer[oo] = args.target_tensor[oo]
args.target_tensor[oo] = args.target_tensor[zo]
args.target_tensor[zo] = args.available_buffer[oo]
p = 1j**(2 * self._exponent * self._global_shift)
if p != 1:
args.target_tensor *= p
return args.target_tensor
def _qasm_(self,
args: protocols.QasmArgs,
qubits: Tuple[raw_types.QubitId, ...]) -> Optional[str]:
if self._exponent != 1:
return None # Don't have an equivalent gate in QASM
args.validate_version('2.0')
return args.format('cx {0},{1};\n', qubits[0], qubits[1])
def __str__(self) -> str:
if self._exponent == 1:
return 'CNOT'
return 'CNOT**{!r}'.format(self._exponent)
def __repr__(self):
if self._global_shift == 0:
if self._exponent == 1:
return 'cirq.CNOT'
return '(cirq.CNOT**{!r})'.format(self._exponent)
return (
'cirq.CNotPowGate(exponent={!r}, '
'global_shift={!r})'
).format(self._exponent, self._global_shift)
def on(self, *args: raw_types.QubitId,
**kwargs: raw_types.QubitId) -> gate_operation.GateOperation:
if not kwargs:
return super().on(*args)
if not args and set(kwargs.keys()) == {'control', 'target'}:
return super().on(kwargs['control'], kwargs['target'])
raise ValueError(
"Expected two positional argument or else 'target' AND 'control' "
"keyword arguments. But got args={!r}, kwargs={!r}.".format(
args, kwargs))
class SwapPowGate(eigen_gate.EigenGate,
gate_features.TwoQubitGate,
gate_features.InterchangeableQubitsGate):
def _decompose_(self, qubits):
a, b = qubits
yield CNOT(a, b)
yield CNOT(b, a) ** self._exponent
yield CNOT(a, b)
def _eigen_components(self):
return [
(0, np.array([[1, 0, 0, 0],
[0, 0.5, 0.5, 0],
[0, 0.5, 0.5, 0],
[0, 0, 0, 1]])),
(1, np.array([[0, 0, 0, 0],
[0, 0.5, -0.5, 0],
[0, -0.5, 0.5, 0],
[0, 0, 0, 0]])),
]
def _apply_unitary_(self, args: protocols.ApplyUnitaryArgs
) -> Optional[np.ndarray]:
if self._exponent != 1:
return None
zo = args.subspace_index(0b01)
oz = args.subspace_index(0b10)
args.available_buffer[zo] = args.target_tensor[zo]
args.target_tensor[zo] = args.target_tensor[oz]
args.target_tensor[oz] = args.available_buffer[zo]
p = 1j**(2 * self._exponent * self._global_shift)
if p != 1:
args.target_tensor *= p
return args.target_tensor
def _circuit_diagram_info_(self, args: protocols.CircuitDiagramInfoArgs
) -> protocols.CircuitDiagramInfo:
if not args.use_unicode_characters:
return protocols.CircuitDiagramInfo(
wire_symbols=('swap', 'swap'),
exponent=self._diagram_exponent(args))
return protocols.CircuitDiagramInfo(
wire_symbols=('×', '×'),
exponent=self._diagram_exponent(args))
def _qasm_(self,
args: protocols.QasmArgs,
qubits: Tuple[raw_types.QubitId, ...]) -> Optional[str]:
if self._exponent != 1:
return None
args.validate_version('2.0')
return args.format('swap {0},{1};\n', qubits[0], qubits[1])
def __str__(self) -> str:
if self._exponent == 1:
return 'SWAP'
return 'SWAP**{!r}'.format(self._exponent)
def __repr__(self):
if self._global_shift == 0:
if self._exponent == 1:
return 'cirq.SWAP'
return '(cirq.SWAP**{!r})'.format(self._exponent)
return (
'cirq.SwapPowGate(exponent={!r}, '
'global_shift={!r})'
).format(self._exponent, self._global_shift)
class ISwapPowGate(eigen_gate.EigenGate,
gate_features.InterchangeableQubitsGate,
gate_features.TwoQubitGate):
def _eigen_components(self):
return [
(0, np.diag([1, 0, 0, 1])),
(+0.5, np.array([[0, 0, 0, 0],
[0, 0.5, 0.5, 0],
[0, 0.5, 0.5, 0],
[0, 0, 0, 0]])),
(-0.5, np.array([[0, 0, 0, 0],
[0, 0.5, -0.5, 0],
[0, -0.5, 0.5, 0],
[0, 0, 0, 0]])),
]
def _decompose_(self, qubits):
a, b = qubits
yield CNOT(a, b)
yield H(a)
yield CNOT(b, a)
yield S(a)**self._exponent
yield CNOT(b, a)
yield S(a)**-self._exponent
yield H(a)
yield CNOT(a, b)
def _apply_unitary_(self, args: protocols.ApplyUnitaryArgs
) -> Optional[np.ndarray]:
if self._exponent != 1:
return None
zo = args.subspace_index(0b01)
oz = args.subspace_index(0b10)
args.available_buffer[zo] = args.target_tensor[zo]
args.target_tensor[zo] = args.target_tensor[oz]
args.target_tensor[oz] = args.available_buffer[zo]
args.target_tensor[zo] *= 1j
args.target_tensor[oz] *= 1j
p = 1j**(2 * self._exponent * self._global_shift)
if p != 1:
args.target_tensor *= p
return args.target_tensor
def _circuit_diagram_info_(self, args: protocols.CircuitDiagramInfoArgs
) -> protocols.CircuitDiagramInfo:
return protocols.CircuitDiagramInfo(
wire_symbols=('iSwap', 'iSwap'),
exponent=self._diagram_exponent(args))
def __str__(self) -> str:
if self._exponent == 1:
return 'ISWAP'
return 'ISWAP**{!r}'.format(self._exponent)
def __repr__(self):
if self._global_shift == 0:
if self._exponent == 1:
return 'cirq.ISWAP'
return '(cirq.ISWAP**{!r})'.format(self._exponent)
return (
'cirq.ISwapPowGate(exponent={!r}, '
'global_shift={!r})'
).format(self._exponent, self._global_shift)
def Rx(rads: float) -> XPowGate:
return XPowGate(exponent=rads / np.pi, global_shift=-0.5)
def Ry(rads: float) -> YPowGate:
return YPowGate(exponent=rads / np.pi, global_shift=-0.5)
def Rz(rads: float) -> ZPowGate:
return ZPowGate(exponent=rads / np.pi, global_shift=-0.5)
X = XPowGate()
#: The Pauli Y gate.
#:
#: Matrix:
#:
#: [[0, -i],
#: [i, 0]]
Y = YPowGate()
# The Pauli Z gate.
#
# Matrix:
#
# [[1, 0],
# [0, -1]]
Z = ZPowGate()
# The Hadamard gate.
#
# Matrix:
#
# [[s, s],
# [s, -s]]
# where s = sqrt(0.5).
H = HPowGate()
# The Clifford S gate.
#
# Matrix:
#
# [[1, 0],
# [0, i]]
S = Z**0.5
# The T gate.
#
# Matrix:
#
# [[1, 0]
# [0, exp(i pi / 4)]]
T = Z**0.25
# The controlled Z gate.
#
# Matrix:
#
# [[1, 0, 0, 0],
# [0, 1, 0, 0],
# [0, 0, 1, 0],
# [0, 0, 0, -1]]
CZ = CZPowGate()
# The controlled NOT gate.
#
# Matrix:
#
# [[1, 0, 0, 0],
# [0, 1, 0, 0],
# [0, 0, 0, 1],
# [0, 0, 1, 0]]
CNOT = CNotPowGate()
# The swap gate.
#
# Matrix:
#
# [[1, 0, 0, 0],
# [0, 0, 1, 0],
# [0, 1, 0, 0],
# [0, 0, 0, 1]]
SWAP = SwapPowGate()
# The iswap gate.
#
# Matrix:
#
# [[1, 0, 0, 0],
# [0, 0, i, 0],
# [0, i, 0, 0],
# [0, 0, 0, 1]]
ISWAP = ISwapPowGate()
| true | true |
f72adde5fd070ac204654007f643a021dddeff3a | 4,967 | py | Python | sensirion_shdlc_sensorbridge/commands/firmware_update.py | Sensirion/python-shdlc-sensorbridge | c441c17d89697ecf0f7b61955f54c3da195e30e6 | [
"BSD-3-Clause"
] | null | null | null | sensirion_shdlc_sensorbridge/commands/firmware_update.py | Sensirion/python-shdlc-sensorbridge | c441c17d89697ecf0f7b61955f54c3da195e30e6 | [
"BSD-3-Clause"
] | 1 | 2021-03-28T22:15:29.000Z | 2021-11-03T09:06:14.000Z | sensirion_shdlc_sensorbridge/commands/firmware_update.py | Sensirion/python-shdlc-sensorbridge | c441c17d89697ecf0f7b61955f54c3da195e30e6 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
# (c) Copyright 2020 Sensirion AG, Switzerland
##############################################################################
##############################################################################
# _____ _ _ _______ _____ ____ _ _
# / ____| /\ | | | |__ __|_ _/ __ \| \ | |
# | | / \ | | | | | | | || | | | \| |
# | | / /\ \| | | | | | | || | | | . ` |
# | |____ / ____ \ |__| | | | _| || |__| | |\ |
# \_____/_/ \_\____/ |_| |_____\____/|_| \_|
#
# THIS FILE IS AUTOMATICALLY GENERATED AND MUST NOT BE EDITED MANUALLY!
#
# Generator: sensirion-shdlc-interface-generator 0.5.1
# Product: Sensor Bridge
# Version: 0.1.0
#
##############################################################################
##############################################################################
# flake8: noqa
from __future__ import absolute_import, division, print_function
from sensirion_shdlc_driver.command import ShdlcCommand
from struct import pack, unpack
import logging
log = logging.getLogger(__name__)
class SensorBridgeCmdFirmwareUpdateBase(ShdlcCommand):
    """
    SHDLC command 0xF3: "Firmware Update".
    """

    def __init__(self, *args, **kwargs):
        # Every firmware-update subcommand shares the SHDLC command ID 0xF3;
        # the concrete subclasses distinguish themselves via their payload.
        super(SensorBridgeCmdFirmwareUpdateBase, self).__init__(
            0xF3, *args, **kwargs)
class SensorBridgeCmdEnterBootloader(SensorBridgeCmdFirmwareUpdateBase):

    def __init__(self):
        """
        Enter Bootloader Command

        Command to enter into the bootloader mode. The device will reboot into
        bootloader mode and wait until the new Firmware is received (start
        update command expected). Even after a power reset, the device returns
        into bootloader mode. The response frame is sent before the reset.

        .. note:: After the response frame is received, the device will not
                  accept new commands until fully booted (wait at least 1 s).
        """
        # Empty payload: command 0xF3 without a subcommand byte.  The 1 s
        # post_processing_time enforces the reboot delay noted above.
        super(SensorBridgeCmdEnterBootloader, self).__init__(
            data=[],
            max_response_time=0.5,
            post_processing_time=1.0,
            min_response_length=0,
            max_response_length=0
        )
class SensorBridgeCmdStartUpdate(SensorBridgeCmdFirmwareUpdateBase):

    def __init__(self):
        """
        Start Update Command

        Command to start the firmware update. The devices flash will be erased
        (except bootloader) and the internal pointers resetted. The device is
        then ready to receive the new firmware with the update data command.

        .. note:: Only supported when in bootloader mode.
        """
        # Subcommand byte 0x01 selects "start update" within command 0xF3.
        payload = bytes([0x01])
        super(SensorBridgeCmdStartUpdate, self).__init__(
            data=payload,
            max_response_time=0.5,
            post_processing_time=0.0,
            min_response_length=0,
            max_response_length=0
        )
class SensorBridgeCmdUpdateData(SensorBridgeCmdFirmwareUpdateBase):

    def __init__(self, data):
        """
        Update Data Command

        Command to send the new firmware data as hex code in binary format.

        .. note:: Only supported when in bootloader mode after receiving the
                  start update command. Send even number of bytes except for
                  the last frame.

        :param bytes data:
            Firmware hex data in binary format.
        """
        # Subcommand byte 0x02 followed by the firmware chunk.  Routing the
        # chunk through bytearray() accepts bytes-like inputs uniformly.
        payload = bytes([0x02]) + bytes(bytearray(data))
        super(SensorBridgeCmdUpdateData, self).__init__(
            data=payload,
            max_response_time=0.5,
            post_processing_time=0.0,
            min_response_length=0,
            max_response_length=0
        )
class SensorBridgeCmdStopUpdate(SensorBridgeCmdFirmwareUpdateBase):

    def __init__(self, checksum):
        """
        Stop Update Command

        After all update data frames are sent, the stop update marks the end of
        the update sequence. The checksum is sent to the device and
        verification is done. The device state represents the success of the
        update sequence. If successfully, the device writes the signature and
        reboots into the application.

        .. note:: The checksum is calculated the same way as the SHDLC
                  checksum. First sum all firmware update data bytes and then
                  take the LSB of the result and invert it. This will be the
                  checksum.

        :param int checksum:
            Checksum of the firmware data.
        """
        # Subcommand byte 0x03 followed by the checksum as one unsigned byte.
        payload = bytes([0x03]) + pack(">B", checksum)
        super(SensorBridgeCmdStopUpdate, self).__init__(
            data=payload,
            max_response_time=1.0,
            post_processing_time=0.0,
            min_response_length=0,
            max_response_length=0
        )
| 35.733813 | 79 | 0.562915 | true | true | |
f72adf1f6af0532364f442d4ae606bac033e4b53 | 584 | py | Python | tutorials/migrations/0031_auto_20210211_1605.py | ericrobskyhuntley/vialab.mit.edu | 1318d03b8eeb106c1662052e1caa53290e206ae7 | [
"MIT"
] | null | null | null | tutorials/migrations/0031_auto_20210211_1605.py | ericrobskyhuntley/vialab.mit.edu | 1318d03b8eeb106c1662052e1caa53290e206ae7 | [
"MIT"
] | null | null | null | tutorials/migrations/0031_auto_20210211_1605.py | ericrobskyhuntley/vialab.mit.edu | 1318d03b8eeb106c1662052e1caa53290e206ae7 | [
"MIT"
] | null | null | null | # Generated by Django 3.0.4 on 2021-02-11 21:05
from django.db import migrations
import martor.models
class Migration(migrations.Migration):
    # Converts the 400-character ``desc`` field to a Martor markdown field on
    # both the live model and its history-tracking counterpart.

    dependencies = [
        ('tutorials', '0030_auto_20200408_1257'),
    ]

    operations = [
        # ``historicalsoftware`` appears to mirror ``software`` (history
        # tracking), so both fields are altered in lockstep.
        # NOTE(review): confirm the historical model is auto-generated and
        # must match — it is not visible from this migration alone.
        migrations.AlterField(
            model_name='historicalsoftware',
            name='desc',
            field=martor.models.MartorField(max_length=400),
        ),
        migrations.AlterField(
            model_name='software',
            name='desc',
            field=martor.models.MartorField(max_length=400),
        ),
    ]
| 23.36 | 60 | 0.601027 |
from django.db import migrations
import martor.models
class Migration(migrations.Migration):
dependencies = [
('tutorials', '0030_auto_20200408_1257'),
]
operations = [
migrations.AlterField(
model_name='historicalsoftware',
name='desc',
field=martor.models.MartorField(max_length=400),
),
migrations.AlterField(
model_name='software',
name='desc',
field=martor.models.MartorField(max_length=400),
),
]
| true | true |
f72adfbd0b4913e9c0e119e52b6aa8237cc00b2a | 2,757 | py | Python | tools/count_opsize.py | VDIGPKU/OPANAS | 873ff09a65d3253ce8351e54880a642517f7e8b5 | [
"Apache-2.0"
] | 39 | 2021-03-31T21:15:48.000Z | 2022-03-30T03:34:14.000Z | tools/count_opsize.py | VDIGPKU/OPANAS | 873ff09a65d3253ce8351e54880a642517f7e8b5 | [
"Apache-2.0"
] | 8 | 2021-04-06T07:58:03.000Z | 2022-01-11T17:10:51.000Z | tools/count_opsize.py | VDIGPKU/OPANAS | 873ff09a65d3253ce8351e54880a642517f7e8b5 | [
"Apache-2.0"
] | 4 | 2021-04-06T03:28:56.000Z | 2022-03-06T19:57:50.000Z | import argparse
import os
import warnings
import mmcv
import torch
from mmcv import Config, DictAction
from mmcv.cnn import fuse_conv_bn
from mmcv.parallel import MMDataParallel, MMDistributedDataParallel
from mmcv.runner import get_dist_info, init_dist, load_checkpoint
from mmcv.runner import (HOOKS, DistSamplerSeedHook, EpochBasedRunner,
OptimizerHook, build_optimizer)
from mmdet.apis import multi_gpu_test_search, single_gpu_test_search
from mmdet.core import wrap_fp16_model
from mmdet.datasets import (build_dataloader, build_dataset,
replace_ImageToTensor)
from mmdet.models import build_detector
import numpy as np
from torch.autograd import Variable
import collections
import sys
import time
import copy
from mmdet.core import encode_mask_results, tensor2imgs
import logging
# Raise the recursion limit well above the default 1000; presumably the
# search/EA code recurses deeply -- TODO confirm which call path needs this.
sys.setrecursionlimit(10000)
import argparse
import torch.distributed as dist
import functools
import random
import os
from mmdet.models.necks.spos_opsc import OPS
# Candidate op names; the integer codes found in the search log index into
# this list (see main()).
PRIMITIVES = ['TDM_dcn', 'BUM_dcn', 'PCONV_dcn', 'FSM_dcn']
def countop(paths, channel):
    """Sum the parameter size and FLOPs of the ops named in ``paths``.

    Each entry of ``paths`` is a key into the ``OPS`` registry; each op is
    instantiated with ``channel`` input and output channels.

    :param paths: iterable of op names (keys of ``OPS``).
    :param channel: channel width passed as both in/out channels.
    :return: tuple ``(total_size, total_flops)``.
    """
    opsize = 0
    fp = 0
    for path in paths:
        # NOTE(review): the two trailing booleans match OPS's factory
        # signature -- their meaning is not visible here, confirm in
        # mmdet.models.necks.spos_opsc.
        op = OPS[path](channel, channel, True, True)
        opsize += op.size
        fp += op.fp
    return opsize, fp
def parse_args():
    """Build and parse command-line arguments.

    Bug fix: argparse ignores ``default=`` on a required positional argument,
    so the original default path was dead code. ``nargs='?'`` makes ``log``
    optional and lets the default take effect; explicit invocations keep
    working unchanged.

    :return: the parsed :class:`argparse.Namespace` with a ``log`` attribute.
    """
    # Original description ('Train a detector') was a copy-paste error from
    # another tool; this script only counts op sizes/FLOPs from a log.
    parser = argparse.ArgumentParser(description='Count op size and FLOPs from a train log')
    parser.add_argument(
        'log',
        nargs='?',
        help='train log file path',
        default='./work_dirs/faster_rcnn_r50_sposfpn3_uniform_dcn_p4st12_c64_256_1x_coco/epoch_12_ea_prun_0_20210104_075032.log')
    args = parser.parse_args()
    return args
def main():
    """Scan an EA-search log and dump per-candidate stats to a ``.txt`` file.

    For every evaluated architecture line (contains '[' and 'AP'), decode the
    op path, compute its size/FLOPs via :func:`countop`, and write
    ``ops channel mAP size flops`` to a file next to the log.
    """
    args = parse_args()
    print(args)
    log_path = args.log
    print(os.getcwd())
    print(log_path)
    out_path = os.path.splitext(log_path)[0] + '.txt'
    print(out_path)
    # Bug fix: the original opened both files and never closed them; 'with'
    # guarantees they are closed even if parsing raises.
    with open(log_path, 'r') as log_file, open(out_path, 'w') as out_file:
        for line in log_file:
            if '[' in line and 'AP' in line:
                # The candidate is logged as '(i, i, ..., channel)'; the last
                # integer is the channel width, the rest index PRIMITIVES.
                start = line.index('(')
                end = line.index(')')
                fields = line[start + 1:end].split(', ')
                op_paths = [int(i) for i in fields]
                channel = op_paths[-1]
                cand = [PRIMITIVES[i] for i in op_paths[:-1]]
                opsize, fp = countop(cand, channel)
                # The 12 characters after 'AP ' hold the mAP figure as logged.
                # Renamed from 'map' to avoid shadowing the builtin.
                ap = line.index('AP')
                map_str = line[ap + 3:ap + 15]
                out_file.write(
                    str(cand) + ' ' + str(channel) + ' ' + map_str + ' '
                    + str(opsize) + ' ' + str(fp) + '\n')
                print(cand, channel, map_str, opsize, fp)
            if 'top 50 result' in line:
                break
# Run only when executed as a script, so the module can be imported
# side-effect free.
if __name__ == '__main__':
    main()
import os
import warnings
import mmcv
import torch
from mmcv import Config, DictAction
from mmcv.cnn import fuse_conv_bn
from mmcv.parallel import MMDataParallel, MMDistributedDataParallel
from mmcv.runner import get_dist_info, init_dist, load_checkpoint
from mmcv.runner import (HOOKS, DistSamplerSeedHook, EpochBasedRunner,
OptimizerHook, build_optimizer)
from mmdet.apis import multi_gpu_test_search, single_gpu_test_search
from mmdet.core import wrap_fp16_model
from mmdet.datasets import (build_dataloader, build_dataset,
replace_ImageToTensor)
from mmdet.models import build_detector
import numpy as np
from torch.autograd import Variable
import collections
import sys
import time
import copy
from mmdet.core import encode_mask_results, tensor2imgs
import logging
# Raise the recursion limit well above the default 1000; presumably the
# search/EA code recurses deeply -- TODO confirm which call path needs this.
sys.setrecursionlimit(10000)
import argparse
import torch.distributed as dist
import functools
import random
import os
from mmdet.models.necks.spos_opsc import OPS
# Candidate op names; the integer codes found in the search log index into
# this list (see main()).
PRIMITIVES = ['TDM_dcn', 'BUM_dcn', 'PCONV_dcn', 'FSM_dcn']
def countop(paths, channel):
    """Sum the parameter size and FLOPs of the ops named in ``paths``.

    Each entry of ``paths`` is a key into the ``OPS`` registry; each op is
    instantiated with ``channel`` input and output channels.

    :param paths: iterable of op names (keys of ``OPS``).
    :param channel: channel width passed as both in/out channels.
    :return: tuple ``(total_size, total_flops)``.
    """
    opsize = 0
    fp = 0
    for path in paths:
        # NOTE(review): the two trailing booleans match OPS's factory
        # signature -- their meaning is not visible here, confirm in
        # mmdet.models.necks.spos_opsc.
        op = OPS[path](channel, channel, True, True)
        opsize += op.size
        fp += op.fp
    return opsize, fp
def parse_args():
    """Build and parse command-line arguments.

    Bug fix: argparse ignores ``default=`` on a required positional argument,
    so the original default path was dead code. ``nargs='?'`` makes ``log``
    optional and lets the default take effect; explicit invocations keep
    working unchanged.

    :return: the parsed :class:`argparse.Namespace` with a ``log`` attribute.
    """
    # Original description ('Train a detector') was a copy-paste error from
    # another tool; this script only counts op sizes/FLOPs from a log.
    parser = argparse.ArgumentParser(description='Count op size and FLOPs from a train log')
    parser.add_argument(
        'log',
        nargs='?',
        help='train log file path',
        default='./work_dirs/faster_rcnn_r50_sposfpn3_uniform_dcn_p4st12_c64_256_1x_coco/epoch_12_ea_prun_0_20210104_075032.log')
    args = parser.parse_args()
    return args
def main():
    """Scan an EA-search log and dump per-candidate stats to a ``.txt`` file.

    For every evaluated architecture line (contains '[' and 'AP'), decode the
    op path, compute its size/FLOPs via :func:`countop`, and write
    ``ops channel mAP size flops`` to a file next to the log.
    """
    args = parse_args()
    print(args)
    log_path = args.log
    print(os.getcwd())
    print(log_path)
    out_path = os.path.splitext(log_path)[0] + '.txt'
    print(out_path)
    # Bug fix: the original opened both files and never closed them; 'with'
    # guarantees they are closed even if parsing raises.
    with open(log_path, 'r') as log_file, open(out_path, 'w') as out_file:
        for line in log_file:
            if '[' in line and 'AP' in line:
                # The candidate is logged as '(i, i, ..., channel)'; the last
                # integer is the channel width, the rest index PRIMITIVES.
                start = line.index('(')
                end = line.index(')')
                fields = line[start + 1:end].split(', ')
                op_paths = [int(i) for i in fields]
                channel = op_paths[-1]
                cand = [PRIMITIVES[i] for i in op_paths[:-1]]
                opsize, fp = countop(cand, channel)
                # The 12 characters after 'AP ' hold the mAP figure as logged.
                # Renamed from 'map' to avoid shadowing the builtin.
                ap = line.index('AP')
                map_str = line[ap + 3:ap + 15]
                out_file.write(
                    str(cand) + ' ' + str(channel) + ' ' + map_str + ' '
                    + str(opsize) + ' ' + str(fp) + '\n')
                print(cand, channel, map_str, opsize, fp)
            if 'top 50 result' in line:
                break
# Run only when executed as a script, so the module can be imported
# side-effect free.
if __name__ == '__main__':
    main()
f72ae0a27f7cd75894571c6fa943dd5463f7ef49 | 15,394 | py | Python | tests/test_rtc_parse_aec.py | fyntex/lib-cl-sii-python | b6ffb72be1f173a1d2e44b17ae5c08caf96ebf34 | [
"MIT"
] | 8 | 2020-03-07T19:58:40.000Z | 2021-12-15T13:47:40.000Z | tests/test_rtc_parse_aec.py | fyntex/lib-cl-sii-python | b6ffb72be1f173a1d2e44b17ae5c08caf96ebf34 | [
"MIT"
] | 141 | 2020-01-17T22:47:35.000Z | 2022-03-31T18:29:47.000Z | tests/test_rtc_parse_aec.py | fyntex/lib-cl-sii-python | b6ffb72be1f173a1d2e44b17ae5c08caf96ebf34 | [
"MIT"
] | 3 | 2020-03-07T20:30:02.000Z | 2021-03-22T03:14:26.000Z | from __future__ import annotations
import unittest
from datetime import date, datetime
from cl_sii.dte.data_models import DteDataL1, DteXmlData
from cl_sii.dte.constants import TipoDteEnum
from cl_sii.dte.parse import DTE_XMLNS
from cl_sii.libs import encoding_utils
from cl_sii.libs import tz_utils
from cl_sii.libs import xml_utils
from cl_sii.rut import Rut
from cl_sii.rtc.data_models_aec import CesionAecXml, AecXml
from cl_sii.rtc.parse_aec import AEC_XML_SCHEMA_OBJ, parse_aec_xml, validate_aec_xml
from .utils import read_test_file_bytes
class AecXmlSchemaTest(unittest.TestCase):
    """
    Tests for AEC XML schema.
    """
    # Placeholder: real schema tests are not written yet; this only asserts
    # that the module-level schema object was built at import time, and is
    # skipped until implemented.
    @unittest.skip("TODO: Implement for 'AEC_XML_SCHEMA_OBJ'.")
    def test_AEC_XML_SCHEMA_OBJ(self):
        self.assertIsNotNone(AEC_XML_SCHEMA_OBJ)
class AecXmlValidatorTest(unittest.TestCase):
    """
    Tests for :func:`validate_aec_xml`.
    """

    def _set_obj_1(self) -> None:
        # Real-world AEC fixture #1.
        self.aec_1_xml_bytes = read_test_file_bytes(
            'test_data/sii-rtc/AEC--76354771-K--33--170--SEQ-2.xml',
        )

    def _set_obj_2(self) -> None:
        # Real-world AEC fixture #2.
        self.aec_2_xml_bytes = read_test_file_bytes(
            'test_data/sii-rtc/AEC--76399752-9--33--25568--SEQ-1.xml',
        )

    def _assert_validates(self, aec_xml_bytes: bytes) -> None:
        # Shared body of the two "ok" tests: the document must validate
        # without raising, and its root element must be the namespaced AEC.
        xml_doc = xml_utils.parse_untrusted_xml(aec_xml_bytes)
        try:
            validate_aec_xml(xml_doc)
        except xml_utils.XmlSchemaDocValidationError as exc:
            self.fail(f'{exc.__class__.__name__} raised')
        self.assertEqual(
            xml_doc.getroottree().getroot().tag,
            '{%s}AEC' % DTE_XMLNS,
        )

    def test_validate_aec_xml_ok_1(self) -> None:
        self._set_obj_1()
        self._assert_validates(self.aec_1_xml_bytes)

    def test_validate_aec_xml_ok_2(self) -> None:
        self._set_obj_2()
        self._assert_validates(self.aec_2_xml_bytes)

    @unittest.skip("TODO: Implement for 'validate_aec_xml'.")
    def test_validate_aec_xml_fail(self) -> None:
        self.assertIsNotNone(validate_aec_xml)
class AecXmlParserTest(unittest.TestCase):
    """
    Tests for :func:`parse_aec_xml`.
    """
    # Each `_set_obj_N` loads one real-world AEC fixture from disk: the AEC
    # XML itself, the AEC signature value and signer certificate, and the
    # signature value and certificate of the DTE embedded in the AEC.
    def _set_obj_1(self) -> None:
        aec_xml_bytes: bytes = read_test_file_bytes(
            'test_data/sii-rtc/AEC--76354771-K--33--170--SEQ-2.xml',
        )
        aec_signature_value: bytes = encoding_utils.decode_base64_strict(
            read_test_file_bytes(
                'test_data/sii-crypto/AEC--76354771-K--33--170--SEQ-2-signature-value-base64.txt',
            ),
        )
        aec_cert_der_bytes: bytes = read_test_file_bytes(
            'test_data/sii-crypto/AEC--76354771-K--33--170--SEQ-2-cert.der',
        )
        aec_dte_cert_der_bytes: bytes = read_test_file_bytes(
            'test_data/sii-crypto/DTE--76354771-K--33--170-cert.der',
        )
        aec_dte_signature_value: bytes = encoding_utils.decode_base64_strict(
            read_test_file_bytes(
                'test_data/sii-crypto/DTE--76354771-K--33--170-signature-value-base64.txt',
            ),
        )
        self.aec_1_xml_bytes = aec_xml_bytes
        self.aec_1_signature_value = aec_signature_value
        self.aec_1_cert_der_bytes = aec_cert_der_bytes
        self.aec_1_dte_cert_der_bytes = aec_dte_cert_der_bytes
        self.aec_1_dte_signature_value = aec_dte_signature_value
    def _set_obj_2(self) -> None:
        aec_xml_bytes: bytes = read_test_file_bytes(
            'test_data/sii-rtc/AEC--76399752-9--33--25568--SEQ-1.xml',
        )
        aec_signature_value: bytes = encoding_utils.decode_base64_strict(
            read_test_file_bytes(
                'test_data/sii-crypto/AEC--76399752-9--33--25568--SEQ-1-signature-value-base64.txt',
            ),
        )
        aec_cert_der_bytes: bytes = read_test_file_bytes(
            'test_data/sii-crypto/AEC--76399752-9--33--25568--SEQ-1-cert.der',
        )
        aec_dte_cert_der_bytes: bytes = read_test_file_bytes(
            'test_data/sii-crypto/DTE--76399752-9--33--25568-cert.der',
        )
        aec_dte_signature_value: bytes = encoding_utils.decode_base64_strict(
            read_test_file_bytes(
                'test_data/sii-crypto/DTE--76399752-9--33--25568-signature-value-base64.txt',
            ),
        )
        self.aec_2_xml_bytes = aec_xml_bytes
        self.aec_2_signature_value = aec_signature_value
        self.aec_2_cert_der_bytes = aec_cert_der_bytes
        self.aec_2_dte_cert_der_bytes = aec_dte_cert_der_bytes
        self.aec_2_dte_signature_value = aec_dte_signature_value
    def test_parse_aec_xml_ok_1(self) -> None:
        self._set_obj_1()
        aec_xml_bytes = self.aec_1_xml_bytes
        aec_signature_value = self.aec_1_signature_value
        aec_cert_der_bytes = self.aec_1_cert_der_bytes
        aec_dte_signature_value = self.aec_1_dte_signature_value
        aec_dte_cert_der_bytes = self.aec_1_dte_cert_der_bytes
        # Expected parse result, transcribed field-by-field from the fixture
        # XML; this AEC carries two chained cesiones (seq 1 and 2).
        expected_output = AecXml(
            dte=DteXmlData(
                emisor_rut=Rut('76354771-K'),
                tipo_dte=TipoDteEnum.FACTURA_ELECTRONICA,
                folio=170,
                fecha_emision_date=date(2019, 4, 1),
                receptor_rut=Rut('96790240-3'),
                monto_total=2996301,
                emisor_razon_social='INGENIERIA ENACON SPA',
                receptor_razon_social='MINERA LOS PELAMBRES',
                fecha_vencimiento_date=None,
                firma_documento_dt=tz_utils.convert_naive_dt_to_tz_aware(
                    dt=datetime(2019, 4, 1, 1, 36, 40),
                    tz=DteXmlData.DATETIME_FIELDS_TZ,
                ),
                signature_value=aec_dte_signature_value,
                signature_x509_cert_der=aec_dte_cert_der_bytes,
                emisor_giro='Ingenieria y Construccion',
                emisor_email='ENACONLTDA@GMAIL.COM',
                receptor_email=None,
            ),
            cedente_rut=Rut('76389992-6'),
            cesionario_rut=Rut('76598556-0'),
            fecha_firma_dt=tz_utils.convert_naive_dt_to_tz_aware(
                dt=datetime(2019, 4, 5, 12, 57, 32),
                tz=AecXml.DATETIME_FIELDS_TZ,
            ),
            signature_value=aec_signature_value,
            signature_x509_cert_der=aec_cert_der_bytes,
            cesiones=[
                CesionAecXml(
                    dte=DteDataL1(
                        emisor_rut=Rut('76354771-K'),
                        tipo_dte=TipoDteEnum.FACTURA_ELECTRONICA,
                        folio=170,
                        fecha_emision_date=date(2019, 4, 1),
                        receptor_rut=Rut('96790240-3'),
                        monto_total=2996301,
                    ),
                    seq=1,
                    cedente_rut=Rut('76354771-K'),
                    cesionario_rut=Rut('76389992-6'),
                    monto_cesion=2996301,
                    fecha_cesion_dt=tz_utils.convert_naive_dt_to_tz_aware(
                        dt=datetime(2019, 4, 1, 10, 22, 2),
                        tz=CesionAecXml.DATETIME_FIELDS_TZ,
                    ),
                    fecha_ultimo_vencimiento=date(2019, 5, 1),
                    cedente_razon_social='SERVICIOS BONILLA Y LOPEZ Y COMPAÑIA LIMITADA',
                    cedente_direccion='MERCED 753 16 ARBOLEDA DE QUIILOTA',
                    cedente_email='enaconltda@gmail.com',
                    cedente_persona_autorizada_rut=Rut('76354771-K'),
                    cedente_persona_autorizada_nombre='SERVICIOS BONILLA Y LOPEZ Y COMPAÑIA LIM',
                    cesionario_razon_social='ST CAPITAL S.A.',
                    cesionario_direccion='Isidora Goyenechea 2939 Oficina 602',
                    cesionario_email='fynpal-app-notif-st-capital@fynpal.com',
                    dte_deudor_email=None,
                    # NOTE: the declaration text (including its typos) must
                    # match the fixture byte-for-byte.
                    cedente_declaracion_jurada=(
                        'Se declara bajo juramento que SERVICIOS BONILLA Y LOPEZ Y COMPAÑIA '
                        'LIMITADA, RUT 76354771-K ha puesto a disposición del cesionario ST '
                        'CAPITAL S.A., RUT 76389992-6, el o los documentos donde constan los '
                        'recibos de las mercaderías entregadas o servicios prestados, entregados '
                        'por parte del deudor de la factura MINERA LOS PELAMBRES, RUT 96790240-3, '
                        'deacuerdo a lo establecido en la Ley N°19.983.'
                    ),
                ),
                CesionAecXml(
                    dte=DteDataL1(
                        emisor_rut=Rut('76354771-K'),
                        tipo_dte=TipoDteEnum.FACTURA_ELECTRONICA,
                        folio=170,
                        fecha_emision_date=date(2019, 4, 1),
                        receptor_rut=Rut('96790240-3'),
                        monto_total=2996301,
                    ),
                    seq=2,
                    cedente_rut=Rut('76389992-6'),
                    cesionario_rut=Rut('76598556-0'),
                    monto_cesion=2996301,
                    fecha_cesion_dt=tz_utils.convert_naive_dt_to_tz_aware(
                        dt=datetime(2019, 4, 5, 12, 57, 32),
                        tz=CesionAecXml.DATETIME_FIELDS_TZ,
                    ),
                    fecha_ultimo_vencimiento=date(2019, 5, 1),
                    cedente_razon_social='ST CAPITAL S.A.',
                    cedente_direccion='Isidora Goyenechea 2939 Oficina 602',
                    cedente_email='APrat@Financiaenlinea.com',
                    cesionario_razon_social='Fondo de Inversión Privado Deuda y Facturas',
                    cesionario_direccion='Arrayan 2750 Oficina 703 Providencia',
                    cesionario_email='solicitudes@stcapital.cl',
                    cedente_persona_autorizada_rut=Rut('16360379-9'),
                    cedente_persona_autorizada_nombre='ANDRES PRATS VIAL',
                    dte_deudor_email=None,
                    cedente_declaracion_jurada=(
                        'Se declara bajo juramento que ST CAPITAL S.A., RUT 76389992-6 ha puesto '
                        'a disposicion del cesionario Fondo de Inversión Privado Deuda y Facturas, '
                        'RUT 76598556-0, el documento validamente emitido al deudor MINERA LOS '
                        'PELAMBRES, RUT 96790240-3.'
                    ),
                ),
            ],
            contacto_nombre='ST Capital Servicios Financieros',
            contacto_telefono=None,
            contacto_email='APrat@Financiaenlinea.com',
        )
        xml_doc = xml_utils.parse_untrusted_xml(aec_xml_bytes)
        aec_xml = parse_aec_xml(xml_doc)
        self.assertEqual(aec_xml, expected_output)
    def test_parse_aec_xml_ok_2(self) -> None:
        self._set_obj_2()
        aec_xml_bytes = self.aec_2_xml_bytes
        aec_signature_value = self.aec_2_signature_value
        aec_cert_der_bytes = self.aec_2_cert_der_bytes
        aec_dte_signature_value = self.aec_2_dte_signature_value
        aec_dte_cert_der_bytes = self.aec_2_dte_cert_der_bytes
        # Expected parse result for fixture #2; this AEC carries a single
        # cesion (seq 1).
        expected_output = AecXml(
            dte=DteXmlData(
                emisor_rut=Rut('76399752-9'),
                tipo_dte=TipoDteEnum.FACTURA_ELECTRONICA,
                folio=25568,
                fecha_emision_date=date(2019, 3, 29),
                receptor_rut=Rut('96874030-K'),
                monto_total=230992,
                emisor_razon_social='COMERCIALIZADORA INNOVA MOBEL SPA',
                receptor_razon_social='EMPRESAS LA POLAR S.A.',
                fecha_vencimiento_date=None,
                firma_documento_dt=tz_utils.convert_naive_dt_to_tz_aware(
                    dt=datetime(2019, 3, 28, 13, 59, 52),
                    tz=DteXmlData.DATETIME_FIELDS_TZ,
                ),
                signature_value=aec_dte_signature_value,
                signature_x509_cert_der=aec_dte_cert_der_bytes,
                emisor_giro='COMERCIALIZACION DE PRODUCTOS PARA EL HOGAR',
                emisor_email='ANGEL.PEZO@APCASESORIAS.CL',
                receptor_email=None,
            ),
            cedente_rut=Rut('76399752-9'),
            cesionario_rut=Rut('76389992-6'),
            fecha_firma_dt=tz_utils.convert_naive_dt_to_tz_aware(
                dt=datetime(2019, 4, 4, 9, 9, 52),
                tz=AecXml.DATETIME_FIELDS_TZ,
            ),
            signature_value=aec_signature_value,
            signature_x509_cert_der=aec_cert_der_bytes,
            cesiones=[
                CesionAecXml(
                    dte=DteDataL1(
                        emisor_rut=Rut('76399752-9'),
                        tipo_dte=TipoDteEnum.FACTURA_ELECTRONICA,
                        folio=25568,
                        fecha_emision_date=date(2019, 3, 29),
                        receptor_rut=Rut('96874030-K'),
                        monto_total=230992,
                    ),
                    seq=1,
                    cedente_rut=Rut('76399752-9'),
                    cesionario_rut=Rut('76389992-6'),
                    monto_cesion=230992,
                    fecha_cesion_dt=tz_utils.convert_naive_dt_to_tz_aware(
                        dt=datetime(2019, 4, 4, 9, 9, 52),
                        tz=CesionAecXml.DATETIME_FIELDS_TZ,
                    ),
                    fecha_ultimo_vencimiento=date(2019, 4, 28),
                    cedente_razon_social='COMERCIALIZADORA INNOVA MOBEL SPA',
                    cedente_direccion='LOS CIPRESES 2834',
                    cedente_email='camilo.perez@innovamobel.cl',
                    cedente_persona_autorizada_rut=Rut('76399752-9'),
                    cedente_persona_autorizada_nombre='COMERCIALIZADORA INNOVA MOBEL SPA',
                    cesionario_razon_social='ST CAPITAL S.A.',
                    cesionario_direccion='Isidora Goyenechea 2939 Oficina 602',
                    cesionario_email='fynpal-app-notif-st-capital@fynpal.com',
                    dte_deudor_email=None,
                    cedente_declaracion_jurada=(
                        'Se declara bajo juramento que COMERCIALIZADORA INNOVA MOBEL SPA, RUT '
                        '76399752-9 ha puesto a disposición del cesionario ST CAPITAL S.A., RUT '
                        '76389992-6, el o los documentos donde constan los recibos de las '
                        'mercaderías entregadas o servicios prestados, entregados por parte del '
                        'deudor de la factura EMPRESAS LA POLAR S.A., RUT 96874030-K, deacuerdo a '
                        'lo establecido en la Ley N°19.983.'
                    ),
                ),
            ],
            contacto_nombre=None,
            contacto_telefono=None,
            contacto_email='fynpal-app-notif-st-capital@fynpal.com',
        )
        xml_doc = xml_utils.parse_untrusted_xml(aec_xml_bytes)
        aec_xml = parse_aec_xml(xml_doc)
        self.assertEqual(aec_xml, expected_output)
| 44.235632 | 100 | 0.59023 | from __future__ import annotations
import unittest
from datetime import date, datetime
from cl_sii.dte.data_models import DteDataL1, DteXmlData
from cl_sii.dte.constants import TipoDteEnum
from cl_sii.dte.parse import DTE_XMLNS
from cl_sii.libs import encoding_utils
from cl_sii.libs import tz_utils
from cl_sii.libs import xml_utils
from cl_sii.rut import Rut
from cl_sii.rtc.data_models_aec import CesionAecXml, AecXml
from cl_sii.rtc.parse_aec import AEC_XML_SCHEMA_OBJ, parse_aec_xml, validate_aec_xml
from .utils import read_test_file_bytes
class AecXmlSchemaTest(unittest.TestCase):
    """
    Tests for AEC XML schema.
    """
    # Placeholder: real schema tests are not written yet; this only asserts
    # that the module-level schema object was built at import time, and is
    # skipped until implemented.
    @unittest.skip("TODO: Implement for 'AEC_XML_SCHEMA_OBJ'.")
    def test_AEC_XML_SCHEMA_OBJ(self):
        self.assertIsNotNone(AEC_XML_SCHEMA_OBJ)
class AecXmlValidatorTest(unittest.TestCase):
    """
    Tests for :func:`validate_aec_xml`.
    """

    def _set_obj_1(self) -> None:
        # Real-world AEC fixture #1.
        self.aec_1_xml_bytes = read_test_file_bytes(
            'test_data/sii-rtc/AEC--76354771-K--33--170--SEQ-2.xml',
        )

    def _set_obj_2(self) -> None:
        # Real-world AEC fixture #2.
        self.aec_2_xml_bytes = read_test_file_bytes(
            'test_data/sii-rtc/AEC--76399752-9--33--25568--SEQ-1.xml',
        )

    def _assert_validates(self, aec_xml_bytes: bytes) -> None:
        # Shared body of the two "ok" tests: the document must validate
        # without raising, and its root element must be the namespaced AEC.
        xml_doc = xml_utils.parse_untrusted_xml(aec_xml_bytes)
        try:
            validate_aec_xml(xml_doc)
        except xml_utils.XmlSchemaDocValidationError as exc:
            self.fail(f'{exc.__class__.__name__} raised')
        self.assertEqual(
            xml_doc.getroottree().getroot().tag,
            '{%s}AEC' % DTE_XMLNS,
        )

    def test_validate_aec_xml_ok_1(self) -> None:
        self._set_obj_1()
        self._assert_validates(self.aec_1_xml_bytes)

    def test_validate_aec_xml_ok_2(self) -> None:
        self._set_obj_2()
        self._assert_validates(self.aec_2_xml_bytes)

    @unittest.skip("TODO: Implement for 'validate_aec_xml'.")
    def test_validate_aec_xml_fail(self) -> None:
        self.assertIsNotNone(validate_aec_xml)
class AecXmlParserTest(unittest.TestCase):
    """
    Tests for :func:`parse_aec_xml`.
    """
    # Each `_set_obj_N` loads one real-world AEC fixture from disk: the AEC
    # XML itself, the AEC signature value and signer certificate, and the
    # signature value and certificate of the DTE embedded in the AEC.
    def _set_obj_1(self) -> None:
        aec_xml_bytes: bytes = read_test_file_bytes(
            'test_data/sii-rtc/AEC--76354771-K--33--170--SEQ-2.xml',
        )
        aec_signature_value: bytes = encoding_utils.decode_base64_strict(
            read_test_file_bytes(
                'test_data/sii-crypto/AEC--76354771-K--33--170--SEQ-2-signature-value-base64.txt',
            ),
        )
        aec_cert_der_bytes: bytes = read_test_file_bytes(
            'test_data/sii-crypto/AEC--76354771-K--33--170--SEQ-2-cert.der',
        )
        aec_dte_cert_der_bytes: bytes = read_test_file_bytes(
            'test_data/sii-crypto/DTE--76354771-K--33--170-cert.der',
        )
        aec_dte_signature_value: bytes = encoding_utils.decode_base64_strict(
            read_test_file_bytes(
                'test_data/sii-crypto/DTE--76354771-K--33--170-signature-value-base64.txt',
            ),
        )
        self.aec_1_xml_bytes = aec_xml_bytes
        self.aec_1_signature_value = aec_signature_value
        self.aec_1_cert_der_bytes = aec_cert_der_bytes
        self.aec_1_dte_cert_der_bytes = aec_dte_cert_der_bytes
        self.aec_1_dte_signature_value = aec_dte_signature_value
    def _set_obj_2(self) -> None:
        aec_xml_bytes: bytes = read_test_file_bytes(
            'test_data/sii-rtc/AEC--76399752-9--33--25568--SEQ-1.xml',
        )
        aec_signature_value: bytes = encoding_utils.decode_base64_strict(
            read_test_file_bytes(
                'test_data/sii-crypto/AEC--76399752-9--33--25568--SEQ-1-signature-value-base64.txt',
            ),
        )
        aec_cert_der_bytes: bytes = read_test_file_bytes(
            'test_data/sii-crypto/AEC--76399752-9--33--25568--SEQ-1-cert.der',
        )
        aec_dte_cert_der_bytes: bytes = read_test_file_bytes(
            'test_data/sii-crypto/DTE--76399752-9--33--25568-cert.der',
        )
        aec_dte_signature_value: bytes = encoding_utils.decode_base64_strict(
            read_test_file_bytes(
                'test_data/sii-crypto/DTE--76399752-9--33--25568-signature-value-base64.txt',
            ),
        )
        self.aec_2_xml_bytes = aec_xml_bytes
        self.aec_2_signature_value = aec_signature_value
        self.aec_2_cert_der_bytes = aec_cert_der_bytes
        self.aec_2_dte_cert_der_bytes = aec_dte_cert_der_bytes
        self.aec_2_dte_signature_value = aec_dte_signature_value
    def test_parse_aec_xml_ok_1(self) -> None:
        self._set_obj_1()
        aec_xml_bytes = self.aec_1_xml_bytes
        aec_signature_value = self.aec_1_signature_value
        aec_cert_der_bytes = self.aec_1_cert_der_bytes
        aec_dte_signature_value = self.aec_1_dte_signature_value
        aec_dte_cert_der_bytes = self.aec_1_dte_cert_der_bytes
        # Expected parse result, transcribed field-by-field from the fixture
        # XML; this AEC carries two chained cesiones (seq 1 and 2).
        expected_output = AecXml(
            dte=DteXmlData(
                emisor_rut=Rut('76354771-K'),
                tipo_dte=TipoDteEnum.FACTURA_ELECTRONICA,
                folio=170,
                fecha_emision_date=date(2019, 4, 1),
                receptor_rut=Rut('96790240-3'),
                monto_total=2996301,
                emisor_razon_social='INGENIERIA ENACON SPA',
                receptor_razon_social='MINERA LOS PELAMBRES',
                fecha_vencimiento_date=None,
                firma_documento_dt=tz_utils.convert_naive_dt_to_tz_aware(
                    dt=datetime(2019, 4, 1, 1, 36, 40),
                    tz=DteXmlData.DATETIME_FIELDS_TZ,
                ),
                signature_value=aec_dte_signature_value,
                signature_x509_cert_der=aec_dte_cert_der_bytes,
                emisor_giro='Ingenieria y Construccion',
                emisor_email='ENACONLTDA@GMAIL.COM',
                receptor_email=None,
            ),
            cedente_rut=Rut('76389992-6'),
            cesionario_rut=Rut('76598556-0'),
            fecha_firma_dt=tz_utils.convert_naive_dt_to_tz_aware(
                dt=datetime(2019, 4, 5, 12, 57, 32),
                tz=AecXml.DATETIME_FIELDS_TZ,
            ),
            signature_value=aec_signature_value,
            signature_x509_cert_der=aec_cert_der_bytes,
            cesiones=[
                CesionAecXml(
                    dte=DteDataL1(
                        emisor_rut=Rut('76354771-K'),
                        tipo_dte=TipoDteEnum.FACTURA_ELECTRONICA,
                        folio=170,
                        fecha_emision_date=date(2019, 4, 1),
                        receptor_rut=Rut('96790240-3'),
                        monto_total=2996301,
                    ),
                    seq=1,
                    cedente_rut=Rut('76354771-K'),
                    cesionario_rut=Rut('76389992-6'),
                    monto_cesion=2996301,
                    fecha_cesion_dt=tz_utils.convert_naive_dt_to_tz_aware(
                        dt=datetime(2019, 4, 1, 10, 22, 2),
                        tz=CesionAecXml.DATETIME_FIELDS_TZ,
                    ),
                    fecha_ultimo_vencimiento=date(2019, 5, 1),
                    cedente_razon_social='SERVICIOS BONILLA Y LOPEZ Y COMPAÑIA LIMITADA',
                    cedente_direccion='MERCED 753 16 ARBOLEDA DE QUIILOTA',
                    cedente_email='enaconltda@gmail.com',
                    cedente_persona_autorizada_rut=Rut('76354771-K'),
                    cedente_persona_autorizada_nombre='SERVICIOS BONILLA Y LOPEZ Y COMPAÑIA LIM',
                    cesionario_razon_social='ST CAPITAL S.A.',
                    cesionario_direccion='Isidora Goyenechea 2939 Oficina 602',
                    cesionario_email='fynpal-app-notif-st-capital@fynpal.com',
                    dte_deudor_email=None,
                    # NOTE: the declaration text (including its typos) must
                    # match the fixture byte-for-byte.
                    cedente_declaracion_jurada=(
                        'Se declara bajo juramento que SERVICIOS BONILLA Y LOPEZ Y COMPAÑIA '
                        'LIMITADA, RUT 76354771-K ha puesto a disposición del cesionario ST '
                        'CAPITAL S.A., RUT 76389992-6, el o los documentos donde constan los '
                        'recibos de las mercaderías entregadas o servicios prestados, entregados '
                        'por parte del deudor de la factura MINERA LOS PELAMBRES, RUT 96790240-3, '
                        'deacuerdo a lo establecido en la Ley N°19.983.'
                    ),
                ),
                CesionAecXml(
                    dte=DteDataL1(
                        emisor_rut=Rut('76354771-K'),
                        tipo_dte=TipoDteEnum.FACTURA_ELECTRONICA,
                        folio=170,
                        fecha_emision_date=date(2019, 4, 1),
                        receptor_rut=Rut('96790240-3'),
                        monto_total=2996301,
                    ),
                    seq=2,
                    cedente_rut=Rut('76389992-6'),
                    cesionario_rut=Rut('76598556-0'),
                    monto_cesion=2996301,
                    fecha_cesion_dt=tz_utils.convert_naive_dt_to_tz_aware(
                        dt=datetime(2019, 4, 5, 12, 57, 32),
                        tz=CesionAecXml.DATETIME_FIELDS_TZ,
                    ),
                    fecha_ultimo_vencimiento=date(2019, 5, 1),
                    cedente_razon_social='ST CAPITAL S.A.',
                    cedente_direccion='Isidora Goyenechea 2939 Oficina 602',
                    cedente_email='APrat@Financiaenlinea.com',
                    cesionario_razon_social='Fondo de Inversión Privado Deuda y Facturas',
                    cesionario_direccion='Arrayan 2750 Oficina 703 Providencia',
                    cesionario_email='solicitudes@stcapital.cl',
                    cedente_persona_autorizada_rut=Rut('16360379-9'),
                    cedente_persona_autorizada_nombre='ANDRES PRATS VIAL',
                    dte_deudor_email=None,
                    cedente_declaracion_jurada=(
                        'Se declara bajo juramento que ST CAPITAL S.A., RUT 76389992-6 ha puesto '
                        'a disposicion del cesionario Fondo de Inversión Privado Deuda y Facturas, '
                        'RUT 76598556-0, el documento validamente emitido al deudor MINERA LOS '
                        'PELAMBRES, RUT 96790240-3.'
                    ),
                ),
            ],
            contacto_nombre='ST Capital Servicios Financieros',
            contacto_telefono=None,
            contacto_email='APrat@Financiaenlinea.com',
        )
        xml_doc = xml_utils.parse_untrusted_xml(aec_xml_bytes)
        aec_xml = parse_aec_xml(xml_doc)
        self.assertEqual(aec_xml, expected_output)
    def test_parse_aec_xml_ok_2(self) -> None:
        self._set_obj_2()
        aec_xml_bytes = self.aec_2_xml_bytes
        aec_signature_value = self.aec_2_signature_value
        aec_cert_der_bytes = self.aec_2_cert_der_bytes
        aec_dte_signature_value = self.aec_2_dte_signature_value
        aec_dte_cert_der_bytes = self.aec_2_dte_cert_der_bytes
        # Expected parse result for fixture #2; this AEC carries a single
        # cesion (seq 1).
        expected_output = AecXml(
            dte=DteXmlData(
                emisor_rut=Rut('76399752-9'),
                tipo_dte=TipoDteEnum.FACTURA_ELECTRONICA,
                folio=25568,
                fecha_emision_date=date(2019, 3, 29),
                receptor_rut=Rut('96874030-K'),
                monto_total=230992,
                emisor_razon_social='COMERCIALIZADORA INNOVA MOBEL SPA',
                receptor_razon_social='EMPRESAS LA POLAR S.A.',
                fecha_vencimiento_date=None,
                firma_documento_dt=tz_utils.convert_naive_dt_to_tz_aware(
                    dt=datetime(2019, 3, 28, 13, 59, 52),
                    tz=DteXmlData.DATETIME_FIELDS_TZ,
                ),
                signature_value=aec_dte_signature_value,
                signature_x509_cert_der=aec_dte_cert_der_bytes,
                emisor_giro='COMERCIALIZACION DE PRODUCTOS PARA EL HOGAR',
                emisor_email='ANGEL.PEZO@APCASESORIAS.CL',
                receptor_email=None,
            ),
            cedente_rut=Rut('76399752-9'),
            cesionario_rut=Rut('76389992-6'),
            fecha_firma_dt=tz_utils.convert_naive_dt_to_tz_aware(
                dt=datetime(2019, 4, 4, 9, 9, 52),
                tz=AecXml.DATETIME_FIELDS_TZ,
            ),
            signature_value=aec_signature_value,
            signature_x509_cert_der=aec_cert_der_bytes,
            cesiones=[
                CesionAecXml(
                    dte=DteDataL1(
                        emisor_rut=Rut('76399752-9'),
                        tipo_dte=TipoDteEnum.FACTURA_ELECTRONICA,
                        folio=25568,
                        fecha_emision_date=date(2019, 3, 29),
                        receptor_rut=Rut('96874030-K'),
                        monto_total=230992,
                    ),
                    seq=1,
                    cedente_rut=Rut('76399752-9'),
                    cesionario_rut=Rut('76389992-6'),
                    monto_cesion=230992,
                    fecha_cesion_dt=tz_utils.convert_naive_dt_to_tz_aware(
                        dt=datetime(2019, 4, 4, 9, 9, 52),
                        tz=CesionAecXml.DATETIME_FIELDS_TZ,
                    ),
                    fecha_ultimo_vencimiento=date(2019, 4, 28),
                    cedente_razon_social='COMERCIALIZADORA INNOVA MOBEL SPA',
                    cedente_direccion='LOS CIPRESES 2834',
                    cedente_email='camilo.perez@innovamobel.cl',
                    cedente_persona_autorizada_rut=Rut('76399752-9'),
                    cedente_persona_autorizada_nombre='COMERCIALIZADORA INNOVA MOBEL SPA',
                    cesionario_razon_social='ST CAPITAL S.A.',
                    cesionario_direccion='Isidora Goyenechea 2939 Oficina 602',
                    cesionario_email='fynpal-app-notif-st-capital@fynpal.com',
                    dte_deudor_email=None,
                    cedente_declaracion_jurada=(
                        'Se declara bajo juramento que COMERCIALIZADORA INNOVA MOBEL SPA, RUT '
                        '76399752-9 ha puesto a disposición del cesionario ST CAPITAL S.A., RUT '
                        '76389992-6, el o los documentos donde constan los recibos de las '
                        'mercaderías entregadas o servicios prestados, entregados por parte del '
                        'deudor de la factura EMPRESAS LA POLAR S.A., RUT 96874030-K, deacuerdo a '
                        'lo establecido en la Ley N°19.983.'
                    ),
                ),
            ],
            contacto_nombre=None,
            contacto_telefono=None,
            contacto_email='fynpal-app-notif-st-capital@fynpal.com',
        )
        xml_doc = xml_utils.parse_untrusted_xml(aec_xml_bytes)
        aec_xml = parse_aec_xml(xml_doc)
        self.assertEqual(aec_xml, expected_output)
| true | true |
f72ae161a0eb4e5d0974932d1ca4ef7364cf371f | 152 | py | Python | aiocloudflare/api/zones/dns_records/import_/import_.py | Stewart86/aioCloudflare | 341c0941f8f888a8b7e696e64550bce5da4949e6 | [
"MIT"
] | 2 | 2021-09-14T13:20:55.000Z | 2022-02-24T14:18:24.000Z | aiocloudflare/api/zones/dns_records/import_/import_.py | Stewart86/aioCloudflare | 341c0941f8f888a8b7e696e64550bce5da4949e6 | [
"MIT"
] | 46 | 2021-09-08T08:39:45.000Z | 2022-03-29T12:31:05.000Z | aiocloudflare/api/zones/dns_records/import_/import_.py | Stewart86/aioCloudflare | 341c0941f8f888a8b7e696e64550bce5da4949e6 | [
"MIT"
] | 1 | 2021-12-30T23:02:23.000Z | 2021-12-30T23:02:23.000Z | from aiocloudflare.commons.auth import Auth
class Import_(Auth):
_endpoint1 = "zones"
_endpoint2 = "dns_records/import"
_endpoint3 = None
| 19 | 43 | 0.723684 | from aiocloudflare.commons.auth import Auth
class Import_(Auth):
_endpoint1 = "zones"
_endpoint2 = "dns_records/import"
_endpoint3 = None
| true | true |
f72ae291978b1bc7fcf2a7bbfa465ce316156938 | 596 | py | Python | ROSpractice/src/topics_quiz/src/topics_quiz_node.py | kasiv008/Robotics | 302b3336005acd81202ebbbb0c52a4b2692fa9c7 | [
"MIT"
] | 1 | 2021-07-19T10:15:08.000Z | 2021-07-19T10:15:08.000Z | ROSpractice/src/topics_quiz/src/topics_quiz_node.py | kasiv008/Robotics | 302b3336005acd81202ebbbb0c52a4b2692fa9c7 | [
"MIT"
] | null | null | null | ROSpractice/src/topics_quiz/src/topics_quiz_node.py | kasiv008/Robotics | 302b3336005acd81202ebbbb0c52a4b2692fa9c7 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
import rospy
from geometry_msgs.msg import Twist
from sensor_msgs.msg import LaserScan
def callback(msg):
L,M,R = msg.ranges[719],msg.ranges[360],msg.ranges[0]
move.linear.x = .2
if M < 1.2:
move.linear.x = .05
move.angular.z = .1
elif L > 30 and R > 30 and M > 30:
move.linear.x = .2
move.angular.z = 0
pub.publish(move)
rospy.init_node('topics_quiz_node')
pub = rospy.Publisher('/cmd_vel',Twist)
sub = rospy.Subscriber('/kobuki/laser/scan',
LaserScan,callback)
rate = rospy.Rate(2)
move = Twist()
rospy.spin()
| 24.833333 | 57 | 0.642617 |
import rospy
from geometry_msgs.msg import Twist
from sensor_msgs.msg import LaserScan
def callback(msg):
L,M,R = msg.ranges[719],msg.ranges[360],msg.ranges[0]
move.linear.x = .2
if M < 1.2:
move.linear.x = .05
move.angular.z = .1
elif L > 30 and R > 30 and M > 30:
move.linear.x = .2
move.angular.z = 0
pub.publish(move)
rospy.init_node('topics_quiz_node')
pub = rospy.Publisher('/cmd_vel',Twist)
sub = rospy.Subscriber('/kobuki/laser/scan',
LaserScan,callback)
rate = rospy.Rate(2)
move = Twist()
rospy.spin()
| true | true |
f72ae3fa136caa90b5e27aab7455fdec4407560e | 2,016 | py | Python | alipay/aop/api/domain/KoubeiSalesKbassetStuffProduceqrcodeBatchqueryModel.py | articuly/alipay-sdk-python-all | 0259cd28eca0f219b97dac7f41c2458441d5e7a6 | [
"Apache-2.0"
] | null | null | null | alipay/aop/api/domain/KoubeiSalesKbassetStuffProduceqrcodeBatchqueryModel.py | articuly/alipay-sdk-python-all | 0259cd28eca0f219b97dac7f41c2458441d5e7a6 | [
"Apache-2.0"
] | null | null | null | alipay/aop/api/domain/KoubeiSalesKbassetStuffProduceqrcodeBatchqueryModel.py | articuly/alipay-sdk-python-all | 0259cd28eca0f219b97dac7f41c2458441d5e7a6 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import simplejson as json
from alipay.aop.api.constant.ParamConstants import *
class KoubeiSalesKbassetStuffProduceqrcodeBatchqueryModel(object):
    """Request model for the Koubei QR-code-production batch-query API.

    Exposes three optional fields (``batch_id``, ``page_size``,
    ``produce_order_id``) and round-trips them to/from the Alipay
    wire-format dict.
    """

    # Wire-format field names, in serialization order.
    _FIELDS = ('batch_id', 'page_size', 'produce_order_id')

    def __init__(self):
        self._batch_id = None
        self._page_size = None
        self._produce_order_id = None

    @property
    def batch_id(self):
        return self._batch_id

    @batch_id.setter
    def batch_id(self, value):
        self._batch_id = value

    @property
    def page_size(self):
        return self._page_size

    @page_size.setter
    def page_size(self, value):
        self._page_size = value

    @property
    def produce_order_id(self):
        return self._produce_order_id

    @produce_order_id.setter
    def produce_order_id(self, value):
        self._produce_order_id = value

    def to_alipay_dict(self):
        """Serialize truthy fields into a plain dict (generated-SDK style)."""
        params = dict()
        for attr in self._FIELDS:
            value = getattr(self, attr)
            if not value:
                # Falsy fields are omitted, matching the SDK convention.
                continue
            if hasattr(value, 'to_alipay_dict'):
                value = value.to_alipay_dict()
            params[attr] = value
        return params

    @staticmethod
    def from_alipay_dict(d):
        """Build a model from a wire-format dict; empty/None maps to None."""
        if not d:
            return None
        o = KoubeiSalesKbassetStuffProduceqrcodeBatchqueryModel()
        for key in KoubeiSalesKbassetStuffProduceqrcodeBatchqueryModel._FIELDS:
            if key in d:
                setattr(o, key, d[key])
        return o
| 28.394366 | 83 | 0.613095 |
import simplejson as json
from alipay.aop.api.constant.ParamConstants import *
class KoubeiSalesKbassetStuffProduceqrcodeBatchqueryModel(object):
def __init__(self):
self._batch_id = None
self._page_size = None
self._produce_order_id = None
@property
def batch_id(self):
return self._batch_id
@batch_id.setter
def batch_id(self, value):
self._batch_id = value
@property
def page_size(self):
return self._page_size
@page_size.setter
def page_size(self, value):
self._page_size = value
@property
def produce_order_id(self):
return self._produce_order_id
@produce_order_id.setter
def produce_order_id(self, value):
self._produce_order_id = value
def to_alipay_dict(self):
params = dict()
if self.batch_id:
if hasattr(self.batch_id, 'to_alipay_dict'):
params['batch_id'] = self.batch_id.to_alipay_dict()
else:
params['batch_id'] = self.batch_id
if self.page_size:
if hasattr(self.page_size, 'to_alipay_dict'):
params['page_size'] = self.page_size.to_alipay_dict()
else:
params['page_size'] = self.page_size
if self.produce_order_id:
if hasattr(self.produce_order_id, 'to_alipay_dict'):
params['produce_order_id'] = self.produce_order_id.to_alipay_dict()
else:
params['produce_order_id'] = self.produce_order_id
return params
@staticmethod
def from_alipay_dict(d):
if not d:
return None
o = KoubeiSalesKbassetStuffProduceqrcodeBatchqueryModel()
if 'batch_id' in d:
o.batch_id = d['batch_id']
if 'page_size' in d:
o.page_size = d['page_size']
if 'produce_order_id' in d:
o.produce_order_id = d['produce_order_id']
return o
| true | true |
f72ae4fe9cb98106976c818db916bbe6b063c51a | 2,243 | py | Python | sdk/python/tests/unit/test_feature_views.py | kevjumba/feast | 44d53fda71b5a82d9fb6e044b01d97080c2d018c | [
"Apache-2.0"
] | 810 | 2018-12-25T15:16:11.000Z | 2020-05-14T09:49:40.000Z | sdk/python/tests/unit/test_feature_views.py | kevjumba/feast | 44d53fda71b5a82d9fb6e044b01d97080c2d018c | [
"Apache-2.0"
] | 701 | 2018-12-21T05:18:43.000Z | 2020-05-16T01:30:21.000Z | sdk/python/tests/unit/test_feature_views.py | kevjumba/feast | 44d53fda71b5a82d9fb6e044b01d97080c2d018c | [
"Apache-2.0"
] | 155 | 2018-12-22T11:05:04.000Z | 2020-05-14T07:33:41.000Z | from datetime import timedelta
import pytest
from feast import PushSource
from feast.batch_feature_view import BatchFeatureView
from feast.data_format import AvroFormat
from feast.data_source import KafkaSource
from feast.infra.offline_stores.file_source import FileSource
from feast.stream_feature_view import StreamFeatureView
def test_create_batch_feature_view():
batch_source = FileSource(path="some path")
BatchFeatureView(
name="test batch feature view",
entities=[],
ttl=timedelta(days=30),
source=batch_source,
)
with pytest.raises(ValueError):
BatchFeatureView(
name="test batch feature view", entities=[], ttl=timedelta(days=30)
)
stream_source = KafkaSource(
name="kafka",
timestamp_field="",
bootstrap_servers="",
message_format=AvroFormat(""),
topic="topic",
batch_source=FileSource(path="some path"),
)
with pytest.raises(ValueError):
BatchFeatureView(
name="test batch feature view",
entities=[],
ttl=timedelta(days=30),
source=stream_source,
)
def test_create_stream_feature_view():
stream_source = KafkaSource(
name="kafka",
timestamp_field="",
bootstrap_servers="",
message_format=AvroFormat(""),
topic="topic",
batch_source=FileSource(path="some path"),
)
StreamFeatureView(
name="test kafka stream feature view",
entities=[],
ttl=timedelta(days=30),
source=stream_source,
)
push_source = PushSource(
name="push source", batch_source=FileSource(path="some path")
)
StreamFeatureView(
name="test push source feature view",
entities=[],
ttl=timedelta(days=30),
source=push_source,
)
with pytest.raises(ValueError):
StreamFeatureView(
name="test batch feature view", entities=[], ttl=timedelta(days=30)
)
with pytest.raises(ValueError):
StreamFeatureView(
name="test batch feature view",
entities=[],
ttl=timedelta(days=30),
source=FileSource(path="some path"),
)
| 27.353659 | 79 | 0.628622 | from datetime import timedelta
import pytest
from feast import PushSource
from feast.batch_feature_view import BatchFeatureView
from feast.data_format import AvroFormat
from feast.data_source import KafkaSource
from feast.infra.offline_stores.file_source import FileSource
from feast.stream_feature_view import StreamFeatureView
def test_create_batch_feature_view():
batch_source = FileSource(path="some path")
BatchFeatureView(
name="test batch feature view",
entities=[],
ttl=timedelta(days=30),
source=batch_source,
)
with pytest.raises(ValueError):
BatchFeatureView(
name="test batch feature view", entities=[], ttl=timedelta(days=30)
)
stream_source = KafkaSource(
name="kafka",
timestamp_field="",
bootstrap_servers="",
message_format=AvroFormat(""),
topic="topic",
batch_source=FileSource(path="some path"),
)
with pytest.raises(ValueError):
BatchFeatureView(
name="test batch feature view",
entities=[],
ttl=timedelta(days=30),
source=stream_source,
)
def test_create_stream_feature_view():
stream_source = KafkaSource(
name="kafka",
timestamp_field="",
bootstrap_servers="",
message_format=AvroFormat(""),
topic="topic",
batch_source=FileSource(path="some path"),
)
StreamFeatureView(
name="test kafka stream feature view",
entities=[],
ttl=timedelta(days=30),
source=stream_source,
)
push_source = PushSource(
name="push source", batch_source=FileSource(path="some path")
)
StreamFeatureView(
name="test push source feature view",
entities=[],
ttl=timedelta(days=30),
source=push_source,
)
with pytest.raises(ValueError):
StreamFeatureView(
name="test batch feature view", entities=[], ttl=timedelta(days=30)
)
with pytest.raises(ValueError):
StreamFeatureView(
name="test batch feature view",
entities=[],
ttl=timedelta(days=30),
source=FileSource(path="some path"),
)
| true | true |
f72ae59ecb83441e8b44b0616951c153ac6dd839 | 8,726 | py | Python | lambda/py/mutagen/_file.py | frivas/alexa-mixed-polly | bf0fde9005a66f3d6f0193799eacef934d166de7 | [
"W3C"
] | 2 | 2019-07-29T15:45:31.000Z | 2019-11-17T23:33:58.000Z | lambda/py/mutagen/_file.py | frivas/alexa-mixed-polly | bf0fde9005a66f3d6f0193799eacef934d166de7 | [
"W3C"
] | null | null | null | lambda/py/mutagen/_file.py | frivas/alexa-mixed-polly | bf0fde9005a66f3d6f0193799eacef934d166de7 | [
"W3C"
] | 1 | 2019-01-06T15:18:58.000Z | 2019-01-06T15:18:58.000Z | # -*- coding: utf-8 -*-
# Copyright (C) 2005 Michael Urman
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
import warnings
from mutagen._util import DictMixin, loadfile
from mutagen._compat import izip
class FileType(DictMixin):
"""FileType(filething, **kwargs)
Args:
filething (filething): A filename or a file-like object
Subclasses might take further options via keyword arguments.
An abstract object wrapping tags and audio stream information.
Each file format has different potential tags and stream
information.
FileTypes implement an interface very similar to Metadata; the
dict interface, save, load, and delete calls on a FileType call
the appropriate methods on its tag data.
Attributes:
info (`StreamInfo`): contains length, bitrate, sample rate
tags (`Tags`): metadata tags, if any, otherwise `None`
"""
__module__ = "mutagen"
info = None
tags = None
filename = None
_mimes = ["application/octet-stream"]
def __init__(self, *args, **kwargs):
if not args and not kwargs:
warnings.warn("FileType constructor requires a filename",
DeprecationWarning)
else:
self.load(*args, **kwargs)
@loadfile()
def load(self, filething, *args, **kwargs):
raise NotImplementedError
def __getitem__(self, key):
"""Look up a metadata tag key.
If the file has no tags at all, a KeyError is raised.
"""
if self.tags is None:
raise KeyError(key)
else:
return self.tags[key]
def __setitem__(self, key, value):
"""Set a metadata tag.
If the file has no tags, an appropriate format is added (but
not written until save is called).
"""
if self.tags is None:
self.add_tags()
self.tags[key] = value
def __delitem__(self, key):
"""Delete a metadata tag key.
If the file has no tags at all, a KeyError is raised.
"""
if self.tags is None:
raise KeyError(key)
else:
del(self.tags[key])
def keys(self):
"""Return a list of keys in the metadata tag.
If the file has no tags at all, an empty list is returned.
"""
if self.tags is None:
return []
else:
return self.tags.keys()
@loadfile(writable=True)
def delete(self, filething=None):
"""delete(filething=None)
Remove tags from a file.
In cases where the tagging format is independent of the file type
(for example `mutagen.id3.ID3`) all traces of the tagging format will
be removed.
In cases where the tag is part of the file type, all tags and
padding will be removed.
The tags attribute will be cleared as well if there is one.
Does nothing if the file has no tags.
Raises:
mutagen.MutagenError: if deleting wasn't possible
"""
if self.tags is not None:
return self.tags.delete(filething)
@loadfile(writable=True)
def save(self, filething=None, **kwargs):
"""save(filething=None, **kwargs)
Save metadata tags.
Raises:
MutagenError: if saving wasn't possible
"""
if self.tags is not None:
return self.tags.save(filething, **kwargs)
def pprint(self):
"""
Returns:
text: stream information and comment key=value pairs.
"""
stream = "%s (%s)" % (self.info.pprint(), self.mime[0])
try:
tags = self.tags.pprint()
except AttributeError:
return stream
else:
return stream + ((tags and "\n" + tags) or "")
def add_tags(self):
"""Adds new tags to the file.
Raises:
mutagen.MutagenError:
if tags already exist or adding is not possible.
"""
raise NotImplementedError
@property
def mime(self):
"""A list of mime types (:class:`mutagen.text`)"""
mimes = []
for Kind in type(self).__mro__:
for mime in getattr(Kind, '_mimes', []):
if mime not in mimes:
mimes.append(mime)
return mimes
@staticmethod
def score(filename, fileobj, header):
"""Returns a score for how likely the file can be parsed by this type.
Args:
filename (fspath): a file path
fileobj (fileobj): a file object open in rb mode. Position is
undefined
header (bytes): data of undefined length, starts with the start of
the file.
Returns:
int: negative if definitely not a matching type, otherwise a score,
the bigger the more certain that the file can be loaded.
"""
raise NotImplementedError
class StreamInfo(object):
"""Abstract stream information object.
Provides attributes for length, bitrate, sample rate etc.
See the implementations for details.
"""
__module__ = "mutagen"
def pprint(self):
"""
Returns:
text: Print stream information
"""
raise NotImplementedError
@loadfile(method=False)
def File(filething, options=None, easy=False):
"""File(filething, options=None, easy=False)
Guess the type of the file and try to open it.
The file type is decided by several things, such as the first 128
bytes (which usually contains a file type identifier), the
filename extension, and the presence of existing tags.
If no appropriate type could be found, None is returned.
Args:
filething (filething)
options: Sequence of :class:`FileType` implementations,
defaults to all included ones.
easy (bool): If the easy wrappers should be returnd if available.
For example :class:`EasyMP3 <mp3.EasyMP3>` instead of
:class:`MP3 <mp3.MP3>`.
Returns:
FileType: A FileType instance for the detected type or `None` in case
the type couln't be determined.
Raises:
MutagenError: in case the detected type fails to load the file.
"""
if options is None:
from mutagen.asf import ASF
from mutagen.apev2 import APEv2File
from mutagen.flac import FLAC
if easy:
from mutagen.easyid3 import EasyID3FileType as ID3FileType
else:
from mutagen.id3 import ID3FileType
if easy:
from mutagen.mp3 import EasyMP3 as MP3
else:
from mutagen.mp3 import MP3
from mutagen.oggflac import OggFLAC
from mutagen.oggspeex import OggSpeex
from mutagen.oggtheora import OggTheora
from mutagen.oggvorbis import OggVorbis
from mutagen.oggopus import OggOpus
if easy:
from mutagen.trueaudio import EasyTrueAudio as TrueAudio
else:
from mutagen.trueaudio import TrueAudio
from mutagen.wavpack import WavPack
if easy:
from mutagen.easymp4 import EasyMP4 as MP4
else:
from mutagen.mp4 import MP4
from mutagen.musepack import Musepack
from mutagen.monkeysaudio import MonkeysAudio
from mutagen.optimfrog import OptimFROG
from mutagen.aiff import AIFF
from mutagen.aac import AAC
from mutagen.smf import SMF
from mutagen.dsf import DSF
options = [MP3, TrueAudio, OggTheora, OggSpeex, OggVorbis, OggFLAC,
FLAC, AIFF, APEv2File, MP4, ID3FileType, WavPack,
Musepack, MonkeysAudio, OptimFROG, ASF, OggOpus, AAC,
SMF, DSF]
if not options:
return None
fileobj = filething.fileobj
try:
header = fileobj.read(128)
except IOError:
header = b""
# Sort by name after score. Otherwise import order affects
# Kind sort order, which affects treatment of things with
# equals scores.
results = [(Kind.score(filething.name, fileobj, header), Kind.__name__)
for Kind in options]
results = list(izip(results, options))
results.sort()
(score, name), Kind = results[-1]
if score > 0:
try:
fileobj.seek(0, 0)
except IOError:
pass
return Kind(fileobj, filename=filething.filename)
else:
return None
| 28.990033 | 79 | 0.607609 |
import warnings
from mutagen._util import DictMixin, loadfile
from mutagen._compat import izip
class FileType(DictMixin):
__module__ = "mutagen"
info = None
tags = None
filename = None
_mimes = ["application/octet-stream"]
def __init__(self, *args, **kwargs):
if not args and not kwargs:
warnings.warn("FileType constructor requires a filename",
DeprecationWarning)
else:
self.load(*args, **kwargs)
@loadfile()
def load(self, filething, *args, **kwargs):
raise NotImplementedError
def __getitem__(self, key):
if self.tags is None:
raise KeyError(key)
else:
return self.tags[key]
def __setitem__(self, key, value):
if self.tags is None:
self.add_tags()
self.tags[key] = value
def __delitem__(self, key):
if self.tags is None:
raise KeyError(key)
else:
del(self.tags[key])
def keys(self):
if self.tags is None:
return []
else:
return self.tags.keys()
@loadfile(writable=True)
def delete(self, filething=None):
if self.tags is not None:
return self.tags.delete(filething)
@loadfile(writable=True)
def save(self, filething=None, **kwargs):
if self.tags is not None:
return self.tags.save(filething, **kwargs)
def pprint(self):
stream = "%s (%s)" % (self.info.pprint(), self.mime[0])
try:
tags = self.tags.pprint()
except AttributeError:
return stream
else:
return stream + ((tags and "\n" + tags) or "")
def add_tags(self):
raise NotImplementedError
@property
def mime(self):
mimes = []
for Kind in type(self).__mro__:
for mime in getattr(Kind, '_mimes', []):
if mime not in mimes:
mimes.append(mime)
return mimes
@staticmethod
def score(filename, fileobj, header):
raise NotImplementedError
class StreamInfo(object):
__module__ = "mutagen"
def pprint(self):
raise NotImplementedError
@loadfile(method=False)
def File(filething, options=None, easy=False):
if options is None:
from mutagen.asf import ASF
from mutagen.apev2 import APEv2File
from mutagen.flac import FLAC
if easy:
from mutagen.easyid3 import EasyID3FileType as ID3FileType
else:
from mutagen.id3 import ID3FileType
if easy:
from mutagen.mp3 import EasyMP3 as MP3
else:
from mutagen.mp3 import MP3
from mutagen.oggflac import OggFLAC
from mutagen.oggspeex import OggSpeex
from mutagen.oggtheora import OggTheora
from mutagen.oggvorbis import OggVorbis
from mutagen.oggopus import OggOpus
if easy:
from mutagen.trueaudio import EasyTrueAudio as TrueAudio
else:
from mutagen.trueaudio import TrueAudio
from mutagen.wavpack import WavPack
if easy:
from mutagen.easymp4 import EasyMP4 as MP4
else:
from mutagen.mp4 import MP4
from mutagen.musepack import Musepack
from mutagen.monkeysaudio import MonkeysAudio
from mutagen.optimfrog import OptimFROG
from mutagen.aiff import AIFF
from mutagen.aac import AAC
from mutagen.smf import SMF
from mutagen.dsf import DSF
options = [MP3, TrueAudio, OggTheora, OggSpeex, OggVorbis, OggFLAC,
FLAC, AIFF, APEv2File, MP4, ID3FileType, WavPack,
Musepack, MonkeysAudio, OptimFROG, ASF, OggOpus, AAC,
SMF, DSF]
if not options:
return None
fileobj = filething.fileobj
try:
header = fileobj.read(128)
except IOError:
header = b""
results = [(Kind.score(filething.name, fileobj, header), Kind.__name__)
for Kind in options]
results = list(izip(results, options))
results.sort()
(score, name), Kind = results[-1]
if score > 0:
try:
fileobj.seek(0, 0)
except IOError:
pass
return Kind(fileobj, filename=filething.filename)
else:
return None
| true | true |
f72ae5ad21d0d2e7c0cc825a649cff1858a27800 | 5,781 | py | Python | src/coolbeans/extort/ib.py | runarp/coolbeans | 128a7f2e45690d2d22b05608e555c44334f46859 | [
"MIT"
] | 5 | 2020-05-17T04:48:25.000Z | 2022-01-27T09:36:45.000Z | src/coolbeans/extort/ib.py | runarp/coolbeans | 128a7f2e45690d2d22b05608e555c44334f46859 | [
"MIT"
] | 1 | 2020-05-17T06:21:52.000Z | 2020-05-22T13:49:33.000Z | src/coolbeans/extort/ib.py | runarp/coolbeans | 128a7f2e45690d2d22b05608e555c44334f46859 | [
"MIT"
] | 1 | 2021-01-28T03:00:27.000Z | 2021-01-28T03:00:27.000Z | """Example Extorter, useful as a starting point"""
import typing
import logging
import dataclasses
import datetime
# 3rdparty
import slugify
# We use ibflex
from ibflex import parser, FlexStatement, CashAction
from coolbeans.extort.base import ExtortionProtocol
from coolbeans.tools.seeds import Trade, Transfer, Expense, Income, EventDetail
logger = logging.getLogger(__name__)
def trade_key(trade):
if trade.openCloseIndicator:
o = trade.openCloseIndicator.name + ':'
else:
o = ''
return f"{o}{trade.tradeDate.strftime('%Y-%m-%d')}:{trade.ibOrderID}"
def clean_symbol(symbol: str) -> str:
symbol = slugify.slugify(symbol, separator='_')
if symbol[0].isdigit():
symbol = "X" + symbol
symbol = symbol.upper()
return symbol
class Extorter(ExtortionProtocol):
FILE_OPEN_MODE = None # This requires a file-name, not a
ib_account_id = ""
def extort(self, stream: typing.Union[typing.IO[typing.AnyStr], str]):
"""Extract as much information as possible from the workbook"""
for statement in parser.parse(stream).FlexStatements:
for record in self.extract_cash(statement):
yield dataclasses.asdict(record)
for trade in self.extract_trades(statement):
yield dataclasses.asdict(trade)
@staticmethod
def extract_cash(statement: FlexStatement):
"""
Args:
statement: The Statement to extract entries from
Returns:
iterator of DataClass instances for these records
"""
for record in statement.CashTransactions:
date = record.dateTime
if record.type in (
CashAction.DEPOSITWITHDRAW,
):
yield Transfer(
id=record.transactionID,
date=date,
amount=record.amount,
currency=record.currency,
subaccount=record.accountId,
narration=record.description,
event_detail=EventDetail.TRANSFER_DEPOSIT.name if record.amount > 0 else EventDetail.TRANSFER_WITHDRAWAL.name,
meta={
'type': record.type.value,
'rate': record.fxRateToBase
}
)
elif record.amount < 0:
event_detail = EventDetail.EXPENSE_FEES
if record.type in (CashAction.BONDINTPAID, CashAction.BROKERINTPAID):
event_detail = EventDetail.EXPENSE_INTEREST
if record.type == CashAction.WHTAX:
event_detail = EventDetail.EXPENSE_TAX
yield Expense(
id=record.transactionID,
date=date,
amount=record.amount,
event_detail=event_detail,
currency=record.currency,
subaccount=record.accountId,
narration=record.description,
meta={
'type': record.type.value,
'rate': record.fxRateToBase
}
)
else:
yield Income(
id=record.transactionID,
date=date,
amount=record.amount,
currency=record.currency,
subaccount=record.accountId,
narration=record.description,
meta={
'type': record.type.value,
'rate': record.fxRateToBase
}
)
@staticmethod
def extract_trades(statement: FlexStatement):
"""Pull Trades from a FlexStatement
"""
by_order: typing.Dict[str, Trade] = {}
for trade in statement.Trades:
key = trade_key(trade)
assert key.strip(), f"Invalid Key {len(key)}"
if not trade.openCloseIndicator:
# This isn't a trade at all.
continue
if key in by_order:
combined = by_order[key]
combined.add_trade(
quantity=trade.quantity * trade.multiplier,
price=trade.tradePrice,
fees=trade.ibCommission
)
else:
seed = Trade(
id=key,
date=trade.tradeDate,
price=trade.tradePrice,
currency=trade.currency,
quantity=trade.quantity * trade.multiplier,
commodity=clean_symbol(trade.symbol),
fees=trade.ibCommission,
fees_currency=trade.ibCommissionCurrency,
subaccount=trade.accountId,
event_detail=EventDetail.TRADE_OPEN if trade.openCloseIndicator.name == 'OPEN' else EventDetail.TRADE_CLOSE,
meta={
'exchange': trade.exchange,
'symbol': trade.symbol,
}
)
by_order[key] = seed
for trade in by_order.values():
yield trade
# if trade.securityID is None and "." in trade.symbol:
# # FOREX Trade, not really a valid Symbol at all
# # TODO: Better check than blank securityID
# # Usually [currency].[commodity]. For example GBP.JPY
# # In that case trade.currency is JPY, so we just need to parse out the GBP part
# safe_symbol, _ = trade.symbol.split('.')
# else:
# safe_symbol = self.clean_symbol(trade.symbol)
| 33.034286 | 130 | 0.530877 |
import typing
import logging
import dataclasses
import datetime
import slugify
from ibflex import parser, FlexStatement, CashAction
from coolbeans.extort.base import ExtortionProtocol
from coolbeans.tools.seeds import Trade, Transfer, Expense, Income, EventDetail
logger = logging.getLogger(__name__)
def trade_key(trade):
if trade.openCloseIndicator:
o = trade.openCloseIndicator.name + ':'
else:
o = ''
return f"{o}{trade.tradeDate.strftime('%Y-%m-%d')}:{trade.ibOrderID}"
def clean_symbol(symbol: str) -> str:
symbol = slugify.slugify(symbol, separator='_')
if symbol[0].isdigit():
symbol = "X" + symbol
symbol = symbol.upper()
return symbol
class Extorter(ExtortionProtocol):
FILE_OPEN_MODE = None
ib_account_id = ""
def extort(self, stream: typing.Union[typing.IO[typing.AnyStr], str]):
for statement in parser.parse(stream).FlexStatements:
for record in self.extract_cash(statement):
yield dataclasses.asdict(record)
for trade in self.extract_trades(statement):
yield dataclasses.asdict(trade)
@staticmethod
def extract_cash(statement: FlexStatement):
for record in statement.CashTransactions:
date = record.dateTime
if record.type in (
CashAction.DEPOSITWITHDRAW,
):
yield Transfer(
id=record.transactionID,
date=date,
amount=record.amount,
currency=record.currency,
subaccount=record.accountId,
narration=record.description,
event_detail=EventDetail.TRANSFER_DEPOSIT.name if record.amount > 0 else EventDetail.TRANSFER_WITHDRAWAL.name,
meta={
'type': record.type.value,
'rate': record.fxRateToBase
}
)
elif record.amount < 0:
event_detail = EventDetail.EXPENSE_FEES
if record.type in (CashAction.BONDINTPAID, CashAction.BROKERINTPAID):
event_detail = EventDetail.EXPENSE_INTEREST
if record.type == CashAction.WHTAX:
event_detail = EventDetail.EXPENSE_TAX
yield Expense(
id=record.transactionID,
date=date,
amount=record.amount,
event_detail=event_detail,
currency=record.currency,
subaccount=record.accountId,
narration=record.description,
meta={
'type': record.type.value,
'rate': record.fxRateToBase
}
)
else:
yield Income(
id=record.transactionID,
date=date,
amount=record.amount,
currency=record.currency,
subaccount=record.accountId,
narration=record.description,
meta={
'type': record.type.value,
'rate': record.fxRateToBase
}
)
@staticmethod
def extract_trades(statement: FlexStatement):
by_order: typing.Dict[str, Trade] = {}
for trade in statement.Trades:
key = trade_key(trade)
assert key.strip(), f"Invalid Key {len(key)}"
if not trade.openCloseIndicator:
continue
if key in by_order:
combined = by_order[key]
combined.add_trade(
quantity=trade.quantity * trade.multiplier,
price=trade.tradePrice,
fees=trade.ibCommission
)
else:
seed = Trade(
id=key,
date=trade.tradeDate,
price=trade.tradePrice,
currency=trade.currency,
quantity=trade.quantity * trade.multiplier,
commodity=clean_symbol(trade.symbol),
fees=trade.ibCommission,
fees_currency=trade.ibCommissionCurrency,
subaccount=trade.accountId,
event_detail=EventDetail.TRADE_OPEN if trade.openCloseIndicator.name == 'OPEN' else EventDetail.TRADE_CLOSE,
meta={
'exchange': trade.exchange,
'symbol': trade.symbol,
}
)
by_order[key] = seed
for trade in by_order.values():
yield trade
# if trade.securityID is None and "." in trade.symbol:
# # FOREX Trade, not really a valid Symbol at all
# # TODO: Better check than blank securityID
# # Usually [currency].[commodity]. For example GBP.JPY
# # In that case trade.currency is JPY, so we just need to parse out the GBP part
# safe_symbol, _ = trade.symbol.split('.')
# else:
# safe_symbol = self.clean_symbol(trade.symbol)
| true | true |
f72ae5e176716f5b8b5bebf5ecd595df75c371dc | 1,555 | py | Python | example/run_SolveOneAgent_online.py | zehuilu/DrMaMP-Distributed-Real-time-Multi-agent-Mission-Planning-Algorithm | 894875ebddf7d1f6bbf7a47ce82f05d7be2bafdc | [
"Apache-2.0"
] | 4 | 2022-02-22T05:12:18.000Z | 2022-03-29T01:56:37.000Z | example/run_SolveOneAgent_online.py | zehuilu/DrMaMP-Distributed-Real-time-Multi-agent-Mission-Planning-Algorithm | 894875ebddf7d1f6bbf7a47ce82f05d7be2bafdc | [
"Apache-2.0"
] | null | null | null | example/run_SolveOneAgent_online.py | zehuilu/DrMaMP-Distributed-Real-time-Multi-agent-Mission-Planning-Algorithm | 894875ebddf7d1f6bbf7a47ce82f05d7be2bafdc | [
"Apache-2.0"
] | 3 | 2022-02-23T03:14:56.000Z | 2022-03-14T12:22:05.000Z | #!/usr/bin/env python3
import asyncio
import random
import matplotlib.pyplot as plt
import pathmagic
with pathmagic.context():
from Simulator import Simulator
from MissionPlanner import MissionPlanner
if __name__ == "__main__":
# define the world
map_width_meter = 25.0
map_height_meter = 25.0
map_resolution = 2
value_non_obs = 0 # the cell is empty
value_obs = 255 # the cell is blocked
# create a simulator
MySimulator = Simulator(map_width_meter, map_height_meter, map_resolution, value_non_obs, value_obs)
# number of obstacles
num_obs = 250
# [width, length] size of each obstacle [meter]
size_obs = [1, 1]
# generate random obstacles
MySimulator.generate_random_obs(num_obs, size_obs)
# randomly generate agents and targets
num_agents = 1
num_targets = 8
agents_position, targets_position = MySimulator.generate_agents_and_targets(num_agents, num_targets)
# average agent velocity in cells
agent_velocity_ave = [random.randint(4,8) for i in range(num_agents)]
# planning and visualization frequency in Hz
planning_frequency = 5
# initialize a planner
MyPlanner = MissionPlanner(MySimulator)
# run the planner online
asyncio.run(MyPlanner.run_planner({"agents_position": agents_position,
"targets_position": targets_position,
"agent_velocity_ave": agent_velocity_ave,
"planning_frequency": planning_frequency}))
| 34.555556 | 104 | 0.686817 |
import asyncio
import random
import matplotlib.pyplot as plt
import pathmagic
with pathmagic.context():
from Simulator import Simulator
from MissionPlanner import MissionPlanner
if __name__ == "__main__":
map_width_meter = 25.0
map_height_meter = 25.0
map_resolution = 2
value_non_obs = 0
value_obs = 255
MySimulator = Simulator(map_width_meter, map_height_meter, map_resolution, value_non_obs, value_obs)
num_obs = 250
size_obs = [1, 1]
MySimulator.generate_random_obs(num_obs, size_obs)
num_agents = 1
num_targets = 8
agents_position, targets_position = MySimulator.generate_agents_and_targets(num_agents, num_targets)
agent_velocity_ave = [random.randint(4,8) for i in range(num_agents)]
planning_frequency = 5
MyPlanner = MissionPlanner(MySimulator)
asyncio.run(MyPlanner.run_planner({"agents_position": agents_position,
"targets_position": targets_position,
"agent_velocity_ave": agent_velocity_ave,
"planning_frequency": planning_frequency}))
| true | true |
f72ae622b3e7a87cfbd8de23dda483349b388bb1 | 26,682 | py | Python | test/functional/tests/io_class/test_io_classification.py | josehu07/open-cas-linux-mf | 5c6870be8bbb6816645955b6e479c9b5c7c0074d | [
"BSD-3-Clause-Clear"
] | 2 | 2021-08-13T14:44:45.000Z | 2022-01-10T07:41:40.000Z | test/functional/tests/io_class/test_io_classification.py | josehu07/open-cas-linux-mf | 5c6870be8bbb6816645955b6e479c9b5c7c0074d | [
"BSD-3-Clause-Clear"
] | null | null | null | test/functional/tests/io_class/test_io_classification.py | josehu07/open-cas-linux-mf | 5c6870be8bbb6816645955b6e479c9b5c7c0074d | [
"BSD-3-Clause-Clear"
] | null | null | null | #
# Copyright(c) 2019-2020 Intel Corporation
# SPDX-License-Identifier: BSD-3-Clause-Clear
#
import random
from itertools import permutations
import pytest
from api.cas.ioclass_config import IoClass
from storage_devices.disk import DiskType, DiskTypeSet, DiskTypeLowerThan
from test_tools import fs_utils
from test_tools.dd import Dd
from test_tools.disk_utils import Filesystem
from test_tools.fio.fio import Fio
from test_tools.fio.fio_param import ReadWrite, IoEngine
from test_utils.filesystem.file import File
from test_utils.os_utils import sync, Udev
from .io_class_common import *
@pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand]))
@pytest.mark.require_disk("core", DiskTypeLowerThan("cache"))
def test_ioclass_lba():
"""Write data to random lba and check if it is cached according to range
defined in ioclass rule"""
cache, core = prepare()
ioclass_id = 1
min_cached_lba = 56
max_cached_lba = 200
iterations = 100
dd_size = Size(1, Unit.Blocks512)
dd_count = 1
# Prepare ioclass config
ioclass_config.add_ioclass(
ioclass_id=ioclass_id,
eviction_priority=1,
allocation=True,
rule=f"lba:ge:{min_cached_lba}&lba:le:{max_cached_lba}&done",
ioclass_config_path=ioclass_config_path,
)
# Prepare cache for test
casadm.load_io_classes(cache_id=cache.cache_id, file=ioclass_config_path)
cache.flush_cache()
# Check if lbas from defined range are cached
dirty_count = 0
# '8' step is set to prevent writing cache line more than once
TestRun.LOGGER.info(f"Writing to one sector in each cache line from range.")
for lba in range(min_cached_lba, max_cached_lba, 8):
dd = (
Dd()
.input("/dev/zero")
.output(f"{core.system_path}")
.count(dd_count)
.block_size(dd_size)
.seek(lba)
)
dd.run()
sync()
dirty_count += 1
dirty = cache.get_io_class_statistics(io_class_id=ioclass_id).usage_stats.dirty
if dirty.get_value(Unit.Blocks4096) != dirty_count:
TestRun.LOGGER.error(f"LBA {lba} not cached")
cache.flush_cache()
# Check if lba outside of defined range are not cached
TestRun.LOGGER.info(f"Writing to random sectors outside of cached range.")
for i in range(iterations):
rand_lba = random.randrange(2000)
if min_cached_lba <= rand_lba <= max_cached_lba:
continue
dd = (
Dd()
.input("/dev/zero")
.output(f"{core.system_path}")
.count(dd_count)
.block_size(dd_size)
.seek(rand_lba)
)
dd.run()
sync()
dirty = cache.get_io_class_statistics(io_class_id=ioclass_id).usage_stats.dirty
if dirty.get_value(Unit.Blocks4096) != 0:
TestRun.LOGGER.error(f"Inappropriately cached lba: {rand_lba}")
@pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand]))
@pytest.mark.require_disk("core", DiskTypeLowerThan("cache"))
def test_ioclass_request_size():
    """Verify that IO is cached (or not) based on the request-size range in the ioclass rule.

    The rule caches requests of 8192-16384 bytes, i.e. 2-4 blocks of 4 KiB.
    """
    cache, core = prepare()
    ioclass_id = 1
    iterations = 100
    ioclass_config.add_ioclass(
        ioclass_id=ioclass_id,
        eviction_priority=1,
        allocation=True,
        rule=f"request_size:ge:8192&request_size:le:16384&done",
        ioclass_config_path=ioclass_config_path,
    )
    casadm.load_io_classes(cache_id=cache.cache_id, file=ioclass_config_path)
    # Disable udev so no background IO interferes with the statistics checks.
    Udev.disable()
    # Check if requests with appropriate size are cached
    TestRun.LOGGER.info(
        f"Check if requests with size within defined range are cached"
    )
    cached_req_sizes = [Size(2, Unit.Blocks4096), Size(4, Unit.Blocks4096)]
    for i in range(iterations):
        cache.flush_cache()
        req_size = random.choice(cached_req_sizes)
        dd = (
            Dd()
            .input("/dev/zero")
            .output(core.system_path)
            .count(1)
            .block_size(req_size)
            .oflag("direct")
        )
        dd.run()
        # After a flush, dirty data should equal exactly the size of the single request.
        dirty = cache.get_io_class_statistics(io_class_id=ioclass_id).usage_stats.dirty
        if dirty.get_value(Unit.Blocks4096) != req_size.value / Unit.Blocks4096.value:
            TestRun.fail("Incorrect number of dirty blocks!")
    cache.flush_cache()
    # Check if requests with inappropriate size are not cached
    TestRun.LOGGER.info(
        f"Check if requests with size outside defined range are not cached"
    )
    not_cached_req_sizes = [
        Size(1, Unit.Blocks4096),
        Size(8, Unit.Blocks4096),
        Size(16, Unit.Blocks4096),
    ]
    for i in range(iterations):
        req_size = random.choice(not_cached_req_sizes)
        dd = (
            Dd()
            .input("/dev/zero")
            .output(core.system_path)
            .count(1)
            .block_size(req_size)
            .oflag("direct")
        )
        dd.run()
        # No request outside the configured size range may produce dirty cache lines.
        dirty = cache.get_io_class_statistics(io_class_id=ioclass_id).usage_stats.dirty
        if dirty.get_value(Unit.Blocks4096) != 0:
            TestRun.fail("Dirty data present!")
@pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand]))
@pytest.mark.require_disk("core", DiskTypeLowerThan("cache"))
@pytest.mark.parametrizex("filesystem", list(Filesystem) + [False])
def test_ioclass_direct(filesystem):
    """
    Perform buffered/direct IO to/from files or raw block device.
    Data from direct IO should be cached.
    Data from buffered IO should not be cached and if performed to/from already cached data
    should cause reclassification to unclassified IO class.
    """
    cache, core = prepare()
    Udev.disable()
    ioclass_id = 1
    io_size = Size(random.randint(1000, 2000), Unit.Blocks4096)
    # direct IO class
    ioclass_config.add_ioclass(
        ioclass_id=ioclass_id,
        eviction_priority=1,
        allocation=True,
        rule="direct",
        ioclass_config_path=ioclass_config_path,
    )
    casadm.load_io_classes(cache_id=cache.cache_id, file=ioclass_config_path)
    # Offset equals size so the tested region does not overlap filesystem metadata at LBA 0.
    fio = (
        Fio().create_command()
        .io_engine(IoEngine.libaio)
        .size(io_size)
        .offset(io_size)
        .read_write(ReadWrite.write)
        .target(f"{mountpoint}/tmp_file" if filesystem else core.system_path)
    )
    # filesystem is either a Filesystem member or False (raw exported object).
    if filesystem:
        TestRun.LOGGER.info(
            f"Preparing {filesystem.name} filesystem and mounting {core.system_path} at"
            f" {mountpoint}"
        )
        core.create_filesystem(filesystem)
        core.mount(mountpoint)
        sync()
    else:
        TestRun.LOGGER.info("Testing on raw exported object")
    base_occupancy = cache.get_io_class_statistics(io_class_id=ioclass_id).usage_stats.occupancy
    # Buffered writes must not change the "direct" class occupancy.
    TestRun.LOGGER.info(f"Buffered writes to {'file' if filesystem else 'device'}")
    fio.run()
    sync()
    new_occupancy = cache.get_io_class_statistics(io_class_id=ioclass_id).usage_stats.occupancy
    if new_occupancy != base_occupancy:
        TestRun.fail("Buffered writes were cached!\n"
                     f"Expected: {base_occupancy}, actual: {new_occupancy}")
    # Direct writes must be cached in full.
    TestRun.LOGGER.info(f"Direct writes to {'file' if filesystem else 'device'}")
    fio.direct()
    fio.run()
    sync()
    new_occupancy = cache.get_io_class_statistics(io_class_id=ioclass_id).usage_stats.occupancy
    if new_occupancy != base_occupancy + io_size:
        TestRun.fail("Wrong number of direct writes was cached!\n"
                     f"Expected: {base_occupancy + io_size}, actual: {new_occupancy}")
    # Buffered reads of the now-cached data should reclassify it out of this class.
    TestRun.LOGGER.info(f"Buffered reads from {'file' if filesystem else 'device'}")
    fio.remove_param("readwrite").remove_param("direct")
    fio.read_write(ReadWrite.read)
    fio.run()
    sync()
    new_occupancy = cache.get_io_class_statistics(io_class_id=ioclass_id).usage_stats.occupancy
    if new_occupancy != base_occupancy:
        TestRun.fail("Buffered reads did not cause reclassification!"
                     f"Expected occupancy: {base_occupancy}, actual: {new_occupancy}")
    # Direct reads should bring the data back into the "direct" class.
    TestRun.LOGGER.info(f"Direct reads from {'file' if filesystem else 'device'}")
    fio.direct()
    fio.run()
    sync()
    new_occupancy = cache.get_io_class_statistics(io_class_id=ioclass_id).usage_stats.occupancy
    if new_occupancy != base_occupancy + io_size:
        TestRun.fail("Wrong number of direct reads was cached!\n"
                     f"Expected: {base_occupancy + io_size}, actual: {new_occupancy}")
@pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand]))
@pytest.mark.require_disk("core", DiskTypeLowerThan("cache"))
@pytest.mark.parametrizex("filesystem", Filesystem)
def test_ioclass_metadata(filesystem):
    """
    Perform operations on files that cause metadata update.
    Determine if every such operation results in increased writes to cached metadata.
    Exact values may not be tested as each file system has different metadata structure.
    """
    cache, core = prepare()
    Udev.disable()
    ioclass_id = random.randint(1, ioclass_config.MAX_IO_CLASS_ID)
    # metadata IO class
    ioclass_config.add_ioclass(
        ioclass_id=ioclass_id,
        eviction_priority=1,
        allocation=True,
        rule="metadata&done",
        ioclass_config_path=ioclass_config_path,
    )
    casadm.load_io_classes(cache_id=cache.cache_id, file=ioclass_config_path)
    TestRun.LOGGER.info(f"Preparing {filesystem.name} filesystem "
                        f"and mounting {core.system_path} at {mountpoint}")
    core.create_filesystem(filesystem)
    core.mount(mountpoint)
    sync()
    # Baseline write-request count for the metadata class; each subsequent file
    # operation batch must strictly increase it.
    requests_to_metadata_before = cache.get_io_class_statistics(
        io_class_id=ioclass_id).request_stats.write
    TestRun.LOGGER.info("Creating 20 test files")
    files = []
    for i in range(1, 21):
        file_path = f"{mountpoint}/test_file_{i}"
        dd = (
            Dd()
            .input("/dev/urandom")
            .output(file_path)
            .count(random.randint(5, 50))
            .block_size(Size(1, Unit.MebiByte))
            .oflag("sync")
        )
        dd.run()
        files.append(File(file_path))
    TestRun.LOGGER.info("Checking requests to metadata")
    requests_to_metadata_after = cache.get_io_class_statistics(
        io_class_id=ioclass_id).request_stats.write
    if requests_to_metadata_after == requests_to_metadata_before:
        TestRun.fail("No requests to metadata while creating files!")
    requests_to_metadata_before = requests_to_metadata_after
    TestRun.LOGGER.info("Renaming all test files")
    for file in files:
        file.move(f"{file.full_path}_renamed")
    sync()
    TestRun.LOGGER.info("Checking requests to metadata")
    requests_to_metadata_after = cache.get_io_class_statistics(
        io_class_id=ioclass_id).request_stats.write
    if requests_to_metadata_after == requests_to_metadata_before:
        TestRun.fail("No requests to metadata while renaming files!")
    requests_to_metadata_before = requests_to_metadata_after
    test_dir_path = f"{mountpoint}/test_dir"
    TestRun.LOGGER.info(f"Creating directory {test_dir_path}")
    fs_utils.create_directory(path=test_dir_path)
    TestRun.LOGGER.info(f"Moving test files into {test_dir_path}")
    for file in files:
        file.move(test_dir_path)
    sync()
    TestRun.LOGGER.info("Checking requests to metadata")
    requests_to_metadata_after = cache.get_io_class_statistics(
        io_class_id=ioclass_id).request_stats.write
    if requests_to_metadata_after == requests_to_metadata_before:
        TestRun.fail("No requests to metadata while moving files!")
    TestRun.LOGGER.info(f"Removing {test_dir_path}")
    fs_utils.remove(path=test_dir_path, force=True, recursive=True)
    TestRun.LOGGER.info("Checking requests to metadata")
    requests_to_metadata_after = cache.get_io_class_statistics(
        io_class_id=ioclass_id).request_stats.write
    if requests_to_metadata_after == requests_to_metadata_before:
        TestRun.fail("No requests to metadata while deleting directory with files!")
@pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand]))
@pytest.mark.require_disk("core", DiskTypeLowerThan("cache"))
@pytest.mark.parametrizex("filesystem", Filesystem)
def test_ioclass_id_as_condition(filesystem):
    """
    Load config in which IO class ids are used as conditions in other IO class definitions.
    Check if performed IO is properly classified.
    """
    cache, core = prepare()
    Udev.disable()
    base_dir_path = f"{mountpoint}/base_dir"
    ioclass_file_size = Size(random.randint(25, 50), Unit.MebiByte)
    ioclass_file_size_bytes = int(ioclass_file_size.get_value(Unit.Byte))
    # directory condition
    ioclass_config.add_ioclass(
        ioclass_id=1,
        eviction_priority=1,
        allocation=True,
        rule=f"directory:{base_dir_path}",
        ioclass_config_path=ioclass_config_path,
    )
    # file size condition
    ioclass_config.add_ioclass(
        ioclass_id=2,
        eviction_priority=1,
        allocation=True,
        rule=f"file_size:eq:{ioclass_file_size_bytes}",
        ioclass_config_path=ioclass_config_path,
    )
    # direct condition
    ioclass_config.add_ioclass(
        ioclass_id=3,
        eviction_priority=1,
        allocation=True,
        rule="direct",
        ioclass_config_path=ioclass_config_path,
    )
    # IO class 1 OR 2 condition
    ioclass_config.add_ioclass(
        ioclass_id=4,
        eviction_priority=1,
        allocation=True,
        rule="io_class:1|io_class:2",
        ioclass_config_path=ioclass_config_path,
    )
    # IO class 4 AND file size condition (same as IO class 2)
    ioclass_config.add_ioclass(
        ioclass_id=5,
        eviction_priority=1,
        allocation=True,
        rule=f"io_class:4&file_size:eq:{ioclass_file_size_bytes}",
        ioclass_config_path=ioclass_config_path,
    )
    # IO class 3 condition
    ioclass_config.add_ioclass(
        ioclass_id=6,
        eviction_priority=1,
        allocation=True,
        rule="io_class:3",
        ioclass_config_path=ioclass_config_path,
    )
    casadm.load_io_classes(cache_id=cache.cache_id, file=ioclass_config_path)
    TestRun.LOGGER.info(f"Preparing {filesystem.name} filesystem "
                        f"and mounting {core.system_path} at {mountpoint}")
    core.create_filesystem(filesystem)
    core.mount(mountpoint)
    fs_utils.create_directory(base_dir_path)
    sync()
    # IO fulfilling IO class 1 condition (and not IO class 2)
    # Should be classified as IO class 4
    base_occupancy = cache.get_io_class_statistics(io_class_id=4).usage_stats.occupancy
    # Size below 25 MiB guarantees the file_size:eq condition (class 2) does not match.
    non_ioclass_file_size = Size(random.randrange(1, 25), Unit.MebiByte)
    (Fio().create_command()
          .io_engine(IoEngine.libaio)
          .size(non_ioclass_file_size)
          .read_write(ReadWrite.write)
          .target(f"{base_dir_path}/test_file_1")
          .run())
    sync()
    new_occupancy = cache.get_io_class_statistics(io_class_id=4).usage_stats.occupancy
    if new_occupancy != base_occupancy + non_ioclass_file_size:
        TestRun.fail("Writes were not properly cached!\n"
                     f"Expected: {base_occupancy + non_ioclass_file_size}, actual: {new_occupancy}")
    # IO fulfilling IO class 2 condition (and not IO class 1)
    # Should be classified as IO class 5
    base_occupancy = cache.get_io_class_statistics(io_class_id=5).usage_stats.occupancy
    (Fio().create_command()
          .io_engine(IoEngine.libaio)
          .size(ioclass_file_size)
          .read_write(ReadWrite.write)
          .target(f"{mountpoint}/test_file_2")
          .run())
    sync()
    new_occupancy = cache.get_io_class_statistics(io_class_id=5).usage_stats.occupancy
    if new_occupancy != base_occupancy + ioclass_file_size:
        TestRun.fail("Writes were not properly cached!\n"
                     f"Expected: {base_occupancy + ioclass_file_size}, actual: {new_occupancy}")
    # IO fulfilling IO class 1 and 2 conditions
    # Should be classified as IO class 5
    base_occupancy = new_occupancy
    (Fio().create_command()
          .io_engine(IoEngine.libaio)
          .size(ioclass_file_size)
          .read_write(ReadWrite.write)
          .target(f"{base_dir_path}/test_file_3")
          .run())
    sync()
    new_occupancy = cache.get_io_class_statistics(io_class_id=5).usage_stats.occupancy
    if new_occupancy != base_occupancy + ioclass_file_size:
        TestRun.fail("Writes were not properly cached!\n"
                     f"Expected: {base_occupancy + ioclass_file_size}, actual: {new_occupancy}")
    # Same IO but direct
    # Should be classified as IO class 6
    base_occupancy = cache.get_io_class_statistics(io_class_id=6).usage_stats.occupancy
    (Fio().create_command()
          .io_engine(IoEngine.libaio)
          .size(ioclass_file_size)
          .read_write(ReadWrite.write)
          .target(f"{base_dir_path}/test_file_3")
          .direct()
          .run())
    sync()
    new_occupancy = cache.get_io_class_statistics(io_class_id=6).usage_stats.occupancy
    if new_occupancy != base_occupancy + ioclass_file_size:
        TestRun.fail("Writes were not properly cached!\n"
                     f"Expected: {base_occupancy + ioclass_file_size}, actual: {new_occupancy}")
@pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand]))
@pytest.mark.require_disk("core", DiskTypeLowerThan("cache"))
@pytest.mark.parametrizex("filesystem", Filesystem)
def test_ioclass_conditions_or(filesystem):
    """
    Load config with IO class combining 5 contradicting conditions connected by OR operator.
    Check if every IO fulfilling one condition is classified properly.
    """
    cache, core = prepare()
    Udev.disable()
    # directories OR condition: a file can live in only one of dir1..dir5 at a time,
    # so the five directory conditions are mutually exclusive.
    ioclass_config.add_ioclass(
        ioclass_id=1,
        eviction_priority=1,
        allocation=True,
        rule=f"directory:{mountpoint}/dir1|directory:{mountpoint}/dir2|directory:"
             f"{mountpoint}/dir3|directory:{mountpoint}/dir4|directory:{mountpoint}/dir5",
        ioclass_config_path=ioclass_config_path,
    )
    casadm.load_io_classes(cache_id=cache.cache_id, file=ioclass_config_path)
    TestRun.LOGGER.info(f"Preparing {filesystem.name} filesystem "
                        f"and mounting {core.system_path} at {mountpoint}")
    core.create_filesystem(filesystem)
    core.mount(mountpoint)
    for i in range(1, 6):
        fs_utils.create_directory(f"{mountpoint}/dir{i}")
    sync()
    # Perform IO fulfilling each condition and check if occupancy raises
    for i in range(1, 6):
        file_size = Size(random.randint(25, 50), Unit.MebiByte)
        base_occupancy = cache.get_io_class_statistics(io_class_id=1).usage_stats.occupancy
        (Fio().create_command()
              .io_engine(IoEngine.libaio)
              .size(file_size)
              .read_write(ReadWrite.write)
              .target(f"{mountpoint}/dir{i}/test_file")
              .run())
        sync()
        new_occupancy = cache.get_io_class_statistics(io_class_id=1).usage_stats.occupancy
        if new_occupancy != base_occupancy + file_size:
            TestRun.fail("Occupancy has not increased correctly!\n"
                         f"Expected: {base_occupancy + file_size}, actual: {new_occupancy}")
@pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand]))
@pytest.mark.require_disk("core", DiskTypeLowerThan("cache"))
@pytest.mark.parametrizex("filesystem", Filesystem)
def test_ioclass_conditions_and(filesystem):
    """
    Load config with IO class combining 5 conditions contradicting at least one other condition
    connected by AND operator.
    Check if every IO fulfilling one of the conditions is not classified.
    """
    cache, core = prepare()
    Udev.disable()
    file_size = Size(random.randint(25, 50), Unit.MebiByte)
    file_size_bytes = int(file_size.get_value(Unit.Byte))
    # file_size AND condition: gt and lt can never both hold for the same size,
    # so the combined rule is impossible to fulfill.
    ioclass_config.add_ioclass(
        ioclass_id=1,
        eviction_priority=1,
        allocation=True,
        rule=f"file_size:gt:{file_size_bytes}&file_size:lt:{file_size_bytes}&"
             f"file_size:ge:{file_size_bytes}&file_size:le:{file_size_bytes}&"
             f"file_size:eq:{file_size_bytes}",
        ioclass_config_path=ioclass_config_path,
    )
    casadm.load_io_classes(cache_id=cache.cache_id, file=ioclass_config_path)
    TestRun.LOGGER.info(f"Preparing {filesystem.name} filesystem "
                        f"and mounting {core.system_path} at {mountpoint}")
    core.create_filesystem(filesystem)
    core.mount(mountpoint)
    sync()
    base_occupancy = cache.get_io_class_statistics(io_class_id=1).usage_stats.occupancy
    # Perform IO with sizes matching each individual condition; none may be classified.
    for size in [file_size, file_size + Size(1, Unit.MebiByte), file_size - Size(1, Unit.MebiByte)]:
        (Fio().create_command()
              .io_engine(IoEngine.libaio)
              .size(size)
              .read_write(ReadWrite.write)
              .target(f"{mountpoint}/test_file")
              .run())
        sync()
        new_occupancy = cache.get_io_class_statistics(io_class_id=1).usage_stats.occupancy
        if new_occupancy != base_occupancy:
            TestRun.fail("Unexpected occupancy increase!\n"
                         f"Expected: {base_occupancy}, actual: {new_occupancy}")
@pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand]))
@pytest.mark.require_disk("core", DiskTypeLowerThan("cache"))
@pytest.mark.parametrizex("filesystem", Filesystem)
def test_ioclass_effective_ioclass(filesystem):
    """
    title: Effective IO class with multiple non-exclusive conditions
    description: |
        Test CAS ability to properly classify IO fulfilling multiple conditions based on
        IO class ids and presence of '&done' annotation in IO class rules
    pass_criteria:
      - In every iteration first IO is classified to the last in order IO class
      - In every iteration second IO is classified to the IO class with '&done' annotation
    """
    with TestRun.LOGGER.step(f"Test prepare"):
        cache, core = prepare()
        Udev.disable()
        file_size = Size(10, Unit.Blocks4096)
        file_size_bytes = int(file_size.get_value(Unit.Byte))
        test_dir = f"{mountpoint}/test"
        rules = ["direct",  # rule contradicting other rules
                 f"directory:{test_dir}",
                 f"file_size:le:{2 * file_size_bytes}",
                 f"file_size:ge:{file_size_bytes // 2}"]
    with TestRun.LOGGER.step(f"Preparing {filesystem.name} filesystem "
                             f"and mounting {core.system_path} at {mountpoint}"):
        core.create_filesystem(filesystem)
        core.mount(mountpoint)
        fs_utils.create_directory(test_dir)
        sync()
    # Iterate over all 24 orderings of class ids 1-4 for the four rules.
    for i, permutation in TestRun.iteration(enumerate(permutations(range(1, 5)), start=1)):
        with TestRun.LOGGER.step("Load IO classes in order specified by permutation"):
            load_io_classes_in_permutation_order(rules, permutation, cache)
            # Expected effective class: the highest id whose rule is non-contradicting
            # (if id 4 got the "direct" rule, the buffered IO falls through to id 3).
            io_class_id = 3 if rules[permutation.index(4)] == "direct" else 4
        with TestRun.LOGGER.step("Perform IO fulfilling the non-contradicting conditions"):
            base_occupancy = cache.get_io_class_statistics(
                io_class_id=io_class_id).usage_stats.occupancy
            fio = (Fio().create_command()
                   .io_engine(IoEngine.libaio)
                   .size(file_size)
                   .read_write(ReadWrite.write)
                   .target(f"{test_dir}/test_file{i}"))
            fio.run()
            sync()
        with TestRun.LOGGER.step("Check if IO was properly classified "
                                 "(to the last non-contradicting IO class)"):
            new_occupancy = cache.get_io_class_statistics(
                io_class_id=io_class_id).usage_stats.occupancy
            if new_occupancy != base_occupancy + file_size:
                TestRun.LOGGER.error("Wrong IO classification!\n"
                                     f"Expected: {base_occupancy + file_size}, "
                                     f"actual: {new_occupancy}")
        with TestRun.LOGGER.step("Add '&done' to the second in order non-contradicting condition"):
            io_class_id = add_done_to_second_non_exclusive_condition(rules, permutation, cache)
        with TestRun.LOGGER.step("Repeat IO"):
            base_occupancy = cache.get_io_class_statistics(
                io_class_id=io_class_id).usage_stats.occupancy
            fio.run()
            sync()
        with TestRun.LOGGER.step("Check if IO was properly classified "
                                 "(to the IO class with '&done' annotation)"):
            new_occupancy = cache.get_io_class_statistics(
                io_class_id=io_class_id).usage_stats.occupancy
            if new_occupancy != base_occupancy + file_size:
                TestRun.LOGGER.error("Wrong IO classification!\n"
                                     f"Expected: {base_occupancy + file_size}, "
                                     f"actual: {new_occupancy}")
def load_io_classes_in_permutation_order(rules, permutation, cache):
    """Recreate the IO class config with *rules* assigned ids from *permutation* and load it."""
    ioclass_config.remove_ioclass_config(ioclass_config_path=ioclass_config_path)
    ioclass_config.create_ioclass_config(
        add_default_rule=False, ioclass_config_path=ioclass_config_path
    )
    # To make test more precise all workload except of tested ioclass should be
    # put in pass-through mode
    io_classes = [IoClass.default(allocation=False)]
    io_classes.extend(
        IoClass(class_id=class_id, rule=rule)
        for class_id, rule in zip(permutation, rules)
    )
    IoClass.save_list_to_config_file(io_classes,
                                     add_default_rule=False,
                                     ioclass_config_path=ioclass_config_path)
    casadm.load_io_classes(cache.cache_id, file=ioclass_config_path)
def add_done_to_second_non_exclusive_condition(rules, permutation, cache):
    """Append '&done' to the second non-"direct" rule (in class-id order) and reload config.

    Returns the IO class id whose rule received the '&done' annotation.
    """
    found = 0
    class_id = 0
    # Walk class ids upwards, counting rules that are not the contradicting "direct" one;
    # stop at the second such rule.
    while found < 2:
        class_id += 1
        idx = permutation.index(class_id)
        if rules[idx] != "direct":
            found += 1
    fs_utils.replace_first_pattern_occurrence(ioclass_config_path,
                                              rules[idx], f"{rules[idx]}&done")
    sync()
    casadm.load_io_classes(cache_id=cache.cache_id, file=ioclass_config_path)
    return class_id
| 39.296024 | 100 | 0.670227 |
import random
from itertools import permutations
import pytest
from api.cas.ioclass_config import IoClass
from storage_devices.disk import DiskType, DiskTypeSet, DiskTypeLowerThan
from test_tools import fs_utils
from test_tools.dd import Dd
from test_tools.disk_utils import Filesystem
from test_tools.fio.fio import Fio
from test_tools.fio.fio_param import ReadWrite, IoEngine
from test_utils.filesystem.file import File
from test_utils.os_utils import sync, Udev
from .io_class_common import *
@pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand]))
@pytest.mark.require_disk("core", DiskTypeLowerThan("cache"))
def test_ioclass_lba():
    """Write data to random lba and check if it is cached according to the LBA range
    defined in the ioclass rule."""
    cache, core = prepare()
    ioclass_id = 1
    # Only IO hitting LBAs within [min_cached_lba, max_cached_lba] should be cached.
    min_cached_lba = 56
    max_cached_lba = 200
    iterations = 100
    dd_size = Size(1, Unit.Blocks512)
    dd_count = 1
    ioclass_config.add_ioclass(
        ioclass_id=ioclass_id,
        eviction_priority=1,
        allocation=True,
        rule=f"lba:ge:{min_cached_lba}&lba:le:{max_cached_lba}&done",
        ioclass_config_path=ioclass_config_path,
    )
    casadm.load_io_classes(cache_id=cache.cache_id, file=ioclass_config_path)
    cache.flush_cache()
    dirty_count = 0
    # Step of 8 sectors prevents writing the same cache line more than once.
    TestRun.LOGGER.info(f"Writing to one sector in each cache line from range.")
    for lba in range(min_cached_lba, max_cached_lba, 8):
        dd = (
            Dd()
            .input("/dev/zero")
            .output(f"{core.system_path}")
            .count(dd_count)
            .block_size(dd_size)
            .seek(lba)
        )
        dd.run()
        sync()
        dirty_count += 1
        # Each cached write should add exactly one dirty 4 KiB cache line.
        dirty = cache.get_io_class_statistics(io_class_id=ioclass_id).usage_stats.dirty
        if dirty.get_value(Unit.Blocks4096) != dirty_count:
            TestRun.LOGGER.error(f"LBA {lba} not cached")
    cache.flush_cache()
    # Writes outside the configured range must leave no dirty data behind.
    TestRun.LOGGER.info(f"Writing to random sectors outside of cached range.")
    for i in range(iterations):
        rand_lba = random.randrange(2000)
        if min_cached_lba <= rand_lba <= max_cached_lba:
            continue
        dd = (
            Dd()
            .input("/dev/zero")
            .output(f"{core.system_path}")
            .count(dd_count)
            .block_size(dd_size)
            .seek(rand_lba)
        )
        dd.run()
        sync()
        dirty = cache.get_io_class_statistics(io_class_id=ioclass_id).usage_stats.dirty
        if dirty.get_value(Unit.Blocks4096) != 0:
            TestRun.LOGGER.error(f"Inappropriately cached lba: {rand_lba}")
@pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand]))
@pytest.mark.require_disk("core", DiskTypeLowerThan("cache"))
def test_ioclass_request_size():
    """Verify that IO is cached (or not) based on the request-size range in the ioclass rule.

    The rule caches requests of 8192-16384 bytes, i.e. 2-4 blocks of 4 KiB.
    """
    cache, core = prepare()
    ioclass_id = 1
    iterations = 100
    ioclass_config.add_ioclass(
        ioclass_id=ioclass_id,
        eviction_priority=1,
        allocation=True,
        rule=f"request_size:ge:8192&request_size:le:16384&done",
        ioclass_config_path=ioclass_config_path,
    )
    casadm.load_io_classes(cache_id=cache.cache_id, file=ioclass_config_path)
    # Disable udev so no background IO interferes with the statistics checks.
    Udev.disable()
    TestRun.LOGGER.info(
        f"Check if requests with size within defined range are cached"
    )
    cached_req_sizes = [Size(2, Unit.Blocks4096), Size(4, Unit.Blocks4096)]
    for i in range(iterations):
        cache.flush_cache()
        req_size = random.choice(cached_req_sizes)
        dd = (
            Dd()
            .input("/dev/zero")
            .output(core.system_path)
            .count(1)
            .block_size(req_size)
            .oflag("direct")
        )
        dd.run()
        # After a flush, dirty data should equal exactly the size of the single request.
        dirty = cache.get_io_class_statistics(io_class_id=ioclass_id).usage_stats.dirty
        if dirty.get_value(Unit.Blocks4096) != req_size.value / Unit.Blocks4096.value:
            TestRun.fail("Incorrect number of dirty blocks!")
    cache.flush_cache()
    TestRun.LOGGER.info(
        f"Check if requests with size outside defined range are not cached"
    )
    not_cached_req_sizes = [
        Size(1, Unit.Blocks4096),
        Size(8, Unit.Blocks4096),
        Size(16, Unit.Blocks4096),
    ]
    for i in range(iterations):
        req_size = random.choice(not_cached_req_sizes)
        dd = (
            Dd()
            .input("/dev/zero")
            .output(core.system_path)
            .count(1)
            .block_size(req_size)
            .oflag("direct")
        )
        dd.run()
        # No request outside the configured size range may produce dirty cache lines.
        dirty = cache.get_io_class_statistics(io_class_id=ioclass_id).usage_stats.dirty
        if dirty.get_value(Unit.Blocks4096) != 0:
            TestRun.fail("Dirty data present!")
@pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand]))
@pytest.mark.require_disk("core", DiskTypeLowerThan("cache"))
@pytest.mark.parametrizex("filesystem", list(Filesystem) + [False])
def test_ioclass_direct(filesystem):
    """
    Perform buffered/direct IO to/from files or raw block device.
    Data from direct IO should be cached; data from buffered IO should not be cached
    and, if performed to/from already cached data, should cause reclassification
    to the unclassified IO class.
    """
    cache, core = prepare()
    Udev.disable()
    ioclass_id = 1
    io_size = Size(random.randint(1000, 2000), Unit.Blocks4096)
    # Single IO class matching only direct (O_DIRECT) requests.
    ioclass_config.add_ioclass(
        ioclass_id=ioclass_id,
        eviction_priority=1,
        allocation=True,
        rule="direct",
        ioclass_config_path=ioclass_config_path,
    )
    casadm.load_io_classes(cache_id=cache.cache_id, file=ioclass_config_path)
    # Offset equals size so the tested region does not overlap filesystem metadata at LBA 0.
    fio = (
        Fio().create_command()
        .io_engine(IoEngine.libaio)
        .size(io_size)
        .offset(io_size)
        .read_write(ReadWrite.write)
        .target(f"{mountpoint}/tmp_file" if filesystem else core.system_path)
    )
    # filesystem is either a Filesystem member or False (raw exported object).
    if filesystem:
        TestRun.LOGGER.info(
            f"Preparing {filesystem.name} filesystem and mounting {core.system_path} at"
            f" {mountpoint}"
        )
        core.create_filesystem(filesystem)
        core.mount(mountpoint)
        sync()
    else:
        TestRun.LOGGER.info("Testing on raw exported object")
    base_occupancy = cache.get_io_class_statistics(io_class_id=ioclass_id).usage_stats.occupancy
    # Buffered writes must not change the "direct" class occupancy.
    TestRun.LOGGER.info(f"Buffered writes to {'file' if filesystem else 'device'}")
    fio.run()
    sync()
    new_occupancy = cache.get_io_class_statistics(io_class_id=ioclass_id).usage_stats.occupancy
    if new_occupancy != base_occupancy:
        TestRun.fail("Buffered writes were cached!\n"
                     f"Expected: {base_occupancy}, actual: {new_occupancy}")
    # Direct writes must be cached in full.
    TestRun.LOGGER.info(f"Direct writes to {'file' if filesystem else 'device'}")
    fio.direct()
    fio.run()
    sync()
    new_occupancy = cache.get_io_class_statistics(io_class_id=ioclass_id).usage_stats.occupancy
    if new_occupancy != base_occupancy + io_size:
        TestRun.fail("Wrong number of direct writes was cached!\n"
                     f"Expected: {base_occupancy + io_size}, actual: {new_occupancy}")
    # Buffered reads of the now-cached data should reclassify it out of this class.
    TestRun.LOGGER.info(f"Buffered reads from {'file' if filesystem else 'device'}")
    fio.remove_param("readwrite").remove_param("direct")
    fio.read_write(ReadWrite.read)
    fio.run()
    sync()
    new_occupancy = cache.get_io_class_statistics(io_class_id=ioclass_id).usage_stats.occupancy
    if new_occupancy != base_occupancy:
        TestRun.fail("Buffered reads did not cause reclassification!"
                     f"Expected occupancy: {base_occupancy}, actual: {new_occupancy}")
    # Direct reads should bring the data back into the "direct" class.
    TestRun.LOGGER.info(f"Direct reads from {'file' if filesystem else 'device'}")
    fio.direct()
    fio.run()
    sync()
    new_occupancy = cache.get_io_class_statistics(io_class_id=ioclass_id).usage_stats.occupancy
    if new_occupancy != base_occupancy + io_size:
        TestRun.fail("Wrong number of direct reads was cached!\n"
                     f"Expected: {base_occupancy + io_size}, actual: {new_occupancy}")
@pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand]))
@pytest.mark.require_disk("core", DiskTypeLowerThan("cache"))
@pytest.mark.parametrizex("filesystem", Filesystem)
def test_ioclass_metadata(filesystem):
    """
    Perform operations on files that cause metadata update and check that every such
    operation increases writes to the cached "metadata" IO class. Exact values are not
    asserted because each filesystem has a different metadata structure.
    """
    cache, core = prepare()
    Udev.disable()
    ioclass_id = random.randint(1, ioclass_config.MAX_IO_CLASS_ID)
    # IO class matching filesystem metadata requests.
    ioclass_config.add_ioclass(
        ioclass_id=ioclass_id,
        eviction_priority=1,
        allocation=True,
        rule="metadata&done",
        ioclass_config_path=ioclass_config_path,
    )
    casadm.load_io_classes(cache_id=cache.cache_id, file=ioclass_config_path)
    TestRun.LOGGER.info(f"Preparing {filesystem.name} filesystem "
                        f"and mounting {core.system_path} at {mountpoint}")
    core.create_filesystem(filesystem)
    core.mount(mountpoint)
    sync()
    # Baseline write-request count for the metadata class; each subsequent file
    # operation batch must strictly increase it.
    requests_to_metadata_before = cache.get_io_class_statistics(
        io_class_id=ioclass_id).request_stats.write
    TestRun.LOGGER.info("Creating 20 test files")
    files = []
    for i in range(1, 21):
        file_path = f"{mountpoint}/test_file_{i}"
        dd = (
            Dd()
            .input("/dev/urandom")
            .output(file_path)
            .count(random.randint(5, 50))
            .block_size(Size(1, Unit.MebiByte))
            .oflag("sync")
        )
        dd.run()
        files.append(File(file_path))
    TestRun.LOGGER.info("Checking requests to metadata")
    requests_to_metadata_after = cache.get_io_class_statistics(
        io_class_id=ioclass_id).request_stats.write
    if requests_to_metadata_after == requests_to_metadata_before:
        TestRun.fail("No requests to metadata while creating files!")
    requests_to_metadata_before = requests_to_metadata_after
    TestRun.LOGGER.info("Renaming all test files")
    for file in files:
        file.move(f"{file.full_path}_renamed")
    sync()
    TestRun.LOGGER.info("Checking requests to metadata")
    requests_to_metadata_after = cache.get_io_class_statistics(
        io_class_id=ioclass_id).request_stats.write
    if requests_to_metadata_after == requests_to_metadata_before:
        TestRun.fail("No requests to metadata while renaming files!")
    requests_to_metadata_before = requests_to_metadata_after
    test_dir_path = f"{mountpoint}/test_dir"
    TestRun.LOGGER.info(f"Creating directory {test_dir_path}")
    fs_utils.create_directory(path=test_dir_path)
    TestRun.LOGGER.info(f"Moving test files into {test_dir_path}")
    for file in files:
        file.move(test_dir_path)
    sync()
    TestRun.LOGGER.info("Checking requests to metadata")
    requests_to_metadata_after = cache.get_io_class_statistics(
        io_class_id=ioclass_id).request_stats.write
    if requests_to_metadata_after == requests_to_metadata_before:
        TestRun.fail("No requests to metadata while moving files!")
    TestRun.LOGGER.info(f"Removing {test_dir_path}")
    fs_utils.remove(path=test_dir_path, force=True, recursive=True)
    TestRun.LOGGER.info("Checking requests to metadata")
    requests_to_metadata_after = cache.get_io_class_statistics(
        io_class_id=ioclass_id).request_stats.write
    if requests_to_metadata_after == requests_to_metadata_before:
        TestRun.fail("No requests to metadata while deleting directory with files!")
@pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand]))
@pytest.mark.require_disk("core", DiskTypeLowerThan("cache"))
@pytest.mark.parametrizex("filesystem", Filesystem)
def test_ioclass_id_as_condition(filesystem):
    """
    Load config in which IO class ids are used as conditions in other IO class definitions
    and check that performed IO is properly classified.
    """
    cache, core = prepare()
    Udev.disable()
    base_dir_path = f"{mountpoint}/base_dir"
    ioclass_file_size = Size(random.randint(25, 50), Unit.MebiByte)
    ioclass_file_size_bytes = int(ioclass_file_size.get_value(Unit.Byte))
    # Class 1: directory condition.
    ioclass_config.add_ioclass(
        ioclass_id=1,
        eviction_priority=1,
        allocation=True,
        rule=f"directory:{base_dir_path}",
        ioclass_config_path=ioclass_config_path,
    )
    # Class 2: file size condition.
    ioclass_config.add_ioclass(
        ioclass_id=2,
        eviction_priority=1,
        allocation=True,
        rule=f"file_size:eq:{ioclass_file_size_bytes}",
        ioclass_config_path=ioclass_config_path,
    )
    # Class 3: direct IO condition.
    ioclass_config.add_ioclass(
        ioclass_id=3,
        eviction_priority=1,
        allocation=True,
        rule="direct",
        ioclass_config_path=ioclass_config_path,
    )
    # Class 4: IO class 1 OR 2 condition.
    ioclass_config.add_ioclass(
        ioclass_id=4,
        eviction_priority=1,
        allocation=True,
        rule="io_class:1|io_class:2",
        ioclass_config_path=ioclass_config_path,
    )
    # Class 5: IO class 4 AND file size condition (same size as class 2).
    ioclass_config.add_ioclass(
        ioclass_id=5,
        eviction_priority=1,
        allocation=True,
        rule=f"io_class:4&file_size:eq:{ioclass_file_size_bytes}",
        ioclass_config_path=ioclass_config_path,
    )
    # Class 6: IO class 3 condition.
    ioclass_config.add_ioclass(
        ioclass_id=6,
        eviction_priority=1,
        allocation=True,
        rule="io_class:3",
        ioclass_config_path=ioclass_config_path,
    )
    casadm.load_io_classes(cache_id=cache.cache_id, file=ioclass_config_path)
    TestRun.LOGGER.info(f"Preparing {filesystem.name} filesystem "
                        f"and mounting {core.system_path} at {mountpoint}")
    core.create_filesystem(filesystem)
    core.mount(mountpoint)
    fs_utils.create_directory(base_dir_path)
    sync()
    # IO fulfilling class 1 only (size < 25 MiB avoids class 2) -> expected class 4.
    base_occupancy = cache.get_io_class_statistics(io_class_id=4).usage_stats.occupancy
    non_ioclass_file_size = Size(random.randrange(1, 25), Unit.MebiByte)
    (Fio().create_command()
          .io_engine(IoEngine.libaio)
          .size(non_ioclass_file_size)
          .read_write(ReadWrite.write)
          .target(f"{base_dir_path}/test_file_1")
          .run())
    sync()
    new_occupancy = cache.get_io_class_statistics(io_class_id=4).usage_stats.occupancy
    if new_occupancy != base_occupancy + non_ioclass_file_size:
        TestRun.fail("Writes were not properly cached!\n"
                     f"Expected: {base_occupancy + non_ioclass_file_size}, actual: {new_occupancy}")
    # IO fulfilling class 2 only (outside base_dir) -> expected class 5.
    base_occupancy = cache.get_io_class_statistics(io_class_id=5).usage_stats.occupancy
    (Fio().create_command()
          .io_engine(IoEngine.libaio)
          .size(ioclass_file_size)
          .read_write(ReadWrite.write)
          .target(f"{mountpoint}/test_file_2")
          .run())
    sync()
    new_occupancy = cache.get_io_class_statistics(io_class_id=5).usage_stats.occupancy
    if new_occupancy != base_occupancy + ioclass_file_size:
        TestRun.fail("Writes were not properly cached!\n"
                     f"Expected: {base_occupancy + ioclass_file_size}, actual: {new_occupancy}")
    # IO fulfilling classes 1 and 2 -> still expected class 5.
    base_occupancy = new_occupancy
    (Fio().create_command()
          .io_engine(IoEngine.libaio)
          .size(ioclass_file_size)
          .read_write(ReadWrite.write)
          .target(f"{base_dir_path}/test_file_3")
          .run())
    sync()
    new_occupancy = cache.get_io_class_statistics(io_class_id=5).usage_stats.occupancy
    if new_occupancy != base_occupancy + ioclass_file_size:
        TestRun.fail("Writes were not properly cached!\n"
                     f"Expected: {base_occupancy + ioclass_file_size}, actual: {new_occupancy}")
    # Same IO but direct -> expected class 6 (via class 3).
    base_occupancy = cache.get_io_class_statistics(io_class_id=6).usage_stats.occupancy
    (Fio().create_command()
          .io_engine(IoEngine.libaio)
          .size(ioclass_file_size)
          .read_write(ReadWrite.write)
          .target(f"{base_dir_path}/test_file_3")
          .direct()
          .run())
    sync()
    new_occupancy = cache.get_io_class_statistics(io_class_id=6).usage_stats.occupancy
    if new_occupancy != base_occupancy + ioclass_file_size:
        TestRun.fail("Writes were not properly cached!\n"
                     f"Expected: {base_occupancy + ioclass_file_size}, actual: {new_occupancy}")
@pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand]))
@pytest.mark.require_disk("core", DiskTypeLowerThan("cache"))
@pytest.mark.parametrizex("filesystem", Filesystem)
def test_ioclass_conditions_or(filesystem):
    """Verify OR-combined ('|') IO class conditions: a write into any one of
    the five listed directories must be classified into IO class 1."""
    cache, core = prepare()
    Udev.disable()
    # One IO class whose rule is five directory conditions joined with '|'
    # (logical OR) - matching any single one of them should hit the class.
    ioclass_config.add_ioclass(
        ioclass_id=1,
        eviction_priority=1,
        allocation=True,
        rule=f"directory:{mountpoint}/dir1|directory:{mountpoint}/dir2|directory:"
             f"{mountpoint}/dir3|directory:{mountpoint}/dir4|directory:{mountpoint}/dir5",
        ioclass_config_path=ioclass_config_path,
    )
    casadm.load_io_classes(cache_id=cache.cache_id, file=ioclass_config_path)
    TestRun.LOGGER.info(f"Preparing {filesystem.name} filesystem "
                        f"and mounting {core.system_path} at {mountpoint}")
    core.create_filesystem(filesystem)
    core.mount(mountpoint)
    for i in range(1, 6):
        fs_utils.create_directory(f"{mountpoint}/dir{i}")
    sync()
    # Write a random-sized file into each directory in turn; every write must
    # grow IO class 1 occupancy by exactly the written file size.
    for i in range(1, 6):
        file_size = Size(random.randint(25, 50), Unit.MebiByte)
        base_occupancy = cache.get_io_class_statistics(io_class_id=1).usage_stats.occupancy
        (Fio().create_command()
         .io_engine(IoEngine.libaio)
         .size(file_size)
         .read_write(ReadWrite.write)
         .target(f"{mountpoint}/dir{i}/test_file")
         .run())
        sync()
        new_occupancy = cache.get_io_class_statistics(io_class_id=1).usage_stats.occupancy
        if new_occupancy != base_occupancy + file_size:
            TestRun.fail("Occupancy has not increased correctly!\n"
                         f"Expected: {base_occupancy + file_size}, actual: {new_occupancy}")
@pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand]))
@pytest.mark.require_disk("core", DiskTypeLowerThan("cache"))
@pytest.mark.parametrizex("filesystem", Filesystem)
def test_ioclass_conditions_and(filesystem):
    """Verify AND-combined ('&') IO class conditions: the rule conjoins
    contradictory size checks (gt & lt of the same value), so no file of any
    size may ever be classified into IO class 1."""
    cache, core = prepare()
    Udev.disable()
    file_size = Size(random.randint(25, 50), Unit.MebiByte)
    file_size_bytes = int(file_size.get_value(Unit.Byte))
    # 'gt' and 'lt' of the same size can never both hold, making the whole
    # conjunction unsatisfiable by construction.
    ioclass_config.add_ioclass(
        ioclass_id=1,
        eviction_priority=1,
        allocation=True,
        rule=f"file_size:gt:{file_size_bytes}&file_size:lt:{file_size_bytes}&"
             f"file_size:ge:{file_size_bytes}&file_size:le:{file_size_bytes}&"
             f"file_size:eq:{file_size_bytes}",
        ioclass_config_path=ioclass_config_path,
    )
    casadm.load_io_classes(cache_id=cache.cache_id, file=ioclass_config_path)
    TestRun.LOGGER.info(f"Preparing {filesystem.name} filesystem "
                        f"and mounting {core.system_path} at {mountpoint}")
    core.create_filesystem(filesystem)
    core.mount(mountpoint)
    sync()
    base_occupancy = cache.get_io_class_statistics(io_class_id=1).usage_stats.occupancy
    # Try files equal to, larger than and smaller than the reference size;
    # the class occupancy must stay unchanged in every case.
    for size in [file_size, file_size + Size(1, Unit.MebiByte), file_size - Size(1, Unit.MebiByte)]:
        (Fio().create_command()
         .io_engine(IoEngine.libaio)
         .size(size)
         .read_write(ReadWrite.write)
         .target(f"{mountpoint}/test_file")
         .run())
        sync()
        new_occupancy = cache.get_io_class_statistics(io_class_id=1).usage_stats.occupancy
        if new_occupancy != base_occupancy:
            TestRun.fail("Unexpected occupancy increase!\n"
                         f"Expected: {base_occupancy}, actual: {new_occupancy}")
@pytest.mark.require_disk("cache", DiskTypeSet([DiskType.optane, DiskType.nand]))
@pytest.mark.require_disk("core", DiskTypeLowerThan("cache"))
@pytest.mark.parametrizex("filesystem", Filesystem)
def test_ioclass_effective_ioclass(filesystem):
    """For every priority permutation of four rules, check that IO lands in
    the last loaded non-contradicting IO class, and that appending '&done' to
    an earlier matching rule stops classification at that class instead."""
    with TestRun.LOGGER.step(f"Test prepare"):
        cache, core = prepare()
        Udev.disable()
        file_size = Size(10, Unit.Blocks4096)
        file_size_bytes = int(file_size.get_value(Unit.Byte))
        test_dir = f"{mountpoint}/test"
        # 'direct' contradicts the buffered IO issued below; the other three
        # rules all match the fio job's writes.
        rules = ["direct",
                 f"directory:{test_dir}",
                 f"file_size:le:{2 * file_size_bytes}",
                 f"file_size:ge:{file_size_bytes // 2}"]
    with TestRun.LOGGER.step(f"Preparing {filesystem.name} filesystem "
                             f"and mounting {core.system_path} at {mountpoint}"):
        core.create_filesystem(filesystem)
        core.mount(mountpoint)
        fs_utils.create_directory(test_dir)
        sync()
    for i, permutation in TestRun.iteration(enumerate(permutations(range(1, 5)), start=1)):
        with TestRun.LOGGER.step("Load IO classes in order specified by permutation"):
            load_io_classes_in_permutation_order(rules, permutation, cache)
            # If 'direct' got the highest class id (4), the effective class is
            # the last non-contradicting one (id 3); otherwise it is 4.
            io_class_id = 3 if rules[permutation.index(4)] == "direct" else 4
        with TestRun.LOGGER.step("Perform IO fulfilling the non-contradicting conditions"):
            base_occupancy = cache.get_io_class_statistics(
                io_class_id=io_class_id).usage_stats.occupancy
            fio = (Fio().create_command()
                   .io_engine(IoEngine.libaio)
                   .size(file_size)
                   .read_write(ReadWrite.write)
                   .target(f"{test_dir}/test_file{i}"))
            fio.run()
            sync()
        with TestRun.LOGGER.step("Check if IO was properly classified "
                                 "(to the last non-contradicting IO class)"):
            new_occupancy = cache.get_io_class_statistics(
                io_class_id=io_class_id).usage_stats.occupancy
            if new_occupancy != base_occupancy + file_size:
                TestRun.LOGGER.error("Wrong IO classification!\n"
                                     f"Expected: {base_occupancy + file_size}, "
                                     f"actual: {new_occupancy}")
        with TestRun.LOGGER.step("Add '&done' to the second in order non-contradicting condition"):
            io_class_id = add_done_to_second_non_exclusive_condition(rules, permutation, cache)
        with TestRun.LOGGER.step("Repeat IO"):
            base_occupancy = cache.get_io_class_statistics(
                io_class_id=io_class_id).usage_stats.occupancy
            fio.run()
            sync()
        with TestRun.LOGGER.step("Check if IO was properly classified "
                                 "(to the IO class with '&done' annotation)"):
            new_occupancy = cache.get_io_class_statistics(
                io_class_id=io_class_id).usage_stats.occupancy
            if new_occupancy != base_occupancy + file_size:
                TestRun.LOGGER.error("Wrong IO classification!\n"
                                     f"Expected: {base_occupancy + file_size}, "
                                     f"actual: {new_occupancy}")
def load_io_classes_in_permutation_order(rules, permutation, cache):
    """Recreate the IO class config, assigning rule *n* the class id
    ``permutation[n]``, then load the resulting config into *cache*."""
    # Start from a fresh config file without the default rule baked in.
    ioclass_config.remove_ioclass_config(ioclass_config_path=ioclass_config_path)
    ioclass_config.create_ioclass_config(
        add_default_rule=False, ioclass_config_path=ioclass_config_path
    )
    classes = [IoClass.default(allocation=False)]
    classes += [IoClass(class_id=class_id, rule=rule)
                for class_id, rule in zip(permutation, rules)]
    IoClass.save_list_to_config_file(classes,
                                     add_default_rule=False,
                                     ioclass_config_path=ioclass_config_path)
    casadm.load_io_classes(cache.cache_id, file=ioclass_config_path)
def add_done_to_second_non_exclusive_condition(rules, permutation, cache):
    """Append '&done' to the rule of the second (in class-id order)
    non-'direct' condition, reload the config and return that class id."""
    matches = 0
    class_id = 0
    rule_idx = None
    # Walk class ids upward until the second rule that is not 'direct'
    # (i.e. not contradicting buffered IO) has been found.
    while matches < 2:
        class_id += 1
        rule_idx = permutation.index(class_id)
        if rules[rule_idx] != "direct":
            matches += 1
    fs_utils.replace_first_pattern_occurrence(ioclass_config_path,
                                              rules[rule_idx], f"{rules[rule_idx]}&done")
    sync()
    casadm.load_io_classes(cache_id=cache.cache_id, file=ioclass_config_path)
    return class_id
| true | true |
f72ae69658ad2a7325bb00e82f738fd441ad6552 | 1,684 | py | Python | flask_controller/controller.py | AlexFence/FlaskController | efbd51a6970d407128f79876e8724b75fe6ec156 | [
"MIT"
] | 3 | 2020-10-19T08:18:51.000Z | 2022-02-06T04:29:38.000Z | flask_controller/controller.py | TypicalFence/FlaskController | efbd51a6970d407128f79876e8724b75fe6ec156 | [
"MIT"
] | null | null | null | flask_controller/controller.py | TypicalFence/FlaskController | efbd51a6970d407128f79876e8724b75fe6ec156 | [
"MIT"
] | null | null | null | from abc import ABC
def route(rule, **options):
    """Attach routing metadata to a function or FlaskController class.

    Behaves like Flask's ``@app.route``; when applied to a controller class
    it declares a base route that prefixes every method route.

    Args:
        rule (str): the URL rule of the route being declared.
        options: forwarded unchanged to ``Flask.add_url_rule``.
    """
    def attach_route_info(target):
        target._route = (rule, options)
        return target
    return attach_route_info
class FlaskController(ABC):
    """Base class for controller classes.

    Subclass this and decorate methods with :func:`route` to declare URL
    rules; call :meth:`register` to attach them to a Flask application.
    A class-level :func:`route` decorator acts as a base-path prefix.
    """

    def __init__(self):
        super(FlaskController, self).__init__()

    def register(self, app):
        """Adds the routes of a Controller to a Flask instance.

        Args:
            app (Flask)
        """
        routes = [
            member for member in dir(self)
            # BUG FIX: was `member is not "__class__"` - identity comparison
            # with a string literal is implementation-dependent; use `!=`.
            if member != "__class__" and hasattr(getattr(self, member), "_route")
        ]
        self._register_routes(routes, app)

    def _register_routes(self, routes, app):
        # Endpoint is "<method name><full rule>" so endpoints stay unique
        # across multiple controllers registered on one app.
        for route_name in routes:
            func = getattr(self, route_name)
            real_route = self._generate_route(func._route[0])
            options = func._route[1]
            app.add_url_rule(real_route, route_name + real_route, func, **options)

    def _generate_route(self, rule):
        # Prefix with the class-level base route, if one was declared.
        base_route = ""
        if hasattr(self, "_route"):
            base_route = self._route[0]
        return base_route + rule
| 28.066667 | 78 | 0.604513 | from abc import ABC
def route(rule, **options):
    """Record (rule, options) on the decorated object as its `_route`."""
    def wrap(obj):
        obj._route = (rule, options)
        return obj
    return wrap
class FlaskController(ABC):
    """Base class for route-declaring controllers.

    Methods carrying a ``_route`` attribute (set by the ``route`` decorator)
    are added to a Flask app via :meth:`register`; a ``_route`` on the class
    itself is used as a URL prefix.
    """
    def __init__(self):
        super(FlaskController, self).__init__()
    def register(self, app):
        """Attach every decorated method of this controller to *app*."""
        decorated = []
        for attr_name in dir(self):
            # BUG FIX: `attr_name is not "__class__"` compared identity with
            # a string literal, which is implementation-dependent; use `!=`.
            if attr_name != "__class__" and hasattr(getattr(self, attr_name), "_route"):
                decorated.append(attr_name)
        self._register_routes(decorated, app)
    def _register_routes(self, routes, app):
        # Endpoint names are "<method name><full rule>" to keep them unique.
        for name in routes:
            func = getattr(self, name)
            real_route = self._generate_route(func._route[0])
            options = func._route[1]
            app.add_url_rule(real_route, name + real_route, func, **options)
    def _generate_route(self, rule):
        # Prepend the class-level base route when present.
        base_route = ""
        if hasattr(self, "_route"):
            base_route = self._route[0]
        return base_route + rule
| true | true |
f72ae6adfbd9100ea1e159819c5e0ed61df33f44 | 24,028 | py | Python | windows_packages_gpu/torch/testing/_internal/jit_metaprogramming_utils.py | codeproject/DeepStack | d96368a3db1bc0266cb500ba3701d130834da0e6 | [
"Apache-2.0"
] | 353 | 2020-12-10T10:47:17.000Z | 2022-03-31T23:08:29.000Z | windows_packages_gpu/torch/testing/_internal/jit_metaprogramming_utils.py | codeproject/DeepStack | d96368a3db1bc0266cb500ba3701d130834da0e6 | [
"Apache-2.0"
] | 80 | 2020-12-10T09:54:22.000Z | 2022-03-30T22:08:45.000Z | windows_packages_gpu/torch/testing/_internal/jit_metaprogramming_utils.py | codeproject/DeepStack | d96368a3db1bc0266cb500ba3701d130834da0e6 | [
"Apache-2.0"
] | 63 | 2020-12-10T17:10:34.000Z | 2022-03-28T16:27:07.000Z | # Torch
from torch.jit.annotations import BroadcastingList2, BroadcastingList3 # noqa: F401
from torch.testing._internal.common_methods_invocations import non_differentiable, create_input, \
unpack_variables
import torch.nn.functional as F
import torch
import torch.cuda
import torch.jit
import torch.jit._logging
import torch.jit.frontend
from torch.testing._internal.common_nn import module_tests, new_module_tests
from copy import deepcopy
import math # noqa: F401
# Testing utils
from torch._six import inf
# These JIT metaprogramming tests are written against double precision.
torch.set_default_dtype(torch.double)
# Canonical tensor dimension sizes used throughout the test specs below:
# L(arge), M(edium), S(mall).
L = 20
M = 10
S = 5
# NB: JIT script tests for all nn functional interfaces, script mode does
# not support in_place operations yet, so no inplace operation tests added.
# removed all the deprecated functions
#
# (
# method name,
# input size/constructing fn,
# args (tuple represents shape of a tensor arg),
# test variant name(will be used at test name suffix,
# 'inplace' skips grad tests), // optional
# (True, nonfusible_nodes, fusible_nodes) for autodiff // optional
# fn to determine if test should be skipped, // optional
# fn mapping output to part that should be gradcheck'ed, // optional
# kwargs for function, // optional
# )
nn_functional_tests = [
('conv1d', (S, S, S), ((S, S, S),)),
('conv2d', (S, S, S, S), ((S, S, S, S),)),
('conv3d', (S, S, S, S, S), ((S, S, S, S, S),)),
('conv_transpose1d', (S, S, S), ((S, S, S),)),
('conv_transpose2d', (S, S, S, S), ((S, S, S, S),)),
('conv_transpose3d', (S, S, S, S, S), ((S, S, S, S, S),)),
('conv_tbc', (S, S, S), ((S, S, S), (S,), 2)),
('avg_pool1d', (S, S, S), (3,)),
('avg_pool2d', (S, S, S, S), (3,), '', (True,)),
('avg_pool3d', (S, S, S, S, S), (3,)),
('fractional_max_pool2d', (S, S, S, S), (3, [2, 3],)),
('max_pool1d', (S, S, S), (2, 1)),
('max_pool1d', (S, S, S), (2, 1, 1, 1, False, True), 'with_indices'),
('max_pool2d', (S, S, S, S), (2, 1), '', (True, 'aten::max_pool2d_with_indices')),
('max_pool2d', (S, S, S, S), (2, 1, 1, 1, False, True), 'with_indices', (True, 'aten::max_pool2d_with_indices')),
('max_pool3d', (S, S, S, S, S), (2, 1)),
('max_unpool1d', torch.tensor([[[2., 4]]]), (torch.tensor([[[1, 3]]]), 2, 2, 0)),
('max_unpool2d', torch.tensor([[[[2., 4]]]]), (torch.tensor([[[[1, 3]]]]), 2, 2, 0)),
('max_unpool3d', torch.tensor([[[[[2., 4]]]]]), (torch.tensor([[[[[1, 3]]]]]), 2, 2, 0)),
('lp_pool1d', (S, S, S), (2., 3, 2,)),
('lp_pool2d', (S, S, S, S), (2., 3, 2,)),
('adaptive_max_pool1d', (S, S, S), (5,)),
('adaptive_max_pool2d', (S, S, S, S), ([5, 7],)),
('adaptive_max_pool3d', (S, S, S, S, S), ([3, 2, 2],)),
('adaptive_avg_pool1d', (S, S, S), (5,), '', (True,)),
('adaptive_avg_pool2d', (S, S, S, S), ([5, 7],), '', (True,)),
('adaptive_avg_pool3d', (S, S, S, S, S), ([3, 2, 2],), '', (True,)),
('dropout', (S, S, S), (0.5,), '', (True,
['aten::bernoulli_',
'aten::empty_like', 'aten::mul', 'aten::div'])),
('alpha_dropout', (S, S, S), (0.5,)),
('dropout2d', (S, S, S), (0.5,)),
('dropout3d', (S, S, S), (0.5,)),
('feature_alpha_dropout', (S, S, S), (0.5,)),
('threshold', (S, S, S), (0.1, 2.), '', (True,)),
('threshold', (S, S, S), (0.1, 2., True), 'inplace'),
('relu', (S, S, S), (), '', (True,)),
('relu', (S, S, S), (), 'inplace'),
('glu', (S - 1, S - 1, S - 1), (),),
('hardtanh', (S, S, S), (-0.5, 0.5),),
('hardtanh', (S, S, S), (-0.5, 0.5, True), 'inplace'),
('relu6', (S, S, S), (),),
('relu6', (S, S, S), (True), 'inplace'),
('elu', (S, S, S), (0.9,),),
('elu', (S, S, S), (0.9, True), 'inplace'),
('selu', (S, S, S), (),),
('selu', (S, S, S), (True), 'inplace'),
('celu', (S, S, S), (0.9,),),
('celu', (S, S, S), (0.9, True), 'inplace'),
('leaky_relu', (S, S, S), (0.02,),),
('leaky_relu', (S, S, S), (0.02,), 'inplace'),
('rrelu', (S, S), (0.1, 0.3, False),),
('rrelu', (S, S), (0.1, 0.3, False, True), 'inplace'),
('hardshrink', (S, S, S), (0.4,),),
('tanhshrink', (S, S, S), (),),
('softsign', (S, S, S), (),),
('softplus', (S, S, S), (),),
('softmin', (S, S, S), (0,),),
('softmax', (S, S, S), (0,), '', (True,)),
('softmax', (S, S, S), (0, 3, torch.double), 'with_all_args', (True,)),
('tanh', (S, S, S), (), '', (True,)),
('sigmoid', (S, S, S), (), '', (True,)),
('log_softmax', (S, S, S), (0,), '', (True,)),
('linear', (S, S), ((M, S),), '', (True, ['aten::t', 'aten::matmul'])),
('linear', (S, S), ((M, S), (M,)), 'addmm', (True, ['aten::add', 'aten::mm'])),
('bilinear', (S, S, S), ((S, S, M), torch.zeros(M, S, M),),),
('embedding', torch.tensor([[1, 2, 4, 5], [4, 3, 2, 5]]), (torch.rand(6, 3), ), '', (True,)),
('embedding_bag', torch.tensor([1, 2, 4, 2]), (torch.rand(5, 3), torch.tensor([0, 4]),),),
('batch_norm', (S, S), (non_differentiable(torch.randn(S)), non_differentiable(torch.ones(S)), ),
'', (False, 'aten::_batch_norm_impl_index')),
('instance_norm', (S, S, S), (non_differentiable(torch.zeros(S)), non_differentiable(torch.ones(S))),),
('layer_norm', (S, S, S, S), ([5],), '',
(False, ['aten::contiguous', 'aten::_batch_norm_impl_index'])),
('layer_norm', (S, S, S, S), ([5], non_differentiable(torch.rand(S)),), 'with_only_weight',
(False, ['aten::contiguous', 'aten::_batch_norm_impl_index'])),
('layer_norm', (S, S, S, S), ([5], None, non_differentiable(torch.rand(S)),), 'with_only_bias',
(False, ['aten::contiguous', 'aten::_batch_norm_impl_index'])),
('layer_norm', (S, S, S, S), ([5], non_differentiable(torch.rand(S)),
non_differentiable(torch.rand(S))), 'with_weight_and_bias',
(False, ['aten::contiguous', 'aten::_batch_norm_impl_index', 'aten::addcmul'])),
('group_norm', (S, S, S), (1, torch.rand(5),),),
('local_response_norm', (S, S, S), (2, ),),
('nll_loss', F.log_softmax(torch.randn(3, 5), dim=0), (torch.tensor([1, 0, 4]),), '', (True, 'aten::nll_loss_forward')),
('poisson_nll_loss', torch.rand(S, 2), (torch.rand(S, 2),),),
('poisson_nll_loss', torch.rand(S, 2), (torch.rand(S, 2), True, True), 'full'),
('kl_div', F.log_softmax(torch.randn(S, 10), 1), (F.softmax(torch.randn(S, 10), 1),),),
('cross_entropy', (3, S), (torch.randint(S, (3,), dtype=torch.int64),),),
('binary_cross_entropy_with_logits', (3,), (torch.empty(3).random_(2), ),),
('smooth_l1_loss', (3, S), (non_differentiable(torch.rand(3, S)),),),
('l1_loss', (3, S), (non_differentiable(torch.rand(3, S)),),),
('mse_loss', (3, S), (non_differentiable(torch.rand(3, S)),),),
('smooth_l1_loss', (3, S), ((torch.rand(3, S)),), 'with_grad'),
('l1_loss', (3, S), ((torch.rand(3, S)),), 'with_grad'),
('mse_loss', (3, S), ((torch.rand(3, S)),), 'with_grad'),
('margin_ranking_loss', (3, S), ((3, S), (S,)),),
('hinge_embedding_loss', (3, S), (non_differentiable(torch.rand(3, S)),),),
('soft_margin_loss', (3, S), (non_differentiable(torch.rand(3, S)),),),
('multilabel_soft_margin_loss', (3, S), (non_differentiable(torch.rand(3, S)),),),
('cosine_embedding_loss', (S, S), ((S, S), non_differentiable(torch.rand(S,))),),
('pixel_shuffle', (1, 9, 4, 4), (3,),),
('affine_grid', (S, 2, 3), (torch.Size([S, 1, 7, 7]),),),
('pad', (3, 3, 4, 2), ([1, 1],),),
('pairwise_distance', (S, S), ((S, S),),),
('pdist', (S, S), (),),
('cosine_similarity', (S, S), ((S, S),),),
('triplet_margin_loss', (S, S), ((S, S), (S, S)),),
('normalize', (S, S, S), (),),
('unfold', (S, S, S, S), ([2, 3]),),
('fold', (1, 3 * 2 * 2, 12), ([4, 5], [2, 2]),),
('grid_sample', (S, S, S, S), (non_differentiable(torch.rand(S, S, S, 2)),),),
('gumbel_softmax', (S, S), (2.,), '', (True, ['aten::softmax', 'aten::add', 'aten::div'], ['aten::neg'])),
('gumbel_softmax', (S, S), (2., True,), 'hard', (True, ['aten::softmax', 'aten::add', 'aten::div'], ['aten::neg'])),
('multilabel_margin_loss', torch.tensor([[0.2, -0.2, 0.07]]), (torch.tensor([[0, 0, 1]]),),),
('multi_margin_loss', (S, S), (non_differentiable(torch.randint(S, (S, ), dtype=torch.int64)),
1, 1., non_differentiable(torch.randn(S))),),
('binary_cross_entropy', torch.randn(3, 2).sigmoid(), (non_differentiable(torch.rand(3, 2)),
non_differentiable(torch.randn(3, 2))),),
('binary_cross_entropy', torch.randn(3, 2).sigmoid(),
(non_differentiable(torch.rand(3, 2)),
non_differentiable(torch.randn(3, 2)), None, None, 'mean'), 'size_average'),
('ctc_loss', torch.rand(S, S, S).log_softmax(2).detach().requires_grad_(),
(torch.randint(1, S, (S, S), dtype=torch.long), torch.full((S,), S, dtype=torch.long),
torch.randint(1, S, (S,), dtype=torch.long))),
('upsample', torch.randn(S, S, M, M), (None, 2.), 'with_scale'),
('upsample', torch.randn(S, S, M, M), (4,), 'with_size'),
('interpolate', torch.zeros(3, 3).view(1, 1, 3, 3), (2,), 'nearest_4d'),
('interpolate', torch.randn(S, S, M, M), (None, 2.), 'nearest_4d_with_scale'),
('interpolate', torch.randn(S, S, M, M), (4,), 'nearest_4d_with_size'),
('interpolate', torch.zeros(3, 3).view(1, 1, 3, 3), (2,), 'area_4d'),
('interpolate', torch.randn(S, S, M, M), (None, 2.), 'area_4d_with_scale'),
('interpolate', torch.randn(S, S, M, M), (4,), 'area_4d_with_size'),
('interpolate', torch.zeros(3, 3).view(1, 1, 3, 3), (2,), 'bilinear_4d'),
('interpolate', torch.randn(S, S, M, M), (None, 2.), 'bilinear_4d_with_scale'),
('interpolate', torch.randn(S, S, M, M), (4,), 'bilinear_4d_with_size'),
('interpolate', torch.zeros(3, 3).view(1, 1, 3, 3), (2,), 'bicubic_4d'),
('interpolate', torch.randn(S, S, M, M), (None, 2.), 'bicubic_4d_with_scale'),
('interpolate', torch.randn(S, S, M, M), (4,), 'bicubic_4d_with_size'),
('interpolate', torch.zeros(3, 3).view(1, 3, 3), (2,), 'nearest_3d'),
('interpolate', torch.randn(S, M, M), (None, 2.), 'nearest_3d_with_scale'),
('interpolate', torch.randn(S, M, M), (4,), 'nearest_3d_with_size'),
('interpolate', torch.zeros(3, 3).view(1, 3, 3), (2,), 'area_3d'),
('interpolate', torch.randn(S, M, M), (None, 2.), 'area_3d_with_scale'),
('interpolate', torch.randn(S, M, M), (4,), 'area_3d_with_size'),
('interpolate', torch.zeros(3, 3).view(1, 3, 3), (2,), 'linear_3d'),
('interpolate', torch.randn(S, M, M), (None, 2.), 'linear_3d_with_scale'),
('interpolate', torch.randn(S, M, M), (4,), 'linear_3d_with_size'),
('interpolate', torch.randn(S, M, M, M, M), (None, 2.), 'nearest_5d_with_scale'),
('interpolate', torch.randn(S, M, M, M, M), (4,), 'nearest_5d_with_size'),
('interpolate', torch.zeros(3, 3, 3).view(1, 1, 3, 3, 3), (2,), 'area_5d'),
('interpolate', torch.randn(S, M, M, M, M), (None, 2.), 'area_5d_with_scale'),
('interpolate', torch.randn(S, M, M, M, M), (4,), 'area_5d_with_size'),
('interpolate', torch.zeros(3, 3, 3).view(1, 1, 3, 3, 3), (2,), 'trilinear_5d'),
('interpolate', torch.randn(S, M, M, M, M), (None, 2.), 'trilinear_5d_with_scale'),
('interpolate', torch.randn(S, M, M, M, M), (4,), 'trilinear_5d_with_size'),
('interpolate', torch.zeros(3, 3).view(1, 1, 3, 3), (2, None, 'nearest', None, False),
'nearest_4d_not_recompute_scale_factor'),
('interpolate', torch.randn(S, S, M, M), (4, None, 'nearest', None, False),
'nearest_4d_with_size_not_recompute_scale_factor'),
('interpolate', torch.randn(S, S, M, M), (None, 2., 'bilinear', None, False),
'bilinear_4d_with_scale_not_recompute_scale_factor'),
('interpolate', torch.randn(S, S, M, M), (4, None, 'bilinear', None, False),
'bilinear_4d_with_size_not_recompute_scale_factor'),
('interpolate', torch.randn(S, S, M, M), (None, 2., 'bicubic', None, False),
'bicubic_4d_with_scale_not_recompute_scale_factor'),
('interpolate', torch.randn(S, S, M, M), (4, None, 'bicubic', None, False),
'bicubic_4d_with_size_not_recompute_scale_factor'),
('interpolate', torch.randn(S, M, M), (None, 2., 'nearest', None, False),
'nearest_3d_with_scale_not_recompute_scale_factor'),
('interpolate', torch.randn(S, M, M), (4, None, 'nearest', None, False),
'nearest_3d_with_size_not_recompute_scale_factor'),
('interpolate', torch.randn(S, M, M), (None, 2., 'linear', None, False),
'linear_3d_with_scale_not_recompute_scale_factor'),
('interpolate', torch.randn(S, M, M), (4, None, 'linear', None, False),
'linear_3d_with_size_not_recompute_scale_factor'),
('interpolate', torch.randn(S, M, M, M, M), (None, 2., 'nearest', None, False),
'nearest_5d_with_scale_not_recompute_scale_factor'),
('interpolate', torch.randn(S, M, M, M, M), (4, None, 'nearest', None, False),
'nearest_5d_with_size_not_recompute_scale_factor'),
('interpolate', torch.randn(S, M, M, M, M), (None, 2., 'trilinear', None, False),
'trilinear_5d_with_scale_not_recompute_scale_factor'),
('interpolate', torch.randn(S, M, M, M, M), (4, None, 'trilinear', None, False),
'trilinear_5d_with_size_not_recompute_scale_factor'),
]
script_template = '''
def the_method({}):
return {}
'''
def get_call(method_name, func_type, args, kwargs):
    """Build the Python source text of a call to *method_name*.

    Args:
        method_name (str): name of the function/method to invoke.
        func_type (str): one of 'functional' (``torch.*``), 'method' (tensor
            method, ``args[0]`` is the receiver) or 'nn_functional'
            (``torch.nn.functional.*``).
        args (list[str]): argument expressions as source text.
        kwargs (dict): keyword-argument name -> expression text.

    Returns:
        str: the call expression, e.g. ``"torch.add(x, y)"``.

    Raises:
        ValueError: if *func_type* is not one of the supported kinds.
    """
    kwargs_str = ', '.join([k + '=' + str(v) for k, v in kwargs.items()])
    self_arg = args[0]
    if func_type == 'method':
        args = args[1:]

    argument_str = ', '.join(args)
    argument_str += ', ' if len(args) and len(kwargs) else ''
    argument_str += kwargs_str

    if func_type == 'functional':
        call = 'torch.{}({})'.format(method_name, argument_str)
    elif func_type == 'method':
        call = '{}.{}({})'.format(self_arg, method_name, argument_str)
    elif func_type == 'nn_functional':
        call = 'torch.nn.functional.{}({})'.format(method_name, argument_str)
    else:
        # BUG FIX: the original did `raise 'Unsupported function type'`.
        # Raising a plain string is illegal in Python 3 and produces an
        # uninformative "exceptions must derive from BaseException" TypeError;
        # raise a proper, descriptive exception instead.
        raise ValueError('Unsupported function type: {}'.format(func_type))

    return call
def get_constant(x):
    """Translate +/- infinity into the ``math`` module spelling used in
    generated script source; any other constant is returned unchanged."""
    for value, rendered in ((inf, 'math.inf'), (-inf, '-math.inf')):
        if x == value:
            return rendered
    return x
def get_script_args(args):
    """Split *args* into pieces for source generation.

    Returns:
        (formals, tensors, actuals): formal parameter names for each tensor
        argument, the tensor arguments themselves, and the per-argument
        source expressions (tensor name, quoted string, or rendered constant).
    """
    formals, tensors, actuals = [], [], []
    for arg in args:
        if isinstance(arg, torch.Tensor):
            # Tensors become positional parameters named i0, i1, ...
            name = 'i{}'.format(len(tensors))
            formals.append(name)
            actuals.append(name)
            tensors.append(arg)
        elif isinstance(arg, str):
            actuals.append("'{}'".format(arg))
        else:
            actuals.append(str(get_constant(arg)))
    return (formals, tensors, actuals)
# create a script function from (name, func_type, output_process_fn),
# and returns the compiled function and example inputs
def gen_script_fn_and_args(method_name, func_type, *args, **kwargs):
    """Compile a one-off script function performing the described call.

    Returns:
        (ScriptFunction, list[Tensor]): the compiled ``the_method`` and the
        tensor inputs extracted from *args* to invoke it with.
    """
    formals, tensors, actuals = get_script_args(args)
    call = get_call(method_name, func_type, actuals, kwargs)
    script = script_template.format(', '.join(formals), call)
    CU = torch.jit.CompilationUnit(script)
    return CU.the_method, tensors
# create a script function from (name, func_type, output_process_fn),
# returns a function takes in (args, kwargs) and runs the compiled function and
# then applies the post process fn to the outputs
def create_script_fn(self, method_name, func_type, output_process_fn):
    """Return a wrapper that scripts the call, checks the export/import
    round-trip, runs it on the tensor inputs and post-processes the output.

    The most recent specialized graph is stashed on ``script_fn.last_graph``
    for inspection by the caller.
    """
    def script_fn(*args, **kwargs):
        fn, tensors = gen_script_fn_and_args(method_name, func_type, *args, **kwargs)
        self.assertExportImport(fn.graph, tensors)
        output = output_process_fn(fn(*tensors))
        # Record the graph specialized for the inputs actually used.
        script_fn.last_graph = fn.graph_for(*tensors)
        return output
    return script_fn
# make a new function where all non-tensor arguments in 'args' have been partially
# applied, and all tensor arguments remain.
# used to trace functions when some arguments are not tensors
def partial_apply_nontensors(fn, args, **kwargs):
    """Bind all non-tensor entries of *args* into a closure over *fn*.

    Returns:
        (bound_fn, tensor_inputs): a callable taking only the tensor
        arguments (in their original order) and the list of those tensors.
    """
    is_tensor = [isinstance(a, torch.Tensor) for a in args]

    def bound_fn(*tensor_args):
        it = iter(tensor_args)
        # Re-interleave supplied tensors with the captured non-tensor args.
        full_args = [next(it) if t else orig for t, orig in zip(is_tensor, args)]
        return fn(*full_args, **kwargs)

    tensor_inputs = [a for a, t in zip(args, is_tensor) if t]
    return bound_fn, tensor_inputs
# create a trace function from input fn
def create_traced_fn(self, fn):
    """Return a wrapper that traces *fn* on the tensor portion of its inputs,
    checks export/import, runs the trace and stashes ``last_graph``."""
    def traced_fn(*inputs, **kwargs):
        # Non-tensor args are baked into the traced closure; only tensors
        # remain as trace inputs.
        fn_tensors, inputs_tensors = partial_apply_nontensors(fn, inputs, **kwargs)
        # `check_trace` is set to False because check_trace is run with @no_grad
        # Also, `check_against_reference` already does all the checks
        # against python function
        traced = torch.jit.trace(fn_tensors, inputs_tensors, check_trace=False)
        self.assertExportImport(traced.graph, inputs_tensors)
        output = traced(*inputs_tensors)
        traced_fn.last_graph = traced.graph_for(*inputs_tensors)
        return output
    return traced_fn
# known to be failing in script
# Test names listed here are skipped in script mode (known failures).
EXCLUDE_SCRIPT = {
    'test_norm_fro_default',
    'test_norm_fro_cpu',
    'test_norm_nuc',
    'test_norm_fro',
    'test_norm_nuc_batched',
    # aten op has additional cudnn argument
    'test_nn_unfold',
    # flaky test - TODO fix
    'test_nn_ctc_loss',
    # unknown builtin op
    'test_nn_fold',
    # jit doesn't support sparse tensors.
    'test_to_sparse'
}
# generates a script function and set of example inputs
# from a specified test in the format of nn_functional_tests
def get_nn_functional_compiled_fn_and_inputs(name, self_size, args, variant_name='', *extra_args):
    """Build the scripted nn.functional under test plus its example inputs.

    Parameters mirror the layout of entries in ``nn_functional_tests``;
    *extra_args* (autodiff info, skip predicates, ...) are accepted for
    signature compatibility but unused here.
    """
    test_name = 'test_nn_' + name
    if variant_name != '':
        test_name = test_name + '_' + variant_name
    no_grad = variant_name == 'inplace'
    self_variable = create_input((self_size,))[0][0]
    kwargs = None
    # need to record this because methods can change the size (e.g. unsqueeze)
    args_variable, kwargs_variable = create_input(args)
    self_tensor = deepcopy(self_variable.data)
    args_tensor = deepcopy(unpack_variables(args_variable))
    f_args_variable = (self_variable,) + args_variable
    f_args_tensor = (self_tensor,) + args_tensor
    with torch.jit._disable_emit_hooks():
        script_fn, inputs = gen_script_fn_and_args(name, "nn_functional", *f_args_variable)
    return script_fn, inputs
# additional modules test
# TODO: delete this list once we make all nn_tests work
# Each dict follows the common_nn module-test schema: the nn.Module name,
# its constructor args, the forward-input size, optional extra forward args
# and a 'slowTest' flag for heavyweight modules.
additional_module_tests = [
    {
        'module_name': 'Bilinear',
        'constructor_args': (S, S, M),
        'input_size': (S, S),
        'extra_args': ((S, S),)
    },
    {
        'module_name': 'RNNCell',
        'constructor_args': (S, S),
        'input_size': (S, S),
    },
    {
        'module_name': 'LSTMCell',
        'constructor_args': (S, S),
        'input_size': (S, S),
    },
    {
        'module_name': 'GRUCell',
        'constructor_args': (S, S),
        'input_size': (S, S),
    },
    {
        'module_name': 'MultiheadAttention',
        'constructor_args': (128, 8),
        'input_size': (10, 8, 128),
        'extra_args': (torch.randn(10, 8, 128), torch.randn(10, 8, 128)),
        'slowTest': True
    },
    {
        'module_name': 'Transformer',
        'constructor_args': (1, 1, 1, 1, 2),
        'input_size': (3, 1, 1),
        'extra_args': (torch.randn(1, 1, 1),),
        'slowTest': True
    }
]
# Module tests that cannot be scripted; matched by full test name before
# compilation is attempted.
EXCLUDE_SCRIPT_MODULES = {
    'test_nn_AdaptiveAvgPool2d_tuple_none',
    'test_nn_AdaptiveAvgPool3d_tuple_none',
    'test_nn_AdaptiveMaxPool2d_tuple_none',
    'test_nn_AdaptiveMaxPool3d_tuple_none',
    # Doesn't use future division, so this is not supported
    'test_nn_CrossMapLRN2d',
}
script_method_template = '''
def forward({}):
return {}
'''
def create_script_module(self, nn_module, constructor_args, *args, **kwargs):
    """Return a factory wrapping *nn_module* in a ScriptModule whose
    ``forward`` simply delegates to the wrapped submodule.

    When *self* (a test case) is given, the factory also checks the
    export/import round-trip and runs the module once; the resulting graph
    is stashed on ``create_script_module.last_graph``.
    """
    def script_module(*args, **kwargs):
        formals, tensors, actuals = get_script_args(args)
        method_args = ', '.join(['self'] + actuals)
        call_args_str = ', '.join(actuals)
        call = "self.submodule({})".format(call_args_str)
        script = script_method_template.format(method_args, call)
        submodule_constants = []
        if kwargs.get('is_constant'):
            submodule_constants = ['submodule']
        # Create module to use the script method
        class TheModule(torch.jit.ScriptModule):
            __constants__ = submodule_constants
            def __init__(self):
                super(TheModule, self).__init__()
                self.submodule = nn_module(*constructor_args)
        def make_module(script):
            module = TheModule()
            # check __repr__
            str(module)
            module.define(script)
            return module
        module = make_module(script)
        if self:
            self.assertExportImportModule(module, tensors)
            module(*args)
        create_script_module.last_graph = module.graph
        return module
    return script_module
def get_nn_module_name_from_kwargs(**kwargs):
    """Extract the nn module's name from a module-test spec dict.

    Precedence: 'module_name', then 'fullname', then the ``__name__`` of a
    supplied 'constructor'; returns None if none of those keys is present.
    """
    for key in ('module_name', 'fullname'):
        if key in kwargs:
            return kwargs[key]
    if 'constructor' in kwargs:
        return kwargs['constructor'].__name__
def get_nn_mod_test_name(**kwargs):
    """Derive the 'test_nn_<name>[_<desc>]' test name for a module spec."""
    base = get_nn_module_name_from_kwargs(**kwargs)
    if 'desc' in kwargs:
        base = "{}_{}".format(base, kwargs['desc'])
    return 'test_nn_{}'.format(base)
def get_nn_module_class_from_kwargs(**kwargs):
    """Return the class portion of the module name: everything before the
    first underscore (variant suffixes like 'Conv2d_pad' drop to 'Conv2d')."""
    name = get_nn_module_name_from_kwargs(**kwargs)
    # split('_', 1)[0] yields the whole name when no underscore is present,
    # matching the original find()-based branching.
    return name.split("_", 1)[0]
def try_get_nn_module_compiled_mod_and_inputs(*args, **kwargs):
    """Best-effort construction of a scripted nn module plus example inputs.

    Returns None for specs that cannot be scripted (eval variants, functional
    wrappers, names in EXCLUDE_SCRIPT_MODULES); otherwise returns
    (script_module, example_inputs).
    """
    name = get_nn_module_name_from_kwargs(**kwargs)

    if 'desc' in kwargs and 'eval' in kwargs['desc']:
        # eval() is not supported, so skip these tests
        return

    test_name = name
    if 'desc' in kwargs:
        test_name = "{}_{}".format(test_name, kwargs['desc'])
    test_name = get_nn_mod_test_name(**kwargs)

    if test_name in EXCLUDE_SCRIPT_MODULES:
        return
    if 'constructor' in kwargs:
        nn_module = kwargs['constructor']
    else:
        nn_module = getattr(torch.nn, name)

    if "FunctionalModule" in str(nn_module):
        return

    if 'constructor_args_fn' in kwargs:
        constructor_args = kwargs['constructor_args_fn']()
    else:
        constructor_args = kwargs.get('constructor_args', ())

    # Set up inputs from tuple of sizes or constructor fn
    if 'input_fn' in kwargs:
        input = kwargs['input_fn']()
    else:
        input = (kwargs['input_size'],)

    # Extra parameters to forward()
    if 'extra_args' in kwargs:
        input = input + kwargs['extra_args']

    if 'target_size' in kwargs:
        input = input + (kwargs['target_size'],)
    elif 'target_fn' in kwargs:
        if torch.is_tensor(input):
            input = (input,)
        input = input + (kwargs['target_fn'](),)

    args_variable, kwargs_variable = create_input(input)
    f_args_variable = deepcopy(unpack_variables(args_variable))
    out_var = deepcopy(f_args_variable)

    args, mod = f_args_variable, create_script_module(None, nn_module, constructor_args, *f_args_variable)(*f_args_variable)

    return mod, out_var
def get_all_nn_module_tests():
    """Return every module test spec: common_nn's lists plus the local extras."""
    return module_tests + new_module_tests + additional_module_tests
| 44.91215 | 125 | 0.579657 |
from torch.jit.annotations import BroadcastingList2, BroadcastingList3
from torch.testing._internal.common_methods_invocations import non_differentiable, create_input, \
unpack_variables
import torch.nn.functional as F
import torch
import torch.cuda
import torch.jit
import torch.jit._logging
import torch.jit.frontend
from torch.testing._internal.common_nn import module_tests, new_module_tests
from copy import deepcopy
import math
from torch._six import inf
torch.set_default_dtype(torch.double)
L = 20
M = 10
S = 5
# kwargs for function, // optional
# )
nn_functional_tests = [
('conv1d', (S, S, S), ((S, S, S),)),
('conv2d', (S, S, S, S), ((S, S, S, S),)),
('conv3d', (S, S, S, S, S), ((S, S, S, S, S),)),
('conv_transpose1d', (S, S, S), ((S, S, S),)),
('conv_transpose2d', (S, S, S, S), ((S, S, S, S),)),
('conv_transpose3d', (S, S, S, S, S), ((S, S, S, S, S),)),
('conv_tbc', (S, S, S), ((S, S, S), (S,), 2)),
('avg_pool1d', (S, S, S), (3,)),
('avg_pool2d', (S, S, S, S), (3,), '', (True,)),
('avg_pool3d', (S, S, S, S, S), (3,)),
('fractional_max_pool2d', (S, S, S, S), (3, [2, 3],)),
('max_pool1d', (S, S, S), (2, 1)),
('max_pool1d', (S, S, S), (2, 1, 1, 1, False, True), 'with_indices'),
('max_pool2d', (S, S, S, S), (2, 1), '', (True, 'aten::max_pool2d_with_indices')),
('max_pool2d', (S, S, S, S), (2, 1, 1, 1, False, True), 'with_indices', (True, 'aten::max_pool2d_with_indices')),
('max_pool3d', (S, S, S, S, S), (2, 1)),
('max_unpool1d', torch.tensor([[[2., 4]]]), (torch.tensor([[[1, 3]]]), 2, 2, 0)),
('max_unpool2d', torch.tensor([[[[2., 4]]]]), (torch.tensor([[[[1, 3]]]]), 2, 2, 0)),
('max_unpool3d', torch.tensor([[[[[2., 4]]]]]), (torch.tensor([[[[[1, 3]]]]]), 2, 2, 0)),
('lp_pool1d', (S, S, S), (2., 3, 2,)),
('lp_pool2d', (S, S, S, S), (2., 3, 2,)),
('adaptive_max_pool1d', (S, S, S), (5,)),
('adaptive_max_pool2d', (S, S, S, S), ([5, 7],)),
('adaptive_max_pool3d', (S, S, S, S, S), ([3, 2, 2],)),
('adaptive_avg_pool1d', (S, S, S), (5,), '', (True,)),
('adaptive_avg_pool2d', (S, S, S, S), ([5, 7],), '', (True,)),
('adaptive_avg_pool3d', (S, S, S, S, S), ([3, 2, 2],), '', (True,)),
('dropout', (S, S, S), (0.5,), '', (True,
['aten::bernoulli_',
'aten::empty_like', 'aten::mul', 'aten::div'])),
('alpha_dropout', (S, S, S), (0.5,)),
('dropout2d', (S, S, S), (0.5,)),
('dropout3d', (S, S, S), (0.5,)),
('feature_alpha_dropout', (S, S, S), (0.5,)),
('threshold', (S, S, S), (0.1, 2.), '', (True,)),
('threshold', (S, S, S), (0.1, 2., True), 'inplace'),
('relu', (S, S, S), (), '', (True,)),
('relu', (S, S, S), (), 'inplace'),
('glu', (S - 1, S - 1, S - 1), (),),
('hardtanh', (S, S, S), (-0.5, 0.5),),
('hardtanh', (S, S, S), (-0.5, 0.5, True), 'inplace'),
('relu6', (S, S, S), (),),
('relu6', (S, S, S), (True), 'inplace'),
('elu', (S, S, S), (0.9,),),
('elu', (S, S, S), (0.9, True), 'inplace'),
('selu', (S, S, S), (),),
('selu', (S, S, S), (True), 'inplace'),
('celu', (S, S, S), (0.9,),),
('celu', (S, S, S), (0.9, True), 'inplace'),
('leaky_relu', (S, S, S), (0.02,),),
('leaky_relu', (S, S, S), (0.02,), 'inplace'),
('rrelu', (S, S), (0.1, 0.3, False),),
('rrelu', (S, S), (0.1, 0.3, False, True), 'inplace'),
('hardshrink', (S, S, S), (0.4,),),
('tanhshrink', (S, S, S), (),),
('softsign', (S, S, S), (),),
('softplus', (S, S, S), (),),
('softmin', (S, S, S), (0,),),
('softmax', (S, S, S), (0,), '', (True,)),
('softmax', (S, S, S), (0, 3, torch.double), 'with_all_args', (True,)),
('tanh', (S, S, S), (), '', (True,)),
('sigmoid', (S, S, S), (), '', (True,)),
('log_softmax', (S, S, S), (0,), '', (True,)),
('linear', (S, S), ((M, S),), '', (True, ['aten::t', 'aten::matmul'])),
('linear', (S, S), ((M, S), (M,)), 'addmm', (True, ['aten::add', 'aten::mm'])),
('bilinear', (S, S, S), ((S, S, M), torch.zeros(M, S, M),),),
('embedding', torch.tensor([[1, 2, 4, 5], [4, 3, 2, 5]]), (torch.rand(6, 3), ), '', (True,)),
('embedding_bag', torch.tensor([1, 2, 4, 2]), (torch.rand(5, 3), torch.tensor([0, 4]),),),
('batch_norm', (S, S), (non_differentiable(torch.randn(S)), non_differentiable(torch.ones(S)), ),
'', (False, 'aten::_batch_norm_impl_index')),
('instance_norm', (S, S, S), (non_differentiable(torch.zeros(S)), non_differentiable(torch.ones(S))),),
('layer_norm', (S, S, S, S), ([5],), '',
(False, ['aten::contiguous', 'aten::_batch_norm_impl_index'])),
('layer_norm', (S, S, S, S), ([5], non_differentiable(torch.rand(S)),), 'with_only_weight',
(False, ['aten::contiguous', 'aten::_batch_norm_impl_index'])),
('layer_norm', (S, S, S, S), ([5], None, non_differentiable(torch.rand(S)),), 'with_only_bias',
(False, ['aten::contiguous', 'aten::_batch_norm_impl_index'])),
('layer_norm', (S, S, S, S), ([5], non_differentiable(torch.rand(S)),
non_differentiable(torch.rand(S))), 'with_weight_and_bias',
(False, ['aten::contiguous', 'aten::_batch_norm_impl_index', 'aten::addcmul'])),
('group_norm', (S, S, S), (1, torch.rand(5),),),
('local_response_norm', (S, S, S), (2, ),),
('nll_loss', F.log_softmax(torch.randn(3, 5), dim=0), (torch.tensor([1, 0, 4]),), '', (True, 'aten::nll_loss_forward')),
('poisson_nll_loss', torch.rand(S, 2), (torch.rand(S, 2),),),
('poisson_nll_loss', torch.rand(S, 2), (torch.rand(S, 2), True, True), 'full'),
('kl_div', F.log_softmax(torch.randn(S, 10), 1), (F.softmax(torch.randn(S, 10), 1),),),
('cross_entropy', (3, S), (torch.randint(S, (3,), dtype=torch.int64),),),
('binary_cross_entropy_with_logits', (3,), (torch.empty(3).random_(2), ),),
('smooth_l1_loss', (3, S), (non_differentiable(torch.rand(3, S)),),),
('l1_loss', (3, S), (non_differentiable(torch.rand(3, S)),),),
('mse_loss', (3, S), (non_differentiable(torch.rand(3, S)),),),
('smooth_l1_loss', (3, S), ((torch.rand(3, S)),), 'with_grad'),
('l1_loss', (3, S), ((torch.rand(3, S)),), 'with_grad'),
('mse_loss', (3, S), ((torch.rand(3, S)),), 'with_grad'),
('margin_ranking_loss', (3, S), ((3, S), (S,)),),
('hinge_embedding_loss', (3, S), (non_differentiable(torch.rand(3, S)),),),
('soft_margin_loss', (3, S), (non_differentiable(torch.rand(3, S)),),),
('multilabel_soft_margin_loss', (3, S), (non_differentiable(torch.rand(3, S)),),),
('cosine_embedding_loss', (S, S), ((S, S), non_differentiable(torch.rand(S,))),),
('pixel_shuffle', (1, 9, 4, 4), (3,),),
('affine_grid', (S, 2, 3), (torch.Size([S, 1, 7, 7]),),),
('pad', (3, 3, 4, 2), ([1, 1],),),
('pairwise_distance', (S, S), ((S, S),),),
('pdist', (S, S), (),),
('cosine_similarity', (S, S), ((S, S),),),
('triplet_margin_loss', (S, S), ((S, S), (S, S)),),
('normalize', (S, S, S), (),),
('unfold', (S, S, S, S), ([2, 3]),),
('fold', (1, 3 * 2 * 2, 12), ([4, 5], [2, 2]),),
('grid_sample', (S, S, S, S), (non_differentiable(torch.rand(S, S, S, 2)),),),
('gumbel_softmax', (S, S), (2.,), '', (True, ['aten::softmax', 'aten::add', 'aten::div'], ['aten::neg'])),
('gumbel_softmax', (S, S), (2., True,), 'hard', (True, ['aten::softmax', 'aten::add', 'aten::div'], ['aten::neg'])),
('multilabel_margin_loss', torch.tensor([[0.2, -0.2, 0.07]]), (torch.tensor([[0, 0, 1]]),),),
('multi_margin_loss', (S, S), (non_differentiable(torch.randint(S, (S, ), dtype=torch.int64)),
1, 1., non_differentiable(torch.randn(S))),),
('binary_cross_entropy', torch.randn(3, 2).sigmoid(), (non_differentiable(torch.rand(3, 2)),
non_differentiable(torch.randn(3, 2))),),
('binary_cross_entropy', torch.randn(3, 2).sigmoid(),
(non_differentiable(torch.rand(3, 2)),
non_differentiable(torch.randn(3, 2)), None, None, 'mean'), 'size_average'),
('ctc_loss', torch.rand(S, S, S).log_softmax(2).detach().requires_grad_(),
(torch.randint(1, S, (S, S), dtype=torch.long), torch.full((S,), S, dtype=torch.long),
torch.randint(1, S, (S,), dtype=torch.long))),
('upsample', torch.randn(S, S, M, M), (None, 2.), 'with_scale'),
('upsample', torch.randn(S, S, M, M), (4,), 'with_size'),
('interpolate', torch.zeros(3, 3).view(1, 1, 3, 3), (2,), 'nearest_4d'),
('interpolate', torch.randn(S, S, M, M), (None, 2.), 'nearest_4d_with_scale'),
('interpolate', torch.randn(S, S, M, M), (4,), 'nearest_4d_with_size'),
('interpolate', torch.zeros(3, 3).view(1, 1, 3, 3), (2,), 'area_4d'),
('interpolate', torch.randn(S, S, M, M), (None, 2.), 'area_4d_with_scale'),
('interpolate', torch.randn(S, S, M, M), (4,), 'area_4d_with_size'),
('interpolate', torch.zeros(3, 3).view(1, 1, 3, 3), (2,), 'bilinear_4d'),
('interpolate', torch.randn(S, S, M, M), (None, 2.), 'bilinear_4d_with_scale'),
('interpolate', torch.randn(S, S, M, M), (4,), 'bilinear_4d_with_size'),
('interpolate', torch.zeros(3, 3).view(1, 1, 3, 3), (2,), 'bicubic_4d'),
('interpolate', torch.randn(S, S, M, M), (None, 2.), 'bicubic_4d_with_scale'),
('interpolate', torch.randn(S, S, M, M), (4,), 'bicubic_4d_with_size'),
('interpolate', torch.zeros(3, 3).view(1, 3, 3), (2,), 'nearest_3d'),
('interpolate', torch.randn(S, M, M), (None, 2.), 'nearest_3d_with_scale'),
('interpolate', torch.randn(S, M, M), (4,), 'nearest_3d_with_size'),
('interpolate', torch.zeros(3, 3).view(1, 3, 3), (2,), 'area_3d'),
('interpolate', torch.randn(S, M, M), (None, 2.), 'area_3d_with_scale'),
('interpolate', torch.randn(S, M, M), (4,), 'area_3d_with_size'),
('interpolate', torch.zeros(3, 3).view(1, 3, 3), (2,), 'linear_3d'),
('interpolate', torch.randn(S, M, M), (None, 2.), 'linear_3d_with_scale'),
('interpolate', torch.randn(S, M, M), (4,), 'linear_3d_with_size'),
('interpolate', torch.randn(S, M, M, M, M), (None, 2.), 'nearest_5d_with_scale'),
('interpolate', torch.randn(S, M, M, M, M), (4,), 'nearest_5d_with_size'),
('interpolate', torch.zeros(3, 3, 3).view(1, 1, 3, 3, 3), (2,), 'area_5d'),
('interpolate', torch.randn(S, M, M, M, M), (None, 2.), 'area_5d_with_scale'),
('interpolate', torch.randn(S, M, M, M, M), (4,), 'area_5d_with_size'),
('interpolate', torch.zeros(3, 3, 3).view(1, 1, 3, 3, 3), (2,), 'trilinear_5d'),
('interpolate', torch.randn(S, M, M, M, M), (None, 2.), 'trilinear_5d_with_scale'),
('interpolate', torch.randn(S, M, M, M, M), (4,), 'trilinear_5d_with_size'),
('interpolate', torch.zeros(3, 3).view(1, 1, 3, 3), (2, None, 'nearest', None, False),
'nearest_4d_not_recompute_scale_factor'),
('interpolate', torch.randn(S, S, M, M), (4, None, 'nearest', None, False),
'nearest_4d_with_size_not_recompute_scale_factor'),
('interpolate', torch.randn(S, S, M, M), (None, 2., 'bilinear', None, False),
'bilinear_4d_with_scale_not_recompute_scale_factor'),
('interpolate', torch.randn(S, S, M, M), (4, None, 'bilinear', None, False),
'bilinear_4d_with_size_not_recompute_scale_factor'),
('interpolate', torch.randn(S, S, M, M), (None, 2., 'bicubic', None, False),
'bicubic_4d_with_scale_not_recompute_scale_factor'),
('interpolate', torch.randn(S, S, M, M), (4, None, 'bicubic', None, False),
'bicubic_4d_with_size_not_recompute_scale_factor'),
('interpolate', torch.randn(S, M, M), (None, 2., 'nearest', None, False),
'nearest_3d_with_scale_not_recompute_scale_factor'),
('interpolate', torch.randn(S, M, M), (4, None, 'nearest', None, False),
'nearest_3d_with_size_not_recompute_scale_factor'),
('interpolate', torch.randn(S, M, M), (None, 2., 'linear', None, False),
'linear_3d_with_scale_not_recompute_scale_factor'),
('interpolate', torch.randn(S, M, M), (4, None, 'linear', None, False),
'linear_3d_with_size_not_recompute_scale_factor'),
('interpolate', torch.randn(S, M, M, M, M), (None, 2., 'nearest', None, False),
'nearest_5d_with_scale_not_recompute_scale_factor'),
('interpolate', torch.randn(S, M, M, M, M), (4, None, 'nearest', None, False),
'nearest_5d_with_size_not_recompute_scale_factor'),
('interpolate', torch.randn(S, M, M, M, M), (None, 2., 'trilinear', None, False),
'trilinear_5d_with_scale_not_recompute_scale_factor'),
('interpolate', torch.randn(S, M, M, M, M), (4, None, 'trilinear', None, False),
'trilinear_5d_with_size_not_recompute_scale_factor'),
]
script_template = '''
def the_method({}):
return {}
'''
def get_call(method_name, func_type, args, kwargs):
    """Build the source text of a call expression for a generated script fn.

    Args:
        method_name: name of the op being invoked.
        func_type: 'functional' (torch.X), 'method' (receiver.X), or
            'nn_functional' (torch.nn.functional.X).
        args: formal argument names; for 'method' calls args[0] is the receiver.
        kwargs: mapping of keyword-argument name to literal value.

    Returns:
        The call expression as a string.

    Raises:
        ValueError: if func_type is not one of the supported kinds.
    """
    kwargs_str = ', '.join([k + '=' + str(v) for k, v in kwargs.items()])
    self_arg = args[0]
    if func_type == 'method':
        args = args[1:]
    argument_str = ', '.join(args)
    # Separate positional from keyword text only when both are present.
    argument_str += ', ' if len(args) and len(kwargs) else ''
    argument_str += kwargs_str
    if func_type == 'functional':
        call = 'torch.{}({})'.format(method_name, argument_str)
    elif func_type == 'method':
        call = '{}.{}({})'.format(self_arg, method_name, argument_str)
    elif func_type == 'nn_functional':
        call = 'torch.nn.functional.{}({})'.format(method_name, argument_str)
    else:
        # Bug fix: `raise 'Unsupported function type'` raised a string, which
        # is a TypeError on Python 3; raise a real exception instead.
        raise ValueError('Unsupported function type: {}'.format(func_type))
    return call
def get_constant(x):
    """Map float infinities to their source text ('math.inf' / '-math.inf');
    every other value passes through unchanged."""
    if x == inf:
        return 'math.inf'
    return '-math.inf' if x == -inf else x
def get_script_args(args):
    """Split example arguments into (formals, tensors, actuals).

    Tensors become fresh formal names i0, i1, ...; strings become quoted
    literals; everything else is embedded as a constant expression.
    """
    formals = []
    tensors = []
    actuals = []
    for arg in args:
        if isinstance(arg, torch.Tensor):
            # One formal per tensor, numbered in order of appearance.
            placeholder = 'i{}'.format(len(tensors))
            formals.append(placeholder)
            actuals.append(placeholder)
            tensors.append(arg)
        elif isinstance(arg, str):
            actuals.append("'{}'".format(arg))
        else:
            actuals.append(str(get_constant(arg)))
    return (formals, tensors, actuals)
def gen_script_fn_and_args(method_name, func_type, *args, **kwargs):
    """Create a script function from (name, func_type) and example args.

    Returns the compiled function together with its tensor inputs.
    """
    formals, tensors, actuals = get_script_args(args)
    call_expr = get_call(method_name, func_type, actuals, kwargs)
    source = script_template.format(', '.join(formals), call_expr)
    cu = torch.jit.CompilationUnit(source)
    return cu.the_method, tensors
def create_script_fn(self, method_name, func_type, output_process_fn):
    """Return a function that, given (args, kwargs), compiles and runs the
    corresponding script function and post-processes its outputs.

    The compiled graph for the most recent invocation is stashed on
    ``script_fn.last_graph`` for inspection by the caller.
    """
    def script_fn(*args, **kwargs):
        compiled, tensor_inputs = gen_script_fn_and_args(method_name, func_type, *args, **kwargs)
        self.assertExportImport(compiled.graph, tensor_inputs)
        result = output_process_fn(compiled(*tensor_inputs))
        script_fn.last_graph = compiled.graph_for(*tensor_inputs)
        return result
    return script_fn
def partial_apply_nontensors(fn, args, **kwargs):
    """Close over the non-tensor entries of ``args``.

    Returns a new function taking only the tensor arguments (in their
    original positions) plus the list of those tensors; used to trace
    functions whose signatures mix tensors and constants.
    """
    tensor_mask = [isinstance(arg, torch.Tensor) for arg in args]
    def tensor_only_fn(*tensor_args):
        remaining = iter(tensor_args)
        merged = [next(remaining) if is_t else orig
                  for is_t, orig in zip(tensor_mask, args)]
        return fn(*merged, **kwargs)
    return tensor_only_fn, [a for a, is_t in zip(args, tensor_mask) if is_t]
def create_traced_fn(self, fn):
    """Wrap ``fn`` so that calls go through torch.jit.trace.

    Non-tensor arguments are baked into the traced function; the graph
    for the most recent invocation is stored on ``traced_fn.last_graph``.
    """
    def traced_fn(*inputs, **kwargs):
        tensor_fn, tensor_inputs = partial_apply_nontensors(fn, inputs, **kwargs)
        # `check_trace` is set to False because check_trace is run with @no_grad
        # Also, `check_against_reference` already does all the checks
        # against python function
        traced = torch.jit.trace(tensor_fn, tensor_inputs, check_trace=False)
        self.assertExportImport(traced.graph, tensor_inputs)
        result = traced(*tensor_inputs)
        traced_fn.last_graph = traced.graph_for(*tensor_inputs)
        return result
    return traced_fn
# known to be failing in script
EXCLUDE_SCRIPT = {
'test_norm_fro_default',
'test_norm_fro_cpu',
'test_norm_nuc',
'test_norm_fro',
'test_norm_nuc_batched',
# aten op has additional cudnn argument
'test_nn_unfold',
# flaky test - TODO fix
'test_nn_ctc_loss',
# unknown builtin op
'test_nn_fold',
# jit doesn't support sparse tensors.
'test_to_sparse'
}
def get_nn_functional_compiled_fn_and_inputs(name, self_size, args, variant_name='', *extra_args):
    """Compile the torch.nn.functional op ``name`` as a script function.

    ``self_size`` describes the first (self) input; ``args`` the remaining
    example arguments. Returns (script_fn, example_tensor_inputs).
    ``variant_name`` and ``extra_args`` are accepted for signature
    compatibility with the test tables but are not used here.
    """
    # Cleanup: the original computed test_name, no_grad, kwargs and three
    # deepcopied tensor mirrors (self_tensor/args_tensor/f_args_tensor)
    # that were never read; all of that dead code is removed.
    self_variable = create_input((self_size,))[0][0]
    args_variable, kwargs_variable = create_input(args)
    f_args_variable = (self_variable,) + args_variable
    with torch.jit._disable_emit_hooks():
        script_fn, inputs = gen_script_fn_and_args(name, "nn_functional", *f_args_variable)
    return script_fn, inputs
additional_module_tests = [
{
'module_name': 'Bilinear',
'constructor_args': (S, S, M),
'input_size': (S, S),
'extra_args': ((S, S),)
},
{
'module_name': 'RNNCell',
'constructor_args': (S, S),
'input_size': (S, S),
},
{
'module_name': 'LSTMCell',
'constructor_args': (S, S),
'input_size': (S, S),
},
{
'module_name': 'GRUCell',
'constructor_args': (S, S),
'input_size': (S, S),
},
{
'module_name': 'MultiheadAttention',
'constructor_args': (128, 8),
'input_size': (10, 8, 128),
'extra_args': (torch.randn(10, 8, 128), torch.randn(10, 8, 128)),
'slowTest': True
},
{
'module_name': 'Transformer',
'constructor_args': (1, 1, 1, 1, 2),
'input_size': (3, 1, 1),
'extra_args': (torch.randn(1, 1, 1),),
'slowTest': True
}
]
EXCLUDE_SCRIPT_MODULES = {
'test_nn_AdaptiveAvgPool2d_tuple_none',
'test_nn_AdaptiveAvgPool3d_tuple_none',
'test_nn_AdaptiveMaxPool2d_tuple_none',
'test_nn_AdaptiveMaxPool3d_tuple_none',
'test_nn_CrossMapLRN2d',
}
script_method_template = '''
def forward({}):
return {}
'''
def create_script_module(self, nn_module, constructor_args, *args, **kwargs):
    """Return a factory that wraps ``nn_module`` in a ScriptModule.

    The returned ``script_module(*args, **kwargs)`` builds a ScriptModule
    whose forward() simply forwards to ``self.submodule`` (an instance of
    ``nn_module``), runs it once on ``args``, and returns it. Pass
    ``is_constant=True`` to mark the submodule as a __constants__ entry.
    The graph of the last built module is kept on
    ``create_script_module.last_graph``.
    """
    def script_module(*args, **kwargs):
        # Turn the example args into formals/tensors/actual expressions.
        formals, tensors, actuals = get_script_args(args)
        method_args = ', '.join(['self'] + actuals)
        call_args_str = ', '.join(actuals)
        call = "self.submodule({})".format(call_args_str)
        # Render the forward() source from the shared method template.
        script = script_method_template.format(method_args, call)
        submodule_constants = []
        if kwargs.get('is_constant'):
            submodule_constants = ['submodule']
        # Create module to use the script method
        class TheModule(torch.jit.ScriptModule):
            __constants__ = submodule_constants
            def __init__(self):
                super(TheModule, self).__init__()
                self.submodule = nn_module(*constructor_args)
        def make_module(script):
            module = TheModule()
            # check __repr__
            str(module)
            # Attach the generated forward() to the module.
            module.define(script)
            return module
        module = make_module(script)
        # ``self`` may be None when called outside a TestCase (see
        # try_get_nn_module_compiled_mod_and_inputs).
        if self:
            self.assertExportImportModule(module, tensors)
        module(*args)
        create_script_module.last_graph = module.graph
        return module
    return script_module
def get_nn_module_name_from_kwargs(**kwargs):
    """Resolve the module name for a test description.

    Precedence: explicit 'module_name', then 'fullname', then the
    'constructor' callable's __name__; returns None when none are present.
    """
    for key in ('module_name', 'fullname'):
        if key in kwargs:
            return kwargs[key]
    if 'constructor' in kwargs:
        return kwargs['constructor'].__name__
def get_nn_mod_test_name(**kwargs):
    """Derive the generated test name 'test_nn_<Name>[_<desc>]' for an nn test."""
    base = get_nn_module_name_from_kwargs(**kwargs)
    if 'desc' in kwargs:
        base = "{}_{}".format(base, kwargs['desc'])
    return 'test_nn_{}'.format(base)
def get_nn_module_class_from_kwargs(**kwargs):
    """Return the nn.Module class name for a test, stripping any '_suffix'.

    E.g. 'Conv2d_dilated' -> 'Conv2d'; names without an underscore pass
    through unchanged.
    """
    name = get_nn_module_name_from_kwargs(**kwargs)
    # Single split replaces the original's duplicated name.find("_") calls;
    # behavior is identical for both the underscore and no-underscore cases.
    return name.split("_", 1)[0]
def try_get_nn_module_compiled_mod_and_inputs(*args, **kwargs):
    """Compile the nn.Module described by a test dict and build example inputs.

    Returns a (scripted_module, example_inputs) pair, or None when the test is
    skipped (eval-mode tests, excluded module names, functional wrappers).
    """
    name = get_nn_module_name_from_kwargs(**kwargs)
    if 'desc' in kwargs and 'eval' in kwargs['desc']:
        # eval() is not supported, so skip these tests
        return
    # NOTE: the original computed test_name by hand and then immediately
    # overwrote it with get_nn_mod_test_name(); only the helper result is used.
    test_name = get_nn_mod_test_name(**kwargs)
    if test_name in EXCLUDE_SCRIPT_MODULES:
        return
    if 'constructor' in kwargs:
        nn_module = kwargs['constructor']
    else:
        nn_module = getattr(torch.nn, name)
    if "FunctionalModule" in str(nn_module):
        return
    if 'constructor_args_fn' in kwargs:
        constructor_args = kwargs['constructor_args_fn']()
    else:
        constructor_args = kwargs.get('constructor_args', ())
    # Set up inputs from tuple of sizes or constructor fn
    if 'input_fn' in kwargs:
        input = kwargs['input_fn']()
    else:
        input = (kwargs['input_size'],)
    # Extra parameters to forward()
    if 'extra_args' in kwargs:
        input = input + kwargs['extra_args']
    if 'target_size' in kwargs:
        input = input + (kwargs['target_size'],)
    elif 'target_fn' in kwargs:
        if torch.is_tensor(input):
            input = (input,)
        input = input + (kwargs['target_fn'](),)
    args_variable, kwargs_variable = create_input(input)
    f_args_variable = deepcopy(unpack_variables(args_variable))
    out_var = deepcopy(f_args_variable)
    # The original also bound f_args_variable to an unused `args` local.
    mod = create_script_module(None, nn_module, constructor_args, *f_args_variable)(*f_args_variable)
    return mod, out_var
def get_all_nn_module_tests():
    """All nn.Module test descriptions: common, new, and JIT-specific extras."""
    return [*module_tests, *new_module_tests, *additional_module_tests]
| true | true |
f72ae77f6af21241e139bcfcb73ffd4cb6993215 | 566 | py | Python | setup.py | galperins4/python-client | c8b6ea1f33801254eb560429b2c775d10fe60273 | [
"MIT"
] | 1 | 2018-06-15T11:19:23.000Z | 2018-06-15T11:19:23.000Z | setup.py | galperins4/mirror-python-client | c8b6ea1f33801254eb560429b2c775d10fe60273 | [
"MIT"
] | null | null | null | setup.py | galperins4/mirror-python-client | c8b6ea1f33801254eb560429b2c775d10fe60273 | [
"MIT"
] | null | null | null | import sys
import setuptools
# Runtime dependencies; extras/tests lists are declared empty below.
requires = [
    'requests>=2.19.1',
    'backoff>=1.6.0',
    'flatten_dict>=0.3.0'
]
# No test-only or optional dependency groups are declared yet.
tests_require = []
extras_require = {}
# Package metadata and distribution configuration.
setuptools.setup(
    name='hedera-python-client',
    description='Python API client for Hedera Hashgraph.',
    version='0.0.1',
    author='TBD',
    author_email='TBD',
    url='https://github.com/galperins4/hedera-python-client',
    # Ship every discovered package except the test tree.
    packages=setuptools.find_packages(exclude=['tests', 'tests.*']),
    install_requires=requires,
    extras_require=extras_require,
    tests_require=tests_require,
)
| 20.214286 | 68 | 0.676678 | import sys
import setuptools
# Runtime dependencies; extras/tests lists are declared empty below.
requires = [
    'requests>=2.19.1',
    'backoff>=1.6.0',
    'flatten_dict>=0.3.0'
]
# No test-only or optional dependency groups are declared yet.
tests_require = []
extras_require = {}
# Package metadata and distribution configuration.
setuptools.setup(
    name='hedera-python-client',
    description='Python API client for Hedera Hashgraph.',
    version='0.0.1',
    author='TBD',
    author_email='TBD',
    url='https://github.com/galperins4/hedera-python-client',
    # Ship every discovered package except the test tree.
    packages=setuptools.find_packages(exclude=['tests', 'tests.*']),
    install_requires=requires,
    extras_require=extras_require,
    tests_require=tests_require,
)
| true | true |
f72ae7e848291c51786e5d2a992f0c9c85761179 | 7,832 | py | Python | plugins/modules/oci_object_storage_replication_policy_facts.py | sagar2938/oci-ansible-collection | 5b8ce583a0d5d0aabf14494d61aea4649e18d1e6 | [
"Apache-2.0"
] | null | null | null | plugins/modules/oci_object_storage_replication_policy_facts.py | sagar2938/oci-ansible-collection | 5b8ce583a0d5d0aabf14494d61aea4649e18d1e6 | [
"Apache-2.0"
] | null | null | null | plugins/modules/oci_object_storage_replication_policy_facts.py | sagar2938/oci-ansible-collection | 5b8ce583a0d5d0aabf14494d61aea4649e18d1e6 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python
# Copyright (c) 2020, 2021 Oracle and/or its affiliates.
# This software is made available to you under the terms of the GPL 3.0 license or the Apache 2.0 license.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Apache License v2.0
# See LICENSE.TXT for details.
# GENERATED FILE - DO NOT EDIT - MANUAL CHANGES WILL BE OVERWRITTEN
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
"metadata_version": "1.1",
"status": ["preview"],
"supported_by": "community",
}
DOCUMENTATION = """
---
module: oci_object_storage_replication_policy_facts
short_description: Fetches details about one or multiple ReplicationPolicy resources in Oracle Cloud Infrastructure
description:
- Fetches details about one or multiple ReplicationPolicy resources in Oracle Cloud Infrastructure
- List the replication policies associated with a bucket.
- If I(replication_id) is specified, the details of a single ReplicationPolicy will be returned.
version_added: "2.9.0"
author: Oracle (@oracle)
options:
namespace_name:
description:
- The Object Storage namespace used for the request.
type: str
required: true
bucket_name:
description:
- "The name of the bucket. Avoid entering confidential information.
Example: `my-new-bucket1`"
type: str
required: true
replication_id:
description:
- The ID of the replication policy.
- Required to get a specific replication_policy.
type: str
aliases: ["id"]
extends_documentation_fragment: [ oracle.oci.oracle, oracle.oci.oracle_name_option ]
"""
EXAMPLES = """
- name: Get a specific replication_policy
oci_object_storage_replication_policy_facts:
# required
namespace_name: namespace_name_example
bucket_name: my-new-bucket1
replication_id: "ocid1.replication.oc1..xxxxxxEXAMPLExxxxxx"
- name: List replication_policies
oci_object_storage_replication_policy_facts:
# required
namespace_name: namespace_name_example
bucket_name: my-new-bucket1
"""
RETURN = """
replication_policies:
description:
- List of ReplicationPolicy resources
returned: on success
type: complex
contains:
id:
description:
- The id of the replication policy.
returned: on success
type: str
sample: "ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx"
name:
description:
- The name of the policy.
returned: on success
type: str
sample: name_example
destination_region_name:
description:
- "The destination region to replicate to, for example \\"us-ashburn-1\\"."
returned: on success
type: str
sample: destination_region_name_example
destination_bucket_name:
description:
- The bucket to replicate to in the destination region. Replication policy creation does not automatically
create a destination bucket. Create the destination bucket before creating the policy.
returned: on success
type: str
sample: destination_bucket_name_example
time_created:
description:
- The date when the replication policy was created as per L(RFC 3339,https://tools.ietf.org/html/rfc3339).
returned: on success
type: str
sample: "2013-10-20T19:20:30+01:00"
time_last_sync:
description:
- Changes made to the source bucket before this time has been replicated.
returned: on success
type: str
sample: "2013-10-20T19:20:30+01:00"
status:
description:
- The replication status of the policy. If the status is CLIENT_ERROR, once the user fixes the issue
described in the status message, the status will become ACTIVE.
returned: on success
type: str
sample: ACTIVE
status_message:
description:
- A human-readable description of the status.
returned: on success
type: str
sample: status_message_example
sample: [{
"id": "ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx",
"name": "name_example",
"destination_region_name": "destination_region_name_example",
"destination_bucket_name": "destination_bucket_name_example",
"time_created": "2013-10-20T19:20:30+01:00",
"time_last_sync": "2013-10-20T19:20:30+01:00",
"status": "ACTIVE",
"status_message": "status_message_example"
}]
"""
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.oracle.oci.plugins.module_utils import oci_common_utils
from ansible_collections.oracle.oci.plugins.module_utils.oci_resource_utils import (
OCIResourceFactsHelperBase,
get_custom_class,
)
try:
from oci.object_storage import ObjectStorageClient
HAS_OCI_PY_SDK = True
except ImportError:
HAS_OCI_PY_SDK = False
class ReplicationPolicyFactsHelperGen(OCIResourceFactsHelperBase):
    """Supported operations: get, list"""

    def get_required_params_for_get(self):
        # A single policy is addressed by namespace + bucket + policy id.
        return ["namespace_name", "bucket_name", "replication_id"]

    def get_required_params_for_list(self):
        # Listing needs only the namespace and bucket.
        return ["namespace_name", "bucket_name"]

    def get_resource(self):
        """Fetch one replication policy, retrying with backoff."""
        params = self.module.params
        return oci_common_utils.call_with_backoff(
            self.client.get_replication_policy,
            namespace_name=params.get("namespace_name"),
            bucket_name=params.get("bucket_name"),
            replication_id=params.get("replication_id"),
        )

    def list_resources(self):
        """List policies on the bucket, forwarding only supplied optional filters."""
        optional_kwargs = {
            param: self.module.params[param]
            for param in ("name",)
            if self.module.params.get(param) is not None
        }
        return oci_common_utils.list_all_resources(
            self.client.list_replication_policies,
            namespace_name=self.module.params.get("namespace_name"),
            bucket_name=self.module.params.get("bucket_name"),
            **optional_kwargs
        )
ReplicationPolicyFactsHelperCustom = get_custom_class(
"ReplicationPolicyFactsHelperCustom"
)
# MRO lists the Custom helper first so user-provided overrides take
# precedence over the generated implementation.
class ResourceFactsHelper(
    ReplicationPolicyFactsHelperCustom, ReplicationPolicyFactsHelperGen
):
    pass
def main():
    """Module entry point: parse arguments, run get or list, exit with facts."""
    spec = oci_common_utils.get_common_arg_spec()
    spec.update(
        {
            "namespace_name": {"type": "str", "required": True},
            "bucket_name": {"type": "str", "required": True},
            "replication_id": {"type": "str", "aliases": ["id"]},
            "name": {"type": "str"},
        }
    )
    module = AnsibleModule(argument_spec=spec)
    if not HAS_OCI_PY_SDK:
        module.fail_json(msg="oci python sdk required for this module.")
    helper = ResourceFactsHelper(
        module=module,
        resource_type="replication_policy",
        service_client_class=ObjectStorageClient,
        namespace="object_storage",
    )
    if helper.is_get():
        result = [helper.get()]
    elif helper.is_list():
        result = helper.list()
    else:
        # Neither mode matched the supplied parameters.
        helper.fail()
        result = []
    module.exit_json(replication_policies=result)
if __name__ == "__main__":
main()
| 32.633333 | 122 | 0.655388 |
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
"metadata_version": "1.1",
"status": ["preview"],
"supported_by": "community",
}
DOCUMENTATION = """
---
module: oci_object_storage_replication_policy_facts
short_description: Fetches details about one or multiple ReplicationPolicy resources in Oracle Cloud Infrastructure
description:
- Fetches details about one or multiple ReplicationPolicy resources in Oracle Cloud Infrastructure
- List the replication policies associated with a bucket.
- If I(replication_id) is specified, the details of a single ReplicationPolicy will be returned.
version_added: "2.9.0"
author: Oracle (@oracle)
options:
namespace_name:
description:
- The Object Storage namespace used for the request.
type: str
required: true
bucket_name:
description:
- "The name of the bucket. Avoid entering confidential information.
Example: `my-new-bucket1`"
type: str
required: true
replication_id:
description:
- The ID of the replication policy.
- Required to get a specific replication_policy.
type: str
aliases: ["id"]
extends_documentation_fragment: [ oracle.oci.oracle, oracle.oci.oracle_name_option ]
"""
EXAMPLES = """
- name: Get a specific replication_policy
oci_object_storage_replication_policy_facts:
# required
namespace_name: namespace_name_example
bucket_name: my-new-bucket1
replication_id: "ocid1.replication.oc1..xxxxxxEXAMPLExxxxxx"
- name: List replication_policies
oci_object_storage_replication_policy_facts:
# required
namespace_name: namespace_name_example
bucket_name: my-new-bucket1
"""
RETURN = """
replication_policies:
description:
- List of ReplicationPolicy resources
returned: on success
type: complex
contains:
id:
description:
- The id of the replication policy.
returned: on success
type: str
sample: "ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx"
name:
description:
- The name of the policy.
returned: on success
type: str
sample: name_example
destination_region_name:
description:
- "The destination region to replicate to, for example \\"us-ashburn-1\\"."
returned: on success
type: str
sample: destination_region_name_example
destination_bucket_name:
description:
- The bucket to replicate to in the destination region. Replication policy creation does not automatically
create a destination bucket. Create the destination bucket before creating the policy.
returned: on success
type: str
sample: destination_bucket_name_example
time_created:
description:
- The date when the replication policy was created as per L(RFC 3339,https://tools.ietf.org/html/rfc3339).
returned: on success
type: str
sample: "2013-10-20T19:20:30+01:00"
time_last_sync:
description:
- Changes made to the source bucket before this time has been replicated.
returned: on success
type: str
sample: "2013-10-20T19:20:30+01:00"
status:
description:
- The replication status of the policy. If the status is CLIENT_ERROR, once the user fixes the issue
described in the status message, the status will become ACTIVE.
returned: on success
type: str
sample: ACTIVE
status_message:
description:
- A human-readable description of the status.
returned: on success
type: str
sample: status_message_example
sample: [{
"id": "ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx",
"name": "name_example",
"destination_region_name": "destination_region_name_example",
"destination_bucket_name": "destination_bucket_name_example",
"time_created": "2013-10-20T19:20:30+01:00",
"time_last_sync": "2013-10-20T19:20:30+01:00",
"status": "ACTIVE",
"status_message": "status_message_example"
}]
"""
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.oracle.oci.plugins.module_utils import oci_common_utils
from ansible_collections.oracle.oci.plugins.module_utils.oci_resource_utils import (
OCIResourceFactsHelperBase,
get_custom_class,
)
# The OCI Python SDK is an optional runtime dependency: record whether it is
# importable so main() can fail with a clear message via module.fail_json()
# instead of crashing with an ImportError at module load time.
try:
    from oci.object_storage import ObjectStorageClient
    HAS_OCI_PY_SDK = True
except ImportError:
    HAS_OCI_PY_SDK = False
class ReplicationPolicyFactsHelperGen(OCIResourceFactsHelperBase):
    """Facts helper for Object Storage replication policies.

    Supports fetching a single policy (get, selected by ``replication_id``)
    or listing every policy of a bucket (list), delegating to the matching
    OCI SDK client calls.
    """
    def get_required_params_for_get(self):
        """Module parameters that must be supplied for a get operation."""
        return ["namespace_name", "bucket_name", "replication_id"]
    def get_required_params_for_list(self):
        """Module parameters that must be supplied for a list operation."""
        return ["namespace_name", "bucket_name"]
    def get_resource(self):
        """Fetch one replication policy, retrying with backoff on transient errors."""
        params = self.module.params
        return oci_common_utils.call_with_backoff(
            self.client.get_replication_policy,
            namespace_name=params.get("namespace_name"),
            bucket_name=params.get("bucket_name"),
            replication_id=params.get("replication_id"),
        )
    def list_resources(self):
        """List replication policies, forwarding only the optional filters the user set."""
        optional_params = ("name",)
        # Build kwargs from the optional parameters that are not None so the
        # SDK's own defaults apply for anything the user left unset.
        extra_kwargs = {
            param: self.module.params[param]
            for param in optional_params
            if self.module.params.get(param) is not None
        }
        return oci_common_utils.list_all_resources(
            self.client.list_replication_policies,
            namespace_name=self.module.params.get("namespace_name"),
            bucket_name=self.module.params.get("bucket_name"),
            **extra_kwargs
        )
# Resolve an optional user-supplied customization class; get_custom_class
# returns a pass-through base when no class with this name is registered.
ReplicationPolicyFactsHelperCustom = get_custom_class(
    "ReplicationPolicyFactsHelperCustom"
)
# Concrete helper: custom overrides (if any) take precedence over the
# generated implementation via the method resolution order.
class ResourceFactsHelper(
    ReplicationPolicyFactsHelperCustom, ReplicationPolicyFactsHelperGen
):
    pass
def main():
    """Ansible module entry point: parse arguments and return policy facts.

    Exits via ``module.exit_json`` with ``replication_policies`` set to a
    single-element list (get by id) or the full bucket listing (list).
    """
    # Common OCI arguments (auth, region, ...) plus this module's own spec.
    module_args = oci_common_utils.get_common_arg_spec()
    module_args.update(
        dict(
            namespace_name=dict(type="str", required=True),
            bucket_name=dict(type="str", required=True),
            replication_id=dict(aliases=["id"], type="str"),
            name=dict(type="str"),
        )
    )
    module = AnsibleModule(argument_spec=module_args)
    if not HAS_OCI_PY_SDK:
        module.fail_json(msg="oci python sdk required for this module.")
    resource_facts_helper = ResourceFactsHelper(
        module=module,
        resource_type="replication_policy",
        service_client_class=ObjectStorageClient,
        namespace="object_storage",
    )
    result = []
    # Dispatch on supplied parameters: a replication_id selects a single
    # policy, otherwise all policies of the bucket are listed.
    if resource_facts_helper.is_get():
        result = [resource_facts_helper.get()]
    elif resource_facts_helper.is_list():
        result = resource_facts_helper.list()
    else:
        resource_facts_helper.fail()
    module.exit_json(replication_policies=result)
if __name__ == "__main__":
    main()
| true | true |
f72ae8822be3a2b344c2b3ee4a5a5f5d65da61a6 | 3,218 | py | Python | NTP_Bot/msg_interpreter.py | PEI-I1/Nos_Tech_Problems | cf8b0b51285a912988a96cc96438f81c75fa45b7 | [
"MIT"
] | null | null | null | NTP_Bot/msg_interpreter.py | PEI-I1/Nos_Tech_Problems | cf8b0b51285a912988a96cc96438f81c75fa45b7 | [
"MIT"
] | 14 | 2020-06-05T20:19:18.000Z | 2021-09-22T18:18:23.000Z | NTP_Bot/msg_interpreter.py | PEI-I1/Nos_Tech_Problems | cf8b0b51285a912988a96cc96438f81c75fa45b7 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
import tensorflow_hub as hub
import numpy as np
import tensorflow_text
import json, re, os
from threading import Thread
from keywords import keywords
# Cache mapping each feature value (original casing) to its embedding
# vector; populated by loadModelData().
embeddings = {}
# Universal Sentence Encoder model; loaded lazily by loadModelData().
embed = None
def loadModelData():
    ''' Loads the TensorFlow sentence encoder and pre-encodes the problem data.

    Side effects: sets the module-level ``embed`` model and fills the
    module-level ``embeddings`` cache from ``input_options.json``, which is
    expected to live in the current working directory.
    '''
    global embed
    global embeddings
    # Multilingual model: the feature values appear to be Portuguese.
    embed = hub.load("https://tfhub.dev/google/universal-sentence-encoder-multilingual-large/2")
    feature_types = ['Sintoma', 'Tipificacao_Nivel_1', 'Tipificacao_Nivel_2', 'Tipificacao_Nivel_3']
    with open(os.getcwd() + '/input_options.json') as json_file:
        data = json.load(json_file)
        for typ in feature_types:
            embedProblemData(data, typ, embeddings)
def embedProblemData(data, feature_type, embeddings):
    ''' Calculates embeddings for all the values of feature_type.
    :param data: dict mapping feature-type names to lists of feature values
    :param feature_type: key in ``data`` whose values should be encoded
    :param embeddings: dict mapping feature values to their embeddings;
        updated in place
    '''
    raw_features = list(data[feature_type])
    # Encode lower-cased text, but key the cache on the original casing so
    # lookups elsewhere can use the raw feature strings.
    proc_features = [x.lower() for x in raw_features]
    # Uses the module-level ``embed`` model; loadModelData() must run first.
    feature_embeddings = embed(proc_features)["outputs"]
    for raw_value, embedding in zip(raw_features, feature_embeddings):
        embeddings[raw_value] = embedding
def replaceWithKeywords(line, keywords):
    ''' Replaces matches in line with a keyword.
    :param line: string to look for expressions in
    :param keywords: dict mapping each keyword to a list of regex patterns
    :return: list of versions of the line; the untouched original first, then
        one substituted copy per pattern that matched
    '''
    versions = [line]
    for keyword, patterns in keywords.items():
        for pattern in patterns:
            # Only emit a variant when the pattern actually occurs.
            if re.search(pattern, line):
                versions.append(re.sub(pattern, keyword, line))
    return versions
def getFeatureSuggestion(line, keywords, ss_vals, ss_embeddings, category):
    ''' Calculates the feature from ``ss_vals`` semantically closest to the
    problem described in ``line``.
    :param line: user-supplied problem description
    :param keywords: keyword/pattern dictionary used to normalise the line
    :param ss_vals: candidate feature values (the search space)
    :param ss_embeddings: embeddings of ``ss_vals``, in the same order
    :param category: typification level; when > 0 the matching ``tip_<n>``
        keyword set is applied in addition to the common one
    :return: tuple of (best matching value, similarity score)
    '''
    ll = line.lower()
    # Generate keyword-normalised variants of the input line; each variant is
    # scored independently and the best match over all variants wins.
    line_versions = replaceWithKeywords(ll, keywords['common'])
    if category>0:
        line_versions.extend(replaceWithKeywords(ll, keywords['tip_'+str(category)]))
    # NOTE(review): relies on the module-level ``embed`` model, so
    # loadModelData() must have been called first.
    sentence_embeddings = [embed(line_version)["outputs"] for line_version in line_versions]
    # Inner product of each variant against every search-space embedding;
    # presumably USE vectors are normalised so this is cosine similarity —
    # TODO confirm.
    similarity_matrices = [list(np.inner(sent_emb, ss_embeddings)[0])
                           for sent_emb in sentence_embeddings]
    max_values = [max(similarity_matrice) for similarity_matrice in similarity_matrices]
    max_abs = max(max_values)
    # Pick the variant with the overall best score, then the candidate it
    # matched; .index() keeps the first occurrence on ties.
    similarity_matrix = similarity_matrices[max_values.index(max_abs)]
    sugestao = ss_vals[similarity_matrix.index(max_abs)]
    return sugestao, max_abs
def extractProblemData(prob_desc, search_space, category):
    ''' Extracts the string in the search space that is semantically
    closest to the problem description.
    :param prob_desc: problem description
    :param search_space: search space of the possible strings; every value
        must already be present in the module-level ``embeddings`` cache
        (i.e. loadModelData() has run)
    :param category: search space category (symptom or typification level)
    :return: closest string that belongs to search_space and its confidence
    '''
    # Look up the pre-computed embeddings in search-space order.
    ss_embeddings = [embeddings[ss_val] for ss_val in search_space]
    return getFeatureSuggestion(prob_desc, keywords, search_space, ss_embeddings, category)
| 37.858824 | 112 | 0.720945 |
import tensorflow_hub as hub
import numpy as np
import tensorflow_text
import json, re, os
from threading import Thread
from keywords import keywords
embeddings = {}
embed = None
def loadModelData():
global embed
global embeddings
embed = hub.load("https://tfhub.dev/google/universal-sentence-encoder-multilingual-large/2")
feature_types = ['Sintoma', 'Tipificacao_Nivel_1', 'Tipificacao_Nivel_2', 'Tipificacao_Nivel_3']
with open(os.getcwd() + '/input_options.json') as json_file:
data = json.load(json_file)
for typ in feature_types:
embedProblemData(data, typ, embeddings)
def embedProblemData(data, feature_type, embeddings):
raw_features = [x for x in data[feature_type]]
proc_features = [x.lower() for x in raw_features]
feature_embeddings = embed(proc_features)["outputs"]
for i in range(0, len(raw_features)):
embeddings[raw_features[i]] = feature_embeddings[i]
def replaceWithKeywords(line, keywords):
keyworded_versions = [line]
for keyword, matches in keywords.items():
keyworded_versions.extend([re.sub(match, keyword, line) for match in matches if re.search(match, line)])
return keyworded_versions
def getFeatureSuggestion(line, keywords, ss_vals, ss_embeddings, category):
ll = line.lower()
line_versions = replaceWithKeywords(ll, keywords['common'])
if category>0:
line_versions.extend(replaceWithKeywords(ll, keywords['tip_'+str(category)]))
sentence_embeddings = [embed(line_version)["outputs"] for line_version in line_versions]
similarity_matrices = [list(np.inner(sent_emb, ss_embeddings)[0])
for sent_emb in sentence_embeddings]
max_values = [max(similarity_matrice) for similarity_matrice in similarity_matrices]
max_abs = max(max_values)
similarity_matrix = similarity_matrices[max_values.index(max_abs)]
sugestao = ss_vals[similarity_matrix.index(max_abs)]
return sugestao, max_abs
def extractProblemData(prob_desc, search_space, category):
ss_embeddings = [embeddings[ss_val] for ss_val in search_space]
return getFeatureSuggestion(prob_desc, keywords, search_space, ss_embeddings, category)
| true | true |
f72ae89046ac8b319ed71a62b07e68d530306531 | 3,901 | py | Python | powerwatch/analysis/old_analysis_scripts/average_time_pw_uplug.py | nklugman/PlugWatch | 4fbd2506a6808542fc5246e87d3c382761da1eaf | [
"MIT"
] | null | null | null | powerwatch/analysis/old_analysis_scripts/average_time_pw_uplug.py | nklugman/PlugWatch | 4fbd2506a6808542fc5246e87d3c382761da1eaf | [
"MIT"
] | null | null | null | powerwatch/analysis/old_analysis_scripts/average_time_pw_uplug.py | nklugman/PlugWatch | 4fbd2506a6808542fc5246e87d3c382761da1eaf | [
"MIT"
] | null | null | null | #!/usr/bin/env python
from pyspark.sql import SparkSession
from pyspark.sql.functions import col, window, asc, desc, lead, lag, udf, hour
from pyspark.sql.functions import month, year, lit, when, collect_list, struct, mean, stddev, stddev_pop
import pyspark.sql.functions as F
from pyspark.sql.window import Window
from pyspark.sql.types import FloatType, IntegerType, DateType, TimestampType
from pyspark import SparkConf
import yaml
import datetime
import os
from math import isnan
conf = SparkConf()
conf.set("spark.jars", os.getenv("HOME") + "/.ivy2/jars/org.postgresql_postgresql-42.1.1.jar")
conf.set("spark.executor.extrajavaoptions", "-Xmx15000m")
conf.set("spark.executor.memory", "15g")
conf.set("spark.driver.memory", "15g")
conf.set("spark.storage.memoryFraction", "0")
spark = SparkSession.builder \
.config(conf=conf) \
.master("local[4]") \
.appName("SAIDI Calculator") \
.getOrCreate()
# Load DB credentials from the local config file. safe_load avoids PyYAML's
# arbitrary-object construction (a plain key/value config needs no more) and
# the context manager closes the file handle, which the previous code leaked.
with open('config.yaml') as config_file:
    config = yaml.safe_load(config_file)
# Connect to the database and load the de-duplicated PowerWatch table.
pw_df = spark.read.jdbc("jdbc:postgresql://timescale.lab11.eecs.umich.edu/powerwatch", "pw_dedupe",
        properties={"user": config['user'], "password": config['password'],"driver":"org.postgresql.Driver"})
# Keep only the columns this analysis needs.
pw_df = pw_df.select(pw_df['core_id'],pw_df['time'],pw_df['is_powered'],pw_df['product_id'],pw_df['millis'],pw_df['last_unplug_millis'],pw_df['last_plug_millis'])
# Restrict to the relevant hardware revisions (product ids 7008 and 7009).
pw_df = pw_df.filter("product_id = 7008 OR product_id= 7009")
# Next: a window function compares each sample's is_powered flag against the
# previous sample of the same device to detect power-state transitions;
# everything that is not a transition is then filtered out.
def detectTransition(value1, value2):
    '''Return 1 when the two consecutive samples differ (a power-state
    transition), otherwise 0.'''
    return 0 if value1 == value2 else 1
udfDetectTransition = udf(detectTransition, IntegerType())
# Per-device window in time order so lag() compares consecutive samples of
# the same PowerWatch unit.
w = Window.partitionBy("core_id").orderBy(asc("time"))
is_powered_lag = lag("is_powered",1).over(w)
pw_df = pw_df.withColumn("transition", udfDetectTransition("is_powered",is_powered_lag))
# Keep only the rows where the power state changed.
pw_df = pw_df.filter("transition != 0")
#now count each outage (really restoration)
def countOutage(value1, value2, value3):
    '''Return 1 for a restoration edge: the previous transition had power off
    (value1) while both the next (value2) and prior (value3) states are
    powered; otherwise return 0.'''
    is_restoration = value1 == False and value2 == True and value3 == True  # noqa: E712
    return 1 if is_restoration else 0
udfCountTransition = udf(countOutage, IntegerType())
# Look one transition ahead and one behind within the same device window.
is_powered_lead = lead("is_powered",1).over(w)
is_powered_lag = lag("is_powered",1).over(w)
pw_df = pw_df.withColumn("outage", udfCountTransition("is_powered", is_powered_lead, is_powered_lag))
# Next: recover the exact outage time using the device's millisecond counters.
def timeCorrect(time, millis, unplugMillis):
    """Back-correct a report timestamp to the actual unplug moment.

    ``millis`` is the device uptime counter at report time and
    ``unplugMillis`` the counter value when the unplug was detected; the
    difference is subtracted from the report timestamp. The raw timestamp is
    returned unchanged whenever the counters are missing, NaN, zero, or
    inconsistent (unplug counter ahead of report counter, e.g. after a
    counter reset).
    """
    # Order matters: the None checks must run before isnan(), which would
    # raise a TypeError on None.
    if (
        unplugMillis == 0
        or millis is None
        or unplugMillis is None
        or isnan(millis)
        or isnan(unplugMillis)
    ):
        return time
    if unplugMillis > millis:
        return time
    return time - datetime.timedelta(microseconds=(int(millis) - int(unplugMillis)) * 1000)
udftimeCorrect = udf(timeCorrect, TimestampType())
pw_df = pw_df.withColumn("outage_time", udftimeCorrect("time","millis","last_unplug_millis"))
# Keep only the restoration rows; each one now carries a corrected outage_time.
pw_df = pw_df.filter("outage != 0")
# Sliding window of the previous/current/next outage (across all devices) so
# each row can see its temporal neighbours.
w = Window.orderBy(asc("outage_time")).rowsBetween(-1,1)
pw_df = pw_df.withColumn("outage_window_list",collect_list(F.struct("outage_time","core_id")).over(w))
def filterOutage(time, imei, timeList):
    """Seconds until the next outage recorded by a *different* device.

    :param time: this row's outage timestamp
    :param imei: this row's device id
    :param timeList: (outage_time, core_id) neighbours from the sliding window
    :return: smallest positive gap in seconds, or None when no other device
        reported a later outage in the window
    """
    future_gaps = [
        (other_time - time).total_seconds()
        for other_time, other_id in timeList
        if other_id != imei and (other_time - time).total_seconds() > 0
    ]
    # min(default=None) reproduces the original "no candidates" behaviour.
    return min(future_gaps, default=None)
udfFilterTransition = udf(filterOutage, FloatType())
pw_df = pw_df.withColumn("seconds_until_next_unplug", udfFilterTransition("outage_time","core_id","outage_window_list"))
# Print the exact (error 0.0) percentiles 0..99 of the inter-device gap.
print(pw_df.stat.approxQuantile("seconds_until_next_unplug", [x*0.01 for x in range(0,100)], 0.0))
| 40.216495 | 162 | 0.722892 |
from pyspark.sql import SparkSession
from pyspark.sql.functions import col, window, asc, desc, lead, lag, udf, hour
from pyspark.sql.functions import month, year, lit, when, collect_list, struct, mean, stddev, stddev_pop
import pyspark.sql.functions as F
from pyspark.sql.window import Window
from pyspark.sql.types import FloatType, IntegerType, DateType, TimestampType
from pyspark import SparkConf
import yaml
import datetime
import os
from math import isnan
conf = SparkConf()
conf.set("spark.jars", os.getenv("HOME") + "/.ivy2/jars/org.postgresql_postgresql-42.1.1.jar")
conf.set("spark.executor.extrajavaoptions", "-Xmx15000m")
conf.set("spark.executor.memory", "15g")
conf.set("spark.driver.memory", "15g")
conf.set("spark.storage.memoryFraction", "0")
spark = SparkSession.builder \
.config(conf=conf) \
.master("local[4]") \
.appName("SAIDI Calculator") \
.getOrCreate()
config = open('config.yaml')
config = yaml.load(config)
pw_df = spark.read.jdbc("jdbc:postgresql://timescale.lab11.eecs.umich.edu/powerwatch", "pw_dedupe",
properties={"user": config['user'], "password": config['password'],"driver":"org.postgresql.Driver"})
pw_df = pw_df.select(pw_df['core_id'],pw_df['time'],pw_df['is_powered'],pw_df['product_id'],pw_df['millis'],pw_df['last_unplug_millis'],pw_df['last_plug_millis'])
pw_df = pw_df.filter("product_id = 7008 OR product_id= 7009")
def detectTransition(value1, value2):
if(value1 == value2):
return 0
else:
return 1
udfDetectTransition = udf(detectTransition, IntegerType())
w = Window.partitionBy("core_id").orderBy(asc("time"))
is_powered_lag = lag("is_powered",1).over(w)
pw_df = pw_df.withColumn("transition", udfDetectTransition("is_powered",is_powered_lag))
pw_df = pw_df.filter("transition != 0")
def countOutage(value1, value2, value3):
if(value1 == False and value2 == True and value3 == True):
return 1
else:
return 0
udfCountTransition = udf(countOutage, IntegerType())
is_powered_lead = lead("is_powered",1).over(w)
is_powered_lag = lag("is_powered",1).over(w)
pw_df = pw_df.withColumn("outage", udfCountTransition("is_powered", is_powered_lead, is_powered_lag))
def timeCorrect(time, millis, unplugMillis):
if(unplugMillis == 0 or millis == None or unplugMillis == None or isnan(millis) or isnan(unplugMillis)):
return time
elif unplugMillis > millis:
return time
else:
return time - datetime.timedelta(microseconds = (int(millis)-int(unplugMillis))*1000)
udftimeCorrect = udf(timeCorrect, TimestampType())
pw_df = pw_df.withColumn("outage_time", udftimeCorrect("time","millis","last_unplug_millis"))
pw_df = pw_df.filter("outage != 0")
w = Window.orderBy(asc("outage_time")).rowsBetween(-1,1)
pw_df = pw_df.withColumn("outage_window_list",collect_list(F.struct("outage_time","core_id")).over(w))
def filterOutage(time, imei, timeList):
times = []
for i in timeList:
if imei != i[1]:
t = (i[0] - time).total_seconds()
if(t > 0):
times.append(t)
if len(times) > 0:
return min(times)
return None
udfFilterTransition = udf(filterOutage, FloatType())
pw_df = pw_df.withColumn("seconds_until_next_unplug", udfFilterTransition("outage_time","core_id","outage_window_list"))
print(pw_df.stat.approxQuantile("seconds_until_next_unplug", [x*0.01 for x in range(0,100)], 0.0))
| true | true |
f72ae8f83fbcedd3eb02039ff2317a6935549fc8 | 5,975 | py | Python | lightlab/equipment/visa_bases/driver_base.py | CharLee674/rvisa_lightlab | b43e36f3436b60c8c5f3088b4cb0896c5360aa4a | [
"MIT"
] | null | null | null | lightlab/equipment/visa_bases/driver_base.py | CharLee674/rvisa_lightlab | b43e36f3436b60c8c5f3088b4cb0896c5360aa4a | [
"MIT"
] | null | null | null | lightlab/equipment/visa_bases/driver_base.py | CharLee674/rvisa_lightlab | b43e36f3436b60c8c5f3088b4cb0896c5360aa4a | [
"MIT"
] | null | null | null | from abc import ABC, abstractmethod
from contextlib import contextmanager
import socket
import time
from lightlab import visalogger as logger
from rvisa.util import from_ascii_block
class InstrumentSessionBase(ABC):
    ''' Base class for Instrument sessions, to be inherited and specialized
    by VISAObject and PrologixGPIBObject.
    Subclasses supply the transport-specific implementations of the
    VISA-style primitives declared abstract below.
    '''
    @abstractmethod
    def spoll(self):
        # Serial poll: read the instrument's status byte.
        pass
    @abstractmethod
    def LLO(self):
        # GPIB "local lockout" — presumably disables front-panel control;
        # TODO confirm against the subclass implementations.
        pass
    @abstractmethod
    def LOC(self):
        # GPIB "go to local" — presumably restores front-panel control;
        # TODO confirm against the subclass implementations.
        pass
    @abstractmethod
    def open(self):
        pass
    @abstractmethod
    def close(self):
        pass
    @abstractmethod
    def write(self):
        pass
    @abstractmethod
    def query(self):
        pass
    @abstractmethod
    def wait(self):
        pass
    @abstractmethod
    def clear(self):
        pass
    @abstractmethod
    def query_raw_binary(self):
        pass
    def query_ascii_values(self, message, converter='f', separator=',',
                           container=list):
        ''' Query the instrument and parse the reply as ASCII values.
        Adapted from pyvisa's query_ascii_values; parsing is delegated to
        rvisa.util.from_ascii_block.
        '''
        block = self.query(message)
        return from_ascii_block(block, converter, separator, container)
    def instrID(self):
        r"""Returns the \*IDN? string"""
        return self.query('*IDN?')
    @property
    @abstractmethod
    def timeout(self):
        # Communication timeout; units are subclass-defined — TODO confirm.
        pass
    @timeout.setter
    @abstractmethod
    def timeout(self, newTimeout):
        pass
# ASCII line terminators; LF is the default message termination for
# TCPSocketConnection below.
CR = '\r'
LF = '\n'
class TCPSocketConnection(object):
    ''' Opens a TCP socket connection, much like netcat.
    Usage:
        s = TCPSocketConnection('socket-server.school.edu', 1111)
        s.connect() # connects to socket and leaves it open
        s.send('command') # sends the command through the socket
        r = s.recv(1000) # receives a message of up to 1000 bytes
        s.disconnect() # shuts down connection
    '''
    port = None  #: socket server's port number
    _socket = None  #: open socket object, or None when disconnected
    _termination = None  #: string appended to every outgoing message
    def __init__(self, ip_address, port, timeout=2, termination=LF):
        """
        Args:
            ip_address (str): hostname or ip address of the socket server
            port (int): socket server's port number
            timeout (float): timeout in seconds for establishing socket
                connection to socket server, default 2.
            termination (str): message terminator appended on send (default LF)
        """
        self.timeout = timeout
        self.port = port
        self.ip_address = ip_address
        self._termination = termination
    def _send(self, socket, value):
        # NOTE(review): the parameter name shadows the module-level ``socket``
        # import inside this method; harmless here but worth renaming.
        encoded_value = (('%s' % value) + self._termination).encode('ascii')
        # sendall() returns None on success, so ``sent`` is always None.
        sent = socket.sendall(encoded_value)
        return sent
    def _recv(self, socket, msg_length=2048):
        # Single recv(): may return fewer bytes than a full logical message.
        received_value = socket.recv(msg_length)
        return received_value.decode('ascii')
    def connect(self):
        ''' Connects to the socket and leaves the connection open.
        If already connected, does nothing.
        Returns:
            socket object.
        '''
        if self._socket is None:
            s = socket.socket(socket.AF_INET, socket.SOCK_STREAM, socket.IPPROTO_TCP)
            try:
                logger.debug("Attempting new connection (timeout = %s)", str(self.timeout))
                init_time = time.time()
                s.settimeout(self.timeout)
                s.connect((self.ip_address, self.port))
            except socket.error:
                # avoiding shutdown to prevent sending any data to remote socket
                # https://stackoverflow.com/questions/13109899/does-socket-become-unusable-after-connect-fails
                # s.shutdown(socket.SHUT_WR)
                s.close()
                del s
                logger.error('Cannot connect to resource.')
                raise
            else:
                final_time = time.time()
                elapsed_time_ms = 1e3 * (final_time - init_time)
                logger.debug("Connected. Time elapsed: %s msec", '{:.2f}'.format(elapsed_time_ms))
                self._socket = s
                return self._socket
        else:
            return self._socket
    def disconnect(self):
        ''' If connected, disconnects and kills the socket.'''
        if self._socket is not None:
            # Half-close the write side first so the peer sees a clean EOF.
            self._socket.shutdown(socket.SHUT_WR)
            self._socket.close()
            self._socket = None
    @contextmanager
    def connected(self):
        ''' Context manager for ensuring that the socket is connected while
        sending and receiving commands to remote socket.
        This is safe to use everywhere, even if the socket is previously connected.
        It can also be nested.
        This is useful to bundle multiple commands that you desire to be
        executed together in a single socket connection, for example:
        .. code-block:: python
            def query(self, query_msg, msg_length=2048):
                with self.connected():
                    self._send(self._socket, query_msg)
                    recv = self._recv(self._socket, msg_length)
                return recv
        '''
        # Only the outermost ``connected()`` tears the connection down, which
        # is what makes nesting safe.
        previously_connected = (self._socket is not None)
        self.connect()
        try:
            yield self
        finally:
            if not previously_connected:
                self.disconnect()
    def startup(self):
        # Hook for subclasses; intentionally unimplemented here.
        raise NotImplementedError
    def send(self, value):
        ''' Sends an ASCII string to the socket server. Auto-connects if necessary.
        Args:
            value (str): value to be sent
        '''
        with self.connected():
            sent = self._send(self._socket, value)
        return sent
    def recv(self, msg_length=2048):
        ''' Receives an ASCII string from the socket server. Auto-connects if necessary.
        Args:
            msg_length (int): maximum message length.
        '''
        with self.connected():
            recv = self._recv(self._socket, msg_length)
        return recv
    def query(self, query_msg, msg_length=2048):
        # Subclasses combine send + recv here; see the docstring example above.
        raise NotImplementedError
| 29.146341 | 110 | 0.594979 | from abc import ABC, abstractmethod
from contextlib import contextmanager
import socket
import time
from lightlab import visalogger as logger
from rvisa.util import from_ascii_block
class InstrumentSessionBase(ABC):
@abstractmethod
def spoll(self):
pass
@abstractmethod
def LLO(self):
pass
@abstractmethod
def LOC(self):
pass
@abstractmethod
def open(self):
pass
@abstractmethod
def close(self):
pass
@abstractmethod
def write(self):
pass
@abstractmethod
def query(self):
pass
@abstractmethod
def wait(self):
pass
@abstractmethod
def clear(self):
pass
@abstractmethod
def query_raw_binary(self):
pass
def query_ascii_values(self, message, converter='f', separator=',',
container=list):
block = self.query(message)
return from_ascii_block(block, converter, separator, container)
def instrID(self):
return self.query('*IDN?')
@property
@abstractmethod
def timeout(self):
pass
@timeout.setter
@abstractmethod
def timeout(self, newTimeout):
pass
CR = '\r'
LF = '\n'
class TCPSocketConnection(object):
port = None
_socket = None
_termination = None
def __init__(self, ip_address, port, timeout=2, termination=LF):
self.timeout = timeout
self.port = port
self.ip_address = ip_address
self._termination = termination
def _send(self, socket, value):
encoded_value = (('%s' % value) + self._termination).encode('ascii')
sent = socket.sendall(encoded_value)
return sent
def _recv(self, socket, msg_length=2048):
received_value = socket.recv(msg_length)
return received_value.decode('ascii')
def connect(self):
if self._socket is None:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM, socket.IPPROTO_TCP)
try:
logger.debug("Attempting new connection (timeout = %s)", str(self.timeout))
init_time = time.time()
s.settimeout(self.timeout)
s.connect((self.ip_address, self.port))
except socket.error:
# avoiding shutdown to prevent sending any data to remote socket
# https://stackoverflow.com/questions/13109899/does-socket-become-unusable-after-connect-fails
# s.shutdown(socket.SHUT_WR)
s.close()
del s
logger.error('Cannot connect to resource.')
raise
else:
final_time = time.time()
elapsed_time_ms = 1e3 * (final_time - init_time)
logger.debug("Connected. Time elapsed: %s msec", '{:.2f}'.format(elapsed_time_ms))
self._socket = s
return self._socket
else:
return self._socket
def disconnect(self):
if self._socket is not None:
self._socket.shutdown(socket.SHUT_WR)
self._socket.close()
self._socket = None
@contextmanager
def connected(self):
previously_connected = (self._socket is not None)
self.connect()
try:
yield self
finally:
if not previously_connected:
self.disconnect()
def startup(self):
raise NotImplementedError
def send(self, value):
with self.connected():
sent = self._send(self._socket, value)
return sent
def recv(self, msg_length=2048):
with self.connected():
recv = self._recv(self._socket, msg_length)
return recv
def query(self, query_msg, msg_length=2048):
raise NotImplementedError
| true | true |
f72ae943e83fcbed48d9e3f084fe924867622c96 | 2,382 | py | Python | simple_ado/user.py | Bhaskers-Blu-Org2/simple_ado | bbfb1cd5d513cce0f606188e803db3dcf667cb75 | [
"MIT"
] | null | null | null | simple_ado/user.py | Bhaskers-Blu-Org2/simple_ado | bbfb1cd5d513cce0f606188e803db3dcf667cb75 | [
"MIT"
] | null | null | null | simple_ado/user.py | Bhaskers-Blu-Org2/simple_ado | bbfb1cd5d513cce0f606188e803db3dcf667cb75 | [
"MIT"
] | 1 | 2020-07-30T13:18:16.000Z | 2020-07-30T13:18:16.000Z | #!/usr/bin/env python3
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
"""ADO user API wrapper."""
import logging
from typing import cast
from simple_ado.base_client import ADOBaseClient
from simple_ado.context import ADOContext
from simple_ado.exceptions import ADOException
from simple_ado.http_client import ADOHTTPClient
from simple_ado.types import TeamFoundationId
class ADOUserClient(ADOBaseClient):
    """Wrapper class around the ADO user APIs.
    :param context: The context information for the client
    :param http_client: The HTTP client to use for the client
    :param log: The logger to use
    """
    def __init__(
        self, context: ADOContext, http_client: ADOHTTPClient, log: logging.Logger
    ) -> None:
        super().__init__(context, http_client, log.getChild("user"))
    def get_team_foundation_id(self, identity: str) -> TeamFoundationId:
        """Fetch the unique Team Foundation GUID for a given identity.
        :param str identity: The identity to fetch for (should be email for users and display name for groups)
        :returns: The team foundation ID
        :raises ADOException: If we can't resolve the identity from the response
        """
        request_url = self.http_client.api_endpoint(is_default_collection=False, is_project=False)
        request_url += "/IdentityPicker/Identities?api-version=5.1-preview.1"
        # Search both users and groups in the IMS scope, requesting only the
        # fields needed to verify an exact match below.
        body = {
            "query": identity,
            "identityTypes": ["user", "group"],
            "operationScopes": ["ims"],
            "properties": ["DisplayName", "Mail"],
            "filterByAncestorEntityIds": [],
            "filterByEntityIds": [],
        }
        response = self.http_client.post(request_url, json_data=body)
        response_data = self.http_client.decode_response(response)
        try:
            result = response_data["results"][0]["identities"][0]
        except (KeyError, IndexError, TypeError) as exc:
            # Narrowed from a bare ``except:`` so unrelated failures (e.g.
            # KeyboardInterrupt) are not swallowed; chain the original cause.
            raise ADOException("Could not resolve identity: " + identity) from exc
        # The search is fuzzy, so only accept an exact case-insensitive match
        # on the field appropriate to the entity type.
        if result["entityType"] == "User" and identity.lower() == result["mail"].lower():
            return cast(TeamFoundationId, str(result["localId"]))
        if result["entityType"] == "Group" and identity.lower() == result["displayName"].lower():
            return cast(TeamFoundationId, str(result["localId"]))
        raise ADOException("Could not resolve identity: " + identity)
| 35.552239 | 110 | 0.670025 |
import logging
from typing import cast
from simple_ado.base_client import ADOBaseClient
from simple_ado.context import ADOContext
from simple_ado.exceptions import ADOException
from simple_ado.http_client import ADOHTTPClient
from simple_ado.types import TeamFoundationId
class ADOUserClient(ADOBaseClient):
def __init__(
self, context: ADOContext, http_client: ADOHTTPClient, log: logging.Logger
) -> None:
super().__init__(context, http_client, log.getChild("user"))
def get_team_foundation_id(self, identity: str) -> TeamFoundationId:
request_url = self.http_client.api_endpoint(is_default_collection=False, is_project=False)
request_url += "/IdentityPicker/Identities?api-version=5.1-preview.1"
body = {
"query": identity,
"identityTypes": ["user", "group"],
"operationScopes": ["ims"],
"properties": ["DisplayName", "Mail"],
"filterByAncestorEntityIds": [],
"filterByEntityIds": [],
}
response = self.http_client.post(request_url, json_data=body)
response_data = self.http_client.decode_response(response)
try:
result = response_data["results"][0]["identities"][0]
except:
raise ADOException("Could not resolve identity: " + identity)
if result["entityType"] == "User" and identity.lower() == result["mail"].lower():
return cast(TeamFoundationId, str(result["localId"]))
if result["entityType"] == "Group" and identity.lower() == result["displayName"].lower():
return cast(TeamFoundationId, str(result["localId"]))
raise ADOException("Could not resolve identity: " + identity)
| true | true |
f72aea0d6cc0cce475a487b99abf5840a183729c | 152 | py | Python | controller/apps.py | skyrred/Gestion | c38c4d1fa229f5b0e0ef2667ff98864a28dc3241 | [
"Apache-2.0"
] | 1 | 2021-11-15T14:55:36.000Z | 2021-11-15T14:55:36.000Z | controller/apps.py | skyrred/Gestion | c38c4d1fa229f5b0e0ef2667ff98864a28dc3241 | [
"Apache-2.0"
] | null | null | null | controller/apps.py | skyrred/Gestion | c38c4d1fa229f5b0e0ef2667ff98864a28dc3241 | [
"Apache-2.0"
] | null | null | null | from django.apps import AppConfig
class ControllerConfig(AppConfig):
    """Django application configuration for the ``controller`` app."""
    # Default primary-key field type for models defined in this app.
    default_auto_field = 'django.db.models.BigAutoField'
    name = 'controller'
| 21.714286 | 56 | 0.769737 | from django.apps import AppConfig
class ControllerConfig(AppConfig):
default_auto_field = 'django.db.models.BigAutoField'
name = 'controller'
| true | true |
f72aeafc60f1c50f2b50e3c33dc739dfa7cb4e8a | 1,675 | py | Python | opening2d.py | Nobu575/AppItk | 91de313115b753a6fb1ae67f53d4979580ef768b | [
"MIT"
] | null | null | null | opening2d.py | Nobu575/AppItk | 91de313115b753a6fb1ae67f53d4979580ef768b | [
"MIT"
] | null | null | null | opening2d.py | Nobu575/AppItk | 91de313115b753a6fb1ae67f53d4979580ef768b | [
"MIT"
] | null | null | null | import numpy as np
import itk
import matplotlib.pyplot as plt
# Input file name
input_filename = './jenga_g_150.png'
# Set dimension
Dimension = 2
# Read input image
itk_image = itk.imread(input_filename)
# Setting for input image (Grayscale)
InputPixelType = itk.UC
InputImageType = itk.Image[InputPixelType, Dimension]
# Loading
reader = itk.ImageFileReader[InputImageType].New()
reader.SetFileName(input_filename)
# Apply a filter: Thresholding
thresholdFilter = itk.BinaryThresholdImageFilter[InputImageType,InputImageType].New()
thresholdFilter.SetInput(reader.GetOutput())
thresholdFilter.SetUpperThreshold(200)
thresholdFilter.SetOutsideValue(1)
thresholdFilter.SetInsideValue(0)
StructuringElementType = itk.FlatStructuringElement[Dimension]
structuringElement = StructuringElementType.Ball(3)
# Apply Opening (erosion and dilation)
erodeFilter = itk.BinaryErodeImageFilter[InputImageType,InputImageType,StructuringElementType].New()
erodeFilter.SetInput(thresholdFilter.GetOutput())
erodeFilter.SetKernel(structuringElement)
erodeFilter.SetForegroundValue(1)
dilateFilter = itk.BinaryDilateImageFilter[InputImageType,InputImageType,StructuringElementType].New()
dilateFilter.SetInput(erodeFilter.GetOutput())
dilateFilter.SetKernel(structuringElement)
dilateFilter.SetForegroundValue(1)
dilateFilter.Update()
# Plot the input and output images.
plt.figure(figsize=(12, 4), dpi=50)
plt.subplot(1,3,1),plt.title("original"),plt.imshow(itk_image, cmap="gray")
plt.subplot(1,3,2),plt.title("threshold"),plt.imshow(thresholdFilter.GetOutput())
plt.subplot(1,3,3),plt.title("output"),plt.imshow(dilateFilter.GetOutput())
plt.savefig("./img/jenga_opening2d.png") | 33.5 | 102 | 0.819104 | import numpy as np
import itk
import matplotlib.pyplot as plt
input_filename = './jenga_g_150.png'
Dimension = 2
itk_image = itk.imread(input_filename)
InputPixelType = itk.UC
InputImageType = itk.Image[InputPixelType, Dimension]
reader = itk.ImageFileReader[InputImageType].New()
reader.SetFileName(input_filename)
thresholdFilter = itk.BinaryThresholdImageFilter[InputImageType,InputImageType].New()
thresholdFilter.SetInput(reader.GetOutput())
thresholdFilter.SetUpperThreshold(200)
thresholdFilter.SetOutsideValue(1)
thresholdFilter.SetInsideValue(0)
StructuringElementType = itk.FlatStructuringElement[Dimension]
structuringElement = StructuringElementType.Ball(3)
erodeFilter = itk.BinaryErodeImageFilter[InputImageType,InputImageType,StructuringElementType].New()
erodeFilter.SetInput(thresholdFilter.GetOutput())
erodeFilter.SetKernel(structuringElement)
erodeFilter.SetForegroundValue(1)
dilateFilter = itk.BinaryDilateImageFilter[InputImageType,InputImageType,StructuringElementType].New()
dilateFilter.SetInput(erodeFilter.GetOutput())
dilateFilter.SetKernel(structuringElement)
dilateFilter.SetForegroundValue(1)
dilateFilter.Update()
plt.figure(figsize=(12, 4), dpi=50)
plt.subplot(1,3,1),plt.title("original"),plt.imshow(itk_image, cmap="gray")
plt.subplot(1,3,2),plt.title("threshold"),plt.imshow(thresholdFilter.GetOutput())
plt.subplot(1,3,3),plt.title("output"),plt.imshow(dilateFilter.GetOutput())
plt.savefig("./img/jenga_opening2d.png") | true | true |
f72aed1738f6ccb62f4bf6aeaaf1bcc63b40247b | 2,587 | py | Python | update.py | boost/bucket-antivirus-function | 6eb93406e28f81a4c612f0dec29670451e0c5589 | [
"Apache-2.0"
] | null | null | null | update.py | boost/bucket-antivirus-function | 6eb93406e28f81a4c612f0dec29670451e0c5589 | [
"Apache-2.0"
] | null | null | null | update.py | boost/bucket-antivirus-function | 6eb93406e28f81a4c612f0dec29670451e0c5589 | [
"Apache-2.0"
] | 1 | 2020-07-16T12:47:24.000Z | 2020-07-16T12:47:24.000Z | # -*- coding: utf-8 -*-
# Upside Travel, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import boto3
import clamav
from common import AV_DEFINITION_PATH
from common import AV_DEFINITION_S3_BUCKET
from common import AV_DEFINITION_S3_PREFIX
from common import CLAMAVLIB_PATH
from common import get_timestamp
import shutil
def lambda_handler(event, context):
    """AWS Lambda entry point: refresh the ClamAV definition files.

    Clears the local definition directory, runs freshclam, and uploads the
    resulting definitions back to S3. `event` and `context` are the standard
    Lambda arguments and are not used.
    """
    s3 = boto3.resource("s3")
    s3_client = boto3.client("s3")
    print("Script starting at %s\n" % (get_timestamp()))
    # Start from a clean slate: remove every file and subdirectory under the
    # local definition path before freshclam repopulates it.
    for current_dir, subdir_names, file_names in os.walk(AV_DEFINITION_PATH):
        for file_name in file_names:
            os.unlink(os.path.join(current_dir, file_name))
        for subdir_name in subdir_names:
            shutil.rmtree(os.path.join(current_dir, subdir_name))
    # List which definition files S3 holds. The per-file download step is
    # intentionally disabled in this fork; definitions come from freshclam.
    to_download = clamav.update_defs_from_s3(
        s3_client, AV_DEFINITION_S3_BUCKET, AV_DEFINITION_S3_PREFIX
    )
    print("Skipping clamav definition download %s\n" % (get_timestamp()))
    clamav.update_defs_from_freshclam(AV_DEFINITION_PATH, CLAMAVLIB_PATH)
    # If main.cvd was updated (very rare) freshclam leaves a main.cud behind.
    # Remove both artifacts and rerun freshclam so the compressed main.cvd is
    # downloaded again, keeping file sizes down.
    main_cud = os.path.join(AV_DEFINITION_PATH, "main.cud")
    if os.path.exists(main_cud):
        os.remove(main_cud)
        main_cvd = os.path.join(AV_DEFINITION_PATH, "main.cvd")
        if os.path.exists(main_cvd):
            os.remove(main_cvd)
        clamav.update_defs_from_freshclam(AV_DEFINITION_PATH, CLAMAVLIB_PATH)
    # Push the refreshed definitions back to S3 for the scanning Lambdas.
    clamav.upload_defs_to_s3(
        s3_client, AV_DEFINITION_S3_BUCKET, AV_DEFINITION_S3_PREFIX, AV_DEFINITION_PATH
    )
    print("Script finished at %s\n" % get_timestamp())
import os
import boto3
import clamav
from common import AV_DEFINITION_PATH
from common import AV_DEFINITION_S3_BUCKET
from common import AV_DEFINITION_S3_PREFIX
from common import CLAMAVLIB_PATH
from common import get_timestamp
import shutil
def lambda_handler(event, context):
    """AWS Lambda entry point: refresh ClamAV definitions and upload to S3.

    `event` and `context` are the standard Lambda arguments; neither is used.
    """
    # NOTE(review): the `s3` resource is created but never used below --
    # presumably left over from a removed download step; confirm before removing.
    s3 = boto3.resource("s3")
    s3_client = boto3.client("s3")
    print("Script starting at %s\n" % (get_timestamp()))
    # Wipe everything under the local definition path so freshclam starts clean.
    for root, dirs, files in os.walk(AV_DEFINITION_PATH):
        for f in files:
            os.unlink(os.path.join(root, f))
        for d in dirs:
            shutil.rmtree(os.path.join(root, d))
    # List definition files present in S3. NOTE(review): `to_download` is
    # unused -- the actual download is intentionally skipped (see print below).
    to_download = clamav.update_defs_from_s3(
        s3_client, AV_DEFINITION_S3_BUCKET, AV_DEFINITION_S3_PREFIX
    )
    print("Skipping clamav definition download %s\n" % (get_timestamp()))
    clamav.update_defs_from_freshclam(AV_DEFINITION_PATH, CLAMAVLIB_PATH)
    # If main.cud exists, main.cvd was updated; remove both and rerun freshclam
    # so the compressed main.cvd is re-downloaded (keeps file sizes down).
    if os.path.exists(os.path.join(AV_DEFINITION_PATH, "main.cud")):
        os.remove(os.path.join(AV_DEFINITION_PATH, "main.cud"))
        if os.path.exists(os.path.join(AV_DEFINITION_PATH, "main.cvd")):
            os.remove(os.path.join(AV_DEFINITION_PATH, "main.cvd"))
        clamav.update_defs_from_freshclam(AV_DEFINITION_PATH, CLAMAVLIB_PATH)
    # Publish the refreshed definitions for the scanning functions.
    clamav.upload_defs_to_s3(
        s3_client, AV_DEFINITION_S3_BUCKET, AV_DEFINITION_S3_PREFIX, AV_DEFINITION_PATH
    )
    print("Script finished at %s\n" % get_timestamp())
f72aeddbd79707ad743350eba5e76f34ba47af5c | 15,728 | py | Python | ssd.py | tristanmooo/ssd_keras | e4be1dae086e91a81b020787f94560836379dc68 | [
"MIT"
] | null | null | null | ssd.py | tristanmooo/ssd_keras | e4be1dae086e91a81b020787f94560836379dc68 | [
"MIT"
] | null | null | null | ssd.py | tristanmooo/ssd_keras | e4be1dae086e91a81b020787f94560836379dc68 | [
"MIT"
] | null | null | null | """Keras implementation of SSD."""
import keras.backend as K
from keras.layers import Activation
from keras.layers import AtrousConvolution2D
from keras.layers import Convolution2D
from keras.layers import Dense
from keras.layers import Flatten
from keras.layers import GlobalAveragePooling2D
from keras.layers import Input
from keras.layers import MaxPooling2D
from keras.layers import merge
from keras.layers import Reshape
from keras.layers import ZeroPadding2D
from keras.models import Model
from ssd_layers import Normalize
from ssd_layers import PriorBox
def SSD300(input_shape, num_classes=21):
    """SSD300 architecture.

    # Arguments
        input_shape: Shape of the input image,
            expected to be either (300, 300, 3) or (3, 300, 300)(not tested).
        num_classes: Number of classes including background.

    # Returns
        A Keras `Model` mapping the input image to a tensor of shape
        (num_boxes, 4 + num_classes + 8): box location offsets, softmaxed
        class confidences, and prior-box coordinates + variances.

    # References
        https://arxiv.org/abs/1512.02325
    """
    net = {}
    # Fix: the original had a redundant double assignment
    # (`input_tensor = input_tensor = Input(...)`).
    input_tensor = Input(shape=input_shape)
    # PriorBox expects (width, height).
    img_size = (input_shape[1], input_shape[0])
    net['input'] = input_tensor
    # Block 1: VGG16-style backbone -- stacks of 3x3 ReLU convs + max pooling.
    net['conv1_1'] = Convolution2D(64, 3, 3,
                                   activation='relu',
                                   border_mode='same',
                                   name='conv1_1')(net['input'])
    net['conv1_2'] = Convolution2D(64, 3, 3,
                                   activation='relu',
                                   border_mode='same',
                                   name='conv1_2')(net['conv1_1'])
    net['pool1'] = MaxPooling2D((2, 2), strides=(2, 2), border_mode='same',
                                name='pool1')(net['conv1_2'])
    # Block 2
    net['conv2_1'] = Convolution2D(128, 3, 3,
                                   activation='relu',
                                   border_mode='same',
                                   name='conv2_1')(net['pool1'])
    net['conv2_2'] = Convolution2D(128, 3, 3,
                                   activation='relu',
                                   border_mode='same',
                                   name='conv2_2')(net['conv2_1'])
    net['pool2'] = MaxPooling2D((2, 2), strides=(2, 2), border_mode='same',
                                name='pool2')(net['conv2_2'])
    # Block 3
    net['conv3_1'] = Convolution2D(256, 3, 3,
                                   activation='relu',
                                   border_mode='same',
                                   name='conv3_1')(net['pool2'])
    net['conv3_2'] = Convolution2D(256, 3, 3,
                                   activation='relu',
                                   border_mode='same',
                                   name='conv3_2')(net['conv3_1'])
    net['conv3_3'] = Convolution2D(256, 3, 3,
                                   activation='relu',
                                   border_mode='same',
                                   name='conv3_3')(net['conv3_2'])
    net['pool3'] = MaxPooling2D((2, 2), strides=(2, 2), border_mode='same',
                                name='pool3')(net['conv3_3'])
    # Block 4 -- conv4_3 feeds the first prediction head below.
    net['conv4_1'] = Convolution2D(512, 3, 3,
                                   activation='relu',
                                   border_mode='same',
                                   name='conv4_1')(net['pool3'])
    net['conv4_2'] = Convolution2D(512, 3, 3,
                                   activation='relu',
                                   border_mode='same',
                                   name='conv4_2')(net['conv4_1'])
    net['conv4_3'] = Convolution2D(512, 3, 3,
                                   activation='relu',
                                   border_mode='same',
                                   name='conv4_3')(net['conv4_2'])
    net['pool4'] = MaxPooling2D((2, 2), strides=(2, 2), border_mode='same',
                                name='pool4')(net['conv4_3'])
    # Block 5 -- note pool5 is 3x3/stride 1 (keeps spatial resolution).
    net['conv5_1'] = Convolution2D(512, 3, 3,
                                   activation='relu',
                                   border_mode='same',
                                   name='conv5_1')(net['pool4'])
    net['conv5_2'] = Convolution2D(512, 3, 3,
                                   activation='relu',
                                   border_mode='same',
                                   name='conv5_2')(net['conv5_1'])
    net['conv5_3'] = Convolution2D(512, 3, 3,
                                   activation='relu',
                                   border_mode='same',
                                   name='conv5_3')(net['conv5_2'])
    net['pool5'] = MaxPooling2D((3, 3), strides=(1, 1), border_mode='same',
                                name='pool5')(net['conv5_3'])
    # FC6: dilated (atrous) convolution replaces VGG's fully-connected fc6.
    net['fc6'] = AtrousConvolution2D(1024, 3, 3, atrous_rate=(6, 6),
                                     activation='relu', border_mode='same',
                                     name='fc6')(net['pool5'])
    # x = Dropout(0.5, name='drop6')(x)
    # FC7: 1x1 convolution replacing VGG's fc7.
    net['fc7'] = Convolution2D(1024, 1, 1, activation='relu',
                               border_mode='same', name='fc7')(net['fc6'])
    # x = Dropout(0.5, name='drop7')(x)
    # Blocks 6-8: extra SSD feature layers with progressively smaller maps.
    net['conv6_1'] = Convolution2D(256, 1, 1, activation='relu',
                                   border_mode='same',
                                   name='conv6_1')(net['fc7'])
    net['conv6_2'] = Convolution2D(512, 3, 3, subsample=(2, 2),
                                   activation='relu', border_mode='same',
                                   name='conv6_2')(net['conv6_1'])
    # Block 7
    net['conv7_1'] = Convolution2D(128, 1, 1, activation='relu',
                                   border_mode='same',
                                   name='conv7_1')(net['conv6_2'])
    net['conv7_2'] = ZeroPadding2D()(net['conv7_1'])
    net['conv7_2'] = Convolution2D(256, 3, 3, subsample=(2, 2),
                                   activation='relu', border_mode='valid',
                                   name='conv7_2')(net['conv7_2'])
    # Block 8
    net['conv8_1'] = Convolution2D(128, 1, 1, activation='relu',
                                   border_mode='same',
                                   name='conv8_1')(net['conv7_2'])
    net['conv8_2'] = Convolution2D(256, 3, 3, subsample=(2, 2),
                                   activation='relu', border_mode='same',
                                   name='conv8_2')(net['conv8_1'])
    # Last pool: global average pooling gives a single feature vector.
    net['pool6'] = GlobalAveragePooling2D(name='pool6')(net['conv8_2'])
    # Prediction head on conv4_3 (L2-normalized with learnable scale 20).
    net['conv4_3_norm'] = Normalize(20, name='conv4_3_norm')(net['conv4_3'])
    num_priors = 3
    x = Convolution2D(num_priors * 4, 3, 3, border_mode='same',
                      name='conv4_3_norm_mbox_loc')(net['conv4_3_norm'])
    net['conv4_3_norm_mbox_loc'] = x
    flatten = Flatten(name='conv4_3_norm_mbox_loc_flat')
    net['conv4_3_norm_mbox_loc_flat'] = flatten(net['conv4_3_norm_mbox_loc'])
    # Layer names get a class-count suffix when not using the 21-class default,
    # so pretrained 21-class conf weights are not loaded by name.
    name = 'conv4_3_norm_mbox_conf'
    if num_classes != 21:
        name += '_{}'.format(num_classes)
    x = Convolution2D(num_priors * num_classes, 3, 3, border_mode='same',
                      name=name)(net['conv4_3_norm'])
    net['conv4_3_norm_mbox_conf'] = x
    flatten = Flatten(name='conv4_3_norm_mbox_conf_flat')
    net['conv4_3_norm_mbox_conf_flat'] = flatten(net['conv4_3_norm_mbox_conf'])
    priorbox = PriorBox(img_size, 30.0, aspect_ratios=[2],
                        variances=[0.1, 0.1, 0.2, 0.2],
                        name='conv4_3_norm_mbox_priorbox')
    net['conv4_3_norm_mbox_priorbox'] = priorbox(net['conv4_3_norm'])
    # Prediction head on fc7.
    num_priors = 6
    net['fc7_mbox_loc'] = Convolution2D(num_priors * 4, 3, 3,
                                        border_mode='same',
                                        name='fc7_mbox_loc')(net['fc7'])
    flatten = Flatten(name='fc7_mbox_loc_flat')
    net['fc7_mbox_loc_flat'] = flatten(net['fc7_mbox_loc'])
    name = 'fc7_mbox_conf'
    if num_classes != 21:
        name += '_{}'.format(num_classes)
    net['fc7_mbox_conf'] = Convolution2D(num_priors * num_classes, 3, 3,
                                         border_mode='same',
                                         name=name)(net['fc7'])
    flatten = Flatten(name='fc7_mbox_conf_flat')
    net['fc7_mbox_conf_flat'] = flatten(net['fc7_mbox_conf'])
    priorbox = PriorBox(img_size, 60.0, max_size=114.0, aspect_ratios=[2, 3],
                        variances=[0.1, 0.1, 0.2, 0.2],
                        name='fc7_mbox_priorbox')
    net['fc7_mbox_priorbox'] = priorbox(net['fc7'])
    # Prediction head on conv6_2.
    num_priors = 6
    x = Convolution2D(num_priors * 4, 3, 3, border_mode='same',
                      name='conv6_2_mbox_loc')(net['conv6_2'])
    net['conv6_2_mbox_loc'] = x
    flatten = Flatten(name='conv6_2_mbox_loc_flat')
    net['conv6_2_mbox_loc_flat'] = flatten(net['conv6_2_mbox_loc'])
    name = 'conv6_2_mbox_conf'
    if num_classes != 21:
        name += '_{}'.format(num_classes)
    x = Convolution2D(num_priors * num_classes, 3, 3, border_mode='same',
                      name=name)(net['conv6_2'])
    net['conv6_2_mbox_conf'] = x
    flatten = Flatten(name='conv6_2_mbox_conf_flat')
    net['conv6_2_mbox_conf_flat'] = flatten(net['conv6_2_mbox_conf'])
    priorbox = PriorBox(img_size, 114.0, max_size=168.0, aspect_ratios=[2, 3],
                        variances=[0.1, 0.1, 0.2, 0.2],
                        name='conv6_2_mbox_priorbox')
    net['conv6_2_mbox_priorbox'] = priorbox(net['conv6_2'])
    # Prediction head on conv7_2.
    num_priors = 6
    x = Convolution2D(num_priors * 4, 3, 3, border_mode='same',
                      name='conv7_2_mbox_loc')(net['conv7_2'])
    net['conv7_2_mbox_loc'] = x
    flatten = Flatten(name='conv7_2_mbox_loc_flat')
    net['conv7_2_mbox_loc_flat'] = flatten(net['conv7_2_mbox_loc'])
    name = 'conv7_2_mbox_conf'
    if num_classes != 21:
        name += '_{}'.format(num_classes)
    x = Convolution2D(num_priors * num_classes, 3, 3, border_mode='same',
                      name=name)(net['conv7_2'])
    net['conv7_2_mbox_conf'] = x
    flatten = Flatten(name='conv7_2_mbox_conf_flat')
    net['conv7_2_mbox_conf_flat'] = flatten(net['conv7_2_mbox_conf'])
    priorbox = PriorBox(img_size, 168.0, max_size=222.0, aspect_ratios=[2, 3],
                        variances=[0.1, 0.1, 0.2, 0.2],
                        name='conv7_2_mbox_priorbox')
    net['conv7_2_mbox_priorbox'] = priorbox(net['conv7_2'])
    # Prediction head on conv8_2.
    num_priors = 6
    x = Convolution2D(num_priors * 4, 3, 3, border_mode='same',
                      name='conv8_2_mbox_loc')(net['conv8_2'])
    net['conv8_2_mbox_loc'] = x
    flatten = Flatten(name='conv8_2_mbox_loc_flat')
    net['conv8_2_mbox_loc_flat'] = flatten(net['conv8_2_mbox_loc'])
    name = 'conv8_2_mbox_conf'
    if num_classes != 21:
        name += '_{}'.format(num_classes)
    x = Convolution2D(num_priors * num_classes, 3, 3, border_mode='same',
                      name=name)(net['conv8_2'])
    net['conv8_2_mbox_conf'] = x
    flatten = Flatten(name='conv8_2_mbox_conf_flat')
    net['conv8_2_mbox_conf_flat'] = flatten(net['conv8_2_mbox_conf'])
    priorbox = PriorBox(img_size, 222.0, max_size=276.0, aspect_ratios=[2, 3],
                        variances=[0.1, 0.1, 0.2, 0.2],
                        name='conv8_2_mbox_priorbox')
    net['conv8_2_mbox_priorbox'] = priorbox(net['conv8_2'])
    # Prediction head on pool6: the pooled vector uses Dense layers instead of
    # convolutions (output is already flat).
    num_priors = 6
    x = Dense(num_priors * 4, name='pool6_mbox_loc_flat')(net['pool6'])
    net['pool6_mbox_loc_flat'] = x
    name = 'pool6_mbox_conf_flat'
    if num_classes != 21:
        name += '_{}'.format(num_classes)
    x = Dense(num_priors * num_classes, name=name)(net['pool6'])
    net['pool6_mbox_conf_flat'] = x
    priorbox = PriorBox(img_size, 276.0, max_size=330.0, aspect_ratios=[2, 3],
                        variances=[0.1, 0.1, 0.2, 0.2],
                        name='pool6_mbox_priorbox')
    # PriorBox needs a spatial input; give the pooled vector a 1x1 map shape
    # in the backend's dimension ordering.
    if K.image_dim_ordering() == 'tf':
        target_shape = (1, 1, 256)
    else:
        target_shape = (256, 1, 1)
    net['pool6_reshaped'] = Reshape(target_shape,
                                    name='pool6_reshaped')(net['pool6'])
    net['pool6_mbox_priorbox'] = priorbox(net['pool6_reshaped'])
    # Concatenate the per-layer predictions along the box axis.
    net['mbox_loc'] = merge([net['conv4_3_norm_mbox_loc_flat'],
                             net['fc7_mbox_loc_flat'],
                             net['conv6_2_mbox_loc_flat'],
                             net['conv7_2_mbox_loc_flat'],
                             net['conv8_2_mbox_loc_flat'],
                             net['pool6_mbox_loc_flat']],
                            mode='concat', concat_axis=1, name='mbox_loc')
    net['mbox_conf'] = merge([net['conv4_3_norm_mbox_conf_flat'],
                              net['fc7_mbox_conf_flat'],
                              net['conv6_2_mbox_conf_flat'],
                              net['conv7_2_mbox_conf_flat'],
                              net['conv8_2_mbox_conf_flat'],
                              net['pool6_mbox_conf_flat']],
                             mode='concat', concat_axis=1, name='mbox_conf')
    net['mbox_priorbox'] = merge([net['conv4_3_norm_mbox_priorbox'],
                                  net['fc7_mbox_priorbox'],
                                  net['conv6_2_mbox_priorbox'],
                                  net['conv7_2_mbox_priorbox'],
                                  net['conv8_2_mbox_priorbox'],
                                  net['pool6_mbox_priorbox']],
                                 mode='concat', concat_axis=1,
                                 name='mbox_priorbox')
    # Recover the total number of default boxes from the flat loc tensor
    # (4 coordinates per box); the attribute name depends on the Keras version.
    if hasattr(net['mbox_loc'], '_keras_shape'):
        num_boxes = net['mbox_loc']._keras_shape[-1] // 4
    elif hasattr(net['mbox_loc'], 'int_shape'):
        num_boxes = K.int_shape(net['mbox_loc'])[-1] // 4
    net['mbox_loc'] = Reshape((num_boxes, 4),
                              name='mbox_loc_final')(net['mbox_loc'])
    net['mbox_conf'] = Reshape((num_boxes, num_classes),
                               name='mbox_conf_logits')(net['mbox_conf'])
    net['mbox_conf'] = Activation('softmax',
                                  name='mbox_conf_final')(net['mbox_conf'])
    net['predictions'] = merge([net['mbox_loc'],
                                net['mbox_conf'],
                                net['mbox_priorbox']],
                               mode='concat', concat_axis=2,
                               name='predictions')
    model = Model(net['input'], net['predictions'])
    return model
| 51.398693 | 127 | 0.532363 |
import keras.backend as K
from keras.layers import Activation
from keras.layers import AtrousConvolution2D
from keras.layers import Convolution2D
from keras.layers import Dense
from keras.layers import Flatten
from keras.layers import GlobalAveragePooling2D
from keras.layers import Input
from keras.layers import MaxPooling2D
from keras.layers import merge
from keras.layers import Reshape
from keras.layers import ZeroPadding2D
from keras.models import Model
from ssd_layers import Normalize
from ssd_layers import PriorBox
def SSD300(input_shape, num_classes=21):
    """SSD300 architecture (https://arxiv.org/abs/1512.02325).

    # Arguments
        input_shape: Shape of the input image, expected to be either
            (300, 300, 3) or (3, 300, 300)(not tested).
        num_classes: Number of classes including background.

    # Returns
        A Keras `Model` mapping the input image to a tensor of shape
        (num_boxes, 4 + num_classes + 8): box location offsets, softmaxed
        class confidences, and prior-box coordinates + variances.
    """
    net = {}
    # Fix: the original had a redundant double assignment
    # (`input_tensor = input_tensor = Input(...)`).
    input_tensor = Input(shape=input_shape)
    # PriorBox expects (width, height).
    img_size = (input_shape[1], input_shape[0])
    net['input'] = input_tensor
    # VGG16-style backbone: blocks 1-5 of 3x3 ReLU convs + max pooling.
    net['conv1_1'] = Convolution2D(64, 3, 3,
                                   activation='relu',
                                   border_mode='same',
                                   name='conv1_1')(net['input'])
    net['conv1_2'] = Convolution2D(64, 3, 3,
                                   activation='relu',
                                   border_mode='same',
                                   name='conv1_2')(net['conv1_1'])
    net['pool1'] = MaxPooling2D((2, 2), strides=(2, 2), border_mode='same',
                                name='pool1')(net['conv1_2'])
    net['conv2_1'] = Convolution2D(128, 3, 3,
                                   activation='relu',
                                   border_mode='same',
                                   name='conv2_1')(net['pool1'])
    net['conv2_2'] = Convolution2D(128, 3, 3,
                                   activation='relu',
                                   border_mode='same',
                                   name='conv2_2')(net['conv2_1'])
    net['pool2'] = MaxPooling2D((2, 2), strides=(2, 2), border_mode='same',
                                name='pool2')(net['conv2_2'])
    net['conv3_1'] = Convolution2D(256, 3, 3,
                                   activation='relu',
                                   border_mode='same',
                                   name='conv3_1')(net['pool2'])
    net['conv3_2'] = Convolution2D(256, 3, 3,
                                   activation='relu',
                                   border_mode='same',
                                   name='conv3_2')(net['conv3_1'])
    net['conv3_3'] = Convolution2D(256, 3, 3,
                                   activation='relu',
                                   border_mode='same',
                                   name='conv3_3')(net['conv3_2'])
    net['pool3'] = MaxPooling2D((2, 2), strides=(2, 2), border_mode='same',
                                name='pool3')(net['conv3_3'])
    # conv4_3 feeds the first prediction head below.
    net['conv4_1'] = Convolution2D(512, 3, 3,
                                   activation='relu',
                                   border_mode='same',
                                   name='conv4_1')(net['pool3'])
    net['conv4_2'] = Convolution2D(512, 3, 3,
                                   activation='relu',
                                   border_mode='same',
                                   name='conv4_2')(net['conv4_1'])
    net['conv4_3'] = Convolution2D(512, 3, 3,
                                   activation='relu',
                                   border_mode='same',
                                   name='conv4_3')(net['conv4_2'])
    net['pool4'] = MaxPooling2D((2, 2), strides=(2, 2), border_mode='same',
                                name='pool4')(net['conv4_3'])
    net['conv5_1'] = Convolution2D(512, 3, 3,
                                   activation='relu',
                                   border_mode='same',
                                   name='conv5_1')(net['pool4'])
    net['conv5_2'] = Convolution2D(512, 3, 3,
                                   activation='relu',
                                   border_mode='same',
                                   name='conv5_2')(net['conv5_1'])
    net['conv5_3'] = Convolution2D(512, 3, 3,
                                   activation='relu',
                                   border_mode='same',
                                   name='conv5_3')(net['conv5_2'])
    # pool5 is 3x3/stride 1: keeps spatial resolution for the atrous fc6.
    net['pool5'] = MaxPooling2D((3, 3), strides=(1, 1), border_mode='same',
                                name='pool5')(net['conv5_3'])
    # fc6/fc7: VGG's fully-connected layers recast as (atrous) convolutions.
    net['fc6'] = AtrousConvolution2D(1024, 3, 3, atrous_rate=(6, 6),
                                     activation='relu', border_mode='same',
                                     name='fc6')(net['pool5'])
    net['fc7'] = Convolution2D(1024, 1, 1, activation='relu',
                               border_mode='same', name='fc7')(net['fc6'])
    # Extra SSD feature layers (blocks 6-8) with progressively smaller maps.
    net['conv6_1'] = Convolution2D(256, 1, 1, activation='relu',
                                   border_mode='same',
                                   name='conv6_1')(net['fc7'])
    net['conv6_2'] = Convolution2D(512, 3, 3, subsample=(2, 2),
                                   activation='relu', border_mode='same',
                                   name='conv6_2')(net['conv6_1'])
    net['conv7_1'] = Convolution2D(128, 1, 1, activation='relu',
                                   border_mode='same',
                                   name='conv7_1')(net['conv6_2'])
    net['conv7_2'] = ZeroPadding2D()(net['conv7_1'])
    net['conv7_2'] = Convolution2D(256, 3, 3, subsample=(2, 2),
                                   activation='relu', border_mode='valid',
                                   name='conv7_2')(net['conv7_2'])
    net['conv8_1'] = Convolution2D(128, 1, 1, activation='relu',
                                   border_mode='same',
                                   name='conv8_1')(net['conv7_2'])
    net['conv8_2'] = Convolution2D(256, 3, 3, subsample=(2, 2),
                                   activation='relu', border_mode='same',
                                   name='conv8_2')(net['conv8_1'])
    # Global average pooling yields the final (flat) feature vector.
    net['pool6'] = GlobalAveragePooling2D(name='pool6')(net['conv8_2'])
    # Prediction head on conv4_3 (L2-normalized with learnable scale 20).
    net['conv4_3_norm'] = Normalize(20, name='conv4_3_norm')(net['conv4_3'])
    num_priors = 3
    x = Convolution2D(num_priors * 4, 3, 3, border_mode='same',
                      name='conv4_3_norm_mbox_loc')(net['conv4_3_norm'])
    net['conv4_3_norm_mbox_loc'] = x
    flatten = Flatten(name='conv4_3_norm_mbox_loc_flat')
    net['conv4_3_norm_mbox_loc_flat'] = flatten(net['conv4_3_norm_mbox_loc'])
    # Conf layer names get a class-count suffix when not using the 21-class
    # default, so pretrained 21-class conf weights are not loaded by name.
    name = 'conv4_3_norm_mbox_conf'
    if num_classes != 21:
        name += '_{}'.format(num_classes)
    x = Convolution2D(num_priors * num_classes, 3, 3, border_mode='same',
                      name=name)(net['conv4_3_norm'])
    net['conv4_3_norm_mbox_conf'] = x
    flatten = Flatten(name='conv4_3_norm_mbox_conf_flat')
    net['conv4_3_norm_mbox_conf_flat'] = flatten(net['conv4_3_norm_mbox_conf'])
    priorbox = PriorBox(img_size, 30.0, aspect_ratios=[2],
                        variances=[0.1, 0.1, 0.2, 0.2],
                        name='conv4_3_norm_mbox_priorbox')
    net['conv4_3_norm_mbox_priorbox'] = priorbox(net['conv4_3_norm'])
    # Prediction head on fc7.
    num_priors = 6
    net['fc7_mbox_loc'] = Convolution2D(num_priors * 4, 3, 3,
                                        border_mode='same',
                                        name='fc7_mbox_loc')(net['fc7'])
    flatten = Flatten(name='fc7_mbox_loc_flat')
    net['fc7_mbox_loc_flat'] = flatten(net['fc7_mbox_loc'])
    name = 'fc7_mbox_conf'
    if num_classes != 21:
        name += '_{}'.format(num_classes)
    net['fc7_mbox_conf'] = Convolution2D(num_priors * num_classes, 3, 3,
                                         border_mode='same',
                                         name=name)(net['fc7'])
    flatten = Flatten(name='fc7_mbox_conf_flat')
    net['fc7_mbox_conf_flat'] = flatten(net['fc7_mbox_conf'])
    priorbox = PriorBox(img_size, 60.0, max_size=114.0, aspect_ratios=[2, 3],
                        variances=[0.1, 0.1, 0.2, 0.2],
                        name='fc7_mbox_priorbox')
    net['fc7_mbox_priorbox'] = priorbox(net['fc7'])
    # Prediction head on conv6_2.
    num_priors = 6
    x = Convolution2D(num_priors * 4, 3, 3, border_mode='same',
                      name='conv6_2_mbox_loc')(net['conv6_2'])
    net['conv6_2_mbox_loc'] = x
    flatten = Flatten(name='conv6_2_mbox_loc_flat')
    net['conv6_2_mbox_loc_flat'] = flatten(net['conv6_2_mbox_loc'])
    name = 'conv6_2_mbox_conf'
    if num_classes != 21:
        name += '_{}'.format(num_classes)
    x = Convolution2D(num_priors * num_classes, 3, 3, border_mode='same',
                      name=name)(net['conv6_2'])
    net['conv6_2_mbox_conf'] = x
    flatten = Flatten(name='conv6_2_mbox_conf_flat')
    net['conv6_2_mbox_conf_flat'] = flatten(net['conv6_2_mbox_conf'])
    priorbox = PriorBox(img_size, 114.0, max_size=168.0, aspect_ratios=[2, 3],
                        variances=[0.1, 0.1, 0.2, 0.2],
                        name='conv6_2_mbox_priorbox')
    net['conv6_2_mbox_priorbox'] = priorbox(net['conv6_2'])
    # Prediction head on conv7_2.
    num_priors = 6
    x = Convolution2D(num_priors * 4, 3, 3, border_mode='same',
                      name='conv7_2_mbox_loc')(net['conv7_2'])
    net['conv7_2_mbox_loc'] = x
    flatten = Flatten(name='conv7_2_mbox_loc_flat')
    net['conv7_2_mbox_loc_flat'] = flatten(net['conv7_2_mbox_loc'])
    name = 'conv7_2_mbox_conf'
    if num_classes != 21:
        name += '_{}'.format(num_classes)
    x = Convolution2D(num_priors * num_classes, 3, 3, border_mode='same',
                      name=name)(net['conv7_2'])
    net['conv7_2_mbox_conf'] = x
    flatten = Flatten(name='conv7_2_mbox_conf_flat')
    net['conv7_2_mbox_conf_flat'] = flatten(net['conv7_2_mbox_conf'])
    priorbox = PriorBox(img_size, 168.0, max_size=222.0, aspect_ratios=[2, 3],
                        variances=[0.1, 0.1, 0.2, 0.2],
                        name='conv7_2_mbox_priorbox')
    net['conv7_2_mbox_priorbox'] = priorbox(net['conv7_2'])
    # Prediction head on conv8_2.
    num_priors = 6
    x = Convolution2D(num_priors * 4, 3, 3, border_mode='same',
                      name='conv8_2_mbox_loc')(net['conv8_2'])
    net['conv8_2_mbox_loc'] = x
    flatten = Flatten(name='conv8_2_mbox_loc_flat')
    net['conv8_2_mbox_loc_flat'] = flatten(net['conv8_2_mbox_loc'])
    name = 'conv8_2_mbox_conf'
    if num_classes != 21:
        name += '_{}'.format(num_classes)
    x = Convolution2D(num_priors * num_classes, 3, 3, border_mode='same',
                      name=name)(net['conv8_2'])
    net['conv8_2_mbox_conf'] = x
    flatten = Flatten(name='conv8_2_mbox_conf_flat')
    net['conv8_2_mbox_conf_flat'] = flatten(net['conv8_2_mbox_conf'])
    priorbox = PriorBox(img_size, 222.0, max_size=276.0, aspect_ratios=[2, 3],
                        variances=[0.1, 0.1, 0.2, 0.2],
                        name='conv8_2_mbox_priorbox')
    net['conv8_2_mbox_priorbox'] = priorbox(net['conv8_2'])
    # Prediction head on pool6: the pooled vector uses Dense layers instead
    # of convolutions (output is already flat).
    num_priors = 6
    x = Dense(num_priors * 4, name='pool6_mbox_loc_flat')(net['pool6'])
    net['pool6_mbox_loc_flat'] = x
    name = 'pool6_mbox_conf_flat'
    if num_classes != 21:
        name += '_{}'.format(num_classes)
    x = Dense(num_priors * num_classes, name=name)(net['pool6'])
    net['pool6_mbox_conf_flat'] = x
    priorbox = PriorBox(img_size, 276.0, max_size=330.0, aspect_ratios=[2, 3],
                        variances=[0.1, 0.1, 0.2, 0.2],
                        name='pool6_mbox_priorbox')
    # PriorBox needs a spatial input; give the pooled vector a 1x1 map shape
    # in the backend's dimension ordering.
    if K.image_dim_ordering() == 'tf':
        target_shape = (1, 1, 256)
    else:
        target_shape = (256, 1, 1)
    net['pool6_reshaped'] = Reshape(target_shape,
                                    name='pool6_reshaped')(net['pool6'])
    net['pool6_mbox_priorbox'] = priorbox(net['pool6_reshaped'])
    # Concatenate the per-layer predictions along the box axis.
    net['mbox_loc'] = merge([net['conv4_3_norm_mbox_loc_flat'],
                             net['fc7_mbox_loc_flat'],
                             net['conv6_2_mbox_loc_flat'],
                             net['conv7_2_mbox_loc_flat'],
                             net['conv8_2_mbox_loc_flat'],
                             net['pool6_mbox_loc_flat']],
                            mode='concat', concat_axis=1, name='mbox_loc')
    net['mbox_conf'] = merge([net['conv4_3_norm_mbox_conf_flat'],
                              net['fc7_mbox_conf_flat'],
                              net['conv6_2_mbox_conf_flat'],
                              net['conv7_2_mbox_conf_flat'],
                              net['conv8_2_mbox_conf_flat'],
                              net['pool6_mbox_conf_flat']],
                             mode='concat', concat_axis=1, name='mbox_conf')
    net['mbox_priorbox'] = merge([net['conv4_3_norm_mbox_priorbox'],
                                  net['fc7_mbox_priorbox'],
                                  net['conv6_2_mbox_priorbox'],
                                  net['conv7_2_mbox_priorbox'],
                                  net['conv8_2_mbox_priorbox'],
                                  net['pool6_mbox_priorbox']],
                                 mode='concat', concat_axis=1,
                                 name='mbox_priorbox')
    # Recover the total number of default boxes from the flat loc tensor
    # (4 coordinates per box); the attribute name depends on the Keras version.
    if hasattr(net['mbox_loc'], '_keras_shape'):
        num_boxes = net['mbox_loc']._keras_shape[-1] // 4
    elif hasattr(net['mbox_loc'], 'int_shape'):
        num_boxes = K.int_shape(net['mbox_loc'])[-1] // 4
    net['mbox_loc'] = Reshape((num_boxes, 4),
                              name='mbox_loc_final')(net['mbox_loc'])
    net['mbox_conf'] = Reshape((num_boxes, num_classes),
                               name='mbox_conf_logits')(net['mbox_conf'])
    net['mbox_conf'] = Activation('softmax',
                                  name='mbox_conf_final')(net['mbox_conf'])
    net['predictions'] = merge([net['mbox_loc'],
                                net['mbox_conf'],
                                net['mbox_priorbox']],
                               mode='concat', concat_axis=2,
                               name='predictions')
    model = Model(net['input'], net['predictions'])
    return model
| true | true |
f72aedf20d5a4dd130832d24767e5a8c5c2c559a | 850 | py | Python | test/record/parser/test_response_whois_nic_ve_property_nameservers_missing.py | huyphan/pyyawhois | 77fb2f73a9c67989f1d41d98f37037406a69d136 | [
"MIT"
] | null | null | null | test/record/parser/test_response_whois_nic_ve_property_nameservers_missing.py | huyphan/pyyawhois | 77fb2f73a9c67989f1d41d98f37037406a69d136 | [
"MIT"
] | null | null | null | test/record/parser/test_response_whois_nic_ve_property_nameservers_missing.py | huyphan/pyyawhois | 77fb2f73a9c67989f1d41d98f37037406a69d136 | [
"MIT"
] | null | null | null |
# This file is autogenerated. Do not edit it manually.
# If you want change the content of this file, edit
#
# spec/fixtures/responses/whois.nic.ve/property_nameservers_missing
#
# and regenerate the tests with the following script
#
# $ scripts/generate_tests.py
#
from nose.tools import *
from dateutil.parser import parse as time_parse
import yawhois
class TestWhoisNicVePropertyNameserversMissing(object):
def setUp(self):
fixture_path = "spec/fixtures/responses/whois.nic.ve/property_nameservers_missing.txt"
host = "whois.nic.ve"
part = yawhois.record.Part(open(fixture_path, "r").read(), host)
self.record = yawhois.record.Record(None, [part])
def test_nameservers(self):
eq_(self.record.nameservers.__class__.__name__, 'list')
eq_(self.record.nameservers, [])
| 31.481481 | 94 | 0.711765 |
from nose.tools import *
from dateutil.parser import parse as time_parse
import yawhois
class TestWhoisNicVePropertyNameserversMissing(object):
def setUp(self):
fixture_path = "spec/fixtures/responses/whois.nic.ve/property_nameservers_missing.txt"
host = "whois.nic.ve"
part = yawhois.record.Part(open(fixture_path, "r").read(), host)
self.record = yawhois.record.Record(None, [part])
def test_nameservers(self):
eq_(self.record.nameservers.__class__.__name__, 'list')
eq_(self.record.nameservers, [])
| true | true |
f72aee673e41aaa5710037678b883636f5df28d7 | 7,947 | py | Python | src/python/pants/backend/python/lint/pylint/rules.py | danxmoran/pants | 7fafd7d789747c9e6a266847a0ccce92c3fa0754 | [
"Apache-2.0"
] | null | null | null | src/python/pants/backend/python/lint/pylint/rules.py | danxmoran/pants | 7fafd7d789747c9e6a266847a0ccce92c3fa0754 | [
"Apache-2.0"
] | 22 | 2022-01-27T09:59:50.000Z | 2022-03-30T07:06:49.000Z | src/python/pants/backend/python/lint/pylint/rules.py | danxmoran/pants | 7fafd7d789747c9e6a266847a0ccce92c3fa0754 | [
"Apache-2.0"
] | null | null | null | # Copyright 2020 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import annotations
from dataclasses import dataclass
from typing import Tuple
from pants.backend.python.lint.pylint.subsystem import (
Pylint,
PylintFieldSet,
PylintFirstPartyPlugins,
)
from pants.backend.python.subsystems.setup import PythonSetup
from pants.backend.python.util_rules import partition, pex_from_targets
from pants.backend.python.util_rules.interpreter_constraints import InterpreterConstraints
from pants.backend.python.util_rules.pex import (
Pex,
PexRequest,
VenvPex,
VenvPexProcess,
VenvPexRequest,
)
from pants.backend.python.util_rules.pex_from_targets import RequirementsPexRequest
from pants.backend.python.util_rules.python_sources import (
PythonSourceFiles,
PythonSourceFilesRequest,
)
from pants.core.goals.lint import REPORT_DIR, LintResult, LintResults, LintTargetsRequest
from pants.core.util_rules.config_files import ConfigFiles, ConfigFilesRequest
from pants.core.util_rules.source_files import SourceFiles, SourceFilesRequest
from pants.engine.collection import Collection
from pants.engine.fs import CreateDigest, Digest, Directory, MergeDigests, RemovePrefix
from pants.engine.process import FallibleProcessResult
from pants.engine.rules import Get, MultiGet, collect_rules, rule
from pants.engine.target import CoarsenedTargets, Target
from pants.engine.unions import UnionRule
from pants.util.logging import LogLevel
from pants.util.ordered_set import FrozenOrderedSet
from pants.util.strutil import pluralize
@dataclass(frozen=True)
class PylintPartition:
    """One batch of Pylint work: the roots to lint plus their closure."""

    root_field_sets: FrozenOrderedSet[PylintFieldSet]
    closure: FrozenOrderedSet[Target]
    resolve_description: str | None
    interpreter_constraints: InterpreterConstraints

    def description(self) -> str:
        """Human-readable partition label used in `lint` result output."""
        constraints_repr = str(sorted(str(ic) for ic in self.interpreter_constraints))
        if self.resolve_description:
            return f"{self.resolve_description}, {constraints_repr}"
        return constraints_repr
class PylintPartitions(Collection[PylintPartition]):
    """Engine-compatible ordered collection of `PylintPartition`s."""
    pass
class PylintRequest(LintTargetsRequest):
    """Lint request type registering Pylint with the `lint` goal."""

    # The field set Pylint operates on.
    field_set_type = PylintFieldSet
    # Linter name shown in `lint` output; mirrors the subsystem's options scope.
    name = Pylint.options_scope
def generate_argv(source_files: SourceFiles, pylint: Pylint) -> Tuple[str, ...]:
    """Assemble the Pylint command line for one partition's source files."""
    # Only pass `--rcfile` when the user actually configured one.
    config_args = [] if pylint.config is None else [f"--rcfile={pylint.config}"]
    return (
        *config_args,
        # Literal placeholder (not an f-string), left for the process
        # execution layer to substitute with the granted concurrency.
        "--jobs={pants_concurrency}",
        *pylint.args,
        *source_files.files,
    )
@rule(level=LogLevel.DEBUG)
async def pylint_lint_partition(
    partition: PylintPartition, pylint: Pylint, first_party_plugins: PylintFirstPartyPlugins
) -> LintResult:
    """Run Pylint over one partition of targets and collect its result.

    Builds three PEXes (the targets' requirements, Pylint itself + plugin
    requirements, and a venv PEX combining them), prepares sources, runs
    Pylint as a sandboxed process, and extracts the report directory.
    """
    requirements_pex_get = Get(
        Pex,
        RequirementsPexRequest(
            (fs.address for fs in partition.root_field_sets),
            # NB: These constraints must be identical to the other PEXes. Otherwise, we risk using
            # a different version for the requirements than the other two PEXes, which can result
            # in a PEX runtime error about missing dependencies.
            hardcoded_interpreter_constraints=partition.interpreter_constraints,
        ),
    )
    pylint_pex_get = Get(
        Pex,
        PexRequest,
        pylint.to_pex_request(
            interpreter_constraints=partition.interpreter_constraints,
            extra_requirements=first_party_plugins.requirement_strings,
        ),
    )
    # Sources for the whole closure (deps included) vs. just the roots being linted.
    prepare_python_sources_get = Get(PythonSourceFiles, PythonSourceFilesRequest(partition.closure))
    field_set_sources_get = Get(
        SourceFiles, SourceFilesRequest(fs.source for fs in partition.root_field_sets)
    )
    # Ensure that the empty report dir exists.
    report_directory_digest_get = Get(Digest, CreateDigest([Directory(REPORT_DIR)]))
    (
        pylint_pex,
        requirements_pex,
        prepared_python_sources,
        field_set_sources,
        report_directory,
    ) = await MultiGet(
        pylint_pex_get,
        requirements_pex_get,
        prepare_python_sources_get,
        field_set_sources_get,
        report_directory_digest_get,
    )
    pylint_runner_pex, config_files = await MultiGet(
        Get(
            VenvPex,
            VenvPexRequest(
                PexRequest(
                    output_filename="pylint_runner.pex",
                    interpreter_constraints=partition.interpreter_constraints,
                    main=pylint.main,
                    internal_only=True,
                    pex_path=[pylint_pex, requirements_pex],
                ),
                # TODO(John Sirois): Remove this (change to the default of symlinks) when we can
                # upgrade to a version of Pylint with https://github.com/PyCQA/pylint/issues/1470
                # resolved.
                site_packages_copies=True,
            ),
        ),
        Get(
            ConfigFiles, ConfigFilesRequest, pylint.config_request(field_set_sources.snapshot.dirs)
        ),
    )
    # Pylint must import the linted code, so put all source roots (and the
    # first-party plugin prefix, if any) on PEX_EXTRA_SYS_PATH.
    pythonpath = list(prepared_python_sources.source_roots)
    if first_party_plugins:
        pythonpath.append(first_party_plugins.PREFIX)
    input_digest = await Get(
        Digest,
        MergeDigests(
            (
                config_files.snapshot.digest,
                first_party_plugins.sources_digest,
                prepared_python_sources.source_files.snapshot.digest,
                report_directory,
            )
        ),
    )
    result = await Get(
        FallibleProcessResult,
        VenvPexProcess(
            pylint_runner_pex,
            argv=generate_argv(field_set_sources, pylint),
            input_digest=input_digest,
            output_directories=(REPORT_DIR,),
            extra_env={"PEX_EXTRA_SYS_PATH": ":".join(pythonpath)},
            concurrency_available=len(partition.root_field_sets),
            description=f"Run Pylint on {pluralize(len(partition.root_field_sets), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    # Strip the report prefix so callers see report files at the top level.
    report = await Get(Digest, RemovePrefix(result.output_digest, REPORT_DIR))
    return LintResult.from_fallible_process_result(
        result,
        partition_description=partition.description(),
        report=report,
    )
@rule(desc="Determine if necessary to partition Pylint input", level=LogLevel.DEBUG)
async def pylint_determine_partitions(
    request: PylintRequest, python_setup: PythonSetup, first_party_plugins: PylintFirstPartyPlugins
) -> PylintPartitions:
    """Group the requested field sets into one partition per (resolve, interpreter constraints)."""
    coarsened_by_key = await partition._by_interpreter_constraints_and_resolve(
        request.field_sets, python_setup
    )
    # First-party plugins must be importable by every partition, so their
    # interpreter constraints are merged into each partition's constraints.
    plugin_ics = InterpreterConstraints.create_from_compatibility_fields(
        first_party_plugins.interpreter_constraints_fields, python_setup
    )
    multiple_resolves = len(python_setup.resolves) > 1
    partitions = []
    for (resolve, ics), (roots, root_cts) in sorted(coarsened_by_key.items()):
        partitions.append(
            PylintPartition(
                FrozenOrderedSet(roots),
                FrozenOrderedSet(CoarsenedTargets(root_cts).closure()),
                # Only label the resolve when the repo actually uses several.
                resolve if multiple_resolves else None,
                InterpreterConstraints.merge((ics, plugin_ics)),
            )
        )
    return PylintPartitions(partitions)
@rule(desc="Lint using Pylint", level=LogLevel.DEBUG)
async def pylint_lint(request: PylintRequest, pylint: Pylint) -> LintResults:
    """Run Pylint over every partition of the requested targets, unless skipped."""
    if pylint.skip:
        return LintResults([], linter_name=request.name)
    partitions = await Get(PylintPartitions, PylintRequest, request)
    # Each partition lints independently and in parallel.
    result_gets = [Get(LintResult, PylintPartition, subset) for subset in partitions]
    results = await MultiGet(result_gets)
    return LintResults(results, linter_name=request.name)
def rules():
    """Register this backend's rules plus the lint union membership."""
    union_memberships = [UnionRule(LintTargetsRequest, PylintRequest)]
    return [*collect_rules(), *union_memberships, *pex_from_targets.rules()]
| 35.959276 | 100 | 0.707059 |
from __future__ import annotations
from dataclasses import dataclass
from typing import Tuple
from pants.backend.python.lint.pylint.subsystem import (
Pylint,
PylintFieldSet,
PylintFirstPartyPlugins,
)
from pants.backend.python.subsystems.setup import PythonSetup
from pants.backend.python.util_rules import partition, pex_from_targets
from pants.backend.python.util_rules.interpreter_constraints import InterpreterConstraints
from pants.backend.python.util_rules.pex import (
Pex,
PexRequest,
VenvPex,
VenvPexProcess,
VenvPexRequest,
)
from pants.backend.python.util_rules.pex_from_targets import RequirementsPexRequest
from pants.backend.python.util_rules.python_sources import (
PythonSourceFiles,
PythonSourceFilesRequest,
)
from pants.core.goals.lint import REPORT_DIR, LintResult, LintResults, LintTargetsRequest
from pants.core.util_rules.config_files import ConfigFiles, ConfigFilesRequest
from pants.core.util_rules.source_files import SourceFiles, SourceFilesRequest
from pants.engine.collection import Collection
from pants.engine.fs import CreateDigest, Digest, Directory, MergeDigests, RemovePrefix
from pants.engine.process import FallibleProcessResult
from pants.engine.rules import Get, MultiGet, collect_rules, rule
from pants.engine.target import CoarsenedTargets, Target
from pants.engine.unions import UnionRule
from pants.util.logging import LogLevel
from pants.util.ordered_set import FrozenOrderedSet
from pants.util.strutil import pluralize
@dataclass(frozen=True)
class PylintPartition:
    """A batch of targets that can be linted by a single Pylint process.

    All members share the same resolve and interpreter constraints.
    """
    # Field sets the user directly requested to lint.
    root_field_sets: FrozenOrderedSet[PylintFieldSet]
    # Transitive closure of the roots, so that imports resolve during linting.
    closure: FrozenOrderedSet[Target]
    # Human-readable resolve name, or None when only one resolve exists.
    resolve_description: str | None
    interpreter_constraints: InterpreterConstraints
    def description(self) -> str:
        """Return a stable, human-readable label for this partition."""
        ics = str(sorted(str(c) for c in self.interpreter_constraints))
        return f"{self.resolve_description}, {ics}" if self.resolve_description else ics
class PylintPartitions(Collection[PylintPartition]):
    """All Pylint partitions for a single lint request; one result per entry."""
    pass
class PylintRequest(LintTargetsRequest):
    """Lint request selecting all targets compatible with Pylint."""
    field_set_type = PylintFieldSet
    name = Pylint.options_scope
def generate_argv(source_files: SourceFiles, pylint: Pylint) -> Tuple[str, ...]:
    """Build the Pylint command line: rcfile, jobs placeholder, user args, then files."""
    rcfile_args = () if pylint.config is None else (f"--rcfile={pylint.config}",)
    # "{pants_concurrency}" is a literal placeholder string substituted by the
    # engine at execution time, not an f-string.
    return (
        *rcfile_args,
        "--jobs={pants_concurrency}",
        *pylint.args,
        *source_files.files,
    )
@rule(level=LogLevel.DEBUG)
async def pylint_lint_partition(
    partition: PylintPartition, pylint: Pylint, first_party_plugins: PylintFirstPartyPlugins
) -> LintResult:
    """Run Pylint on one partition of targets and collect its report."""
    # Third-party requirements of the partition's closure.
    requirements_pex_get = Get(
        Pex,
        RequirementsPexRequest(
            (fs.address for fs in partition.root_field_sets),
            hardcoded_interpreter_constraints=partition.interpreter_constraints,
        ),
    )
    # Pylint itself, plus any first-party plugin requirements.
    pylint_pex_get = Get(
        Pex,
        PexRequest,
        pylint.to_pex_request(
            interpreter_constraints=partition.interpreter_constraints,
            extra_requirements=first_party_plugins.requirement_strings,
        ),
    )
    prepare_python_sources_get = Get(PythonSourceFiles, PythonSourceFilesRequest(partition.closure))
    field_set_sources_get = Get(
        SourceFiles, SourceFilesRequest(fs.source for fs in partition.root_field_sets)
    )
    # Ensure that the empty report dir exists.
    report_directory_digest_get = Get(Digest, CreateDigest([Directory(REPORT_DIR)]))
    (
        pylint_pex,
        requirements_pex,
        prepared_python_sources,
        field_set_sources,
        report_directory,
    ) = await MultiGet(
        pylint_pex_get,
        requirements_pex_get,
        prepare_python_sources_get,
        field_set_sources_get,
        report_directory_digest_get,
    )
    pylint_runner_pex, config_files = await MultiGet(
        Get(
            VenvPex,
            VenvPexRequest(
                PexRequest(
                    output_filename="pylint_runner.pex",
                    interpreter_constraints=partition.interpreter_constraints,
                    main=pylint.main,
                    internal_only=True,
                    pex_path=[pylint_pex, requirements_pex],
                ),
                # TODO(John Sirois): Remove this (change to the default of symlinks) when we can
                #  upgrade to a version of Pylint with https://github.com/PyCQA/pylint/issues/1470
                #  resolved.
                site_packages_copies=True,
            ),
        ),
        Get(
            ConfigFiles, ConfigFilesRequest, pylint.config_request(field_set_sources.snapshot.dirs)
        ),
    )
    # First-party sources (and plugins, if any) must be importable by Pylint.
    pythonpath = list(prepared_python_sources.source_roots)
    if first_party_plugins:
        pythonpath.append(first_party_plugins.PREFIX)
    input_digest = await Get(
        Digest,
        MergeDigests(
            (
                config_files.snapshot.digest,
                first_party_plugins.sources_digest,
                prepared_python_sources.source_files.snapshot.digest,
                report_directory,
            )
        ),
    )
    result = await Get(
        FallibleProcessResult,
        VenvPexProcess(
            pylint_runner_pex,
            argv=generate_argv(field_set_sources, pylint),
            input_digest=input_digest,
            output_directories=(REPORT_DIR,),
            extra_env={"PEX_EXTRA_SYS_PATH": ":".join(pythonpath)},
            concurrency_available=len(partition.root_field_sets),
            description=f"Run Pylint on {pluralize(len(partition.root_field_sets), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    # Strip the report dir prefix so the report lands at the workspace root.
    report = await Get(Digest, RemovePrefix(result.output_digest, REPORT_DIR))
    return LintResult.from_fallible_process_result(
        result,
        partition_description=partition.description(),
        report=report,
    )
@rule(desc="Determine if necessary to partition Pylint input", level=LogLevel.DEBUG)
async def pylint_determine_partitions(
    request: PylintRequest, python_setup: PythonSetup, first_party_plugins: PylintFirstPartyPlugins
) -> PylintPartitions:
    """Split the request into one partition per (resolve, interpreter constraints)."""
    resolve_and_interpreter_constraints_to_coarsened_targets = (
        await partition._by_interpreter_constraints_and_resolve(request.field_sets, python_setup)
    )
    # Plugins are loaded into every Pylint run, so their constraints apply everywhere.
    first_party_ics = InterpreterConstraints.create_from_compatibility_fields(
        first_party_plugins.interpreter_constraints_fields, python_setup
    )
    return PylintPartitions(
        PylintPartition(
            FrozenOrderedSet(roots),
            FrozenOrderedSet(CoarsenedTargets(root_cts).closure()),
            # Only label the resolve when the repo defines more than one.
            resolve if len(python_setup.resolves) > 1 else None,
            InterpreterConstraints.merge((interpreter_constraints, first_party_ics)),
        )
        for (resolve, interpreter_constraints), (roots, root_cts) in sorted(
            resolve_and_interpreter_constraints_to_coarsened_targets.items()
        )
    )
@rule(desc="Lint using Pylint", level=LogLevel.DEBUG)
async def pylint_lint(request: PylintRequest, pylint: Pylint) -> LintResults:
    """Run Pylint over all partitions, honoring the ``skip`` option."""
    if pylint.skip:
        return LintResults([], linter_name=request.name)
    partitions = await Get(PylintPartitions, PylintRequest, request)
    # Lint all partitions concurrently.
    partitioned_results = await MultiGet(
        Get(LintResult, PylintPartition, partition) for partition in partitions
    )
    return LintResults(partitioned_results, linter_name=request.name)
def rules():
    """Return this backend's rules and its lint union membership."""
    return [
        *collect_rules(),
        UnionRule(LintTargetsRequest, PylintRequest),
        *pex_from_targets.rules(),
    ]
| true | true |
f72aef2c46434bd7fee98942b7dd5f4091b26225 | 9,102 | py | Python | homeassistant/components/philips_js/media_player.py | domwillcode/home-assistant | f170c80bea70c939c098b5c88320a1c789858958 | [
"Apache-2.0"
] | 6 | 2020-07-18T16:33:25.000Z | 2021-09-26T09:52:04.000Z | homeassistant/components/philips_js/media_player.py | domwillcode/home-assistant | f170c80bea70c939c098b5c88320a1c789858958 | [
"Apache-2.0"
] | 47 | 2020-07-23T07:14:33.000Z | 2022-03-31T06:01:46.000Z | homeassistant/components/philips_js/media_player.py | klauern/home-assistant-core | c18ba6aec0627e6afb6442c678edb5ff2bb17db6 | [
"Apache-2.0"
] | 5 | 2020-03-29T00:29:13.000Z | 2021-09-06T20:58:40.000Z | """Media Player component to integrate TVs exposing the Joint Space API."""
from datetime import timedelta
import logging
from haphilipsjs import PhilipsTV
import voluptuous as vol
from homeassistant.components.media_player import PLATFORM_SCHEMA, MediaPlayerEntity
from homeassistant.components.media_player.const import (
MEDIA_TYPE_CHANNEL,
SUPPORT_NEXT_TRACK,
SUPPORT_PLAY_MEDIA,
SUPPORT_PREVIOUS_TRACK,
SUPPORT_SELECT_SOURCE,
SUPPORT_TURN_OFF,
SUPPORT_TURN_ON,
SUPPORT_VOLUME_MUTE,
SUPPORT_VOLUME_SET,
SUPPORT_VOLUME_STEP,
)
from homeassistant.const import (
CONF_API_VERSION,
CONF_HOST,
CONF_NAME,
STATE_OFF,
STATE_ON,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.event import call_later, track_time_interval
from homeassistant.helpers.script import Script
_LOGGER = logging.getLogger(__name__)
# Base feature bitmask; SUPPORT_TURN_ON is added per-entity only when a
# turn_on_action script is configured (see PhilipsTVMediaPlayer.__init__).
SUPPORT_PHILIPS_JS = (
    SUPPORT_TURN_OFF
    | SUPPORT_VOLUME_STEP
    | SUPPORT_VOLUME_SET
    | SUPPORT_VOLUME_MUTE
    | SUPPORT_SELECT_SOURCE
    | SUPPORT_NEXT_TRACK
    | SUPPORT_PREVIOUS_TRACK
    | SUPPORT_PLAY_MEDIA
)
# YAML key for an optional script run to power the TV on.
CONF_ON_ACTION = "turn_on_action"
DEFAULT_NAME = "Philips TV"
DEFAULT_API_VERSION = "1"
# Seconds between forced state refreshes scheduled by _update_soon.
DEFAULT_SCAN_INTERVAL = 30
# Delays (seconds) before refreshing state after sending a command.
DELAY_ACTION_DEFAULT = 2.0
DELAY_ACTION_ON = 10.0
# Source-list entries are rendered as "<prefix>: <name>".
PREFIX_SEPARATOR = ": "
PREFIX_SOURCE = "Input"
PREFIX_CHANNEL = "Channel"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
    {
        vol.Required(CONF_HOST): cv.string,
        vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
        vol.Optional(CONF_API_VERSION, default=DEFAULT_API_VERSION): cv.string,
        vol.Optional(CONF_ON_ACTION): cv.SCRIPT_SCHEMA,
    }
)
def _inverted(data):
return {v: k for k, v in data.items()}
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up the Philips TV platform."""
    turn_on_action = config.get(CONF_ON_ACTION)
    # Only build a turn-on script when the user configured one.
    if turn_on_action:
        on_script = Script(hass, turn_on_action)
    else:
        on_script = None
    tv_api = PhilipsTV(config.get(CONF_HOST), config.get(CONF_API_VERSION))
    entity = PhilipsTVMediaPlayer(tv_api, config.get(CONF_NAME), on_script)
    add_entities([entity])
class PhilipsTVMediaPlayer(MediaPlayerEntity):
    """Representation of a Philips TV exposing the JointSpace API."""
    def __init__(self, tv, name, on_script):
        """Initialize the Philips TV."""
        self._tv = tv
        self._name = name
        # Maps of id -> display name, refreshed by update().
        self._sources = {}
        self._channels = {}
        self._on_script = on_script
        self._supports = SUPPORT_PHILIPS_JS
        # Advertise turn-on only when a turn_on_action script exists.
        if self._on_script:
            self._supports |= SUPPORT_TURN_ON
        # Unsubscribe callback for the periodic refresh, or None.
        self._update_task = None
    def _update_soon(self, delay):
        """Reschedule update task."""
        # Cancel any pending periodic refresh before rescheduling.
        if self._update_task:
            self._update_task()
            self._update_task = None
        self.schedule_update_ha_state(force_refresh=False)
        def update_forced(event_time):
            self.schedule_update_ha_state(force_refresh=True)
        def update_and_restart(event_time):
            # First refresh after `delay`, then keep refreshing on an interval.
            update_forced(event_time)
            self._update_task = track_time_interval(
                self.hass, update_forced, timedelta(seconds=DEFAULT_SCAN_INTERVAL)
            )
        call_later(self.hass, delay, update_and_restart)
    async def async_added_to_hass(self):
        """Start running updates once we are added to hass."""
        await self.hass.async_add_executor_job(self._update_soon, 0)
    @property
    def name(self):
        """Return the device name."""
        return self._name
    @property
    def should_poll(self):
        """Device should be polled."""
        # This entity schedules its own refreshes via _update_soon.
        return False
    @property
    def supported_features(self):
        """Flag media player features that are supported."""
        return self._supports
    @property
    def state(self):
        """Get the device state. An exception means OFF state."""
        if self._tv.on:
            return STATE_ON
        return STATE_OFF
    @property
    def source(self):
        """Return the current input source."""
        if self.media_content_type == MEDIA_TYPE_CHANNEL:
            name = self._channels.get(self._tv.channel_id)
            prefix = PREFIX_CHANNEL
        else:
            name = self._sources.get(self._tv.source_id)
            prefix = PREFIX_SOURCE
        if name is None:
            return None
        return prefix + PREFIX_SEPARATOR + name
    @property
    def source_list(self):
        """List of available input sources."""
        complete = []
        for source in self._sources.values():
            complete.append(PREFIX_SOURCE + PREFIX_SEPARATOR + source)
        for channel in self._channels.values():
            complete.append(PREFIX_CHANNEL + PREFIX_SEPARATOR + channel)
        return complete
    def select_source(self, source):
        """Set the input source."""
        # The source string is "<prefix>: <name>"; split on the first separator.
        data = source.split(PREFIX_SEPARATOR, 1)
        if data[0] == PREFIX_SOURCE:
            source_id = _inverted(self._sources).get(data[1])
            if source_id:
                self._tv.setSource(source_id)
        elif data[0] == PREFIX_CHANNEL:
            channel_id = _inverted(self._channels).get(data[1])
            if channel_id:
                self._tv.setChannel(channel_id)
        self._update_soon(DELAY_ACTION_DEFAULT)
    @property
    def volume_level(self):
        """Volume level of the media player (0..1)."""
        return self._tv.volume
    @property
    def is_volume_muted(self):
        """Boolean if volume is currently muted."""
        return self._tv.muted
    def turn_on(self):
        """Turn on the device."""
        if self._on_script:
            self._on_script.run()
            self._update_soon(DELAY_ACTION_ON)
    def turn_off(self):
        """Turn off the device."""
        self._tv.sendKey("Standby")
        # Optimistically mark the TV off so state updates immediately.
        self._tv.on = False
        self._update_soon(DELAY_ACTION_DEFAULT)
    def volume_up(self):
        """Send volume up command."""
        self._tv.sendKey("VolumeUp")
        self._update_soon(DELAY_ACTION_DEFAULT)
    def volume_down(self):
        """Send volume down command."""
        self._tv.sendKey("VolumeDown")
        self._update_soon(DELAY_ACTION_DEFAULT)
    def mute_volume(self, mute):
        """Send mute command."""
        self._tv.setVolume(None, mute)
        self._update_soon(DELAY_ACTION_DEFAULT)
    def set_volume_level(self, volume):
        """Set volume level, range 0..1."""
        self._tv.setVolume(volume, self._tv.muted)
        self._update_soon(DELAY_ACTION_DEFAULT)
    def media_previous_track(self):
        """Send rewind command."""
        self._tv.sendKey("Previous")
        self._update_soon(DELAY_ACTION_DEFAULT)
    def media_next_track(self):
        """Send fast forward command."""
        self._tv.sendKey("Next")
        self._update_soon(DELAY_ACTION_DEFAULT)
    @property
    def media_channel(self):
        """Get current channel if it's a channel."""
        if self.media_content_type == MEDIA_TYPE_CHANNEL:
            return self._channels.get(self._tv.channel_id)
        return None
    @property
    def media_title(self):
        """Title of current playing media."""
        if self.media_content_type == MEDIA_TYPE_CHANNEL:
            return self._channels.get(self._tv.channel_id)
        return self._sources.get(self._tv.source_id)
    @property
    def media_content_type(self):
        """Return content type of playing media."""
        # "tv" and "11" are source ids the TV reports for the tuner input
        # — presumably API-version dependent; TODO confirm against haphilipsjs.
        if self._tv.source_id == "tv" or self._tv.source_id == "11":
            return MEDIA_TYPE_CHANNEL
        if self._tv.source_id is None and self._tv.channels:
            return MEDIA_TYPE_CHANNEL
        return None
    @property
    def media_content_id(self):
        """Content type of current playing media."""
        if self.media_content_type == MEDIA_TYPE_CHANNEL:
            return self._channels.get(self._tv.channel_id)
        return None
    @property
    def device_state_attributes(self):
        """Return the state attributes."""
        return {"channel_list": list(self._channels.values())}
    def play_media(self, media_type, media_id, **kwargs):
        """Play a piece of media."""
        _LOGGER.debug("Call play media type <%s>, Id <%s>", media_type, media_id)
        if media_type == MEDIA_TYPE_CHANNEL:
            channel_id = _inverted(self._channels).get(media_id)
            if channel_id:
                self._tv.setChannel(channel_id)
                self._update_soon(DELAY_ACTION_DEFAULT)
            else:
                _LOGGER.error("Unable to find channel <%s>", media_id)
        else:
            _LOGGER.error("Unsupported media type <%s>", media_type)
    def update(self):
        """Get the latest data and update device state."""
        self._tv.update()
        # Fall back to a synthetic name when the API reports none.
        self._sources = {
            srcid: source["name"] or f"Source {srcid}"
            for srcid, source in (self._tv.sources or {}).items()
        }
        self._channels = {
            chid: channel["name"] for chid, channel in (self._tv.channels or {}).items()
        }
| 30.854237 | 88 | 0.64777 | from datetime import timedelta
import logging
from haphilipsjs import PhilipsTV
import voluptuous as vol
from homeassistant.components.media_player import PLATFORM_SCHEMA, MediaPlayerEntity
from homeassistant.components.media_player.const import (
MEDIA_TYPE_CHANNEL,
SUPPORT_NEXT_TRACK,
SUPPORT_PLAY_MEDIA,
SUPPORT_PREVIOUS_TRACK,
SUPPORT_SELECT_SOURCE,
SUPPORT_TURN_OFF,
SUPPORT_TURN_ON,
SUPPORT_VOLUME_MUTE,
SUPPORT_VOLUME_SET,
SUPPORT_VOLUME_STEP,
)
from homeassistant.const import (
CONF_API_VERSION,
CONF_HOST,
CONF_NAME,
STATE_OFF,
STATE_ON,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.event import call_later, track_time_interval
from homeassistant.helpers.script import Script
_LOGGER = logging.getLogger(__name__)
# Base feature bitmask; SUPPORT_TURN_ON is added per-entity only when a
# turn_on_action script is configured.
SUPPORT_PHILIPS_JS = (
    SUPPORT_TURN_OFF
    | SUPPORT_VOLUME_STEP
    | SUPPORT_VOLUME_SET
    | SUPPORT_VOLUME_MUTE
    | SUPPORT_SELECT_SOURCE
    | SUPPORT_NEXT_TRACK
    | SUPPORT_PREVIOUS_TRACK
    | SUPPORT_PLAY_MEDIA
)
# YAML key for an optional script run to power the TV on.
CONF_ON_ACTION = "turn_on_action"
DEFAULT_NAME = "Philips TV"
DEFAULT_API_VERSION = "1"
# Seconds between forced state refreshes scheduled by _update_soon.
DEFAULT_SCAN_INTERVAL = 30
# Delays (seconds) before refreshing state after sending a command.
DELAY_ACTION_DEFAULT = 2.0
DELAY_ACTION_ON = 10.0
# Source-list entries are rendered as "<prefix>: <name>".
PREFIX_SEPARATOR = ": "
PREFIX_SOURCE = "Input"
PREFIX_CHANNEL = "Channel"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
    {
        vol.Required(CONF_HOST): cv.string,
        vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
        vol.Optional(CONF_API_VERSION, default=DEFAULT_API_VERSION): cv.string,
        vol.Optional(CONF_ON_ACTION): cv.SCRIPT_SCHEMA,
    }
)
def _inverted(data):
return {v: k for k, v in data.items()}
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up the Philips TV platform."""
    name = config.get(CONF_NAME)
    host = config.get(CONF_HOST)
    api_version = config.get(CONF_API_VERSION)
    turn_on_action = config.get(CONF_ON_ACTION)
    tvapi = PhilipsTV(host, api_version)
    # Only build a turn-on script when the user configured one.
    on_script = Script(hass, turn_on_action) if turn_on_action else None
    add_entities([PhilipsTVMediaPlayer(tvapi, name, on_script)])
class PhilipsTVMediaPlayer(MediaPlayerEntity):
    """Representation of a Philips TV exposing the JointSpace API."""
    def __init__(self, tv, name, on_script):
        """Initialize the Philips TV."""
        self._tv = tv
        self._name = name
        self._sources = {}
        self._channels = {}
        self._on_script = on_script
        self._supports = SUPPORT_PHILIPS_JS
        if self._on_script:
            self._supports |= SUPPORT_TURN_ON
        self._update_task = None
    def _update_soon(self, delay):
        """Reschedule the periodic update task after `delay` seconds."""
        if self._update_task:
            self._update_task()
            self._update_task = None
        self.schedule_update_ha_state(force_refresh=False)
        def update_forced(event_time):
            self.schedule_update_ha_state(force_refresh=True)
        def update_and_restart(event_time):
            update_forced(event_time)
            self._update_task = track_time_interval(
                self.hass, update_forced, timedelta(seconds=DEFAULT_SCAN_INTERVAL)
            )
        call_later(self.hass, delay, update_and_restart)
    async def async_added_to_hass(self):
        """Start running updates once we are added to hass."""
        await self.hass.async_add_executor_job(self._update_soon, 0)
    @property
    def name(self):
        """Return the device name."""
        return self._name
    @property
    def should_poll(self):
        """Entity refreshes itself; no Home Assistant polling."""
        return False
    @property
    def supported_features(self):
        """Flag media player features that are supported."""
        return self._supports
    @property
    def state(self):
        """Return the device state."""
        if self._tv.on:
            return STATE_ON
        return STATE_OFF
    @property
    def source(self):
        """Return the current input source."""
        if self.media_content_type == MEDIA_TYPE_CHANNEL:
            name = self._channels.get(self._tv.channel_id)
            prefix = PREFIX_CHANNEL
        else:
            name = self._sources.get(self._tv.source_id)
            prefix = PREFIX_SOURCE
        if name is None:
            return None
        return prefix + PREFIX_SEPARATOR + name
    @property
    def source_list(self):
        """List of available input sources."""
        complete = []
        for source in self._sources.values():
            complete.append(PREFIX_SOURCE + PREFIX_SEPARATOR + source)
        for channel in self._channels.values():
            complete.append(PREFIX_CHANNEL + PREFIX_SEPARATOR + channel)
        return complete
    def select_source(self, source):
        """Set the input source."""
        data = source.split(PREFIX_SEPARATOR, 1)
        if data[0] == PREFIX_SOURCE:
            source_id = _inverted(self._sources).get(data[1])
            if source_id:
                self._tv.setSource(source_id)
        elif data[0] == PREFIX_CHANNEL:
            channel_id = _inverted(self._channels).get(data[1])
            if channel_id:
                self._tv.setChannel(channel_id)
        self._update_soon(DELAY_ACTION_DEFAULT)
    @property
    def volume_level(self):
        """Volume level of the media player (0..1)."""
        return self._tv.volume
    @property
    def is_volume_muted(self):
        """Boolean if volume is currently muted."""
        return self._tv.muted
    def turn_on(self):
        """Turn on the device."""
        if self._on_script:
            self._on_script.run()
            self._update_soon(DELAY_ACTION_ON)
    def turn_off(self):
        """Turn off the device."""
        self._tv.sendKey("Standby")
        self._tv.on = False
        self._update_soon(DELAY_ACTION_DEFAULT)
    def volume_up(self):
        """Send volume up command."""
        self._tv.sendKey("VolumeUp")
        self._update_soon(DELAY_ACTION_DEFAULT)
    def volume_down(self):
        """Send volume down command."""
        self._tv.sendKey("VolumeDown")
        self._update_soon(DELAY_ACTION_DEFAULT)
    def mute_volume(self, mute):
        """Send mute command."""
        self._tv.setVolume(None, mute)
        self._update_soon(DELAY_ACTION_DEFAULT)
    def set_volume_level(self, volume):
        """Set volume level, range 0..1."""
        self._tv.setVolume(volume, self._tv.muted)
        self._update_soon(DELAY_ACTION_DEFAULT)
    def media_previous_track(self):
        """Send rewind command."""
        self._tv.sendKey("Previous")
        self._update_soon(DELAY_ACTION_DEFAULT)
    def media_next_track(self):
        """Send fast forward command."""
        self._tv.sendKey("Next")
        self._update_soon(DELAY_ACTION_DEFAULT)
    @property
    def media_channel(self):
        """Get current channel if it's a channel."""
        if self.media_content_type == MEDIA_TYPE_CHANNEL:
            return self._channels.get(self._tv.channel_id)
        return None
    @property
    def media_title(self):
        """Title of current playing media."""
        if self.media_content_type == MEDIA_TYPE_CHANNEL:
            return self._channels.get(self._tv.channel_id)
        return self._sources.get(self._tv.source_id)
    @property
    def media_content_type(self):
        """Return content type of playing media."""
        if self._tv.source_id == "tv" or self._tv.source_id == "11":
            return MEDIA_TYPE_CHANNEL
        if self._tv.source_id is None and self._tv.channels:
            return MEDIA_TYPE_CHANNEL
        return None
    @property
    def media_content_id(self):
        """Content id of current playing media."""
        if self.media_content_type == MEDIA_TYPE_CHANNEL:
            return self._channels.get(self._tv.channel_id)
        return None
    @property
    def device_state_attributes(self):
        """Return the state attributes."""
        return {"channel_list": list(self._channels.values())}
    def play_media(self, media_type, media_id, **kwargs):
        """Play a piece of media."""
        _LOGGER.debug("Call play media type <%s>, Id <%s>", media_type, media_id)
        if media_type == MEDIA_TYPE_CHANNEL:
            channel_id = _inverted(self._channels).get(media_id)
            if channel_id:
                self._tv.setChannel(channel_id)
                self._update_soon(DELAY_ACTION_DEFAULT)
            else:
                _LOGGER.error("Unable to find channel <%s>", media_id)
        else:
            _LOGGER.error("Unsupported media type <%s>", media_type)
    def update(self):
        """Get the latest data and update device state."""
        self._tv.update()
        self._sources = {
            srcid: source["name"] or f"Source {srcid}"
            for srcid, source in (self._tv.sources or {}).items()
        }
        self._channels = {
            chid: channel["name"] for chid, channel in (self._tv.channels or {}).items()
        }
| true | true |
f72aef7afd8a21811ad53f8b289714ccd5098693 | 8,333 | py | Python | genie/assay.py | veo-ibd/Genie | 735e3aa0dc71aab0c404fd0cb3a34c8e1d9784c2 | [
"MIT"
] | null | null | null | genie/assay.py | veo-ibd/Genie | 735e3aa0dc71aab0c404fd0cb3a34c8e1d9784c2 | [
"MIT"
] | null | null | null | genie/assay.py | veo-ibd/Genie | 735e3aa0dc71aab0c404fd0cb3a34c8e1d9784c2 | [
"MIT"
] | 1 | 2022-01-20T16:33:19.000Z | 2022-01-20T16:33:19.000Z | import os
import logging
import subprocess
import yaml
import pandas as pd
from .example_filetype_format import FileTypeFormat
from . import process_functions
logger = logging.getLogger(__name__)
class Assayinfo(FileTypeFormat):
    """Assay information file type.

    Validates and processes each center's ``assay_information.yaml``
    submission against the GDC read-group data dictionary.
    """
    _fileType = "assayinfo"
    _process_kwargs = ["newPath", "databaseSynId"]
    def _validateFilename(self, filepath_list):
        """Assert the submitted file is named ``assay_information.yaml``."""
        assert os.path.basename(filepath_list[0]) == "assay_information.yaml"
    def process_steps(self, assay_info_df, newPath, databaseSynId):
        """Process the assay info, update the database table, and write the file.

        Args:
            assay_info_df: Assay information dataframe.
            newPath: Path to write the processed file to.
            databaseSynId: Synapse ID of the assay info database table.

        Returns:
            str: The path the processed file was written to (``newPath``).
        """
        process_assay_info_df = self._process(assay_info_df)
        col = ['SEQ_ASSAY_ID', 'is_paired_end', 'library_selection',
               'library_strategy', 'platform', 'read_length',
               'instrument_model', 'gene_padding', 'number_of_genes',
               'variant_classifications', 'CENTER']
        # Replace this center's rows (filterByColumn/toDelete) in the table.
        process_functions.updateData(
            self.syn,
            databaseSynId,
            process_assay_info_df,
            self.center,
            col=col,
            filterByColumn="CENTER",
            toDelete=True)
        process_assay_info_df.to_csv(newPath, sep="\t", index=False)
        return newPath
    def _process(self, df):
        '''
        Processing function for Assay information
        - Standardizes SEQ_ASSAY_ID
        - Default 10 for gene_padding
        - Fills in variant_classifications

        Args:
            df: Assay information dataframe

        Returns:
            dataframe: Processed dataframe
        '''
        seq_assay_ids = [
            assay.upper().replace('_', '-') for assay in df['SEQ_ASSAY_ID']]
        df['SEQ_ASSAY_ID'] = seq_assay_ids
        if process_functions.checkColExist(df, "gene_padding"):
            df['gene_padding'] = df['gene_padding'].fillna(10)
            df['gene_padding'] = df['gene_padding'].astype(int)
        else:
            df['gene_padding'] = 10
        if not process_functions.checkColExist(df, "variant_classifications"):
            # Bug fix: `pd.np` was deprecated in pandas 1.0 and removed in 2.0;
            # a plain float NaN fills the new column identically.
            df['variant_classifications'] = float("nan")
        df['CENTER'] = self.center
        return df
    def _get_dataframe(self, filepath_list):
        '''
        Takes in yaml file, returns dataframe

        Args:
            filepath_list: List with one path to the yaml file.

        Returns:
            dataframe: One row per SEQ_ASSAY_ID.

        Raises:
            ValueError: If the file is not parseable yaml.
        '''
        filepath = filepath_list[0]
        try:
            with open(filepath, 'r') as yamlfile:
                # https://github.com/yaml/pyyaml/wiki/PyYAML-yaml.load(input)-Deprecation
                # Must add this because yaml load deprecation
                panel_info_dict = yaml.load(yamlfile, Loader=yaml.FullLoader)
        except Exception:
            raise ValueError(
                "assay_information.yaml: Can't read in your file. "
                "Please make sure the file is a correctly formatted yaml")
        assay_info_df = pd.DataFrame(panel_info_dict)
        assay_info_df = assay_info_df.transpose()
        assay_info_df['SEQ_ASSAY_ID'] = assay_info_df.index
        assay_info_df.reset_index(drop=True, inplace=True)
        return assay_info_df
    def _validate(self, assay_info_df):
        '''
        Validates the values of assay information file

        Args:
            assay_info_df: assay information dataframe

        Returns:
            tuple: error and warning
        '''
        total_error = ""
        warning = ""
        if process_functions.checkColExist(assay_info_df, "SEQ_ASSAY_ID"):
            all_seq_assays = assay_info_df.SEQ_ASSAY_ID.unique()
            if not all([assay.startswith(self.center)
                        for assay in all_seq_assays]):
                total_error += \
                    "Assay_information.yaml: Please make sure your all your" +\
                    " SEQ_ASSAY_IDs start with your center abbreviation.\n"
        else:
            total_error += \
                "Assay_information.yaml: Must have SEQ_ASSAY_ID column.\n"
        # Validate most columns against the GDC read_group data dictionary.
        read_group_dict = process_functions.get_gdc_data_dictionary(
            "read_group")
        read_group_headers = read_group_dict['properties']
        warn, error = process_functions.check_col_and_values(
            assay_info_df,
            'is_paired_end',
            [True, False],
            filename="Assay_information.yaml",
            required=True)
        warning += warn
        total_error += error
        warn, error = process_functions.check_col_and_values(
            assay_info_df, 'library_selection',
            read_group_headers['library_selection']['enum'],
            filename="Assay_information.yaml",
            required=True)
        warning += warn
        total_error += error
        warn, error = process_functions.check_col_and_values(
            assay_info_df,
            'library_strategy',
            read_group_headers['library_strategy']['enum'],
            filename="Assay_information.yaml",
            required=True)
        warning += warn
        total_error += error
        warn, error = process_functions.check_col_and_values(
            assay_info_df,
            'platform',
            read_group_headers['platform']['enum'],
            filename="Assay_information.yaml",
            required=True)
        warning += warn
        total_error += error
        # instrument_model may be blank, so None is an accepted value.
        instrument_model = read_group_headers['instrument_model']['enum']
        instrument_model.append(None)
        warn, error = process_functions.check_col_and_values(
            assay_info_df,
            'instrument_model',
            instrument_model,
            filename="Assay_information.yaml",
            required=True)
        warning += warn
        total_error += error
        variant_classes = \
            ['Splice_Site', 'Nonsense_Mutation', 'Frame_Shift_Del',
             'Frame_Shift_Ins', 'Nonstop_Mutation', 'Translation_Start_Site',
             'In_Frame_Ins', 'In_Frame_Del', 'Missense_Mutation',
             'Intron', 'Splice_Region', 'Silent', 'RNA', "5'UTR", "3'UTR",
             'IGR', "5'Flank", "3'Flank", None]
        warn, error = process_functions.check_col_and_values(
            assay_info_df,
            'variant_classifications',
            variant_classes,
            filename="Assay_information.yaml",
            na_allowed=True)
        warning += warn
        total_error += error
        # target_capture_kit check intentionally disabled for now:
        # if not process_functions.checkColExist(
        #        assay_info_df, "target_capture_kit"):
        #     total_error += ("Assay_information.yaml: "
        #                     "Must have target_capture_kit column.\n")
        if process_functions.checkColExist(assay_info_df, "read_length"):
            if not all([process_functions.checkInt(i)
                        for i in assay_info_df["read_length"]
                        if i is not None and not pd.isnull(i)]):
                total_error += \
                    ("Assay_information.yaml: "
                     "Please double check your read_length. "
                     "It must be an integer or null.\n")
        else:
            total_error += \
                ("Assay_information.yaml: "
                 "Must have read_length column.\n")
        if process_functions.checkColExist(assay_info_df, "number_of_genes"):
            if not all([process_functions.checkInt(i)
                        for i in assay_info_df["number_of_genes"]]):
                total_error += \
                    ("Assay_information.yaml: "
                     "Please double check your number_of_genes. "
                     "It must be an integer.\n")
        else:
            total_error += \
                ("Assay_information.yaml: "
                 "Must have number_of_genes column.\n")
        if process_functions.checkColExist(assay_info_df, "gene_padding"):
            if not all([process_functions.checkInt(i)
                        for i in assay_info_df["gene_padding"]
                        if i is not None and not pd.isnull(i)]):
                total_error += \
                    ("Assay_information.yaml: "
                     "Please double check your gene_padding. "
                     "It must be an integer or blank.\n")
        else:
            warning += \
                ("Assay_information.yaml: "
                 "gene_padding is by default 10 if not specified.\n")
        return total_error, warning
| 36.388646 | 89 | 0.582503 | import os
import logging
import subprocess
import yaml
import pandas as pd
from .example_filetype_format import FileTypeFormat
from . import process_functions
logger = logging.getLogger(__name__)
class Assayinfo(FileTypeFormat):
    """Handler for a center's ``assay_information.yaml`` submission.

    Validates the file name and parsed contents, normalizes the table and
    syncs it into the assay-information database table.
    """

    _fileType = "assayinfo"
    # Keyword arguments the processing framework forwards to process_steps().
    _process_kwargs = ["newPath", "databaseSynId"]

    def _validateFilename(self, filepath_list):
        """Assert that the submitted file is named assay_information.yaml."""
        assert os.path.basename(filepath_list[0]) == "assay_information.yaml"

    def process_steps(self, assay_info_df, newPath, databaseSynId):
        """Normalize *assay_info_df*, sync it into table *databaseSynId*
        for this center, and write the processed frame to *newPath* as TSV.

        Returns the path the processed file was written to.
        """
        process_assay_info_df = self._process(assay_info_df)
        col = ['SEQ_ASSAY_ID', 'is_paired_end', 'library_selection',
               'library_strategy', 'platform', 'read_length',
               'instrument_model', 'gene_padding', 'number_of_genes',
               'variant_classifications', 'CENTER']
        process_functions.updateData(
            self.syn,
            databaseSynId,
            process_assay_info_df,
            self.center,
            col=col,
            filterByColumn="CENTER",
            toDelete=True)
        process_assay_info_df.to_csv(newPath, sep="\t", index=False)
        return newPath

    def _process(self, df):
        """Normalize SEQ_ASSAY_ID values and fill in defaulted columns.

        Mutates and returns *df*.
        """
        # SEQ_ASSAY_IDs are upper-cased and hyphen-separated by convention.
        seq_assay_ids = [
            assay.upper().replace('_', '-') for assay in df['SEQ_ASSAY_ID']]
        df['SEQ_ASSAY_ID'] = seq_assay_ids
        if process_functions.checkColExist(df, "gene_padding"):
            # Blank paddings default to 10; column is forced to int.
            df['gene_padding'] = df['gene_padding'].fillna(10)
            df['gene_padding'] = df['gene_padding'].astype(int)
        else:
            df['gene_padding'] = 10
        if not process_functions.checkColExist(df, "variant_classifications"):
            # BUGFIX: the original used pd.np.nan; the pandas.np alias was
            # deprecated in 0.25 and removed in pandas 1.0. A plain float
            # NaN assigns the same all-NaN column.
            df['variant_classifications'] = float("nan")
        df['CENTER'] = self.center
        return df

    def _get_dataframe(self, filepath_list):
        """Parse assay_information.yaml into a DataFrame, one row per assay.

        Raises ValueError when the file cannot be read as valid YAML.
        """
        filepath = filepath_list[0]
        try:
            with open(filepath, 'r') as yamlfile:
                panel_info_dict = yaml.load(yamlfile, Loader=yaml.FullLoader)
        except Exception:
            raise ValueError(
                "assay_information.yaml: Can't read in your file. "
                "Please make sure the file is a correctly formatted yaml")
        assay_info_df = pd.DataFrame(panel_info_dict)
        assay_info_df = assay_info_df.transpose()
        # The YAML is keyed by SEQ_ASSAY_ID; lift the index into a column.
        assay_info_df['SEQ_ASSAY_ID'] = assay_info_df.index
        assay_info_df.reset_index(drop=True, inplace=True)
        return assay_info_df

    def _validate(self, assay_info_df):
        """Validate the parsed assay table.

        Returns a ``(total_error, warning)`` pair of newline-joined
        message strings; empty strings mean the table is clean.
        """
        total_error = ""
        warning = ""
        if process_functions.checkColExist(assay_info_df, "SEQ_ASSAY_ID"):
            all_seq_assays = assay_info_df.SEQ_ASSAY_ID.unique()
            if not all([assay.startswith(self.center)
                        for assay in all_seq_assays]):
                total_error += \
                    "Assay_information.yaml: Please make sure your all your" +\
                    " SEQ_ASSAY_IDs start with your center abbreviation.\n"
        else:
            total_error += \
                "Assay_information.yaml: Must have SEQ_ASSAY_ID column.\n"
        # Several columns must match the GDC read_group controlled vocabulary.
        read_group_dict = process_functions.get_gdc_data_dictionary(
            "read_group")
        read_group_headers = read_group_dict['properties']
        warn, error = process_functions.check_col_and_values(
            assay_info_df,
            'is_paired_end',
            [True, False],
            filename="Assay_information.yaml",
            required=True)
        warning += warn
        total_error += error
        warn, error = process_functions.check_col_and_values(
            assay_info_df, 'library_selection',
            read_group_headers['library_selection']['enum'],
            filename="Assay_information.yaml",
            required=True)
        warning += warn
        total_error += error
        warn, error = process_functions.check_col_and_values(
            assay_info_df,
            'library_strategy',
            read_group_headers['library_strategy']['enum'],
            filename="Assay_information.yaml",
            required=True)
        warning += warn
        total_error += error
        warn, error = process_functions.check_col_and_values(
            assay_info_df,
            'platform',
            read_group_headers['platform']['enum'],
            filename="Assay_information.yaml",
            required=True)
        warning += warn
        total_error += error
        # instrument_model additionally accepts a null value.
        instrument_model = read_group_headers['instrument_model']['enum']
        instrument_model.append(None)
        warn, error = process_functions.check_col_and_values(
            assay_info_df,
            'instrument_model',
            instrument_model,
            filename="Assay_information.yaml",
            required=True)
        warning += warn
        total_error += error
        variant_classes = \
            ['Splice_Site', 'Nonsense_Mutation', 'Frame_Shift_Del',
             'Frame_Shift_Ins', 'Nonstop_Mutation', 'Translation_Start_Site',
             'In_Frame_Ins', 'In_Frame_Del', 'Missense_Mutation',
             'Intron', 'Splice_Region', 'Silent', 'RNA', "5'UTR", "3'UTR",
             'IGR', "5'Flank", "3'Flank", None]
        warn, error = process_functions.check_col_and_values(
            assay_info_df,
            'variant_classifications',
            variant_classes,
            filename="Assay_information.yaml",
            na_allowed=True)
        warning += warn
        total_error += error
        # target_capture_kit check intentionally disabled for now:
        # if not process_functions.checkColExist(
        #         assay_info_df, "target_capture_kit"):
        #     total_error += ("Assay_information.yaml: "
        #                     "Must have target_capture_kit column.\n")
        if process_functions.checkColExist(assay_info_df, "read_length"):
            # read_length entries may be null, otherwise must be integers.
            if not all([process_functions.checkInt(i)
                        for i in assay_info_df["read_length"]
                        if i is not None and not pd.isnull(i)]):
                total_error += \
                    ("Assay_information.yaml: "
                     "Please double check your read_length. "
                     "It must be an integer or null.\n")
        else:
            total_error += \
                ("Assay_information.yaml: "
                 "Must have read_length column.\n")
        if process_functions.checkColExist(assay_info_df, "number_of_genes"):
            if not all([process_functions.checkInt(i)
                        for i in assay_info_df["number_of_genes"]]):
                total_error += \
                    ("Assay_information.yaml: "
                     "Please double check your number_of_genes. "
                     "It must be an integer.\n")
        else:
            total_error += \
                ("Assay_information.yaml: "
                 "Must have number_of_genes column.\n")
        if process_functions.checkColExist(assay_info_df, "gene_padding"):
            # gene_padding entries may be blank, otherwise must be integers.
            if not all([process_functions.checkInt(i)
                        for i in assay_info_df["gene_padding"]
                        if i is not None and not pd.isnull(i)]):
                total_error += \
                    ("Assay_information.yaml: "
                     "Please double check your gene_padding. "
                     "It must be an integer or blank.\n")
        else:
            warning += \
                ("Assay_information.yaml: "
                 "gene_padding is by default 10 if not specified.\n")
        return total_error, warning
| true | true |
f72af06f509cb3b16be313e070fe087431a96b9c | 1,550 | py | Python | dlfairness/other/get_weight/alm.py | lin-tan/fairness-variance | 7f6aee23160707ffe78f429e5d960022ea1c9fe4 | [
"BSD-3-Clause"
] | null | null | null | dlfairness/other/get_weight/alm.py | lin-tan/fairness-variance | 7f6aee23160707ffe78f429e5d960022ea1c9fe4 | [
"BSD-3-Clause"
] | null | null | null | dlfairness/other/get_weight/alm.py | lin-tan/fairness-variance | 7f6aee23160707ffe78f429e5d960022ea1c9fe4 | [
"BSD-3-Clause"
] | null | null | null | import argparse
import pandas as pd
import json
import pickle
import numpy as np
from pathlib import Path
from scipy.special import softmax
import shutil
parser = argparse.ArgumentParser()
parser.add_argument('--config', type=str)
parser.add_argument('--raw_result_dir', type=str)
parser.add_argument('--output_dir', type=str)
args = parser.parse_args()
with open(args.config, 'r') as f:
config_json = json.load(f)
for config in config_json:
class_bias_result = []
for no_try in range(16):
if (config['dataset'] != 'CelebA') or (not config['training_type'] in ['no-constraints', 'l2-penalty', 'fair-alm']):
continue
exp_result_path = Path(
args.raw_result_dir,
"{0}_{1}_{2}_{3}/{4}".format(config['network'],
config['training_type'],
config['dataset'],
config['random_seed'],
str(no_try)))
p = Path(exp_result_path, 'checkpoint')
ckpt_path = Path(p, 'ckpt_80.t7')
if config['training_type'] == 'no-constraints':
tech = 'A-Base'
elif config['training_type'] == 'l2-penalty':
tech = 'A-L2'
elif config['training_type'] == 'fair-alm':
tech = 'A-ALM'
copy_path = Path(args.output_dir, tech, 'run_' + str(no_try).zfill(2) + '.pth')
copy_path.parent.mkdir(parents=True, exist_ok=True)
shutil.copy(ckpt_path, copy_path) | 35.227273 | 124 | 0.570323 | import argparse
"""Copy trained CelebA checkpoints into a technique-labelled output layout.

For each matching experiment configuration, copies the epoch-80 checkpoint
of each of the 16 repeated runs to <output_dir>/<tech>/run_NN.pth, where
tech is A-Base / A-L2 / A-ALM depending on the training type.
"""
import argparse
import pandas as pd
import json
import pickle
import numpy as np
from pathlib import Path
from scipy.special import softmax
import shutil

parser = argparse.ArgumentParser()
parser.add_argument('--config', type=str)
parser.add_argument('--raw_result_dir', type=str)
parser.add_argument('--output_dir', type=str)
args = parser.parse_args()

with open(args.config, 'r') as f:
    config_json = json.load(f)

for config in config_json:
    # Only CelebA runs of the three supported training types are copied.
    # This filter does not depend on the run index, so it is hoisted out
    # of the per-run loop (the original re-tested it 16 times per config).
    if (config['dataset'] != 'CelebA'
            or config['training_type'] not in
            ['no-constraints', 'l2-penalty', 'fair-alm']):
        continue
    for no_try in range(16):
        exp_result_path = Path(
            args.raw_result_dir,
            "{0}_{1}_{2}_{3}/{4}".format(config['network'],
                                         config['training_type'],
                                         config['dataset'],
                                         config['random_seed'],
                                         str(no_try)))
        p = Path(exp_result_path, 'checkpoint')
        ckpt_path = Path(p, 'ckpt_80.t7')
        if config['training_type'] == 'no-constraints':
            tech = 'A-Base'
        elif config['training_type'] == 'l2-penalty':
            tech = 'A-L2'
        elif config['training_type'] == 'fair-alm':
            tech = 'A-ALM'
        # Destination: <output_dir>/<tech>/run_00.pth ... run_15.pth
        copy_path = Path(args.output_dir, tech, 'run_' + str(no_try).zfill(2) + '.pth')
        copy_path.parent.mkdir(parents=True, exist_ok=True)
        shutil.copy(ckpt_path, copy_path)
f72af113f201219d494c2ae51b9d0c0fae085aeb | 925 | py | Python | Codefights/arcade/intro/level-7/33.stringsRearrangement/Python/solution1.py | RevansChen/online-judge | ad1b07fee7bd3c49418becccda904e17505f3018 | [
"MIT"
] | 7 | 2017-09-20T16:40:39.000Z | 2021-08-31T18:15:08.000Z | Codefights/arcade/intro/level-7/33.stringsRearrangement/Python/solution1.py | RevansChen/online-judge | ad1b07fee7bd3c49418becccda904e17505f3018 | [
"MIT"
] | null | null | null | Codefights/arcade/intro/level-7/33.stringsRearrangement/Python/solution1.py | RevansChen/online-judge | ad1b07fee7bd3c49418becccda904e17505f3018 | [
"MIT"
] | null | null | null | # Python3
def diffOne(a, b):
    """Return True iff strings *a* and *b* differ in exactly one position.

    Returns False for identical strings and for strings with two or more
    differing positions (scanning stops at the second mismatch). The
    inputs are expected to be of equal length.
    """
    mismatches = 0
    # zip pairs characters positionally, avoiding manual index arithmetic.
    for ch_a, ch_b in zip(a, b):
        if ch_a != ch_b:
            mismatches += 1
            if mismatches == 2:  # more than one difference -- fail fast
                return False
    return mismatches == 1
def func(inputArray, curr):
    """Recursive backtracking step for stringsRearrangement.

    Answers: can every word still in *inputArray* be appended, one at a
    time, after *curr* so that adjacent words differ in exactly one
    character? The list is permuted in place and restored on backtrack.
    """
    if len(inputArray) == 1:
        # Single word left; it just has to chain directly onto curr.
        return diffOne(inputArray[0], curr)
    for idx in range(len(inputArray)):
        if diffOne(inputArray[idx], curr):
            # Park the candidate at the tail and solve the smaller problem.
            inputArray[idx], inputArray[-1] = inputArray[-1], inputArray[idx]
            if func(inputArray[:-1], inputArray[-1]):
                return True
            # Undo the swap before trying the next candidate.
            inputArray[idx], inputArray[-1] = inputArray[-1], inputArray[idx]
    return False
def stringsRearrangement(inputArray):
    """Return True if the given equal-length words can be reordered so
    that every pair of consecutive words differs in exactly one position.
    """
    for idx in range(len(inputArray)):
        # Try each word in turn as the final element of the chain,
        # restoring the list's order after each attempt.
        inputArray[idx], inputArray[-1] = inputArray[-1], inputArray[idx]
        if func(inputArray[:-1], inputArray[-1]):
            return True
        inputArray[idx], inputArray[-1] = inputArray[-1], inputArray[idx]
    return False
| 30.833333 | 73 | 0.572973 |
def diffOne(a, b):
    """True when *a* and *b* (equal-length strings) differ at exactly one
    index; False for zero or for two-plus mismatches (early exit)."""
    delta = 0
    idx = 0
    while idx < len(a):
        if a[idx] != b[idx]:
            delta += 1
            if delta == 2:
                return False
        idx += 1
    return bool(delta)
def func(inputArray, curr):
    """Backtracking helper: decide whether the remaining words can be
    ordered so each neighbour pair (starting from *curr*) differs in
    exactly one character. Mutates the list while searching, undoing
    every swap before returning."""
    # Base case: one word left -- it must chain onto curr directly.
    if len(inputArray) == 1:
        return diffOne(inputArray[0], curr)
    last = len(inputArray) - 1
    for pos in range(len(inputArray)):
        if not diffOne(inputArray[pos], curr):
            continue
        # Move the candidate to the tail, recurse on the rest, then undo.
        inputArray[pos], inputArray[last] = inputArray[last], inputArray[pos]
        if func(inputArray[:last], inputArray[last]):
            return True
        inputArray[pos], inputArray[last] = inputArray[last], inputArray[pos]
    return False
def stringsRearrangement(inputArray):
    """Return True when the words can be ordered into a chain in which
    every consecutive pair differs in exactly one character."""
    tail = len(inputArray) - 1
    for start in range(len(inputArray)):
        # Pick each word as the chain's final element; backtrack after.
        inputArray[start], inputArray[tail] = inputArray[tail], inputArray[start]
        if func(inputArray[:tail], inputArray[tail]):
            return True
        inputArray[start], inputArray[tail] = inputArray[tail], inputArray[start]
    return False
| true | true |
f72af114783cb0a76af49c20e78ca72551409642 | 1,378 | py | Python | setup.py | jamesgregson/easy_image_io | 4b5af29f3ccc37e4b10fbdc1e18d508ed04b882d | [
"MIT"
] | 1 | 2017-08-17T11:59:45.000Z | 2017-08-17T11:59:45.000Z | setup.py | jamesgregson/easy_image_io | 4b5af29f3ccc37e4b10fbdc1e18d508ed04b882d | [
"MIT"
] | null | null | null | setup.py | jamesgregson/easy_image_io | 4b5af29f3ccc37e4b10fbdc1e18d508ed04b882d | [
"MIT"
] | null | null | null | from setuptools import setup, Extension
import numpy
import os
import config
def find(name, path):
    """Return the full path of the first file called *name* under *path*.

    Walks *path* top-down and returns the first match joined with its
    containing directory; returns the empty string when nothing matches.
    """
    for root, _dirs, files in os.walk(path):
        if name in files:
            return os.path.join(root, name)
    return ''
# Detect optional image libraries and assemble the C extension build flags.
print('locating directories...')
defines = [('MAJOR_VERSION', 0), ('MINOR_VERSION', 1)]
include_dirs = [numpy.get_include()]
libraries = []
library_dirs = []

# Enable TIFF support in the extension only when the header is present
# (find() returns '' on a miss, so plain truthiness suffices).
print('checking for tiffio.h...')
if find('tiffio.h', config.tiff_include_dir):
    defines.append(('cimg_use_tiff', 1))
    include_dirs.append(config.tiff_include_dir)
    libraries.append('tiff')
    library_dirs.append(config.tiff_library_dir)

# Likewise for PNG support.
print('checking for png.h...')
if find('png.h', config.png_include_dir):
    defines.append(('cimg_use_png', 1))
    include_dirs.append(config.png_include_dir)
    libraries.append('png')
    library_dirs.append(config.png_library_dir)

# Extra libraries requested by the local build configuration.
libraries.extend(config.libs)

print('Setting up extension...')
easy_image_io = Extension('easy_image_io',
                          define_macros=defines,
                          sources=['easy_image_io.cpp'],
                          include_dirs=include_dirs,
                          library_dirs=library_dirs,
                          libraries=libraries)

print('Building extension...')
setup(name='easy_image_io', version='0.1', ext_modules=[easy_image_io])
| 30.622222 | 74 | 0.650218 | from setuptools import setup, Extension
import numpy
import os
import config
def find(name, path):
    """Locate the first file named *name* anywhere below *path*; returns
    its joined path, or '' when no such file exists."""
    for folder, _subdirs, filenames in os.walk(path):
        if name in filenames:
            return os.path.join(folder, name)
    return ''
# ---- build configuration for the easy_image_io C extension ----
print('locating directories...')

defines = [('MAJOR_VERSION', 0), ('MINOR_VERSION', 1)]
include_dirs = [numpy.get_include()]
libraries = []
library_dirs = []

# Probe for the libtiff header; wire up TIFF support only when found.
print('checking for tiffio.h...')
tiff_found = find('tiffio.h', config.tiff_include_dir) != ''
if tiff_found:
    defines += [('cimg_use_tiff', 1)]
    include_dirs += [config.tiff_include_dir]
    libraries += ['tiff']
    library_dirs += [config.tiff_library_dir]

# Probe for the libpng header; wire up PNG support only when found.
print('checking for png.h...')
png_found = find('png.h', config.png_include_dir) != ''
if png_found:
    defines += [('cimg_use_png', 1)]
    include_dirs += [config.png_include_dir]
    libraries += ['png']
    library_dirs += [config.png_library_dir]

# Extra libraries listed in the local config module.
for extra_lib in config.libs:
    libraries += [extra_lib]

print('Setting up extension...')
easy_image_io = Extension(
    'easy_image_io',
    define_macros=defines,
    sources=['easy_image_io.cpp'],
    include_dirs=include_dirs,
    library_dirs=library_dirs,
    libraries=libraries)

print('Building extension...')
setup(name='easy_image_io', version='0.1', ext_modules=[easy_image_io])
| true | true |
f72af1e60284b4758cddcb59383f494df80a1a1a | 148,700 | py | Python | all/emojitations/data/hy.py | idleberg/sublime-emojitations | b2b4e8ce2c33ed0f6b8d6db6085e21da4e8d895b | [
"MIT"
] | 6 | 2016-08-31T14:42:36.000Z | 2021-09-05T23:55:47.000Z | all/emojitations/data/hy.py | idleberg/sublime-emojitations | b2b4e8ce2c33ed0f6b8d6db6085e21da4e8d895b | [
"MIT"
] | 1 | 2016-10-20T10:52:06.000Z | 2016-10-20T18:47:19.000Z | all/emojitations/data/hy.py | idleberg/sublime-emojitations | b2b4e8ce2c33ed0f6b8d6db6085e21da4e8d895b | [
"MIT"
] | 5 | 2016-08-31T14:48:11.000Z | 2021-09-05T23:55:33.000Z | from emojitations.emojitypes import EmojiAnnotations
emoji = [
EmojiAnnotations(emoji='😀', codepoints=(128512,), name='ծիծաղող դեմք', slug='ծիծաղող_դեմք', annotations=frozenset({'դեմք', 'քմծիծաղել'})),
EmojiAnnotations(emoji='😁', codepoints=(128513,), name='ծիծաղող դեմք ժպտացող աչքերով', slug='ծիծաղող_դեմք_ժպտացող_աչքերով', annotations=frozenset({'աչք', 'դեմք', 'ժպտալ', 'քմծիծաղել'})),
EmojiAnnotations(emoji='😂', codepoints=(128514,), name='դեմք ուրախության արցունքներով', slug='դեմք_ուրախության_արցունքներով', annotations=frozenset({'ուրախություն', 'դեմք', 'ծիծաղել', 'արցունք'})),
EmojiAnnotations(emoji='😃', codepoints=(128515,), name='ժպտացող դեմք բաց բերանով', slug='ժպտացող_դեմք_բաց_բերանով', annotations=frozenset({'բաց', 'դեմք', 'ժպտալ', 'բերան'})),
EmojiAnnotations(emoji='😄', codepoints=(128516,), name='ժպտացող դեմք բաց բերանով և ժպտացող աչքերով', slug='ժպտացող_դեմք_բաց_բերանով_և_ժպտացող_աչքերով', annotations=frozenset({'բաց', 'աչք', 'դեմք', 'ժպտալ', 'բերան'})),
EmojiAnnotations(emoji='😅', codepoints=(128517,), name='ժպտացող դեմք բաց բերանով և սառը քրտինքով', slug='ժպտացող_դեմք_բաց_բերանով_և_սառը_քրտինքով', annotations=frozenset({'բաց', 'սառը', 'դեմք', 'ժպտալ', 'քրտինք'})),
EmojiAnnotations(emoji='😆', codepoints=(128518,), name='ժպտացող դեմք բաց բերանով և ամուր փակած աչքերով', slug='ժպտացող_դեմք_բաց_բերանով_և_ամուր_փակած_աչքերով', annotations=frozenset({'ժպտալ', 'գոհ', 'ծիծաղել', 'դեմք', 'բաց', 'բերան'})),
EmojiAnnotations(emoji='😉', codepoints=(128521,), name='աչքով անող դեմք', slug='աչքով_անող_դեմք', annotations=frozenset({'դեմք', 'աչքով անել'})),
EmojiAnnotations(emoji='😊', codepoints=(128522,), name='ժպտացող դեմք ժպտացող աչքերով', slug='ժպտացող_դեմք_ժպտացող_աչքերով', annotations=frozenset({'աչք', 'դեմք', 'ժպտալ', 'շիկնել'})),
EmojiAnnotations(emoji='😋', codepoints=(128523,), name='համեղ ուտելիք վայելող դեմք', slug='համեղ_ուտելիք_վայելող_դեմք', annotations=frozenset({'դեմք', 'վեյելել', 'ժպտալ', 'համեղ', 'նյամ'})),
EmojiAnnotations(emoji='😎', codepoints=(128526,), name='ժպտացող դեմք արևային ակնոցով', slug='ժպտացող_դեմք_արևային_ակնոցով', annotations=frozenset({'աչք', 'ակնոց', 'զիլ', 'ժպտալ', 'պայծառ', 'արևային ակնոց', 'դեմք', 'եղանակ', 'արև'})),
EmojiAnnotations(emoji='😍', codepoints=(128525,), name='ժպտացող դեմք սրտաձև աչքերով', slug='ժպտացող_դեմք_սրտաձև_աչքերով', annotations=frozenset({'աչք', 'դեմք', 'սիրտ', 'ժպտալ', 'սեր'})),
EmojiAnnotations(emoji='😘', codepoints=(128536,), name='համբույր ուղարկող դեմք', slug='համբույր_ուղարկող_դեմք', annotations=frozenset({'դեմք', 'սիրտ', 'համբուրել'})),
EmojiAnnotations(emoji='😗', codepoints=(128535,), name='համբուրող դեմք', slug='համբուրող_դեմք', annotations=frozenset({'դեմք', 'համբույր'})),
EmojiAnnotations(emoji='😙', codepoints=(128537,), name='համբուրող դեմք ժպտացող աչքերով', slug='համբուրող_դեմք_ժպտացող_աչքերով', annotations=frozenset({'աչք', 'դեմք', 'համբուրել', 'ժպտալ'})),
EmojiAnnotations(emoji='😚', codepoints=(128538,), name='համբուրող դեմք փակ աչքերով', slug='համբուրող_դեմք_փակ_աչքերով', annotations=frozenset({'աչք', 'դեմք', 'փակ', 'համբուրել'})),
EmojiAnnotations(emoji='☺', codepoints=(9786,), name='ժպտացող դեմք', slug='ժպտացող_դեմք', annotations=frozenset({'դեմք', 'ժպտալ', 'անկաշկանդ'})),
EmojiAnnotations(emoji='\U0001f642', codepoints=(128578,), name='թեթևակի ժպտացող դեմք', slug='թեթևակի_ժպտացող_դեմք', annotations=frozenset({'դեմք', 'ժպտալ'})),
EmojiAnnotations(emoji='\U0001f917', codepoints=(129303,), name='գրկող դեմք', slug='գրկող_դեմք', annotations=frozenset({'գրկախառնում', 'դեմք', 'գրկախառնվել'})),
EmojiAnnotations(emoji='😇', codepoints=(128519,), name='ժպտացող դեմք լուսապսակով', slug='ժպտացող_դեմք_լուսապսակով', annotations=frozenset({'անմեղ', 'լուսապսակ', 'ժպտալ', 'հրեշտակ', 'դեմք', 'հեքիաթ', 'ֆանտազիա'})),
EmojiAnnotations(emoji='\U0001f914', codepoints=(129300,), name='մտածող դեմք', slug='մտածող_դեմք', annotations=frozenset({'մտածող', 'դեմք'})),
EmojiAnnotations(emoji='😐', codepoints=(128528,), name='չեզոք դեմք', slug='չեզոք_դեմք', annotations=frozenset({'դեմք', 'չեզոք', 'անվրդով'})),
EmojiAnnotations(emoji='😑', codepoints=(128529,), name='անհույզ դեմք', slug='անհույզ_դեմք', annotations=frozenset({'դեմք', 'ոչինչ չարտահայտող', 'անարտահայտիչ', 'առանց էմոցիաների'})),
EmojiAnnotations(emoji='😶', codepoints=(128566,), name='առանց բերանի դեմք', slug='առանց_բերանի_դեմք', annotations=frozenset({'դեմք', 'լուռ', 'բերան', 'հանգիստ'})),
EmojiAnnotations(emoji='\U0001f644', codepoints=(128580,), name='պտտվող աչքերով դեմք', slug='պտտվող_աչքերով_դեմք', annotations=frozenset({'դեմք', 'աչքեր', 'պտտվող'})),
EmojiAnnotations(emoji='😏', codepoints=(128527,), name='կեղծ ժպտացող դեմք', slug='կեղծ_ժպտացող_դեմք', annotations=frozenset({'դեմք', 'կեղծ ժպտալ'})),
EmojiAnnotations(emoji='😣', codepoints=(128547,), name='համառող դեմք', slug='համառող_դեմք', annotations=frozenset({'դեմք', 'համառել'})),
EmojiAnnotations(emoji='😥', codepoints=(128549,), name='հիասթափված; բայց թեթևացած դեմք', slug='հիասթափված;_բայց_թեթևացած_դեմք', annotations=frozenset({'դեմք', 'թեթևացած', 'հիասթափված'})),
EmojiAnnotations(emoji='😮', codepoints=(128558,), name='բաց բերանով դեմք', slug='բաց_բերանով_դեմք', annotations=frozenset({'բաց', 'դեմք', 'բերան', 'համակրանք'})),
EmojiAnnotations(emoji='\U0001f910', codepoints=(129296,), name='ճարմանդավոր բերանով դեմք', slug='ճարմանդավոր_բերանով_դեմք', annotations=frozenset({'դեմք', 'բերան', 'ճարմանդ'})),
EmojiAnnotations(emoji='😯', codepoints=(128559,), name='սաստված դեմք', slug='սաստված_դեմք', annotations=frozenset({'զարմացած', 'դեմք', 'սաստված', 'ապշած'})),
EmojiAnnotations(emoji='😪', codepoints=(128554,), name='քնատ դեմք', slug='քնատ_դեմք', annotations=frozenset({'քնել', 'դեմք'})),
EmojiAnnotations(emoji='😫', codepoints=(128555,), name='հոգնած դեմք', slug='հոգնած_դեմք', annotations=frozenset({'դեմք', 'հոգնած'})),
EmojiAnnotations(emoji='😴', codepoints=(128564,), name='քնած դեմք', slug='քնած_դեմք', annotations=frozenset({'քնել', 'դեմք', 'խռռ'})),
EmojiAnnotations(emoji='😌', codepoints=(128524,), name='թեթևացած դեմք', slug='թեթևացած_դեմք', annotations=frozenset({'դեմք', 'թեթևացած'})),
EmojiAnnotations(emoji='\U0001f913', codepoints=(129299,), name='գերազանցիկի դեմք', slug='գերազանցիկի_դեմք', annotations=frozenset({'դեմք', 'ցնդած', 'հիմար'})),
EmojiAnnotations(emoji='😛', codepoints=(128539,), name='լեզու հանած դեմք', slug='լեզու_հանած_դեմք', annotations=frozenset({'դեմք', 'լեզու'})),
EmojiAnnotations(emoji='😜', codepoints=(128540,), name='լեզու հանած և աչքով անող դեմք', slug='լեզու_հանած_և_աչքով_անող_դեմք', annotations=frozenset({'աչք', 'դեմք', 'կատակել', 'լեզու', 'աչքով անել'})),
EmojiAnnotations(emoji='😝', codepoints=(128541,), name='լեզու հանած և ամուր փակած աչքերով դեմք', slug='լեզու_հանած_և_ամուր_փակած_աչքերով_դեմք', annotations=frozenset({'աչք', 'դեմք', 'սարսափելի', 'համ', 'լեզու'})),
EmojiAnnotations(emoji='☹', codepoints=(9785,), name='խոժոռված դեմք', slug='խոժոռված_դեմք', annotations=frozenset({'դեմք', 'խոժոռված'})),
EmojiAnnotations(emoji='\U0001f641', codepoints=(128577,), name='թեթևակի խոժոռված դեմք', slug='թեթևակի_խոժոռված_դեմք', annotations=frozenset({'դեմք', 'խոժոռված'})),
EmojiAnnotations(emoji='😒', codepoints=(128530,), name='անտրամադիր դեմք', slug='անտրամադիր_դեմք', annotations=frozenset({'դեմք', 'անտրամադիր', 'դժբախտ'})),
EmojiAnnotations(emoji='😓', codepoints=(128531,), name='սառը քրտինքով դեմք', slug='սառը_քրտինքով_դեմք', annotations=frozenset({'սառը', 'դեմք', 'քրտինք'})),
EmojiAnnotations(emoji='😔', codepoints=(128532,), name='մտածկոտ դեմք', slug='մտածկոտ_դեմք', annotations=frozenset({'դեմք', 'մռայլված', 'մտածկոտ'})),
EmojiAnnotations(emoji='😕', codepoints=(128533,), name='շփոթված դեմք', slug='շփոթված_դեմք', annotations=frozenset({'դեմք', 'շփոթված'})),
EmojiAnnotations(emoji='😖', codepoints=(128534,), name='ցնցված դեմք', slug='ցնցված_դեմք', annotations=frozenset({'դեմք', 'ցնցված'})),
EmojiAnnotations(emoji='\U0001f643', codepoints=(128579,), name='գլխնիվայր դեմք', slug='գլխնիվայր_դեմք', annotations=frozenset({'դեմք', 'գլխնիվայր'})),
EmojiAnnotations(emoji='😷', codepoints=(128567,), name='բժշկական դիմակով դեմք', slug='բժշկական_դիմակով_դեմք', annotations=frozenset({'հիվանդ', 'բժիշկ', 'սառը', 'դեմք', 'բժշկական', 'դիմակ'})),
EmojiAnnotations(emoji='\U0001f912', codepoints=(129298,), name='ջերմաչափով դեմք', slug='ջերմաչափով_դեմք', annotations=frozenset({'դեմք', 'հիվանդ', 'ջերմաչափ'})),
EmojiAnnotations(emoji='\U0001f915', codepoints=(129301,), name='գլխակապով դեմք', slug='գլխակապով_դեմք', annotations=frozenset({'դեմք', 'վիրակապ', 'վնասվածք'})),
EmojiAnnotations(emoji='\U0001f911', codepoints=(129297,), name='թղթադրամը բերանին դեմք', slug='թղթադրամը_բերանին_դեմք', annotations=frozenset({'դեմք', 'փող', 'բերան'})),
EmojiAnnotations(emoji='😲', codepoints=(128562,), name='ապշահար դեմք', slug='ապշահար_դեմք', annotations=frozenset({'դեմք', 'ցնցված', 'ապշահար', 'ամբողջովին'})),
EmojiAnnotations(emoji='😞', codepoints=(128542,), name='հիասթափված դեմք', slug='հիասթափված_դեմք', annotations=frozenset({'դեմք', 'հիասթափված'})),
EmojiAnnotations(emoji='😟', codepoints=(128543,), name='անհանգստացած դեմք', slug='անհանգստացած_դեմք', annotations=frozenset({'անհանգստացած', 'դեմք'})),
EmojiAnnotations(emoji='😤', codepoints=(128548,), name='քթից գոլորշի հանող դեմք', slug='քթից_գոլորշի_հանող_դեմք', annotations=frozenset({'դեմք', 'հաղթած', 'հաղթանակ'})),
EmojiAnnotations(emoji='😢', codepoints=(128546,), name='արտասվող դեմք', slug='արտասվող_դեմք', annotations=frozenset({'արտասվել', 'դեմք', 'տխուր', 'արտասուք'})),
EmojiAnnotations(emoji='😭', codepoints=(128557,), name='բարձրաձայն արտասվող դեմք', slug='բարձրաձայն_արտասվող_դեմք', annotations=frozenset({'արտասվել', 'դեմք', 'տխուր', 'հեկեկալ', 'արտասուք'})),
EmojiAnnotations(emoji='😦', codepoints=(128550,), name='բաց բերանով խոժոռված դեմք', slug='բաց_բերանով_խոժոռված_դեմք', annotations=frozenset({'բաց', 'դեմք', 'բերան', 'խոժոռված'})),
EmojiAnnotations(emoji='😧', codepoints=(128551,), name='վշտահար դեմք', slug='վշտահար_դեմք', annotations=frozenset({'վշտահար', 'դեմք'})),
EmojiAnnotations(emoji='😨', codepoints=(128552,), name='վախեցած դեմք', slug='վախեցած_դեմք', annotations=frozenset({'վախեցած', 'դեմք', 'սարսափած', 'վախ'})),
EmojiAnnotations(emoji='😩', codepoints=(128553,), name='ուժասպառ դեմք', slug='ուժասպառ_դեմք', annotations=frozenset({'դեմք', 'հոգնած', 'ուժասպառ'})),
EmojiAnnotations(emoji='😬', codepoints=(128556,), name='ծամածռվող դեմք', slug='ծամածռվող_դեմք', annotations=frozenset({'դեմք', 'ծամածռություն'})),
EmojiAnnotations(emoji='😰', codepoints=(128560,), name='բաց բերանով և սառը քրտինքով դեմք', slug='բաց_բերանով_և_սառը_քրտինքով_դեմք', annotations=frozenset({'հապշտապ', 'բաց', 'սառը', 'դեմք', 'կապույտ', 'բերան', 'քրտինք'})),
EmojiAnnotations(emoji='😱', codepoints=(128561,), name='վախից գոռացող դեմք', slug='վախից_գոռացող_դեմք', annotations=frozenset({'վախեցած', 'ծամել', 'սարսափած', 'վախ', 'դեմք', 'ճչալ'})),
EmojiAnnotations(emoji='😳', codepoints=(128563,), name='շիկնած դեմք', slug='շիկնած_դեմք', annotations=frozenset({'դեմք', 'հիացած', 'շիկնած'})),
EmojiAnnotations(emoji='😵', codepoints=(128565,), name='գլխապտույտ ունեցող դեմք', slug='գլխապտույտ_ունեցող_դեմք', annotations=frozenset({'դեմք', 'գլխապտույտ'})),
EmojiAnnotations(emoji='😡', codepoints=(128545,), name='դժգոհ դեմք', slug='դժգոհ_դեմք', annotations=frozenset({'բարկացած', 'դեմք', 'զայրույթ', 'կարմիր', 'խենք', 'դժգոհ'})),
EmojiAnnotations(emoji='😠', codepoints=(128544,), name='բարկացած դեմք', slug='բարկացած_դեմք', annotations=frozenset({'դեմք', 'խենք', 'բարկացած'})),
EmojiAnnotations(emoji='😈', codepoints=(128520,), name='ժպտացող դեմք եղջյուրներով', slug='ժպտացող_դեմք_եղջյուրներով', annotations=frozenset({'դեմք', 'հեքիաթ', 'ֆանտազիա', 'ժպտալ', 'եղջյուրներ'})),
EmojiAnnotations(emoji='👿', codepoints=(128127,), name='սատանայի ճուտ', slug='սատանայի_ճուտ', annotations=frozenset({'դեմք', 'սատանա', 'հեքիաթ', 'ֆանտազիա', 'դև'})),
EmojiAnnotations(emoji='👹', codepoints=(128121,), name='մարդակեր հսկա', slug='մարդակեր_հսկա', annotations=frozenset({'դեմք', 'հեքիաթ', 'ֆանտազիա', 'ճապոնական', 'հրեշ', 'արարած'})),
EmojiAnnotations(emoji='👺', codepoints=(128122,), name='չար ոգի', slug='չար_ոգի', annotations=frozenset({'դեմք', 'հեքիաթ', 'ֆանտազիա', 'ճապոնական', 'հրեշ', 'արարած'})),
EmojiAnnotations(emoji='💀', codepoints=(128128,), name='գանգ', slug='գանգ', annotations=frozenset({'դեմք', 'հեքիաթ', 'մարմին', 'մահ', 'հրեշ'})),
EmojiAnnotations(emoji='☠', codepoints=(9760,), name='գանգ և խաչված ոսկորներ', slug='գանգ_և_խաչված_ոսկորներ', annotations=frozenset({'մարմին', 'ոսկորներ', 'գանգ', 'խաչված', 'դեմք', 'մահ', 'հրեշ'})),
EmojiAnnotations(emoji='👻', codepoints=(128123,), name='ուրվական', slug='ուրվական', annotations=frozenset({'դեմք', 'հեքիաթ', 'ֆանտազիա', 'արարած', 'հրեշ'})),
EmojiAnnotations(emoji='👽', codepoints=(128125,), name='այլմոլորակային', slug='այլմոլորակային', annotations=frozenset({'տիեզերք', 'դեմք', 'հեքիաթ', 'ֆանտազիա', 'չթօ', 'հրեշ', 'արարած'})),
EmojiAnnotations(emoji='👾', codepoints=(128126,), name='այլմոլորակային հրեշ', slug='այլմոլորակային_հրեշ', annotations=frozenset({'տիեզերք', 'այլմոլորակային', 'դեմք', 'հեքիաթ', 'ֆանտազիա', 'չթօ', 'հրեշ', 'արարած'})),
EmojiAnnotations(emoji='\U0001f916', codepoints=(129302,), name='ռոբոտի դեմք', slug='ռոբոտի_դեմք', annotations=frozenset({'դեմք', 'ռոբոտ', 'հրեշ'})),
EmojiAnnotations(emoji='💩', codepoints=(128169,), name='կեղտի կույտ', slug='կեղտի_կույտ', annotations=frozenset({'գոմաղբ', 'կոմիքս', 'դեմք', 'կեղտ', 'հրեշ', 'կղանք'})),
EmojiAnnotations(emoji='😺', codepoints=(128570,), name='ժպտացող կատվի դեմք բաց բերանով', slug='ժպտացող_կատվի_դեմք_բաց_բերանով', annotations=frozenset({'բաց', 'կատու', 'դեմք', 'ժպտալ', 'բերան'})),
EmojiAnnotations(emoji='😸', codepoints=(128568,), name='ծիծաղող կատվի դեմք ժպտացող աչքերով', slug='ծիծաղող_կատվի_դեմք_ժպտացող_աչքերով', annotations=frozenset({'կատու', 'աչք', 'դեմք', 'ժպտալ', 'կանաչ'})),
EmojiAnnotations(emoji='😹', codepoints=(128569,), name='կատվի դեմք ուրախության արցունքներով', slug='կատվի_դեմք_ուրախության_արցունքներով', annotations=frozenset({'ուրախություն', 'կատու', 'դեմք', 'արտասուք'})),
EmojiAnnotations(emoji='😻', codepoints=(128571,), name='ժպտացող կատվի դեմք սրտաձև աչքերով', slug='ժպտացող_կատվի_դեմք_սրտաձև_աչքերով', annotations=frozenset({'աչք', 'սիրտ', 'ժպտալ', 'սեր', 'դեմք', 'կատու'})),
EmojiAnnotations(emoji='😼', codepoints=(128572,), name='կատվի դեմք ծամածռված ժպիտով', slug='կատվի_դեմք_ծամածռված_ժպիտով', annotations=frozenset({'ծամածռված', 'կատու', 'դեմք', 'հեգնական', 'ժպտալ'})),
EmojiAnnotations(emoji='😽', codepoints=(128573,), name='համբուրող կատվի դեմք փակ աչքերով', slug='համբուրող_կատվի_դեմք_փակ_աչքերով', annotations=frozenset({'կատու', 'աչք', 'դեմք', 'համբույր'})),
EmojiAnnotations(emoji='🙀', codepoints=(128576,), name='ուժասպառ կատվի դեմք', slug='ուժասպառ_կատվի_դեմք', annotations=frozenset({'զարմացած', 'կատու', 'դեմք', 'ուժասպառ', 'օհ'})),
EmojiAnnotations(emoji='😿', codepoints=(128575,), name='արտասվող կատվի դեմք', slug='արտասվող_կատվի_դեմք', annotations=frozenset({'արտասվել', 'կատու', 'դեմք', 'տխուր', 'արտասուք'})),
EmojiAnnotations(emoji='😾', codepoints=(128574,), name='դժգոհ կատվի դեմք', slug='դժգոհ_կատվի_դեմք', annotations=frozenset({'կատու', 'դեմք', 'դժգոհ'})),
EmojiAnnotations(emoji='🙈', codepoints=(128584,), name='ոչինչ չեմ տեսնում', slug='ոչինչ_չեմ_տեսնում', annotations=frozenset({'չար', 'կապիկ', 'ժեստ', 'ոչ', 'տեսնել', 'դեմք', 'արգելված'})),
EmojiAnnotations(emoji='🙉', codepoints=(128585,), name='ոչինչ չեմ լսում', slug='ոչինչ_չեմ_լսում', annotations=frozenset({'լսել', 'չար', 'կապիկ', 'ժեստ', 'ոչ', 'դեմք', 'արգելված'})),
EmojiAnnotations(emoji='🙊', codepoints=(128586,), name='ոչինչ չեմ ասում', slug='ոչինչ_չեմ_ասում', annotations=frozenset({'չար', 'կապիկ', 'ժեստ', 'ոչ', 'դեմք', 'արգելված', 'խոսալ'})),
EmojiAnnotations(emoji='👧', codepoints=(128103,), name='աղջիկ', slug='աղջիկ', annotations=frozenset({'օրիորդ', 'կենդանակերպ', 'կույս'})),
EmojiAnnotations(emoji='👴', codepoints=(128116,), name='տարեց տղամարդ', slug='տարեց_տղամարդ', annotations=frozenset({'տղամարդ', 'տարեց'})),
EmojiAnnotations(emoji='👵', codepoints=(128117,), name='տարեց կին', slug='տարեց_կին', annotations=frozenset({'տարեց', 'կին'})),
EmojiAnnotations(emoji='👮', codepoints=(128110,), name='ոստիկան', slug='ոստիկան', annotations=frozenset({'սպա', 'ոստիկանություն'})),
EmojiAnnotations(emoji='👲', codepoints=(128114,), name='չինական գլխարկով մարդ', slug='չինական_գլխարկով_մարդ', annotations=frozenset({'գլխարկ', 'մարդ', 'չինական'})),
EmojiAnnotations(emoji='👳', codepoints=(128115,), name='չալմայով մարդ', slug='չալմայով_մարդ', annotations=frozenset({'չալմա', 'մարդ'})),
EmojiAnnotations(emoji='👷', codepoints=(128119,), name='շինարար', slug='շինարար', annotations=frozenset({'գլխարկ', 'շինարարություն', 'աշխատող'})),
EmojiAnnotations(emoji='⛑', codepoints=(9937,), name='սպիտակ խաչով սաղավարտ', slug='սպիտակ_խաչով_սաղավարտ', annotations=frozenset({'գլխարկ', 'դեմք', 'խաչ', 'սաղավարտ', 'օգնություն'})),
EmojiAnnotations(emoji='👸', codepoints=(128120,), name='արքայադուստր', slug='արքայադուստր', annotations=frozenset({'հեքիաթ', 'ֆանտազիա'})),
EmojiAnnotations(emoji='\U0001f575', codepoints=(128373,), name='խուզարկու', slug='խուզարկու', annotations=frozenset({'լրտես'})),
EmojiAnnotations(emoji='🎅', codepoints=(127877,), name='սանտա կլաուս', slug='սանտա_կլաուս', annotations=frozenset({'տոն', 'սանտա', 'հեքիաթ', 'ֆանտազիա', 'սուրբ ծնունդ', 'հայր'})),
EmojiAnnotations(emoji='👼', codepoints=(128124,), name='մանուկ-հրեշտակ', slug='մանուկ_հրեշտակ', annotations=frozenset({'երեխա', 'դեմք', 'հեքիաթ', 'ֆանտազիա', 'հրեշտակ'})),
EmojiAnnotations(emoji='💆', codepoints=(128134,), name='դեմքի մերսում', slug='դեմքի_մերսում', annotations=frozenset({'սրահ', 'մերսում'})),
EmojiAnnotations(emoji='💇', codepoints=(128135,), name='սանրվածք', slug='սանրվածք', annotations=frozenset({'վարսավիր', 'գեղեցկություն', 'սրահ'})),
EmojiAnnotations(emoji='👰', codepoints=(128112,), name='քողով հարս', slug='քողով_հարս', annotations=frozenset({'քող', 'հարս', 'հարսանիք'})),
EmojiAnnotations(emoji='🙍', codepoints=(128589,), name='խոժոռված դեմքով անձ', slug='խոժոռված_դեմքով_անձ', annotations=frozenset({'խոժոռված', 'ժեստ'})),
EmojiAnnotations(emoji='🙎', codepoints=(128590,), name='դժգոհ անձ', slug='դժգոհ_անձ', annotations=frozenset({'ժեստ', 'դժգոհ'})),
EmojiAnnotations(emoji='🙅', codepoints=(128581,), name='ոչ ցույց տվող', slug='ոչ_ցույց_տվող', annotations=frozenset({'արգելված', 'ձեռք', 'ժեստ', 'ոչ'})),
EmojiAnnotations(emoji='🙆', codepoints=(128582,), name='ok ցույց տվող', slug='ok_ցույց_տվող', annotations=frozenset({'ձեռք', 'ժեստ', 'ok'})),
EmojiAnnotations(emoji='💁', codepoints=(128129,), name='տեղեկատու բյուրոյի աշխատող', slug='տեղեկատու_բյուրոյի_աշխատող', annotations=frozenset({'հանդուգն', 'ձեռք', 'օգնել', 'տեղեկատվություն'})),
EmojiAnnotations(emoji='🙋', codepoints=(128587,), name='ձեռք բարձրացնող ուրախ անձ', slug='ձեռք_բարձրացնող_ուրախ_անձ', annotations=frozenset({'ձեռք', 'երջանիկ', 'ժեստ', 'բարձրացված'})),
EmojiAnnotations(emoji='🙇', codepoints=(128583,), name='խոնարհվող անձ', slug='խոնարհվող_անձ', annotations=frozenset({'ներողություն խնդրել', 'ներողություն', 'ժեստ', 'խոնարհվել'})),
EmojiAnnotations(emoji='🙌', codepoints=(128588,), name='ձեռքերը բարձրացնող անձ', slug='ձեռքերը_բարձրացնող_անձ', annotations=frozenset({'մարմին', 'տոն', 'ժեստ', 'ձեռք', 'ուռա', 'բարձրացված'})),
EmojiAnnotations(emoji='🙏', codepoints=(128591,), name='միացված ձեռքի ափեր', slug='միացված_ձեռքի_ափեր', annotations=frozenset({'աղոթել', 'խնդրել', 'մարմին', 'խնդրում եմ', 'ժեստ', 'խոնարհվել', 'ձեռք', 'շնորհակալություն', 'միացված'})),
EmojiAnnotations(emoji='\U0001f5e3', codepoints=(128483,), name='խոսացող գլուխ', slug='խոսացող_գլուխ', annotations=frozenset({'դեմք', 'գլուխ', 'ուրվագիծ', 'խոսացող', 'խոսալ'})),
EmojiAnnotations(emoji='👤', codepoints=(128100,), name='ուրվագծված կիսանդրի', slug='ուրվագծված_կիսանդրի', annotations=frozenset({'ուրվագիծ', 'կիսանդրի'})),
EmojiAnnotations(emoji='👥', codepoints=(128101,), name='ուրվագծված կիսանդրիներ', slug='ուրվագծված_կիսանդրիներ', annotations=frozenset({'ուրվագիծ', 'կիսանդրի'})),
EmojiAnnotations(emoji='🚶', codepoints=(128694,), name='հետիոտն', slug='հետիոտն', annotations=frozenset({'քայլել', 'զբոսանք', 'զբոսնել'})),
EmojiAnnotations(emoji='🏃', codepoints=(127939,), name='վազող', slug='վազող', annotations=frozenset({'մարաթոն', 'վազք'})),
EmojiAnnotations(emoji='👯', codepoints=(128111,), name='պարող կանայք', slug='պարող_կանայք', annotations=frozenset({'ականջ', 'աղջիկ', 'կին', 'պարող', 'ճագար'})),
EmojiAnnotations(emoji='\U0001f574', codepoints=(128372,), name='տեղում ճախրող գործնական կոստյումով մարդ', slug='տեղում_ճախրող_գործնական_կոստյումով_մարդ', annotations=frozenset({'կոստյում', 'բիզնես', 'մարդ'})),
EmojiAnnotations(emoji='💏', codepoints=(128143,), name='համբույր', slug='համբույր', annotations=frozenset({'զույգ', 'սիրավեպ'})),
EmojiAnnotations(emoji='💑', codepoints=(128145,), name='սրտիկով զույգ', slug='սրտիկով_զույգ', annotations=frozenset({'զույգ', 'սիրտ', 'սիրավեպ', 'սեր'})),
EmojiAnnotations(emoji='👪', codepoints=(128106,), name='ընտանիք', slug='ընտանիք', annotations=frozenset({'երեխա', 'մայր', 'հայր'})),
EmojiAnnotations(emoji='👫', codepoints=(128107,), name='իրար ձեռք բռնած մարդ և կին', slug='իրար_ձեռք_բռնած_մարդ_և_կին', annotations=frozenset({'զույգ', 'ձեռք', 'տղամարդ', 'բռնել', 'կին'})),
EmojiAnnotations(emoji='👬', codepoints=(128108,), name='իրար ձեռք բռնած երկու տղամարդ', slug='իրար_ձեռք_բռնած_երկու_տղամարդ', annotations=frozenset({'երկվորյակ', 'ձեռք', 'տղամարդ', 'բռնել', 'կենդանակերպ', 'զույգ'})),
EmojiAnnotations(emoji='👭', codepoints=(128109,), name='իրար ձեռք բռնած երկու կին', slug='իրար_ձեռք_բռնած_երկու_կին', annotations=frozenset({'զույգ', 'ձեռք', 'բռնել', 'կին'})),
EmojiAnnotations(emoji='\U0001f3fb', codepoints=(127995,), name='մաշկի տիպ-1-2', slug='մաշկի_տիպ_1_2', annotations=frozenset({'տոն', 'ֆիցպատրիկ', 'մաշկ', 'զմայլիկների կերպափոխիչ'})),
EmojiAnnotations(emoji='\U0001f3fc', codepoints=(127996,), name='մաշկի տիպ-3', slug='մաշկի_տիպ_3', annotations=frozenset({'տոն', 'ֆիցպատրիկ', 'մաշկ', 'զմայլիկների կերպափոխիչ'})),
EmojiAnnotations(emoji='\U0001f3fd', codepoints=(127997,), name='մաշկի տիպ-4', slug='մաշկի_տիպ_4', annotations=frozenset({'տոն', 'ֆիցպատրիկ', 'մաշկ', 'զմայլիկների կերպափոխիչ'})),
EmojiAnnotations(emoji='\U0001f3fe', codepoints=(127998,), name='մաշկի տիպ-5', slug='մաշկի_տիպ_5', annotations=frozenset({'տոն', 'ֆիցպատրիկ', 'մաշկ', 'զմայլիկների կերպափոխիչ'})),
EmojiAnnotations(emoji='\U0001f3ff', codepoints=(127999,), name='մաշկի տիպ-6', slug='մաշկի_տիպ_6', annotations=frozenset({'տոն', 'ֆիցպատրիկ', 'մաշկ', 'զմայլիկների կերպափոխիչ'})),
EmojiAnnotations(emoji='💪', codepoints=(128170,), name='ձգված բիցեպս', slug='ձգված_բիցեպս', annotations=frozenset({'բիցեպս', 'մարմին', 'կոմիքս', 'ձգել', 'մկան'})),
EmojiAnnotations(emoji='👈', codepoints=(128072,), name='դեպի ձախ ուղղված ցուցամատ', slug='դեպի_ձախ_ուղղված_ցուցամատ', annotations=frozenset({'ցուցամատ', 'ձեռք', 'ուղղված', 'մատ', 'մարմին'})),
EmojiAnnotations(emoji='👉', codepoints=(128073,), name='դեպի աջ ուղղված ցուցամատ', slug='դեպի_աջ_ուղղված_ցուցամատ', annotations=frozenset({'ցուցամատ', 'ձեռք', 'ուղղված', 'մատ', 'մարմին'})),
EmojiAnnotations(emoji='☝', codepoints=(9757,), name='դեպի վեր ուղղված ցուցամատ ձեռքի ափի կողմից', slug='դեպի_վեր_ուղղված_ցուցամատ_ձեռքի_ափի_կողմից', annotations=frozenset({'մարմին', 'ցուցամատ', 'ձեռքի ափ', 'ձեռք', 'ուղղված', 'մատ', 'վեր'})),
EmojiAnnotations(emoji='👆', codepoints=(128070,), name='դեպի վեր ուղղված ցուցամատ', slug='դեպի_վեր_ուղղված_ցուցամատ', annotations=frozenset({'մարմին', 'ցուցամատ', 'ձեռք', 'ուղղված', 'մատ', 'վեր'})),
EmojiAnnotations(emoji='\U0001f595', codepoints=(128405,), name='մեջտեղի մատ', slug='մեջտեղի_մատ', annotations=frozenset({'ձեռք', 'մատ', 'մարմին'})),
EmojiAnnotations(emoji='👇', codepoints=(128071,), name='դեպի վար ուղղված ցուցամատ', slug='դեպի_վար_ուղղված_ցուցամատ', annotations=frozenset({'մարմին', 'ցուցամատ', 'ձեռք', 'ուղղված', 'վար', 'մատ'})),
EmojiAnnotations(emoji='✌', codepoints=(9996,), name='հաղթական ձեռք', slug='հաղթական_ձեռք', annotations=frozenset({'ձեռք', 'v', 'մարմին', 'հաղթանակ'})),
EmojiAnnotations(emoji='\U0001f596', codepoints=(128406,), name='վուլկանցիների ողջույն', slug='վուլկանցիների_ողջույն', annotations=frozenset({'ձեռք', 'մատ', 'մարմին', 'վուլկան'})),
EmojiAnnotations(emoji='\U0001f918', codepoints=(129304,), name='եղջյուրների նշան', slug='եղջյուրների_նշան', annotations=frozenset({'ձեռք', 'մատ', 'մարմին', 'եղջյուրներ'})),
EmojiAnnotations(emoji='\U0001f590', codepoints=(128400,), name='բացված մատներով բարձրացված ձեռք', slug='բացված_մատներով_բարձրացված_ձեռք', annotations=frozenset({'ձեռք', 'մատ', 'մարմին', 'բացված'})),
EmojiAnnotations(emoji='✋', codepoints=(9995,), name='բարձրացված ձեռք', slug='բարձրացված_ձեռք', annotations=frozenset({'ձեռք', 'մարմին'})),
EmojiAnnotations(emoji='👌', codepoints=(128076,), name='ok ցույց տվող ձեռք', slug='ok_ցույց_տվող_ձեռք', annotations=frozenset({'ձեռք', 'մարմին', 'ok'})),
EmojiAnnotations(emoji='👍', codepoints=(128077,), name='բութ մատը վեր', slug='բութ_մատը_վեր', annotations=frozenset({'բութ', '+1', 'ձեռք', 'մարմին', 'վեր'})),
EmojiAnnotations(emoji='👎', codepoints=(128078,), name='բութ մատը ներքև', slug='բութ_մատը_ներքև', annotations=frozenset({'-1', 'ներքև', 'ձեռք', 'մարմին', 'բութ մատ'})),
EmojiAnnotations(emoji='✊', codepoints=(9994,), name='բարձրացված բռունցք', slug='բարձրացված_բռունցք', annotations=frozenset({'հարված', 'բռունցք', 'ձեռք', 'մարմին', 'սեղմված'})),
EmojiAnnotations(emoji='👊', codepoints=(128074,), name='հանդիպակաց բռունցք', slug='հանդիպակաց_բռունցք', annotations=frozenset({'հարված', 'բռունցք', 'ձեռք', 'մարմին', 'սեղմված'})),
EmojiAnnotations(emoji='👋', codepoints=(128075,), name='թափահարող ձեռք', slug='թափահարող_ձեռք', annotations=frozenset({'ձեռք', 'թափահարել', 'թափահարող', 'մարմին'})),
EmojiAnnotations(emoji='👏', codepoints=(128079,), name='ծափահարող ձեռքեր', slug='ծափահարող_ձեռքեր', annotations=frozenset({'ձեռք', 'մարմին', 'ծափահարել'})),
EmojiAnnotations(emoji='👐', codepoints=(128080,), name='բաց ձեռքեր', slug='բաց_ձեռքեր', annotations=frozenset({'բաց', 'ձեռք', 'մարմին'})),
EmojiAnnotations(emoji='✍', codepoints=(9997,), name='գրող ձեռք', slug='գրող_ձեռք', annotations=frozenset({'ձեռք', 'մարմին', 'գրել'})),
EmojiAnnotations(emoji='💅', codepoints=(128133,), name='եղունգների լաքապատում', slug='եղունգների_լաքապատում', annotations=frozenset({'խնամք', 'մարմին', 'հղկել', 'մատնահարդարում', 'եղունգ', 'կոսմետիկա'})),
EmojiAnnotations(emoji='👂', codepoints=(128066,), name='ականջ', slug='ականջ', annotations=frozenset({'մարմին'})),
EmojiAnnotations(emoji='👃', codepoints=(128067,), name='քիթ', slug='քիթ', annotations=frozenset({'մարմին'})),
EmojiAnnotations(emoji='👣', codepoints=(128099,), name='ոտնահետքեր', slug='ոտնահետքեր', annotations=frozenset({'հագուստ', 'ոտնահետք', 'հետք', 'մարմին'})),
EmojiAnnotations(emoji='👀', codepoints=(128064,), name='աչքեր', slug='աչքեր', annotations=frozenset({'աչք', 'դեմք', 'մարմին'})),
EmojiAnnotations(emoji='\U0001f441', codepoints=(128065,), name='աչք', slug='աչք', annotations=frozenset({'մարմին'})),
EmojiAnnotations(emoji='👅', codepoints=(128069,), name='լեզու', slug='լեզու', annotations=frozenset({'մարմին'})),
EmojiAnnotations(emoji='👄', codepoints=(128068,), name='բերան', slug='բերան', annotations=frozenset({'շուրթեր', 'մարմին'})),
EmojiAnnotations(emoji='💋', codepoints=(128139,), name='համբույրի հետք', slug='համբույրի_հետք', annotations=frozenset({'սիրտ', 'շուրթեր', 'հետք', 'սիրավեպ', 'համբույր'})),
EmojiAnnotations(emoji='💘', codepoints=(128152,), name='նետահարված սիրտ', slug='նետահարված_սիրտ', annotations=frozenset({'նետ', 'սիրտ', 'սիրավեպ', 'կուպիդոն'})),
EmojiAnnotations(emoji='❤', codepoints=(10084,), name='կարմիր սիրտ', slug='կարմիր_սիրտ', annotations=frozenset({'սիրտ'})),
EmojiAnnotations(emoji='💓', codepoints=(128147,), name='բաբախող սիրտ', slug='բաբախող_սիրտ', annotations=frozenset({'սիրտ', 'սրտխփոց', 'պուլսացիա', 'բաբախյուն'})),
EmojiAnnotations(emoji='💔', codepoints=(128148,), name='կոտրված սիրտ', slug='կոտրված_սիրտ', annotations=frozenset({'սիրտ', 'կոտրված', 'կոտրել'})),
EmojiAnnotations(emoji='💕', codepoints=(128149,), name='երկու սրտեր', slug='երկու_սրտեր', annotations=frozenset({'սիրտ', 'սեր'})),
EmojiAnnotations(emoji='💖', codepoints=(128150,), name='շողշողացող սիրտ', slug='շողշողացող_սիրտ', annotations=frozenset({'սիրտ', 'կայծ', 'ոգևորված'})),
EmojiAnnotations(emoji='💗', codepoints=(128151,), name='աճող սիրտ', slug='աճող_սիրտ', annotations=frozenset({'նյարդային', 'սիրտ', 'սրտի զարկ', 'աճող', 'ոգևորված'})),
EmojiAnnotations(emoji='💙', codepoints=(128153,), name='կապույտ սիրտ', slug='կապույտ_սիրտ', annotations=frozenset({'կապույտ', 'սիրտ'})),
EmojiAnnotations(emoji='💚', codepoints=(128154,), name='կանաչ սիրտ', slug='կանաչ_սիրտ', annotations=frozenset({'սիրտ', 'կանաչ'})),
EmojiAnnotations(emoji='💛', codepoints=(128155,), name='դեղին սիրտ', slug='դեղին_սիրտ', annotations=frozenset({'սիրտ', 'դեղին'})),
EmojiAnnotations(emoji='💜', codepoints=(128156,), name='մանուշակագույն սիրտ', slug='մանուշակագույն_սիրտ', annotations=frozenset({'սիրտ', 'մանուշակագույն'})),
EmojiAnnotations(emoji='💝', codepoints=(128157,), name='ժապավենով սիրտ', slug='ժապավենով_սիրտ', annotations=frozenset({'սիրտ', 'ժապավեն', 'վալենտին'})),
EmojiAnnotations(emoji='💞', codepoints=(128158,), name='պտտվող սրտեր', slug='պտտվող_սրտեր', annotations=frozenset({'պտտվող', 'սիրտ'})),
EmojiAnnotations(emoji='💟', codepoints=(128159,), name='սրտաձև նախշ', slug='սրտաձև_նախշ', annotations=frozenset({'սիրտ'})),
EmojiAnnotations(emoji='❣', codepoints=(10083,), name='բացականչական նշանի տեսքով սիրտ', slug='բացականչական_նշանի_տեսքով_սիրտ', annotations=frozenset({'նշան', 'կետադրական', 'սիրտ', 'բացականչություն'})),
EmojiAnnotations(emoji='💌', codepoints=(128140,), name='սիրային նամակ', slug='սիրային_նամակ', annotations=frozenset({'սիրտ', 'նամակ', 'փոստ', 'սիրավեպ', 'սեր'})),
EmojiAnnotations(emoji='💤', codepoints=(128164,), name='խռռռ', slug='խռռռ', annotations=frozenset({'քնել', 'կոմիքս'})),
EmojiAnnotations(emoji='💢', codepoints=(128162,), name='զայրույթի նշան', slug='զայրույթի_նշան', annotations=frozenset({'զայրացած', 'կոմիքս', 'խենք'})),
EmojiAnnotations(emoji='💣', codepoints=(128163,), name='ռումբ', slug='ռումբ', annotations=frozenset({'կոմիքս'})),
EmojiAnnotations(emoji='💥', codepoints=(128165,), name='բախում', slug='բախում', annotations=frozenset({'բում', 'կոմիքս'})),
EmojiAnnotations(emoji='💦', codepoints=(128166,), name='քրտինքի կաթիլներ', slug='քրտինքի_կաթիլներ', annotations=frozenset({'ցայտող', 'կոմիքս', 'քրտինք'})),
EmojiAnnotations(emoji='💨', codepoints=(128168,), name='սրընթաց', slug='սրընթաց', annotations=frozenset({'կոմիքս', 'ընթանալ', 'սլանալ'})),
EmojiAnnotations(emoji='💫', codepoints=(128171,), name='գլխապտույտ', slug='գլխապտույտ', annotations=frozenset({'կոմիքս', 'աստղ'})),
EmojiAnnotations(emoji='💬', codepoints=(128172,), name='խոսքի ամպիկ', slug='խոսքի_ամպիկ', annotations=frozenset({'երկխոսություն', 'փուչիկ', 'կոմիքս', 'պղպջակ', 'խոսք'})),
EmojiAnnotations(emoji='\U0001f5e8', codepoints=(128488,), name='խոսքի ձախակողմյա ամպիկ', slug='խոսքի_ձախակողմյա_ամպիկ', annotations=frozenset({'երկխոսություն', 'խոսք'})),
EmojiAnnotations(emoji='\U0001f5ef', codepoints=(128495,), name='զայրույթի աջակողմյա ամպիկ', slug='զայրույթի_աջակողմյա_ամպիկ', annotations=frozenset({'զայրացած', 'փուչիկ', 'պղպջակ', 'խենք'})),
EmojiAnnotations(emoji='💭', codepoints=(128173,), name='մտքի ամպիկ', slug='մտքի_ամպիկ', annotations=frozenset({'փուչիկ', 'կոմիքս', 'պղպջակ', 'միտք'})),
EmojiAnnotations(emoji='👓', codepoints=(128083,), name='ակնոց', slug='ակնոց', annotations=frozenset({'հագուստ', 'աչք'})),
EmojiAnnotations(emoji='\U0001f576', codepoints=(128374,), name='արևային ակնոց', slug='արևային_ակնոց', annotations=frozenset({'աչք', 'ակնոց', 'մուգ'})),
EmojiAnnotations(emoji='👔', codepoints=(128084,), name='փողկապ', slug='փողկապ', annotations=frozenset({'հագուստ'})),
EmojiAnnotations(emoji='👕', codepoints=(128085,), name='սպորտային վերնաշապիկ', slug='սպորտային_վերնաշապիկ', annotations=frozenset({'հագուստ', 'վերնաշապիկ', 'սպորտային'})),
EmojiAnnotations(emoji='👖', codepoints=(128086,), name='ջինս', slug='ջինս', annotations=frozenset({'հագուստ', 'տաբատ', 'շալվար'})),
EmojiAnnotations(emoji='👗', codepoints=(128087,), name='զգեստ', slug='զգեստ', annotations=frozenset({'հագուստ'})),
EmojiAnnotations(emoji='👘', codepoints=(128088,), name='կիմոնո', slug='կիմոնո', annotations=frozenset({'հագուստ'})),
EmojiAnnotations(emoji='👙', codepoints=(128089,), name='բիկինի', slug='բիկինի', annotations=frozenset({'հագուստ', 'լողալ'})),
EmojiAnnotations(emoji='👚', codepoints=(128090,), name='կնոջ հագուստ', slug='կնոջ_հագուստ', annotations=frozenset({'հագուստ', 'կին'})),
EmojiAnnotations(emoji='👛', codepoints=(128091,), name='դրամապանակ', slug='դրամապանակ', annotations=frozenset({'հագուստ', 'մետաղադրամ'})),
EmojiAnnotations(emoji='👜', codepoints=(128092,), name='ձեռքի պայուսակ', slug='ձեռքի_պայուսակ', annotations=frozenset({'հագուստ', 'պայուսակ'})),
EmojiAnnotations(emoji='👝', codepoints=(128093,), name='պայուսակ', slug='պայուսակ', annotations=frozenset({'հագուստ'})),
EmojiAnnotations(emoji='\U0001f6cd', codepoints=(128717,), name='գնումների պայուսակ', slug='գնումների_պայուսակ', annotations=frozenset({'գնումներ', 'պայուսակ', 'հյուրանոց'})),
EmojiAnnotations(emoji='🎒', codepoints=(127890,), name='դպրոցական պայուսակ', slug='դպրոցական_պայուսակ', annotations=frozenset({'դպրոց', 'պայուսակ', 'ուսապարկ'})),
EmojiAnnotations(emoji='👞', codepoints=(128094,), name='տղամարդու կոշիկ', slug='տղամարդու_կոշիկ', annotations=frozenset({'հագուստ', 'կոշիկ', 'տղամարդ'})),
EmojiAnnotations(emoji='👟', codepoints=(128095,), name='սպորտային կոշիկ', slug='սպորտային_կոշիկ', annotations=frozenset({'հագուստ', 'կոշիկ', 'կեդեր', 'մարզական'})),
EmojiAnnotations(emoji='👠', codepoints=(128096,), name='բարձրակրունկ կոշիկ', slug='բարձրակրունկ_կոշիկ', annotations=frozenset({'հագուստ', 'կոշիկ', 'կրունկ', 'կին'})),
EmojiAnnotations(emoji='👡', codepoints=(128097,), name='կնոջ սանդալ', slug='կնոջ_սանդալ', annotations=frozenset({'հագուստ', 'կոշիկ', 'սանդալ', 'կին'})),
EmojiAnnotations(emoji='👢', codepoints=(128098,), name='կնոջ երկարաճիթք կոշիկ', slug='կնոջ_երկարաճիթք_կոշիկ', annotations=frozenset({'երկարաճիթք կոշիկ', 'հագուստ', 'կոշիկ', 'կին'})),
EmojiAnnotations(emoji='👑', codepoints=(128081,), name='թագ', slug='թագ', annotations=frozenset({'հագուստ', 'արքա', 'թագուհի'})),
EmojiAnnotations(emoji='👒', codepoints=(128082,), name='կնոջ գլխարկ', slug='կնոջ_գլխարկ', annotations=frozenset({'հագուստ', 'գլխարկ', 'կին'})),
EmojiAnnotations(emoji='🎩', codepoints=(127913,), name='ցիլինդր', slug='ցիլինդր', annotations=frozenset({'հագուստ', 'գլխարկ'})),
EmojiAnnotations(emoji='🎓', codepoints=(127891,), name='շրջանավարտի գլխարկ', slug='շրջանավարտի_գլխարկ', annotations=frozenset({'գլխարկ', 'տոն', 'հագուստ', 'ավարտական'})),
EmojiAnnotations(emoji='\U0001f4ff', codepoints=(128255,), name='տերողորմյա', slug='տերողորմյա', annotations=frozenset({'հագուստ', 'վզնոց', 'ուլունքներ', 'աղոթք', 'կրոն'})),
EmojiAnnotations(emoji='💄', codepoints=(128132,), name='շրթներկ', slug='շրթներկ', annotations=frozenset({'կոսմետիա', 'դիմահարդարում'})),
EmojiAnnotations(emoji='💍', codepoints=(128141,), name='մատանի', slug='մատանի', annotations=frozenset({'ադամանդ', 'սիրավեպ'})),
EmojiAnnotations(emoji='💎', codepoints=(128142,), name='թանկարժեք քար', slug='թանկարժեք_քար', annotations=frozenset({'ադամանդ', 'ակն', 'սիրավեպ', 'գոհար'})),
EmojiAnnotations(emoji='🐵', codepoints=(128053,), name='կապիկի դեմք', slug='կապիկի_դեմք', annotations=frozenset({'դեմք', 'կապիկ'})),
EmojiAnnotations(emoji='🐶', codepoints=(128054,), name='շան դեմք', slug='շան_դեմք', annotations=frozenset({'դեմք', 'շուն', 'ընտանի կենդանի'})),
EmojiAnnotations(emoji='🐕', codepoints=(128021,), name='շուն', slug='շուն', annotations=frozenset({'ընտանի կենդանի'})),
EmojiAnnotations(emoji='🐩', codepoints=(128041,), name='պուդել', slug='պուդել', annotations=frozenset({'շուն'})),
EmojiAnnotations(emoji='🐺', codepoints=(128058,), name='գայլի դեմք', slug='գայլի_դեմք', annotations=frozenset({'դեմք', 'գայլ'})),
EmojiAnnotations(emoji='🐱', codepoints=(128049,), name='կատվի դեմք', slug='կատվի_դեմք', annotations=frozenset({'կատու', 'դեմք', 'ընտանի կենդանի'})),
EmojiAnnotations(emoji='🐈', codepoints=(128008,), name='կատու', slug='կատու', annotations=frozenset({'ընտանի կենդանի'})),
EmojiAnnotations(emoji='\U0001f981', codepoints=(129409,), name='առյուծի դեմք', slug='առյուծի_դեմք', annotations=frozenset({'դեմք', 'առյուծ', 'կենդանակերպ', 'կորյուն'})),
EmojiAnnotations(emoji='🐯', codepoints=(128047,), name='վագրի դեմք', slug='վագրի_դեմք', annotations=frozenset({'դեմք', 'վագր'})),
EmojiAnnotations(emoji='🐴', codepoints=(128052,), name='ձիու դեմք', slug='ձիու_դեմք', annotations=frozenset({'դեմք', 'ձի'})),
EmojiAnnotations(emoji='🐎', codepoints=(128014,), name='ձի', slug='ձի', annotations=frozenset({'մրցավազք', 'մրցավազքային ձի'})),
EmojiAnnotations(emoji='\U0001f984', codepoints=(129412,), name='միաեղջյուրի դեմք', slug='միաեղջյուրի_դեմք', annotations=frozenset({'դեմք', 'միաեղջյուր'})),
EmojiAnnotations(emoji='🐮', codepoints=(128046,), name='կովի դեմք', slug='կովի_դեմք', annotations=frozenset({'դեմք', 'կով'})),
EmojiAnnotations(emoji='🐂', codepoints=(128002,), name='ցուլիկ', slug='ցուլիկ', annotations=frozenset({'կենդանակերպ', 'ցուլ'})),
EmojiAnnotations(emoji='🐃', codepoints=(128003,), name='ջրագոմեշ', slug='ջրագոմեշ', annotations=frozenset({'ջուր'})),
EmojiAnnotations(emoji='🐷', codepoints=(128055,), name='խոզի դեմք', slug='խոզի_դեմք', annotations=frozenset({'դեմք', 'խոզ'})),
EmojiAnnotations(emoji='🐖', codepoints=(128022,), name='խոզ', slug='խոզ', annotations=frozenset({'էգ խոզ'})),
EmojiAnnotations(emoji='🐗', codepoints=(128023,), name='վարազ', slug='վարազ', annotations=frozenset({'խոզ'})),
EmojiAnnotations(emoji='🐽', codepoints=(128061,), name='խոզի քիթ', slug='խոզի_քիթ', annotations=frozenset({'դեմք', 'քիթ', 'խոզ'})),
EmojiAnnotations(emoji='🐏', codepoints=(128015,), name='արու ոչխար', slug='արու_ոչխար', annotations=frozenset({'ոչխար', 'կենդանակերպ', 'խոյ'})),
EmojiAnnotations(emoji='🐑', codepoints=(128017,), name='ոչխար', slug='ոչխար', annotations=frozenset({'մաքի'})),
EmojiAnnotations(emoji='🐐', codepoints=(128016,), name='այծ', slug='այծ', annotations=frozenset({'այծեղջյուր', 'կենդանակերպ'})),
EmojiAnnotations(emoji='🐪', codepoints=(128042,), name='ուղտ', slug='ուղտ', annotations=frozenset({'միասապատ', 'կուզ'})),
EmojiAnnotations(emoji='🐫', codepoints=(128043,), name='երկսապատավոր ուղտ', slug='երկսապատավոր_ուղտ', annotations=frozenset({'ուղտ', 'երկսապատանի', 'կուզ'})),
EmojiAnnotations(emoji='🐭', codepoints=(128045,), name='մկան դեմք', slug='մկան_դեմք', annotations=frozenset({'դեմք', 'մուկ'})),
EmojiAnnotations(emoji='🐹', codepoints=(128057,), name='գերմանամկան դեմք', slug='գերմանամկան_դեմք', annotations=frozenset({'դեմք', 'գերմանամուկ', 'ընտանի կենդանի'})),
EmojiAnnotations(emoji='🐰', codepoints=(128048,), name='ճագարի դեմք', slug='ճագարի_դեմք', annotations=frozenset({'դեմք', 'ընտանի կենդանի', 'նապաստակ', 'ճագար'})),
EmojiAnnotations(emoji='🐇', codepoints=(128007,), name='ճագար', slug='ճագար', annotations=frozenset({'ընտանի կենդանի', 'նապաստակ'})),
EmojiAnnotations(emoji='🐻', codepoints=(128059,), name='արջի դեմք', slug='արջի_դեմք', annotations=frozenset({'դեմք', 'արջ'})),
EmojiAnnotations(emoji='🐨', codepoints=(128040,), name='կոալա', slug='կոալա', annotations=frozenset({'արջ'})),
EmojiAnnotations(emoji='🐼', codepoints=(128060,), name='պանդայի դեմք', slug='պանդայի_դեմք', annotations=frozenset({'դեմք', 'պանդա'})),
EmojiAnnotations(emoji='🐾', codepoints=(128062,), name='թաթերի հետքեր', slug='թաթերի_հետքեր', annotations=frozenset({'ոտքեր', 'հետք', 'թաթ'})),
EmojiAnnotations(emoji='🐓', codepoints=(128019,), name='աքաղաղ', slug='աքաղաղ', annotations=frozenset({'աքաղաք'})),
EmojiAnnotations(emoji='🐣', codepoints=(128035,), name='ձվից դուրս եկող ճուտիկ', slug='ձվից_դուրս_եկող_ճուտիկ', annotations=frozenset({'ձագ', 'ձվից դուրս եկող', 'ճուտիկ'})),
EmojiAnnotations(emoji='🐤', codepoints=(128036,), name='ճուտիկ', slug='ճուտիկ', annotations=frozenset({'ձագ'})),
EmojiAnnotations(emoji='🐥', codepoints=(128037,), name='դեմքով կանգնած ճուտիկ', slug='դեմքով_կանգնած_ճուտիկ', annotations=frozenset({'ձագ', 'ճուտիկ'})),
EmojiAnnotations(emoji='\U0001f54a', codepoints=(128330,), name='աղավնի', slug='աղավնի', annotations=frozenset({'թռչուն', 'թռչել', 'խաղաղություն'})),
EmojiAnnotations(emoji='🐸', codepoints=(128056,), name='գորտի դեմք', slug='գորտի_դեմք', annotations=frozenset({'դեմք', 'գորտ'})),
EmojiAnnotations(emoji='🐍', codepoints=(128013,), name='օձ', slug='օձ', annotations=frozenset({'կենդանակերպ'})),
EmojiAnnotations(emoji='🐲', codepoints=(128050,), name='վիշապի դեմք', slug='վիշապի_դեմք', annotations=frozenset({'վիշապ', 'դեմք', 'հեքիաթ'})),
EmojiAnnotations(emoji='🐉', codepoints=(128009,), name='վիշապ', slug='վիշապ', annotations=frozenset({'հեքիաթ'})),
EmojiAnnotations(emoji='🐳', codepoints=(128051,), name='ջուր ցայտեցնող կետաձուկ', slug='ջուր_ցայտեցնող_կետաձուկ', annotations=frozenset({'դեմք', 'կետաձուկ', 'ցայտում'})),
EmojiAnnotations(emoji='🐟', codepoints=(128031,), name='ձուկ', slug='ձուկ', annotations=frozenset({'կենդանակերպ', 'ձկներ'})),
EmojiAnnotations(emoji='🐠', codepoints=(128032,), name='արևադարձային ձուկ', slug='արևադարձային_ձուկ', annotations=frozenset({'ձուկ', 'արևադարձային'})),
EmojiAnnotations(emoji='🐡', codepoints=(128033,), name='փքաձուկ', slug='փքաձուկ', annotations=frozenset({'ձուկ'})),
EmojiAnnotations(emoji='🐚', codepoints=(128026,), name='պարուրաձև խխունջախեցի', slug='պարուրաձև_խխունջախեցի', annotations=frozenset({'պարույր', 'խխունջ'})),
EmojiAnnotations(emoji='\U0001f980', codepoints=(129408,), name='կրաբ', slug='կրաբ', annotations=frozenset({'խեցգետին', 'կենդանակերպ'})),
EmojiAnnotations(emoji='🐜', codepoints=(128028,), name='մրջյուն', slug='մրջյուն', annotations=frozenset({'միջատ'})),
EmojiAnnotations(emoji='🐝', codepoints=(128029,), name='մեղու', slug='մեղու', annotations=frozenset({'միջատ'})),
EmojiAnnotations(emoji='🐞', codepoints=(128030,), name='զատիկ', slug='զատիկ', annotations=frozenset({'միջատ', 'բզեզ'})),
EmojiAnnotations(emoji='\U0001f577', codepoints=(128375,), name='սարդ', slug='սարդ', annotations=frozenset({'միջատ'})),
EmojiAnnotations(emoji='\U0001f578', codepoints=(128376,), name='սարդոստայն', slug='սարդոստայն', annotations=frozenset({'սարդ', 'ոստայն'})),
EmojiAnnotations(emoji='\U0001f982', codepoints=(129410,), name='շագանակագույն կարիճ', slug='շագանակագույն_կարիճ', annotations=frozenset({'կարիճ', 'կենդանակերպ'})),
EmojiAnnotations(emoji='💐', codepoints=(128144,), name='ծաղկեփունջ', slug='ծաղկեփունջ', annotations=frozenset({'ծաղիկ', 'սիրավեպ', 'բույս'})),
EmojiAnnotations(emoji='🌸', codepoints=(127800,), name='բալենու ծաղիկ', slug='բալենու_ծաղիկ', annotations=frozenset({'ծաղիկ', 'բույս', 'բալ'})),
EmojiAnnotations(emoji='💮', codepoints=(128174,), name='սպիտակ ծաղիկ', slug='սպիտակ_ծաղիկ', annotations=frozenset({'ծաղիկ'})),
EmojiAnnotations(emoji='\U0001f3f5', codepoints=(127989,), name='վարդանախշ', slug='վարդանախշ', annotations=frozenset({'բույս'})),
EmojiAnnotations(emoji='🌹', codepoints=(127801,), name='վարդ', slug='վարդ', annotations=frozenset({'ծաղիկ', 'բույս'})),
EmojiAnnotations(emoji='🌺', codepoints=(127802,), name='հիբիսկուս', slug='հիբիսկուս', annotations=frozenset({'ծաղիկ', 'բույս'})),
EmojiAnnotations(emoji='🌻', codepoints=(127803,), name='արևածաղիկ', slug='արևածաղիկ', annotations=frozenset({'ծաղիկ', 'արև', 'բույս'})),
EmojiAnnotations(emoji='🌼', codepoints=(127804,), name='ծաղիկ', slug='ծաղիկ', annotations=frozenset({'բույս'})),
EmojiAnnotations(emoji='🌷', codepoints=(127799,), name='կակաչ', slug='կակաչ', annotations=frozenset({'ծաղիկ', 'բույս'})),
EmojiAnnotations(emoji='🌱', codepoints=(127793,), name='ծիլ', slug='ծիլ', annotations=frozenset({'բույս', 'մատղաշ'})),
EmojiAnnotations(emoji='🌲', codepoints=(127794,), name='եղևնի', slug='եղևնի', annotations=frozenset({'բույս', 'ծառ'})),
EmojiAnnotations(emoji='🌳', codepoints=(127795,), name='սաղարթավոր ծառ', slug='սաղարթավոր_ծառ', annotations=frozenset({'սաղարթավոր', 'բույս', 'ծառ'})),
EmojiAnnotations(emoji='🌴', codepoints=(127796,), name='արմավենի', slug='արմավենի', annotations=frozenset({'բույս', 'ծառ'})),
EmojiAnnotations(emoji='🌵', codepoints=(127797,), name='կակտուս', slug='կակտուս', annotations=frozenset({'բույս'})),
EmojiAnnotations(emoji='🌾', codepoints=(127806,), name='բրնձի հասկեր', slug='բրնձի_հասկեր', annotations=frozenset({'ականջ', 'բույս', 'բրինձ'})),
EmojiAnnotations(emoji='🌿', codepoints=(127807,), name='խոտաբույս', slug='խոտաբույս', annotations=frozenset({'տերև', 'բույս'})),
EmojiAnnotations(emoji='☘', codepoints=(9752,), name='երեքնուկ', slug='երեքնուկ', annotations=frozenset({'բույս'})),
EmojiAnnotations(emoji='🍀', codepoints=(127808,), name='քառատերև երեքնուկ', slug='քառատերև_երեքնուկ', annotations=frozenset({'4', 'չորս', 'տերև', 'բույս', 'երեքնուկ'})),
EmojiAnnotations(emoji='🍁', codepoints=(127809,), name='թխկու տերև', slug='թխկու_տերև', annotations=frozenset({'տերև', 'թխկի', 'բույս', 'ընկնող'})),
EmojiAnnotations(emoji='🍂', codepoints=(127810,), name='ընկած տերևներ', slug='ընկած_տերևներ', annotations=frozenset({'տերև', 'բույս', 'ընկնող'})),
EmojiAnnotations(emoji='🍃', codepoints=(127811,), name='ճախրող տերևներ', slug='ճախրող_տերևներ', annotations=frozenset({'տերև', 'քամի', 'փչել', 'թրթռալ', 'բույս'})),
EmojiAnnotations(emoji='🍇', codepoints=(127815,), name='խաղող', slug='խաղող', annotations=frozenset({'բույս', 'պտուղ'})),
EmojiAnnotations(emoji='🍈', codepoints=(127816,), name='սեխ', slug='սեխ', annotations=frozenset({'բույս', 'պտուղ'})),
EmojiAnnotations(emoji='🍉', codepoints=(127817,), name='ձմերուկ', slug='ձմերուկ', annotations=frozenset({'բույս', 'պտուղ'})),
EmojiAnnotations(emoji='🍊', codepoints=(127818,), name='մանդարին', slug='մանդարին', annotations=frozenset({'նարինջ', 'բույս', 'պտուղ'})),
EmojiAnnotations(emoji='🍋', codepoints=(127819,), name='կիտրոն', slug='կիտրոն', annotations=frozenset({'ցիտրուս', 'բույս', 'պտուղ'})),
EmojiAnnotations(emoji='🍌', codepoints=(127820,), name='բանան', slug='բանան', annotations=frozenset({'բույս', 'պտուղ'})),
EmojiAnnotations(emoji='🍍', codepoints=(127821,), name='արքայախնձոր', slug='արքայախնձոր', annotations=frozenset({'բույս', 'պտուղ'})),
EmojiAnnotations(emoji='🍎', codepoints=(127822,), name='կարմիր խնձոր', slug='կարմիր_խնձոր', annotations=frozenset({'կարմիր', 'խնձոր', 'բույս', 'պտուղ'})),
EmojiAnnotations(emoji='🍏', codepoints=(127823,), name='կանաչ խնձոր', slug='կանաչ_խնձոր', annotations=frozenset({'խնձոր', 'բույս', 'պտուղ', 'կանաչ'})),
EmojiAnnotations(emoji='🍐', codepoints=(127824,), name='տանձ', slug='տանձ', annotations=frozenset({'բույս', 'պտուղ'})),
EmojiAnnotations(emoji='🍑', codepoints=(127825,), name='դեղձ', slug='դեղձ', annotations=frozenset({'բույս', 'պտուղ'})),
EmojiAnnotations(emoji='🍒', codepoints=(127826,), name='բալ', slug='բալ', annotations=frozenset({'բույս', 'պտուղ'})),
EmojiAnnotations(emoji='🍓', codepoints=(127827,), name='ելակ', slug='ելակ', annotations=frozenset({'հատապտուղ', 'բույս', 'պտուղ'})),
EmojiAnnotations(emoji='🍅', codepoints=(127813,), name='լոլիկ', slug='լոլիկ', annotations=frozenset({'բույս', 'բանջարեղեն'})),
EmojiAnnotations(emoji='🍆', codepoints=(127814,), name='սմբուկ', slug='սմբուկ', annotations=frozenset({'բույս', 'բանջարեղեն'})),
EmojiAnnotations(emoji='🌽', codepoints=(127805,), name='եգիպտացորեն', slug='եգիպտացորեն', annotations=frozenset({'ականջ', 'բույս'})),
EmojiAnnotations(emoji='\U0001f336', codepoints=(127798,), name='կծու պղպեղ', slug='կծու_պղպեղ', annotations=frozenset({'պղպեղ', 'կծու', 'բույս'})),
EmojiAnnotations(emoji='🍄', codepoints=(127812,), name='սունկ', slug='սունկ', annotations=frozenset({'բույս'})),
EmojiAnnotations(emoji='🌰', codepoints=(127792,), name='շագանակ', slug='շագանակ', annotations=frozenset({'բույս'})),
EmojiAnnotations(emoji='🍞', codepoints=(127838,), name='հաց', slug='հաց', annotations=frozenset({'բոքոն'})),
EmojiAnnotations(emoji='\U0001f9c0', codepoints=(129472,), name='պանրի կտոր', slug='պանրի_կտոր', annotations=frozenset({'պանիր'})),
EmojiAnnotations(emoji='🍖', codepoints=(127830,), name='ոսկորով միս', slug='ոսկորով_միս', annotations=frozenset({'ոսկոր', 'միս'})),
EmojiAnnotations(emoji='🍗', codepoints=(127831,), name='հավի բուդ', slug='հավի_բուդ', annotations=frozenset({'ոսկոր', 'բուդ', 'հավ', 'թռչնամիս'})),
EmojiAnnotations(emoji='🍔', codepoints=(127828,), name='համբուրգեր', slug='համբուրգեր', annotations=frozenset({'բուրգեր'})),
EmojiAnnotations(emoji='🍟', codepoints=(127839,), name='տապակած կարտոֆիլ', slug='տապակած_կարտոֆիլ', annotations=frozenset({'կարտոֆիլ', 'ֆրի'})),
EmojiAnnotations(emoji='🍕', codepoints=(127829,), name='պիցցա', slug='պիցցա', annotations=frozenset({'պանիր', 'կտոր'})),
EmojiAnnotations(emoji='\U0001f32d', codepoints=(127789,), name='հոթդոգ', slug='հոթդոգ', annotations=frozenset({'նրբերշիկ', 'ֆրանկֆուրտեր'})),
EmojiAnnotations(emoji='\U0001f32e', codepoints=(127790,), name='տակո', slug='տակո', annotations=frozenset({'մեքսիկական'})),
EmojiAnnotations(emoji='\U0001f32f', codepoints=(127791,), name='բուրիտո', slug='բուրիտո', annotations=frozenset({'մեքսիկական'})),
EmojiAnnotations(emoji='🍲', codepoints=(127858,), name='թասով ճաշ', slug='թասով_ճաշ', annotations=frozenset({'թաս', 'ճաշ'})),
EmojiAnnotations(emoji='🍱', codepoints=(127857,), name='բենտո արկղիկ', slug='բենտո_արկղիկ', annotations=frozenset({'բենտո', 'արկղիկ'})),
EmojiAnnotations(emoji='🍘', codepoints=(127832,), name='բրնձի կրեկեր', slug='բրնձի_կրեկեր', annotations=frozenset({'բրինձ', 'կրեկեր'})),
EmojiAnnotations(emoji='🍙', codepoints=(127833,), name='բրնձի գնդիկ', slug='բրնձի_գնդիկ', annotations=frozenset({'գնդիկ', 'ճապոնական', 'բրինձ'})),
EmojiAnnotations(emoji='🍚', codepoints=(127834,), name='եփած բրինձ', slug='եփած_բրինձ', annotations=frozenset({'եփած', 'բրինձ'})),
EmojiAnnotations(emoji='🍛', codepoints=(127835,), name='կարրիով բրինձ', slug='կարրիով_բրինձ', annotations=frozenset({'կարրի', 'բրինձ'})),
EmojiAnnotations(emoji='🍜', codepoints=(127836,), name='տաք ապուր', slug='տաք_ապուր', annotations=frozenset({'թաս', 'տաք', 'լապշա'})),
EmojiAnnotations(emoji='🍝', codepoints=(127837,), name='սպագետի', slug='սպագետի', annotations=frozenset({'մակարոնեղեն'})),
EmojiAnnotations(emoji='🍠', codepoints=(127840,), name='կարմրացրած քաղցր կարտոֆիլ', slug='կարմրացրած_քաղցր_կարտոֆիլ', annotations=frozenset({'կարտոֆիլ', 'կարմրացրած', 'քաղցր'})),
EmojiAnnotations(emoji='🍢', codepoints=(127842,), name='օդեն', slug='օդեն', annotations=frozenset({'ծովամթերք', 'շամփուր', 'քյաբաբ', 'փայտիկ'})),
EmojiAnnotations(emoji='🍤', codepoints=(127844,), name='տապակած ծովախեցգետին', slug='տապակած_ծովախեցգետին', annotations=frozenset({'տապակած', 'ծովախեցգետին'})),
EmojiAnnotations(emoji='🍥', codepoints=(127845,), name='ձկնային տորթ պտտանախշով', slug='ձկնային_տորթ_պտտանախշով', annotations=frozenset({'տորթ', 'խմորեղեն', 'ձուկ', 'պտտանախշ'})),
EmojiAnnotations(emoji='🍡', codepoints=(127841,), name='դանգո', slug='դանգո', annotations=frozenset({'շամփուր', 'փայտիկ', 'քաղցր', 'ճապոնական', 'դեսերտ'})),
EmojiAnnotations(emoji='🍦', codepoints=(127846,), name='լցնովի պաղպաղակ', slug='լցնովի_պաղպաղակ', annotations=frozenset({'քաղցր', 'պաղպաղակ', 'դեսերտ', 'կրեմ', 'լցնովի', 'փափուկ'})),
EmojiAnnotations(emoji='🍧', codepoints=(127847,), name='մանրացված սառույց', slug='մանրացված_սառույց', annotations=frozenset({'սառույց', 'մանրացված', 'քաղցր', 'դեսերտ'})),
EmojiAnnotations(emoji='🍨', codepoints=(127848,), name='պաղպաղակ', slug='պաղպաղակ', annotations=frozenset({'կրեմ', 'քաղցր', 'դեսերտ'})),
EmojiAnnotations(emoji='🍩', codepoints=(127849,), name='դոնաթ', slug='դոնաթ', annotations=frozenset({'քաղցր', 'դեսերտ'})),
EmojiAnnotations(emoji='🍪', codepoints=(127850,), name='թխվածքաբլիթ', slug='թխվածքաբլիթ', annotations=frozenset({'քաղցր', 'դեսերտ'})),
EmojiAnnotations(emoji='🎂', codepoints=(127874,), name='ծննդյան տորթ', slug='ծննդյան_տորթ', annotations=frozenset({'քաղցր', 'տոն', 'դեսերտ', 'տորթ', 'խմորեղեն', 'տարեդարձ'})),
EmojiAnnotations(emoji='🍰', codepoints=(127856,), name='տորթի կտոր', slug='տորթի_կտոր', annotations=frozenset({'կտոր', 'տորթ', 'խմորեղեն', 'քաղցր', 'դեսերտ'})),
EmojiAnnotations(emoji='🍫', codepoints=(127851,), name='շոկոլադե սալիկ', slug='շոկոլադե_սալիկ', annotations=frozenset({'սալիկ', 'շոկոլադ', 'քաղցր', 'դեսերտ'})),
EmojiAnnotations(emoji='🍬', codepoints=(127852,), name='կոնֆետ', slug='կոնֆետ', annotations=frozenset({'քաղցր', 'դեսերտ'})),
EmojiAnnotations(emoji='🍭', codepoints=(127853,), name='սառնաշաքար', slug='սառնաշաքար', annotations=frozenset({'կոնֆետ', 'քաղցր', 'դեսերտ'})),
EmojiAnnotations(emoji='🍮', codepoints=(127854,), name='պուդինգ', slug='պուդինգ', annotations=frozenset({'քաղցր', 'դեսերտ'})),
EmojiAnnotations(emoji='🍯', codepoints=(127855,), name='մեղրի կճուճ', slug='մեղրի_կճուճ', annotations=frozenset({'քաղցր', 'մեղր', 'կճուճ'})),
EmojiAnnotations(emoji='🍼', codepoints=(127868,), name='մանկական շիշ', slug='մանկական_շիշ', annotations=frozenset({'մանկական', 'շիշ', 'խմել', 'կաթ'})),
EmojiAnnotations(emoji='☕', codepoints=(9749,), name='տաք ըմպելիք', slug='տաք_ըմպելիք', annotations=frozenset({'սուրճ', 'թեյ', 'խմել', 'տաք', 'ըմպելիք'})),
EmojiAnnotations(emoji='🍵', codepoints=(127861,), name='թեյի բաժակ առանց բռնակի', slug='թեյի_բաժակ_առանց_բռնակի', annotations=frozenset({'թեյի բաժակ', 'բաժակ', 'խմել', 'թեյ', 'ըմպելիք'})),
EmojiAnnotations(emoji='🍶', codepoints=(127862,), name='սակե', slug='սակե', annotations=frozenset({'բար', 'շիշ', 'բաժակ', 'խմել', 'ըմպելիք'})),
EmojiAnnotations(emoji='\U0001f37e', codepoints=(127870,), name='թռչող խցանով շիշ', slug='թռչող_խցանով_շիշ', annotations=frozenset({'բար', 'խցան', 'շիշ', 'խմել', 'դուրս թռչել'})),
EmojiAnnotations(emoji='🍷', codepoints=(127863,), name='գինու բաժակ', slug='գինու_բաժակ', annotations=frozenset({'բար', 'բաժակ', 'խմել', 'գինի', 'ըմպելիք'})),
EmojiAnnotations(emoji='🍸', codepoints=(127864,), name='կոկտեյլի բաժակ', slug='կոկտեյլի_բաժակ', annotations=frozenset({'բար', 'բաժակ', 'խմել', 'կոկտեյլ'})),
EmojiAnnotations(emoji='🍹', codepoints=(127865,), name='արևադարձային ընպելիք', slug='արևադարձային_ընպելիք', annotations=frozenset({'բար', 'խմել', 'արևադարձային'})),
EmojiAnnotations(emoji='🍺', codepoints=(127866,), name='գարեջրի գավաթ', slug='գարեջրի_գավաթ', annotations=frozenset({'բար', 'խմել', 'գավաթ', 'գարեջուր'})),
EmojiAnnotations(emoji='🍻', codepoints=(127867,), name='զրնգացող գարեջրի գավաթներ', slug='զրնգացող_գարեջրի_գավաթներ', annotations=frozenset({'բար', 'զրնգալ', 'խմել', 'գավաթ', 'գարեջուր'})),
EmojiAnnotations(emoji='\U0001f37d', codepoints=(127869,), name='դանակ և պատառաքաղ ափսեի հետ', slug='դանակ_և_պատառաքաղ_ափսեի_հետ', annotations=frozenset({'խոհարարություն', 'պատառաքաղ', 'դանակ', 'ափսե'})),
EmojiAnnotations(emoji='🍴', codepoints=(127860,), name='դանակ և պատառաքաղ', slug='դանակ_և_պատառաքաղ', annotations=frozenset({'խոհարարություն', 'պատառաքաղ', 'դանակ'})),
EmojiAnnotations(emoji='🍳', codepoints=(127859,), name='թավայով ձվածեղ', slug='թավայով_ձվածեղ', annotations=frozenset({'ձու', 'թավա', 'տապակել'})),
EmojiAnnotations(emoji='\U0001f3fa', codepoints=(127994,), name='սափոր', slug='սափոր', annotations=frozenset({'խմել', 'խոհարարություն', 'գործիք', 'կենդանակերպ', 'զենք', 'ջրհոս'})),
EmojiAnnotations(emoji='🌍', codepoints=(127757,), name='եվրոպան և աֆրիկան պատկերող գլոբուս', slug='եվրոպան_և_աֆրիկան_պատկերող_գլոբուս', annotations=frozenset({'աշխարհ', 'երկիր', 'գլոբուս', 'եվրոպա', 'աֆրիկա'})),
EmojiAnnotations(emoji='🌎', codepoints=(127758,), name='ամերիկաները պատկերող գլոբուս', slug='ամերիկաները_պատկերող_գլոբուս', annotations=frozenset({'երկիր', 'գլոբուս', 'ամերիկաներ', 'աշխարհ'})),
EmojiAnnotations(emoji='🌏', codepoints=(127759,), name='ասիան և ավստրալիան պատկերող գլոբուս', slug='ասիան_և_ավստրալիան_պատկերող_գլոբուս', annotations=frozenset({'աշխարհ', 'երկիր', 'գլոբուս', 'ավստրալիա', 'ասիա'})),
EmojiAnnotations(emoji='🌐', codepoints=(127760,), name='միջօրեականներով գլոբուս', slug='միջօրեականներով_գլոբուս', annotations=frozenset({'երկիր', 'գլոբուս', 'միջօրեականներ', 'աշխարհ'})),
EmojiAnnotations(emoji='\U0001f5fa', codepoints=(128506,), name='աշխարհի քարտեզ', slug='աշխարհի_քարտեզ', annotations=frozenset({'քարտեզ', 'աշխարհ'})),
EmojiAnnotations(emoji='\U0001f3d4', codepoints=(127956,), name='ձյունածածկ գագաթով լեռ', slug='ձյունածածկ_գագաթով_լեռ', annotations=frozenset({'սառը', 'ձյուն', 'լեռ'})),
EmojiAnnotations(emoji='🌋', codepoints=(127755,), name='հրաբուխ', slug='հրաբուխ', annotations=frozenset({'եղանակ', 'լեռ', 'ժայթքում'})),
EmojiAnnotations(emoji='🗻', codepoints=(128507,), name='ֆուջի լեռ', slug='ֆուջի_լեռ', annotations=frozenset({'լեռ', 'ֆուջի'})),
EmojiAnnotations(emoji='\U0001f3d6', codepoints=(127958,), name='լողափ հովանոցով', slug='լողափ_հովանոցով', annotations=frozenset({'լողափ', 'հովանոց'})),
EmojiAnnotations(emoji='\U0001f3dd', codepoints=(127965,), name='անմարդաբնակ կղզի', slug='անմարդաբնակ_կղզի', annotations=frozenset({'կղզի', 'անմարդաբնակ'})),
EmojiAnnotations(emoji='\U0001f3de', codepoints=(127966,), name='ազգային պարկ', slug='ազգային_պարկ', annotations=frozenset({'պարկ'})),
EmojiAnnotations(emoji='\U0001f3db', codepoints=(127963,), name='հունահռոմեական շինություն', slug='հունահռոմեական_շինություն', annotations=frozenset({'հունահռոմեական', 'շինություն'})),
EmojiAnnotations(emoji='\U0001f3d7', codepoints=(127959,), name='շենքի կառուցում', slug='շենքի_կառուցում', annotations=frozenset({'շենք', 'շինարարություն'})),
EmojiAnnotations(emoji='\U0001f3d8', codepoints=(127960,), name='տան շինարարություն', slug='տան_շինարարություն', annotations=frozenset({'շենք', 'տուն'})),
EmojiAnnotations(emoji='\U0001f3d9', codepoints=(127961,), name='քաղաքի համայնապատկեր', slug='քաղաքի_համայնապատկեր', annotations=frozenset({'քաղաք', 'շենք'})),
EmojiAnnotations(emoji='\U0001f3da', codepoints=(127962,), name='լքված շինություն', slug='լքված_շինություն', annotations=frozenset({'լքված', 'շենք', 'տուն'})),
EmojiAnnotations(emoji='🏠', codepoints=(127968,), name='բնակելի տուն', slug='բնակելի_տուն', annotations=frozenset({'շենք', 'բնակարան', 'տուն'})),
EmojiAnnotations(emoji='🏡', codepoints=(127969,), name='այգիով տուն', slug='այգիով_տուն', annotations=frozenset({'այգի', 'շենք', 'բնակարան', 'տուն'})),
EmojiAnnotations(emoji='⛪', codepoints=(9962,), name='եկեղեցի', slug='եկեղեցի', annotations=frozenset({'խաչ', 'շենք', 'կրոն', 'քրիստոնեական'})),
EmojiAnnotations(emoji='\U0001f54b', codepoints=(128331,), name='կաաբա', slug='կաաբա', annotations=frozenset({'իսլամ', 'մուսուլման', 'կրոն'})),
EmojiAnnotations(emoji='\U0001f54c', codepoints=(128332,), name='մզկիթ', slug='մզկիթ', annotations=frozenset({'իսլամ', 'մուսուլման', 'կրոն'})),
EmojiAnnotations(emoji='\U0001f54d', codepoints=(128333,), name='սինագոգ', slug='սինագոգ', annotations=frozenset({'հրեա', 'հրեական', 'տաճար', 'կրոն'})),
EmojiAnnotations(emoji='⛩', codepoints=(9961,), name='սինտոյական տաճար', slug='սինտոյական_տաճար', annotations=frozenset({'տաճար', 'կրոն', 'սինտոյական'})),
EmojiAnnotations(emoji='🏢', codepoints=(127970,), name='գրասենյակային շենք', slug='գրասենյակային_շենք', annotations=frozenset({'շենք'})),
EmojiAnnotations(emoji='🏣', codepoints=(127971,), name='ճապոնական փոստատուն', slug='ճապոնական_փոստատուն', annotations=frozenset({'փոստատուն', 'շենք', 'ճապոնական'})),
EmojiAnnotations(emoji='🏤', codepoints=(127972,), name='փոստատուն', slug='փոստատուն', annotations=frozenset({'եվրոպական', 'շենք'})),
EmojiAnnotations(emoji='🏥', codepoints=(127973,), name='հիվանդանոց', slug='հիվանդանոց', annotations=frozenset({'բժշկություն', 'շենք', 'բժիշկ'})),
EmojiAnnotations(emoji='🏦', codepoints=(127974,), name='բանկ', slug='բանկ', annotations=frozenset({'շենք'})),
EmojiAnnotations(emoji='🏨', codepoints=(127976,), name='հյուրանոց', slug='հյուրանոց', annotations=frozenset({'շենք'})),
EmojiAnnotations(emoji='🏩', codepoints=(127977,), name='սիրային հյուրանոց', slug='սիրային_հյուրանոց', annotations=frozenset({'շենք', 'հյուրանոց', 'սեր'})),
EmojiAnnotations(emoji='🏪', codepoints=(127978,), name='շուրջօրյա խանութ', slug='շուրջօրյա_խանութ', annotations=frozenset({'խանութ', 'շենք', 'շուրջօրյա'})),
EmojiAnnotations(emoji='🏫', codepoints=(127979,), name='դպրոց', slug='դպրոց', annotations=frozenset({'շենք'})),
EmojiAnnotations(emoji='🏬', codepoints=(127980,), name='հանրախանութ', slug='հանրախանութ', annotations=frozenset({'խանութ', 'շենք'})),
EmojiAnnotations(emoji='🏭', codepoints=(127981,), name='գործարան', slug='գործարան', annotations=frozenset({'շենք'})),
EmojiAnnotations(emoji='🏯', codepoints=(127983,), name='ճապոնական դղյակ', slug='ճապոնական_դղյակ', annotations=frozenset({'շենք', 'ճապոնական', 'դղյակ'})),
EmojiAnnotations(emoji='🏰', codepoints=(127984,), name='դղյակ', slug='դղյակ', annotations=frozenset({'եվրոպական', 'շենք'})),
EmojiAnnotations(emoji='💒', codepoints=(128146,), name='հարսանիք', slug='հարսանիք', annotations=frozenset({'մատուռ', 'սիրավեպ'})),
EmojiAnnotations(emoji='🗼', codepoints=(128508,), name='տոկիոյի աշտարակը', slug='տոկիոյի_աշտարակը', annotations=frozenset({'աշտարակ', 'տոկյո'})),
EmojiAnnotations(emoji='🗽', codepoints=(128509,), name='ազատության արձանը', slug='ազատության_արձանը', annotations=frozenset({'ազատություն', 'արձան'})),
EmojiAnnotations(emoji='🗾', codepoints=(128510,), name='ճապոնիայի քարտեզը', slug='ճապոնիայի_քարտեզը', annotations=frozenset({'քարտեզ', 'ճապոնիա'})),
EmojiAnnotations(emoji='⛺', codepoints=(9978,), name='վրան', slug='վրան', annotations=frozenset({'ճամբար'})),
EmojiAnnotations(emoji='🌁', codepoints=(127745,), name='մառախլապատ', slug='մառախլապատ', annotations=frozenset({'եղանակ', 'մառախուղ'})),
EmojiAnnotations(emoji='🌃', codepoints=(127747,), name='աստղազարդ գիշեր', slug='աստղազարդ_գիշեր', annotations=frozenset({'եղանակ', 'գիշեր', 'աստղ'})),
EmojiAnnotations(emoji='🌄', codepoints=(127748,), name='արևածագը լեռներում', slug='արևածագը_լեռներում', annotations=frozenset({'արևածագ', 'եղանակ', 'արև', 'լեռ', 'առավոտ'})),
EmojiAnnotations(emoji='🌅', codepoints=(127749,), name='արևածագ', slug='արևածագ', annotations=frozenset({'եղանակ', 'արև', 'առավոտ'})),
EmojiAnnotations(emoji='🌆', codepoints=(127750,), name='քաղաքի համայնապատկեր մթնշաղին', slug='քաղաքի_համայնապատկեր_մթնշաղին', annotations=frozenset({'լանդշաֆտ', 'երեկո', 'շենք', 'մթնշաղ', 'մայրամուտ', 'եղանակ', 'քաղաք', 'արև'})),
EmojiAnnotations(emoji='🌇', codepoints=(127751,), name='մայրամուտ', slug='մայրամուտ', annotations=frozenset({'եղանակ', 'արև', 'շենք', 'մթնշաղ'})),
EmojiAnnotations(emoji='🌉', codepoints=(127753,), name='կամուրջը գիշերով', slug='կամուրջը_գիշերով', annotations=frozenset({'եղանակ', 'գիշեր', 'կամուրջ'})),
EmojiAnnotations(emoji='♨', codepoints=(9832,), name='տաք աղբյուրներ', slug='տաք_աղբյուրներ', annotations=frozenset({'աղբյուրներ', 'տաք', 'հոսք'})),
EmojiAnnotations(emoji='🌌', codepoints=(127756,), name='ծիր կաթին', slug='ծիր_կաթին', annotations=frozenset({'եղանակ', 'տիեզերք'})),
EmojiAnnotations(emoji='🎠', codepoints=(127904,), name='կարուսելի ձի', slug='կարուսելի_ձի', annotations=frozenset({'ձի', 'կարուսել'})),
EmojiAnnotations(emoji='🎡', codepoints=(127905,), name='սատանայի անիվ', slug='սատանայի_անիվ', annotations=frozenset({'զվարճանքների այգի', 'անիվ', 'սատանայի'})),
EmojiAnnotations(emoji='🎢', codepoints=(127906,), name='ամերիկյան բլուրներ', slug='ամերիկյան_բլուրներ', annotations=frozenset({'զվարճանքների այգի', 'բլուրներ', 'ամերիկյան'})),
EmojiAnnotations(emoji='💈', codepoints=(128136,), name='վարսավիրի ձող', slug='վարսավիրի_ձող', annotations=frozenset({'վարսավիր', 'սանրվածք', 'ձող'})),
EmojiAnnotations(emoji='🎪', codepoints=(127914,), name='կրկեսային վրան', slug='կրկեսային_վրան', annotations=frozenset({'վրան', 'կրկես'})),
EmojiAnnotations(emoji='🎭', codepoints=(127917,), name='կատարողական արվեստ', slug='կատարողական_արվեստ', annotations=frozenset({'ներկայացում', 'թատրոն', 'դիմակ', 'արվեստ'})),
EmojiAnnotations(emoji='\U0001f5bc', codepoints=(128444,), name='շրջանակ նկարով', slug='շրջանակ_նկարով', annotations=frozenset({'նկարչություն', 'նկար', 'արվեստ', 'շրջանակ', 'թանգարան'})),
EmojiAnnotations(emoji='🎨', codepoints=(127912,), name='ներկապնակ', slug='ներկապնակ', annotations=frozenset({'նկարչություն', 'արվեստ', 'թանգարան'})),
EmojiAnnotations(emoji='🎰', codepoints=(127920,), name='խաղային ավտոմատ', slug='խաղային_ավտոմատ', annotations=frozenset({'խաղ', 'ավտոմատ'})),
EmojiAnnotations(emoji='🚂', codepoints=(128642,), name='շոգեքարշ', slug='շոգեքարշ', annotations=frozenset({'փոխադրամիջոց', 'գոլորշի', 'երկաթուղի', 'գնացք'})),
EmojiAnnotations(emoji='🚃', codepoints=(128643,), name='երկաթուղային վագոն', slug='երկաթուղային_վագոն', annotations=frozenset({'փոխադրամիջոց', 'տրոլեյբուս', 'էլեկտրական', 'երկաթուղի', 'վագոն', 'տրամվայ', 'գնացք'})),
EmojiAnnotations(emoji='🚄', codepoints=(128644,), name='ճեպընթաց գնացք', slug='ճեպընթաց_գնացք', annotations=frozenset({'գնացք', 'փոխադրամիջոց', 'սինկանսեն', 'երկաթուղի', 'արագություն'})),
EmojiAnnotations(emoji='🚅', codepoints=(128645,), name='ճեպընթաց գնացք կլոր քթով', slug='ճեպընթաց_գնացք_կլոր_քթով', annotations=frozenset({'փոխադրամիջոց', 'արագություն', 'կլոր քիթ', 'երկաթուղի', 'սինկանսեն', 'գնացք'})),
EmojiAnnotations(emoji='🚆', codepoints=(128646,), name='գնացք', slug='գնացք', annotations=frozenset({'երկաթուղի', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚇', codepoints=(128647,), name='մետրո', slug='մետրո', annotations=frozenset({'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚈', codepoints=(128648,), name='վերգետնյա մետրո', slug='վերգետնյա_մետրո', annotations=frozenset({'երկաթուղի', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚉', codepoints=(128649,), name='կայարան', slug='կայարան', annotations=frozenset({'փոխադրամիջոց', 'երկաթուղի', 'գնացք'})),
EmojiAnnotations(emoji='🚊', codepoints=(128650,), name='տրամվայ', slug='տրամվայ', annotations=frozenset({'տրոլեյբուս', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚝', codepoints=(128669,), name='մոնոռելս', slug='մոնոռելս', annotations=frozenset({'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚞', codepoints=(128670,), name='լեռնային երկաթուղի', slug='լեռնային_երկաթուղի', annotations=frozenset({'վագոն', 'լեռ', 'երկաթուղի', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚋', codepoints=(128651,), name='տրամվայի վագոն', slug='տրամվայի_վագոն', annotations=frozenset({'տրամվայ', 'վագոն', 'տրոլեյբուս', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚌', codepoints=(128652,), name='ավտոբուս', slug='ավտոբուս', annotations=frozenset({'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚍', codepoints=(128653,), name='մոտեցող ավտոբուս', slug='մոտեցող_ավտոբուս', annotations=frozenset({'մոտեցող', 'ավտոբուս', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚎', codepoints=(128654,), name='տրոլեյբուս', slug='տրոլեյբուս', annotations=frozenset({'տրամվայ', 'ավտոբուս', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚏', codepoints=(128655,), name='ավտոբուսի կանգառ', slug='ավտոբուսի_կանգառ', annotations=frozenset({'ավտոբուս', 'կանգառ'})),
EmojiAnnotations(emoji='🚐', codepoints=(128656,), name='միկրոավտոբուս', slug='միկրոավտոբուս', annotations=frozenset({'ավտոբուս', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚑', codepoints=(128657,), name='շտապօգնության մեքենա', slug='շտապօգնության_մեքենա', annotations=frozenset({'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚒', codepoints=(128658,), name='հրշեջ մեքենա', slug='հրշեջ_մեքենա', annotations=frozenset({'քարշակ', 'փոխադրամիջոց', 'հրդեք', 'բեռնատար'})),
EmojiAnnotations(emoji='🚓', codepoints=(128659,), name='ոստիկանական մեքենա', slug='ոստիկանական_մեքենա', annotations=frozenset({'պարեկ', 'ոստիկանություն', 'ավտոմեքենա', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚔', codepoints=(128660,), name='մոտեցող ոստիկանական մեքենա', slug='մոտեցող_ոստիկանական_մեքենա', annotations=frozenset({'մոտեցող', 'ոստիկանություն', 'ավտոմեքենա', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚕', codepoints=(128661,), name='տաքսի', slug='տաքսի', annotations=frozenset({'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚖', codepoints=(128662,), name='մոտեցող տաքսի', slug='մոտեցող_տաքսի', annotations=frozenset({'մոտեցող', 'տաքսի', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚗', codepoints=(128663,), name='ավտոմեքենա', slug='ավտոմեքենա', annotations=frozenset({'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚘', codepoints=(128664,), name='մոտեցող ավտոմեքենա', slug='մոտեցող_ավտոմեքենա', annotations=frozenset({'մոտեցող', 'մեքենա', 'ավտոմեքենա', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚙', codepoints=(128665,), name='ավտոֆուրգոն', slug='ավտոֆուրգոն', annotations=frozenset({'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚚', codepoints=(128666,), name='բեռնատար', slug='բեռնատար', annotations=frozenset({'առաքում', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚛', codepoints=(128667,), name='կցորդով բեռնատար', slug='կցորդով_բեռնատար', annotations=frozenset({'փոխադրամիջոց', 'կցորդ', 'բեռնատար'})),
EmojiAnnotations(emoji='🚜', codepoints=(128668,), name='տրակտոր', slug='տրակտոր', annotations=frozenset({'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚲', codepoints=(128690,), name='հեծանիվ', slug='հեծանիվ', annotations=frozenset({'փոխադրամիջոց'})),
EmojiAnnotations(emoji='⛽', codepoints=(9981,), name='բենզալցակայանի պոմպ', slug='բենզալցակայանի_պոմպ', annotations=frozenset({'բենզին', 'կայան', 'վառելիք', 'պոմպ', 'բենզալցակայան'})),
EmojiAnnotations(emoji='\U0001f6e3', codepoints=(128739,), name='ավտոմայրուղի', slug='ավտոմայրուղի', annotations=frozenset({'մայրուղի', 'ճանապարհ'})),
EmojiAnnotations(emoji='\U0001f6e4', codepoints=(128740,), name='երկաթուղի', slug='երկաթուղի', annotations=frozenset({'գնացք'})),
EmojiAnnotations(emoji='🚨', codepoints=(128680,), name='ոստիկանական մեքենայի փարոս', slug='ոստիկանական_մեքենայի_փարոս', annotations=frozenset({'լույս', 'ոստիկանություն', 'փարոս', 'պտտվող', 'ավտոմեքենա', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚥', codepoints=(128677,), name='հորիզոնական լուսակիր', slug='հորիզոնական_լուսակիր', annotations=frozenset({'երթևեկություն', 'լույս', 'ազդանշան'})),
EmojiAnnotations(emoji='🚦', codepoints=(128678,), name='ուղղահայաց լուսակիր', slug='ուղղահայաց_լուսակիր', annotations=frozenset({'երթևեկություն', 'լույս', 'ազդանշան'})),
EmojiAnnotations(emoji='🚧', codepoints=(128679,), name='շինարարություն', slug='շինարարություն', annotations=frozenset({'արգելապատնեշ'})),
EmojiAnnotations(emoji='⚓', codepoints=(9875,), name='խարիսխ', slug='խարիսխ', annotations=frozenset({'գործիք', 'նավ'})),
EmojiAnnotations(emoji='⛵', codepoints=(9973,), name='առագաստանավ', slug='առագաստանավ', annotations=frozenset({'նավակ', 'հանգստավայր', 'ծով', 'զբոսանավ', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚣', codepoints=(128675,), name='թիանավակ', slug='թիանավակ', annotations=frozenset({'նավակ', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚤', codepoints=(128676,), name='արագընթաց մոտորանավակ', slug='արագընթաց_մոտորանավակ', annotations=frozenset({'նավակ', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='\U0001f6f3', codepoints=(128755,), name='ուղևորատար նավ', slug='ուղևորատար_նավ', annotations=frozenset({'նավ', 'ուղևոր', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='⛴', codepoints=(9972,), name='լաստանավ', slug='լաստանավ', annotations=frozenset({'նավակ'})),
EmojiAnnotations(emoji='\U0001f6e5', codepoints=(128741,), name='մոտորանավ', slug='մոտորանավ', annotations=frozenset({'նավակ', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚢', codepoints=(128674,), name='նավ', slug='նավ', annotations=frozenset({'փոխադրամիջոց'})),
EmojiAnnotations(emoji='✈', codepoints=(9992,), name='ինքնաթիռ', slug='ինքնաթիռ', annotations=frozenset({'փոխադրամիջոց'})),
EmojiAnnotations(emoji='\U0001f6e9', codepoints=(128745,), name='փոքր ինքնաթիռ', slug='փոքր_ինքնաթիռ', annotations=frozenset({'ինքնաթիռ', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='\U0001f6eb', codepoints=(128747,), name='օդանավի մեկնում', slug='օդանավի_մեկնում', annotations=frozenset({'ինքնաթիռ', 'գրանցում', 'մեկնում', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='\U0001f6ec', codepoints=(128748,), name='օդանավի ժամանում', slug='օդանավի_ժամանում', annotations=frozenset({'վայրէջք', 'ժամանող', 'օդանավ', 'ժամանում', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='💺', codepoints=(128186,), name='նստատեղ', slug='նստատեղ', annotations=frozenset({'բազկաթոռ'})),
EmojiAnnotations(emoji='🚁', codepoints=(128641,), name='ուղղաթիռ', slug='ուղղաթիռ', annotations=frozenset({'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚟', codepoints=(128671,), name='կախովի երկաթուղի', slug='կախովի_երկաթուղի', annotations=frozenset({'փոխադրամիջոց', 'երկաթուղի', 'կախովի'})),
EmojiAnnotations(emoji='🚠', codepoints=(128672,), name='լեռնային ճոպանուղի', slug='լեռնային_ճոպանուղի', annotations=frozenset({'գոնդոլա', 'ճոպան', 'լեռ', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚡', codepoints=(128673,), name='օդային տրամվայ', slug='օդային_տրամվայ', annotations=frozenset({'օդային', 'ճոպան', 'ճոպանուղի', 'գոնդոլա', 'վագոն', 'տրամվայ', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚀', codepoints=(128640,), name='հրթիռ', slug='հրթիռ', annotations=frozenset({'տիեզերք', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='\U0001f6f0', codepoints=(128752,), name='արբանյակ', slug='արբանյակ', annotations=frozenset({'տիեզերք', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='\U0001f6ce', codepoints=(128718,), name='հյուրանոցային զանգ', slug='հյուրանոցային_զանգ', annotations=frozenset({'զանգ', 'հյուրանոց'})),
EmojiAnnotations(emoji='\U0001f6cc', codepoints=(128716,), name='մահճակալում պառկած մարդ', slug='մահճակալում_պառկած_մարդ', annotations=frozenset({'քնել', 'հյուրանոց'})),
EmojiAnnotations(emoji='\U0001f6cf', codepoints=(128719,), name='մահճակալ', slug='մահճակալ', annotations=frozenset({'քնել', 'հյուրանոց'})),
EmojiAnnotations(emoji='\U0001f6cb', codepoints=(128715,), name='բազմոց և լամպ', slug='բազմոց_և_լամպ', annotations=frozenset({'լամպ', 'բազմոց', 'հյուրանոց'})),
EmojiAnnotations(emoji='🚽', codepoints=(128701,), name='զուգարանակոնք', slug='զուգարանակոնք', annotations=frozenset({'զուգարան'})),
EmojiAnnotations(emoji='🚿', codepoints=(128703,), name='լոգարանի ցնցուղ', slug='լոգարանի_ցնցուղ', annotations=frozenset({'ջուր'})),
EmojiAnnotations(emoji='🛀', codepoints=(128704,), name='լոգանք ընդունող մարդ', slug='լոգանք_ընդունող_մարդ', annotations=frozenset({'լոգարան', 'լոգասենյակ'})),
EmojiAnnotations(emoji='🛁', codepoints=(128705,), name='լոգարան', slug='լոգարան', annotations=frozenset({'լոգասենյակ'})),
EmojiAnnotations(emoji='⌛', codepoints=(8987,), name='ավազի ժամացույց', slug='ավազի_ժամացույց', annotations=frozenset({'ժամաչափ', 'ավազ'})),
EmojiAnnotations(emoji='⏳', codepoints=(9203,), name='ավազի ժամացույց հոսող ավազով', slug='ավազի_ժամացույց_հոսող_ավազով', annotations=frozenset({'ավազի ժամացույց', 'ժամաչափ', 'ավազ'})),
EmojiAnnotations(emoji='⏰', codepoints=(9200,), name='զարթուցիչ', slug='զարթուցիչ', annotations=frozenset({'ժամացույց'})),
EmojiAnnotations(emoji='⏱', codepoints=(9201,), name='վայրկյանաչափ', slug='վայրկյանաչափ', annotations=frozenset({'ժամացույց'})),
EmojiAnnotations(emoji='⏲', codepoints=(9202,), name='ժամաչափ', slug='ժամաչափ', annotations=frozenset({'ժամացույց'})),
EmojiAnnotations(emoji='\U0001f570', codepoints=(128368,), name='բուխարու ժամացույց', slug='բուխարու_ժամացույց', annotations=frozenset({'ժամացույց'})),
EmojiAnnotations(emoji='🕛', codepoints=(128347,), name='ժամը տասներկուսը', slug='ժամը_տասներկուսը', annotations=frozenset({'12', 'ժամ', 'տասներկու', '00', 'ժամացույց', '12:00'})),
EmojiAnnotations(emoji='🕧', codepoints=(128359,), name='տասներկուսն անց կես', slug='տասներկուսն_անց_կես', annotations=frozenset({'12', 'տասներկու', '12:30', 'ժամացույց', 'երեսուն', '30'})),
EmojiAnnotations(emoji='🕐', codepoints=(128336,), name='ժամը մեկը', slug='ժամը_մեկը', annotations=frozenset({'ժամ', 'մեկ', '1', '1:00', '00', 'ժամացույց'})),
EmojiAnnotations(emoji='🕜', codepoints=(128348,), name='մեկն անց կես', slug='մեկն_անց_կես', annotations=frozenset({'մեկ', '1', '1:30', 'ժամացույց', 'երեսուն', '30'})),
EmojiAnnotations(emoji='🕑', codepoints=(128337,), name='ժամը երկուսը', slug='ժամը_երկուսը', annotations=frozenset({'ժամ', 'երկու', '00', '2:00', 'ժամացույց', '2'})),
EmojiAnnotations(emoji='🕝', codepoints=(128349,), name='երկուսն անց կես', slug='երկուսն_անց_կես', annotations=frozenset({'երկու', '2:30', 'ժամացույց', '2', 'երեսուն', '30'})),
EmojiAnnotations(emoji='🕒', codepoints=(128338,), name='ժամը երեքը', slug='ժամը_երեքը', annotations=frozenset({'ժամ', 'երեք', '3', '00', '3:00', 'ժամացույց'})),
EmojiAnnotations(emoji='🕞', codepoints=(128350,), name='երեքն անց կես', slug='երեքն_անց_կես', annotations=frozenset({'երեք', '3', '3:30', 'ժամացույց', 'երեսուն', '30'})),
EmojiAnnotations(emoji='🕓', codepoints=(128339,), name='ժամը չորսը', slug='ժամը_չորսը', annotations=frozenset({'4:00', 'ժամ', '00', 'չորս', '4', 'ժամացույց'})),
EmojiAnnotations(emoji='🕟', codepoints=(128351,), name='չորսն անց կես', slug='չորսն_անց_կես', annotations=frozenset({'4:30', 'չորս', '4', 'ժամացույց', 'երեսուն', '30'})),
EmojiAnnotations(emoji='🕔', codepoints=(128340,), name='ժամը հինգը', slug='ժամը_հինգը', annotations=frozenset({'ժամ', 'հինգ', '5:00', '5', '00', 'ժամացույց'})),
EmojiAnnotations(emoji='🕠', codepoints=(128352,), name='հինգն անց կես', slug='հինգն_անց_կես', annotations=frozenset({'5:30', 'հինգ', '5', 'ժամացույց', 'երեսուն', '30'})),
EmojiAnnotations(emoji='🕕', codepoints=(128341,), name='ժամը վեցը', slug='ժամը_վեցը', annotations=frozenset({'վեց', '6', 'ժամ', '6:00', '00', 'ժամացույց'})),
EmojiAnnotations(emoji='🕡', codepoints=(128353,), name='վեցն անց կես', slug='վեցն_անց_կես', annotations=frozenset({'վեց', '6', '6:30', 'ժամացույց', 'երեսուն', '30'})),
EmojiAnnotations(emoji='🕖', codepoints=(128342,), name='ժամը յոթը', slug='ժամը_յոթը', annotations=frozenset({'ժամ', 'յոթ', '7', '00', '7:00', 'ժամացույց'})),
EmojiAnnotations(emoji='🕢', codepoints=(128354,), name='յոթն անց կես', slug='յոթն_անց_կես', annotations=frozenset({'յոթ', '7', '7:30', 'ժամացույց', 'երեսուն', '30'})),
EmojiAnnotations(emoji='🕗', codepoints=(128343,), name='ժամը ութը', slug='ժամը_ութը', annotations=frozenset({'ժամ', '8:00', '00', '8', 'ժամացույց', 'ութ'})),
EmojiAnnotations(emoji='🕣', codepoints=(128355,), name='ութն անց կես', slug='ութն_անց_կես', annotations=frozenset({'8:30', '8', 'ժամացույց', 'ութ', 'երեսուն', '30'})),
EmojiAnnotations(emoji='🕘', codepoints=(128344,), name='ժամը ինը', slug='ժամը_ինը', annotations=frozenset({'ժամ', '9:00', 'ինը', '00', '9', 'ժամացույց'})),
EmojiAnnotations(emoji='🕤', codepoints=(128356,), name='ինն անց կես', slug='ինն_անց_կես', annotations=frozenset({'ինը', '9', 'ժամացույց', 'երեսուն', '30', '9:30'})),
EmojiAnnotations(emoji='🕙', codepoints=(128345,), name='ժամը տասը', slug='ժամը_տասը', annotations=frozenset({'10', '10:00', 'ժամ', 'տասը', '00', 'ժամացույց'})),
EmojiAnnotations(emoji='🕥', codepoints=(128357,), name='տասն անց կես', slug='տասն_անց_կես', annotations=frozenset({'10', 'տասը', '10:30', 'երեսուն', 'ժամացույց', '30'})),
EmojiAnnotations(emoji='🕚', codepoints=(128346,), name='ժամը տասնմեկը', slug='ժամը_տասնմեկը', annotations=frozenset({'11', 'ժամ', '00', '11:00', 'տասնմեկ', 'ժամացույց'})),
EmojiAnnotations(emoji='🕦', codepoints=(128358,), name='տասնմեկն անց կես', slug='տասնմեկն_անց_կես', annotations=frozenset({'11', '11:30', 'տասնմեկ', 'ժամացույց', 'երեսուն', '30'})),
EmojiAnnotations(emoji='🌑', codepoints=(127761,), name='նորալուսին', slug='նորալուսին', annotations=frozenset({'եղանակ', 'մութ', 'լուսին', 'տիեզերք'})),
EmojiAnnotations(emoji='🌒', codepoints=(127762,), name='աճող մահիկ', slug='աճող_մահիկ', annotations=frozenset({'եղանակ', 'լուսին', 'տիեզերք', 'մահիկ', 'աճող'})),
EmojiAnnotations(emoji='🌓', codepoints=(127763,), name='լուսինն առաջին քառորդում', slug='լուսինն_առաջին_քառորդում', annotations=frozenset({'եղանակ', 'լուսին', 'տիեզերք', 'քառորդ'})),
EmojiAnnotations(emoji='🌔', codepoints=(127764,), name='աճող ուռուցիկ լուսին', slug='աճող_ուռուցիկ_լուսին', annotations=frozenset({'ուռուցիկ', 'լուսին', 'տիեզերք', 'եղանակ', 'աճող'})),
EmojiAnnotations(emoji='🌕', codepoints=(127765,), name='լիալուսին', slug='լիալուսին', annotations=frozenset({'եղանակ', 'լուսին', 'տիեզերք'})),
EmojiAnnotations(emoji='🌖', codepoints=(127766,), name='նվազող ուռուցիկ լուսին', slug='նվազող_ուռուցիկ_լուսին', annotations=frozenset({'ուռուցիկ', 'լուսին', 'տիեզերք', 'նվազող', 'եղանակ'})),
EmojiAnnotations(emoji='🌗', codepoints=(127767,), name='լուսինը երկրորդ քառորդում', slug='լուսինը_երկրորդ_քառորդում', annotations=frozenset({'եղանակ', 'լուսին', 'տիեզերք', 'քառորդ'})),
EmojiAnnotations(emoji='🌘', codepoints=(127768,), name='նվազող մահիկ', slug='նվազող_մահիկ', annotations=frozenset({'եղանակ', 'լուսին', 'տիեզերք', 'նվազող', 'մահիկ'})),
EmojiAnnotations(emoji='🌙', codepoints=(127769,), name='մահիկ', slug='մահիկ', annotations=frozenset({'եղանակ', 'լուսին', 'տիեզերք'})),
EmojiAnnotations(emoji='🌚', codepoints=(127770,), name='դեմքով նորալուսին', slug='դեմքով_նորալուսին', annotations=frozenset({'եղանակ', 'դեմք', 'լուսին', 'տիեզերք'})),
EmojiAnnotations(emoji='🌛', codepoints=(127771,), name='լուսինն առաջին քառորդում դեմքով', slug='լուսինն_առաջին_քառորդում_դեմքով', annotations=frozenset({'եղանակ', 'դեմք', 'լուսին', 'քառորդ', 'տիեզերք'})),
EmojiAnnotations(emoji='🌜', codepoints=(127772,), name='լուսինը երկրորդ քառորդում դեմքով', slug='լուսինը_երկրորդ_քառորդում_դեմքով', annotations=frozenset({'եղանակ', 'դեմք', 'լուսին', 'քառորդ', 'տիեզերք'})),
EmojiAnnotations(emoji='\U0001f321', codepoints=(127777,), name='ջերմաչափ', slug='ջերմաչափ', annotations=frozenset({'եղանակ'})),
EmojiAnnotations(emoji='☀', codepoints=(9728,), name='արև', slug='արև', annotations=frozenset({'եղանակ', 'տիեզերք', 'արևոտ', 'պայծառ', 'ճառագայթներ'})),
EmojiAnnotations(emoji='🌝', codepoints=(127773,), name='դեմքով լիալուսին', slug='դեմքով_լիալուսին', annotations=frozenset({'տիեզերք', 'լուսին', 'լիալուսին', 'պայծառ', 'դեմք', 'եղանակ'})),
EmojiAnnotations(emoji='🌞', codepoints=(127774,), name='դեմքով արև', slug='դեմքով_արև', annotations=frozenset({'եղանակ', 'դեմք', 'տիեզերք', 'արև', 'պայծառ'})),
EmojiAnnotations(emoji='⭐', codepoints=(11088,), name='սպիտակավուն աստղ', slug='սպիտակավուն_աստղ', annotations=frozenset({'աստղ'})),
EmojiAnnotations(emoji='🌟', codepoints=(127775,), name='փայլող աստղ', slug='փայլող_աստղ', annotations=frozenset({'փայլող', 'կայծ', 'աստղ'})),
EmojiAnnotations(emoji='🌠', codepoints=(127776,), name='ընկնող աստղ', slug='ընկնող_աստղ', annotations=frozenset({'տիեզերք', 'աստղ', 'ընկնող'})),
EmojiAnnotations(emoji='☁', codepoints=(9729,), name='ամպ', slug='ամպ', annotations=frozenset({'եղանակ'})),
EmojiAnnotations(emoji='⛅', codepoints=(9925,), name='արև ամպի հետևում', slug='արև_ամպի_հետևում', annotations=frozenset({'եղանակ', 'արև', 'ամպ'})),
EmojiAnnotations(emoji='⛈', codepoints=(9928,), name='կայծակով և անձրևով ամպ', slug='կայծակով_և_անձրևով_ամպ', annotations=frozenset({'եղանակ', 'ամպ', 'անձրև', 'ամպրոպ'})),
EmojiAnnotations(emoji='\U0001f324', codepoints=(127780,), name='արև փոքր ամպի հետևում', slug='արև_փոքր_ամպի_հետևում', annotations=frozenset({'եղանակ', 'արև', 'ամպ'})),
EmojiAnnotations(emoji='\U0001f325', codepoints=(127781,), name='արև մեծ ամպի հետևում', slug='արև_մեծ_ամպի_հետևում', annotations=frozenset({'եղանակ', 'արև', 'ամպ'})),
EmojiAnnotations(emoji='\U0001f326', codepoints=(127782,), name='արև անձրևով ամպի հետևում', slug='արև_անձրևով_ամպի_հետևում', annotations=frozenset({'եղանակ', 'արև', 'ամպ', 'անձրև'})),
EmojiAnnotations(emoji='\U0001f327', codepoints=(127783,), name='անձրևով ամպ', slug='անձրևով_ամպ', annotations=frozenset({'եղանակ', 'ամպ', 'անձրև'})),
EmojiAnnotations(emoji='\U0001f328', codepoints=(127784,), name='ձյունով ամպ', slug='ձյունով_ամպ', annotations=frozenset({'եղանակ', 'սառը', 'ձյուն', 'ամպ'})),
EmojiAnnotations(emoji='\U0001f329', codepoints=(127785,), name='կայծակով ամպ', slug='կայծակով_ամպ', annotations=frozenset({'եղանակ', 'ամպ', 'կայծակ'})),
EmojiAnnotations(emoji='\U0001f32a', codepoints=(127786,), name='պտտահողմ', slug='պտտահողմ', annotations=frozenset({'եղանակ', 'ամպ'})),
EmojiAnnotations(emoji='\U0001f32b', codepoints=(127787,), name='մառախուղ', slug='մառախուղ', annotations=frozenset({'եղանակ', 'ամպ'})),
EmojiAnnotations(emoji='\U0001f32c', codepoints=(127788,), name='քամու երես', slug='քամու_երես', annotations=frozenset({'եղանակ', 'դեմք', 'քամի', 'փչել', 'ամպ'})),
EmojiAnnotations(emoji='🌀', codepoints=(127744,), name='ցիկլոն', slug='ցիկլոն', annotations=frozenset({'եղանակ', 'պտտվող', 'թայֆուն'})),
EmojiAnnotations(emoji='🌈', codepoints=(127752,), name='ծիածան', slug='ծիածան', annotations=frozenset({'եղանակ', 'անձրև'})),
EmojiAnnotations(emoji='🌂', codepoints=(127746,), name='փակ անձրևանոց', slug='փակ_անձրևանոց', annotations=frozenset({'հագուստ', 'անձրևանոց', 'եղանակ', 'անձրև'})),
EmojiAnnotations(emoji='☂', codepoints=(9730,), name='անձրևանոց', slug='անձրևանոց', annotations=frozenset({'հագուստ', 'եղանակ', 'անձրև'})),
EmojiAnnotations(emoji='☔', codepoints=(9748,), name='անձրևանոց անձրևի կաթիլներով', slug='անձրևանոց_անձրևի_կաթիլներով', annotations=frozenset({'հագուստ', 'կաթիլ', 'անձրևանոց', 'անձրև', 'եղանակ'})),
EmojiAnnotations(emoji='⛱', codepoints=(9969,), name='անձրևանոց գետնի վրա', slug='անձրևանոց_գետնի_վրա', annotations=frozenset({'անձրևանոց', 'եղանակ', 'արև', 'անձրև'})),
EmojiAnnotations(emoji='⚡', codepoints=(9889,), name='բարձր լարում', slug='բարձր_լարում', annotations=frozenset({'վտանգ', 'լարում', 'էլեկտրականություն', 'էլեկտրական', 'կայծակ'})),
EmojiAnnotations(emoji='❄', codepoints=(10052,), name='ձյան փաթիլ', slug='ձյան_փաթիլ', annotations=frozenset({'եղանակ', 'սառը', 'ձյուն'})),
EmojiAnnotations(emoji='☃', codepoints=(9731,), name='ձնեմարդ', slug='ձնեմարդ', annotations=frozenset({'եղանակ', 'սառը', 'ձյուն'})),
EmojiAnnotations(emoji='⛄', codepoints=(9924,), name='ձնեմարդ առանց ձյան', slug='ձնեմարդ_առանց_ձյան', annotations=frozenset({'եղանակ', 'սառը', 'ձյուն', 'ձնեմարդ'})),
EmojiAnnotations(emoji='☄', codepoints=(9732,), name='գիսաստղ', slug='գիսաստղ', annotations=frozenset({'տիեզերք'})),
EmojiAnnotations(emoji='🔥', codepoints=(128293,), name='կրակ', slug='կրակ', annotations=frozenset({'գործիք', 'բոց'})),
EmojiAnnotations(emoji='💧', codepoints=(128167,), name='կաթիլ', slug='կաթիլ', annotations=frozenset({'եղանակ', 'սառը', 'կոմիքս', 'քրտինք'})),
EmojiAnnotations(emoji='🌊', codepoints=(127754,), name='ծովի ալիք', slug='ծովի_ալիք', annotations=frozenset({'ալիք', 'եղանակ', 'օվկիանոս', 'ջուր'})),
EmojiAnnotations(emoji='🎃', codepoints=(127875,), name='ջեքի լապտեր', slug='ջեքի_լապտեր', annotations=frozenset({'լապտեր', 'տոն', 'հելոուին', 'ջեք'})),
EmojiAnnotations(emoji='🎄', codepoints=(127876,), name='տոնածառ', slug='տոնածառ', annotations=frozenset({'սուրբ ծնունդ', 'տոն', 'ծառ'})),
EmojiAnnotations(emoji='🎆', codepoints=(127878,), name='հրավառություն', slug='հրավառություն', annotations=frozenset({'տոնակատարություն'})),
EmojiAnnotations(emoji='🎇', codepoints=(127879,), name='բենգալյան կրակ', slug='բենգալյան_կրակ', annotations=frozenset({'տոնակատարություն', 'կայծ', 'հրավառություն'})),
EmojiAnnotations(emoji='✨', codepoints=(10024,), name='կայծեր', slug='կայծեր', annotations=frozenset({'կայծ', 'աստղ'})),
EmojiAnnotations(emoji='🎈', codepoints=(127880,), name='փուչիկ', slug='փուչիկ', annotations=frozenset({'տոն'})),
EmojiAnnotations(emoji='🎉', codepoints=(127881,), name='ճայթուկ', slug='ճայթուկ', annotations=frozenset({'տոն', 'երեկույթ'})),
EmojiAnnotations(emoji='🎊', codepoints=(127882,), name='կոնֆետի', slug='կոնֆետի', annotations=frozenset({'տոն', 'գունդ'})),
EmojiAnnotations(emoji='🎋', codepoints=(127883,), name='տանաբատա', slug='տանաբատա', annotations=frozenset({'դրոշակ', 'տոն', 'ճապոնական', 'ծառ'})),
EmojiAnnotations(emoji='🎌', codepoints=(127884,), name='խաչված դրոշակներ', slug='խաչված_դրոշակներ', annotations=frozenset({'տոն', 'խաչ', 'խաչված', 'ճապոնական'})),
EmojiAnnotations(emoji='🎍', codepoints=(127885,), name='բամբուկից դեկորացիա', slug='բամբուկից_դեկորացիա', annotations=frozenset({'բամբուկ', 'տոն', 'ճապոնական', 'դեկորացիա', 'բույս'})),
EmojiAnnotations(emoji='🎎', codepoints=(127886,), name='ճապոնական տիկնիկներ', slug='ճապոնական_տիկնիկներ', annotations=frozenset({'տոն', 'փառատոն', 'ճապոնական', 'տիկնիկ'})),
EmojiAnnotations(emoji='🎏', codepoints=(127887,), name='կարպերի տեսքով նավադրոշ', slug='կարպերի_տեսքով_նավադրոշ', annotations=frozenset({'տոն', 'նավադրոշ', 'կարպ'})),
EmojiAnnotations(emoji='🎐', codepoints=(127888,), name='քամու զանգակ', slug='քամու_զանգակ', annotations=frozenset({'տոն', 'քամի', 'զանգ'})),
EmojiAnnotations(emoji='🎑', codepoints=(127889,), name='լուսնի ծես', slug='լուսնի_ծես', annotations=frozenset({'տոն', 'լուսին', 'ծես'})),
EmojiAnnotations(emoji='🎀', codepoints=(127872,), name='ժապավեն', slug='ժապավեն', annotations=frozenset({'տոն', 'տոնակատարություն'})),
EmojiAnnotations(emoji='🎁', codepoints=(127873,), name='փաթեթավորված նվեր', slug='փաթեթավորված_նվեր', annotations=frozenset({'տոն', 'փաթեթավորված', 'արկղ', 'նվեր'})),
EmojiAnnotations(emoji='\U0001f396', codepoints=(127894,), name='ռազմական մեդալ', slug='ռազմական_մեդալ', annotations=frozenset({'տոն', 'ռազմական', 'մեդալ'})),
EmojiAnnotations(emoji='\U0001f397', codepoints=(127895,), name='հուշաժապավեն', slug='հուշաժապավեն', annotations=frozenset({'տոն', 'ժապավեն', 'հուշ'})),
EmojiAnnotations(emoji='\U0001f39e', codepoints=(127902,), name='տեսաժապավեն', slug='տեսաժապավեն', annotations=frozenset({'կադր', 'ժապավեն', 'կինո', 'ֆիլմ'})),
EmojiAnnotations(emoji='\U0001f39f', codepoints=(127903,), name='մուտքի տոմս', slug='մուտքի_տոմս', annotations=frozenset({'տոմս', 'մուտք'})),
EmojiAnnotations(emoji='🎫', codepoints=(127915,), name='տոմս', slug='տոմս', annotations=frozenset({'մուտք'})),
EmojiAnnotations(emoji='⚽', codepoints=(9917,), name='ֆուտբոլի գնդակ', slug='ֆուտբոլի_գնդակ', annotations=frozenset({'ֆուտբոլ', 'գնդակ'})),
EmojiAnnotations(emoji='⚾', codepoints=(9918,), name='բեյսբոլի գնդակ', slug='բեյսբոլի_գնդակ', annotations=frozenset({'գնդակ'})),
EmojiAnnotations(emoji='🏀', codepoints=(127936,), name='բասկետբոլի գնդակ', slug='բասկետբոլի_գնդակ', annotations=frozenset({'գնդակ', 'բասկետբոլ'})),
EmojiAnnotations(emoji='🏈', codepoints=(127944,), name='ամերիկյան ֆուտբոլի գնդակ', slug='ամերիկյան_ֆուտբոլի_գնդակ', annotations=frozenset({'ֆուտբոլ', 'գնդակ', 'ամերիկյան'})),
EmojiAnnotations(emoji='🏉', codepoints=(127945,), name='ռեգբիի գնդակ', slug='ռեգբիի_գնդակ', annotations=frozenset({'ռեգբի', 'ֆուտբոլ', 'գնդակ'})),
EmojiAnnotations(emoji='🎾', codepoints=(127934,), name='թենիսի գնդակ', slug='թենիսի_գնդակ', annotations=frozenset({'գնդակ', 'թենիս', 'մեծ'})),
EmojiAnnotations(emoji='🎱', codepoints=(127921,), name='բիլիարդ', slug='բիլիարդ', annotations=frozenset({'8', 'խաղ', '8 գնդակ', 'գնդակ', 'ութ'})),
EmojiAnnotations(emoji='🎳', codepoints=(127923,), name='բոուլինգ', slug='բոուլինգ', annotations=frozenset({'խաղ', 'գնդակ'})),
EmojiAnnotations(emoji='⛳', codepoints=(9971,), name='գոլֆի դրոշակ', slug='գոլֆի_դրոշակ', annotations=frozenset({'գոլֆ', 'անցք'})),
EmojiAnnotations(emoji='\U0001f3cc', codepoints=(127948,), name='գոլֆ խաղացող', slug='գոլֆ_խաղացող', annotations=frozenset({'գոլֆ', 'գնդակ'})),
EmojiAnnotations(emoji='⛸', codepoints=(9976,), name='չմուշկ', slug='չմուշկ', annotations=frozenset({'սառույց'})),
EmojiAnnotations(emoji='🎣', codepoints=(127907,), name='կարթաձող', slug='կարթաձող', annotations=frozenset({'կարթ', 'ձուկ'})),
EmojiAnnotations(emoji='🎽', codepoints=(127933,), name='պտտվող շապիկ', slug='պտտվող_շապիկ', annotations=frozenset({'շապիկ', 'պտտվող', 'ժապավեն'})),
EmojiAnnotations(emoji='🎿', codepoints=(127935,), name='դահուկներ', slug='դահուկներ', annotations=frozenset({'ձյուն', 'դահուկ'})),
EmojiAnnotations(emoji='⛷', codepoints=(9975,), name='դահուկորդ', slug='դահուկորդ', annotations=frozenset({'ձյուն', 'դահուկ'})),
EmojiAnnotations(emoji='🏂', codepoints=(127938,), name='սնոուբորդիստ', slug='սնոուբորդիստ', annotations=frozenset({'ձյուն', 'դահուկ', 'սնոուբորդ'})),
EmojiAnnotations(emoji='🏄', codepoints=(127940,), name='սերֆեր', slug='սերֆեր', annotations=frozenset({'սերֆինգ'})),
EmojiAnnotations(emoji='🏇', codepoints=(127943,), name='ձիավազք', slug='ձիավազք', annotations=frozenset({'ձի', 'ժոկեյ', 'մրցարշավային ձի', 'մրցարշավ'})),
EmojiAnnotations(emoji='🏊', codepoints=(127946,), name='լողորդ', slug='լողորդ', annotations=frozenset({'լողալ'})),
EmojiAnnotations(emoji='⛹', codepoints=(9977,), name='գնդակով մարդ', slug='գնդակով_մարդ', annotations=frozenset({'գնդակ'})),
EmojiAnnotations(emoji='\U0001f3cb', codepoints=(127947,), name='ծանրորդ', slug='ծանրորդ', annotations=frozenset({'ծանրություն'})),
EmojiAnnotations(emoji='🚴', codepoints=(128692,), name='հեծանվորդ', slug='հեծանվորդ', annotations=frozenset({'հեծանիվ'})),
EmojiAnnotations(emoji='🚵', codepoints=(128693,), name='լեռնահեծանվորդ', slug='լեռնահեծանվորդ', annotations=frozenset({'հեծանիվ', 'լեռ', 'հեծանվորդ'})),
EmojiAnnotations(emoji='\U0001f3ce', codepoints=(127950,), name='մրցարշավային մեքենա', slug='մրցարշավային_մեքենա', annotations=frozenset({'մեքենա', 'մրցարշավ'})),
EmojiAnnotations(emoji='\U0001f3cd', codepoints=(127949,), name='մոտոցիկլետ', slug='մոտոցիկլետ', annotations=frozenset({'մրցարշավ'})),
EmojiAnnotations(emoji='\U0001f3c5', codepoints=(127941,), name='սպորտային մեդալ', slug='սպորտային_մեդալ', annotations=frozenset({'մեդալ'})),
EmojiAnnotations(emoji='🏆', codepoints=(127942,), name='գավաթ', slug='գավաթ', annotations=frozenset({'մրցանակ'})),
EmojiAnnotations(emoji='\U0001f3cf', codepoints=(127951,), name='կրիկետ', slug='կրիկետ', annotations=frozenset({'բիտա', 'խաղ', 'գնդակ'})),
EmojiAnnotations(emoji='\U0001f3d0', codepoints=(127952,), name='վոլեյբոլի գնդակ', slug='վոլեյբոլի_գնդակ', annotations=frozenset({'խաղ', 'գնդակ'})),
EmojiAnnotations(emoji='\U0001f3d1', codepoints=(127953,), name='խոտի հոկեյ', slug='խոտի_հոկեյ', annotations=frozenset({'խաղ', 'մական', 'գնդակ', 'դաշտ', 'հոկեյ'})),
EmojiAnnotations(emoji='\U0001f3d2', codepoints=(127954,), name='մական և տափօղակ', slug='մական_և_տափօղակ', annotations=frozenset({'սառույց', 'խաղ', 'տափօղակ', 'մական', 'հոկեյ'})),
EmojiAnnotations(emoji='\U0001f3d3', codepoints=(127955,), name='սեղանի թենիս', slug='սեղանի_թենիս', annotations=frozenset({'բիտա', 'խաղ', 'գնդակ', 'ռակետ', 'ձեռնաթիակ'})),
EmojiAnnotations(emoji='\U0001f3f8', codepoints=(127992,), name='բադմինտոն', slug='բադմինտոն', annotations=frozenset({'փետրագնդակ', 'խաղ', 'ռակետ', 'ձեռնաթիակ', 'վոլան'})),
EmojiAnnotations(emoji='🎯', codepoints=(127919,), name='դիպուկ հարված', slug='դիպուկ_հարված', annotations=frozenset({'հարվածել', 'խաղ', 'դարթ', 'կենտրոն', 'նշանակետ', 'թիրախ'})),
EmojiAnnotations(emoji='🎮', codepoints=(127918,), name='տեսախաղ', slug='տեսախաղ', annotations=frozenset({'խաղ', 'վահանակ'})),
EmojiAnnotations(emoji='\U0001f579', codepoints=(128377,), name='ջոյսթիք', slug='ջոյսթիք', annotations=frozenset({'խաղ', 'տեսախաղ'})),
EmojiAnnotations(emoji='🎲', codepoints=(127922,), name='զառ', slug='զառ', annotations=frozenset({'խաղ'})),
EmojiAnnotations(emoji='♠', codepoints=(9824,), name='ղառ', slug='ղառ', annotations=frozenset({'թղթախաղ', 'խաղ'})),
EmojiAnnotations(emoji='♥', codepoints=(9829,), name='սիրտ', slug='սիրտ', annotations=frozenset({'թղթախաղ', 'խաղ', 'սրտեր'})),
EmojiAnnotations(emoji='♦', codepoints=(9830,), name='քյափ', slug='քյափ', annotations=frozenset({'թղթախաղ', 'խաղ'})),
EmojiAnnotations(emoji='♣', codepoints=(9827,), name='խաչ', slug='խաչ', annotations=frozenset({'թղթախաղ', 'խաղ'})),
EmojiAnnotations(emoji='🃏', codepoints=(127183,), name='ջոկեր', slug='ջոկեր', annotations=frozenset({'թղթախաղ', 'խաղ', 'խաղալ'})),
EmojiAnnotations(emoji='🀄', codepoints=(126980,), name='մաջոնգի կարմիր վիշապ', slug='մաջոնգի_կարմիր_վիշապ', annotations=frozenset({'խաղ', 'մաջոնգ', 'կարմիր'})),
EmojiAnnotations(emoji='🎴', codepoints=(127924,), name='ծաղկի խաղաթղթեր', slug='ծաղկի_խաղաթղթեր', annotations=frozenset({'թղթախաղ', 'ծաղիկ', 'խաղ', 'խաղալ', 'ճապոնական'})),
EmojiAnnotations(emoji='🔇', codepoints=(128263,), name='բարձրախոսն անջատված է', slug='բարձրախոսն_անջատված_է', annotations=frozenset({'լուռ', 'բարձրախոս', 'անջատել ձայնը', 'հանգիստ', 'ձայն'})),
EmojiAnnotations(emoji='🔈', codepoints=(128264,), name='բարձրախոս', slug='բարձրախոս', annotations=frozenset({'ձայնի ուժգնություն', 'ձայն'})),
EmojiAnnotations(emoji='🔉', codepoints=(128265,), name='բարձրախոսը միացված է', slug='բարձրախոսը_միացված_է', annotations=frozenset({'ալիք', 'ցածր', 'բարձրախոս', 'ձայն'})),
EmojiAnnotations(emoji='🔊', codepoints=(128266,), name='բարձրախոսի ձայնը բարձր է', slug='բարձրախոսի_ձայնը_բարձր_է', annotations=frozenset({'բարձր', 'բարձրաձայն', 'երեք', 'ձայն', '3', 'բարձրախոս'})),
EmojiAnnotations(emoji='📢', codepoints=(128226,), name='մեծ բարձրախոս', slug='մեծ_բարձրախոս', annotations=frozenset({'բարձրաձայն', 'հասարակական'})),
EmojiAnnotations(emoji='📯', codepoints=(128239,), name='փոստային եղջյուր', slug='փոստային_եղջյուր', annotations=frozenset({'եղջյուր', 'փոստ', 'փոստային'})),
EmojiAnnotations(emoji='🔕', codepoints=(128277,), name='զանգակ շեղ գծիկով', slug='զանգակ_շեղ_գծիկով', annotations=frozenset({'զանգակ', 'հանգիստ', 'ոչ', 'արգելված', 'լուռ', 'անջատել ձայնը'})),
EmojiAnnotations(emoji='🎼', codepoints=(127932,), name='սոլի բանալի', slug='սոլի_բանալի', annotations=frozenset({'երաժշտություն'})),
EmojiAnnotations(emoji='🎵', codepoints=(127925,), name='նոտա', slug='նոտա', annotations=frozenset({'երաժշտություն'})),
EmojiAnnotations(emoji='🎶', codepoints=(127926,), name='նոտաներ', slug='նոտաներ', annotations=frozenset({'նոտա', 'երաժշտություն'})),
EmojiAnnotations(emoji='\U0001f399', codepoints=(127897,), name='ստուդիայի խոսափող', slug='ստուդիայի_խոսափող', annotations=frozenset({'խոսափող', 'ստուդիա', 'երաժշտություն'})),
EmojiAnnotations(emoji='\U0001f39a', codepoints=(127898,), name='ձայնի բարձրության սահոց', slug='ձայնի_բարձրության_սահոց', annotations=frozenset({'մակարդակ', 'սահոց', 'երաժշտություն'})),
EmojiAnnotations(emoji='\U0001f39b', codepoints=(127899,), name='կառավարման կոճակներ', slug='կառավարման_կոճակներ', annotations=frozenset({'կոճակներ', 'կառավարել', 'երաժշտություն'})),
EmojiAnnotations(emoji='🎤', codepoints=(127908,), name='խոսափող', slug='խոսափող', annotations=frozenset({'կարաոկե'})),
EmojiAnnotations(emoji='🎷', codepoints=(127927,), name='սաքսոֆոն', slug='սաքսոֆոն', annotations=frozenset({'գործիք', 'երաժշտություն'})),
EmojiAnnotations(emoji='🎸', codepoints=(127928,), name='կիթառ', slug='կիթառ', annotations=frozenset({'գործիք', 'երաժշտություն'})),
EmojiAnnotations(emoji='🎹', codepoints=(127929,), name='երաժշտական ստեղնաշար', slug='երաժշտական_ստեղնաշար', annotations=frozenset({'գործիք', 'ստեղնաշար', 'դաշնամուր', 'երաժշտություն'})),
EmojiAnnotations(emoji='🎺', codepoints=(127930,), name='շեփոր', slug='շեփոր', annotations=frozenset({'գործիք', 'երաժշտություն'})),
EmojiAnnotations(emoji='🎻', codepoints=(127931,), name='ջութակ', slug='ջութակ', annotations=frozenset({'գործիք', 'երաժշտություն'})),
EmojiAnnotations(emoji='📱', codepoints=(128241,), name='բջջային հեռախոս', slug='բջջային_հեռախոս', annotations=frozenset({'հեռախոս', 'շարժական', 'բջջային'})),
EmojiAnnotations(emoji='📲', codepoints=(128242,), name='բջջային հեռախոս սլաքով', slug='բջջային_հեռախոս_սլաքով', annotations=frozenset({'հեռախոս', 'զանգել', 'սլաք', 'շարժական', 'բջջային'})),
EmojiAnnotations(emoji='📞', codepoints=(128222,), name='հեռախոսի լսափող', slug='հեռախոսի_լսափող', annotations=frozenset({'լսափող', 'հեռախոս'})),
EmojiAnnotations(emoji='📠', codepoints=(128224,), name='ֆաքսի մեքենա', slug='ֆաքսի_մեքենա', annotations=frozenset({'ֆաքս'})),
EmojiAnnotations(emoji='🔌', codepoints=(128268,), name='էլեկտրական խրոց', slug='էլեկտրական_խրոց', annotations=frozenset({'էլեկտրականություն', 'էլեկտրական', 'խրոց'})),
EmojiAnnotations(emoji='💻', codepoints=(128187,), name='նոթբուք', slug='նոթբուք', annotations=frozenset({'համակարգիչ', 'անձնական'})),
EmojiAnnotations(emoji='\U0001f5a8', codepoints=(128424,), name='տպիչ', slug='տպիչ', annotations=frozenset({'համակարգիչ'})),
EmojiAnnotations(emoji='⌨', codepoints=(9000,), name='ստեղնաշար', slug='ստեղնաշար', annotations=frozenset({'համակարգիչ'})),
EmojiAnnotations(emoji='\U0001f5b1', codepoints=(128433,), name='համակարգչի մկնիկ', slug='համակարգչի_մկնիկ', annotations=frozenset({'համակարգիչ', 'մկնիկ', 'կոճակ', 'երեք', '3'})),
EmojiAnnotations(emoji='\U0001f5b2', codepoints=(128434,), name='թրեքբոլ', slug='թրեքբոլ', annotations=frozenset({'համակարգիչ'})),
EmojiAnnotations(emoji='💽', codepoints=(128189,), name='մինի սկավառակ', slug='մինի_սկավառակ', annotations=frozenset({'համակարգիչ', 'սկավառակ', 'օպտիկական'})),
EmojiAnnotations(emoji='💾', codepoints=(128190,), name='ֆլոպի սկավառակ', slug='ֆլոպի_սկավառակ', annotations=frozenset({'համակարգիչ', 'սկավառակ', 'ֆլոպի'})),
EmojiAnnotations(emoji='💿', codepoints=(128191,), name='օպտիկական սկավառակ', slug='օպտիկական_սկավառակ', annotations=frozenset({'օպտիկական', 'dvd', 'համակարգիչ', 'blu-ray', 'cd', 'սկավառակ'})),
EmojiAnnotations(emoji='📀', codepoints=(128192,), name='dvd', slug='dvd', annotations=frozenset({'համակարգիչ', 'cd', 'սկավառակ', 'օպտիկական', 'blu-ray'})),
EmojiAnnotations(emoji='🎥', codepoints=(127909,), name='ժապավենային տեսախցիկ', slug='ժապավենային_տեսախցիկ', annotations=frozenset({'տեսախցիկ', 'կինո', 'ֆիլմ'})),
EmojiAnnotations(emoji='🎬', codepoints=(127916,), name='կինոդուբլների համարացույց', slug='կինոդուբլների_համարացույց', annotations=frozenset({'ֆիլմ', 'կինոդուբլ'})),
EmojiAnnotations(emoji='\U0001f4fd', codepoints=(128253,), name='ժապավենային պրոյեկտոր', slug='ժապավենային_պրոյեկտոր', annotations=frozenset({'պրոյեկտոր', 'ժապավեն', 'կինո', 'ֆիլմ', 'վիդեո'})),
EmojiAnnotations(emoji='📺', codepoints=(128250,), name='հեռուստացույց', slug='հեռուստացույց', annotations=frozenset({'tv', 'վիդեո'})),
EmojiAnnotations(emoji='📷', codepoints=(128247,), name='ֆոտոապարատ', slug='ֆոտոապարատ', annotations=frozenset({'վիդեո'})),
EmojiAnnotations(emoji='\U0001f4f8', codepoints=(128248,), name='ֆոտոապարատ լուսաթարթիչով', slug='ֆոտոապարատ_լուսաթարթիչով', annotations=frozenset({'լուսաթարթիչ', 'ֆոտոապարատ', 'վիդեո'})),
EmojiAnnotations(emoji='📹', codepoints=(128249,), name='տեսախցիկ', slug='տեսախցիկ', annotations=frozenset({'վիդեո'})),
EmojiAnnotations(emoji='📼', codepoints=(128252,), name='տեսաերիզ', slug='տեսաերիզ', annotations=frozenset({'երիզ', 'vhs', 'վիդեո'})),
EmojiAnnotations(emoji='🔍', codepoints=(128269,), name='ձախ ուղղված խոշորացույց', slug='ձախ_ուղղված_խոշորացույց', annotations=frozenset({'գործիք', 'ապակի', 'խոշորացնող', 'որոնել'})),
EmojiAnnotations(emoji='🔎', codepoints=(128270,), name='աջ ուղղված խոշորացույց', slug='աջ_ուղղված_խոշորացույց', annotations=frozenset({'գործիք', 'ապակի', 'խոշորացնող', 'որոնել'})),
EmojiAnnotations(emoji='🔬', codepoints=(128300,), name='մանրադիտակ', slug='մանրադիտակ', annotations=frozenset({'գործիք'})),
EmojiAnnotations(emoji='🔭', codepoints=(128301,), name='հեռադիտակ', slug='հեռադիտակ', annotations=frozenset({'գործիք'})),
EmojiAnnotations(emoji='📡', codepoints=(128225,), name='արբանյակային ալեհավաք', slug='արբանյակային_ալեհավաք', annotations=frozenset({'ափսե', 'արբանյակ', 'ալեհավաք'})),
EmojiAnnotations(emoji='\U0001f56f', codepoints=(128367,), name='մոմ', slug='մոմ', annotations=frozenset({'լույս'})),
EmojiAnnotations(emoji='💡', codepoints=(128161,), name='էլեկտրական լամպ', slug='էլեկտրական_լամպ', annotations=frozenset({'գաղափար', 'լամպ', 'էլեկտրական', 'լույս', 'կոմիքս'})),
EmojiAnnotations(emoji='🔦', codepoints=(128294,), name='գրպանի լապտեր', slug='գրպանի_լապտեր', annotations=frozenset({'գործիք', 'լապտեր', 'էլեկտրական', 'լույս'})),
EmojiAnnotations(emoji='🏮', codepoints=(127982,), name='թղթե կարմիր լապտեր', slug='թղթե_կարմիր_լապտեր', annotations=frozenset({'լապտեր', 'բար', 'լույս', 'կարմիր', 'ճապոնական'})),
EmojiAnnotations(emoji='📔', codepoints=(128212,), name='ձևավոր կազմով տետր', slug='ձևավոր_կազմով_տետր', annotations=frozenset({'գիրք', 'նոթատետր', 'ձևավորված', 'կազմ'})),
EmojiAnnotations(emoji='📕', codepoints=(128213,), name='փակված գիրք', slug='փակված_գիրք', annotations=frozenset({'գիրք', 'փակված'})),
EmojiAnnotations(emoji='📖', codepoints=(128214,), name='բացված գիրք', slug='բացված_գիրք', annotations=frozenset({'գիրք', 'բացված'})),
EmojiAnnotations(emoji='📗', codepoints=(128215,), name='կանաչ գիրք', slug='կանաչ_գիրք', annotations=frozenset({'գիրք', 'կանաչ'})),
EmojiAnnotations(emoji='📘', codepoints=(128216,), name='կապույտ գիրք', slug='կապույտ_գիրք', annotations=frozenset({'գիրք', 'կապույտ'})),
EmojiAnnotations(emoji='📙', codepoints=(128217,), name='նարնջագույն գիրք', slug='նարնջագույն_գիրք', annotations=frozenset({'գիրք', 'նարնջագույն'})),
EmojiAnnotations(emoji='📚', codepoints=(128218,), name='գրքեր', slug='գրքեր', annotations=frozenset({'գիրք'})),
EmojiAnnotations(emoji='📒', codepoints=(128210,), name='հաշվապահական մատյան', slug='հաշվապահական_մատյան', annotations=frozenset({'նոթատետր'})),
EmojiAnnotations(emoji='📃', codepoints=(128195,), name='կլորացած էջ', slug='կլորացած_էջ', annotations=frozenset({'կլորացած', 'էջ', 'փաստաթուղթ'})),
EmojiAnnotations(emoji='📜', codepoints=(128220,), name='գալարաթուղթ', slug='գալարաթուղթ', annotations=frozenset({'թուղթ'})),
EmojiAnnotations(emoji='📄', codepoints=(128196,), name='էջ', slug='էջ', annotations=frozenset({'փաստաթութղ'})),
EmojiAnnotations(emoji='📰', codepoints=(128240,), name='լրագիր', slug='լրագիր', annotations=frozenset({'նորություններ', 'թերթ'})),
EmojiAnnotations(emoji='\U0001f5de', codepoints=(128478,), name='կլորացրած լրագիր', slug='կլորացրած_լրագիր', annotations=frozenset({'լրագիր', 'կլորացրած', 'նորություններ', 'թերթ'})),
EmojiAnnotations(emoji='📑', codepoints=(128209,), name='էջանիշ ներդիրներ', slug='էջանիշ_ներդիրներ', annotations=frozenset({'նշել', 'էջանիշ', 'ներդիր', 'նշիչ'})),
EmojiAnnotations(emoji='🔖', codepoints=(128278,), name='էջանիշ', slug='էջանիշ', annotations=frozenset({'նշել'})),
EmojiAnnotations(emoji='💰', codepoints=(128176,), name='փողի պարկ', slug='փողի_պարկ', annotations=frozenset({'դոլար', 'փող', 'պարկ'})),
EmojiAnnotations(emoji='💴', codepoints=(128180,), name='իեն թղթադրամ', slug='իեն_թղթադրամ', annotations=frozenset({'բանկ', 'իեն', 'փող', 'տարադրամ', 'թղթադրամ'})),
EmojiAnnotations(emoji='💵', codepoints=(128181,), name='դոլար թղթադրամ', slug='դոլար_թղթադրամ', annotations=frozenset({'բանկ', 'դոլար', 'տարադրամ', 'փող', 'թղթադրամ'})),
EmojiAnnotations(emoji='💶', codepoints=(128182,), name='եվրո թղթադրամ', slug='եվրո_թղթադրամ', annotations=frozenset({'բանկ', 'եվրո', 'փող', 'տարադրամ', 'թղթադրամ'})),
EmojiAnnotations(emoji='💷', codepoints=(128183,), name='ֆունտ թղթադրամ', slug='ֆունտ_թղթադրամ', annotations=frozenset({'բանկ', 'փող', 'տարադրամ', 'ֆունտ', 'թղթադրամ'})),
EmojiAnnotations(emoji='💸', codepoints=(128184,), name='փող թևերով', slug='փող_թևերով', annotations=frozenset({'թղթադրամ', 'դոլար', 'բանկ', 'փող', 'թռչել', 'թևեր'})),
EmojiAnnotations(emoji='💳', codepoints=(128179,), name='պլաստիկ քարտ', slug='պլաստիկ_քարտ', annotations=frozenset({'բանկ', 'վարկ', 'փող', 'քարտ'})),
EmojiAnnotations(emoji='💹', codepoints=(128185,), name='աճող դիագրամ իենով', slug='աճող_դիագրամ_իենով', annotations=frozenset({'իեն', 'վերև', 'միտում', 'բանկ', 'փող', 'տարրադրամ', 'գրաֆիկ', 'շուկա', 'բարձրանալ', 'դիագրամ', 'աճ'})),
EmojiAnnotations(emoji='✉', codepoints=(9993,), name='ծրար', slug='ծրար', annotations=frozenset({'էլփոտ', 'նամակ'})),
EmojiAnnotations(emoji='📧', codepoints=(128231,), name='էլեկտրոնային նամակ', slug='էլեկտրոնային_նամակ', annotations=frozenset({'փոստ', 'նամակ', 'էլփոստ'})),
EmojiAnnotations(emoji='📨', codepoints=(128232,), name='ստացվող ծրար', slug='ստացվող_ծրար', annotations=frozenset({'փոստ', 'ստանալ', 'ծրար', 'նամակ', 'էլփոստ', 'ստացվող'})),
EmojiAnnotations(emoji='📩', codepoints=(128233,), name='ծրար սլաքով', slug='ծրար_սլաքով', annotations=frozenset({'փոստ', 'ուղարկված', 'ծրար', 'նամակ', 'էլփոստ', 'ուղարկվող', 'ներքև', 'սլաք'})),
EmojiAnnotations(emoji='📤', codepoints=(128228,), name='ելքի արկղ', slug='ելքի_արկղ', annotations=frozenset({'դարակ', 'փոստ', 'նամակ', 'ուղարկված', 'արկղ'})),
EmojiAnnotations(emoji='📥', codepoints=(128229,), name='մուտքի արկղ', slug='մուտքի_արկղ', annotations=frozenset({'դարակ', 'փոստ', 'նամակ', 'արկղ', 'ստանալ'})),
EmojiAnnotations(emoji='📦', codepoints=(128230,), name='ծանրոց', slug='ծանրոց', annotations=frozenset({'արկղ'})),
EmojiAnnotations(emoji='📫', codepoints=(128235,), name='փակ փոստարկղ բարձրացված դրոշակով', slug='փակ_փոստարկղ_բարձրացված_դրոշակով', annotations=frozenset({'փոստատուփ', 'փոստ', 'փակ', 'փոստարկղ'})),
EmojiAnnotations(emoji='📪', codepoints=(128234,), name='փակ փոստարկղ իջեցված դրոշակով', slug='փակ_փոստարկղ_իջեցված_դրոշակով', annotations=frozenset({'փոստատուփ', 'փոստ', 'փակ', 'իջեցված', 'փոստարկղ'})),
EmojiAnnotations(emoji='📬', codepoints=(128236,), name='բաց փոստարկղ բարձրացված դրոշակով', slug='բաց_փոստարկղ_բարձրացված_դրոշակով', annotations=frozenset({'բաց', 'փոստատուփ', 'փոստ', 'փոստարկղ'})),
EmojiAnnotations(emoji='📭', codepoints=(128237,), name='բաց փոստարկղ իջեցված դրոշակով', slug='բաց_փոստարկղ_իջեցված_դրոշակով', annotations=frozenset({'բաց', 'փոստատուփ', 'փոստ', 'իջեցված', 'փոստարկղ'})),
EmojiAnnotations(emoji='📮', codepoints=(128238,), name='փոստատուփ', slug='փոստատուփ', annotations=frozenset({'փոստ', 'փոստարկղ'})),
EmojiAnnotations(emoji='\U0001f5f3', codepoints=(128499,), name='քվեատուփ քվեաթերթիկով', slug='քվեատուփ_քվեաթերթիկով', annotations=frozenset({'քվեաթերթիկ', 'տուփ'})),
EmojiAnnotations(emoji='✒', codepoints=(10002,), name='սև գրչածայր', slug='սև_գրչածայր', annotations=frozenset({'գրչածայր', 'գրիչ'})),
EmojiAnnotations(emoji='\U0001f58b', codepoints=(128395,), name='ինքնահոս գրիչ', slug='ինքնահոս_գրիչ', annotations=frozenset({'ինքնահոս', 'գրիչ'})),
EmojiAnnotations(emoji='\U0001f58a', codepoints=(128394,), name='գրիչ', slug='գրիչ', annotations=frozenset({'գնդիկավոր գրիչ'})),
EmojiAnnotations(emoji='\U0001f58c', codepoints=(128396,), name='վրձին', slug='վրձին', annotations=frozenset({'ներկել', 'նկարել'})),
EmojiAnnotations(emoji='\U0001f58d', codepoints=(128397,), name='մոմամատիտ', slug='մոմամատիտ', annotations=frozenset({'գունավոր մատիտ'})),
EmojiAnnotations(emoji='📝', codepoints=(128221,), name='հուշաթերթ', slug='հուշաթերթ', annotations=frozenset({'մատիտ'})),
EmojiAnnotations(emoji='📁', codepoints=(128193,), name='թղթապանակ', slug='թղթապանակ', annotations=frozenset({'ֆայլ'})),
EmojiAnnotations(emoji='📂', codepoints=(128194,), name='բաց թղթապանակ', slug='բաց_թղթապանակ', annotations=frozenset({'բաց', 'թղթապանակ', 'ֆայլ'})),
EmojiAnnotations(emoji='\U0001f5c2', codepoints=(128450,), name='քարտադարանի բաժանարարներ', slug='քարտադարանի_բաժանարարներ', annotations=frozenset({'ինդեքս', 'բաժանարար', 'քարտ'})),
EmojiAnnotations(emoji='📅', codepoints=(128197,), name='օրացույց', slug='օրացույց', annotations=frozenset({'ամսաթիվ'})),
EmojiAnnotations(emoji='📆', codepoints=(128198,), name='պոկովի օրացույց', slug='պոկովի_օրացույց', annotations=frozenset({'օրացույց'})),
EmojiAnnotations(emoji='\U0001f5d2', codepoints=(128466,), name='պարուրավոր նոթատետր', slug='պարուրավոր_նոթատետր', annotations=frozenset({'գրքույկ', 'տետր', 'պարույր'})),
EmojiAnnotations(emoji='\U0001f5d3', codepoints=(128467,), name='պարուրավոր օրացույց', slug='պարուրավոր_օրացույց', annotations=frozenset({'օրացույց', 'գրքույկ', 'պարույր'})),
EmojiAnnotations(emoji='📇', codepoints=(128199,), name='քարտադարան', slug='քարտադարան', annotations=frozenset({'ինդեքս', 'քարտ'})),
EmojiAnnotations(emoji='📈', codepoints=(128200,), name='աճող դիագրամ', slug='աճող_դիագրամ', annotations=frozenset({'գրաֆիկ', 'դիագրամ', 'վեր', 'աճ', 'միտում'})),
EmojiAnnotations(emoji='📉', codepoints=(128201,), name='նվազող դիագրամ', slug='նվազող_դիագրամ', annotations=frozenset({'գրաֆիկ', 'ներքև', 'դիագրամ', 'միտում'})),
EmojiAnnotations(emoji='📊', codepoints=(128202,), name='գոտեձև գծապատկեր', slug='գոտեձև_գծապատկեր', annotations=frozenset({'գոտի', 'գրաֆիկ', 'դիագրամ'})),
EmojiAnnotations(emoji='📍', codepoints=(128205,), name='գնդասեղ', slug='գնդասեղ', annotations=frozenset({'քորոց'})),
EmojiAnnotations(emoji='\U0001f587', codepoints=(128391,), name='միացված սկրեպներ', slug='միացված_սկրեպներ', annotations=frozenset({'միացնել', 'սկրեպ'})),
EmojiAnnotations(emoji='📏', codepoints=(128207,), name='քանոն', slug='քանոն', annotations=frozenset({'ուղղանկյուն'})),
EmojiAnnotations(emoji='📐', codepoints=(128208,), name='եռանկյունի քանոն', slug='եռանկյունի_քանոն', annotations=frozenset({'եռանկյունի', 'քանոն'})),
EmojiAnnotations(emoji='✂', codepoints=(9986,), name='մկրատ', slug='մկրատ', annotations=frozenset({'գործիք'})),
EmojiAnnotations(emoji='\U0001f5c3', codepoints=(128451,), name='քարտադարանի արկղ', slug='քարտադարանի_արկղ', annotations=frozenset({'ֆայլ', 'արկղ', 'քարտ'})),
EmojiAnnotations(emoji='\U0001f5c4', codepoints=(128452,), name='պահարան', slug='պահարան', annotations=frozenset({'ֆայլ'})),
EmojiAnnotations(emoji='🔒', codepoints=(128274,), name='կողպեք', slug='կողպեք', annotations=frozenset({'փակ'})),
EmojiAnnotations(emoji='🔓', codepoints=(128275,), name='բաց կողպեք', slug='բաց_կողպեք', annotations=frozenset({'բաց', 'ապակողպել', 'կողպեք'})),
EmojiAnnotations(emoji='🔏', codepoints=(128271,), name='կողպեք ինքնահոսով', slug='կողպեք_ինքնահոսով', annotations=frozenset({'գրչածայր', 'գաղտնիություն', 'կողպեք', 'թանաք', 'գրիչ'})),
EmojiAnnotations(emoji='🔐', codepoints=(128272,), name='փակ կողպեք բանալիով', slug='փակ_կողպեք_բանալիով', annotations=frozenset({'ապահով', 'փակ', 'բնալի', 'կողպեք'})),
EmojiAnnotations(emoji='🔑', codepoints=(128273,), name='բանալի', slug='բանալի', annotations=frozenset({'գաղտնաբառ', 'կողպեք'})),
EmojiAnnotations(emoji='\U0001f5dd', codepoints=(128477,), name='հին բանալի', slug='հին_բանալի', annotations=frozenset({'հին', 'բանալի', 'կողպեք'})),
EmojiAnnotations(emoji='🔨', codepoints=(128296,), name='մուրճ', slug='մուրճ', annotations=frozenset({'գործիք'})),
EmojiAnnotations(emoji='⛏', codepoints=(9935,), name='քլունգ', slug='քլունգ', annotations=frozenset({'գործիք', 'հանք'})),
EmojiAnnotations(emoji='⚒', codepoints=(9874,), name='մուրճեր', slug='մուրճեր', annotations=frozenset({'գործիք', 'մուրճ'})),
EmojiAnnotations(emoji='\U0001f6e0', codepoints=(128736,), name='մուրճ և պտուտակաբանալի', slug='մուրճ_և_պտուտակաբանալի', annotations=frozenset({'գործիք', 'պտուտակաբանալի', 'մուրճ'})),
EmojiAnnotations(emoji='🔧', codepoints=(128295,), name='պտուտակաբանալի', slug='պտուտակաբանալի', annotations=frozenset({'գործիք'})),
EmojiAnnotations(emoji='🔩', codepoints=(128297,), name='մանեկ ու հեղույս', slug='մանեկ_ու_հեղույս', annotations=frozenset({'մանեկ', 'գործիք', 'հեղույս'})),
EmojiAnnotations(emoji='⚙', codepoints=(9881,), name='ատամնանիվ', slug='ատամնանիվ', annotations=frozenset({'գործիք'})),
EmojiAnnotations(emoji='\U0001f5dc', codepoints=(128476,), name='մամլակ', slug='մամլակ', annotations=frozenset({'մամլիչ'})),
EmojiAnnotations(emoji='⚗', codepoints=(9879,), name='թորիչ', slug='թորիչ', annotations=frozenset({'քիմիա', 'գործիք'})),
EmojiAnnotations(emoji='⚖', codepoints=(9878,), name='նժարավոր կշեռք', slug='նժարավոր_կշեռք', annotations=frozenset({'հավասարակշռություն', 'կշեռք', 'գործիք', 'ծանրություն', 'արդարություն', 'կենդանակերպ'})),
EmojiAnnotations(emoji='⛓', codepoints=(9939,), name='շղթաներ', slug='շղթաներ', annotations=frozenset({'շղթա'})),
EmojiAnnotations(emoji='💉', codepoints=(128137,), name='ներարկիչ', slug='ներարկիչ', annotations=frozenset({'գործիք', 'հիվանդ', 'բժշկություն', 'ասեղ', 'բժիշկ'})),
EmojiAnnotations(emoji='💊', codepoints=(128138,), name='դեղահաբ', slug='դեղահաբ', annotations=frozenset({'հիվանդ', 'բժշկություն', 'բժիշկ'})),
EmojiAnnotations(emoji='\U0001f5e1', codepoints=(128481,), name='դաշույն', slug='դաշույն', annotations=frozenset({'զենք', 'դանակ'})),
EmojiAnnotations(emoji='🔪', codepoints=(128298,), name='խոհանոցային դանակ', slug='խոհանոցային_դանակ', annotations=frozenset({'գործիք', 'եփել', 'խոհարար', 'դանակ', 'զենք'})),
EmojiAnnotations(emoji='⚔', codepoints=(9876,), name='խաչված սրեր', slug='խաչված_սրեր', annotations=frozenset({'սրեր', 'խաչված', 'զենք'})),
EmojiAnnotations(emoji='🔫', codepoints=(128299,), name='ատրճանակ', slug='ատրճանակ', annotations=frozenset({'գործիք', 'զենք'})),
EmojiAnnotations(emoji='\U0001f6e1', codepoints=(128737,), name='վահան', slug='վահան', annotations=frozenset({'զենք'})),
EmojiAnnotations(emoji='\U0001f3f9', codepoints=(127993,), name='նետ ու աղեղ', slug='նետ_ու_աղեղ', annotations=frozenset({'գործիք', 'նետ', 'աղեղնավոր', 'զենք', 'աղեղ'})),
EmojiAnnotations(emoji='🏁', codepoints=(127937,), name='վանդակավոր դրոշ', slug='վանդակավոր_դրոշ', annotations=frozenset({'մրցարշավ', 'վանդակավոր'})),
EmojiAnnotations(emoji='\U0001f3f3', codepoints=(127987,), name='ծածանվող սպիտակ դրոշ', slug='ծածանվող_սպիտակ_դրոշ', annotations=frozenset({'ծածանվող'})),
EmojiAnnotations(emoji='\U0001f3f4', codepoints=(127988,), name='ծածանվող սև դրոշ', slug='ծածանվող_սև_դրոշ', annotations=frozenset({'ծածանվող'})),
EmojiAnnotations(emoji='🚩', codepoints=(128681,), name='եռանկյունի դրոշ', slug='եռանկյունի_դրոշ', annotations=frozenset({'փոստ'})),
EmojiAnnotations(emoji='⚰', codepoints=(9904,), name='դագաղ', slug='դագաղ', annotations=frozenset({'մահ'})),
EmojiAnnotations(emoji='⚱', codepoints=(9905,), name='աճյունասափոր', slug='աճյունասափոր', annotations=frozenset({'հուղարկավորություն', 'մահ'})),
EmojiAnnotations(emoji='🗿', codepoints=(128511,), name='մոաի', slug='մոաի', annotations=frozenset({'դեմք', 'մոայի', 'արձան'})),
EmojiAnnotations(emoji='\U0001f6e2', codepoints=(128738,), name='նավթի տակառ', slug='նավթի_տակառ', annotations=frozenset({'տակառ', 'նավթ'})),
EmojiAnnotations(emoji='🔮', codepoints=(128302,), name='բյուրեղյա գունդ', slug='բյուրեղյա_գունդ', annotations=frozenset({'բյուրեղ', 'բախտ', 'գործիք', 'հեքիաթ', 'ֆանտազիա', 'գունդ'})),
EmojiAnnotations(emoji='🏧', codepoints=(127975,), name='բանկոմատի նշան', slug='բանկոմատի_նշան', annotations=frozenset({'բանկ', 'գանձապահ', 'atm', 'բանկոմատ'})),
EmojiAnnotations(emoji='🚮', codepoints=(128686,), name='աղբամանի նշան', slug='աղբամանի_նշան', annotations=frozenset({'աղբ', 'աղբարկղ'})),
EmojiAnnotations(emoji='🚰', codepoints=(128688,), name='խմելու ջուր', slug='խմելու_ջուր', annotations=frozenset({'խմելու', 'խմել', 'ջուր'})),
EmojiAnnotations(emoji='♿', codepoints=(9855,), name='անվասայլակ', slug='անվասայլակ', annotations=frozenset({'մատչելիություն'})),
EmojiAnnotations(emoji='🚹', codepoints=(128697,), name='տղամարդկանց զուգարան', slug='տղամարդկանց_զուգարան', annotations=frozenset({'wc', 'տղամարդ', 'զուգարան'})),
EmojiAnnotations(emoji='🚺', codepoints=(128698,), name='կանանց զուգարան', slug='կանանց_զուգարան', annotations=frozenset({'wc', 'կին', 'զուգարան'})),
EmojiAnnotations(emoji='🚻', codepoints=(128699,), name='ընդհանուր զուգարան', slug='ընդհանուր_զուգարան', annotations=frozenset({'wc', 'զուգարան'})),
EmojiAnnotations(emoji='🚼', codepoints=(128700,), name='նորածնի նշան', slug='նորածնի_նշան', annotations=frozenset({'նորածին', 'փոխել'})),
EmojiAnnotations(emoji='🚾', codepoints=(128702,), name='զուգարան', slug='զուգարան', annotations=frozenset({'wc', 'ջուր'})),
EmojiAnnotations(emoji='🛂', codepoints=(128706,), name='անձնագրային ստուգում', slug='անձնագրային_ստուգում', annotations=frozenset({'անձնագիր', 'ստուգում'})),
EmojiAnnotations(emoji='🛄', codepoints=(128708,), name='ուղեբեռի վերաբերյալ բողոք', slug='ուղեբեռի_վերաբերյալ_բողոք', annotations=frozenset({'ուղեբեռ', 'բողոք'})),
EmojiAnnotations(emoji='🛅', codepoints=(128709,), name='ուղեբեռ պահախցում', slug='ուղեբեռ_պահախցում', annotations=frozenset({'ուղեբեռ', 'բեռ', 'պահարան'})),
EmojiAnnotations(emoji='🚸', codepoints=(128696,), name='ճանապարհը հատող երեխաներ', slug='ճանապարհը_հատող_երեխաներ', annotations=frozenset({'երեխա', 'երթևեկություն', 'հատող', 'հետիոտն'})),
EmojiAnnotations(emoji='⛔', codepoints=(9940,), name='մուտք չկա', slug='մուտք_չկա', annotations=frozenset({'ոչ', 'արգելված', 'երթևեկություն', 'մուտք'})),
EmojiAnnotations(emoji='🚫', codepoints=(128683,), name='արգելված է', slug='արգելված_է', annotations=frozenset({'ոչ', 'արգելված', 'մուտք'})),
EmojiAnnotations(emoji='🚳', codepoints=(128691,), name='հեծանիվների մուտքն արգելված է', slug='հեծանիվների_մուտքն_արգելված_է', annotations=frozenset({'փոխադրամիջոց', 'արգելված', 'հեծանիվ', 'ոչ'})),
EmojiAnnotations(emoji='🚭', codepoints=(128685,), name='չծխել', slug='չծխել', annotations=frozenset({'ծխել', 'արգելված', 'ոչ'})),
EmojiAnnotations(emoji='🚯', codepoints=(128687,), name='չաղտոտել', slug='չաղտոտել', annotations=frozenset({'աղբ', 'արգելված', 'ոչ'})),
EmojiAnnotations(emoji='🚱', codepoints=(128689,), name='խմելու ջուր չէ', slug='խմելու_ջուր_չէ', annotations=frozenset({'արգելված', 'խմելու', 'խմել', 'ջուր', 'ոչ'})),
EmojiAnnotations(emoji='🚷', codepoints=(128695,), name='հետիոտնների մուտքն արգելված է', slug='հետիոտնների_մուտքն_արգելված_է', annotations=frozenset({'հետիոտն', 'արգելված', 'ոչ'})),
EmojiAnnotations(emoji='⬆', codepoints=(11014,), name='վերև սլաք', slug='վերև_սլաք', annotations=frozenset({'ուղղություն', 'սլաք', 'գլխավոր', 'հյուսիս'})),
EmojiAnnotations(emoji='↗', codepoints=(8599,), name='վերև աջ սլաք', slug='վերև_աջ_սլաք', annotations=frozenset({'ուղղություն', 'սլաք', 'հյուսիս-արևելք'})),
EmojiAnnotations(emoji='➡', codepoints=(10145,), name='աջ սլաք', slug='աջ_սլաք', annotations=frozenset({'ուղղություն', 'սլաք', 'գլխավոր', 'արևելք'})),
EmojiAnnotations(emoji='↘', codepoints=(8600,), name='ներքև աջ սլաք', slug='ներքև_աջ_սլաք', annotations=frozenset({'ուղղություն', 'սլաք', 'հարավ-արևելք'})),
EmojiAnnotations(emoji='⬇', codepoints=(11015,), name='ներքև սլաք', slug='ներքև_սլաք', annotations=frozenset({'ներքև', 'ուղղություն', 'սլաք', 'գլխավոր', 'հարավ'})),
EmojiAnnotations(emoji='↙', codepoints=(8601,), name='ներքև ձախ սլաք', slug='ներքև_ձախ_սլաք', annotations=frozenset({'հարավ-արևմուտք', 'ուղղություն', 'սլաք'})),
EmojiAnnotations(emoji='⬅', codepoints=(11013,), name='ձախ սլաք', slug='ձախ_սլաք', annotations=frozenset({'արևմուտք', 'ուղղություն', 'սլաք', 'գլխավոր'})),
EmojiAnnotations(emoji='↖', codepoints=(8598,), name='վերև ձախ սլաք', slug='վերև_ձախ_սլաք', annotations=frozenset({'ուղղություն', 'սլաք', 'հյուսիս-արևմուտք'})),
EmojiAnnotations(emoji='↕', codepoints=(8597,), name='վերև-ներքև սլաք', slug='վերև_ներքև_սլաք', annotations=frozenset({'սլաք'})),
EmojiAnnotations(emoji='↔', codepoints=(8596,), name='աջ-ձախ սլաք', slug='աջ_ձախ_սլաք', annotations=frozenset({'սլաք'})),
EmojiAnnotations(emoji='↩', codepoints=(8617,), name='աջ շրջադարձի սլաք', slug='աջ_շրջադարձի_սլաք', annotations=frozenset({'սլաք'})),
EmojiAnnotations(emoji='↪', codepoints=(8618,), name='ձախ շրջադարձի սլաք', slug='ձախ_շրջադարձի_սլաք', annotations=frozenset({'սլաք'})),
EmojiAnnotations(emoji='⤴', codepoints=(10548,), name='ձախից վերև թեքվող սլաք', slug='ձախից_վերև_թեքվող_սլաք', annotations=frozenset({'սլաք'})),
EmojiAnnotations(emoji='⤵', codepoints=(10549,), name='ձախից ներքև թեքվող սլաք', slug='ձախից_ներքև_թեքվող_սլաք', annotations=frozenset({'ներքև', 'սլաք'})),
EmojiAnnotations(emoji='🔃', codepoints=(128259,), name='ժամասլաքի ուղղությամբ ուղղահայաց սլաքներ', slug='ժամասլաքի_ուղղությամբ_ուղղահայաց_սլաքներ', annotations=frozenset({'վերաբեռնել', 'ժամասլաքի ուղղությամբ', 'սլաք'})),
EmojiAnnotations(emoji='🔄', codepoints=(128260,), name='ժամասլաքին հակառակ ուղղությամբ սլաքներով կոճակ', slug='ժամասլաքին_հակառակ_ուղղությամբ_սլաքներով_կոճակ', annotations=frozenset({'հակառակ ուղղությամբ', 'սլաք', 'ժամասլաքին հակառակ ուղղությամբ'})),
EmojiAnnotations(emoji='🔙', codepoints=(128281,), name='հետ գրությամբ սլաք', slug='հետ_գրությամբ_սլաք', annotations=frozenset({'հետ', 'սլաք'})),
EmojiAnnotations(emoji='🔚', codepoints=(128282,), name='վերջ գրությամբ սլաք', slug='վերջ_գրությամբ_սլաք', annotations=frozenset({'սլաք', 'վերջ'})),
EmojiAnnotations(emoji='🔛', codepoints=(128283,), name='միացված է գրությամբ սլաք', slug='միացված_է_գրությամբ_սլաք', annotations=frozenset({'նշան', 'սլաք', 'միացված է'})),
EmojiAnnotations(emoji='🔜', codepoints=(128284,), name='շուտով գրությամբ սլաք', slug='շուտով_գրությամբ_սլաք', annotations=frozenset({'սլաք', 'շուտով'})),
EmojiAnnotations(emoji='🔝', codepoints=(128285,), name='վերև գրությամբ սլաք', slug='վերև_գրությամբ_սլաք', annotations=frozenset({'սլաք', 'վերև', 'վեր'})),
EmojiAnnotations(emoji='\U0001f6d0', codepoints=(128720,), name='աղոթատեղի', slug='աղոթատեղի', annotations=frozenset({'պաշտամունք', 'կրոն'})),
EmojiAnnotations(emoji='⚛', codepoints=(9883,), name='ատոմի նշան', slug='ատոմի_նշան', annotations=frozenset({'ատոմ', 'աթեիստ'})),
EmojiAnnotations(emoji='\U0001f549', codepoints=(128329,), name='օմ', slug='օմ', annotations=frozenset({'կրոն', 'հնդիկ'})),
EmojiAnnotations(emoji='✡', codepoints=(10017,), name='դավթի աստղ', slug='դավթի_աստղ', annotations=frozenset({'դավիթ', 'հրեա', 'հրեական', 'կրոն', 'աստղ'})),
EmojiAnnotations(emoji='☸', codepoints=(9784,), name='դհարմայի անիվ', slug='դհարմայի_անիվ', annotations=frozenset({'դհարմա', 'անիվ', 'բուդդիստ', 'կրոն'})),
EmojiAnnotations(emoji='☯', codepoints=(9775,), name='ին և յան', slug='ին_և_յան', annotations=frozenset({'յին', 'դաո', 'դաոսիստ', 'կրոն', 'յան'})),
EmojiAnnotations(emoji='✝', codepoints=(10013,), name='լատինական խաչ', slug='լատինական_խաչ', annotations=frozenset({'քրիստոնյա', 'խաչ', 'կրոն'})),
EmojiAnnotations(emoji='☦', codepoints=(9766,), name='ուղղափառ խաչ', slug='ուղղափառ_խաչ', annotations=frozenset({'քրիստոնյա', 'խաչ', 'կրոն'})),
EmojiAnnotations(emoji='☪', codepoints=(9770,), name='աստղ և մահիկ', slug='աստղ_և_մահիկ', annotations=frozenset({'իսլամ', 'մուսուլման', 'կրոն'})),
EmojiAnnotations(emoji='☮', codepoints=(9774,), name='խաղաղության նշան', slug='խաղաղության_նշան', annotations=frozenset({'խաղաղություն'})),
EmojiAnnotations(emoji='\U0001f54e', codepoints=(128334,), name='մենորա', slug='մենորա', annotations=frozenset({'մոմակալ', 'աշտանակ', 'կրոն'})),
EmojiAnnotations(emoji='🔯', codepoints=(128303,), name='կետիկով վեցթևանի աստղ', slug='կետիկով_վեցթևանի_աստղ', annotations=frozenset({'բախտ', 'աստղ'})),
EmojiAnnotations(emoji='♻', codepoints=(9851,), name='վերամշակման նշան', slug='վերամշակման_նշան', annotations=frozenset({'վերամշակել'})),
EmojiAnnotations(emoji='📛', codepoints=(128219,), name='բեյջ', slug='բեյջ', annotations=frozenset({'անուն'})),
EmojiAnnotations(emoji='🔰', codepoints=(128304,), name='սկսնակ լինելու ճապոնական նշան', slug='սկսնակ_լինելու_ճապոնական_նշան', annotations=frozenset({'հեծանակ', 'սկսնակ', 'գործիք', 'դեղին', 'տերև', 'ճապոնական', 'կանաչ'})),
EmojiAnnotations(emoji='🔱', codepoints=(128305,), name='եռաժանի խորհրդանշան', slug='եռաժանի_խորհրդանշան', annotations=frozenset({'գործիք', 'եռաժանի', 'նավ', 'խարիսխ', 'զինանշան'})),
EmojiAnnotations(emoji='⭕', codepoints=(11093,), name='մեծ թավ շրջան', slug='մեծ_թավ_շրջան', annotations=frozenset({'օ', 'շրջան'})),
EmojiAnnotations(emoji='✅', codepoints=(9989,), name='սպիտակ թավ ստուգանշան', slug='սպիտակ_թավ_ստուգանշան', annotations=frozenset({'նշել', 'ստուգել'})),
EmojiAnnotations(emoji='☑', codepoints=(9745,), name='վանդակ ստուգանշանով', slug='վանդակ_ստուգանշանով', annotations=frozenset({'նշել', 'քվեաթերթիկ', 'տուփ'})),
EmojiAnnotations(emoji='✔', codepoints=(10004,), name='թավ ստուգանշան', slug='թավ_ստուգանշան', annotations=frozenset({'նշել', 'ստուգել'})),
EmojiAnnotations(emoji='✖', codepoints=(10006,), name='բազմապատկման թավ նշան', slug='բազմապատկման_թավ_նշան', annotations=frozenset({'բազմապատկում', 'x', 'բազմապատկել', 'չեղարկել'})),
EmojiAnnotations(emoji='❌', codepoints=(10060,), name='խաչի նշան', slug='խաչի_նշան', annotations=frozenset({'բազմապատկում', 'x', 'նշել', 'բազմապատկել', 'չեղարկել'})),
EmojiAnnotations(emoji='❎', codepoints=(10062,), name='խաչի նշանով կոճակ', slug='խաչի_նշանով_կոճակ', annotations=frozenset({'նշել', 'քառակուսի'})),
EmojiAnnotations(emoji='➕', codepoints=(10133,), name='գումարման թավ նշան', slug='գումարման_թավ_նշան', annotations=frozenset({'պլյուս', 'մաթեմատիկա'})),
EmojiAnnotations(emoji='➖', codepoints=(10134,), name='հանման թավ նշան', slug='հանման_թավ_նշան', annotations=frozenset({'մինուս', 'մաթեմատիկա'})),
EmojiAnnotations(emoji='➗', codepoints=(10135,), name='բաժանման թավ նշան', slug='բաժանման_թավ_նշան', annotations=frozenset({'բաժանում', 'մաթեմատիկա'})),
EmojiAnnotations(emoji='➰', codepoints=(10160,), name='ոլորուն հանգույց', slug='ոլորուն_հանգույց', annotations=frozenset({'ոլորված', 'հանգույց'})),
EmojiAnnotations(emoji='➿', codepoints=(10175,), name='կրկնակի ոլորուն հանգույց', slug='կրկնակի_ոլորուն_հանգույց', annotations=frozenset({'ոլորված', 'կրկնակի', 'հանգույց'})),
EmojiAnnotations(emoji='〽', codepoints=(12349,), name='իորիտեն', slug='իորիտեն', annotations=frozenset({'նշել', 'մաս'})),
EmojiAnnotations(emoji='✳', codepoints=(10035,), name='ութ թևանի աստղանիշ', slug='ութ_թևանի_աստղանիշ', annotations=frozenset({'աստղանիշ'})),
EmojiAnnotations(emoji='✴', codepoints=(10036,), name='աստղիկ', slug='աստղիկ', annotations=frozenset({'աստղ'})),
EmojiAnnotations(emoji='💱', codepoints=(128177,), name='տարադրամի փոխանակում', slug='տարադրամի_փոխանակում', annotations=frozenset({'բանկ', 'փոխանակում', 'փող', 'տարադրամ'})),
EmojiAnnotations(emoji='💲', codepoints=(128178,), name='դոլարի թավ նշան', slug='դոլարի_թավ_նշան', annotations=frozenset({'դոլար', 'տարադրամ', 'փող'})),
EmojiAnnotations(emoji='‼', codepoints=(8252,), name='կրկնակի բացականչական նշան', slug='կրկնակի_բացականչական_նշան', annotations=frozenset({'նշան', 'կետադրություն', 'բացականչություն'})),
EmojiAnnotations(emoji='⁉', codepoints=(8265,), name='բացականչական հարցական նշան', slug='բացականչական_հարցական_նշան', annotations=frozenset({'նշան', 'կետադրություն', 'հարց', 'բացականչություն'})),
EmojiAnnotations(emoji='❓', codepoints=(10067,), name='հարցական նշան', slug='հարցական_նշան', annotations=frozenset({'նշան', 'կետադրություն', 'հարց'})),
EmojiAnnotations(emoji='❔', codepoints=(10068,), name='սպիտակ հարցական նշան', slug='սպիտակ_հարցական_նշան', annotations=frozenset({'նշան', 'կետադրություն', 'հարց', 'ուրվագծված'})),
EmojiAnnotations(emoji='❕', codepoints=(10069,), name='սպիտակ բացականչական նշան', slug='սպիտակ_բացականչական_նշան', annotations=frozenset({'նշան', 'կետադրություն', 'ուրվագծված', 'բացականչություն'})),
EmojiAnnotations(emoji='❗', codepoints=(10071,), name='բացականչական նշան', slug='բացականչական_նշան', annotations=frozenset({'նշան', 'կետադրություն', 'բացականչություն'})),
EmojiAnnotations(emoji='〰', codepoints=(12336,), name='ալիքաձև գծիկ', slug='ալիքաձև_գծիկ', annotations=frozenset({'ալիքաձև', 'կետադրություն', 'գծիկ'})),
EmojiAnnotations(emoji='™', codepoints=(8482,), name='ապրանքանիշ', slug='ապրանքանիշ', annotations=frozenset({'նշան', 'tm'})),
EmojiAnnotations(emoji='♈', codepoints=(9800,), name='խոյ', slug='խոյ', annotations=frozenset({'կենդանակերպ'})),
EmojiAnnotations(emoji='♉', codepoints=(9801,), name='ցուլ', slug='ցուլ', annotations=frozenset({'կենդանակերպ'})),
EmojiAnnotations(emoji='♊', codepoints=(9802,), name='երկվորյակներ', slug='երկվորյակներ', annotations=frozenset({'կենդանակերպ'})),
EmojiAnnotations(emoji='♋', codepoints=(9803,), name='խեցգետին', slug='խեցգետին', annotations=frozenset({'կենդանակերպ'})),
EmojiAnnotations(emoji='♌', codepoints=(9804,), name='առյուծ', slug='առյուծ', annotations=frozenset({'կենդանակերպ'})),
EmojiAnnotations(emoji='♍', codepoints=(9805,), name='կույս', slug='կույս', annotations=frozenset({'օրիորդ', 'կենդանակերպ'})),
EmojiAnnotations(emoji='♎', codepoints=(9806,), name='կշեռք', slug='կշեռք', annotations=frozenset({'արդարադատություն', 'կենդանակերպ', 'հավասարակշռություն'})),
EmojiAnnotations(emoji='♏', codepoints=(9807,), name='կարիճ', slug='կարիճ', annotations=frozenset({'կենդանակերպ'})),
EmojiAnnotations(emoji='♐', codepoints=(9808,), name='աղեղնավոր', slug='աղեղնավոր', annotations=frozenset({'կենդանակերպ'})),
EmojiAnnotations(emoji='♑', codepoints=(9809,), name='այծեղջյուր', slug='այծեղջյուր', annotations=frozenset({'այծ', 'կենդանակերպ'})),
EmojiAnnotations(emoji='♒', codepoints=(9810,), name='ջրհոս', slug='ջրհոս', annotations=frozenset({'կենդանակերպ', 'կրող', 'ջուր'})),
EmojiAnnotations(emoji='♓', codepoints=(9811,), name='ձկներ', slug='ձկներ', annotations=frozenset({'կենդանակերպ', 'ձուկ'})),
EmojiAnnotations(emoji='⛎', codepoints=(9934,), name='օձակիր', slug='օձակիր', annotations=frozenset({'օձ', 'կենդանակերպ', 'կրող'})),
EmojiAnnotations(emoji='🔀', codepoints=(128256,), name='խառնել կատարումները կոճակ', slug='խառնել_կատարումները_կոճակ', annotations=frozenset({'խաչված', 'սլաք'})),
EmojiAnnotations(emoji='🔁', codepoints=(128257,), name='անընդհատ կրկնել կոճակ', slug='անընդհատ_կրկնել_կոճակ', annotations=frozenset({'ժամասլաքի ուղղությամբ', 'սլաք', 'կրկնել'})),
EmojiAnnotations(emoji='🔂', codepoints=(128258,), name='կրկնել մեկ անգամ կոճակ', slug='կրկնել_մեկ_անգամ_կոճակ', annotations=frozenset({'ժամասլաքի ուղղությամբ', 'սլաք', 'մեկ անգամ'})),
EmojiAnnotations(emoji='▶', codepoints=(9654,), name='նվագարկել կոճակ', slug='նվագարկել_կոճակ', annotations=frozenset({'նվագարկել', 'եռանկյուն', 'սլաք', 'աջ'})),
EmojiAnnotations(emoji='⏩', codepoints=(9193,), name='արագ առաջ կոճակ', slug='արագ_առաջ_կոճակ', annotations=frozenset({'արագ', 'սլաք', 'կրկնակի', 'առաջ'})),
EmojiAnnotations(emoji='⏭', codepoints=(9197,), name='հաջորդ կատարումը կոճակ', slug='հաջորդ_կատարումը_կոճակ', annotations=frozenset({'հաջորդ տեսարանը', 'եռանկյուն', 'սլաք', 'հաջորդ կատարումը'})),
EmojiAnnotations(emoji='⏯', codepoints=(9199,), name='նվագարկել կամ դադար կոճակ', slug='նվագարկել_կամ_դադար_կոճակ', annotations=frozenset({'նվագարկել', 'դադար', 'եռանկյուն', 'սլաք', 'աջ'})),
EmojiAnnotations(emoji='◀', codepoints=(9664,), name='հետադարձել կոճակ', slug='հետադարձել_կոճակ', annotations=frozenset({'ձախ', 'եռանկյուն', 'սլաք', 'հետադարձել'})),
EmojiAnnotations(emoji='⏪', codepoints=(9194,), name='արագ հետադարձել կոճակ', slug='արագ_հետադարձել_կոճակ', annotations=frozenset({'սլաք', 'կրկնակի', 'հետադարձել'})),
EmojiAnnotations(emoji='⏮', codepoints=(9198,), name='վերջին կատարումը կոճակ', slug='վերջին_կատարումը_կոճակ', annotations=frozenset({'նախորդ տեսարանը', 'նախորդ կատարումը', 'սլաք', 'եռանկյուն'})),
EmojiAnnotations(emoji='🔼', codepoints=(128316,), name='վերև կոճակ', slug='վերև_կոճակ', annotations=frozenset({'կոճակ', 'կարմիր', 'սլաք'})),
EmojiAnnotations(emoji='⏫', codepoints=(9195,), name='արագ վերև կոճակ', slug='արագ_վերև_կոճակ', annotations=frozenset({'սլաք', 'կրկնակի'})),
EmojiAnnotations(emoji='🔽', codepoints=(128317,), name='ներքև կոճակ', slug='ներքև_կոճակ', annotations=frozenset({'ներքև', 'կոճակ', 'կարմիր', 'սլաք'})),
EmojiAnnotations(emoji='⏬', codepoints=(9196,), name='արագ ներքև կոճակ', slug='արագ_ներքև_կոճակ', annotations=frozenset({'ներքև', 'սլաք', 'կրկնակի'})),
EmojiAnnotations(emoji='\u23f8', codepoints=(9208,), name='դադար կոճակ', slug='դադար_կոճակ', annotations=frozenset({'գծեր', 'դադար', 'կրկնակի', 'ուղղահայաց'})),
EmojiAnnotations(emoji='\u23f9', codepoints=(9209,), name='ստոպ կոճակ', slug='ստոպ_կոճակ', annotations=frozenset({'ստոպ', 'քառակուսի'})),
EmojiAnnotations(emoji='\u23fa', codepoints=(9210,), name='ձայնագրել կոճակ', slug='ձայնագրել_կոճակ', annotations=frozenset({'ձայնագրել', 'շրջան'})),
EmojiAnnotations(emoji='⏏', codepoints=(9167,), name='դուրս հանել կոճակ', slug='դուրս_հանել_կոճակ', annotations=frozenset({'դուրս հանել'})),
EmojiAnnotations(emoji='🎦', codepoints=(127910,), name='կինոմատոգրաֆիա', slug='կինոմատոգրաֆիա', annotations=frozenset({'տեսախցիկ', 'ժապավեն', 'ֆիլմ'})),
EmojiAnnotations(emoji='🔅', codepoints=(128261,), name='մթեցնել կոճակ', slug='մթեցնել_կոճակ', annotations=frozenset({'պայծառություն', 'թույլ', 'մթեցնել'})),
EmojiAnnotations(emoji='🔆', codepoints=(128262,), name='պայծառեցնել կոճակ', slug='պայծառեցնել_կոճակ', annotations=frozenset({'պայծառություն', 'պայծառ'})),
EmojiAnnotations(emoji='📶', codepoints=(128246,), name='անտենայի գծիկներ', slug='անտենայի_գծիկներ', annotations=frozenset({'գծիկ', 'ազդանշան', 'հեռախոս', 'շարժական', 'անտենա', 'բջջային'})),
EmojiAnnotations(emoji='📵', codepoints=(128245,), name='բջջային հեռախոսներն արգելվում են', slug='բջջային_հեռախոսներն_արգելվում_են', annotations=frozenset({'ոչ', 'արգելված', 'հեռախոս', 'շարժական', 'բջջային'})),
EmojiAnnotations(emoji='📳', codepoints=(128243,), name='թրթռազանգի ռեժիմ', slug='թրթռազանգի_ռեժիմ', annotations=frozenset({'ռեժիմ', 'թրթռում', 'հեռախոս', 'շարժական', 'բջջային'})),
EmojiAnnotations(emoji='📴', codepoints=(128244,), name='բջջայինն անջատված է', slug='բջջայինն_անջատված_է', annotations=frozenset({'անջատված', 'հեռախոս', 'շարժական', 'բջջային'})),
EmojiAnnotations(emoji='{#⃣}', codepoints=(123, 35, 8419, 125), name='ստեղն վանդականիշ', slug='ստեղն_վանդականիշ', annotations=frozenset({'ստեղն', 'վանդականիշ', 'ֆունտ'})),
EmojiAnnotations(emoji='{*⃣}', codepoints=(123, 42, 8419, 125), name='ստեղն աստղանիշ', slug='ստեղն_աստղանիշ', annotations=frozenset({'ստեղն', 'աստղանիշ', 'աստղ'})),
EmojiAnnotations(emoji='{0⃣}', codepoints=(123, 48, 8419, 125), name='ստեղն զրո', slug='ստեղն_զրո', annotations=frozenset({'0', 'ստեղն', 'զրո'})),
EmojiAnnotations(emoji='{1⃣}', codepoints=(123, 49, 8419, 125), name='ստեղն մեկ', slug='ստեղն_մեկ', annotations=frozenset({'ստեղն', 'մեկ', '1'})),
EmojiAnnotations(emoji='{2⃣}', codepoints=(123, 50, 8419, 125), name='ստեղն երկու', slug='ստեղն_երկու', annotations=frozenset({'ստեղն', 'երկու', '2'})),
EmojiAnnotations(emoji='{3⃣}', codepoints=(123, 51, 8419, 125), name='ստեղն երեք', slug='ստեղն_երեք', annotations=frozenset({'ստեղն', 'երեք', '3'})),
EmojiAnnotations(emoji='{4⃣}', codepoints=(123, 52, 8419, 125), name='ստեղն չորս', slug='ստեղն_չորս', annotations=frozenset({'4', 'ստեղն', 'չորս'})),
EmojiAnnotations(emoji='{5⃣}', codepoints=(123, 53, 8419, 125), name='ստեղն հինգ', slug='ստեղն_հինգ', annotations=frozenset({'ստեղն', '5', 'հինգ'})),
EmojiAnnotations(emoji='{6⃣}', codepoints=(123, 54, 8419, 125), name='ստեղն վեց', slug='ստեղն_վեց', annotations=frozenset({'ստեղն', 'վեց', '6'})),
EmojiAnnotations(emoji='{7⃣}', codepoints=(123, 55, 8419, 125), name='ստեղն յոթ', slug='ստեղն_յոթ', annotations=frozenset({'7', 'ստեղն', 'յոթ'})),
EmojiAnnotations(emoji='{8⃣}', codepoints=(123, 56, 8419, 125), name='ստեղն ութ', slug='ստեղն_ութ', annotations=frozenset({'8', 'ստեղն', 'ութ'})),
EmojiAnnotations(emoji='{9⃣}', codepoints=(123, 57, 8419, 125), name='ստեղն ինը', slug='ստեղն_ինը', annotations=frozenset({'ստեղն', 'ինը', '9'})),
EmojiAnnotations(emoji='🔟', codepoints=(128287,), name='ստեղն տասը', slug='ստեղն_տասը', annotations=frozenset({'ստեղն', '10', 'տասը'})),
EmojiAnnotations(emoji='💯', codepoints=(128175,), name='հարյուր միավոր', slug='հարյուր_միավոր', annotations=frozenset({'հարյուր', 'միավոր', '100', 'ամբողջ'})),
EmojiAnnotations(emoji='🔞', codepoints=(128286,), name='տասնութից ցածր արգելվում է', slug='տասնութից_ցածր_արգելվում_է', annotations=frozenset({'18', 'ոչ', 'արգելված', 'տարիքային սահմանափակում', 'անչափահաս', 'տասնութ'})),
EmojiAnnotations(emoji='🔠', codepoints=(128288,), name='լատինատառ մեծատառ ներածում', slug='լատինատառ_մեծատառ_ներածում', annotations=frozenset({'տառեր', 'մուտքագրել', 'լատինական', 'մեծատառ'})),
EmojiAnnotations(emoji='🔡', codepoints=(128289,), name='լատինատառ փոքրատառ ներածում', slug='լատինատառ_փոքրատառ_ներածում', annotations=frozenset({'տառեր', 'մուտքագրել', 'abcd', 'լատինական', 'փոքրատառ'})),
EmojiAnnotations(emoji='🔢', codepoints=(128290,), name='թվերի ներածում', slug='թվերի_ներածում', annotations=frozenset({'մուտքագրել', '1234', 'թվեր'})),
EmojiAnnotations(emoji='🔣', codepoints=(128291,), name='նշանների ներածում', slug='նշանների_ներածում', annotations=frozenset({'մուտքագրել'})),
EmojiAnnotations(emoji='🔤', codepoints=(128292,), name='լատինատառ ներածում', slug='լատինատառ_ներածում', annotations=frozenset({'abc', 'այբուբեն', 'տառեր', 'մուտքագրել', 'լատինական'})),
EmojiAnnotations(emoji='🅰', codepoints=(127344,), name='էյ կոճակ', slug='էյ_կոճակ', annotations=frozenset({'a', 'արյուն'})),
EmojiAnnotations(emoji='🆎', codepoints=(127374,), name='էյ-բի կոճակ', slug='էյ_բի_կոճակ', annotations=frozenset({'արյուն', 'ab'})),
EmojiAnnotations(emoji='🅱', codepoints=(127345,), name='բի կոճակ', slug='բի_կոճակ', annotations=frozenset({'b', 'արյուն'})),
EmojiAnnotations(emoji='🆑', codepoints=(127377,), name='սի-էլ', slug='սի_էլ', annotations=frozenset({'cl'})),
EmojiAnnotations(emoji='ℹ', codepoints=(8505,), name='տեղեկատու', slug='տեղեկատու', annotations=frozenset({'i', 'տեղեկատվություն'})),
EmojiAnnotations(emoji='🆔', codepoints=(127380,), name='այ-դի', slug='այ_դի', annotations=frozenset({'ինքնություն', 'id'})),
EmojiAnnotations(emoji='Ⓜ', codepoints=(9410,), name='էմ տառը շրջանակի մեջ', slug='էմ_տառը_շրջանակի_մեջ', annotations=frozenset({'m', 'շրջան'})),
EmojiAnnotations(emoji='🆖', codepoints=(127382,), name='էն-ջի', slug='էն_ջի', annotations=frozenset({'ng'})),
EmojiAnnotations(emoji='🅾', codepoints=(127358,), name='օ կոճակ', slug='օ_կոճակ', annotations=frozenset({'o', 'արյուն'})),
EmojiAnnotations(emoji='🆗', codepoints=(127383,), name='օքեյ', slug='օքեյ', annotations=frozenset({'ok'})),
EmojiAnnotations(emoji='🅿', codepoints=(127359,), name='փի կոճակ', slug='փի_կոճակ', annotations=frozenset({'կայանատեղի'})),
EmojiAnnotations(emoji='🆘', codepoints=(127384,), name='սոս', slug='սոս', annotations=frozenset({'օգնել', 'sos'})),
EmojiAnnotations(emoji='🆙', codepoints=(127385,), name='ափ կոճակ', slug='ափ_կոճակ', annotations=frozenset({'նշան', 'up', 'վեր'})),
EmojiAnnotations(emoji='🆚', codepoints=(127386,), name='վի-էս', slug='վի_էս', annotations=frozenset({'ընդդեմ', 'vs'})),
EmojiAnnotations(emoji='🈁', codepoints=(127489,), name='կատականա կոկո', slug='կատականա_կոկո', annotations=frozenset({'ճապոնական', 'ճապոներեն'})),
EmojiAnnotations(emoji='🈂', codepoints=(127490,), name='կատականա սա', slug='կատականա_սա', annotations=frozenset({'ճապոնական', 'ճապոներեն'})),
EmojiAnnotations(emoji='🈷', codepoints=(127543,), name='լուսին գաղափարագիր', slug='լուսին_գաղափարագիր', annotations=frozenset({'ճապոնական', 'ճապոներեն'})),
EmojiAnnotations(emoji='🈶', codepoints=(127542,), name='գոյ գաղափարագիր', slug='գոյ_գաղափարագիր', annotations=frozenset({'ճապոնական', 'ճապոներեն'})),
EmojiAnnotations(emoji='🈯', codepoints=(127535,), name='մատ գաղափարագիր', slug='մատ_գաղափարագիր', annotations=frozenset({'ճապոնական', 'ճապոներեն'})),
EmojiAnnotations(emoji='🉐', codepoints=(127568,), name='առավելություն գաղափարագիր շրջանակի մեջ', slug='առավելություն_գաղափարագիր_շրջանակի_մեջ', annotations=frozenset({'ճապոնական', 'ճապոներեն'})),
EmojiAnnotations(emoji='🈹', codepoints=(127545,), name='բաժանել գաղափարագիր', slug='բաժանել_գաղափարագիր', annotations=frozenset({'ճապոնական', 'ճապոներեն'})),
EmojiAnnotations(emoji='🈚', codepoints=(127514,), name='ժխտում գաղափարագիր', slug='ժխտում_գաղափարագիր', annotations=frozenset({'ճապոնական', 'ճապոներեն'})),
EmojiAnnotations(emoji='🈲', codepoints=(127538,), name='արգելել գաղափարագիր', slug='արգելել_գաղափարագիր', annotations=frozenset({'ճապոնական', 'ճապոներեն'})),
EmojiAnnotations(emoji='🉑', codepoints=(127569,), name='ընդունել գաղափարագիր շրջանակի մեջ', slug='ընդունել_գաղափարագիր_շրջանակի_մեջ', annotations=frozenset({'չինարեն', 'չինական'})),
EmojiAnnotations(emoji='🈸', codepoints=(127544,), name='կիրառել գաղափարագիր', slug='կիրառել_գաղափարագիր', annotations=frozenset({'չինարեն', 'չինական'})),
EmojiAnnotations(emoji='🈴', codepoints=(127540,), name='միասին գաղափարագիր', slug='միասին_գաղափարագիր', annotations=frozenset({'չինարեն', 'չինական'})),
EmojiAnnotations(emoji='🈳', codepoints=(127539,), name='դատարկ գաղափարագիր', slug='դատարկ_գաղափարագիր', annotations=frozenset({'չինարեն', 'չինական'})),
EmojiAnnotations(emoji='㊗', codepoints=(12951,), name='շնորհավորել գաղափարագիր շրջանակի մեջ', slug='շնորհավորել_գաղափարագիր_շրջանակի_մեջ', annotations=frozenset({'շնորհավորանք', 'չինարեն', 'գաղափարագիր', 'չինական'})),
EmojiAnnotations(emoji='㊙', codepoints=(12953,), name='գաղտնի գաղափարագիր շրջանակի մեջ', slug='գաղտնի_գաղափարագիր_շրջանակի__մեջ', annotations=frozenset({'գաղափարագիր', 'չինարեն', 'գաղտնիք', 'չինական'})),
EmojiAnnotations(emoji='🈺', codepoints=(127546,), name='աշխատում է գաղափարագիր', slug='աշխատում_է_գաղափարագիր', annotations=frozenset({'չինարեն', 'չինական'})),
EmojiAnnotations(emoji='🈵', codepoints=(127541,), name='լիություն գաղափարագիր', slug='լիություն_գաղափարագիր', annotations=frozenset({'չինարեն', 'չինական'})),
EmojiAnnotations(emoji='▪', codepoints=(9642,), name='սև փոքր քառակուսի', slug='սև_փոքր_քառակուսի', annotations=frozenset({'երկրաչափական', 'քառակուսի'})),
EmojiAnnotations(emoji='▫', codepoints=(9643,), name='սպիտակ փոքր քառակուսի', slug='սպիտակ_փոքր_քառակուսի', annotations=frozenset({'երկրաչափական', 'քառակուսի'})),
EmojiAnnotations(emoji='◻', codepoints=(9723,), name='սպիտակ միջին չափի քառակուսի', slug='սպիտակ_միջին_չափի_քառակուսի', annotations=frozenset({'երկրաչափական', 'քառակուսի'})),
EmojiAnnotations(emoji='◼', codepoints=(9724,), name='սև միջին չափի քառակուսի', slug='սև_միջին_չափի_քառակուսի', annotations=frozenset({'երկրաչափական', 'քառակուսի'})),
EmojiAnnotations(emoji='◽', codepoints=(9725,), name='սպիտակ միջին-փոքր քառակուսի', slug='սպիտակ_միջին_փոքր_քառակուսի', annotations=frozenset({'երկրաչափական', 'քառակուսի'})),
EmojiAnnotations(emoji='◾', codepoints=(9726,), name='սև միջին-փոքր քառակուսի', slug='սև_միջին_փոքր_քառակուսի', annotations=frozenset({'երկրաչափական', 'քառակուսի'})),
EmojiAnnotations(emoji='⬛', codepoints=(11035,), name='սև մեծ քառակուսի', slug='սև_մեծ_քառակուսի', annotations=frozenset({'երկրաչափական', 'քառակուսի'})),
EmojiAnnotations(emoji='⬜', codepoints=(11036,), name='սպիտակ մեծ քառակուսի', slug='սպիտակ_մեծ_քառակուսի', annotations=frozenset({'երկրաչափական', 'քառակուսի'})),
EmojiAnnotations(emoji='🔶', codepoints=(128310,), name='նարնջագույն մեծ շեղանկյուն', slug='նարնջագույն_մեծ_շեղանկյուն', annotations=frozenset({'երկրաչափական', 'շեղանկյուն', 'նարնջագույն'})),
EmojiAnnotations(emoji='🔷', codepoints=(128311,), name='կապույտ մեծ շեղանկյուն', slug='կապույտ_մեծ_շեղանկյուն', annotations=frozenset({'կապույտ', 'երկրաչափական', 'շեղանկյուն'})),
EmojiAnnotations(emoji='🔸', codepoints=(128312,), name='նարնջագույն փոքր շեղանկյուն', slug='նարնջագույն_փոքր_շեղանկյուն', annotations=frozenset({'երկրաչափական', 'շեղանկյուն', 'նարնջագույն'})),
EmojiAnnotations(emoji='🔹', codepoints=(128313,), name='կապույտ փոքր շեղանկյուն', slug='կապույտ_փոքր_շեղանկյուն', annotations=frozenset({'կապույտ', 'երկրաչափական', 'շեղանկյուն'})),
EmojiAnnotations(emoji='🔺', codepoints=(128314,), name='կարմիր եռանկյունի ուղղված վերև', slug='կարմիր_եռանկյունի_ուղղված_վերև', annotations=frozenset({'երկրաչափական', 'կարմիր'})),
EmojiAnnotations(emoji='🔻', codepoints=(128315,), name='կարմիր եռանկյունի ուղղված ներքև', slug='կարմիր_եռանկյունի_ուղղված_ներքև', annotations=frozenset({'ներքև', 'երկրաչափական', 'կարմիր'})),
EmojiAnnotations(emoji='💠', codepoints=(128160,), name='կետով շեղանկյուն', slug='կետով_շեղանկյուն', annotations=frozenset({'երկրաչափական', 'կոմիքս', 'շեղանկյուն', 'ներսում'})),
EmojiAnnotations(emoji='🔘', codepoints=(128280,), name='կետակոճակ', slug='կետակոճակ', annotations=frozenset({'կետ', 'կոճակ', 'երկրաչափական', 'ռադիո'})),
EmojiAnnotations(emoji='🔲', codepoints=(128306,), name='սև քառակուսի կոճակ', slug='սև_քառակուսի_կոճակ', annotations=frozenset({'կոճակ', 'երկրաչափական', 'քառակուսի'})),
EmojiAnnotations(emoji='🔳', codepoints=(128307,), name='սպիտակ քառակուսի կոճակ', slug='սպիտակ_քառակուսի_կոճակ', annotations=frozenset({'կոճակ', 'երկրաչափական', 'ուրվագծված', 'քառակուսի'})),
EmojiAnnotations(emoji='⚪', codepoints=(9898,), name='սպիտակ շրջանակ', slug='սպիտակ_շրջանակ', annotations=frozenset({'երկրաչափական', 'շրջան'})),
EmojiAnnotations(emoji='⚫', codepoints=(9899,), name='սև շրջանակ', slug='սև_շրջանակ', annotations=frozenset({'երկրաչափական', 'շրջան'})),
EmojiAnnotations(emoji='🔴', codepoints=(128308,), name='կարմիր շրջանակ', slug='կարմիր_շրջանակ', annotations=frozenset({'երկրաչափական', 'կարմիր', 'շրջան'})),
EmojiAnnotations(emoji='🔵', codepoints=(128309,), name='կապույտ շրջանակ', slug='կապույտ_շրջանակ', annotations=frozenset({'կապույտ', 'երկրաչափական', 'շրջան'})),] | 154.573805 | 252 | 0.714472 | from emojitations.emojitypes import EmojiAnnotations
emoji = [
EmojiAnnotations(emoji='😀', codepoints=(128512,), name='ծիծաղող դեմք', slug='ծիծաղող_դեմք', annotations=frozenset({'դեմք', 'քմծիծաղել'})),
EmojiAnnotations(emoji='😁', codepoints=(128513,), name='ծիծաղող դեմք ժպտացող աչքերով', slug='ծիծաղող_դեմք_ժպտացող_աչքերով', annotations=frozenset({'աչք', 'դեմք', 'ժպտալ', 'քմծիծաղել'})),
EmojiAnnotations(emoji='😂', codepoints=(128514,), name='դեմք ուրախության արցունքներով', slug='դեմք_ուրախության_արցունքներով', annotations=frozenset({'ուրախություն', 'դեմք', 'ծիծաղել', 'արցունք'})),
EmojiAnnotations(emoji='😃', codepoints=(128515,), name='ժպտացող դեմք բաց բերանով', slug='ժպտացող_դեմք_բաց_բերանով', annotations=frozenset({'բաց', 'դեմք', 'ժպտալ', 'բերան'})),
EmojiAnnotations(emoji='😄', codepoints=(128516,), name='ժպտացող դեմք բաց բերանով և ժպտացող աչքերով', slug='ժպտացող_դեմք_բաց_բերանով_և_ժպտացող_աչքերով', annotations=frozenset({'բաց', 'աչք', 'դեմք', 'ժպտալ', 'բերան'})),
EmojiAnnotations(emoji='😅', codepoints=(128517,), name='ժպտացող դեմք բաց բերանով և սառը քրտինքով', slug='ժպտացող_դեմք_բաց_բերանով_և_սառը_քրտինքով', annotations=frozenset({'բաց', 'սառը', 'դեմք', 'ժպտալ', 'քրտինք'})),
EmojiAnnotations(emoji='😆', codepoints=(128518,), name='ժպտացող դեմք բաց բերանով և ամուր փակած աչքերով', slug='ժպտացող_դեմք_բաց_բերանով_և_ամուր_փակած_աչքերով', annotations=frozenset({'ժպտալ', 'գոհ', 'ծիծաղել', 'դեմք', 'բաց', 'բերան'})),
EmojiAnnotations(emoji='😉', codepoints=(128521,), name='աչքով անող դեմք', slug='աչքով_անող_դեմք', annotations=frozenset({'դեմք', 'աչքով անել'})),
EmojiAnnotations(emoji='😊', codepoints=(128522,), name='ժպտացող դեմք ժպտացող աչքերով', slug='ժպտացող_դեմք_ժպտացող_աչքերով', annotations=frozenset({'աչք', 'դեմք', 'ժպտալ', 'շիկնել'})),
EmojiAnnotations(emoji='😋', codepoints=(128523,), name='համեղ ուտելիք վայելող դեմք', slug='համեղ_ուտելիք_վայելող_դեմք', annotations=frozenset({'դեմք', 'վեյելել', 'ժպտալ', 'համեղ', 'նյամ'})),
EmojiAnnotations(emoji='😎', codepoints=(128526,), name='ժպտացող դեմք արևային ակնոցով', slug='ժպտացող_դեմք_արևային_ակնոցով', annotations=frozenset({'աչք', 'ակնոց', 'զիլ', 'ժպտալ', 'պայծառ', 'արևային ակնոց', 'դեմք', 'եղանակ', 'արև'})),
EmojiAnnotations(emoji='😍', codepoints=(128525,), name='ժպտացող դեմք սրտաձև աչքերով', slug='ժպտացող_դեմք_սրտաձև_աչքերով', annotations=frozenset({'աչք', 'դեմք', 'սիրտ', 'ժպտալ', 'սեր'})),
EmojiAnnotations(emoji='😘', codepoints=(128536,), name='համբույր ուղարկող դեմք', slug='համբույր_ուղարկող_դեմք', annotations=frozenset({'դեմք', 'սիրտ', 'համբուրել'})),
EmojiAnnotations(emoji='😗', codepoints=(128535,), name='համբուրող դեմք', slug='համբուրող_դեմք', annotations=frozenset({'դեմք', 'համբույր'})),
EmojiAnnotations(emoji='😙', codepoints=(128537,), name='համբուրող դեմք ժպտացող աչքերով', slug='համբուրող_դեմք_ժպտացող_աչքերով', annotations=frozenset({'աչք', 'դեմք', 'համբուրել', 'ժպտալ'})),
EmojiAnnotations(emoji='😚', codepoints=(128538,), name='համբուրող դեմք փակ աչքերով', slug='համբուրող_դեմք_փակ_աչքերով', annotations=frozenset({'աչք', 'դեմք', 'փակ', 'համբուրել'})),
EmojiAnnotations(emoji='☺', codepoints=(9786,), name='ժպտացող դեմք', slug='ժպտացող_դեմք', annotations=frozenset({'դեմք', 'ժպտալ', 'անկաշկանդ'})),
EmojiAnnotations(emoji='\U0001f642', codepoints=(128578,), name='թեթևակի ժպտացող դեմք', slug='թեթևակի_ժպտացող_դեմք', annotations=frozenset({'դեմք', 'ժպտալ'})),
EmojiAnnotations(emoji='\U0001f917', codepoints=(129303,), name='գրկող դեմք', slug='գրկող_դեմք', annotations=frozenset({'գրկախառնում', 'դեմք', 'գրկախառնվել'})),
EmojiAnnotations(emoji='😇', codepoints=(128519,), name='ժպտացող դեմք լուսապսակով', slug='ժպտացող_դեմք_լուսապսակով', annotations=frozenset({'անմեղ', 'լուսապսակ', 'ժպտալ', 'հրեշտակ', 'դեմք', 'հեքիաթ', 'ֆանտազիա'})),
EmojiAnnotations(emoji='\U0001f914', codepoints=(129300,), name='մտածող դեմք', slug='մտածող_դեմք', annotations=frozenset({'մտածող', 'դեմք'})),
EmojiAnnotations(emoji='😐', codepoints=(128528,), name='չեզոք դեմք', slug='չեզոք_դեմք', annotations=frozenset({'դեմք', 'չեզոք', 'անվրդով'})),
EmojiAnnotations(emoji='😑', codepoints=(128529,), name='անհույզ դեմք', slug='անհույզ_դեմք', annotations=frozenset({'դեմք', 'ոչինչ չարտահայտող', 'անարտահայտիչ', 'առանց էմոցիաների'})),
EmojiAnnotations(emoji='😶', codepoints=(128566,), name='առանց բերանի դեմք', slug='առանց_բերանի_դեմք', annotations=frozenset({'դեմք', 'լուռ', 'բերան', 'հանգիստ'})),
EmojiAnnotations(emoji='\U0001f644', codepoints=(128580,), name='պտտվող աչքերով դեմք', slug='պտտվող_աչքերով_դեմք', annotations=frozenset({'դեմք', 'աչքեր', 'պտտվող'})),
EmojiAnnotations(emoji='😏', codepoints=(128527,), name='կեղծ ժպտացող դեմք', slug='կեղծ_ժպտացող_դեմք', annotations=frozenset({'դեմք', 'կեղծ ժպտալ'})),
EmojiAnnotations(emoji='😣', codepoints=(128547,), name='համառող դեմք', slug='համառող_դեմք', annotations=frozenset({'դեմք', 'համառել'})),
EmojiAnnotations(emoji='😥', codepoints=(128549,), name='հիասթափված; բայց թեթևացած դեմք', slug='հիասթափված;_բայց_թեթևացած_դեմք', annotations=frozenset({'դեմք', 'թեթևացած', 'հիասթափված'})),
EmojiAnnotations(emoji='😮', codepoints=(128558,), name='բաց բերանով դեմք', slug='բաց_բերանով_դեմք', annotations=frozenset({'բաց', 'դեմք', 'բերան', 'համակրանք'})),
EmojiAnnotations(emoji='\U0001f910', codepoints=(129296,), name='ճարմանդավոր բերանով դեմք', slug='ճարմանդավոր_բերանով_դեմք', annotations=frozenset({'դեմք', 'բերան', 'ճարմանդ'})),
EmojiAnnotations(emoji='😯', codepoints=(128559,), name='սաստված դեմք', slug='սաստված_դեմք', annotations=frozenset({'զարմացած', 'դեմք', 'սաստված', 'ապշած'})),
EmojiAnnotations(emoji='😪', codepoints=(128554,), name='քնատ դեմք', slug='քնատ_դեմք', annotations=frozenset({'քնել', 'դեմք'})),
EmojiAnnotations(emoji='😫', codepoints=(128555,), name='հոգնած դեմք', slug='հոգնած_դեմք', annotations=frozenset({'դեմք', 'հոգնած'})),
EmojiAnnotations(emoji='😴', codepoints=(128564,), name='քնած դեմք', slug='քնած_դեմք', annotations=frozenset({'քնել', 'դեմք', 'խռռ'})),
EmojiAnnotations(emoji='😌', codepoints=(128524,), name='թեթևացած դեմք', slug='թեթևացած_դեմք', annotations=frozenset({'դեմք', 'թեթևացած'})),
EmojiAnnotations(emoji='\U0001f913', codepoints=(129299,), name='գերազանցիկի դեմք', slug='գերազանցիկի_դեմք', annotations=frozenset({'դեմք', 'ցնդած', 'հիմար'})),
EmojiAnnotations(emoji='😛', codepoints=(128539,), name='լեզու հանած դեմք', slug='լեզու_հանած_դեմք', annotations=frozenset({'դեմք', 'լեզու'})),
EmojiAnnotations(emoji='😜', codepoints=(128540,), name='լեզու հանած և աչքով անող դեմք', slug='լեզու_հանած_և_աչքով_անող_դեմք', annotations=frozenset({'աչք', 'դեմք', 'կատակել', 'լեզու', 'աչքով անել'})),
EmojiAnnotations(emoji='😝', codepoints=(128541,), name='լեզու հանած և ամուր փակած աչքերով դեմք', slug='լեզու_հանած_և_ամուր_փակած_աչքերով_դեմք', annotations=frozenset({'աչք', 'դեմք', 'սարսափելի', 'համ', 'լեզու'})),
EmojiAnnotations(emoji='☹', codepoints=(9785,), name='խոժոռված դեմք', slug='խոժոռված_դեմք', annotations=frozenset({'դեմք', 'խոժոռված'})),
EmojiAnnotations(emoji='\U0001f641', codepoints=(128577,), name='թեթևակի խոժոռված դեմք', slug='թեթևակի_խոժոռված_դեմք', annotations=frozenset({'դեմք', 'խոժոռված'})),
EmojiAnnotations(emoji='😒', codepoints=(128530,), name='անտրամադիր դեմք', slug='անտրամադիր_դեմք', annotations=frozenset({'դեմք', 'անտրամադիր', 'դժբախտ'})),
EmojiAnnotations(emoji='😓', codepoints=(128531,), name='սառը քրտինքով դեմք', slug='սառը_քրտինքով_դեմք', annotations=frozenset({'սառը', 'դեմք', 'քրտինք'})),
EmojiAnnotations(emoji='😔', codepoints=(128532,), name='մտածկոտ դեմք', slug='մտածկոտ_դեմք', annotations=frozenset({'դեմք', 'մռայլված', 'մտածկոտ'})),
EmojiAnnotations(emoji='😕', codepoints=(128533,), name='շփոթված դեմք', slug='շփոթված_դեմք', annotations=frozenset({'դեմք', 'շփոթված'})),
EmojiAnnotations(emoji='😖', codepoints=(128534,), name='ցնցված դեմք', slug='ցնցված_դեմք', annotations=frozenset({'դեմք', 'ցնցված'})),
EmojiAnnotations(emoji='\U0001f643', codepoints=(128579,), name='գլխնիվայր դեմք', slug='գլխնիվայր_դեմք', annotations=frozenset({'դեմք', 'գլխնիվայր'})),
EmojiAnnotations(emoji='😷', codepoints=(128567,), name='բժշկական դիմակով դեմք', slug='բժշկական_դիմակով_դեմք', annotations=frozenset({'հիվանդ', 'բժիշկ', 'սառը', 'դեմք', 'բժշկական', 'դիմակ'})),
EmojiAnnotations(emoji='\U0001f912', codepoints=(129298,), name='ջերմաչափով դեմք', slug='ջերմաչափով_դեմք', annotations=frozenset({'դեմք', 'հիվանդ', 'ջերմաչափ'})),
EmojiAnnotations(emoji='\U0001f915', codepoints=(129301,), name='գլխակապով դեմք', slug='գլխակապով_դեմք', annotations=frozenset({'դեմք', 'վիրակապ', 'վնասվածք'})),
EmojiAnnotations(emoji='\U0001f911', codepoints=(129297,), name='թղթադրամը բերանին դեմք', slug='թղթադրամը_բերանին_դեմք', annotations=frozenset({'դեմք', 'փող', 'բերան'})),
EmojiAnnotations(emoji='😲', codepoints=(128562,), name='ապշահար դեմք', slug='ապշահար_դեմք', annotations=frozenset({'դեմք', 'ցնցված', 'ապշահար', 'ամբողջովին'})),
EmojiAnnotations(emoji='😞', codepoints=(128542,), name='հիասթափված դեմք', slug='հիասթափված_դեմք', annotations=frozenset({'դեմք', 'հիասթափված'})),
EmojiAnnotations(emoji='😟', codepoints=(128543,), name='անհանգստացած դեմք', slug='անհանգստացած_դեմք', annotations=frozenset({'անհանգստացած', 'դեմք'})),
EmojiAnnotations(emoji='😤', codepoints=(128548,), name='քթից գոլորշի հանող դեմք', slug='քթից_գոլորշի_հանող_դեմք', annotations=frozenset({'դեմք', 'հաղթած', 'հաղթանակ'})),
EmojiAnnotations(emoji='😢', codepoints=(128546,), name='արտասվող դեմք', slug='արտասվող_դեմք', annotations=frozenset({'արտասվել', 'դեմք', 'տխուր', 'արտասուք'})),
EmojiAnnotations(emoji='😭', codepoints=(128557,), name='բարձրաձայն արտասվող դեմք', slug='բարձրաձայն_արտասվող_դեմք', annotations=frozenset({'արտասվել', 'դեմք', 'տխուր', 'հեկեկալ', 'արտասուք'})),
EmojiAnnotations(emoji='😦', codepoints=(128550,), name='բաց բերանով խոժոռված դեմք', slug='բաց_բերանով_խոժոռված_դեմք', annotations=frozenset({'բաց', 'դեմք', 'բերան', 'խոժոռված'})),
EmojiAnnotations(emoji='😧', codepoints=(128551,), name='վշտահար դեմք', slug='վշտահար_դեմք', annotations=frozenset({'վշտահար', 'դեմք'})),
EmojiAnnotations(emoji='😨', codepoints=(128552,), name='վախեցած դեմք', slug='վախեցած_դեմք', annotations=frozenset({'վախեցած', 'դեմք', 'սարսափած', 'վախ'})),
EmojiAnnotations(emoji='😩', codepoints=(128553,), name='ուժասպառ դեմք', slug='ուժասպառ_դեմք', annotations=frozenset({'դեմք', 'հոգնած', 'ուժասպառ'})),
EmojiAnnotations(emoji='😬', codepoints=(128556,), name='ծամածռվող դեմք', slug='ծամածռվող_դեմք', annotations=frozenset({'դեմք', 'ծամածռություն'})),
EmojiAnnotations(emoji='😰', codepoints=(128560,), name='բաց բերանով և սառը քրտինքով դեմք', slug='բաց_բերանով_և_սառը_քրտինքով_դեմք', annotations=frozenset({'հապշտապ', 'բաց', 'սառը', 'դեմք', 'կապույտ', 'բերան', 'քրտինք'})),
EmojiAnnotations(emoji='😱', codepoints=(128561,), name='վախից գոռացող դեմք', slug='վախից_գոռացող_դեմք', annotations=frozenset({'վախեցած', 'ծամել', 'սարսափած', 'վախ', 'դեմք', 'ճչալ'})),
EmojiAnnotations(emoji='😳', codepoints=(128563,), name='շիկնած դեմք', slug='շիկնած_դեմք', annotations=frozenset({'դեմք', 'հիացած', 'շիկնած'})),
EmojiAnnotations(emoji='😵', codepoints=(128565,), name='գլխապտույտ ունեցող դեմք', slug='գլխապտույտ_ունեցող_դեմք', annotations=frozenset({'դեմք', 'գլխապտույտ'})),
EmojiAnnotations(emoji='😡', codepoints=(128545,), name='դժգոհ դեմք', slug='դժգոհ_դեմք', annotations=frozenset({'բարկացած', 'դեմք', 'զայրույթ', 'կարմիր', 'խենք', 'դժգոհ'})),
EmojiAnnotations(emoji='😠', codepoints=(128544,), name='բարկացած դեմք', slug='բարկացած_դեմք', annotations=frozenset({'դեմք', 'խենք', 'բարկացած'})),
EmojiAnnotations(emoji='😈', codepoints=(128520,), name='ժպտացող դեմք եղջյուրներով', slug='ժպտացող_դեմք_եղջյուրներով', annotations=frozenset({'դեմք', 'հեքիաթ', 'ֆանտազիա', 'ժպտալ', 'եղջյուրներ'})),
EmojiAnnotations(emoji='👿', codepoints=(128127,), name='սատանայի ճուտ', slug='սատանայի_ճուտ', annotations=frozenset({'դեմք', 'սատանա', 'հեքիաթ', 'ֆանտազիա', 'դև'})),
EmojiAnnotations(emoji='👹', codepoints=(128121,), name='մարդակեր հսկա', slug='մարդակեր_հսկա', annotations=frozenset({'դեմք', 'հեքիաթ', 'ֆանտազիա', 'ճապոնական', 'հրեշ', 'արարած'})),
EmojiAnnotations(emoji='👺', codepoints=(128122,), name='չար ոգի', slug='չար_ոգի', annotations=frozenset({'դեմք', 'հեքիաթ', 'ֆանտազիա', 'ճապոնական', 'հրեշ', 'արարած'})),
EmojiAnnotations(emoji='💀', codepoints=(128128,), name='գանգ', slug='գանգ', annotations=frozenset({'դեմք', 'հեքիաթ', 'մարմին', 'մահ', 'հրեշ'})),
EmojiAnnotations(emoji='☠', codepoints=(9760,), name='գանգ և խաչված ոսկորներ', slug='գանգ_և_խաչված_ոսկորներ', annotations=frozenset({'մարմին', 'ոսկորներ', 'գանգ', 'խաչված', 'դեմք', 'մահ', 'հրեշ'})),
EmojiAnnotations(emoji='👻', codepoints=(128123,), name='ուրվական', slug='ուրվական', annotations=frozenset({'դեմք', 'հեքիաթ', 'ֆանտազիա', 'արարած', 'հրեշ'})),
EmojiAnnotations(emoji='👽', codepoints=(128125,), name='այլմոլորակային', slug='այլմոլորակային', annotations=frozenset({'տիեզերք', 'դեմք', 'հեքիաթ', 'ֆանտազիա', 'չթօ', 'հրեշ', 'արարած'})),
EmojiAnnotations(emoji='👾', codepoints=(128126,), name='այլմոլորակային հրեշ', slug='այլմոլորակային_հրեշ', annotations=frozenset({'տիեզերք', 'այլմոլորակային', 'դեմք', 'հեքիաթ', 'ֆանտազիա', 'չթօ', 'հրեշ', 'արարած'})),
EmojiAnnotations(emoji='\U0001f916', codepoints=(129302,), name='ռոբոտի դեմք', slug='ռոբոտի_դեմք', annotations=frozenset({'դեմք', 'ռոբոտ', 'հրեշ'})),
EmojiAnnotations(emoji='💩', codepoints=(128169,), name='կեղտի կույտ', slug='կեղտի_կույտ', annotations=frozenset({'գոմաղբ', 'կոմիքս', 'դեմք', 'կեղտ', 'հրեշ', 'կղանք'})),
EmojiAnnotations(emoji='😺', codepoints=(128570,), name='ժպտացող կատվի դեմք բաց բերանով', slug='ժպտացող_կատվի_դեմք_բաց_բերանով', annotations=frozenset({'բաց', 'կատու', 'դեմք', 'ժպտալ', 'բերան'})),
EmojiAnnotations(emoji='😸', codepoints=(128568,), name='ծիծաղող կատվի դեմք ժպտացող աչքերով', slug='ծիծաղող_կատվի_դեմք_ժպտացող_աչքերով', annotations=frozenset({'կատու', 'աչք', 'դեմք', 'ժպտալ', 'կանաչ'})),
EmojiAnnotations(emoji='😹', codepoints=(128569,), name='կատվի դեմք ուրախության արցունքներով', slug='կատվի_դեմք_ուրախության_արցունքներով', annotations=frozenset({'ուրախություն', 'կատու', 'դեմք', 'արտասուք'})),
EmojiAnnotations(emoji='😻', codepoints=(128571,), name='ժպտացող կատվի դեմք սրտաձև աչքերով', slug='ժպտացող_կատվի_դեմք_սրտաձև_աչքերով', annotations=frozenset({'աչք', 'սիրտ', 'ժպտալ', 'սեր', 'դեմք', 'կատու'})),
EmojiAnnotations(emoji='😼', codepoints=(128572,), name='կատվի դեմք ծամածռված ժպիտով', slug='կատվի_դեմք_ծամածռված_ժպիտով', annotations=frozenset({'ծամածռված', 'կատու', 'դեմք', 'հեգնական', 'ժպտալ'})),
EmojiAnnotations(emoji='😽', codepoints=(128573,), name='համբուրող կատվի դեմք փակ աչքերով', slug='համբուրող_կատվի_դեմք_փակ_աչքերով', annotations=frozenset({'կատու', 'աչք', 'դեմք', 'համբույր'})),
EmojiAnnotations(emoji='🙀', codepoints=(128576,), name='ուժասպառ կատվի դեմք', slug='ուժասպառ_կատվի_դեմք', annotations=frozenset({'զարմացած', 'կատու', 'դեմք', 'ուժասպառ', 'օհ'})),
EmojiAnnotations(emoji='😿', codepoints=(128575,), name='արտասվող կատվի դեմք', slug='արտասվող_կատվի_դեմք', annotations=frozenset({'արտասվել', 'կատու', 'դեմք', 'տխուր', 'արտասուք'})),
EmojiAnnotations(emoji='😾', codepoints=(128574,), name='դժգոհ կատվի դեմք', slug='դժգոհ_կատվի_դեմք', annotations=frozenset({'կատու', 'դեմք', 'դժգոհ'})),
EmojiAnnotations(emoji='🙈', codepoints=(128584,), name='ոչինչ չեմ տեսնում', slug='ոչինչ_չեմ_տեսնում', annotations=frozenset({'չար', 'կապիկ', 'ժեստ', 'ոչ', 'տեսնել', 'դեմք', 'արգելված'})),
EmojiAnnotations(emoji='🙉', codepoints=(128585,), name='ոչինչ չեմ լսում', slug='ոչինչ_չեմ_լսում', annotations=frozenset({'լսել', 'չար', 'կապիկ', 'ժեստ', 'ոչ', 'դեմք', 'արգելված'})),
EmojiAnnotations(emoji='🙊', codepoints=(128586,), name='ոչինչ չեմ ասում', slug='ոչինչ_չեմ_ասում', annotations=frozenset({'չար', 'կապիկ', 'ժեստ', 'ոչ', 'դեմք', 'արգելված', 'խոսալ'})),
EmojiAnnotations(emoji='👧', codepoints=(128103,), name='աղջիկ', slug='աղջիկ', annotations=frozenset({'օրիորդ', 'կենդանակերպ', 'կույս'})),
EmojiAnnotations(emoji='👴', codepoints=(128116,), name='տարեց տղամարդ', slug='տարեց_տղամարդ', annotations=frozenset({'տղամարդ', 'տարեց'})),
EmojiAnnotations(emoji='👵', codepoints=(128117,), name='տարեց կին', slug='տարեց_կին', annotations=frozenset({'տարեց', 'կին'})),
EmojiAnnotations(emoji='👮', codepoints=(128110,), name='ոստիկան', slug='ոստիկան', annotations=frozenset({'սպա', 'ոստիկանություն'})),
EmojiAnnotations(emoji='👲', codepoints=(128114,), name='չինական գլխարկով մարդ', slug='չինական_գլխարկով_մարդ', annotations=frozenset({'գլխարկ', 'մարդ', 'չինական'})),
EmojiAnnotations(emoji='👳', codepoints=(128115,), name='չալմայով մարդ', slug='չալմայով_մարդ', annotations=frozenset({'չալմա', 'մարդ'})),
EmojiAnnotations(emoji='👷', codepoints=(128119,), name='շինարար', slug='շինարար', annotations=frozenset({'գլխարկ', 'շինարարություն', 'աշխատող'})),
EmojiAnnotations(emoji='⛑', codepoints=(9937,), name='սպիտակ խաչով սաղավարտ', slug='սպիտակ_խաչով_սաղավարտ', annotations=frozenset({'գլխարկ', 'դեմք', 'խաչ', 'սաղավարտ', 'օգնություն'})),
EmojiAnnotations(emoji='👸', codepoints=(128120,), name='արքայադուստր', slug='արքայադուստր', annotations=frozenset({'հեքիաթ', 'ֆանտազիա'})),
EmojiAnnotations(emoji='\U0001f575', codepoints=(128373,), name='խուզարկու', slug='խուզարկու', annotations=frozenset({'լրտես'})),
EmojiAnnotations(emoji='🎅', codepoints=(127877,), name='սանտա կլաուս', slug='սանտա_կլաուս', annotations=frozenset({'տոն', 'սանտա', 'հեքիաթ', 'ֆանտազիա', 'սուրբ ծնունդ', 'հայր'})),
EmojiAnnotations(emoji='👼', codepoints=(128124,), name='մանուկ-հրեշտակ', slug='մանուկ_հրեշտակ', annotations=frozenset({'երեխա', 'դեմք', 'հեքիաթ', 'ֆանտազիա', 'հրեշտակ'})),
EmojiAnnotations(emoji='💆', codepoints=(128134,), name='դեմքի մերսում', slug='դեմքի_մերսում', annotations=frozenset({'սրահ', 'մերսում'})),
EmojiAnnotations(emoji='💇', codepoints=(128135,), name='սանրվածք', slug='սանրվածք', annotations=frozenset({'վարսավիր', 'գեղեցկություն', 'սրահ'})),
EmojiAnnotations(emoji='👰', codepoints=(128112,), name='քողով հարս', slug='քողով_հարս', annotations=frozenset({'քող', 'հարս', 'հարսանիք'})),
EmojiAnnotations(emoji='🙍', codepoints=(128589,), name='խոժոռված դեմքով անձ', slug='խոժոռված_դեմքով_անձ', annotations=frozenset({'խոժոռված', 'ժեստ'})),
EmojiAnnotations(emoji='🙎', codepoints=(128590,), name='դժգոհ անձ', slug='դժգոհ_անձ', annotations=frozenset({'ժեստ', 'դժգոհ'})),
EmojiAnnotations(emoji='🙅', codepoints=(128581,), name='ոչ ցույց տվող', slug='ոչ_ցույց_տվող', annotations=frozenset({'արգելված', 'ձեռք', 'ժեստ', 'ոչ'})),
EmojiAnnotations(emoji='🙆', codepoints=(128582,), name='ok ցույց տվող', slug='ok_ցույց_տվող', annotations=frozenset({'ձեռք', 'ժեստ', 'ok'})),
EmojiAnnotations(emoji='💁', codepoints=(128129,), name='տեղեկատու բյուրոյի աշխատող', slug='տեղեկատու_բյուրոյի_աշխատող', annotations=frozenset({'հանդուգն', 'ձեռք', 'օգնել', 'տեղեկատվություն'})),
EmojiAnnotations(emoji='🙋', codepoints=(128587,), name='ձեռք բարձրացնող ուրախ անձ', slug='ձեռք_բարձրացնող_ուրախ_անձ', annotations=frozenset({'ձեռք', 'երջանիկ', 'ժեստ', 'բարձրացված'})),
EmojiAnnotations(emoji='🙇', codepoints=(128583,), name='խոնարհվող անձ', slug='խոնարհվող_անձ', annotations=frozenset({'ներողություն խնդրել', 'ներողություն', 'ժեստ', 'խոնարհվել'})),
EmojiAnnotations(emoji='🙌', codepoints=(128588,), name='ձեռքերը բարձրացնող անձ', slug='ձեռքերը_բարձրացնող_անձ', annotations=frozenset({'մարմին', 'տոն', 'ժեստ', 'ձեռք', 'ուռա', 'բարձրացված'})),
EmojiAnnotations(emoji='🙏', codepoints=(128591,), name='միացված ձեռքի ափեր', slug='միացված_ձեռքի_ափեր', annotations=frozenset({'աղոթել', 'խնդրել', 'մարմին', 'խնդրում եմ', 'ժեստ', 'խոնարհվել', 'ձեռք', 'շնորհակալություն', 'միացված'})),
EmojiAnnotations(emoji='\U0001f5e3', codepoints=(128483,), name='խոսացող գլուխ', slug='խոսացող_գլուխ', annotations=frozenset({'դեմք', 'գլուխ', 'ուրվագիծ', 'խոսացող', 'խոսալ'})),
EmojiAnnotations(emoji='👤', codepoints=(128100,), name='ուրվագծված կիսանդրի', slug='ուրվագծված_կիսանդրի', annotations=frozenset({'ուրվագիծ', 'կիսանդրի'})),
EmojiAnnotations(emoji='👥', codepoints=(128101,), name='ուրվագծված կիսանդրիներ', slug='ուրվագծված_կիսանդրիներ', annotations=frozenset({'ուրվագիծ', 'կիսանդրի'})),
EmojiAnnotations(emoji='🚶', codepoints=(128694,), name='հետիոտն', slug='հետիոտն', annotations=frozenset({'քայլել', 'զբոսանք', 'զբոսնել'})),
EmojiAnnotations(emoji='🏃', codepoints=(127939,), name='վազող', slug='վազող', annotations=frozenset({'մարաթոն', 'վազք'})),
EmojiAnnotations(emoji='👯', codepoints=(128111,), name='պարող կանայք', slug='պարող_կանայք', annotations=frozenset({'ականջ', 'աղջիկ', 'կին', 'պարող', 'ճագար'})),
EmojiAnnotations(emoji='\U0001f574', codepoints=(128372,), name='տեղում ճախրող գործնական կոստյումով մարդ', slug='տեղում_ճախրող_գործնական_կոստյումով_մարդ', annotations=frozenset({'կոստյում', 'բիզնես', 'մարդ'})),
EmojiAnnotations(emoji='💏', codepoints=(128143,), name='համբույր', slug='համբույր', annotations=frozenset({'զույգ', 'սիրավեպ'})),
EmojiAnnotations(emoji='💑', codepoints=(128145,), name='սրտիկով զույգ', slug='սրտիկով_զույգ', annotations=frozenset({'զույգ', 'սիրտ', 'սիրավեպ', 'սեր'})),
EmojiAnnotations(emoji='👪', codepoints=(128106,), name='ընտանիք', slug='ընտանիք', annotations=frozenset({'երեխա', 'մայր', 'հայր'})),
EmojiAnnotations(emoji='👫', codepoints=(128107,), name='իրար ձեռք բռնած մարդ և կին', slug='իրար_ձեռք_բռնած_մարդ_և_կին', annotations=frozenset({'զույգ', 'ձեռք', 'տղամարդ', 'բռնել', 'կին'})),
EmojiAnnotations(emoji='👬', codepoints=(128108,), name='իրար ձեռք բռնած երկու տղամարդ', slug='իրար_ձեռք_բռնած_երկու_տղամարդ', annotations=frozenset({'երկվորյակ', 'ձեռք', 'տղամարդ', 'բռնել', 'կենդանակերպ', 'զույգ'})),
EmojiAnnotations(emoji='👭', codepoints=(128109,), name='իրար ձեռք բռնած երկու կին', slug='իրար_ձեռք_բռնած_երկու_կին', annotations=frozenset({'զույգ', 'ձեռք', 'բռնել', 'կին'})),
EmojiAnnotations(emoji='\U0001f3fb', codepoints=(127995,), name='մաշկի տիպ-1-2', slug='մաշկի_տիպ_1_2', annotations=frozenset({'տոն', 'ֆիցպատրիկ', 'մաշկ', 'զմայլիկների կերպափոխիչ'})),
EmojiAnnotations(emoji='\U0001f3fc', codepoints=(127996,), name='մաշկի տիպ-3', slug='մաշկի_տիպ_3', annotations=frozenset({'տոն', 'ֆիցպատրիկ', 'մաշկ', 'զմայլիկների կերպափոխիչ'})),
EmojiAnnotations(emoji='\U0001f3fd', codepoints=(127997,), name='մաշկի տիպ-4', slug='մաշկի_տիպ_4', annotations=frozenset({'տոն', 'ֆիցպատրիկ', 'մաշկ', 'զմայլիկների կերպափոխիչ'})),
EmojiAnnotations(emoji='\U0001f3fe', codepoints=(127998,), name='մաշկի տիպ-5', slug='մաշկի_տիպ_5', annotations=frozenset({'տոն', 'ֆիցպատրիկ', 'մաշկ', 'զմայլիկների կերպափոխիչ'})),
EmojiAnnotations(emoji='\U0001f3ff', codepoints=(127999,), name='մաշկի տիպ-6', slug='մաշկի_տիպ_6', annotations=frozenset({'տոն', 'ֆիցպատրիկ', 'մաշկ', 'զմայլիկների կերպափոխիչ'})),
EmojiAnnotations(emoji='💪', codepoints=(128170,), name='ձգված բիցեպս', slug='ձգված_բիցեպս', annotations=frozenset({'բիցեպս', 'մարմին', 'կոմիքս', 'ձգել', 'մկան'})),
EmojiAnnotations(emoji='👈', codepoints=(128072,), name='դեպի ձախ ուղղված ցուցամատ', slug='դեպի_ձախ_ուղղված_ցուցամատ', annotations=frozenset({'ցուցամատ', 'ձեռք', 'ուղղված', 'մատ', 'մարմին'})),
EmojiAnnotations(emoji='👉', codepoints=(128073,), name='դեպի աջ ուղղված ցուցամատ', slug='դեպի_աջ_ուղղված_ցուցամատ', annotations=frozenset({'ցուցամատ', 'ձեռք', 'ուղղված', 'մատ', 'մարմին'})),
EmojiAnnotations(emoji='☝', codepoints=(9757,), name='դեպի վեր ուղղված ցուցամատ ձեռքի ափի կողմից', slug='դեպի_վեր_ուղղված_ցուցամատ_ձեռքի_ափի_կողմից', annotations=frozenset({'մարմին', 'ցուցամատ', 'ձեռքի ափ', 'ձեռք', 'ուղղված', 'մատ', 'վեր'})),
EmojiAnnotations(emoji='👆', codepoints=(128070,), name='դեպի վեր ուղղված ցուցամատ', slug='դեպի_վեր_ուղղված_ցուցամատ', annotations=frozenset({'մարմին', 'ցուցամատ', 'ձեռք', 'ուղղված', 'մատ', 'վեր'})),
EmojiAnnotations(emoji='\U0001f595', codepoints=(128405,), name='մեջտեղի մատ', slug='մեջտեղի_մատ', annotations=frozenset({'ձեռք', 'մատ', 'մարմին'})),
EmojiAnnotations(emoji='👇', codepoints=(128071,), name='դեպի վար ուղղված ցուցամատ', slug='դեպի_վար_ուղղված_ցուցամատ', annotations=frozenset({'մարմին', 'ցուցամատ', 'ձեռք', 'ուղղված', 'վար', 'մատ'})),
EmojiAnnotations(emoji='✌', codepoints=(9996,), name='հաղթական ձեռք', slug='հաղթական_ձեռք', annotations=frozenset({'ձեռք', 'v', 'մարմին', 'հաղթանակ'})),
EmojiAnnotations(emoji='\U0001f596', codepoints=(128406,), name='վուլկանցիների ողջույն', slug='վուլկանցիների_ողջույն', annotations=frozenset({'ձեռք', 'մատ', 'մարմին', 'վուլկան'})),
EmojiAnnotations(emoji='\U0001f918', codepoints=(129304,), name='եղջյուրների նշան', slug='եղջյուրների_նշան', annotations=frozenset({'ձեռք', 'մատ', 'մարմին', 'եղջյուրներ'})),
EmojiAnnotations(emoji='\U0001f590', codepoints=(128400,), name='բացված մատներով բարձրացված ձեռք', slug='բացված_մատներով_բարձրացված_ձեռք', annotations=frozenset({'ձեռք', 'մատ', 'մարմին', 'բացված'})),
EmojiAnnotations(emoji='✋', codepoints=(9995,), name='բարձրացված ձեռք', slug='բարձրացված_ձեռք', annotations=frozenset({'ձեռք', 'մարմին'})),
EmojiAnnotations(emoji='👌', codepoints=(128076,), name='ok ցույց տվող ձեռք', slug='ok_ցույց_տվող_ձեռք', annotations=frozenset({'ձեռք', 'մարմին', 'ok'})),
EmojiAnnotations(emoji='👍', codepoints=(128077,), name='բութ մատը վեր', slug='բութ_մատը_վեր', annotations=frozenset({'բութ', '+1', 'ձեռք', 'մարմին', 'վեր'})),
EmojiAnnotations(emoji='👎', codepoints=(128078,), name='բութ մատը ներքև', slug='բութ_մատը_ներքև', annotations=frozenset({'-1', 'ներքև', 'ձեռք', 'մարմին', 'բութ մատ'})),
EmojiAnnotations(emoji='✊', codepoints=(9994,), name='բարձրացված բռունցք', slug='բարձրացված_բռունցք', annotations=frozenset({'հարված', 'բռունցք', 'ձեռք', 'մարմին', 'սեղմված'})),
EmojiAnnotations(emoji='👊', codepoints=(128074,), name='հանդիպակաց բռունցք', slug='հանդիպակաց_բռունցք', annotations=frozenset({'հարված', 'բռունցք', 'ձեռք', 'մարմին', 'սեղմված'})),
EmojiAnnotations(emoji='👋', codepoints=(128075,), name='թափահարող ձեռք', slug='թափահարող_ձեռք', annotations=frozenset({'ձեռք', 'թափահարել', 'թափահարող', 'մարմին'})),
EmojiAnnotations(emoji='👏', codepoints=(128079,), name='ծափահարող ձեռքեր', slug='ծափահարող_ձեռքեր', annotations=frozenset({'ձեռք', 'մարմին', 'ծափահարել'})),
EmojiAnnotations(emoji='👐', codepoints=(128080,), name='բաց ձեռքեր', slug='բաց_ձեռքեր', annotations=frozenset({'բաց', 'ձեռք', 'մարմին'})),
EmojiAnnotations(emoji='✍', codepoints=(9997,), name='գրող ձեռք', slug='գրող_ձեռք', annotations=frozenset({'ձեռք', 'մարմին', 'գրել'})),
EmojiAnnotations(emoji='💅', codepoints=(128133,), name='եղունգների լաքապատում', slug='եղունգների_լաքապատում', annotations=frozenset({'խնամք', 'մարմին', 'հղկել', 'մատնահարդարում', 'եղունգ', 'կոսմետիկա'})),
EmojiAnnotations(emoji='👂', codepoints=(128066,), name='ականջ', slug='ականջ', annotations=frozenset({'մարմին'})),
EmojiAnnotations(emoji='👃', codepoints=(128067,), name='քիթ', slug='քիթ', annotations=frozenset({'մարմին'})),
EmojiAnnotations(emoji='👣', codepoints=(128099,), name='ոտնահետքեր', slug='ոտնահետքեր', annotations=frozenset({'հագուստ', 'ոտնահետք', 'հետք', 'մարմին'})),
EmojiAnnotations(emoji='👀', codepoints=(128064,), name='աչքեր', slug='աչքեր', annotations=frozenset({'աչք', 'դեմք', 'մարմին'})),
EmojiAnnotations(emoji='\U0001f441', codepoints=(128065,), name='աչք', slug='աչք', annotations=frozenset({'մարմին'})),
EmojiAnnotations(emoji='👅', codepoints=(128069,), name='լեզու', slug='լեզու', annotations=frozenset({'մարմին'})),
EmojiAnnotations(emoji='👄', codepoints=(128068,), name='բերան', slug='բերան', annotations=frozenset({'շուրթեր', 'մարմին'})),
EmojiAnnotations(emoji='💋', codepoints=(128139,), name='համբույրի հետք', slug='համբույրի_հետք', annotations=frozenset({'սիրտ', 'շուրթեր', 'հետք', 'սիրավեպ', 'համբույր'})),
EmojiAnnotations(emoji='💘', codepoints=(128152,), name='նետահարված սիրտ', slug='նետահարված_սիրտ', annotations=frozenset({'նետ', 'սիրտ', 'սիրավեպ', 'կուպիդոն'})),
EmojiAnnotations(emoji='❤', codepoints=(10084,), name='կարմիր սիրտ', slug='կարմիր_սիրտ', annotations=frozenset({'սիրտ'})),
EmojiAnnotations(emoji='💓', codepoints=(128147,), name='բաբախող սիրտ', slug='բաբախող_սիրտ', annotations=frozenset({'սիրտ', 'սրտխփոց', 'պուլսացիա', 'բաբախյուն'})),
EmojiAnnotations(emoji='💔', codepoints=(128148,), name='կոտրված սիրտ', slug='կոտրված_սիրտ', annotations=frozenset({'սիրտ', 'կոտրված', 'կոտրել'})),
EmojiAnnotations(emoji='💕', codepoints=(128149,), name='երկու սրտեր', slug='երկու_սրտեր', annotations=frozenset({'սիրտ', 'սեր'})),
EmojiAnnotations(emoji='💖', codepoints=(128150,), name='շողշողացող սիրտ', slug='շողշողացող_սիրտ', annotations=frozenset({'սիրտ', 'կայծ', 'ոգևորված'})),
EmojiAnnotations(emoji='💗', codepoints=(128151,), name='աճող սիրտ', slug='աճող_սիրտ', annotations=frozenset({'նյարդային', 'սիրտ', 'սրտի զարկ', 'աճող', 'ոգևորված'})),
EmojiAnnotations(emoji='💙', codepoints=(128153,), name='կապույտ սիրտ', slug='կապույտ_սիրտ', annotations=frozenset({'կապույտ', 'սիրտ'})),
EmojiAnnotations(emoji='💚', codepoints=(128154,), name='կանաչ սիրտ', slug='կանաչ_սիրտ', annotations=frozenset({'սիրտ', 'կանաչ'})),
EmojiAnnotations(emoji='💛', codepoints=(128155,), name='դեղին սիրտ', slug='դեղին_սիրտ', annotations=frozenset({'սիրտ', 'դեղին'})),
EmojiAnnotations(emoji='💜', codepoints=(128156,), name='մանուշակագույն սիրտ', slug='մանուշակագույն_սիրտ', annotations=frozenset({'սիրտ', 'մանուշակագույն'})),
EmojiAnnotations(emoji='💝', codepoints=(128157,), name='ժապավենով սիրտ', slug='ժապավենով_սիրտ', annotations=frozenset({'սիրտ', 'ժապավեն', 'վալենտին'})),
EmojiAnnotations(emoji='💞', codepoints=(128158,), name='պտտվող սրտեր', slug='պտտվող_սրտեր', annotations=frozenset({'պտտվող', 'սիրտ'})),
EmojiAnnotations(emoji='💟', codepoints=(128159,), name='սրտաձև նախշ', slug='սրտաձև_նախշ', annotations=frozenset({'սիրտ'})),
EmojiAnnotations(emoji='❣', codepoints=(10083,), name='բացականչական նշանի տեսքով սիրտ', slug='բացականչական_նշանի_տեսքով_սիրտ', annotations=frozenset({'նշան', 'կետադրական', 'սիրտ', 'բացականչություն'})),
EmojiAnnotations(emoji='💌', codepoints=(128140,), name='սիրային նամակ', slug='սիրային_նամակ', annotations=frozenset({'սիրտ', 'նամակ', 'փոստ', 'սիրավեպ', 'սեր'})),
EmojiAnnotations(emoji='💤', codepoints=(128164,), name='խռռռ', slug='խռռռ', annotations=frozenset({'քնել', 'կոմիքս'})),
EmojiAnnotations(emoji='💢', codepoints=(128162,), name='զայրույթի նշան', slug='զայրույթի_նշան', annotations=frozenset({'զայրացած', 'կոմիքս', 'խենք'})),
EmojiAnnotations(emoji='💣', codepoints=(128163,), name='ռումբ', slug='ռումբ', annotations=frozenset({'կոմիքս'})),
EmojiAnnotations(emoji='💥', codepoints=(128165,), name='բախում', slug='բախում', annotations=frozenset({'բում', 'կոմիքս'})),
EmojiAnnotations(emoji='💦', codepoints=(128166,), name='քրտինքի կաթիլներ', slug='քրտինքի_կաթիլներ', annotations=frozenset({'ցայտող', 'կոմիքս', 'քրտինք'})),
EmojiAnnotations(emoji='💨', codepoints=(128168,), name='սրընթաց', slug='սրընթաց', annotations=frozenset({'կոմիքս', 'ընթանալ', 'սլանալ'})),
EmojiAnnotations(emoji='💫', codepoints=(128171,), name='գլխապտույտ', slug='գլխապտույտ', annotations=frozenset({'կոմիքս', 'աստղ'})),
EmojiAnnotations(emoji='💬', codepoints=(128172,), name='խոսքի ամպիկ', slug='խոսքի_ամպիկ', annotations=frozenset({'երկխոսություն', 'փուչիկ', 'կոմիքս', 'պղպջակ', 'խոսք'})),
EmojiAnnotations(emoji='\U0001f5e8', codepoints=(128488,), name='խոսքի ձախակողմյա ամպիկ', slug='խոսքի_ձախակողմյա_ամպիկ', annotations=frozenset({'երկխոսություն', 'խոսք'})),
EmojiAnnotations(emoji='\U0001f5ef', codepoints=(128495,), name='զայրույթի աջակողմյա ամպիկ', slug='զայրույթի_աջակողմյա_ամպիկ', annotations=frozenset({'զայրացած', 'փուչիկ', 'պղպջակ', 'խենք'})),
EmojiAnnotations(emoji='💭', codepoints=(128173,), name='մտքի ամպիկ', slug='մտքի_ամպիկ', annotations=frozenset({'փուչիկ', 'կոմիքս', 'պղպջակ', 'միտք'})),
EmojiAnnotations(emoji='👓', codepoints=(128083,), name='ակնոց', slug='ակնոց', annotations=frozenset({'հագուստ', 'աչք'})),
EmojiAnnotations(emoji='\U0001f576', codepoints=(128374,), name='արևային ակնոց', slug='արևային_ակնոց', annotations=frozenset({'աչք', 'ակնոց', 'մուգ'})),
EmojiAnnotations(emoji='👔', codepoints=(128084,), name='փողկապ', slug='փողկապ', annotations=frozenset({'հագուստ'})),
EmojiAnnotations(emoji='👕', codepoints=(128085,), name='սպորտային վերնաշապիկ', slug='սպորտային_վերնաշապիկ', annotations=frozenset({'հագուստ', 'վերնաշապիկ', 'սպորտային'})),
EmojiAnnotations(emoji='👖', codepoints=(128086,), name='ջինս', slug='ջինս', annotations=frozenset({'հագուստ', 'տաբատ', 'շալվար'})),
EmojiAnnotations(emoji='👗', codepoints=(128087,), name='զգեստ', slug='զգեստ', annotations=frozenset({'հագուստ'})),
EmojiAnnotations(emoji='👘', codepoints=(128088,), name='կիմոնո', slug='կիմոնո', annotations=frozenset({'հագուստ'})),
EmojiAnnotations(emoji='👙', codepoints=(128089,), name='բիկինի', slug='բիկինի', annotations=frozenset({'հագուստ', 'լողալ'})),
EmojiAnnotations(emoji='👚', codepoints=(128090,), name='կնոջ հագուստ', slug='կնոջ_հագուստ', annotations=frozenset({'հագուստ', 'կին'})),
EmojiAnnotations(emoji='👛', codepoints=(128091,), name='դրամապանակ', slug='դրամապանակ', annotations=frozenset({'հագուստ', 'մետաղադրամ'})),
EmojiAnnotations(emoji='👜', codepoints=(128092,), name='ձեռքի պայուսակ', slug='ձեռքի_պայուսակ', annotations=frozenset({'հագուստ', 'պայուսակ'})),
EmojiAnnotations(emoji='👝', codepoints=(128093,), name='պայուսակ', slug='պայուսակ', annotations=frozenset({'հագուստ'})),
EmojiAnnotations(emoji='\U0001f6cd', codepoints=(128717,), name='գնումների պայուսակ', slug='գնումների_պայուսակ', annotations=frozenset({'գնումներ', 'պայուսակ', 'հյուրանոց'})),
EmojiAnnotations(emoji='🎒', codepoints=(127890,), name='դպրոցական պայուսակ', slug='դպրոցական_պայուսակ', annotations=frozenset({'դպրոց', 'պայուսակ', 'ուսապարկ'})),
EmojiAnnotations(emoji='👞', codepoints=(128094,), name='տղամարդու կոշիկ', slug='տղամարդու_կոշիկ', annotations=frozenset({'հագուստ', 'կոշիկ', 'տղամարդ'})),
EmojiAnnotations(emoji='👟', codepoints=(128095,), name='սպորտային կոշիկ', slug='սպորտային_կոշիկ', annotations=frozenset({'հագուստ', 'կոշիկ', 'կեդեր', 'մարզական'})),
EmojiAnnotations(emoji='👠', codepoints=(128096,), name='բարձրակրունկ կոշիկ', slug='բարձրակրունկ_կոշիկ', annotations=frozenset({'հագուստ', 'կոշիկ', 'կրունկ', 'կին'})),
EmojiAnnotations(emoji='👡', codepoints=(128097,), name='կնոջ սանդալ', slug='կնոջ_սանդալ', annotations=frozenset({'հագուստ', 'կոշիկ', 'սանդալ', 'կին'})),
EmojiAnnotations(emoji='👢', codepoints=(128098,), name='կնոջ երկարաճիթք կոշիկ', slug='կնոջ_երկարաճիթք_կոշիկ', annotations=frozenset({'երկարաճիթք կոշիկ', 'հագուստ', 'կոշիկ', 'կին'})),
EmojiAnnotations(emoji='👑', codepoints=(128081,), name='թագ', slug='թագ', annotations=frozenset({'հագուստ', 'արքա', 'թագուհի'})),
EmojiAnnotations(emoji='👒', codepoints=(128082,), name='կնոջ գլխարկ', slug='կնոջ_գլխարկ', annotations=frozenset({'հագուստ', 'գլխարկ', 'կին'})),
EmojiAnnotations(emoji='🎩', codepoints=(127913,), name='ցիլինդր', slug='ցիլինդր', annotations=frozenset({'հագուստ', 'գլխարկ'})),
EmojiAnnotations(emoji='🎓', codepoints=(127891,), name='շրջանավարտի գլխարկ', slug='շրջանավարտի_գլխարկ', annotations=frozenset({'գլխարկ', 'տոն', 'հագուստ', 'ավարտական'})),
EmojiAnnotations(emoji='\U0001f4ff', codepoints=(128255,), name='տերողորմյա', slug='տերողորմյա', annotations=frozenset({'հագուստ', 'վզնոց', 'ուլունքներ', 'աղոթք', 'կրոն'})),
EmojiAnnotations(emoji='💄', codepoints=(128132,), name='շրթներկ', slug='շրթներկ', annotations=frozenset({'կոսմետիա', 'դիմահարդարում'})),
EmojiAnnotations(emoji='💍', codepoints=(128141,), name='մատանի', slug='մատանի', annotations=frozenset({'ադամանդ', 'սիրավեպ'})),
EmojiAnnotations(emoji='💎', codepoints=(128142,), name='թանկարժեք քար', slug='թանկարժեք_քար', annotations=frozenset({'ադամանդ', 'ակն', 'սիրավեպ', 'գոհար'})),
EmojiAnnotations(emoji='🐵', codepoints=(128053,), name='կապիկի դեմք', slug='կապիկի_դեմք', annotations=frozenset({'դեմք', 'կապիկ'})),
EmojiAnnotations(emoji='🐶', codepoints=(128054,), name='շան դեմք', slug='շան_դեմք', annotations=frozenset({'դեմք', 'շուն', 'ընտանի կենդանի'})),
EmojiAnnotations(emoji='🐕', codepoints=(128021,), name='շուն', slug='շուն', annotations=frozenset({'ընտանի կենդանի'})),
EmojiAnnotations(emoji='🐩', codepoints=(128041,), name='պուդել', slug='պուդել', annotations=frozenset({'շուն'})),
EmojiAnnotations(emoji='🐺', codepoints=(128058,), name='գայլի դեմք', slug='գայլի_դեմք', annotations=frozenset({'դեմք', 'գայլ'})),
EmojiAnnotations(emoji='🐱', codepoints=(128049,), name='կատվի դեմք', slug='կատվի_դեմք', annotations=frozenset({'կատու', 'դեմք', 'ընտանի կենդանի'})),
EmojiAnnotations(emoji='🐈', codepoints=(128008,), name='կատու', slug='կատու', annotations=frozenset({'ընտանի կենդանի'})),
EmojiAnnotations(emoji='\U0001f981', codepoints=(129409,), name='առյուծի դեմք', slug='առյուծի_դեմք', annotations=frozenset({'դեմք', 'առյուծ', 'կենդանակերպ', 'կորյուն'})),
EmojiAnnotations(emoji='🐯', codepoints=(128047,), name='վագրի դեմք', slug='վագրի_դեմք', annotations=frozenset({'դեմք', 'վագր'})),
EmojiAnnotations(emoji='🐴', codepoints=(128052,), name='ձիու դեմք', slug='ձիու_դեմք', annotations=frozenset({'դեմք', 'ձի'})),
EmojiAnnotations(emoji='🐎', codepoints=(128014,), name='ձի', slug='ձի', annotations=frozenset({'մրցավազք', 'մրցավազքային ձի'})),
EmojiAnnotations(emoji='\U0001f984', codepoints=(129412,), name='միաեղջյուրի դեմք', slug='միաեղջյուրի_դեմք', annotations=frozenset({'դեմք', 'միաեղջյուր'})),
EmojiAnnotations(emoji='🐮', codepoints=(128046,), name='կովի դեմք', slug='կովի_դեմք', annotations=frozenset({'դեմք', 'կով'})),
EmojiAnnotations(emoji='🐂', codepoints=(128002,), name='ցուլիկ', slug='ցուլիկ', annotations=frozenset({'կենդանակերպ', 'ցուլ'})),
EmojiAnnotations(emoji='🐃', codepoints=(128003,), name='ջրագոմեշ', slug='ջրագոմեշ', annotations=frozenset({'ջուր'})),
EmojiAnnotations(emoji='🐷', codepoints=(128055,), name='խոզի դեմք', slug='խոզի_դեմք', annotations=frozenset({'դեմք', 'խոզ'})),
EmojiAnnotations(emoji='🐖', codepoints=(128022,), name='խոզ', slug='խոզ', annotations=frozenset({'էգ խոզ'})),
EmojiAnnotations(emoji='🐗', codepoints=(128023,), name='վարազ', slug='վարազ', annotations=frozenset({'խոզ'})),
EmojiAnnotations(emoji='🐽', codepoints=(128061,), name='խոզի քիթ', slug='խոզի_քիթ', annotations=frozenset({'դեմք', 'քիթ', 'խոզ'})),
EmojiAnnotations(emoji='🐏', codepoints=(128015,), name='արու ոչխար', slug='արու_ոչխար', annotations=frozenset({'ոչխար', 'կենդանակերպ', 'խոյ'})),
EmojiAnnotations(emoji='🐑', codepoints=(128017,), name='ոչխար', slug='ոչխար', annotations=frozenset({'մաքի'})),
EmojiAnnotations(emoji='🐐', codepoints=(128016,), name='այծ', slug='այծ', annotations=frozenset({'այծեղջյուր', 'կենդանակերպ'})),
EmojiAnnotations(emoji='🐪', codepoints=(128042,), name='ուղտ', slug='ուղտ', annotations=frozenset({'միասապատ', 'կուզ'})),
EmojiAnnotations(emoji='🐫', codepoints=(128043,), name='երկսապատավոր ուղտ', slug='երկսապատավոր_ուղտ', annotations=frozenset({'ուղտ', 'երկսապատանի', 'կուզ'})),
EmojiAnnotations(emoji='🐭', codepoints=(128045,), name='մկան դեմք', slug='մկան_դեմք', annotations=frozenset({'դեմք', 'մուկ'})),
EmojiAnnotations(emoji='🐹', codepoints=(128057,), name='գերմանամկան դեմք', slug='գերմանամկան_դեմք', annotations=frozenset({'դեմք', 'գերմանամուկ', 'ընտանի կենդանի'})),
EmojiAnnotations(emoji='🐰', codepoints=(128048,), name='ճագարի դեմք', slug='ճագարի_դեմք', annotations=frozenset({'դեմք', 'ընտանի կենդանի', 'նապաստակ', 'ճագար'})),
EmojiAnnotations(emoji='🐇', codepoints=(128007,), name='ճագար', slug='ճագար', annotations=frozenset({'ընտանի կենդանի', 'նապաստակ'})),
EmojiAnnotations(emoji='🐻', codepoints=(128059,), name='արջի դեմք', slug='արջի_դեմք', annotations=frozenset({'դեմք', 'արջ'})),
EmojiAnnotations(emoji='🐨', codepoints=(128040,), name='կոալա', slug='կոալա', annotations=frozenset({'արջ'})),
EmojiAnnotations(emoji='🐼', codepoints=(128060,), name='պանդայի դեմք', slug='պանդայի_դեմք', annotations=frozenset({'դեմք', 'պանդա'})),
EmojiAnnotations(emoji='🐾', codepoints=(128062,), name='թաթերի հետքեր', slug='թաթերի_հետքեր', annotations=frozenset({'ոտքեր', 'հետք', 'թաթ'})),
EmojiAnnotations(emoji='🐓', codepoints=(128019,), name='աքաղաղ', slug='աքաղաղ', annotations=frozenset({'աքաղաք'})),
EmojiAnnotations(emoji='🐣', codepoints=(128035,), name='ձվից դուրս եկող ճուտիկ', slug='ձվից_դուրս_եկող_ճուտիկ', annotations=frozenset({'ձագ', 'ձվից դուրս եկող', 'ճուտիկ'})),
EmojiAnnotations(emoji='🐤', codepoints=(128036,), name='ճուտիկ', slug='ճուտիկ', annotations=frozenset({'ձագ'})),
EmojiAnnotations(emoji='🐥', codepoints=(128037,), name='դեմքով կանգնած ճուտիկ', slug='դեմքով_կանգնած_ճուտիկ', annotations=frozenset({'ձագ', 'ճուտիկ'})),
EmojiAnnotations(emoji='\U0001f54a', codepoints=(128330,), name='աղավնի', slug='աղավնի', annotations=frozenset({'թռչուն', 'թռչել', 'խաղաղություն'})),
EmojiAnnotations(emoji='🐸', codepoints=(128056,), name='գորտի դեմք', slug='գորտի_դեմք', annotations=frozenset({'դեմք', 'գորտ'})),
EmojiAnnotations(emoji='🐍', codepoints=(128013,), name='օձ', slug='օձ', annotations=frozenset({'կենդանակերպ'})),
EmojiAnnotations(emoji='🐲', codepoints=(128050,), name='վիշապի դեմք', slug='վիշապի_դեմք', annotations=frozenset({'վիշապ', 'դեմք', 'հեքիաթ'})),
EmojiAnnotations(emoji='🐉', codepoints=(128009,), name='վիշապ', slug='վիշապ', annotations=frozenset({'հեքիաթ'})),
EmojiAnnotations(emoji='🐳', codepoints=(128051,), name='ջուր ցայտեցնող կետաձուկ', slug='ջուր_ցայտեցնող_կետաձուկ', annotations=frozenset({'դեմք', 'կետաձուկ', 'ցայտում'})),
EmojiAnnotations(emoji='🐟', codepoints=(128031,), name='ձուկ', slug='ձուկ', annotations=frozenset({'կենդանակերպ', 'ձկներ'})),
EmojiAnnotations(emoji='🐠', codepoints=(128032,), name='արևադարձային ձուկ', slug='արևադարձային_ձուկ', annotations=frozenset({'ձուկ', 'արևադարձային'})),
EmojiAnnotations(emoji='🐡', codepoints=(128033,), name='փքաձուկ', slug='փքաձուկ', annotations=frozenset({'ձուկ'})),
EmojiAnnotations(emoji='🐚', codepoints=(128026,), name='պարուրաձև խխունջախեցի', slug='պարուրաձև_խխունջախեցի', annotations=frozenset({'պարույր', 'խխունջ'})),
EmojiAnnotations(emoji='\U0001f980', codepoints=(129408,), name='կրաբ', slug='կրաբ', annotations=frozenset({'խեցգետին', 'կենդանակերպ'})),
EmojiAnnotations(emoji='🐜', codepoints=(128028,), name='մրջյուն', slug='մրջյուն', annotations=frozenset({'միջատ'})),
EmojiAnnotations(emoji='🐝', codepoints=(128029,), name='մեղու', slug='մեղու', annotations=frozenset({'միջատ'})),
EmojiAnnotations(emoji='🐞', codepoints=(128030,), name='զատիկ', slug='զատիկ', annotations=frozenset({'միջատ', 'բզեզ'})),
EmojiAnnotations(emoji='\U0001f577', codepoints=(128375,), name='սարդ', slug='սարդ', annotations=frozenset({'միջատ'})),
EmojiAnnotations(emoji='\U0001f578', codepoints=(128376,), name='սարդոստայն', slug='սարդոստայն', annotations=frozenset({'սարդ', 'ոստայն'})),
EmojiAnnotations(emoji='\U0001f982', codepoints=(129410,), name='շագանակագույն կարիճ', slug='շագանակագույն_կարիճ', annotations=frozenset({'կարիճ', 'կենդանակերպ'})),
EmojiAnnotations(emoji='💐', codepoints=(128144,), name='ծաղկեփունջ', slug='ծաղկեփունջ', annotations=frozenset({'ծաղիկ', 'սիրավեպ', 'բույս'})),
EmojiAnnotations(emoji='🌸', codepoints=(127800,), name='բալենու ծաղիկ', slug='բալենու_ծաղիկ', annotations=frozenset({'ծաղիկ', 'բույս', 'բալ'})),
EmojiAnnotations(emoji='💮', codepoints=(128174,), name='սպիտակ ծաղիկ', slug='սպիտակ_ծաղիկ', annotations=frozenset({'ծաղիկ'})),
EmojiAnnotations(emoji='\U0001f3f5', codepoints=(127989,), name='վարդանախշ', slug='վարդանախշ', annotations=frozenset({'բույս'})),
EmojiAnnotations(emoji='🌹', codepoints=(127801,), name='վարդ', slug='վարդ', annotations=frozenset({'ծաղիկ', 'բույս'})),
EmojiAnnotations(emoji='🌺', codepoints=(127802,), name='հիբիսկուս', slug='հիբիսկուս', annotations=frozenset({'ծաղիկ', 'բույս'})),
EmojiAnnotations(emoji='🌻', codepoints=(127803,), name='արևածաղիկ', slug='արևածաղիկ', annotations=frozenset({'ծաղիկ', 'արև', 'բույս'})),
EmojiAnnotations(emoji='🌼', codepoints=(127804,), name='ծաղիկ', slug='ծաղիկ', annotations=frozenset({'բույս'})),
EmojiAnnotations(emoji='🌷', codepoints=(127799,), name='կակաչ', slug='կակաչ', annotations=frozenset({'ծաղիկ', 'բույս'})),
EmojiAnnotations(emoji='🌱', codepoints=(127793,), name='ծիլ', slug='ծիլ', annotations=frozenset({'բույս', 'մատղաշ'})),
EmojiAnnotations(emoji='🌲', codepoints=(127794,), name='եղևնի', slug='եղևնի', annotations=frozenset({'բույս', 'ծառ'})),
EmojiAnnotations(emoji='🌳', codepoints=(127795,), name='սաղարթավոր ծառ', slug='սաղարթավոր_ծառ', annotations=frozenset({'սաղարթավոր', 'բույս', 'ծառ'})),
EmojiAnnotations(emoji='🌴', codepoints=(127796,), name='արմավենի', slug='արմավենի', annotations=frozenset({'բույս', 'ծառ'})),
EmojiAnnotations(emoji='🌵', codepoints=(127797,), name='կակտուս', slug='կակտուս', annotations=frozenset({'բույս'})),
EmojiAnnotations(emoji='🌾', codepoints=(127806,), name='բրնձի հասկեր', slug='բրնձի_հասկեր', annotations=frozenset({'ականջ', 'բույս', 'բրինձ'})),
EmojiAnnotations(emoji='🌿', codepoints=(127807,), name='խոտաբույս', slug='խոտաբույս', annotations=frozenset({'տերև', 'բույս'})),
EmojiAnnotations(emoji='☘', codepoints=(9752,), name='երեքնուկ', slug='երեքնուկ', annotations=frozenset({'բույս'})),
EmojiAnnotations(emoji='🍀', codepoints=(127808,), name='քառատերև երեքնուկ', slug='քառատերև_երեքնուկ', annotations=frozenset({'4', 'չորս', 'տերև', 'բույս', 'երեքնուկ'})),
EmojiAnnotations(emoji='🍁', codepoints=(127809,), name='թխկու տերև', slug='թխկու_տերև', annotations=frozenset({'տերև', 'թխկի', 'բույս', 'ընկնող'})),
EmojiAnnotations(emoji='🍂', codepoints=(127810,), name='ընկած տերևներ', slug='ընկած_տերևներ', annotations=frozenset({'տերև', 'բույս', 'ընկնող'})),
EmojiAnnotations(emoji='🍃', codepoints=(127811,), name='ճախրող տերևներ', slug='ճախրող_տերևներ', annotations=frozenset({'տերև', 'քամի', 'փչել', 'թրթռալ', 'բույս'})),
EmojiAnnotations(emoji='🍇', codepoints=(127815,), name='խաղող', slug='խաղող', annotations=frozenset({'բույս', 'պտուղ'})),
EmojiAnnotations(emoji='🍈', codepoints=(127816,), name='սեխ', slug='սեխ', annotations=frozenset({'բույս', 'պտուղ'})),
EmojiAnnotations(emoji='🍉', codepoints=(127817,), name='ձմերուկ', slug='ձմերուկ', annotations=frozenset({'բույս', 'պտուղ'})),
EmojiAnnotations(emoji='🍊', codepoints=(127818,), name='մանդարին', slug='մանդարին', annotations=frozenset({'նարինջ', 'բույս', 'պտուղ'})),
EmojiAnnotations(emoji='🍋', codepoints=(127819,), name='կիտրոն', slug='կիտրոն', annotations=frozenset({'ցիտրուս', 'բույս', 'պտուղ'})),
EmojiAnnotations(emoji='🍌', codepoints=(127820,), name='բանան', slug='բանան', annotations=frozenset({'բույս', 'պտուղ'})),
EmojiAnnotations(emoji='🍍', codepoints=(127821,), name='արքայախնձոր', slug='արքայախնձոր', annotations=frozenset({'բույս', 'պտուղ'})),
EmojiAnnotations(emoji='🍎', codepoints=(127822,), name='կարմիր խնձոր', slug='կարմիր_խնձոր', annotations=frozenset({'կարմիր', 'խնձոր', 'բույս', 'պտուղ'})),
EmojiAnnotations(emoji='🍏', codepoints=(127823,), name='կանաչ խնձոր', slug='կանաչ_խնձոր', annotations=frozenset({'խնձոր', 'բույս', 'պտուղ', 'կանաչ'})),
EmojiAnnotations(emoji='🍐', codepoints=(127824,), name='տանձ', slug='տանձ', annotations=frozenset({'բույս', 'պտուղ'})),
EmojiAnnotations(emoji='🍑', codepoints=(127825,), name='դեղձ', slug='դեղձ', annotations=frozenset({'բույս', 'պտուղ'})),
EmojiAnnotations(emoji='🍒', codepoints=(127826,), name='բալ', slug='բալ', annotations=frozenset({'բույս', 'պտուղ'})),
EmojiAnnotations(emoji='🍓', codepoints=(127827,), name='ելակ', slug='ելակ', annotations=frozenset({'հատապտուղ', 'բույս', 'պտուղ'})),
EmojiAnnotations(emoji='🍅', codepoints=(127813,), name='լոլիկ', slug='լոլիկ', annotations=frozenset({'բույս', 'բանջարեղեն'})),
EmojiAnnotations(emoji='🍆', codepoints=(127814,), name='սմբուկ', slug='սմբուկ', annotations=frozenset({'բույս', 'բանջարեղեն'})),
EmojiAnnotations(emoji='🌽', codepoints=(127805,), name='եգիպտացորեն', slug='եգիպտացորեն', annotations=frozenset({'ականջ', 'բույս'})),
EmojiAnnotations(emoji='\U0001f336', codepoints=(127798,), name='կծու պղպեղ', slug='կծու_պղպեղ', annotations=frozenset({'պղպեղ', 'կծու', 'բույս'})),
EmojiAnnotations(emoji='🍄', codepoints=(127812,), name='սունկ', slug='սունկ', annotations=frozenset({'բույս'})),
EmojiAnnotations(emoji='🌰', codepoints=(127792,), name='շագանակ', slug='շագանակ', annotations=frozenset({'բույս'})),
EmojiAnnotations(emoji='🍞', codepoints=(127838,), name='հաց', slug='հաց', annotations=frozenset({'բոքոն'})),
EmojiAnnotations(emoji='\U0001f9c0', codepoints=(129472,), name='պանրի կտոր', slug='պանրի_կտոր', annotations=frozenset({'պանիր'})),
EmojiAnnotations(emoji='🍖', codepoints=(127830,), name='ոսկորով միս', slug='ոսկորով_միս', annotations=frozenset({'ոսկոր', 'միս'})),
EmojiAnnotations(emoji='🍗', codepoints=(127831,), name='հավի բուդ', slug='հավի_բուդ', annotations=frozenset({'ոսկոր', 'բուդ', 'հավ', 'թռչնամիս'})),
EmojiAnnotations(emoji='🍔', codepoints=(127828,), name='համբուրգեր', slug='համբուրգեր', annotations=frozenset({'բուրգեր'})),
EmojiAnnotations(emoji='🍟', codepoints=(127839,), name='տապակած կարտոֆիլ', slug='տապակած_կարտոֆիլ', annotations=frozenset({'կարտոֆիլ', 'ֆրի'})),
EmojiAnnotations(emoji='🍕', codepoints=(127829,), name='պիցցա', slug='պիցցա', annotations=frozenset({'պանիր', 'կտոր'})),
EmojiAnnotations(emoji='\U0001f32d', codepoints=(127789,), name='հոթդոգ', slug='հոթդոգ', annotations=frozenset({'նրբերշիկ', 'ֆրանկֆուրտեր'})),
EmojiAnnotations(emoji='\U0001f32e', codepoints=(127790,), name='տակո', slug='տակո', annotations=frozenset({'մեքսիկական'})),
EmojiAnnotations(emoji='\U0001f32f', codepoints=(127791,), name='բուրիտո', slug='բուրիտո', annotations=frozenset({'մեքսիկական'})),
EmojiAnnotations(emoji='🍲', codepoints=(127858,), name='թասով ճաշ', slug='թասով_ճաշ', annotations=frozenset({'թաս', 'ճաշ'})),
EmojiAnnotations(emoji='🍱', codepoints=(127857,), name='բենտո արկղիկ', slug='բենտո_արկղիկ', annotations=frozenset({'բենտո', 'արկղիկ'})),
EmojiAnnotations(emoji='🍘', codepoints=(127832,), name='բրնձի կրեկեր', slug='բրնձի_կրեկեր', annotations=frozenset({'բրինձ', 'կրեկեր'})),
EmojiAnnotations(emoji='🍙', codepoints=(127833,), name='բրնձի գնդիկ', slug='բրնձի_գնդիկ', annotations=frozenset({'գնդիկ', 'ճապոնական', 'բրինձ'})),
EmojiAnnotations(emoji='🍚', codepoints=(127834,), name='եփած բրինձ', slug='եփած_բրինձ', annotations=frozenset({'եփած', 'բրինձ'})),
EmojiAnnotations(emoji='🍛', codepoints=(127835,), name='կարրիով բրինձ', slug='կարրիով_բրինձ', annotations=frozenset({'կարրի', 'բրինձ'})),
EmojiAnnotations(emoji='🍜', codepoints=(127836,), name='տաք ապուր', slug='տաք_ապուր', annotations=frozenset({'թաս', 'տաք', 'լապշա'})),
EmojiAnnotations(emoji='🍝', codepoints=(127837,), name='սպագետի', slug='սպագետի', annotations=frozenset({'մակարոնեղեն'})),
EmojiAnnotations(emoji='🍠', codepoints=(127840,), name='կարմրացրած քաղցր կարտոֆիլ', slug='կարմրացրած_քաղցր_կարտոֆիլ', annotations=frozenset({'կարտոֆիլ', 'կարմրացրած', 'քաղցր'})),
EmojiAnnotations(emoji='🍢', codepoints=(127842,), name='օդեն', slug='օդեն', annotations=frozenset({'ծովամթերք', 'շամփուր', 'քյաբաբ', 'փայտիկ'})),
EmojiAnnotations(emoji='🍤', codepoints=(127844,), name='տապակած ծովախեցգետին', slug='տապակած_ծովախեցգետին', annotations=frozenset({'տապակած', 'ծովախեցգետին'})),
EmojiAnnotations(emoji='🍥', codepoints=(127845,), name='ձկնային տորթ պտտանախշով', slug='ձկնային_տորթ_պտտանախշով', annotations=frozenset({'տորթ', 'խմորեղեն', 'ձուկ', 'պտտանախշ'})),
EmojiAnnotations(emoji='🍡', codepoints=(127841,), name='դանգո', slug='դանգո', annotations=frozenset({'շամփուր', 'փայտիկ', 'քաղցր', 'ճապոնական', 'դեսերտ'})),
EmojiAnnotations(emoji='🍦', codepoints=(127846,), name='լցնովի պաղպաղակ', slug='լցնովի_պաղպաղակ', annotations=frozenset({'քաղցր', 'պաղպաղակ', 'դեսերտ', 'կրեմ', 'լցնովի', 'փափուկ'})),
EmojiAnnotations(emoji='🍧', codepoints=(127847,), name='մանրացված սառույց', slug='մանրացված_սառույց', annotations=frozenset({'սառույց', 'մանրացված', 'քաղցր', 'դեսերտ'})),
EmojiAnnotations(emoji='🍨', codepoints=(127848,), name='պաղպաղակ', slug='պաղպաղակ', annotations=frozenset({'կրեմ', 'քաղցր', 'դեսերտ'})),
EmojiAnnotations(emoji='🍩', codepoints=(127849,), name='դոնաթ', slug='դոնաթ', annotations=frozenset({'քաղցր', 'դեսերտ'})),
EmojiAnnotations(emoji='🍪', codepoints=(127850,), name='թխվածքաբլիթ', slug='թխվածքաբլիթ', annotations=frozenset({'քաղցր', 'դեսերտ'})),
EmojiAnnotations(emoji='🎂', codepoints=(127874,), name='ծննդյան տորթ', slug='ծննդյան_տորթ', annotations=frozenset({'քաղցր', 'տոն', 'դեսերտ', 'տորթ', 'խմորեղեն', 'տարեդարձ'})),
EmojiAnnotations(emoji='🍰', codepoints=(127856,), name='տորթի կտոր', slug='տորթի_կտոր', annotations=frozenset({'կտոր', 'տորթ', 'խմորեղեն', 'քաղցր', 'դեսերտ'})),
EmojiAnnotations(emoji='🍫', codepoints=(127851,), name='շոկոլադե սալիկ', slug='շոկոլադե_սալիկ', annotations=frozenset({'սալիկ', 'շոկոլադ', 'քաղցր', 'դեսերտ'})),
EmojiAnnotations(emoji='🍬', codepoints=(127852,), name='կոնֆետ', slug='կոնֆետ', annotations=frozenset({'քաղցր', 'դեսերտ'})),
EmojiAnnotations(emoji='🍭', codepoints=(127853,), name='սառնաշաքար', slug='սառնաշաքար', annotations=frozenset({'կոնֆետ', 'քաղցր', 'դեսերտ'})),
EmojiAnnotations(emoji='🍮', codepoints=(127854,), name='պուդինգ', slug='պուդինգ', annotations=frozenset({'քաղցր', 'դեսերտ'})),
EmojiAnnotations(emoji='🍯', codepoints=(127855,), name='մեղրի կճուճ', slug='մեղրի_կճուճ', annotations=frozenset({'քաղցր', 'մեղր', 'կճուճ'})),
EmojiAnnotations(emoji='🍼', codepoints=(127868,), name='մանկական շիշ', slug='մանկական_շիշ', annotations=frozenset({'մանկական', 'շիշ', 'խմել', 'կաթ'})),
EmojiAnnotations(emoji='☕', codepoints=(9749,), name='տաք ըմպելիք', slug='տաք_ըմպելիք', annotations=frozenset({'սուրճ', 'թեյ', 'խմել', 'տաք', 'ըմպելիք'})),
EmojiAnnotations(emoji='🍵', codepoints=(127861,), name='թեյի բաժակ առանց բռնակի', slug='թեյի_բաժակ_առանց_բռնակի', annotations=frozenset({'թեյի բաժակ', 'բաժակ', 'խմել', 'թեյ', 'ըմպելիք'})),
EmojiAnnotations(emoji='🍶', codepoints=(127862,), name='սակե', slug='սակե', annotations=frozenset({'բար', 'շիշ', 'բաժակ', 'խմել', 'ըմպելիք'})),
EmojiAnnotations(emoji='\U0001f37e', codepoints=(127870,), name='թռչող խցանով շիշ', slug='թռչող_խցանով_շիշ', annotations=frozenset({'բար', 'խցան', 'շիշ', 'խմել', 'դուրս թռչել'})),
EmojiAnnotations(emoji='🍷', codepoints=(127863,), name='գինու բաժակ', slug='գինու_բաժակ', annotations=frozenset({'բար', 'բաժակ', 'խմել', 'գինի', 'ըմպելիք'})),
EmojiAnnotations(emoji='🍸', codepoints=(127864,), name='կոկտեյլի բաժակ', slug='կոկտեյլի_բաժակ', annotations=frozenset({'բար', 'բաժակ', 'խմել', 'կոկտեյլ'})),
EmojiAnnotations(emoji='🍹', codepoints=(127865,), name='արևադարձային ընպելիք', slug='արևադարձային_ընպելիք', annotations=frozenset({'բար', 'խմել', 'արևադարձային'})),
EmojiAnnotations(emoji='🍺', codepoints=(127866,), name='գարեջրի գավաթ', slug='գարեջրի_գավաթ', annotations=frozenset({'բար', 'խմել', 'գավաթ', 'գարեջուր'})),
EmojiAnnotations(emoji='🍻', codepoints=(127867,), name='զրնգացող գարեջրի գավաթներ', slug='զրնգացող_գարեջրի_գավաթներ', annotations=frozenset({'բար', 'զրնգալ', 'խմել', 'գավաթ', 'գարեջուր'})),
EmojiAnnotations(emoji='\U0001f37d', codepoints=(127869,), name='դանակ և պատառաքաղ ափսեի հետ', slug='դանակ_և_պատառաքաղ_ափսեի_հետ', annotations=frozenset({'խոհարարություն', 'պատառաքաղ', 'դանակ', 'ափսե'})),
EmojiAnnotations(emoji='🍴', codepoints=(127860,), name='դանակ և պատառաքաղ', slug='դանակ_և_պատառաքաղ', annotations=frozenset({'խոհարարություն', 'պատառաքաղ', 'դանակ'})),
EmojiAnnotations(emoji='🍳', codepoints=(127859,), name='թավայով ձվածեղ', slug='թավայով_ձվածեղ', annotations=frozenset({'ձու', 'թավա', 'տապակել'})),
EmojiAnnotations(emoji='\U0001f3fa', codepoints=(127994,), name='սափոր', slug='սափոր', annotations=frozenset({'խմել', 'խոհարարություն', 'գործիք', 'կենդանակերպ', 'զենք', 'ջրհոս'})),
EmojiAnnotations(emoji='🌍', codepoints=(127757,), name='եվրոպան և աֆրիկան պատկերող գլոբուս', slug='եվրոպան_և_աֆրիկան_պատկերող_գլոբուս', annotations=frozenset({'աշխարհ', 'երկիր', 'գլոբուս', 'եվրոպա', 'աֆրիկա'})),
EmojiAnnotations(emoji='🌎', codepoints=(127758,), name='ամերիկաները պատկերող գլոբուս', slug='ամերիկաները_պատկերող_գլոբուս', annotations=frozenset({'երկիր', 'գլոբուս', 'ամերիկաներ', 'աշխարհ'})),
EmojiAnnotations(emoji='🌏', codepoints=(127759,), name='ասիան և ավստրալիան պատկերող գլոբուս', slug='ասիան_և_ավստրալիան_պատկերող_գլոբուս', annotations=frozenset({'աշխարհ', 'երկիր', 'գլոբուս', 'ավստրալիա', 'ասիա'})),
EmojiAnnotations(emoji='🌐', codepoints=(127760,), name='միջօրեականներով գլոբուս', slug='միջօրեականներով_գլոբուս', annotations=frozenset({'երկիր', 'գլոբուս', 'միջօրեականներ', 'աշխարհ'})),
EmojiAnnotations(emoji='\U0001f5fa', codepoints=(128506,), name='աշխարհի քարտեզ', slug='աշխարհի_քարտեզ', annotations=frozenset({'քարտեզ', 'աշխարհ'})),
EmojiAnnotations(emoji='\U0001f3d4', codepoints=(127956,), name='ձյունածածկ գագաթով լեռ', slug='ձյունածածկ_գագաթով_լեռ', annotations=frozenset({'սառը', 'ձյուն', 'լեռ'})),
EmojiAnnotations(emoji='🌋', codepoints=(127755,), name='հրաբուխ', slug='հրաբուխ', annotations=frozenset({'եղանակ', 'լեռ', 'ժայթքում'})),
EmojiAnnotations(emoji='🗻', codepoints=(128507,), name='ֆուջի լեռ', slug='ֆուջի_լեռ', annotations=frozenset({'լեռ', 'ֆուջի'})),
EmojiAnnotations(emoji='\U0001f3d6', codepoints=(127958,), name='լողափ հովանոցով', slug='լողափ_հովանոցով', annotations=frozenset({'լողափ', 'հովանոց'})),
EmojiAnnotations(emoji='\U0001f3dd', codepoints=(127965,), name='անմարդաբնակ կղզի', slug='անմարդաբնակ_կղզի', annotations=frozenset({'կղզի', 'անմարդաբնակ'})),
EmojiAnnotations(emoji='\U0001f3de', codepoints=(127966,), name='ազգային պարկ', slug='ազգային_պարկ', annotations=frozenset({'պարկ'})),
EmojiAnnotations(emoji='\U0001f3db', codepoints=(127963,), name='հունահռոմեական շինություն', slug='հունահռոմեական_շինություն', annotations=frozenset({'հունահռոմեական', 'շինություն'})),
EmojiAnnotations(emoji='\U0001f3d7', codepoints=(127959,), name='շենքի կառուցում', slug='շենքի_կառուցում', annotations=frozenset({'շենք', 'շինարարություն'})),
EmojiAnnotations(emoji='\U0001f3d8', codepoints=(127960,), name='տան շինարարություն', slug='տան_շինարարություն', annotations=frozenset({'շենք', 'տուն'})),
EmojiAnnotations(emoji='\U0001f3d9', codepoints=(127961,), name='քաղաքի համայնապատկեր', slug='քաղաքի_համայնապատկեր', annotations=frozenset({'քաղաք', 'շենք'})),
EmojiAnnotations(emoji='\U0001f3da', codepoints=(127962,), name='լքված շինություն', slug='լքված_շինություն', annotations=frozenset({'լքված', 'շենք', 'տուն'})),
EmojiAnnotations(emoji='🏠', codepoints=(127968,), name='բնակելի տուն', slug='բնակելի_տուն', annotations=frozenset({'շենք', 'բնակարան', 'տուն'})),
EmojiAnnotations(emoji='🏡', codepoints=(127969,), name='այգիով տուն', slug='այգիով_տուն', annotations=frozenset({'այգի', 'շենք', 'բնակարան', 'տուն'})),
EmojiAnnotations(emoji='⛪', codepoints=(9962,), name='եկեղեցի', slug='եկեղեցի', annotations=frozenset({'խաչ', 'շենք', 'կրոն', 'քրիստոնեական'})),
EmojiAnnotations(emoji='\U0001f54b', codepoints=(128331,), name='կաաբա', slug='կաաբա', annotations=frozenset({'իսլամ', 'մուսուլման', 'կրոն'})),
EmojiAnnotations(emoji='\U0001f54c', codepoints=(128332,), name='մզկիթ', slug='մզկիթ', annotations=frozenset({'իսլամ', 'մուսուլման', 'կրոն'})),
EmojiAnnotations(emoji='\U0001f54d', codepoints=(128333,), name='սինագոգ', slug='սինագոգ', annotations=frozenset({'հրեա', 'հրեական', 'տաճար', 'կրոն'})),
EmojiAnnotations(emoji='⛩', codepoints=(9961,), name='սինտոյական տաճար', slug='սինտոյական_տաճար', annotations=frozenset({'տաճար', 'կրոն', 'սինտոյական'})),
EmojiAnnotations(emoji='🏢', codepoints=(127970,), name='գրասենյակային շենք', slug='գրասենյակային_շենք', annotations=frozenset({'շենք'})),
EmojiAnnotations(emoji='🏣', codepoints=(127971,), name='ճապոնական փոստատուն', slug='ճապոնական_փոստատուն', annotations=frozenset({'փոստատուն', 'շենք', 'ճապոնական'})),
EmojiAnnotations(emoji='🏤', codepoints=(127972,), name='փոստատուն', slug='փոստատուն', annotations=frozenset({'եվրոպական', 'շենք'})),
EmojiAnnotations(emoji='🏥', codepoints=(127973,), name='հիվանդանոց', slug='հիվանդանոց', annotations=frozenset({'բժշկություն', 'շենք', 'բժիշկ'})),
EmojiAnnotations(emoji='🏦', codepoints=(127974,), name='բանկ', slug='բանկ', annotations=frozenset({'շենք'})),
EmojiAnnotations(emoji='🏨', codepoints=(127976,), name='հյուրանոց', slug='հյուրանոց', annotations=frozenset({'շենք'})),
EmojiAnnotations(emoji='🏩', codepoints=(127977,), name='սիրային հյուրանոց', slug='սիրային_հյուրանոց', annotations=frozenset({'շենք', 'հյուրանոց', 'սեր'})),
EmojiAnnotations(emoji='🏪', codepoints=(127978,), name='շուրջօրյա խանութ', slug='շուրջօրյա_խանութ', annotations=frozenset({'խանութ', 'շենք', 'շուրջօրյա'})),
EmojiAnnotations(emoji='🏫', codepoints=(127979,), name='դպրոց', slug='դպրոց', annotations=frozenset({'շենք'})),
EmojiAnnotations(emoji='🏬', codepoints=(127980,), name='հանրախանութ', slug='հանրախանութ', annotations=frozenset({'խանութ', 'շենք'})),
EmojiAnnotations(emoji='🏭', codepoints=(127981,), name='գործարան', slug='գործարան', annotations=frozenset({'շենք'})),
EmojiAnnotations(emoji='🏯', codepoints=(127983,), name='ճապոնական դղյակ', slug='ճապոնական_դղյակ', annotations=frozenset({'շենք', 'ճապոնական', 'դղյակ'})),
EmojiAnnotations(emoji='🏰', codepoints=(127984,), name='դղյակ', slug='դղյակ', annotations=frozenset({'եվրոպական', 'շենք'})),
EmojiAnnotations(emoji='💒', codepoints=(128146,), name='հարսանիք', slug='հարսանիք', annotations=frozenset({'մատուռ', 'սիրավեպ'})),
EmojiAnnotations(emoji='🗼', codepoints=(128508,), name='տոկիոյի աշտարակը', slug='տոկիոյի_աշտարակը', annotations=frozenset({'աշտարակ', 'տոկյո'})),
EmojiAnnotations(emoji='🗽', codepoints=(128509,), name='ազատության արձանը', slug='ազատության_արձանը', annotations=frozenset({'ազատություն', 'արձան'})),
EmojiAnnotations(emoji='🗾', codepoints=(128510,), name='ճապոնիայի քարտեզը', slug='ճապոնիայի_քարտեզը', annotations=frozenset({'քարտեզ', 'ճապոնիա'})),
EmojiAnnotations(emoji='⛺', codepoints=(9978,), name='վրան', slug='վրան', annotations=frozenset({'ճամբար'})),
EmojiAnnotations(emoji='🌁', codepoints=(127745,), name='մառախլապատ', slug='մառախլապատ', annotations=frozenset({'եղանակ', 'մառախուղ'})),
EmojiAnnotations(emoji='🌃', codepoints=(127747,), name='աստղազարդ գիշեր', slug='աստղազարդ_գիշեր', annotations=frozenset({'եղանակ', 'գիշեր', 'աստղ'})),
EmojiAnnotations(emoji='🌄', codepoints=(127748,), name='արևածագը լեռներում', slug='արևածագը_լեռներում', annotations=frozenset({'արևածագ', 'եղանակ', 'արև', 'լեռ', 'առավոտ'})),
EmojiAnnotations(emoji='🌅', codepoints=(127749,), name='արևածագ', slug='արևածագ', annotations=frozenset({'եղանակ', 'արև', 'առավոտ'})),
EmojiAnnotations(emoji='🌆', codepoints=(127750,), name='քաղաքի համայնապատկեր մթնշաղին', slug='քաղաքի_համայնապատկեր_մթնշաղին', annotations=frozenset({'լանդշաֆտ', 'երեկո', 'շենք', 'մթնշաղ', 'մայրամուտ', 'եղանակ', 'քաղաք', 'արև'})),
EmojiAnnotations(emoji='🌇', codepoints=(127751,), name='մայրամուտ', slug='մայրամուտ', annotations=frozenset({'եղանակ', 'արև', 'շենք', 'մթնշաղ'})),
EmojiAnnotations(emoji='🌉', codepoints=(127753,), name='կամուրջը գիշերով', slug='կամուրջը_գիշերով', annotations=frozenset({'եղանակ', 'գիշեր', 'կամուրջ'})),
EmojiAnnotations(emoji='♨', codepoints=(9832,), name='տաք աղբյուրներ', slug='տաք_աղբյուրներ', annotations=frozenset({'աղբյուրներ', 'տաք', 'հոսք'})),
EmojiAnnotations(emoji='🌌', codepoints=(127756,), name='ծիր կաթին', slug='ծիր_կաթին', annotations=frozenset({'եղանակ', 'տիեզերք'})),
EmojiAnnotations(emoji='🎠', codepoints=(127904,), name='կարուսելի ձի', slug='կարուսելի_ձի', annotations=frozenset({'ձի', 'կարուսել'})),
EmojiAnnotations(emoji='🎡', codepoints=(127905,), name='սատանայի անիվ', slug='սատանայի_անիվ', annotations=frozenset({'զվարճանքների այգի', 'անիվ', 'սատանայի'})),
EmojiAnnotations(emoji='🎢', codepoints=(127906,), name='ամերիկյան բլուրներ', slug='ամերիկյան_բլուրներ', annotations=frozenset({'զվարճանքների այգի', 'բլուրներ', 'ամերիկյան'})),
EmojiAnnotations(emoji='💈', codepoints=(128136,), name='վարսավիրի ձող', slug='վարսավիրի_ձող', annotations=frozenset({'վարսավիր', 'սանրվածք', 'ձող'})),
EmojiAnnotations(emoji='🎪', codepoints=(127914,), name='կրկեսային վրան', slug='կրկեսային_վրան', annotations=frozenset({'վրան', 'կրկես'})),
EmojiAnnotations(emoji='🎭', codepoints=(127917,), name='կատարողական արվեստ', slug='կատարողական_արվեստ', annotations=frozenset({'ներկայացում', 'թատրոն', 'դիմակ', 'արվեստ'})),
EmojiAnnotations(emoji='\U0001f5bc', codepoints=(128444,), name='շրջանակ նկարով', slug='շրջանակ_նկարով', annotations=frozenset({'նկարչություն', 'նկար', 'արվեստ', 'շրջանակ', 'թանգարան'})),
EmojiAnnotations(emoji='🎨', codepoints=(127912,), name='ներկապնակ', slug='ներկապնակ', annotations=frozenset({'նկարչություն', 'արվեստ', 'թանգարան'})),
EmojiAnnotations(emoji='🎰', codepoints=(127920,), name='խաղային ավտոմատ', slug='խաղային_ավտոմատ', annotations=frozenset({'խաղ', 'ավտոմատ'})),
EmojiAnnotations(emoji='🚂', codepoints=(128642,), name='շոգեքարշ', slug='շոգեքարշ', annotations=frozenset({'փոխադրամիջոց', 'գոլորշի', 'երկաթուղի', 'գնացք'})),
EmojiAnnotations(emoji='🚃', codepoints=(128643,), name='երկաթուղային վագոն', slug='երկաթուղային_վագոն', annotations=frozenset({'փոխադրամիջոց', 'տրոլեյբուս', 'էլեկտրական', 'երկաթուղի', 'վագոն', 'տրամվայ', 'գնացք'})),
EmojiAnnotations(emoji='🚄', codepoints=(128644,), name='ճեպընթաց գնացք', slug='ճեպընթաց_գնացք', annotations=frozenset({'գնացք', 'փոխադրամիջոց', 'սինկանսեն', 'երկաթուղի', 'արագություն'})),
EmojiAnnotations(emoji='🚅', codepoints=(128645,), name='ճեպընթաց գնացք կլոր քթով', slug='ճեպընթաց_գնացք_կլոր_քթով', annotations=frozenset({'փոխադրամիջոց', 'արագություն', 'կլոր քիթ', 'երկաթուղի', 'սինկանսեն', 'գնացք'})),
EmojiAnnotations(emoji='🚆', codepoints=(128646,), name='գնացք', slug='գնացք', annotations=frozenset({'երկաթուղի', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚇', codepoints=(128647,), name='մետրո', slug='մետրո', annotations=frozenset({'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚈', codepoints=(128648,), name='վերգետնյա մետրո', slug='վերգետնյա_մետրո', annotations=frozenset({'երկաթուղի', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚉', codepoints=(128649,), name='կայարան', slug='կայարան', annotations=frozenset({'փոխադրամիջոց', 'երկաթուղի', 'գնացք'})),
EmojiAnnotations(emoji='🚊', codepoints=(128650,), name='տրամվայ', slug='տրամվայ', annotations=frozenset({'տրոլեյբուս', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚝', codepoints=(128669,), name='մոնոռելս', slug='մոնոռելս', annotations=frozenset({'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚞', codepoints=(128670,), name='լեռնային երկաթուղի', slug='լեռնային_երկաթուղի', annotations=frozenset({'վագոն', 'լեռ', 'երկաթուղի', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚋', codepoints=(128651,), name='տրամվայի վագոն', slug='տրամվայի_վագոն', annotations=frozenset({'տրամվայ', 'վագոն', 'տրոլեյբուս', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚌', codepoints=(128652,), name='ավտոբուս', slug='ավտոբուս', annotations=frozenset({'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚍', codepoints=(128653,), name='մոտեցող ավտոբուս', slug='մոտեցող_ավտոբուս', annotations=frozenset({'մոտեցող', 'ավտոբուս', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚎', codepoints=(128654,), name='տրոլեյբուս', slug='տրոլեյբուս', annotations=frozenset({'տրամվայ', 'ավտոբուս', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚏', codepoints=(128655,), name='ավտոբուսի կանգառ', slug='ավտոբուսի_կանգառ', annotations=frozenset({'ավտոբուս', 'կանգառ'})),
EmojiAnnotations(emoji='🚐', codepoints=(128656,), name='միկրոավտոբուս', slug='միկրոավտոբուս', annotations=frozenset({'ավտոբուս', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚑', codepoints=(128657,), name='շտապօգնության մեքենա', slug='շտապօգնության_մեքենա', annotations=frozenset({'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚒', codepoints=(128658,), name='հրշեջ մեքենա', slug='հրշեջ_մեքենա', annotations=frozenset({'քարշակ', 'փոխադրամիջոց', 'հրդեք', 'բեռնատար'})),
EmojiAnnotations(emoji='🚓', codepoints=(128659,), name='ոստիկանական մեքենա', slug='ոստիկանական_մեքենա', annotations=frozenset({'պարեկ', 'ոստիկանություն', 'ավտոմեքենա', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚔', codepoints=(128660,), name='մոտեցող ոստիկանական մեքենա', slug='մոտեցող_ոստիկանական_մեքենա', annotations=frozenset({'մոտեցող', 'ոստիկանություն', 'ավտոմեքենա', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚕', codepoints=(128661,), name='տաքսի', slug='տաքսի', annotations=frozenset({'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚖', codepoints=(128662,), name='մոտեցող տաքսի', slug='մոտեցող_տաքսի', annotations=frozenset({'մոտեցող', 'տաքսի', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚗', codepoints=(128663,), name='ավտոմեքենա', slug='ավտոմեքենա', annotations=frozenset({'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚘', codepoints=(128664,), name='մոտեցող ավտոմեքենա', slug='մոտեցող_ավտոմեքենա', annotations=frozenset({'մոտեցող', 'մեքենա', 'ավտոմեքենա', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚙', codepoints=(128665,), name='ավտոֆուրգոն', slug='ավտոֆուրգոն', annotations=frozenset({'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚚', codepoints=(128666,), name='բեռնատար', slug='բեռնատար', annotations=frozenset({'առաքում', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚛', codepoints=(128667,), name='կցորդով բեռնատար', slug='կցորդով_բեռնատար', annotations=frozenset({'փոխադրամիջոց', 'կցորդ', 'բեռնատար'})),
EmojiAnnotations(emoji='🚜', codepoints=(128668,), name='տրակտոր', slug='տրակտոր', annotations=frozenset({'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚲', codepoints=(128690,), name='հեծանիվ', slug='հեծանիվ', annotations=frozenset({'փոխադրամիջոց'})),
EmojiAnnotations(emoji='⛽', codepoints=(9981,), name='բենզալցակայանի պոմպ', slug='բենզալցակայանի_պոմպ', annotations=frozenset({'բենզին', 'կայան', 'վառելիք', 'պոմպ', 'բենզալցակայան'})),
EmojiAnnotations(emoji='\U0001f6e3', codepoints=(128739,), name='ավտոմայրուղի', slug='ավտոմայրուղի', annotations=frozenset({'մայրուղի', 'ճանապարհ'})),
EmojiAnnotations(emoji='\U0001f6e4', codepoints=(128740,), name='երկաթուղի', slug='երկաթուղի', annotations=frozenset({'գնացք'})),
EmojiAnnotations(emoji='🚨', codepoints=(128680,), name='ոստիկանական մեքենայի փարոս', slug='ոստիկանական_մեքենայի_փարոս', annotations=frozenset({'լույս', 'ոստիկանություն', 'փարոս', 'պտտվող', 'ավտոմեքենա', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚥', codepoints=(128677,), name='հորիզոնական լուսակիր', slug='հորիզոնական_լուսակիր', annotations=frozenset({'երթևեկություն', 'լույս', 'ազդանշան'})),
EmojiAnnotations(emoji='🚦', codepoints=(128678,), name='ուղղահայաց լուսակիր', slug='ուղղահայաց_լուսակիր', annotations=frozenset({'երթևեկություն', 'լույս', 'ազդանշան'})),
EmojiAnnotations(emoji='🚧', codepoints=(128679,), name='շինարարություն', slug='շինարարություն', annotations=frozenset({'արգելապատնեշ'})),
EmojiAnnotations(emoji='⚓', codepoints=(9875,), name='խարիսխ', slug='խարիսխ', annotations=frozenset({'գործիք', 'նավ'})),
EmojiAnnotations(emoji='⛵', codepoints=(9973,), name='առագաստանավ', slug='առագաստանավ', annotations=frozenset({'նավակ', 'հանգստավայր', 'ծով', 'զբոսանավ', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚣', codepoints=(128675,), name='թիանավակ', slug='թիանավակ', annotations=frozenset({'նավակ', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚤', codepoints=(128676,), name='արագընթաց մոտորանավակ', slug='արագընթաց_մոտորանավակ', annotations=frozenset({'նավակ', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='\U0001f6f3', codepoints=(128755,), name='ուղևորատար նավ', slug='ուղևորատար_նավ', annotations=frozenset({'նավ', 'ուղևոր', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='⛴', codepoints=(9972,), name='լաստանավ', slug='լաստանավ', annotations=frozenset({'նավակ'})),
EmojiAnnotations(emoji='\U0001f6e5', codepoints=(128741,), name='մոտորանավ', slug='մոտորանավ', annotations=frozenset({'նավակ', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚢', codepoints=(128674,), name='նավ', slug='նավ', annotations=frozenset({'փոխադրամիջոց'})),
EmojiAnnotations(emoji='✈', codepoints=(9992,), name='ինքնաթիռ', slug='ինքնաթիռ', annotations=frozenset({'փոխադրամիջոց'})),
EmojiAnnotations(emoji='\U0001f6e9', codepoints=(128745,), name='փոքր ինքնաթիռ', slug='փոքր_ինքնաթիռ', annotations=frozenset({'ինքնաթիռ', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='\U0001f6eb', codepoints=(128747,), name='օդանավի մեկնում', slug='օդանավի_մեկնում', annotations=frozenset({'ինքնաթիռ', 'գրանցում', 'մեկնում', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='\U0001f6ec', codepoints=(128748,), name='օդանավի ժամանում', slug='օդանավի_ժամանում', annotations=frozenset({'վայրէջք', 'ժամանող', 'օդանավ', 'ժամանում', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='💺', codepoints=(128186,), name='նստատեղ', slug='նստատեղ', annotations=frozenset({'բազկաթոռ'})),
EmojiAnnotations(emoji='🚁', codepoints=(128641,), name='ուղղաթիռ', slug='ուղղաթիռ', annotations=frozenset({'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚟', codepoints=(128671,), name='կախովի երկաթուղի', slug='կախովի_երկաթուղի', annotations=frozenset({'փոխադրամիջոց', 'երկաթուղի', 'կախովի'})),
EmojiAnnotations(emoji='🚠', codepoints=(128672,), name='լեռնային ճոպանուղի', slug='լեռնային_ճոպանուղի', annotations=frozenset({'գոնդոլա', 'ճոպան', 'լեռ', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚡', codepoints=(128673,), name='օդային տրամվայ', slug='օդային_տրամվայ', annotations=frozenset({'օդային', 'ճոպան', 'ճոպանուղի', 'գոնդոլա', 'վագոն', 'տրամվայ', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='🚀', codepoints=(128640,), name='հրթիռ', slug='հրթիռ', annotations=frozenset({'տիեզերք', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='\U0001f6f0', codepoints=(128752,), name='արբանյակ', slug='արբանյակ', annotations=frozenset({'տիեզերք', 'փոխադրամիջոց'})),
EmojiAnnotations(emoji='\U0001f6ce', codepoints=(128718,), name='հյուրանոցային զանգ', slug='հյուրանոցային_զանգ', annotations=frozenset({'զանգ', 'հյուրանոց'})),
EmojiAnnotations(emoji='\U0001f6cc', codepoints=(128716,), name='մահճակալում պառկած մարդ', slug='մահճակալում_պառկած_մարդ', annotations=frozenset({'քնել', 'հյուրանոց'})),
EmojiAnnotations(emoji='\U0001f6cf', codepoints=(128719,), name='մահճակալ', slug='մահճակալ', annotations=frozenset({'քնել', 'հյուրանոց'})),
EmojiAnnotations(emoji='\U0001f6cb', codepoints=(128715,), name='բազմոց և լամպ', slug='բազմոց_և_լամպ', annotations=frozenset({'լամպ', 'բազմոց', 'հյուրանոց'})),
EmojiAnnotations(emoji='🚽', codepoints=(128701,), name='զուգարանակոնք', slug='զուգարանակոնք', annotations=frozenset({'զուգարան'})),
EmojiAnnotations(emoji='🚿', codepoints=(128703,), name='լոգարանի ցնցուղ', slug='լոգարանի_ցնցուղ', annotations=frozenset({'ջուր'})),
EmojiAnnotations(emoji='🛀', codepoints=(128704,), name='լոգանք ընդունող մարդ', slug='լոգանք_ընդունող_մարդ', annotations=frozenset({'լոգարան', 'լոգասենյակ'})),
EmojiAnnotations(emoji='🛁', codepoints=(128705,), name='լոգարան', slug='լոգարան', annotations=frozenset({'լոգասենյակ'})),
EmojiAnnotations(emoji='⌛', codepoints=(8987,), name='ավազի ժամացույց', slug='ավազի_ժամացույց', annotations=frozenset({'ժամաչափ', 'ավազ'})),
EmojiAnnotations(emoji='⏳', codepoints=(9203,), name='ավազի ժամացույց հոսող ավազով', slug='ավազի_ժամացույց_հոսող_ավազով', annotations=frozenset({'ավազի ժամացույց', 'ժամաչափ', 'ավազ'})),
EmojiAnnotations(emoji='⏰', codepoints=(9200,), name='զարթուցիչ', slug='զարթուցիչ', annotations=frozenset({'ժամացույց'})),
EmojiAnnotations(emoji='⏱', codepoints=(9201,), name='վայրկյանաչափ', slug='վայրկյանաչափ', annotations=frozenset({'ժամացույց'})),
EmojiAnnotations(emoji='⏲', codepoints=(9202,), name='ժամաչափ', slug='ժամաչափ', annotations=frozenset({'ժամացույց'})),
EmojiAnnotations(emoji='\U0001f570', codepoints=(128368,), name='բուխարու ժամացույց', slug='բուխարու_ժամացույց', annotations=frozenset({'ժամացույց'})),
EmojiAnnotations(emoji='🕛', codepoints=(128347,), name='ժամը տասներկուսը', slug='ժամը_տասներկուսը', annotations=frozenset({'12', 'ժամ', 'տասներկու', '00', 'ժամացույց', '12:00'})),
EmojiAnnotations(emoji='🕧', codepoints=(128359,), name='տասներկուսն անց կես', slug='տասներկուսն_անց_կես', annotations=frozenset({'12', 'տասներկու', '12:30', 'ժամացույց', 'երեսուն', '30'})),
EmojiAnnotations(emoji='🕐', codepoints=(128336,), name='ժամը մեկը', slug='ժամը_մեկը', annotations=frozenset({'ժամ', 'մեկ', '1', '1:00', '00', 'ժամացույց'})),
EmojiAnnotations(emoji='🕜', codepoints=(128348,), name='մեկն անց կես', slug='մեկն_անց_կես', annotations=frozenset({'մեկ', '1', '1:30', 'ժամացույց', 'երեսուն', '30'})),
EmojiAnnotations(emoji='🕑', codepoints=(128337,), name='ժամը երկուսը', slug='ժամը_երկուսը', annotations=frozenset({'ժամ', 'երկու', '00', '2:00', 'ժամացույց', '2'})),
EmojiAnnotations(emoji='🕝', codepoints=(128349,), name='երկուսն անց կես', slug='երկուսն_անց_կես', annotations=frozenset({'երկու', '2:30', 'ժամացույց', '2', 'երեսուն', '30'})),
EmojiAnnotations(emoji='🕒', codepoints=(128338,), name='ժամը երեքը', slug='ժամը_երեքը', annotations=frozenset({'ժամ', 'երեք', '3', '00', '3:00', 'ժամացույց'})),
EmojiAnnotations(emoji='🕞', codepoints=(128350,), name='երեքն անց կես', slug='երեքն_անց_կես', annotations=frozenset({'երեք', '3', '3:30', 'ժամացույց', 'երեսուն', '30'})),
EmojiAnnotations(emoji='🕓', codepoints=(128339,), name='ժամը չորսը', slug='ժամը_չորսը', annotations=frozenset({'4:00', 'ժամ', '00', 'չորս', '4', 'ժամացույց'})),
EmojiAnnotations(emoji='🕟', codepoints=(128351,), name='չորսն անց կես', slug='չորսն_անց_կես', annotations=frozenset({'4:30', 'չորս', '4', 'ժամացույց', 'երեսուն', '30'})),
EmojiAnnotations(emoji='🕔', codepoints=(128340,), name='ժամը հինգը', slug='ժամը_հինգը', annotations=frozenset({'ժամ', 'հինգ', '5:00', '5', '00', 'ժամացույց'})),
EmojiAnnotations(emoji='🕠', codepoints=(128352,), name='հինգն անց կես', slug='հինգն_անց_կես', annotations=frozenset({'5:30', 'հինգ', '5', 'ժամացույց', 'երեսուն', '30'})),
EmojiAnnotations(emoji='🕕', codepoints=(128341,), name='ժամը վեցը', slug='ժամը_վեցը', annotations=frozenset({'վեց', '6', 'ժամ', '6:00', '00', 'ժամացույց'})),
EmojiAnnotations(emoji='🕡', codepoints=(128353,), name='վեցն անց կես', slug='վեցն_անց_կես', annotations=frozenset({'վեց', '6', '6:30', 'ժամացույց', 'երեսուն', '30'})),
EmojiAnnotations(emoji='🕖', codepoints=(128342,), name='ժամը յոթը', slug='ժամը_յոթը', annotations=frozenset({'ժամ', 'յոթ', '7', '00', '7:00', 'ժամացույց'})),
EmojiAnnotations(emoji='🕢', codepoints=(128354,), name='յոթն անց կես', slug='յոթն_անց_կես', annotations=frozenset({'յոթ', '7', '7:30', 'ժամացույց', 'երեսուն', '30'})),
EmojiAnnotations(emoji='🕗', codepoints=(128343,), name='ժամը ութը', slug='ժամը_ութը', annotations=frozenset({'ժամ', '8:00', '00', '8', 'ժամացույց', 'ութ'})),
EmojiAnnotations(emoji='🕣', codepoints=(128355,), name='ութն անց կես', slug='ութն_անց_կես', annotations=frozenset({'8:30', '8', 'ժամացույց', 'ութ', 'երեսուն', '30'})),
EmojiAnnotations(emoji='🕘', codepoints=(128344,), name='ժամը ինը', slug='ժամը_ինը', annotations=frozenset({'ժամ', '9:00', 'ինը', '00', '9', 'ժամացույց'})),
EmojiAnnotations(emoji='🕤', codepoints=(128356,), name='ինն անց կես', slug='ինն_անց_կես', annotations=frozenset({'ինը', '9', 'ժամացույց', 'երեսուն', '30', '9:30'})),
EmojiAnnotations(emoji='🕙', codepoints=(128345,), name='ժամը տասը', slug='ժամը_տասը', annotations=frozenset({'10', '10:00', 'ժամ', 'տասը', '00', 'ժամացույց'})),
EmojiAnnotations(emoji='🕥', codepoints=(128357,), name='տասն անց կես', slug='տասն_անց_կես', annotations=frozenset({'10', 'տասը', '10:30', 'երեսուն', 'ժամացույց', '30'})),
EmojiAnnotations(emoji='🕚', codepoints=(128346,), name='ժամը տասնմեկը', slug='ժամը_տասնմեկը', annotations=frozenset({'11', 'ժամ', '00', '11:00', 'տասնմեկ', 'ժամացույց'})),
EmojiAnnotations(emoji='🕦', codepoints=(128358,), name='տասնմեկն անց կես', slug='տասնմեկն_անց_կես', annotations=frozenset({'11', '11:30', 'տասնմեկ', 'ժամացույց', 'երեսուն', '30'})),
EmojiAnnotations(emoji='🌑', codepoints=(127761,), name='նորալուսին', slug='նորալուսին', annotations=frozenset({'եղանակ', 'մութ', 'լուսին', 'տիեզերք'})),
EmojiAnnotations(emoji='🌒', codepoints=(127762,), name='աճող մահիկ', slug='աճող_մահիկ', annotations=frozenset({'եղանակ', 'լուսին', 'տիեզերք', 'մահիկ', 'աճող'})),
EmojiAnnotations(emoji='🌓', codepoints=(127763,), name='լուսինն առաջին քառորդում', slug='լուսինն_առաջին_քառորդում', annotations=frozenset({'եղանակ', 'լուսին', 'տիեզերք', 'քառորդ'})),
EmojiAnnotations(emoji='🌔', codepoints=(127764,), name='աճող ուռուցիկ լուսին', slug='աճող_ուռուցիկ_լուսին', annotations=frozenset({'ուռուցիկ', 'լուսին', 'տիեզերք', 'եղանակ', 'աճող'})),
EmojiAnnotations(emoji='🌕', codepoints=(127765,), name='լիալուսին', slug='լիալուսին', annotations=frozenset({'եղանակ', 'լուսին', 'տիեզերք'})),
EmojiAnnotations(emoji='🌖', codepoints=(127766,), name='նվազող ուռուցիկ լուսին', slug='նվազող_ուռուցիկ_լուսին', annotations=frozenset({'ուռուցիկ', 'լուսին', 'տիեզերք', 'նվազող', 'եղանակ'})),
EmojiAnnotations(emoji='🌗', codepoints=(127767,), name='լուսինը երկրորդ քառորդում', slug='լուսինը_երկրորդ_քառորդում', annotations=frozenset({'եղանակ', 'լուսին', 'տիեզերք', 'քառորդ'})),
EmojiAnnotations(emoji='🌘', codepoints=(127768,), name='նվազող մահիկ', slug='նվազող_մահիկ', annotations=frozenset({'եղանակ', 'լուսին', 'տիեզերք', 'նվազող', 'մահիկ'})),
EmojiAnnotations(emoji='🌙', codepoints=(127769,), name='մահիկ', slug='մահիկ', annotations=frozenset({'եղանակ', 'լուսին', 'տիեզերք'})),
EmojiAnnotations(emoji='🌚', codepoints=(127770,), name='դեմքով նորալուսին', slug='դեմքով_նորալուսին', annotations=frozenset({'եղանակ', 'դեմք', 'լուսին', 'տիեզերք'})),
EmojiAnnotations(emoji='🌛', codepoints=(127771,), name='լուսինն առաջին քառորդում դեմքով', slug='լուսինն_առաջին_քառորդում_դեմքով', annotations=frozenset({'եղանակ', 'դեմք', 'լուսին', 'քառորդ', 'տիեզերք'})),
EmojiAnnotations(emoji='🌜', codepoints=(127772,), name='լուսինը երկրորդ քառորդում դեմքով', slug='լուսինը_երկրորդ_քառորդում_դեմքով', annotations=frozenset({'եղանակ', 'դեմք', 'լուսին', 'քառորդ', 'տիեզերք'})),
EmojiAnnotations(emoji='\U0001f321', codepoints=(127777,), name='ջերմաչափ', slug='ջերմաչափ', annotations=frozenset({'եղանակ'})),
EmojiAnnotations(emoji='☀', codepoints=(9728,), name='արև', slug='արև', annotations=frozenset({'եղանակ', 'տիեզերք', 'արևոտ', 'պայծառ', 'ճառագայթներ'})),
EmojiAnnotations(emoji='🌝', codepoints=(127773,), name='դեմքով լիալուսին', slug='դեմքով_լիալուսին', annotations=frozenset({'տիեզերք', 'լուսին', 'լիալուսին', 'պայծառ', 'դեմք', 'եղանակ'})),
EmojiAnnotations(emoji='🌞', codepoints=(127774,), name='դեմքով արև', slug='դեմքով_արև', annotations=frozenset({'եղանակ', 'դեմք', 'տիեզերք', 'արև', 'պայծառ'})),
EmojiAnnotations(emoji='⭐', codepoints=(11088,), name='սպիտակավուն աստղ', slug='սպիտակավուն_աստղ', annotations=frozenset({'աստղ'})),
EmojiAnnotations(emoji='🌟', codepoints=(127775,), name='փայլող աստղ', slug='փայլող_աստղ', annotations=frozenset({'փայլող', 'կայծ', 'աստղ'})),
EmojiAnnotations(emoji='🌠', codepoints=(127776,), name='ընկնող աստղ', slug='ընկնող_աստղ', annotations=frozenset({'տիեզերք', 'աստղ', 'ընկնող'})),
EmojiAnnotations(emoji='☁', codepoints=(9729,), name='ամպ', slug='ամպ', annotations=frozenset({'եղանակ'})),
EmojiAnnotations(emoji='⛅', codepoints=(9925,), name='արև ամպի հետևում', slug='արև_ամպի_հետևում', annotations=frozenset({'եղանակ', 'արև', 'ամպ'})),
EmojiAnnotations(emoji='⛈', codepoints=(9928,), name='կայծակով և անձրևով ամպ', slug='կայծակով_և_անձրևով_ամպ', annotations=frozenset({'եղանակ', 'ամպ', 'անձրև', 'ամպրոպ'})),
EmojiAnnotations(emoji='\U0001f324', codepoints=(127780,), name='արև փոքր ամպի հետևում', slug='արև_փոքր_ամպի_հետևում', annotations=frozenset({'եղանակ', 'արև', 'ամպ'})),
EmojiAnnotations(emoji='\U0001f325', codepoints=(127781,), name='արև մեծ ամպի հետևում', slug='արև_մեծ_ամպի_հետևում', annotations=frozenset({'եղանակ', 'արև', 'ամպ'})),
EmojiAnnotations(emoji='\U0001f326', codepoints=(127782,), name='արև անձրևով ամպի հետևում', slug='արև_անձրևով_ամպի_հետևում', annotations=frozenset({'եղանակ', 'արև', 'ամպ', 'անձրև'})),
EmojiAnnotations(emoji='\U0001f327', codepoints=(127783,), name='անձրևով ամպ', slug='անձրևով_ամպ', annotations=frozenset({'եղանակ', 'ամպ', 'անձրև'})),
EmojiAnnotations(emoji='\U0001f328', codepoints=(127784,), name='ձյունով ամպ', slug='ձյունով_ամպ', annotations=frozenset({'եղանակ', 'սառը', 'ձյուն', 'ամպ'})),
EmojiAnnotations(emoji='\U0001f329', codepoints=(127785,), name='կայծակով ամպ', slug='կայծակով_ամպ', annotations=frozenset({'եղանակ', 'ամպ', 'կայծակ'})),
EmojiAnnotations(emoji='\U0001f32a', codepoints=(127786,), name='պտտահողմ', slug='պտտահողմ', annotations=frozenset({'եղանակ', 'ամպ'})),
EmojiAnnotations(emoji='\U0001f32b', codepoints=(127787,), name='մառախուղ', slug='մառախուղ', annotations=frozenset({'եղանակ', 'ամպ'})),
EmojiAnnotations(emoji='\U0001f32c', codepoints=(127788,), name='քամու երես', slug='քամու_երես', annotations=frozenset({'եղանակ', 'դեմք', 'քամի', 'փչել', 'ամպ'})),
EmojiAnnotations(emoji='🌀', codepoints=(127744,), name='ցիկլոն', slug='ցիկլոն', annotations=frozenset({'եղանակ', 'պտտվող', 'թայֆուն'})),
EmojiAnnotations(emoji='🌈', codepoints=(127752,), name='ծիածան', slug='ծիածան', annotations=frozenset({'եղանակ', 'անձրև'})),
EmojiAnnotations(emoji='🌂', codepoints=(127746,), name='փակ անձրևանոց', slug='փակ_անձրևանոց', annotations=frozenset({'հագուստ', 'անձրևանոց', 'եղանակ', 'անձրև'})),
EmojiAnnotations(emoji='☂', codepoints=(9730,), name='անձրևանոց', slug='անձրևանոց', annotations=frozenset({'հագուստ', 'եղանակ', 'անձրև'})),
EmojiAnnotations(emoji='☔', codepoints=(9748,), name='անձրևանոց անձրևի կաթիլներով', slug='անձրևանոց_անձրևի_կաթիլներով', annotations=frozenset({'հագուստ', 'կաթիլ', 'անձրևանոց', 'անձրև', 'եղանակ'})),
EmojiAnnotations(emoji='⛱', codepoints=(9969,), name='անձրևանոց գետնի վրա', slug='անձրևանոց_գետնի_վրա', annotations=frozenset({'անձրևանոց', 'եղանակ', 'արև', 'անձրև'})),
EmojiAnnotations(emoji='⚡', codepoints=(9889,), name='բարձր լարում', slug='բարձր_լարում', annotations=frozenset({'վտանգ', 'լարում', 'էլեկտրականություն', 'էլեկտրական', 'կայծակ'})),
EmojiAnnotations(emoji='❄', codepoints=(10052,), name='ձյան փաթիլ', slug='ձյան_փաթիլ', annotations=frozenset({'եղանակ', 'սառը', 'ձյուն'})),
EmojiAnnotations(emoji='☃', codepoints=(9731,), name='ձնեմարդ', slug='ձնեմարդ', annotations=frozenset({'եղանակ', 'սառը', 'ձյուն'})),
EmojiAnnotations(emoji='⛄', codepoints=(9924,), name='ձնեմարդ առանց ձյան', slug='ձնեմարդ_առանց_ձյան', annotations=frozenset({'եղանակ', 'սառը', 'ձյուն', 'ձնեմարդ'})),
EmojiAnnotations(emoji='☄', codepoints=(9732,), name='գիսաստղ', slug='գիսաստղ', annotations=frozenset({'տիեզերք'})),
EmojiAnnotations(emoji='🔥', codepoints=(128293,), name='կրակ', slug='կրակ', annotations=frozenset({'գործիք', 'բոց'})),
EmojiAnnotations(emoji='💧', codepoints=(128167,), name='կաթիլ', slug='կաթիլ', annotations=frozenset({'եղանակ', 'սառը', 'կոմիքս', 'քրտինք'})),
EmojiAnnotations(emoji='🌊', codepoints=(127754,), name='ծովի ալիք', slug='ծովի_ալիք', annotations=frozenset({'ալիք', 'եղանակ', 'օվկիանոս', 'ջուր'})),
EmojiAnnotations(emoji='🎃', codepoints=(127875,), name='ջեքի լապտեր', slug='ջեքի_լապտեր', annotations=frozenset({'լապտեր', 'տոն', 'հելոուին', 'ջեք'})),
EmojiAnnotations(emoji='🎄', codepoints=(127876,), name='տոնածառ', slug='տոնածառ', annotations=frozenset({'սուրբ ծնունդ', 'տոն', 'ծառ'})),
EmojiAnnotations(emoji='🎆', codepoints=(127878,), name='հրավառություն', slug='հրավառություն', annotations=frozenset({'տոնակատարություն'})),
EmojiAnnotations(emoji='🎇', codepoints=(127879,), name='բենգալյան կրակ', slug='բենգալյան_կրակ', annotations=frozenset({'տոնակատարություն', 'կայծ', 'հրավառություն'})),
EmojiAnnotations(emoji='✨', codepoints=(10024,), name='կայծեր', slug='կայծեր', annotations=frozenset({'կայծ', 'աստղ'})),
EmojiAnnotations(emoji='🎈', codepoints=(127880,), name='փուչիկ', slug='փուչիկ', annotations=frozenset({'տոն'})),
EmojiAnnotations(emoji='🎉', codepoints=(127881,), name='ճայթուկ', slug='ճայթուկ', annotations=frozenset({'տոն', 'երեկույթ'})),
EmojiAnnotations(emoji='🎊', codepoints=(127882,), name='կոնֆետի', slug='կոնֆետի', annotations=frozenset({'տոն', 'գունդ'})),
EmojiAnnotations(emoji='🎋', codepoints=(127883,), name='տանաբատա', slug='տանաբատա', annotations=frozenset({'դրոշակ', 'տոն', 'ճապոնական', 'ծառ'})),
EmojiAnnotations(emoji='🎌', codepoints=(127884,), name='խաչված դրոշակներ', slug='խաչված_դրոշակներ', annotations=frozenset({'տոն', 'խաչ', 'խաչված', 'ճապոնական'})),
EmojiAnnotations(emoji='🎍', codepoints=(127885,), name='բամբուկից դեկորացիա', slug='բամբուկից_դեկորացիա', annotations=frozenset({'բամբուկ', 'տոն', 'ճապոնական', 'դեկորացիա', 'բույս'})),
EmojiAnnotations(emoji='🎎', codepoints=(127886,), name='ճապոնական տիկնիկներ', slug='ճապոնական_տիկնիկներ', annotations=frozenset({'տոն', 'փառատոն', 'ճապոնական', 'տիկնիկ'})),
EmojiAnnotations(emoji='🎏', codepoints=(127887,), name='կարպերի տեսքով նավադրոշ', slug='կարպերի_տեսքով_նավադրոշ', annotations=frozenset({'տոն', 'նավադրոշ', 'կարպ'})),
EmojiAnnotations(emoji='🎐', codepoints=(127888,), name='քամու զանգակ', slug='քամու_զանգակ', annotations=frozenset({'տոն', 'քամի', 'զանգ'})),
EmojiAnnotations(emoji='🎑', codepoints=(127889,), name='լուսնի ծես', slug='լուսնի_ծես', annotations=frozenset({'տոն', 'լուսին', 'ծես'})),
EmojiAnnotations(emoji='🎀', codepoints=(127872,), name='ժապավեն', slug='ժապավեն', annotations=frozenset({'տոն', 'տոնակատարություն'})),
EmojiAnnotations(emoji='🎁', codepoints=(127873,), name='փաթեթավորված նվեր', slug='փաթեթավորված_նվեր', annotations=frozenset({'տոն', 'փաթեթավորված', 'արկղ', 'նվեր'})),
EmojiAnnotations(emoji='\U0001f396', codepoints=(127894,), name='ռազմական մեդալ', slug='ռազմական_մեդալ', annotations=frozenset({'տոն', 'ռազմական', 'մեդալ'})),
EmojiAnnotations(emoji='\U0001f397', codepoints=(127895,), name='հուշաժապավեն', slug='հուշաժապավեն', annotations=frozenset({'տոն', 'ժապավեն', 'հուշ'})),
EmojiAnnotations(emoji='\U0001f39e', codepoints=(127902,), name='տեսաժապավեն', slug='տեսաժապավեն', annotations=frozenset({'կադր', 'ժապավեն', 'կինո', 'ֆիլմ'})),
EmojiAnnotations(emoji='\U0001f39f', codepoints=(127903,), name='մուտքի տոմս', slug='մուտքի_տոմս', annotations=frozenset({'տոմս', 'մուտք'})),
EmojiAnnotations(emoji='🎫', codepoints=(127915,), name='տոմս', slug='տոմս', annotations=frozenset({'մուտք'})),
EmojiAnnotations(emoji='⚽', codepoints=(9917,), name='ֆուտբոլի գնդակ', slug='ֆուտբոլի_գնդակ', annotations=frozenset({'ֆուտբոլ', 'գնդակ'})),
EmojiAnnotations(emoji='⚾', codepoints=(9918,), name='բեյսբոլի գնդակ', slug='բեյսբոլի_գնդակ', annotations=frozenset({'գնդակ'})),
EmojiAnnotations(emoji='🏀', codepoints=(127936,), name='բասկետբոլի գնդակ', slug='բասկետբոլի_գնդակ', annotations=frozenset({'գնդակ', 'բասկետբոլ'})),
EmojiAnnotations(emoji='🏈', codepoints=(127944,), name='ամերիկյան ֆուտբոլի գնդակ', slug='ամերիկյան_ֆուտբոլի_գնդակ', annotations=frozenset({'ֆուտբոլ', 'գնդակ', 'ամերիկյան'})),
EmojiAnnotations(emoji='🏉', codepoints=(127945,), name='ռեգբիի գնդակ', slug='ռեգբիի_գնդակ', annotations=frozenset({'ռեգբի', 'ֆուտբոլ', 'գնդակ'})),
EmojiAnnotations(emoji='🎾', codepoints=(127934,), name='թենիսի գնդակ', slug='թենիսի_գնդակ', annotations=frozenset({'գնդակ', 'թենիս', 'մեծ'})),
EmojiAnnotations(emoji='🎱', codepoints=(127921,), name='բիլիարդ', slug='բիլիարդ', annotations=frozenset({'8', 'խաղ', '8 գնդակ', 'գնդակ', 'ութ'})),
EmojiAnnotations(emoji='🎳', codepoints=(127923,), name='բոուլինգ', slug='բոուլինգ', annotations=frozenset({'խաղ', 'գնդակ'})),
EmojiAnnotations(emoji='⛳', codepoints=(9971,), name='գոլֆի դրոշակ', slug='գոլֆի_դրոշակ', annotations=frozenset({'գոլֆ', 'անցք'})),
EmojiAnnotations(emoji='\U0001f3cc', codepoints=(127948,), name='գոլֆ խաղացող', slug='գոլֆ_խաղացող', annotations=frozenset({'գոլֆ', 'գնդակ'})),
EmojiAnnotations(emoji='⛸', codepoints=(9976,), name='չմուշկ', slug='չմուշկ', annotations=frozenset({'սառույց'})),
EmojiAnnotations(emoji='🎣', codepoints=(127907,), name='կարթաձող', slug='կարթաձող', annotations=frozenset({'կարթ', 'ձուկ'})),
EmojiAnnotations(emoji='🎽', codepoints=(127933,), name='պտտվող շապիկ', slug='պտտվող_շապիկ', annotations=frozenset({'շապիկ', 'պտտվող', 'ժապավեն'})),
EmojiAnnotations(emoji='🎿', codepoints=(127935,), name='դահուկներ', slug='դահուկներ', annotations=frozenset({'ձյուն', 'դահուկ'})),
EmojiAnnotations(emoji='⛷', codepoints=(9975,), name='դահուկորդ', slug='դահուկորդ', annotations=frozenset({'ձյուն', 'դահուկ'})),
EmojiAnnotations(emoji='🏂', codepoints=(127938,), name='սնոուբորդիստ', slug='սնոուբորդիստ', annotations=frozenset({'ձյուն', 'դահուկ', 'սնոուբորդ'})),
EmojiAnnotations(emoji='🏄', codepoints=(127940,), name='սերֆեր', slug='սերֆեր', annotations=frozenset({'սերֆինգ'})),
EmojiAnnotations(emoji='🏇', codepoints=(127943,), name='ձիավազք', slug='ձիավազք', annotations=frozenset({'ձի', 'ժոկեյ', 'մրցարշավային ձի', 'մրցարշավ'})),
EmojiAnnotations(emoji='🏊', codepoints=(127946,), name='լողորդ', slug='լողորդ', annotations=frozenset({'լողալ'})),
EmojiAnnotations(emoji='⛹', codepoints=(9977,), name='գնդակով մարդ', slug='գնդակով_մարդ', annotations=frozenset({'գնդակ'})),
EmojiAnnotations(emoji='\U0001f3cb', codepoints=(127947,), name='ծանրորդ', slug='ծանրորդ', annotations=frozenset({'ծանրություն'})),
EmojiAnnotations(emoji='🚴', codepoints=(128692,), name='հեծանվորդ', slug='հեծանվորդ', annotations=frozenset({'հեծանիվ'})),
EmojiAnnotations(emoji='🚵', codepoints=(128693,), name='լեռնահեծանվորդ', slug='լեռնահեծանվորդ', annotations=frozenset({'հեծանիվ', 'լեռ', 'հեծանվորդ'})),
EmojiAnnotations(emoji='\U0001f3ce', codepoints=(127950,), name='մրցարշավային մեքենա', slug='մրցարշավային_մեքենա', annotations=frozenset({'մեքենա', 'մրցարշավ'})),
EmojiAnnotations(emoji='\U0001f3cd', codepoints=(127949,), name='մոտոցիկլետ', slug='մոտոցիկլետ', annotations=frozenset({'մրցարշավ'})),
EmojiAnnotations(emoji='\U0001f3c5', codepoints=(127941,), name='սպորտային մեդալ', slug='սպորտային_մեդալ', annotations=frozenset({'մեդալ'})),
EmojiAnnotations(emoji='🏆', codepoints=(127942,), name='գավաթ', slug='գավաթ', annotations=frozenset({'մրցանակ'})),
EmojiAnnotations(emoji='\U0001f3cf', codepoints=(127951,), name='կրիկետ', slug='կրիկետ', annotations=frozenset({'բիտա', 'խաղ', 'գնդակ'})),
EmojiAnnotations(emoji='\U0001f3d0', codepoints=(127952,), name='վոլեյբոլի գնդակ', slug='վոլեյբոլի_գնդակ', annotations=frozenset({'խաղ', 'գնդակ'})),
EmojiAnnotations(emoji='\U0001f3d1', codepoints=(127953,), name='խոտի հոկեյ', slug='խոտի_հոկեյ', annotations=frozenset({'խաղ', 'մական', 'գնդակ', 'դաշտ', 'հոկեյ'})),
EmojiAnnotations(emoji='\U0001f3d2', codepoints=(127954,), name='մական և տափօղակ', slug='մական_և_տափօղակ', annotations=frozenset({'սառույց', 'խաղ', 'տափօղակ', 'մական', 'հոկեյ'})),
EmojiAnnotations(emoji='\U0001f3d3', codepoints=(127955,), name='սեղանի թենիս', slug='սեղանի_թենիս', annotations=frozenset({'բիտա', 'խաղ', 'գնդակ', 'ռակետ', 'ձեռնաթիակ'})),
EmojiAnnotations(emoji='\U0001f3f8', codepoints=(127992,), name='բադմինտոն', slug='բադմինտոն', annotations=frozenset({'փետրագնդակ', 'խաղ', 'ռակետ', 'ձեռնաթիակ', 'վոլան'})),
EmojiAnnotations(emoji='🎯', codepoints=(127919,), name='դիպուկ հարված', slug='դիպուկ_հարված', annotations=frozenset({'հարվածել', 'խաղ', 'դարթ', 'կենտրոն', 'նշանակետ', 'թիրախ'})),
EmojiAnnotations(emoji='🎮', codepoints=(127918,), name='տեսախաղ', slug='տեսախաղ', annotations=frozenset({'խաղ', 'վահանակ'})),
EmojiAnnotations(emoji='\U0001f579', codepoints=(128377,), name='ջոյսթիք', slug='ջոյսթիք', annotations=frozenset({'խաղ', 'տեսախաղ'})),
EmojiAnnotations(emoji='🎲', codepoints=(127922,), name='զառ', slug='զառ', annotations=frozenset({'խաղ'})),
EmojiAnnotations(emoji='♠', codepoints=(9824,), name='ղառ', slug='ղառ', annotations=frozenset({'թղթախաղ', 'խաղ'})),
EmojiAnnotations(emoji='♥', codepoints=(9829,), name='սիրտ', slug='սիրտ', annotations=frozenset({'թղթախաղ', 'խաղ', 'սրտեր'})),
EmojiAnnotations(emoji='♦', codepoints=(9830,), name='քյափ', slug='քյափ', annotations=frozenset({'թղթախաղ', 'խաղ'})),
EmojiAnnotations(emoji='♣', codepoints=(9827,), name='խաչ', slug='խաչ', annotations=frozenset({'թղթախաղ', 'խաղ'})),
EmojiAnnotations(emoji='🃏', codepoints=(127183,), name='ջոկեր', slug='ջոկեր', annotations=frozenset({'թղթախաղ', 'խաղ', 'խաղալ'})),
EmojiAnnotations(emoji='🀄', codepoints=(126980,), name='մաջոնգի կարմիր վիշապ', slug='մաջոնգի_կարմիր_վիշապ', annotations=frozenset({'խաղ', 'մաջոնգ', 'կարմիր'})),
EmojiAnnotations(emoji='🎴', codepoints=(127924,), name='ծաղկի խաղաթղթեր', slug='ծաղկի_խաղաթղթեր', annotations=frozenset({'թղթախաղ', 'ծաղիկ', 'խաղ', 'խաղալ', 'ճապոնական'})),
EmojiAnnotations(emoji='🔇', codepoints=(128263,), name='բարձրախոսն անջատված է', slug='բարձրախոսն_անջատված_է', annotations=frozenset({'լուռ', 'բարձրախոս', 'անջատել ձայնը', 'հանգիստ', 'ձայն'})),
EmojiAnnotations(emoji='🔈', codepoints=(128264,), name='բարձրախոս', slug='բարձրախոս', annotations=frozenset({'ձայնի ուժգնություն', 'ձայն'})),
EmojiAnnotations(emoji='🔉', codepoints=(128265,), name='բարձրախոսը միացված է', slug='բարձրախոսը_միացված_է', annotations=frozenset({'ալիք', 'ցածր', 'բարձրախոս', 'ձայն'})),
EmojiAnnotations(emoji='🔊', codepoints=(128266,), name='բարձրախոսի ձայնը բարձր է', slug='բարձրախոսի_ձայնը_բարձր_է', annotations=frozenset({'բարձր', 'բարձրաձայն', 'երեք', 'ձայն', '3', 'բարձրախոս'})),
EmojiAnnotations(emoji='📢', codepoints=(128226,), name='մեծ բարձրախոս', slug='մեծ_բարձրախոս', annotations=frozenset({'բարձրաձայն', 'հասարակական'})),
EmojiAnnotations(emoji='📯', codepoints=(128239,), name='փոստային եղջյուր', slug='փոստային_եղջյուր', annotations=frozenset({'եղջյուր', 'փոստ', 'փոստային'})),
EmojiAnnotations(emoji='🔕', codepoints=(128277,), name='զանգակ շեղ գծիկով', slug='զանգակ_շեղ_գծիկով', annotations=frozenset({'զանգակ', 'հանգիստ', 'ոչ', 'արգելված', 'լուռ', 'անջատել ձայնը'})),
EmojiAnnotations(emoji='🎼', codepoints=(127932,), name='սոլի բանալի', slug='սոլի_բանալի', annotations=frozenset({'երաժշտություն'})),
EmojiAnnotations(emoji='🎵', codepoints=(127925,), name='նոտա', slug='նոտա', annotations=frozenset({'երաժշտություն'})),
EmojiAnnotations(emoji='🎶', codepoints=(127926,), name='նոտաներ', slug='նոտաներ', annotations=frozenset({'նոտա', 'երաժշտություն'})),
EmojiAnnotations(emoji='\U0001f399', codepoints=(127897,), name='ստուդիայի խոսափող', slug='ստուդիայի_խոսափող', annotations=frozenset({'խոսափող', 'ստուդիա', 'երաժշտություն'})),
EmojiAnnotations(emoji='\U0001f39a', codepoints=(127898,), name='ձայնի բարձրության սահոց', slug='ձայնի_բարձրության_սահոց', annotations=frozenset({'մակարդակ', 'սահոց', 'երաժշտություն'})),
EmojiAnnotations(emoji='\U0001f39b', codepoints=(127899,), name='կառավարման կոճակներ', slug='կառավարման_կոճակներ', annotations=frozenset({'կոճակներ', 'կառավարել', 'երաժշտություն'})),
EmojiAnnotations(emoji='🎤', codepoints=(127908,), name='խոսափող', slug='խոսափող', annotations=frozenset({'կարաոկե'})),
EmojiAnnotations(emoji='🎷', codepoints=(127927,), name='սաքսոֆոն', slug='սաքսոֆոն', annotations=frozenset({'գործիք', 'երաժշտություն'})),
EmojiAnnotations(emoji='🎸', codepoints=(127928,), name='կիթառ', slug='կիթառ', annotations=frozenset({'գործիք', 'երաժշտություն'})),
EmojiAnnotations(emoji='🎹', codepoints=(127929,), name='երաժշտական ստեղնաշար', slug='երաժշտական_ստեղնաշար', annotations=frozenset({'գործիք', 'ստեղնաշար', 'դաշնամուր', 'երաժշտություն'})),
EmojiAnnotations(emoji='🎺', codepoints=(127930,), name='շեփոր', slug='շեփոր', annotations=frozenset({'գործիք', 'երաժշտություն'})),
EmojiAnnotations(emoji='🎻', codepoints=(127931,), name='ջութակ', slug='ջութակ', annotations=frozenset({'գործիք', 'երաժշտություն'})),
EmojiAnnotations(emoji='📱', codepoints=(128241,), name='բջջային հեռախոս', slug='բջջային_հեռախոս', annotations=frozenset({'հեռախոս', 'շարժական', 'բջջային'})),
EmojiAnnotations(emoji='📲', codepoints=(128242,), name='բջջային հեռախոս սլաքով', slug='բջջային_հեռախոս_սլաքով', annotations=frozenset({'հեռախոս', 'զանգել', 'սլաք', 'շարժական', 'բջջային'})),
EmojiAnnotations(emoji='📞', codepoints=(128222,), name='հեռախոսի լսափող', slug='հեռախոսի_լսափող', annotations=frozenset({'լսափող', 'հեռախոս'})),
EmojiAnnotations(emoji='📠', codepoints=(128224,), name='ֆաքսի մեքենա', slug='ֆաքսի_մեքենա', annotations=frozenset({'ֆաքս'})),
EmojiAnnotations(emoji='🔌', codepoints=(128268,), name='էլեկտրական խրոց', slug='էլեկտրական_խրոց', annotations=frozenset({'էլեկտրականություն', 'էլեկտրական', 'խրոց'})),
EmojiAnnotations(emoji='💻', codepoints=(128187,), name='նոթբուք', slug='նոթբուք', annotations=frozenset({'համակարգիչ', 'անձնական'})),
EmojiAnnotations(emoji='\U0001f5a8', codepoints=(128424,), name='տպիչ', slug='տպիչ', annotations=frozenset({'համակարգիչ'})),
EmojiAnnotations(emoji='⌨', codepoints=(9000,), name='ստեղնաշար', slug='ստեղնաշար', annotations=frozenset({'համակարգիչ'})),
EmojiAnnotations(emoji='\U0001f5b1', codepoints=(128433,), name='համակարգչի մկնիկ', slug='համակարգչի_մկնիկ', annotations=frozenset({'համակարգիչ', 'մկնիկ', 'կոճակ', 'երեք', '3'})),
EmojiAnnotations(emoji='\U0001f5b2', codepoints=(128434,), name='թրեքբոլ', slug='թրեքբոլ', annotations=frozenset({'համակարգիչ'})),
EmojiAnnotations(emoji='💽', codepoints=(128189,), name='մինի սկավառակ', slug='մինի_սկավառակ', annotations=frozenset({'համակարգիչ', 'սկավառակ', 'օպտիկական'})),
EmojiAnnotations(emoji='💾', codepoints=(128190,), name='ֆլոպի սկավառակ', slug='ֆլոպի_սկավառակ', annotations=frozenset({'համակարգիչ', 'սկավառակ', 'ֆլոպի'})),
EmojiAnnotations(emoji='💿', codepoints=(128191,), name='օպտիկական սկավառակ', slug='օպտիկական_սկավառակ', annotations=frozenset({'օպտիկական', 'dvd', 'համակարգիչ', 'blu-ray', 'cd', 'սկավառակ'})),
EmojiAnnotations(emoji='📀', codepoints=(128192,), name='dvd', slug='dvd', annotations=frozenset({'համակարգիչ', 'cd', 'սկավառակ', 'օպտիկական', 'blu-ray'})),
EmojiAnnotations(emoji='🎥', codepoints=(127909,), name='ժապավենային տեսախցիկ', slug='ժապավենային_տեսախցիկ', annotations=frozenset({'տեսախցիկ', 'կինո', 'ֆիլմ'})),
EmojiAnnotations(emoji='🎬', codepoints=(127916,), name='կինոդուբլների համարացույց', slug='կինոդուբլների_համարացույց', annotations=frozenset({'ֆիլմ', 'կինոդուբլ'})),
EmojiAnnotations(emoji='\U0001f4fd', codepoints=(128253,), name='ժապավենային պրոյեկտոր', slug='ժապավենային_պրոյեկտոր', annotations=frozenset({'պրոյեկտոր', 'ժապավեն', 'կինո', 'ֆիլմ', 'վիդեո'})),
EmojiAnnotations(emoji='📺', codepoints=(128250,), name='հեռուստացույց', slug='հեռուստացույց', annotations=frozenset({'tv', 'վիդեո'})),
EmojiAnnotations(emoji='📷', codepoints=(128247,), name='ֆոտոապարատ', slug='ֆոտոապարատ', annotations=frozenset({'վիդեո'})),
EmojiAnnotations(emoji='\U0001f4f8', codepoints=(128248,), name='ֆոտոապարատ լուսաթարթիչով', slug='ֆոտոապարատ_լուսաթարթիչով', annotations=frozenset({'լուսաթարթիչ', 'ֆոտոապարատ', 'վիդեո'})),
EmojiAnnotations(emoji='📹', codepoints=(128249,), name='տեսախցիկ', slug='տեսախցիկ', annotations=frozenset({'վիդեո'})),
EmojiAnnotations(emoji='📼', codepoints=(128252,), name='տեսաերիզ', slug='տեսաերիզ', annotations=frozenset({'երիզ', 'vhs', 'վիդեո'})),
EmojiAnnotations(emoji='🔍', codepoints=(128269,), name='ձախ ուղղված խոշորացույց', slug='ձախ_ուղղված_խոշորացույց', annotations=frozenset({'գործիք', 'ապակի', 'խոշորացնող', 'որոնել'})),
EmojiAnnotations(emoji='🔎', codepoints=(128270,), name='աջ ուղղված խոշորացույց', slug='աջ_ուղղված_խոշորացույց', annotations=frozenset({'գործիք', 'ապակի', 'խոշորացնող', 'որոնել'})),
EmojiAnnotations(emoji='🔬', codepoints=(128300,), name='մանրադիտակ', slug='մանրադիտակ', annotations=frozenset({'գործիք'})),
EmojiAnnotations(emoji='🔭', codepoints=(128301,), name='հեռադիտակ', slug='հեռադիտակ', annotations=frozenset({'գործիք'})),
EmojiAnnotations(emoji='📡', codepoints=(128225,), name='արբանյակային ալեհավաք', slug='արբանյակային_ալեհավաք', annotations=frozenset({'ափսե', 'արբանյակ', 'ալեհավաք'})),
EmojiAnnotations(emoji='\U0001f56f', codepoints=(128367,), name='մոմ', slug='մոմ', annotations=frozenset({'լույս'})),
EmojiAnnotations(emoji='💡', codepoints=(128161,), name='էլեկտրական լամպ', slug='էլեկտրական_լամպ', annotations=frozenset({'գաղափար', 'լամպ', 'էլեկտրական', 'լույս', 'կոմիքս'})),
EmojiAnnotations(emoji='🔦', codepoints=(128294,), name='գրպանի լապտեր', slug='գրպանի_լապտեր', annotations=frozenset({'գործիք', 'լապտեր', 'էլեկտրական', 'լույս'})),
EmojiAnnotations(emoji='🏮', codepoints=(127982,), name='թղթե կարմիր լապտեր', slug='թղթե_կարմիր_լապտեր', annotations=frozenset({'լապտեր', 'բար', 'լույս', 'կարմիր', 'ճապոնական'})),
EmojiAnnotations(emoji='📔', codepoints=(128212,), name='ձևավոր կազմով տետր', slug='ձևավոր_կազմով_տետր', annotations=frozenset({'գիրք', 'նոթատետր', 'ձևավորված', 'կազմ'})),
EmojiAnnotations(emoji='📕', codepoints=(128213,), name='փակված գիրք', slug='փակված_գիրք', annotations=frozenset({'գիրք', 'փակված'})),
EmojiAnnotations(emoji='📖', codepoints=(128214,), name='բացված գիրք', slug='բացված_գիրք', annotations=frozenset({'գիրք', 'բացված'})),
EmojiAnnotations(emoji='📗', codepoints=(128215,), name='կանաչ գիրք', slug='կանաչ_գիրք', annotations=frozenset({'գիրք', 'կանաչ'})),
EmojiAnnotations(emoji='📘', codepoints=(128216,), name='կապույտ գիրք', slug='կապույտ_գիրք', annotations=frozenset({'գիրք', 'կապույտ'})),
EmojiAnnotations(emoji='📙', codepoints=(128217,), name='նարնջագույն գիրք', slug='նարնջագույն_գիրք', annotations=frozenset({'գիրք', 'նարնջագույն'})),
EmojiAnnotations(emoji='📚', codepoints=(128218,), name='գրքեր', slug='գրքեր', annotations=frozenset({'գիրք'})),
EmojiAnnotations(emoji='📒', codepoints=(128210,), name='հաշվապահական մատյան', slug='հաշվապահական_մատյան', annotations=frozenset({'նոթատետր'})),
EmojiAnnotations(emoji='📃', codepoints=(128195,), name='կլորացած էջ', slug='կլորացած_էջ', annotations=frozenset({'կլորացած', 'էջ', 'փաստաթուղթ'})),
EmojiAnnotations(emoji='📜', codepoints=(128220,), name='գալարաթուղթ', slug='գալարաթուղթ', annotations=frozenset({'թուղթ'})),
EmojiAnnotations(emoji='📄', codepoints=(128196,), name='էջ', slug='էջ', annotations=frozenset({'փաստաթութղ'})),
EmojiAnnotations(emoji='📰', codepoints=(128240,), name='լրագիր', slug='լրագիր', annotations=frozenset({'նորություններ', 'թերթ'})),
EmojiAnnotations(emoji='\U0001f5de', codepoints=(128478,), name='կլորացրած լրագիր', slug='կլորացրած_լրագիր', annotations=frozenset({'լրագիր', 'կլորացրած', 'նորություններ', 'թերթ'})),
EmojiAnnotations(emoji='📑', codepoints=(128209,), name='էջանիշ ներդիրներ', slug='էջանիշ_ներդիրներ', annotations=frozenset({'նշել', 'էջանիշ', 'ներդիր', 'նշիչ'})),
EmojiAnnotations(emoji='🔖', codepoints=(128278,), name='էջանիշ', slug='էջանիշ', annotations=frozenset({'նշել'})),
EmojiAnnotations(emoji='💰', codepoints=(128176,), name='փողի պարկ', slug='փողի_պարկ', annotations=frozenset({'դոլար', 'փող', 'պարկ'})),
EmojiAnnotations(emoji='💴', codepoints=(128180,), name='իեն թղթադրամ', slug='իեն_թղթադրամ', annotations=frozenset({'բանկ', 'իեն', 'փող', 'տարադրամ', 'թղթադրամ'})),
EmojiAnnotations(emoji='💵', codepoints=(128181,), name='դոլար թղթադրամ', slug='դոլար_թղթադրամ', annotations=frozenset({'բանկ', 'դոլար', 'տարադրամ', 'փող', 'թղթադրամ'})),
EmojiAnnotations(emoji='💶', codepoints=(128182,), name='եվրո թղթադրամ', slug='եվրո_թղթադրամ', annotations=frozenset({'բանկ', 'եվրո', 'փող', 'տարադրամ', 'թղթադրամ'})),
EmojiAnnotations(emoji='💷', codepoints=(128183,), name='ֆունտ թղթադրամ', slug='ֆունտ_թղթադրամ', annotations=frozenset({'բանկ', 'փող', 'տարադրամ', 'ֆունտ', 'թղթադրամ'})),
EmojiAnnotations(emoji='💸', codepoints=(128184,), name='փող թևերով', slug='փող_թևերով', annotations=frozenset({'թղթադրամ', 'դոլար', 'բանկ', 'փող', 'թռչել', 'թևեր'})),
EmojiAnnotations(emoji='💳', codepoints=(128179,), name='պլաստիկ քարտ', slug='պլաստիկ_քարտ', annotations=frozenset({'բանկ', 'վարկ', 'փող', 'քարտ'})),
EmojiAnnotations(emoji='💹', codepoints=(128185,), name='աճող դիագրամ իենով', slug='աճող_դիագրամ_իենով', annotations=frozenset({'իեն', 'վերև', 'միտում', 'բանկ', 'փող', 'տարրադրամ', 'գրաֆիկ', 'շուկա', 'բարձրանալ', 'դիագրամ', 'աճ'})),
EmojiAnnotations(emoji='✉', codepoints=(9993,), name='ծրար', slug='ծրար', annotations=frozenset({'էլփոտ', 'նամակ'})),
EmojiAnnotations(emoji='📧', codepoints=(128231,), name='էլեկտրոնային նամակ', slug='էլեկտրոնային_նամակ', annotations=frozenset({'փոստ', 'նամակ', 'էլփոստ'})),
EmojiAnnotations(emoji='📨', codepoints=(128232,), name='ստացվող ծրար', slug='ստացվող_ծրար', annotations=frozenset({'փոստ', 'ստանալ', 'ծրար', 'նամակ', 'էլփոստ', 'ստացվող'})),
EmojiAnnotations(emoji='📩', codepoints=(128233,), name='ծրար սլաքով', slug='ծրար_սլաքով', annotations=frozenset({'փոստ', 'ուղարկված', 'ծրար', 'նամակ', 'էլփոստ', 'ուղարկվող', 'ներքև', 'սլաք'})),
EmojiAnnotations(emoji='📤', codepoints=(128228,), name='ելքի արկղ', slug='ելքի_արկղ', annotations=frozenset({'դարակ', 'փոստ', 'նամակ', 'ուղարկված', 'արկղ'})),
EmojiAnnotations(emoji='📥', codepoints=(128229,), name='մուտքի արկղ', slug='մուտքի_արկղ', annotations=frozenset({'դարակ', 'փոստ', 'նամակ', 'արկղ', 'ստանալ'})),
EmojiAnnotations(emoji='📦', codepoints=(128230,), name='ծանրոց', slug='ծանրոց', annotations=frozenset({'արկղ'})),
EmojiAnnotations(emoji='📫', codepoints=(128235,), name='փակ փոստարկղ բարձրացված դրոշակով', slug='փակ_փոստարկղ_բարձրացված_դրոշակով', annotations=frozenset({'փոստատուփ', 'փոստ', 'փակ', 'փոստարկղ'})),
EmojiAnnotations(emoji='📪', codepoints=(128234,), name='փակ փոստարկղ իջեցված դրոշակով', slug='փակ_փոստարկղ_իջեցված_դրոշակով', annotations=frozenset({'փոստատուփ', 'փոստ', 'փակ', 'իջեցված', 'փոստարկղ'})),
EmojiAnnotations(emoji='📬', codepoints=(128236,), name='բաց փոստարկղ բարձրացված դրոշակով', slug='բաց_փոստարկղ_բարձրացված_դրոշակով', annotations=frozenset({'բաց', 'փոստատուփ', 'փոստ', 'փոստարկղ'})),
EmojiAnnotations(emoji='📭', codepoints=(128237,), name='բաց փոստարկղ իջեցված դրոշակով', slug='բաց_փոստարկղ_իջեցված_դրոշակով', annotations=frozenset({'բաց', 'փոստատուփ', 'փոստ', 'իջեցված', 'փոստարկղ'})),
EmojiAnnotations(emoji='📮', codepoints=(128238,), name='փոստատուփ', slug='փոստատուփ', annotations=frozenset({'փոստ', 'փոստարկղ'})),
EmojiAnnotations(emoji='\U0001f5f3', codepoints=(128499,), name='քվեատուփ քվեաթերթիկով', slug='քվեատուփ_քվեաթերթիկով', annotations=frozenset({'քվեաթերթիկ', 'տուփ'})),
EmojiAnnotations(emoji='✒', codepoints=(10002,), name='սև գրչածայր', slug='սև_գրչածայր', annotations=frozenset({'գրչածայր', 'գրիչ'})),
EmojiAnnotations(emoji='\U0001f58b', codepoints=(128395,), name='ինքնահոս գրիչ', slug='ինքնահոս_գրիչ', annotations=frozenset({'ինքնահոս', 'գրիչ'})),
EmojiAnnotations(emoji='\U0001f58a', codepoints=(128394,), name='գրիչ', slug='գրիչ', annotations=frozenset({'գնդիկավոր գրիչ'})),
EmojiAnnotations(emoji='\U0001f58c', codepoints=(128396,), name='վրձին', slug='վրձին', annotations=frozenset({'ներկել', 'նկարել'})),
EmojiAnnotations(emoji='\U0001f58d', codepoints=(128397,), name='մոմամատիտ', slug='մոմամատիտ', annotations=frozenset({'գունավոր մատիտ'})),
EmojiAnnotations(emoji='📝', codepoints=(128221,), name='հուշաթերթ', slug='հուշաթերթ', annotations=frozenset({'մատիտ'})),
EmojiAnnotations(emoji='📁', codepoints=(128193,), name='թղթապանակ', slug='թղթապանակ', annotations=frozenset({'ֆայլ'})),
EmojiAnnotations(emoji='📂', codepoints=(128194,), name='բաց թղթապանակ', slug='բաց_թղթապանակ', annotations=frozenset({'բաց', 'թղթապանակ', 'ֆայլ'})),
EmojiAnnotations(emoji='\U0001f5c2', codepoints=(128450,), name='քարտադարանի բաժանարարներ', slug='քարտադարանի_բաժանարարներ', annotations=frozenset({'ինդեքս', 'բաժանարար', 'քարտ'})),
EmojiAnnotations(emoji='📅', codepoints=(128197,), name='օրացույց', slug='օրացույց', annotations=frozenset({'ամսաթիվ'})),
EmojiAnnotations(emoji='📆', codepoints=(128198,), name='պոկովի օրացույց', slug='պոկովի_օրացույց', annotations=frozenset({'օրացույց'})),
EmojiAnnotations(emoji='\U0001f5d2', codepoints=(128466,), name='պարուրավոր նոթատետր', slug='պարուրավոր_նոթատետր', annotations=frozenset({'գրքույկ', 'տետր', 'պարույր'})),
EmojiAnnotations(emoji='\U0001f5d3', codepoints=(128467,), name='պարուրավոր օրացույց', slug='պարուրավոր_օրացույց', annotations=frozenset({'օրացույց', 'գրքույկ', 'պարույր'})),
EmojiAnnotations(emoji='📇', codepoints=(128199,), name='քարտադարան', slug='քարտադարան', annotations=frozenset({'ինդեքս', 'քարտ'})),
EmojiAnnotations(emoji='📈', codepoints=(128200,), name='աճող դիագրամ', slug='աճող_դիագրամ', annotations=frozenset({'գրաֆիկ', 'դիագրամ', 'վեր', 'աճ', 'միտում'})),
EmojiAnnotations(emoji='📉', codepoints=(128201,), name='նվազող դիագրամ', slug='նվազող_դիագրամ', annotations=frozenset({'գրաֆիկ', 'ներքև', 'դիագրամ', 'միտում'})),
EmojiAnnotations(emoji='📊', codepoints=(128202,), name='գոտեձև գծապատկեր', slug='գոտեձև_գծապատկեր', annotations=frozenset({'գոտի', 'գրաֆիկ', 'դիագրամ'})),
EmojiAnnotations(emoji='📍', codepoints=(128205,), name='գնդասեղ', slug='գնդասեղ', annotations=frozenset({'քորոց'})),
EmojiAnnotations(emoji='\U0001f587', codepoints=(128391,), name='միացված սկրեպներ', slug='միացված_սկրեպներ', annotations=frozenset({'միացնել', 'սկրեպ'})),
EmojiAnnotations(emoji='📏', codepoints=(128207,), name='քանոն', slug='քանոն', annotations=frozenset({'ուղղանկյուն'})),
EmojiAnnotations(emoji='📐', codepoints=(128208,), name='եռանկյունի քանոն', slug='եռանկյունի_քանոն', annotations=frozenset({'եռանկյունի', 'քանոն'})),
EmojiAnnotations(emoji='✂', codepoints=(9986,), name='մկրատ', slug='մկրատ', annotations=frozenset({'գործիք'})),
EmojiAnnotations(emoji='\U0001f5c3', codepoints=(128451,), name='քարտադարանի արկղ', slug='քարտադարանի_արկղ', annotations=frozenset({'ֆայլ', 'արկղ', 'քարտ'})),
EmojiAnnotations(emoji='\U0001f5c4', codepoints=(128452,), name='պահարան', slug='պահարան', annotations=frozenset({'ֆայլ'})),
EmojiAnnotations(emoji='🔒', codepoints=(128274,), name='կողպեք', slug='կողպեք', annotations=frozenset({'փակ'})),
EmojiAnnotations(emoji='🔓', codepoints=(128275,), name='բաց կողպեք', slug='բաց_կողպեք', annotations=frozenset({'բաց', 'ապակողպել', 'կողպեք'})),
EmojiAnnotations(emoji='🔏', codepoints=(128271,), name='կողպեք ինքնահոսով', slug='կողպեք_ինքնահոսով', annotations=frozenset({'գրչածայր', 'գաղտնիություն', 'կողպեք', 'թանաք', 'գրիչ'})),
EmojiAnnotations(emoji='🔐', codepoints=(128272,), name='փակ կողպեք բանալիով', slug='փակ_կողպեք_բանալիով', annotations=frozenset({'ապահով', 'փակ', 'բնալի', 'կողպեք'})),
EmojiAnnotations(emoji='🔑', codepoints=(128273,), name='բանալի', slug='բանալի', annotations=frozenset({'գաղտնաբառ', 'կողպեք'})),
EmojiAnnotations(emoji='\U0001f5dd', codepoints=(128477,), name='հին բանալի', slug='հին_բանալի', annotations=frozenset({'հին', 'բանալի', 'կողպեք'})),
EmojiAnnotations(emoji='🔨', codepoints=(128296,), name='մուրճ', slug='մուրճ', annotations=frozenset({'գործիք'})),
EmojiAnnotations(emoji='⛏', codepoints=(9935,), name='քլունգ', slug='քլունգ', annotations=frozenset({'գործիք', 'հանք'})),
EmojiAnnotations(emoji='⚒', codepoints=(9874,), name='մուրճեր', slug='մուրճեր', annotations=frozenset({'գործիք', 'մուրճ'})),
EmojiAnnotations(emoji='\U0001f6e0', codepoints=(128736,), name='մուրճ և պտուտակաբանալի', slug='մուրճ_և_պտուտակաբանալի', annotations=frozenset({'գործիք', 'պտուտակաբանալի', 'մուրճ'})),
EmojiAnnotations(emoji='🔧', codepoints=(128295,), name='պտուտակաբանալի', slug='պտուտակաբանալի', annotations=frozenset({'գործիք'})),
EmojiAnnotations(emoji='🔩', codepoints=(128297,), name='մանեկ ու հեղույս', slug='մանեկ_ու_հեղույս', annotations=frozenset({'մանեկ', 'գործիք', 'հեղույս'})),
EmojiAnnotations(emoji='⚙', codepoints=(9881,), name='ատամնանիվ', slug='ատամնանիվ', annotations=frozenset({'գործիք'})),
EmojiAnnotations(emoji='\U0001f5dc', codepoints=(128476,), name='մամլակ', slug='մամլակ', annotations=frozenset({'մամլիչ'})),
EmojiAnnotations(emoji='⚗', codepoints=(9879,), name='թորիչ', slug='թորիչ', annotations=frozenset({'քիմիա', 'գործիք'})),
EmojiAnnotations(emoji='⚖', codepoints=(9878,), name='նժարավոր կշեռք', slug='նժարավոր_կշեռք', annotations=frozenset({'հավասարակշռություն', 'կշեռք', 'գործիք', 'ծանրություն', 'արդարություն', 'կենդանակերպ'})),
EmojiAnnotations(emoji='⛓', codepoints=(9939,), name='շղթաներ', slug='շղթաներ', annotations=frozenset({'շղթա'})),
EmojiAnnotations(emoji='💉', codepoints=(128137,), name='ներարկիչ', slug='ներարկիչ', annotations=frozenset({'գործիք', 'հիվանդ', 'բժշկություն', 'ասեղ', 'բժիշկ'})),
EmojiAnnotations(emoji='💊', codepoints=(128138,), name='դեղահաբ', slug='դեղահաբ', annotations=frozenset({'հիվանդ', 'բժշկություն', 'բժիշկ'})),
EmojiAnnotations(emoji='\U0001f5e1', codepoints=(128481,), name='դաշույն', slug='դաշույն', annotations=frozenset({'զենք', 'դանակ'})),
EmojiAnnotations(emoji='🔪', codepoints=(128298,), name='խոհանոցային դանակ', slug='խոհանոցային_դանակ', annotations=frozenset({'գործիք', 'եփել', 'խոհարար', 'դանակ', 'զենք'})),
EmojiAnnotations(emoji='⚔', codepoints=(9876,), name='խաչված սրեր', slug='խաչված_սրեր', annotations=frozenset({'սրեր', 'խաչված', 'զենք'})),
EmojiAnnotations(emoji='🔫', codepoints=(128299,), name='ատրճանակ', slug='ատրճանակ', annotations=frozenset({'գործիք', 'զենք'})),
EmojiAnnotations(emoji='\U0001f6e1', codepoints=(128737,), name='վահան', slug='վահան', annotations=frozenset({'զենք'})),
EmojiAnnotations(emoji='\U0001f3f9', codepoints=(127993,), name='նետ ու աղեղ', slug='նետ_ու_աղեղ', annotations=frozenset({'գործիք', 'նետ', 'աղեղնավոր', 'զենք', 'աղեղ'})),
EmojiAnnotations(emoji='🏁', codepoints=(127937,), name='վանդակավոր դրոշ', slug='վանդակավոր_դրոշ', annotations=frozenset({'մրցարշավ', 'վանդակավոր'})),
EmojiAnnotations(emoji='\U0001f3f3', codepoints=(127987,), name='ծածանվող սպիտակ դրոշ', slug='ծածանվող_սպիտակ_դրոշ', annotations=frozenset({'ծածանվող'})),
EmojiAnnotations(emoji='\U0001f3f4', codepoints=(127988,), name='ծածանվող սև դրոշ', slug='ծածանվող_սև_դրոշ', annotations=frozenset({'ծածանվող'})),
EmojiAnnotations(emoji='🚩', codepoints=(128681,), name='եռանկյունի դրոշ', slug='եռանկյունի_դրոշ', annotations=frozenset({'փոստ'})),
EmojiAnnotations(emoji='⚰', codepoints=(9904,), name='դագաղ', slug='դագաղ', annotations=frozenset({'մահ'})),
EmojiAnnotations(emoji='⚱', codepoints=(9905,), name='աճյունասափոր', slug='աճյունասափոր', annotations=frozenset({'հուղարկավորություն', 'մահ'})),
EmojiAnnotations(emoji='🗿', codepoints=(128511,), name='մոաի', slug='մոաի', annotations=frozenset({'դեմք', 'մոայի', 'արձան'})),
EmojiAnnotations(emoji='\U0001f6e2', codepoints=(128738,), name='նավթի տակառ', slug='նավթի_տակառ', annotations=frozenset({'տակառ', 'նավթ'})),
EmojiAnnotations(emoji='🔮', codepoints=(128302,), name='բյուրեղյա գունդ', slug='բյուրեղյա_գունդ', annotations=frozenset({'բյուրեղ', 'բախտ', 'գործիք', 'հեքիաթ', 'ֆանտազիա', 'գունդ'})),
EmojiAnnotations(emoji='🏧', codepoints=(127975,), name='բանկոմատի նշան', slug='բանկոմատի_նշան', annotations=frozenset({'բանկ', 'գանձապահ', 'atm', 'բանկոմատ'})),
EmojiAnnotations(emoji='🚮', codepoints=(128686,), name='աղբամանի նշան', slug='աղբամանի_նշան', annotations=frozenset({'աղբ', 'աղբարկղ'})),
EmojiAnnotations(emoji='🚰', codepoints=(128688,), name='խմելու ջուր', slug='խմելու_ջուր', annotations=frozenset({'խմելու', 'խմել', 'ջուր'})),
EmojiAnnotations(emoji='♿', codepoints=(9855,), name='անվասայլակ', slug='անվասայլակ', annotations=frozenset({'մատչելիություն'})),
EmojiAnnotations(emoji='🚹', codepoints=(128697,), name='տղամարդկանց զուգարան', slug='տղամարդկանց_զուգարան', annotations=frozenset({'wc', 'տղամարդ', 'զուգարան'})),
EmojiAnnotations(emoji='🚺', codepoints=(128698,), name='կանանց զուգարան', slug='կանանց_զուգարան', annotations=frozenset({'wc', 'կին', 'զուգարան'})),
EmojiAnnotations(emoji='🚻', codepoints=(128699,), name='ընդհանուր զուգարան', slug='ընդհանուր_զուգարան', annotations=frozenset({'wc', 'զուգարան'})),
EmojiAnnotations(emoji='🚼', codepoints=(128700,), name='նորածնի նշան', slug='նորածնի_նշան', annotations=frozenset({'նորածին', 'փոխել'})),
EmojiAnnotations(emoji='🚾', codepoints=(128702,), name='զուգարան', slug='զուգարան', annotations=frozenset({'wc', 'ջուր'})),
EmojiAnnotations(emoji='🛂', codepoints=(128706,), name='անձնագրային ստուգում', slug='անձնագրային_ստուգում', annotations=frozenset({'անձնագիր', 'ստուգում'})),
EmojiAnnotations(emoji='🛄', codepoints=(128708,), name='ուղեբեռի վերաբերյալ բողոք', slug='ուղեբեռի_վերաբերյալ_բողոք', annotations=frozenset({'ուղեբեռ', 'բողոք'})),
EmojiAnnotations(emoji='🛅', codepoints=(128709,), name='ուղեբեռ պահախցում', slug='ուղեբեռ_պահախցում', annotations=frozenset({'ուղեբեռ', 'բեռ', 'պահարան'})),
EmojiAnnotations(emoji='🚸', codepoints=(128696,), name='ճանապարհը հատող երեխաներ', slug='ճանապարհը_հատող_երեխաներ', annotations=frozenset({'երեխա', 'երթևեկություն', 'հատող', 'հետիոտն'})),
EmojiAnnotations(emoji='⛔', codepoints=(9940,), name='մուտք չկա', slug='մուտք_չկա', annotations=frozenset({'ոչ', 'արգելված', 'երթևեկություն', 'մուտք'})),
EmojiAnnotations(emoji='🚫', codepoints=(128683,), name='արգելված է', slug='արգելված_է', annotations=frozenset({'ոչ', 'արգելված', 'մուտք'})),
EmojiAnnotations(emoji='🚳', codepoints=(128691,), name='հեծանիվների մուտքն արգելված է', slug='հեծանիվների_մուտքն_արգելված_է', annotations=frozenset({'փոխադրամիջոց', 'արգելված', 'հեծանիվ', 'ոչ'})),
EmojiAnnotations(emoji='🚭', codepoints=(128685,), name='չծխել', slug='չծխել', annotations=frozenset({'ծխել', 'արգելված', 'ոչ'})),
EmojiAnnotations(emoji='🚯', codepoints=(128687,), name='չաղտոտել', slug='չաղտոտել', annotations=frozenset({'աղբ', 'արգելված', 'ոչ'})),
EmojiAnnotations(emoji='🚱', codepoints=(128689,), name='խմելու ջուր չէ', slug='խմելու_ջուր_չէ', annotations=frozenset({'արգելված', 'խմելու', 'խմել', 'ջուր', 'ոչ'})),
EmojiAnnotations(emoji='🚷', codepoints=(128695,), name='հետիոտնների մուտքն արգելված է', slug='հետիոտնների_մուտքն_արգելված_է', annotations=frozenset({'հետիոտն', 'արգելված', 'ոչ'})),
EmojiAnnotations(emoji='⬆', codepoints=(11014,), name='վերև սլաք', slug='վերև_սլաք', annotations=frozenset({'ուղղություն', 'սլաք', 'գլխավոր', 'հյուսիս'})),
EmojiAnnotations(emoji='↗', codepoints=(8599,), name='վերև աջ սլաք', slug='վերև_աջ_սլաք', annotations=frozenset({'ուղղություն', 'սլաք', 'հյուսիս-արևելք'})),
EmojiAnnotations(emoji='➡', codepoints=(10145,), name='աջ սլաք', slug='աջ_սլաք', annotations=frozenset({'ուղղություն', 'սլաք', 'գլխավոր', 'արևելք'})),
EmojiAnnotations(emoji='↘', codepoints=(8600,), name='ներքև աջ սլաք', slug='ներքև_աջ_սլաք', annotations=frozenset({'ուղղություն', 'սլաք', 'հարավ-արևելք'})),
EmojiAnnotations(emoji='⬇', codepoints=(11015,), name='ներքև սլաք', slug='ներքև_սլաք', annotations=frozenset({'ներքև', 'ուղղություն', 'սլաք', 'գլխավոր', 'հարավ'})),
EmojiAnnotations(emoji='↙', codepoints=(8601,), name='ներքև ձախ սլաք', slug='ներքև_ձախ_սլաք', annotations=frozenset({'հարավ-արևմուտք', 'ուղղություն', 'սլաք'})),
EmojiAnnotations(emoji='⬅', codepoints=(11013,), name='ձախ սլաք', slug='ձախ_սլաք', annotations=frozenset({'արևմուտք', 'ուղղություն', 'սլաք', 'գլխավոր'})),
EmojiAnnotations(emoji='↖', codepoints=(8598,), name='վերև ձախ սլաք', slug='վերև_ձախ_սլաք', annotations=frozenset({'ուղղություն', 'սլաք', 'հյուսիս-արևմուտք'})),
EmojiAnnotations(emoji='↕', codepoints=(8597,), name='վերև-ներքև սլաք', slug='վերև_ներքև_սլաք', annotations=frozenset({'սլաք'})),
EmojiAnnotations(emoji='↔', codepoints=(8596,), name='աջ-ձախ սլաք', slug='աջ_ձախ_սլաք', annotations=frozenset({'սլաք'})),
EmojiAnnotations(emoji='↩', codepoints=(8617,), name='աջ շրջադարձի սլաք', slug='աջ_շրջադարձի_սլաք', annotations=frozenset({'սլաք'})),
EmojiAnnotations(emoji='↪', codepoints=(8618,), name='ձախ շրջադարձի սլաք', slug='ձախ_շրջադարձի_սլաք', annotations=frozenset({'սլաք'})),
EmojiAnnotations(emoji='⤴', codepoints=(10548,), name='ձախից վերև թեքվող սլաք', slug='ձախից_վերև_թեքվող_սլաք', annotations=frozenset({'սլաք'})),
EmojiAnnotations(emoji='⤵', codepoints=(10549,), name='ձախից ներքև թեքվող սլաք', slug='ձախից_ներքև_թեքվող_սլաք', annotations=frozenset({'ներքև', 'սլաք'})),
EmojiAnnotations(emoji='🔃', codepoints=(128259,), name='ժամասլաքի ուղղությամբ ուղղահայաց սլաքներ', slug='ժամասլաքի_ուղղությամբ_ուղղահայաց_սլաքներ', annotations=frozenset({'վերաբեռնել', 'ժամասլաքի ուղղությամբ', 'սլաք'})),
EmojiAnnotations(emoji='🔄', codepoints=(128260,), name='ժամասլաքին հակառակ ուղղությամբ սլաքներով կոճակ', slug='ժամասլաքին_հակառակ_ուղղությամբ_սլաքներով_կոճակ', annotations=frozenset({'հակառակ ուղղությամբ', 'սլաք', 'ժամասլաքին հակառակ ուղղությամբ'})),
EmojiAnnotations(emoji='🔙', codepoints=(128281,), name='հետ գրությամբ սլաք', slug='հետ_գրությամբ_սլաք', annotations=frozenset({'հետ', 'սլաք'})),
EmojiAnnotations(emoji='🔚', codepoints=(128282,), name='վերջ գրությամբ սլաք', slug='վերջ_գրությամբ_սլաք', annotations=frozenset({'սլաք', 'վերջ'})),
EmojiAnnotations(emoji='🔛', codepoints=(128283,), name='միացված է գրությամբ սլաք', slug='միացված_է_գրությամբ_սլաք', annotations=frozenset({'նշան', 'սլաք', 'միացված է'})),
EmojiAnnotations(emoji='🔜', codepoints=(128284,), name='շուտով գրությամբ սլաք', slug='շուտով_գրությամբ_սլաք', annotations=frozenset({'սլաք', 'շուտով'})),
EmojiAnnotations(emoji='🔝', codepoints=(128285,), name='վերև գրությամբ սլաք', slug='վերև_գրությամբ_սլաք', annotations=frozenset({'սլաք', 'վերև', 'վեր'})),
EmojiAnnotations(emoji='\U0001f6d0', codepoints=(128720,), name='աղոթատեղի', slug='աղոթատեղի', annotations=frozenset({'պաշտամունք', 'կրոն'})),
EmojiAnnotations(emoji='⚛', codepoints=(9883,), name='ատոմի նշան', slug='ատոմի_նշան', annotations=frozenset({'ատոմ', 'աթեիստ'})),
EmojiAnnotations(emoji='\U0001f549', codepoints=(128329,), name='օմ', slug='օմ', annotations=frozenset({'կրոն', 'հնդիկ'})),
EmojiAnnotations(emoji='✡', codepoints=(10017,), name='դավթի աստղ', slug='դավթի_աստղ', annotations=frozenset({'դավիթ', 'հրեա', 'հրեական', 'կրոն', 'աստղ'})),
EmojiAnnotations(emoji='☸', codepoints=(9784,), name='դհարմայի անիվ', slug='դհարմայի_անիվ', annotations=frozenset({'դհարմա', 'անիվ', 'բուդդիստ', 'կրոն'})),
EmojiAnnotations(emoji='☯', codepoints=(9775,), name='ին և յան', slug='ին_և_յան', annotations=frozenset({'յին', 'դաո', 'դաոսիստ', 'կրոն', 'յան'})),
EmojiAnnotations(emoji='✝', codepoints=(10013,), name='լատինական խաչ', slug='լատինական_խաչ', annotations=frozenset({'քրիստոնյա', 'խաչ', 'կրոն'})),
EmojiAnnotations(emoji='☦', codepoints=(9766,), name='ուղղափառ խաչ', slug='ուղղափառ_խաչ', annotations=frozenset({'քրիստոնյա', 'խաչ', 'կրոն'})),
EmojiAnnotations(emoji='☪', codepoints=(9770,), name='աստղ և մահիկ', slug='աստղ_և_մահիկ', annotations=frozenset({'իսլամ', 'մուսուլման', 'կրոն'})),
EmojiAnnotations(emoji='☮', codepoints=(9774,), name='խաղաղության նշան', slug='խաղաղության_նշան', annotations=frozenset({'խաղաղություն'})),
EmojiAnnotations(emoji='\U0001f54e', codepoints=(128334,), name='մենորա', slug='մենորա', annotations=frozenset({'մոմակալ', 'աշտանակ', 'կրոն'})),
EmojiAnnotations(emoji='🔯', codepoints=(128303,), name='կետիկով վեցթևանի աստղ', slug='կետիկով_վեցթևանի_աստղ', annotations=frozenset({'բախտ', 'աստղ'})),
EmojiAnnotations(emoji='♻', codepoints=(9851,), name='վերամշակման նշան', slug='վերամշակման_նշան', annotations=frozenset({'վերամշակել'})),
EmojiAnnotations(emoji='📛', codepoints=(128219,), name='բեյջ', slug='բեյջ', annotations=frozenset({'անուն'})),
EmojiAnnotations(emoji='🔰', codepoints=(128304,), name='սկսնակ լինելու ճապոնական նշան', slug='սկսնակ_լինելու_ճապոնական_նշան', annotations=frozenset({'հեծանակ', 'սկսնակ', 'գործիք', 'դեղին', 'տերև', 'ճապոնական', 'կանաչ'})),
EmojiAnnotations(emoji='🔱', codepoints=(128305,), name='եռաժանի խորհրդանշան', slug='եռաժանի_խորհրդանշան', annotations=frozenset({'գործիք', 'եռաժանի', 'նավ', 'խարիսխ', 'զինանշան'})),
EmojiAnnotations(emoji='⭕', codepoints=(11093,), name='մեծ թավ շրջան', slug='մեծ_թավ_շրջան', annotations=frozenset({'օ', 'շրջան'})),
EmojiAnnotations(emoji='✅', codepoints=(9989,), name='սպիտակ թավ ստուգանշան', slug='սպիտակ_թավ_ստուգանշան', annotations=frozenset({'նշել', 'ստուգել'})),
EmojiAnnotations(emoji='☑', codepoints=(9745,), name='վանդակ ստուգանշանով', slug='վանդակ_ստուգանշանով', annotations=frozenset({'նշել', 'քվեաթերթիկ', 'տուփ'})),
EmojiAnnotations(emoji='✔', codepoints=(10004,), name='թավ ստուգանշան', slug='թավ_ստուգանշան', annotations=frozenset({'նշել', 'ստուգել'})),
EmojiAnnotations(emoji='✖', codepoints=(10006,), name='բազմապատկման թավ նշան', slug='բազմապատկման_թավ_նշան', annotations=frozenset({'բազմապատկում', 'x', 'բազմապատկել', 'չեղարկել'})),
EmojiAnnotations(emoji='❌', codepoints=(10060,), name='խաչի նշան', slug='խաչի_նշան', annotations=frozenset({'բազմապատկում', 'x', 'նշել', 'բազմապատկել', 'չեղարկել'})),
EmojiAnnotations(emoji='❎', codepoints=(10062,), name='խաչի նշանով կոճակ', slug='խաչի_նշանով_կոճակ', annotations=frozenset({'նշել', 'քառակուսի'})),
EmojiAnnotations(emoji='➕', codepoints=(10133,), name='գումարման թավ նշան', slug='գումարման_թավ_նշան', annotations=frozenset({'պլյուս', 'մաթեմատիկա'})),
EmojiAnnotations(emoji='➖', codepoints=(10134,), name='հանման թավ նշան', slug='հանման_թավ_նշան', annotations=frozenset({'մինուս', 'մաթեմատիկա'})),
EmojiAnnotations(emoji='➗', codepoints=(10135,), name='բաժանման թավ նշան', slug='բաժանման_թավ_նշան', annotations=frozenset({'բաժանում', 'մաթեմատիկա'})),
EmojiAnnotations(emoji='➰', codepoints=(10160,), name='ոլորուն հանգույց', slug='ոլորուն_հանգույց', annotations=frozenset({'ոլորված', 'հանգույց'})),
EmojiAnnotations(emoji='➿', codepoints=(10175,), name='կրկնակի ոլորուն հանգույց', slug='կրկնակի_ոլորուն_հանգույց', annotations=frozenset({'ոլորված', 'կրկնակի', 'հանգույց'})),
EmojiAnnotations(emoji='〽', codepoints=(12349,), name='իորիտեն', slug='իորիտեն', annotations=frozenset({'նշել', 'մաս'})),
EmojiAnnotations(emoji='✳', codepoints=(10035,), name='ութ թևանի աստղանիշ', slug='ութ_թևանի_աստղանիշ', annotations=frozenset({'աստղանիշ'})),
EmojiAnnotations(emoji='✴', codepoints=(10036,), name='աստղիկ', slug='աստղիկ', annotations=frozenset({'աստղ'})),
EmojiAnnotations(emoji='💱', codepoints=(128177,), name='տարադրամի փոխանակում', slug='տարադրամի_փոխանակում', annotations=frozenset({'բանկ', 'փոխանակում', 'փող', 'տարադրամ'})),
EmojiAnnotations(emoji='💲', codepoints=(128178,), name='դոլարի թավ նշան', slug='դոլարի_թավ_նշան', annotations=frozenset({'դոլար', 'տարադրամ', 'փող'})),
EmojiAnnotations(emoji='‼', codepoints=(8252,), name='կրկնակի բացականչական նշան', slug='կրկնակի_բացականչական_նշան', annotations=frozenset({'նշան', 'կետադրություն', 'բացականչություն'})),
EmojiAnnotations(emoji='⁉', codepoints=(8265,), name='բացականչական հարցական նշան', slug='բացականչական_հարցական_նշան', annotations=frozenset({'նշան', 'կետադրություն', 'հարց', 'բացականչություն'})),
EmojiAnnotations(emoji='❓', codepoints=(10067,), name='հարցական նշան', slug='հարցական_նշան', annotations=frozenset({'նշան', 'կետադրություն', 'հարց'})),
EmojiAnnotations(emoji='❔', codepoints=(10068,), name='սպիտակ հարցական նշան', slug='սպիտակ_հարցական_նշան', annotations=frozenset({'նշան', 'կետադրություն', 'հարց', 'ուրվագծված'})),
EmojiAnnotations(emoji='❕', codepoints=(10069,), name='սպիտակ բացականչական նշան', slug='սպիտակ_բացականչական_նշան', annotations=frozenset({'նշան', 'կետադրություն', 'ուրվագծված', 'բացականչություն'})),
EmojiAnnotations(emoji='❗', codepoints=(10071,), name='բացականչական նշան', slug='բացականչական_նշան', annotations=frozenset({'նշան', 'կետադրություն', 'բացականչություն'})),
EmojiAnnotations(emoji='〰', codepoints=(12336,), name='ալիքաձև գծիկ', slug='ալիքաձև_գծիկ', annotations=frozenset({'ալիքաձև', 'կետադրություն', 'գծիկ'})),
EmojiAnnotations(emoji='™', codepoints=(8482,), name='ապրանքանիշ', slug='ապրանքանիշ', annotations=frozenset({'նշան', 'tm'})),
EmojiAnnotations(emoji='♈', codepoints=(9800,), name='խոյ', slug='խոյ', annotations=frozenset({'կենդանակերպ'})),
EmojiAnnotations(emoji='♉', codepoints=(9801,), name='ցուլ', slug='ցուլ', annotations=frozenset({'կենդանակերպ'})),
EmojiAnnotations(emoji='♊', codepoints=(9802,), name='երկվորյակներ', slug='երկվորյակներ', annotations=frozenset({'կենդանակերպ'})),
EmojiAnnotations(emoji='♋', codepoints=(9803,), name='խեցգետին', slug='խեցգետին', annotations=frozenset({'կենդանակերպ'})),
EmojiAnnotations(emoji='♌', codepoints=(9804,), name='առյուծ', slug='առյուծ', annotations=frozenset({'կենդանակերպ'})),
EmojiAnnotations(emoji='♍', codepoints=(9805,), name='կույս', slug='կույս', annotations=frozenset({'օրիորդ', 'կենդանակերպ'})),
EmojiAnnotations(emoji='♎', codepoints=(9806,), name='կշեռք', slug='կշեռք', annotations=frozenset({'արդարադատություն', 'կենդանակերպ', 'հավասարակշռություն'})),
EmojiAnnotations(emoji='♏', codepoints=(9807,), name='կարիճ', slug='կարիճ', annotations=frozenset({'կենդանակերպ'})),
EmojiAnnotations(emoji='♐', codepoints=(9808,), name='աղեղնավոր', slug='աղեղնավոր', annotations=frozenset({'կենդանակերպ'})),
EmojiAnnotations(emoji='♑', codepoints=(9809,), name='այծեղջյուր', slug='այծեղջյուր', annotations=frozenset({'այծ', 'կենդանակերպ'})),
EmojiAnnotations(emoji='♒', codepoints=(9810,), name='ջրհոս', slug='ջրհոս', annotations=frozenset({'կենդանակերպ', 'կրող', 'ջուր'})),
EmojiAnnotations(emoji='♓', codepoints=(9811,), name='ձկներ', slug='ձկներ', annotations=frozenset({'կենդանակերպ', 'ձուկ'})),
EmojiAnnotations(emoji='⛎', codepoints=(9934,), name='օձակիր', slug='օձակիր', annotations=frozenset({'օձ', 'կենդանակերպ', 'կրող'})),
EmojiAnnotations(emoji='🔀', codepoints=(128256,), name='խառնել կատարումները կոճակ', slug='խառնել_կատարումները_կոճակ', annotations=frozenset({'խաչված', 'սլաք'})),
EmojiAnnotations(emoji='🔁', codepoints=(128257,), name='անընդհատ կրկնել կոճակ', slug='անընդհատ_կրկնել_կոճակ', annotations=frozenset({'ժամասլաքի ուղղությամբ', 'սլաք', 'կրկնել'})),
EmojiAnnotations(emoji='🔂', codepoints=(128258,), name='կրկնել մեկ անգամ կոճակ', slug='կրկնել_մեկ_անգամ_կոճակ', annotations=frozenset({'ժամասլաքի ուղղությամբ', 'սլաք', 'մեկ անգամ'})),
EmojiAnnotations(emoji='▶', codepoints=(9654,), name='նվագարկել կոճակ', slug='նվագարկել_կոճակ', annotations=frozenset({'նվագարկել', 'եռանկյուն', 'սլաք', 'աջ'})),
EmojiAnnotations(emoji='⏩', codepoints=(9193,), name='արագ առաջ կոճակ', slug='արագ_առաջ_կոճակ', annotations=frozenset({'արագ', 'սլաք', 'կրկնակի', 'առաջ'})),
EmojiAnnotations(emoji='⏭', codepoints=(9197,), name='հաջորդ կատարումը կոճակ', slug='հաջորդ_կատարումը_կոճակ', annotations=frozenset({'հաջորդ տեսարանը', 'եռանկյուն', 'սլաք', 'հաջորդ կատարումը'})),
EmojiAnnotations(emoji='⏯', codepoints=(9199,), name='նվագարկել կամ դադար կոճակ', slug='նվագարկել_կամ_դադար_կոճակ', annotations=frozenset({'նվագարկել', 'դադար', 'եռանկյուն', 'սլաք', 'աջ'})),
EmojiAnnotations(emoji='◀', codepoints=(9664,), name='հետադարձել կոճակ', slug='հետադարձել_կոճակ', annotations=frozenset({'ձախ', 'եռանկյուն', 'սլաք', 'հետադարձել'})),
EmojiAnnotations(emoji='⏪', codepoints=(9194,), name='արագ հետադարձել կոճակ', slug='արագ_հետադարձել_կոճակ', annotations=frozenset({'սլաք', 'կրկնակի', 'հետադարձել'})),
EmojiAnnotations(emoji='⏮', codepoints=(9198,), name='վերջին կատարումը կոճակ', slug='վերջին_կատարումը_կոճակ', annotations=frozenset({'նախորդ տեսարանը', 'նախորդ կատարումը', 'սլաք', 'եռանկյուն'})),
EmojiAnnotations(emoji='🔼', codepoints=(128316,), name='վերև կոճակ', slug='վերև_կոճակ', annotations=frozenset({'կոճակ', 'կարմիր', 'սլաք'})),
EmojiAnnotations(emoji='⏫', codepoints=(9195,), name='արագ վերև կոճակ', slug='արագ_վերև_կոճակ', annotations=frozenset({'սլաք', 'կրկնակի'})),
EmojiAnnotations(emoji='🔽', codepoints=(128317,), name='ներքև կոճակ', slug='ներքև_կոճակ', annotations=frozenset({'ներքև', 'կոճակ', 'կարմիր', 'սլաք'})),
EmojiAnnotations(emoji='⏬', codepoints=(9196,), name='արագ ներքև կոճակ', slug='արագ_ներքև_կոճակ', annotations=frozenset({'ներքև', 'սլաք', 'կրկնակի'})),
EmojiAnnotations(emoji='\u23f8', codepoints=(9208,), name='դադար կոճակ', slug='դադար_կոճակ', annotations=frozenset({'գծեր', 'դադար', 'կրկնակի', 'ուղղահայաց'})),
EmojiAnnotations(emoji='\u23f9', codepoints=(9209,), name='ստոպ կոճակ', slug='ստոպ_կոճակ', annotations=frozenset({'ստոպ', 'քառակուսի'})),
EmojiAnnotations(emoji='\u23fa', codepoints=(9210,), name='ձայնագրել կոճակ', slug='ձայնագրել_կոճակ', annotations=frozenset({'ձայնագրել', 'շրջան'})),
EmojiAnnotations(emoji='⏏', codepoints=(9167,), name='դուրս հանել կոճակ', slug='դուրս_հանել_կոճակ', annotations=frozenset({'դուրս հանել'})),
EmojiAnnotations(emoji='🎦', codepoints=(127910,), name='կինոմատոգրաֆիա', slug='կինոմատոգրաֆիա', annotations=frozenset({'տեսախցիկ', 'ժապավեն', 'ֆիլմ'})),
EmojiAnnotations(emoji='🔅', codepoints=(128261,), name='մթեցնել կոճակ', slug='մթեցնել_կոճակ', annotations=frozenset({'պայծառություն', 'թույլ', 'մթեցնել'})),
EmojiAnnotations(emoji='🔆', codepoints=(128262,), name='պայծառեցնել կոճակ', slug='պայծառեցնել_կոճակ', annotations=frozenset({'պայծառություն', 'պայծառ'})),
EmojiAnnotations(emoji='📶', codepoints=(128246,), name='անտենայի գծիկներ', slug='անտենայի_գծիկներ', annotations=frozenset({'գծիկ', 'ազդանշան', 'հեռախոս', 'շարժական', 'անտենա', 'բջջային'})),
EmojiAnnotations(emoji='📵', codepoints=(128245,), name='բջջային հեռախոսներն արգելվում են', slug='բջջային_հեռախոսներն_արգելվում_են', annotations=frozenset({'ոչ', 'արգելված', 'հեռախոս', 'շարժական', 'բջջային'})),
EmojiAnnotations(emoji='📳', codepoints=(128243,), name='թրթռազանգի ռեժիմ', slug='թրթռազանգի_ռեժիմ', annotations=frozenset({'ռեժիմ', 'թրթռում', 'հեռախոս', 'շարժական', 'բջջային'})),
EmojiAnnotations(emoji='📴', codepoints=(128244,), name='բջջայինն անջատված է', slug='բջջայինն_անջատված_է', annotations=frozenset({'անջատված', 'հեռախոս', 'շարժական', 'բջջային'})),
EmojiAnnotations(emoji='{#⃣}', codepoints=(123, 35, 8419, 125), name='ստեղն վանդականիշ', slug='ստեղն_վանդականիշ', annotations=frozenset({'ստեղն', 'վանդականիշ', 'ֆունտ'})),
EmojiAnnotations(emoji='{*⃣}', codepoints=(123, 42, 8419, 125), name='ստեղն աստղանիշ', slug='ստեղն_աստղանիշ', annotations=frozenset({'ստեղն', 'աստղանիշ', 'աստղ'})),
EmojiAnnotations(emoji='{0⃣}', codepoints=(123, 48, 8419, 125), name='ստեղն զրո', slug='ստեղն_զրո', annotations=frozenset({'0', 'ստեղն', 'զրո'})),
EmojiAnnotations(emoji='{1⃣}', codepoints=(123, 49, 8419, 125), name='ստեղն մեկ', slug='ստեղն_մեկ', annotations=frozenset({'ստեղն', 'մեկ', '1'})),
EmojiAnnotations(emoji='{2⃣}', codepoints=(123, 50, 8419, 125), name='ստեղն երկու', slug='ստեղն_երկու', annotations=frozenset({'ստեղն', 'երկու', '2'})),
EmojiAnnotations(emoji='{3⃣}', codepoints=(123, 51, 8419, 125), name='ստեղն երեք', slug='ստեղն_երեք', annotations=frozenset({'ստեղն', 'երեք', '3'})),
EmojiAnnotations(emoji='{4⃣}', codepoints=(123, 52, 8419, 125), name='ստեղն չորս', slug='ստեղն_չորս', annotations=frozenset({'4', 'ստեղն', 'չորս'})),
EmojiAnnotations(emoji='{5⃣}', codepoints=(123, 53, 8419, 125), name='ստեղն հինգ', slug='ստեղն_հինգ', annotations=frozenset({'ստեղն', '5', 'հինգ'})),
EmojiAnnotations(emoji='{6⃣}', codepoints=(123, 54, 8419, 125), name='ստեղն վեց', slug='ստեղն_վեց', annotations=frozenset({'ստեղն', 'վեց', '6'})),
EmojiAnnotations(emoji='{7⃣}', codepoints=(123, 55, 8419, 125), name='ստեղն յոթ', slug='ստեղն_յոթ', annotations=frozenset({'7', 'ստեղն', 'յոթ'})),
EmojiAnnotations(emoji='{8⃣}', codepoints=(123, 56, 8419, 125), name='ստեղն ութ', slug='ստեղն_ութ', annotations=frozenset({'8', 'ստեղն', 'ութ'})),
EmojiAnnotations(emoji='{9⃣}', codepoints=(123, 57, 8419, 125), name='ստեղն ինը', slug='ստեղն_ինը', annotations=frozenset({'ստեղն', 'ինը', '9'})),
EmojiAnnotations(emoji='🔟', codepoints=(128287,), name='ստեղն տասը', slug='ստեղն_տասը', annotations=frozenset({'ստեղն', '10', 'տասը'})),
EmojiAnnotations(emoji='💯', codepoints=(128175,), name='հարյուր միավոր', slug='հարյուր_միավոր', annotations=frozenset({'հարյուր', 'միավոր', '100', 'ամբողջ'})),
EmojiAnnotations(emoji='🔞', codepoints=(128286,), name='տասնութից ցածր արգելվում է', slug='տասնութից_ցածր_արգելվում_է', annotations=frozenset({'18', 'ոչ', 'արգելված', 'տարիքային սահմանափակում', 'անչափահաս', 'տասնութ'})),
EmojiAnnotations(emoji='🔠', codepoints=(128288,), name='լատինատառ մեծատառ ներածում', slug='լատինատառ_մեծատառ_ներածում', annotations=frozenset({'տառեր', 'մուտքագրել', 'լատինական', 'մեծատառ'})),
EmojiAnnotations(emoji='🔡', codepoints=(128289,), name='լատինատառ փոքրատառ ներածում', slug='լատինատառ_փոքրատառ_ներածում', annotations=frozenset({'տառեր', 'մուտքագրել', 'abcd', 'լատինական', 'փոքրատառ'})),
EmojiAnnotations(emoji='🔢', codepoints=(128290,), name='թվերի ներածում', slug='թվերի_ներածում', annotations=frozenset({'մուտքագրել', '1234', 'թվեր'})),
EmojiAnnotations(emoji='🔣', codepoints=(128291,), name='նշանների ներածում', slug='նշանների_ներածում', annotations=frozenset({'մուտքագրել'})),
EmojiAnnotations(emoji='🔤', codepoints=(128292,), name='լատինատառ ներածում', slug='լատինատառ_ներածում', annotations=frozenset({'abc', 'այբուբեն', 'տառեր', 'մուտքագրել', 'լատինական'})),
EmojiAnnotations(emoji='🅰', codepoints=(127344,), name='էյ կոճակ', slug='էյ_կոճակ', annotations=frozenset({'a', 'արյուն'})),
EmojiAnnotations(emoji='🆎', codepoints=(127374,), name='էյ-բի կոճակ', slug='էյ_բի_կոճակ', annotations=frozenset({'արյուն', 'ab'})),
EmojiAnnotations(emoji='🅱', codepoints=(127345,), name='բի կոճակ', slug='բի_կոճակ', annotations=frozenset({'b', 'արյուն'})),
EmojiAnnotations(emoji='🆑', codepoints=(127377,), name='սի-էլ', slug='սի_էլ', annotations=frozenset({'cl'})),
EmojiAnnotations(emoji='ℹ', codepoints=(8505,), name='տեղեկատու', slug='տեղեկատու', annotations=frozenset({'i', 'տեղեկատվություն'})),
EmojiAnnotations(emoji='🆔', codepoints=(127380,), name='այ-դի', slug='այ_դի', annotations=frozenset({'ինքնություն', 'id'})),
EmojiAnnotations(emoji='Ⓜ', codepoints=(9410,), name='էմ տառը շրջանակի մեջ', slug='էմ_տառը_շրջանակի_մեջ', annotations=frozenset({'m', 'շրջան'})),
EmojiAnnotations(emoji='🆖', codepoints=(127382,), name='էն-ջի', slug='էն_ջի', annotations=frozenset({'ng'})),
EmojiAnnotations(emoji='🅾', codepoints=(127358,), name='օ կոճակ', slug='օ_կոճակ', annotations=frozenset({'o', 'արյուն'})),
EmojiAnnotations(emoji='🆗', codepoints=(127383,), name='օքեյ', slug='օքեյ', annotations=frozenset({'ok'})),
EmojiAnnotations(emoji='🅿', codepoints=(127359,), name='փի կոճակ', slug='փի_կոճակ', annotations=frozenset({'կայանատեղի'})),
EmojiAnnotations(emoji='🆘', codepoints=(127384,), name='սոս', slug='սոս', annotations=frozenset({'օգնել', 'sos'})),
EmojiAnnotations(emoji='🆙', codepoints=(127385,), name='ափ կոճակ', slug='ափ_կոճակ', annotations=frozenset({'նշան', 'up', 'վեր'})),
EmojiAnnotations(emoji='🆚', codepoints=(127386,), name='վի-էս', slug='վի_էս', annotations=frozenset({'ընդդեմ', 'vs'})),
EmojiAnnotations(emoji='🈁', codepoints=(127489,), name='կատականա կոկո', slug='կատականա_կոկո', annotations=frozenset({'ճապոնական', 'ճապոներեն'})),
EmojiAnnotations(emoji='🈂', codepoints=(127490,), name='կատականա սա', slug='կատականա_սա', annotations=frozenset({'ճապոնական', 'ճապոներեն'})),
EmojiAnnotations(emoji='🈷', codepoints=(127543,), name='լուսին գաղափարագիր', slug='լուսին_գաղափարագիր', annotations=frozenset({'ճապոնական', 'ճապոներեն'})),
EmojiAnnotations(emoji='🈶', codepoints=(127542,), name='գոյ գաղափարագիր', slug='գոյ_գաղափարագիր', annotations=frozenset({'ճապոնական', 'ճապոներեն'})),
EmojiAnnotations(emoji='🈯', codepoints=(127535,), name='մատ գաղափարագիր', slug='մատ_գաղափարագիր', annotations=frozenset({'ճապոնական', 'ճապոներեն'})),
EmojiAnnotations(emoji='🉐', codepoints=(127568,), name='առավելություն գաղափարագիր շրջանակի մեջ', slug='առավելություն_գաղափարագիր_շրջանակի_մեջ', annotations=frozenset({'ճապոնական', 'ճապոներեն'})),
EmojiAnnotations(emoji='🈹', codepoints=(127545,), name='բաժանել գաղափարագիր', slug='բաժանել_գաղափարագիր', annotations=frozenset({'ճապոնական', 'ճապոներեն'})),
EmojiAnnotations(emoji='🈚', codepoints=(127514,), name='ժխտում գաղափարագիր', slug='ժխտում_գաղափարագիր', annotations=frozenset({'ճապոնական', 'ճապոներեն'})),
EmojiAnnotations(emoji='🈲', codepoints=(127538,), name='արգելել գաղափարագիր', slug='արգելել_գաղափարագիր', annotations=frozenset({'ճապոնական', 'ճապոներեն'})),
EmojiAnnotations(emoji='🉑', codepoints=(127569,), name='ընդունել գաղափարագիր շրջանակի մեջ', slug='ընդունել_գաղափարագիր_շրջանակի_մեջ', annotations=frozenset({'չինարեն', 'չինական'})),
EmojiAnnotations(emoji='🈸', codepoints=(127544,), name='կիրառել գաղափարագիր', slug='կիրառել_գաղափարագիր', annotations=frozenset({'չինարեն', 'չինական'})),
EmojiAnnotations(emoji='🈴', codepoints=(127540,), name='միասին գաղափարագիր', slug='միասին_գաղափարագիր', annotations=frozenset({'չինարեն', 'չինական'})),
EmojiAnnotations(emoji='🈳', codepoints=(127539,), name='դատարկ գաղափարագիր', slug='դատարկ_գաղափարագիր', annotations=frozenset({'չինարեն', 'չինական'})),
EmojiAnnotations(emoji='㊗', codepoints=(12951,), name='շնորհավորել գաղափարագիր շրջանակի մեջ', slug='շնորհավորել_գաղափարագիր_շրջանակի_մեջ', annotations=frozenset({'շնորհավորանք', 'չինարեն', 'գաղափարագիր', 'չինական'})),
EmojiAnnotations(emoji='㊙', codepoints=(12953,), name='գաղտնի գաղափարագիր շրջանակի մեջ', slug='գաղտնի_գաղափարագիր_շրջանակի__մեջ', annotations=frozenset({'գաղափարագիր', 'չինարեն', 'գաղտնիք', 'չինական'})),
EmojiAnnotations(emoji='🈺', codepoints=(127546,), name='աշխատում է գաղափարագիր', slug='աշխատում_է_գաղափարագիր', annotations=frozenset({'չինարեն', 'չինական'})),
EmojiAnnotations(emoji='🈵', codepoints=(127541,), name='լիություն գաղափարագիր', slug='լիություն_գաղափարագիր', annotations=frozenset({'չինարեն', 'չինական'})),
EmojiAnnotations(emoji='▪', codepoints=(9642,), name='սև փոքր քառակուսի', slug='սև_փոքր_քառակուսի', annotations=frozenset({'երկրաչափական', 'քառակուսի'})),
EmojiAnnotations(emoji='▫', codepoints=(9643,), name='սպիտակ փոքր քառակուսի', slug='սպիտակ_փոքր_քառակուսի', annotations=frozenset({'երկրաչափական', 'քառակուսի'})),
EmojiAnnotations(emoji='◻', codepoints=(9723,), name='սպիտակ միջին չափի քառակուսի', slug='սպիտակ_միջին_չափի_քառակուսի', annotations=frozenset({'երկրաչափական', 'քառակուսի'})),
EmojiAnnotations(emoji='◼', codepoints=(9724,), name='սև միջին չափի քառակուսի', slug='սև_միջին_չափի_քառակուսի', annotations=frozenset({'երկրաչափական', 'քառակուսի'})),
EmojiAnnotations(emoji='◽', codepoints=(9725,), name='սպիտակ միջին-փոքր քառակուսի', slug='սպիտակ_միջին_փոքր_քառակուսի', annotations=frozenset({'երկրաչափական', 'քառակուսի'})),
EmojiAnnotations(emoji='◾', codepoints=(9726,), name='սև միջին-փոքր քառակուսի', slug='սև_միջին_փոքր_քառակուսի', annotations=frozenset({'երկրաչափական', 'քառակուսի'})),
EmojiAnnotations(emoji='⬛', codepoints=(11035,), name='սև մեծ քառակուսի', slug='սև_մեծ_քառակուսի', annotations=frozenset({'երկրաչափական', 'քառակուսի'})),
EmojiAnnotations(emoji='⬜', codepoints=(11036,), name='սպիտակ մեծ քառակուսի', slug='սպիտակ_մեծ_քառակուսի', annotations=frozenset({'երկրաչափական', 'քառակուսի'})),
EmojiAnnotations(emoji='🔶', codepoints=(128310,), name='նարնջագույն մեծ շեղանկյուն', slug='նարնջագույն_մեծ_շեղանկյուն', annotations=frozenset({'երկրաչափական', 'շեղանկյուն', 'նարնջագույն'})),
EmojiAnnotations(emoji='🔷', codepoints=(128311,), name='կապույտ մեծ շեղանկյուն', slug='կապույտ_մեծ_շեղանկյուն', annotations=frozenset({'կապույտ', 'երկրաչափական', 'շեղանկյուն'})),
EmojiAnnotations(emoji='🔸', codepoints=(128312,), name='նարնջագույն փոքր շեղանկյուն', slug='նարնջագույն_փոքր_շեղանկյուն', annotations=frozenset({'երկրաչափական', 'շեղանկյուն', 'նարնջագույն'})),
EmojiAnnotations(emoji='🔹', codepoints=(128313,), name='կապույտ փոքր շեղանկյուն', slug='կապույտ_փոքր_շեղանկյուն', annotations=frozenset({'կապույտ', 'երկրաչափական', 'շեղանկյուն'})),
EmojiAnnotations(emoji='🔺', codepoints=(128314,), name='կարմիր եռանկյունի ուղղված վերև', slug='կարմիր_եռանկյունի_ուղղված_վերև', annotations=frozenset({'երկրաչափական', 'կարմիր'})),
EmojiAnnotations(emoji='🔻', codepoints=(128315,), name='կարմիր եռանկյունի ուղղված ներքև', slug='կարմիր_եռանկյունի_ուղղված_ներքև', annotations=frozenset({'ներքև', 'երկրաչափական', 'կարմիր'})),
EmojiAnnotations(emoji='💠', codepoints=(128160,), name='կետով շեղանկյուն', slug='կետով_շեղանկյուն', annotations=frozenset({'երկրաչափական', 'կոմիքս', 'շեղանկյուն', 'ներսում'})),
EmojiAnnotations(emoji='🔘', codepoints=(128280,), name='կետակոճակ', slug='կետակոճակ', annotations=frozenset({'կետ', 'կոճակ', 'երկրաչափական', 'ռադիո'})),
EmojiAnnotations(emoji='🔲', codepoints=(128306,), name='սև քառակուսի կոճակ', slug='սև_քառակուսի_կոճակ', annotations=frozenset({'կոճակ', 'երկրաչափական', 'քառակուսի'})),
EmojiAnnotations(emoji='🔳', codepoints=(128307,), name='սպիտակ քառակուսի կոճակ', slug='սպիտակ_քառակուսի_կոճակ', annotations=frozenset({'կոճակ', 'երկրաչափական', 'ուրվագծված', 'քառակուսի'})),
EmojiAnnotations(emoji='⚪', codepoints=(9898,), name='սպիտակ շրջանակ', slug='սպիտակ_շրջանակ', annotations=frozenset({'երկրաչափական', 'շրջան'})),
EmojiAnnotations(emoji='⚫', codepoints=(9899,), name='սև շրջանակ', slug='սև_շրջանակ', annotations=frozenset({'երկրաչափական', 'շրջան'})),
EmojiAnnotations(emoji='🔴', codepoints=(128308,), name='կարմիր շրջանակ', slug='կարմիր_շրջանակ', annotations=frozenset({'երկրաչափական', 'կարմիր', 'շրջան'})),
EmojiAnnotations(emoji='🔵', codepoints=(128309,), name='կապույտ շրջանակ', slug='կապույտ_շրջանակ', annotations=frozenset({'կապույտ', 'երկրաչափական', 'շրջան'})),] | true | true |
f72af208934e1a6893d8de9bece97fef4e04f823 | 68,824 | py | Python | spec/API_specification/array_api/elementwise_functions.py | oleksandr-pavlyk/array-api | 34aa9251bec8e53d8e7f4330f0b2b6221b3f6dcb | [
"MIT"
] | null | null | null | spec/API_specification/array_api/elementwise_functions.py | oleksandr-pavlyk/array-api | 34aa9251bec8e53d8e7f4330f0b2b6221b3f6dcb | [
"MIT"
] | null | null | null | spec/API_specification/array_api/elementwise_functions.py | oleksandr-pavlyk/array-api | 34aa9251bec8e53d8e7f4330f0b2b6221b3f6dcb | [
"MIT"
] | null | null | null | from ._types import array
def abs(x: array, /) -> array:
    """
    Calculates the absolute value for each element ``x_i`` of the input array ``x`` (i.e., the element-wise result has the same magnitude as the respective element in ``x`` but has positive sign).

    .. note::
       For signed integer data types, the absolute value of the minimum representable integer is implementation-dependent.

    **Special Cases**

    For floating-point operands,

    - If ``x_i`` is ``NaN``, the result is ``NaN``.
    - If ``x_i`` is ``-0``, the result is ``+0``.
    - If ``x_i`` is ``-infinity``, the result is ``+infinity``.

    Parameters
    ----------
    x: array
        input array. Should have a real-valued data type.

    Returns
    -------
    out: array
        an array containing the absolute value of each element in ``x``. The returned array must have the same data type as ``x``.
    """
def acos(x: array, /) -> array:
    """
    Calculates an implementation-dependent approximation of the principal value of the inverse cosine, having domain ``[-1, +1]`` and codomain ``[+0, +π]``, for each element ``x_i`` of the input array ``x``. Each element-wise result is expressed in radians.

    **Special cases**

    For floating-point operands,

    - If ``x_i`` is ``NaN``, the result is ``NaN``.
    - If ``x_i`` is greater than ``1``, the result is ``NaN``.
    - If ``x_i`` is less than ``-1``, the result is ``NaN``.
    - If ``x_i`` is ``1``, the result is ``+0``.

    Parameters
    ----------
    x: array
        input array. Should have a real-valued floating-point data type.

    Returns
    -------
    out: array
        an array containing the inverse cosine of each element in ``x``. The returned array must have a real-valued floating-point data type determined by :ref:`type-promotion`.
    """
def acosh(x: array, /) -> array:
    """
    Calculates an implementation-dependent approximation to the inverse hyperbolic cosine, having domain ``[+1, +infinity]`` and codomain ``[+0, +infinity]``, for each element ``x_i`` of the input array ``x``.

    **Special cases**

    For floating-point operands,

    - If ``x_i`` is ``NaN``, the result is ``NaN``.
    - If ``x_i`` is less than ``1``, the result is ``NaN``.
    - If ``x_i`` is ``1``, the result is ``+0``.
    - If ``x_i`` is ``+infinity``, the result is ``+infinity``.

    Parameters
    ----------
    x: array
        input array whose elements each represent the area of a hyperbolic sector. Should have a real-valued floating-point data type.

    Returns
    -------
    out: array
        an array containing the inverse hyperbolic cosine of each element in ``x``. The returned array must have a real-valued floating-point data type determined by :ref:`type-promotion`.
    """
def add(x1: array, x2: array, /) -> array:
    """
    Calculates the sum for each element ``x1_i`` of the input array ``x1`` with the respective element ``x2_i`` of the input array ``x2``.

    **Special cases**

    For floating-point operands,

    - If either ``x1_i`` or ``x2_i`` is ``NaN``, the result is ``NaN``.
    - If ``x1_i`` is ``+infinity`` and ``x2_i`` is ``-infinity``, the result is ``NaN``.
    - If ``x1_i`` is ``-infinity`` and ``x2_i`` is ``+infinity``, the result is ``NaN``.
    - If ``x1_i`` is ``+infinity`` and ``x2_i`` is ``+infinity``, the result is ``+infinity``.
    - If ``x1_i`` is ``-infinity`` and ``x2_i`` is ``-infinity``, the result is ``-infinity``.
    - If ``x1_i`` is ``+infinity`` and ``x2_i`` is a finite number, the result is ``+infinity``.
    - If ``x1_i`` is ``-infinity`` and ``x2_i`` is a finite number, the result is ``-infinity``.
    - If ``x1_i`` is a finite number and ``x2_i`` is ``+infinity``, the result is ``+infinity``.
    - If ``x1_i`` is a finite number and ``x2_i`` is ``-infinity``, the result is ``-infinity``.
    - If ``x1_i`` is ``-0`` and ``x2_i`` is ``-0``, the result is ``-0``.
    - If ``x1_i`` is ``-0`` and ``x2_i`` is ``+0``, the result is ``+0``.
    - If ``x1_i`` is ``+0`` and ``x2_i`` is ``-0``, the result is ``+0``.
    - If ``x1_i`` is ``+0`` and ``x2_i`` is ``+0``, the result is ``+0``.
    - If ``x1_i`` is either ``+0`` or ``-0`` and ``x2_i`` is a nonzero finite number, the result is ``x2_i``.
    - If ``x1_i`` is a nonzero finite number and ``x2_i`` is either ``+0`` or ``-0``, the result is ``x1_i``.
    - If ``x1_i`` is a nonzero finite number and ``x2_i`` is ``-x1_i``, the result is ``+0``.
    - In the remaining cases, when neither ``infinity``, ``+0``, ``-0``, nor a ``NaN`` is involved, and the operands have the same mathematical sign or have different magnitudes, the sum must be computed and rounded to the nearest representable value according to IEEE 754-2019 and a supported round mode. If the magnitude is too large to represent, the operation overflows and the result is an `infinity` of appropriate mathematical sign.

    .. note::
       Floating-point addition is a commutative operation, but not always associative.

    Parameters
    ----------
    x1: array
        first input array. Should have a real-valued data type.
    x2: array
        second input array. Must be compatible with ``x1`` (see :ref:`broadcasting`). Should have a real-valued data type.

    Returns
    -------
    out: array
        an array containing the element-wise sums. The returned array must have a data type determined by :ref:`type-promotion`.
    """
def asin(x: array, /) -> array:
    """
    Calculates an implementation-dependent approximation of the principal value of the inverse sine, having domain ``[-1, +1]`` and codomain ``[-π/2, +π/2]`` for each element ``x_i`` of the input array ``x``. Each element-wise result is expressed in radians.

    **Special cases**

    For floating-point operands,

    - If ``x_i`` is ``NaN``, the result is ``NaN``.
    - If ``x_i`` is greater than ``1``, the result is ``NaN``.
    - If ``x_i`` is less than ``-1``, the result is ``NaN``.
    - If ``x_i`` is ``+0``, the result is ``+0``.
    - If ``x_i`` is ``-0``, the result is ``-0``.

    Parameters
    ----------
    x: array
        input array. Should have a real-valued floating-point data type.

    Returns
    -------
    out: array
        an array containing the inverse sine of each element in ``x``. The returned array must have a real-valued floating-point data type determined by :ref:`type-promotion`.
    """
def asinh(x: array, /) -> array:
    """
    Calculates an implementation-dependent approximation to the inverse hyperbolic sine, having domain ``[-infinity, +infinity]`` and codomain ``[-infinity, +infinity]``, for each element ``x_i`` in the input array ``x``.

    **Special cases**

    For floating-point operands,

    - If ``x_i`` is ``NaN``, the result is ``NaN``.
    - If ``x_i`` is ``+0``, the result is ``+0``.
    - If ``x_i`` is ``-0``, the result is ``-0``.
    - If ``x_i`` is ``+infinity``, the result is ``+infinity``.
    - If ``x_i`` is ``-infinity``, the result is ``-infinity``.

    Parameters
    ----------
    x: array
        input array whose elements each represent the area of a hyperbolic sector. Should have a real-valued floating-point data type.

    Returns
    -------
    out: array
        an array containing the inverse hyperbolic sine of each element in ``x``. The returned array must have a real-valued floating-point data type determined by :ref:`type-promotion`.
    """
def atan(x: array, /) -> array:
    """
    Calculates an implementation-dependent approximation of the principal value of the inverse tangent, having domain ``[-infinity, +infinity]`` and codomain ``[-π/2, +π/2]``, for each element ``x_i`` of the input array ``x``. Each element-wise result is expressed in radians.

    **Special cases**

    For floating-point operands,

    - If ``x_i`` is ``NaN``, the result is ``NaN``.
    - If ``x_i`` is ``+0``, the result is ``+0``.
    - If ``x_i`` is ``-0``, the result is ``-0``.
    - If ``x_i`` is ``+infinity``, the result is an implementation-dependent approximation to ``+π/2``.
    - If ``x_i`` is ``-infinity``, the result is an implementation-dependent approximation to ``-π/2``.

    Parameters
    ----------
    x: array
        input array. Should have a real-valued floating-point data type.

    Returns
    -------
    out: array
        an array containing the inverse tangent of each element in ``x``. The returned array must have a real-valued floating-point data type determined by :ref:`type-promotion`.
    """
def atan2(x1: array, x2: array, /) -> array:
    """
    Calculates an implementation-dependent approximation of the inverse tangent of the quotient ``x1/x2``, having domain ``[-infinity, +infinity] x [-infinity, +infinity]`` (where the ``x`` notation denotes the set of ordered pairs of elements ``(x1_i, x2_i)``) and codomain ``[-π, +π]``, for each pair of elements ``(x1_i, x2_i)`` of the input arrays ``x1`` and ``x2``, respectively. Each element-wise result is expressed in radians.

    The mathematical signs of ``x1_i`` and ``x2_i`` determine the quadrant of each element-wise result. The quadrant (i.e., branch) is chosen such that each element-wise result is the signed angle in radians between the ray ending at the origin and passing through the point ``(1,0)`` and the ray ending at the origin and passing through the point ``(x2_i, x1_i)``.

    .. note::
       Note the role reversal: the "y-coordinate" is the first function parameter; the "x-coordinate" is the second function parameter. The parameter order is intentional and traditional for the two-argument inverse tangent function where the y-coordinate argument is first and the x-coordinate argument is second.

       By IEEE 754 convention, the inverse tangent of the quotient ``x1/x2`` is defined for ``x2_i`` equal to positive or negative zero and for either or both of ``x1_i`` and ``x2_i`` equal to positive or negative ``infinity``.

    **Special cases**

    For floating-point operands,

    - If either ``x1_i`` or ``x2_i`` is ``NaN``, the result is ``NaN``.
    - If ``x1_i`` is greater than ``0`` and ``x2_i`` is ``+0``, the result is an implementation-dependent approximation to ``+π/2``.
    - If ``x1_i`` is greater than ``0`` and ``x2_i`` is ``-0``, the result is an implementation-dependent approximation to ``+π/2``.
    - If ``x1_i`` is ``+0`` and ``x2_i`` is greater than ``0``, the result is ``+0``.
    - If ``x1_i`` is ``+0`` and ``x2_i`` is ``+0``, the result is ``+0``.
    - If ``x1_i`` is ``+0`` and ``x2_i`` is ``-0``, the result is an implementation-dependent approximation to ``+π``.
    - If ``x1_i`` is ``+0`` and ``x2_i`` is less than ``0``, the result is an implementation-dependent approximation to ``+π``.
    - If ``x1_i`` is ``-0`` and ``x2_i`` is greater than ``0``, the result is ``-0``.
    - If ``x1_i`` is ``-0`` and ``x2_i`` is ``+0``, the result is ``-0``.
    - If ``x1_i`` is ``-0`` and ``x2_i`` is ``-0``, the result is an implementation-dependent approximation to ``-π``.
    - If ``x1_i`` is ``-0`` and ``x2_i`` is less than ``0``, the result is an implementation-dependent approximation to ``-π``.
    - If ``x1_i`` is less than ``0`` and ``x2_i`` is ``+0``, the result is an implementation-dependent approximation to ``-π/2``.
    - If ``x1_i`` is less than ``0`` and ``x2_i`` is ``-0``, the result is an implementation-dependent approximation to ``-π/2``.
    - If ``x1_i`` is greater than ``0``, ``x1_i`` is a finite number, and ``x2_i`` is ``+infinity``, the result is ``+0``.
    - If ``x1_i`` is greater than ``0``, ``x1_i`` is a finite number, and ``x2_i`` is ``-infinity``, the result is an implementation-dependent approximation to ``+π``.
    - If ``x1_i`` is less than ``0``, ``x1_i`` is a finite number, and ``x2_i`` is ``+infinity``, the result is ``-0``.
    - If ``x1_i`` is less than ``0``, ``x1_i`` is a finite number, and ``x2_i`` is ``-infinity``, the result is an implementation-dependent approximation to ``-π``.
    - If ``x1_i`` is ``+infinity`` and ``x2_i`` is finite, the result is an implementation-dependent approximation to ``+π/2``.
    - If ``x1_i`` is ``-infinity`` and ``x2_i`` is finite, the result is an implementation-dependent approximation to ``-π/2``.
    - If ``x1_i`` is ``+infinity`` and ``x2_i`` is ``+infinity``, the result is an implementation-dependent approximation to ``+π/4``.
    - If ``x1_i`` is ``+infinity`` and ``x2_i`` is ``-infinity``, the result is an implementation-dependent approximation to ``+3π/4``.
    - If ``x1_i`` is ``-infinity`` and ``x2_i`` is ``+infinity``, the result is an implementation-dependent approximation to ``-π/4``.
    - If ``x1_i`` is ``-infinity`` and ``x2_i`` is ``-infinity``, the result is an implementation-dependent approximation to ``-3π/4``.

    Parameters
    ----------
    x1: array
        input array corresponding to the y-coordinates. Should have a real-valued floating-point data type.
    x2: array
        input array corresponding to the x-coordinates. Must be compatible with ``x1`` (see :ref:`broadcasting`). Should have a real-valued floating-point data type.

    Returns
    -------
    out: array
        an array containing the inverse tangent of the quotient ``x1/x2``. The returned array must have a real-valued floating-point data type determined by :ref:`type-promotion`.
    """
def atanh(x: array, /) -> array:
    """
    Calculates an implementation-dependent approximation to the inverse hyperbolic tangent, having domain ``[-1, +1]`` and codomain ``[-infinity, +infinity]``, for each element ``x_i`` of the input array ``x``.

    **Special cases**

    For floating-point operands,

    - If ``x_i`` is ``NaN``, the result is ``NaN``.
    - If ``x_i`` is less than ``-1``, the result is ``NaN``.
    - If ``x_i`` is greater than ``1``, the result is ``NaN``.
    - If ``x_i`` is ``-1``, the result is ``-infinity``.
    - If ``x_i`` is ``+1``, the result is ``+infinity``.
    - If ``x_i`` is ``+0``, the result is ``+0``.
    - If ``x_i`` is ``-0``, the result is ``-0``.

    Parameters
    ----------
    x: array
        input array whose elements each represent the area of a hyperbolic sector. Should have a real-valued floating-point data type.

    Returns
    -------
    out: array
        an array containing the inverse hyperbolic tangent of each element in ``x``. The returned array must have a real-valued floating-point data type determined by :ref:`type-promotion`.
    """
def bitwise_and(x1: array, x2: array, /) -> array:
    """
    Computes the bitwise AND of the underlying binary representation of each element ``x1_i`` of the input array ``x1`` with the respective element ``x2_i`` of the input array ``x2``.

    Parameters
    ----------
    x1: array
        first input array. Should have an integer or boolean data type.
    x2: array
        second input array. Must be compatible with ``x1`` (see :ref:`broadcasting`). Should have an integer or boolean data type.

    Returns
    -------
    out: array
        an array containing the element-wise results. The returned array must have a data type determined by :ref:`type-promotion`.
    """
def bitwise_left_shift(x1: array, x2: array, /) -> array:
    """
    Shifts the bits of each element ``x1_i`` of the input array ``x1`` to the left by appending ``x2_i`` (i.e., the respective element in the input array ``x2``) zeros to the right of ``x1_i``.

    Parameters
    ----------
    x1: array
        first input array. Should have an integer data type.
    x2: array
        second input array. Must be compatible with ``x1`` (see :ref:`broadcasting`). Should have an integer data type. Each element must be greater than or equal to ``0``.

    Returns
    -------
    out: array
        an array containing the element-wise results. The returned array must have a data type determined by :ref:`type-promotion`.
    """
def bitwise_invert(x: array, /) -> array:
    """
    Inverts (flips) each bit for each element ``x_i`` of the input array ``x``.

    Parameters
    ----------
    x: array
        input array. Should have an integer or boolean data type.

    Returns
    -------
    out: array
        an array containing the element-wise results. The returned array must have the same data type as ``x``.
    """
def bitwise_or(x1: array, x2: array, /) -> array:
    """
    Computes the bitwise OR of the underlying binary representation of each element ``x1_i`` of the input array ``x1`` with the respective element ``x2_i`` of the input array ``x2``.

    Parameters
    ----------
    x1: array
        first input array. Should have an integer or boolean data type.
    x2: array
        second input array. Must be compatible with ``x1`` (see :ref:`broadcasting`). Should have an integer or boolean data type.

    Returns
    -------
    out: array
        an array containing the element-wise results. The returned array must have a data type determined by :ref:`type-promotion`.
    """
def bitwise_right_shift(x1: array, x2: array, /) -> array:
    """
    Shifts the bits of each element ``x1_i`` of the input array ``x1`` to the right according to the respective element ``x2_i`` of the input array ``x2``.

    .. note::
       This operation must be an arithmetic shift (i.e., sign-propagating) and thus equivalent to floor division by a power of two.

    Parameters
    ----------
    x1: array
        first input array. Should have an integer data type.
    x2: array
        second input array. Must be compatible with ``x1`` (see :ref:`broadcasting`). Should have an integer data type. Each element must be greater than or equal to ``0``.

    Returns
    -------
    out: array
        an array containing the element-wise results. The returned array must have a data type determined by :ref:`type-promotion`.
    """
def bitwise_xor(x1: array, x2: array, /) -> array:
    """
    Computes the bitwise XOR of the underlying binary representation of each element ``x1_i`` of the input array ``x1`` with the respective element ``x2_i`` of the input array ``x2``.

    Parameters
    ----------
    x1: array
        first input array. Should have an integer or boolean data type.
    x2: array
        second input array. Must be compatible with ``x1`` (see :ref:`broadcasting`). Should have an integer or boolean data type.

    Returns
    -------
    out: array
        an array containing the element-wise results. The returned array must have a data type determined by :ref:`type-promotion`.
    """
def ceil(x: array, /) -> array:
    """
    Rounds each element ``x_i`` of the input array ``x`` to the smallest (i.e., closest to ``-infinity``) integer-valued number that is not less than ``x_i``.

    **Special cases**

    - If ``x_i`` is already integer-valued, the result is ``x_i``.

    For floating-point operands,

    - If ``x_i`` is ``+infinity``, the result is ``+infinity``.
    - If ``x_i`` is ``-infinity``, the result is ``-infinity``.
    - If ``x_i`` is ``+0``, the result is ``+0``.
    - If ``x_i`` is ``-0``, the result is ``-0``.
    - If ``x_i`` is ``NaN``, the result is ``NaN``.

    Parameters
    ----------
    x: array
        input array. Should have a real-valued data type.

    Returns
    -------
    out: array
        an array containing the rounded result for each element in ``x``. The returned array must have the same data type as ``x``.
    """
def cos(x: array, /) -> array:
    """
    Calculates an implementation-dependent approximation to the cosine, having domain ``(-infinity, +infinity)`` and codomain ``[-1, +1]``, for each element ``x_i`` of the input array ``x``. Each element ``x_i`` is assumed to be expressed in radians.

    **Special cases**

    For floating-point operands,

    - If ``x_i`` is ``NaN``, the result is ``NaN``.
    - If ``x_i`` is ``+0``, the result is ``1``.
    - If ``x_i`` is ``-0``, the result is ``1``.
    - If ``x_i`` is ``+infinity``, the result is ``NaN``.
    - If ``x_i`` is ``-infinity``, the result is ``NaN``.

    Parameters
    ----------
    x: array
        input array whose elements are each expressed in radians. Should have a real-valued floating-point data type.

    Returns
    -------
    out: array
        an array containing the cosine of each element in ``x``. The returned array must have a real-valued floating-point data type determined by :ref:`type-promotion`.
    """
def cosh(x: array, /) -> array:
    """
    Calculates an implementation-dependent approximation to the hyperbolic cosine, having domain ``[-infinity, +infinity]`` and codomain ``[-infinity, +infinity]``, for each element ``x_i`` in the input array ``x``.

    **Special cases**

    For floating-point operands,

    - If ``x_i`` is ``NaN``, the result is ``NaN``.
    - If ``x_i`` is ``+0``, the result is ``1``.
    - If ``x_i`` is ``-0``, the result is ``1``.
    - If ``x_i`` is ``+infinity``, the result is ``+infinity``.
    - If ``x_i`` is ``-infinity``, the result is ``+infinity``.

    Parameters
    ----------
    x: array
        input array whose elements each represent a hyperbolic angle. Should have a real-valued floating-point data type.

    Returns
    -------
    out: array
        an array containing the hyperbolic cosine of each element in ``x``. The returned array must have a real-valued floating-point data type determined by :ref:`type-promotion`.
    """
def divide(x1: array, x2: array, /) -> array:
    """
    Calculates the division for each element ``x1_i`` of the input array ``x1`` with the respective element ``x2_i`` of the input array ``x2``.

    .. note::
       If one or both of the input arrays have integer data types, the result is implementation-dependent, as type promotion between data type "kinds" (e.g., integer versus floating-point) is unspecified.

       Specification-compliant libraries may choose to raise an error or return an array containing the element-wise results. If an array is returned, the array must have a real-valued floating-point data type.

    **Special cases**

    For floating-point operands,

    - If either ``x1_i`` or ``x2_i`` is ``NaN``, the result is ``NaN``.
    - If ``x1_i`` is either ``+infinity`` or ``-infinity`` and ``x2_i`` is either ``+infinity`` or ``-infinity``, the result is ``NaN``.
    - If ``x1_i`` is either ``+0`` or ``-0`` and ``x2_i`` is either ``+0`` or ``-0``, the result is ``NaN``.
    - If ``x1_i`` is ``+0`` and ``x2_i`` is greater than ``0``, the result is ``+0``.
    - If ``x1_i`` is ``-0`` and ``x2_i`` is greater than ``0``, the result is ``-0``.
    - If ``x1_i`` is ``+0`` and ``x2_i`` is less than ``0``, the result is ``-0``.
    - If ``x1_i`` is ``-0`` and ``x2_i`` is less than ``0``, the result is ``+0``.
    - If ``x1_i`` is greater than ``0`` and ``x2_i`` is ``+0``, the result is ``+infinity``.
    - If ``x1_i`` is greater than ``0`` and ``x2_i`` is ``-0``, the result is ``-infinity``.
    - If ``x1_i`` is less than ``0`` and ``x2_i`` is ``+0``, the result is ``-infinity``.
    - If ``x1_i`` is less than ``0`` and ``x2_i`` is ``-0``, the result is ``+infinity``.
    - If ``x1_i`` is ``+infinity`` and ``x2_i`` is a positive (i.e., greater than ``0``) finite number, the result is ``+infinity``.
    - If ``x1_i`` is ``+infinity`` and ``x2_i`` is a negative (i.e., less than ``0``) finite number, the result is ``-infinity``.
    - If ``x1_i`` is ``-infinity`` and ``x2_i`` is a positive (i.e., greater than ``0``) finite number, the result is ``-infinity``.
    - If ``x1_i`` is ``-infinity`` and ``x2_i`` is a negative (i.e., less than ``0``) finite number, the result is ``+infinity``.
    - If ``x1_i`` is a positive (i.e., greater than ``0``) finite number and ``x2_i`` is ``+infinity``, the result is ``+0``.
    - If ``x1_i`` is a positive (i.e., greater than ``0``) finite number and ``x2_i`` is ``-infinity``, the result is ``-0``.
    - If ``x1_i`` is a negative (i.e., less than ``0``) finite number and ``x2_i`` is ``+infinity``, the result is ``-0``.
    - If ``x1_i`` is a negative (i.e., less than ``0``) finite number and ``x2_i`` is ``-infinity``, the result is ``+0``.
    - If ``x1_i`` and ``x2_i`` have the same mathematical sign and are both nonzero finite numbers, the result has a positive mathematical sign.
    - If ``x1_i`` and ``x2_i`` have different mathematical signs and are both nonzero finite numbers, the result has a negative mathematical sign.
    - In the remaining cases, where neither ``-infinity``, ``+0``, ``-0``, nor ``NaN`` is involved, the quotient must be computed and rounded to the nearest representable value according to IEEE 754-2019 and a supported rounding mode. If the magnitude is too large to represent, the operation overflows and the result is an ``infinity`` of appropriate mathematical sign. If the magnitude is too small to represent, the operation underflows and the result is a zero of appropriate mathematical sign.

    Parameters
    ----------
    x1: array
        dividend input array. Should have a real-valued data type.
    x2: array
        divisor input array. Must be compatible with ``x1`` (see :ref:`broadcasting`). Should have a real-valued data type.

    Returns
    -------
    out: array
        an array containing the element-wise results. The returned array must have a real-valued floating-point data type determined by :ref:`type-promotion`.
    """
def equal(x1: array, x2: array, /) -> array:
    """
    Computes the truth value of ``x1_i == x2_i`` for each element ``x1_i`` of the input array ``x1`` with the respective element ``x2_i`` of the input array ``x2``.

    Parameters
    ----------
    x1: array
        first input array. May have any data type.
    x2: array
        second input array. Must be compatible with ``x1`` (see :ref:`broadcasting`). May have any data type.

    Returns
    -------
    out: array
        an array containing the element-wise results. The returned array must have a data type of ``bool``.
    """
def exp(x: array, /) -> array:
    """
    Calculates an implementation-dependent approximation to the exponential function, having domain ``[-infinity, +infinity]`` and codomain ``[+0, +infinity]``, for each element ``x_i`` of the input array ``x`` (``e`` raised to the power of ``x_i``, where ``e`` is the base of the natural logarithm).

    **Special cases**

    For floating-point operands,

    - If ``x_i`` is ``NaN``, the result is ``NaN``.
    - If ``x_i`` is ``+0``, the result is ``1``.
    - If ``x_i`` is ``-0``, the result is ``1``.
    - If ``x_i`` is ``+infinity``, the result is ``+infinity``.
    - If ``x_i`` is ``-infinity``, the result is ``+0``.

    Parameters
    ----------
    x: array
        input array. Should have a real-valued floating-point data type.

    Returns
    -------
    out: array
        an array containing the evaluated exponential function result for each element in ``x``. The returned array must have a real-valued floating-point data type determined by :ref:`type-promotion`.
    """
def expm1(x: array, /) -> array:
    """
    Calculates an implementation-dependent approximation to ``exp(x)-1``, having domain ``[-infinity, +infinity]`` and codomain ``[-1, +infinity]``, for each element ``x_i`` of the input array ``x``.

    .. note::
       The purpose of this function is to calculate ``exp(x)-1.0`` more accurately when `x` is close to zero. Accordingly, conforming implementations should avoid implementing this function as simply ``exp(x)-1.0``. See FDLIBM, or some other IEEE 754-2019 compliant mathematical library, for a potential reference implementation.

    **Special cases**

    For floating-point operands,

    - If ``x_i`` is ``NaN``, the result is ``NaN``.
    - If ``x_i`` is ``+0``, the result is ``+0``.
    - If ``x_i`` is ``-0``, the result is ``-0``.
    - If ``x_i`` is ``+infinity``, the result is ``+infinity``.
    - If ``x_i`` is ``-infinity``, the result is ``-1``.

    Parameters
    ----------
    x: array
        input array. Should have a real-valued floating-point data type.

    Returns
    -------
    out: array
        an array containing the evaluated result for each element in ``x``. The returned array must have a real-valued floating-point data type determined by :ref:`type-promotion`.
    """
def floor(x: array, /) -> array:
    """
    Rounds each element ``x_i`` of the input array ``x`` to the greatest (i.e., closest to ``+infinity``) integer-valued number that is not greater than ``x_i``.

    **Special cases**

    - If ``x_i`` is already integer-valued, the result is ``x_i``.

    For floating-point operands,

    - If ``x_i`` is ``+infinity``, the result is ``+infinity``.
    - If ``x_i`` is ``-infinity``, the result is ``-infinity``.
    - If ``x_i`` is ``+0``, the result is ``+0``.
    - If ``x_i`` is ``-0``, the result is ``-0``.
    - If ``x_i`` is ``NaN``, the result is ``NaN``.

    Parameters
    ----------
    x: array
        input array. Should have a real-valued data type.

    Returns
    -------
    out: array
        an array containing the rounded result for each element in ``x``. The returned array must have the same data type as ``x``.
    """
def floor_divide(x1: array, x2: array, /) -> array:
    """
    Rounds the result of dividing each element ``x1_i`` of the input array ``x1`` by the respective element ``x2_i`` of the input array ``x2`` to the greatest (i.e., closest to `+infinity`) integer-value number that is not greater than the division result.

    .. note::
       For input arrays which promote to an integer data type, the result of division by zero is unspecified and thus implementation-defined.

    **Special cases**

    .. note::
       Floor division was introduced in Python via `PEP 238 <https://www.python.org/dev/peps/pep-0238/>`_ with the goal to disambiguate "true division" (i.e., computing an approximation to the mathematical operation of division) from "floor division" (i.e., rounding the result of division toward negative infinity). The former was computed when one of the operands was a ``float``, while the latter was computed when both operands were ``int``\s. Overloading the ``/`` operator to support both behaviors led to subtle numerical bugs when integers are possible, but not expected.

       To resolve this ambiguity, ``/`` was designated for true division, and ``//`` was designated for floor division. Semantically, floor division was `defined <https://www.python.org/dev/peps/pep-0238/#semantics-of-floor-division>`_ as equivalent to ``a // b == floor(a/b)``; however, special floating-point cases were left ill-defined.

       Accordingly, floor division is not implemented consistently across array libraries for some of the special cases documented below. Namely, when one of the operands is ``infinity``, libraries may diverge with some choosing to strictly follow ``floor(a/b)`` and others choosing to pair ``//`` with ``%`` according to the relation ``b = a % b + b * (a // b)``. The special cases leading to divergent behavior are documented below.

       This specification prefers floor division to match ``floor(divide(x1, x2))`` in order to avoid surprising and unexpected results; however, array libraries may choose to more strictly follow Python behavior.

    For floating-point operands,

    - If either ``x1_i`` or ``x2_i`` is ``NaN``, the result is ``NaN``.
    - If ``x1_i`` is either ``+infinity`` or ``-infinity`` and ``x2_i`` is either ``+infinity`` or ``-infinity``, the result is ``NaN``.
    - If ``x1_i`` is either ``+0`` or ``-0`` and ``x2_i`` is either ``+0`` or ``-0``, the result is ``NaN``.
    - If ``x1_i`` is ``+0`` and ``x2_i`` is greater than ``0``, the result is ``+0``.
    - If ``x1_i`` is ``-0`` and ``x2_i`` is greater than ``0``, the result is ``-0``.
    - If ``x1_i`` is ``+0`` and ``x2_i`` is less than ``0``, the result is ``-0``.
    - If ``x1_i`` is ``-0`` and ``x2_i`` is less than ``0``, the result is ``+0``.
    - If ``x1_i`` is greater than ``0`` and ``x2_i`` is ``+0``, the result is ``+infinity``.
    - If ``x1_i`` is greater than ``0`` and ``x2_i`` is ``-0``, the result is ``-infinity``.
    - If ``x1_i`` is less than ``0`` and ``x2_i`` is ``+0``, the result is ``-infinity``.
    - If ``x1_i`` is less than ``0`` and ``x2_i`` is ``-0``, the result is ``+infinity``.
    - If ``x1_i`` is ``+infinity`` and ``x2_i`` is a positive (i.e., greater than ``0``) finite number, the result is ``+infinity``. (**note**: libraries may return ``NaN`` to match Python behavior.)
    - If ``x1_i`` is ``+infinity`` and ``x2_i`` is a negative (i.e., less than ``0``) finite number, the result is ``-infinity``. (**note**: libraries may return ``NaN`` to match Python behavior.)
    - If ``x1_i`` is ``-infinity`` and ``x2_i`` is a positive (i.e., greater than ``0``) finite number, the result is ``-infinity``. (**note**: libraries may return ``NaN`` to match Python behavior.)
    - If ``x1_i`` is ``-infinity`` and ``x2_i`` is a negative (i.e., less than ``0``) finite number, the result is ``+infinity``. (**note**: libraries may return ``NaN`` to match Python behavior.)
    - If ``x1_i`` is a positive (i.e., greater than ``0``) finite number and ``x2_i`` is ``+infinity``, the result is ``+0``.
    - If ``x1_i`` is a positive (i.e., greater than ``0``) finite number and ``x2_i`` is ``-infinity``, the result is ``-0``. (**note**: libraries may return ``-1.0`` to match Python behavior.)
    - If ``x1_i`` is a negative (i.e., less than ``0``) finite number and ``x2_i`` is ``+infinity``, the result is ``-0``. (**note**: libraries may return ``-1.0`` to match Python behavior.)
    - If ``x1_i`` is a negative (i.e., less than ``0``) finite number and ``x2_i`` is ``-infinity``, the result is ``+0``.
    - If ``x1_i`` and ``x2_i`` have the same mathematical sign and are both nonzero finite numbers, the result has a positive mathematical sign.
    - If ``x1_i`` and ``x2_i`` have different mathematical signs and are both nonzero finite numbers, the result has a negative mathematical sign.
    - In the remaining cases, where neither ``-infinity``, ``+0``, ``-0``, nor ``NaN`` is involved, the quotient must be computed and rounded to the greatest (i.e., closest to `+infinity`) representable integer-value number that is not greater than the division result. If the magnitude is too large to represent, the operation overflows and the result is an ``infinity`` of appropriate mathematical sign. If the magnitude is too small to represent, the operation underflows and the result is a zero of appropriate mathematical sign.

    Parameters
    ----------
    x1: array
        dividend input array. Should have a real-valued data type.
    x2: array
        divisor input array. Must be compatible with ``x1`` (see :ref:`broadcasting`). Should have a real-valued data type.

    Returns
    -------
    out: array
        an array containing the element-wise results. The returned array must have a data type determined by :ref:`type-promotion`.
    """
def greater(x1: array, x2: array, /) -> array:
    """
    Computes the truth value of ``x1_i > x2_i`` for each element ``x1_i`` of the input array ``x1`` with the respective element ``x2_i`` of the input array ``x2``.

    Parameters
    ----------
    x1: array
        first input array. Should have a real-valued data type.
    x2: array
        second input array. Must be compatible with ``x1`` (see :ref:`broadcasting`). Should have a real-valued data type.

    Returns
    -------
    out: array
        an array containing the element-wise results. The returned array must have a data type of ``bool``.
    """
def greater_equal(x1: array, x2: array, /) -> array:
    """
    Computes the truth value of ``x1_i >= x2_i`` for each element ``x1_i`` of the input array ``x1`` with the respective element ``x2_i`` of the input array ``x2``.

    Parameters
    ----------
    x1: array
        first input array. Should have a real-valued data type.
    x2: array
        second input array. Must be compatible with ``x1`` (see :ref:`broadcasting`). Should have a real-valued data type.

    Returns
    -------
    out: array
        an array containing the element-wise results. The returned array must have a data type of ``bool``.
    """
def isfinite(x: array, /) -> array:
    """
    Tests each element ``x_i`` of the input array ``x`` to determine if finite (i.e., not ``NaN`` and not equal to positive or negative infinity).

    Parameters
    ----------
    x: array
        input array. Should have a real-valued data type.

    Returns
    -------
    out: array
        an array containing test results. An element ``out_i`` is ``True`` if ``x_i`` is finite and ``False`` otherwise. The returned array must have a data type of ``bool``.
    """
def isinf(x: array, /) -> array:
    """
    Tests each element ``x_i`` of the input array ``x`` to determine if equal to positive or negative infinity.

    Parameters
    ----------
    x: array
        input array. Should have a real-valued data type.

    Returns
    -------
    out: array
        an array containing test results. An element ``out_i`` is ``True`` if ``x_i`` is either positive or negative infinity and ``False`` otherwise. The returned array must have a data type of ``bool``.
    """
def isnan(x: array, /) -> array:
    """
    Tests each element ``x_i`` of the input array ``x`` to determine whether the element is ``NaN``.

    Parameters
    ----------
    x: array
        input array. Should have a real-valued data type.

    Returns
    -------
    out: array
        an array containing test results. An element ``out_i`` is ``True`` if ``x_i`` is ``NaN`` and ``False`` otherwise. The returned array must have a data type of ``bool``.
    """
def less(x1: array, x2: array, /) -> array:
    """
    Computes the truth value of ``x1_i < x2_i`` for each element ``x1_i`` of the input array ``x1`` with the respective element ``x2_i`` of the input array ``x2``.

    Parameters
    ----------
    x1: array
        first input array. Should have a real-valued data type.
    x2: array
        second input array. Must be compatible with ``x1`` (see :ref:`broadcasting`). Should have a real-valued data type.

    Returns
    -------
    out: array
        an array containing the element-wise results. The returned array must have a data type of ``bool``.
    """
def less_equal(x1: array, x2: array, /) -> array:
    """
    Computes the truth value of ``x1_i <= x2_i`` for each element ``x1_i`` of the input array ``x1`` with the respective element ``x2_i`` of the input array ``x2``.

    Parameters
    ----------
    x1: array
        first input array. Should have a real-valued data type.
    x2: array
        second input array. Must be compatible with ``x1`` (see :ref:`broadcasting`). Should have a real-valued data type.

    Returns
    -------
    out: array
        an array containing the element-wise results. The returned array must have a data type of ``bool``.
    """
def log(x: array, /) -> array:
    """
    Calculates an implementation-dependent approximation to the natural (base ``e``) logarithm, having domain ``[0, +infinity]`` and codomain ``[-infinity, +infinity]``, for each element ``x_i`` of the input array ``x``.

    **Special cases**

    For floating-point operands,

    - If ``x_i`` is ``NaN``, the result is ``NaN``.
    - If ``x_i`` is less than ``0``, the result is ``NaN``.
    - If ``x_i`` is either ``+0`` or ``-0``, the result is ``-infinity``.
    - If ``x_i`` is ``1``, the result is ``+0``.
    - If ``x_i`` is ``+infinity``, the result is ``+infinity``.

    Parameters
    ----------
    x: array
        input array. Should have a real-valued floating-point data type.

    Returns
    -------
    out: array
        an array containing the evaluated natural logarithm for each element in ``x``. The returned array must have a real-valued floating-point data type determined by :ref:`type-promotion`.
    """
def log1p(x: array, /) -> array:
    """
    Calculates an implementation-dependent approximation to ``log(1+x)``, where ``log`` refers to the natural (base ``e``) logarithm, having domain ``[-1, +infinity]`` and codomain ``[-infinity, +infinity]``, for each element ``x_i`` of the input array ``x``.

    .. note::
       The purpose of this function is to calculate ``log(1+x)`` more accurately when `x` is close to zero. Accordingly, conforming implementations should avoid implementing this function as simply ``log(1+x)``. See FDLIBM, or some other IEEE 754-2019 compliant mathematical library, for a potential reference implementation.

    **Special cases**

    For floating-point operands,

    - If ``x_i`` is ``NaN``, the result is ``NaN``.
    - If ``x_i`` is less than ``-1``, the result is ``NaN``.
    - If ``x_i`` is ``-1``, the result is ``-infinity``.
    - If ``x_i`` is ``-0``, the result is ``-0``.
    - If ``x_i`` is ``+0``, the result is ``+0``.
    - If ``x_i`` is ``+infinity``, the result is ``+infinity``.

    Parameters
    ----------
    x: array
        input array. Should have a real-valued floating-point data type.

    Returns
    -------
    out: array
        an array containing the evaluated result for each element in ``x``. The returned array must have a real-valued floating-point data type determined by :ref:`type-promotion`.
    """
def log2(x: array, /) -> array:
    """
    Calculates an implementation-dependent approximation to the base ``2`` logarithm, having domain ``[0, +infinity]`` and codomain ``[-infinity, +infinity]``, for each element ``x_i`` of the input array ``x``.

    **Special cases**

    For floating-point operands,

    - If ``x_i`` is ``NaN``, the result is ``NaN``.
    - If ``x_i`` is less than ``0``, the result is ``NaN``.
    - If ``x_i`` is either ``+0`` or ``-0``, the result is ``-infinity``.
    - If ``x_i`` is ``1``, the result is ``+0``.
    - If ``x_i`` is ``+infinity``, the result is ``+infinity``.

    Parameters
    ----------
    x: array
        input array. Should have a real-valued floating-point data type.

    Returns
    -------
    out: array
        an array containing the evaluated base ``2`` logarithm for each element in ``x``. The returned array must have a real-valued floating-point data type determined by :ref:`type-promotion`.
    """
def log10(x: array, /) -> array:
    """
    Calculates an implementation-dependent approximation to the base ``10`` logarithm, having domain ``[0, +infinity]`` and codomain ``[-infinity, +infinity]``, for each element ``x_i`` of the input array ``x``.

    **Special cases**

    For floating-point operands,

    - If ``x_i`` is ``NaN``, the result is ``NaN``.
    - If ``x_i`` is less than ``0``, the result is ``NaN``.
    - If ``x_i`` is either ``+0`` or ``-0``, the result is ``-infinity``.
    - If ``x_i`` is ``1``, the result is ``+0``.
    - If ``x_i`` is ``+infinity``, the result is ``+infinity``.

    Parameters
    ----------
    x: array
        input array. Should have a real-valued floating-point data type.

    Returns
    -------
    out: array
        an array containing the evaluated base ``10`` logarithm for each element in ``x``. The returned array must have a real-valued floating-point data type determined by :ref:`type-promotion`.
    """
def logaddexp(x1: array, x2: array, /) -> array:
    """
    Calculates the logarithm of the sum of exponentiations ``log(exp(x1) + exp(x2))`` for each element ``x1_i`` of the input array ``x1`` with the respective element ``x2_i`` of the input array ``x2``.

    **Special cases**

    For floating-point operands,

    - If either ``x1_i`` or ``x2_i`` is ``NaN``, the result is ``NaN``.
    - If ``x1_i`` is ``+infinity`` and ``x2_i`` is not ``NaN``, the result is ``+infinity``.
    - If ``x1_i`` is not ``NaN`` and ``x2_i`` is ``+infinity``, the result is ``+infinity``.

    Parameters
    ----------
    x1: array
        first input array. Should have a real-valued floating-point data type.
    x2: array
        second input array. Must be compatible with ``x1`` (see :ref:`broadcasting`). Should have a real-valued floating-point data type.

    Returns
    -------
    out: array
        an array containing the element-wise results. The returned array must have a real-valued floating-point data type determined by :ref:`type-promotion`.
    """
def logical_and(x1: array, x2: array, /) -> array:
    """
    Computes the logical AND for each element ``x1_i`` of the input array ``x1`` with the respective element ``x2_i`` of the input array ``x2``.

    .. note::
       While this specification recommends that this function only accept input arrays having a boolean data type, specification-compliant array libraries may choose to accept input arrays having real-valued data types. If non-boolean data types are supported, zeros must be considered the equivalent of ``False``, while non-zeros must be considered the equivalent of ``True``.

    Parameters
    ----------
    x1: array
        first input array. Should have a boolean data type.
    x2: array
        second input array. Must be compatible with ``x1`` (see :ref:`broadcasting`). Should have a boolean data type.

    Returns
    -------
    out: array
        an array containing the element-wise results. The returned array must have a data type of `bool`.
    """
def logical_not(x: array, /) -> array:
    """
    Computes the logical NOT for each element ``x_i`` of the input array ``x``.

    .. note::
       While this specification recommends that this function only accept input arrays having a boolean data type, specification-compliant array libraries may choose to accept input arrays having real-valued data types. If non-boolean data types are supported, zeros must be considered the equivalent of ``False``, while non-zeros must be considered the equivalent of ``True``.

    Parameters
    ----------
    x: array
        input array. Should have a boolean data type.

    Returns
    -------
    out: array
        an array containing the element-wise results. The returned array must have a data type of ``bool``.
    """
def logical_or(x1: array, x2: array, /) -> array:
    """
    Computes the logical OR for each element ``x1_i`` of the input array ``x1`` with the respective element ``x2_i`` of the input array ``x2``.

    .. note::
       While this specification recommends that this function only accept input arrays having a boolean data type, specification-compliant array libraries may choose to accept input arrays having real-valued data types. If non-boolean data types are supported, zeros must be considered the equivalent of ``False``, while non-zeros must be considered the equivalent of ``True``.

    Parameters
    ----------
    x1: array
        first input array. Should have a boolean data type.
    x2: array
        second input array. Must be compatible with ``x1`` (see :ref:`broadcasting`). Should have a boolean data type.

    Returns
    -------
    out: array
        an array containing the element-wise results. The returned array must have a data type of ``bool``.
    """
def logical_xor(x1: array, x2: array, /) -> array:
    """
    Computes the logical XOR for each element ``x1_i`` of the input array ``x1`` with the respective element ``x2_i`` of the input array ``x2``.

    .. note::
       While this specification recommends that this function only accept input arrays having a boolean data type, specification-compliant array libraries may choose to accept input arrays having real-valued data types. If non-boolean data types are supported, zeros must be considered the equivalent of ``False``, while non-zeros must be considered the equivalent of ``True``.

    Parameters
    ----------
    x1: array
        first input array. Should have a boolean data type.
    x2: array
        second input array. Must be compatible with ``x1`` (see :ref:`broadcasting`). Should have a boolean data type.

    Returns
    -------
    out: array
        an array containing the element-wise results. The returned array must have a data type of ``bool``.
    """
def multiply(x1: array, x2: array, /) -> array:
    """
    Calculates the product for each element ``x1_i`` of the input array ``x1`` with the respective element ``x2_i`` of the input array ``x2``.

    **Special cases**

    For floating-point operands,

    - If either ``x1_i`` or ``x2_i`` is ``NaN``, the result is ``NaN``.
    - If ``x1_i`` is either ``+infinity`` or ``-infinity`` and ``x2_i`` is either ``+0`` or ``-0``, the result is ``NaN``.
    - If ``x1_i`` is either ``+0`` or ``-0`` and ``x2_i`` is either ``+infinity`` or ``-infinity``, the result is ``NaN``.
    - If ``x1_i`` and ``x2_i`` have the same mathematical sign, the result has a positive mathematical sign, unless the result is ``NaN``. If the result is ``NaN``, the "sign" of ``NaN`` is implementation-defined.
    - If ``x1_i`` and ``x2_i`` have different mathematical signs, the result has a negative mathematical sign, unless the result is ``NaN``. If the result is ``NaN``, the "sign" of ``NaN`` is implementation-defined.
    - If ``x1_i`` is either ``+infinity`` or ``-infinity`` and ``x2_i`` is either ``+infinity`` or ``-infinity``, the result is a signed infinity with the mathematical sign determined by the rule already stated above.
    - If ``x1_i`` is either ``+infinity`` or ``-infinity`` and ``x2_i`` is a nonzero finite number, the result is a signed infinity with the mathematical sign determined by the rule already stated above.
    - If ``x1_i`` is a nonzero finite number and ``x2_i`` is either ``+infinity`` or ``-infinity``, the result is a signed infinity with the mathematical sign determined by the rule already stated above.
    - In the remaining cases, where neither ``infinity`` nor ``NaN`` is involved, the product must be computed and rounded to the nearest representable value according to IEEE 754-2019 and a supported rounding mode. If the magnitude is too large to represent, the result is an `infinity` of appropriate mathematical sign. If the magnitude is too small to represent, the result is a zero of appropriate mathematical sign.

    .. note::
       Floating-point multiplication is not always associative due to finite precision.

    Parameters
    ----------
    x1: array
        first input array. Should have a real-valued data type.
    x2: array
        second input array. Must be compatible with ``x1`` (see :ref:`broadcasting`). Should have a real-valued data type.

    Returns
    -------
    out: array
        an array containing the element-wise products. The returned array must have a data type determined by :ref:`type-promotion`.
    """
def negative(x: array, /) -> array:
    """
    Computes the numerical negative of each element ``x_i`` (i.e., ``y_i = -x_i``) of the input array ``x``.

    .. note::
       For signed integer data types, the numerical negative of the minimum representable integer is implementation-dependent.

    Parameters
    ----------
    x: array
        input array. Should have a real-valued data type.

    Returns
    -------
    out: array
        an array containing the evaluated result for each element in ``x``. The returned array must have a data type determined by :ref:`type-promotion`.
    """
def not_equal(x1: array, x2: array, /) -> array:
    """
    Computes the truth value of ``x1_i != x2_i`` for each element ``x1_i`` of the input array ``x1`` with the respective element ``x2_i`` of the input array ``x2``.

    Parameters
    ----------
    x1: array
        first input array. May have any data type.
    x2: array
        second input array. Must be compatible with ``x1`` (see :ref:`broadcasting`).

    Returns
    -------
    out: array
        an array containing the element-wise results. The returned array must have a data type of ``bool``.
    """
def positive(x: array, /) -> array:
    """
    Computes the numerical positive of each element ``x_i`` (i.e., ``y_i = +x_i``) of the input array ``x``.

    Parameters
    ----------
    x: array
        input array. Should have a real-valued data type.

    Returns
    -------
    out: array
        an array containing the evaluated result for each element in ``x``. The returned array must have the same data type as ``x``.
    """
def pow(x1: array, x2: array, /) -> array:
    """
    Calculates an implementation-dependent approximation of exponentiation by raising each element ``x1_i`` (the base) of the input array ``x1`` to the power of ``x2_i`` (the exponent), where ``x2_i`` is the corresponding element of the input array ``x2``.

    .. note::
       If both ``x1`` and ``x2`` have integer data types, the result of ``pow`` when ``x2_i`` is negative (i.e., less than zero) is unspecified and thus implementation-dependent.

       If ``x1`` has an integer data type and ``x2`` has a real-valued floating-point data type, behavior is implementation-dependent (type promotion between data type "kinds" (integer versus floating-point) is unspecified).

    **Special cases**

    For floating-point operands,

    - If ``x1_i`` is not equal to ``1`` and ``x2_i`` is ``NaN``, the result is ``NaN``.
    - If ``x2_i`` is ``+0``, the result is ``1``, even if ``x1_i`` is ``NaN``.
    - If ``x2_i`` is ``-0``, the result is ``1``, even if ``x1_i`` is ``NaN``.
    - If ``x1_i`` is ``NaN`` and ``x2_i`` is not equal to ``0``, the result is ``NaN``.
    - If ``abs(x1_i)`` is greater than ``1`` and ``x2_i`` is ``+infinity``, the result is ``+infinity``.
    - If ``abs(x1_i)`` is greater than ``1`` and ``x2_i`` is ``-infinity``, the result is ``+0``.
    - If ``abs(x1_i)`` is ``1`` and ``x2_i`` is ``+infinity``, the result is ``1``.
    - If ``abs(x1_i)`` is ``1`` and ``x2_i`` is ``-infinity``, the result is ``1``.
    - If ``x1_i`` is ``1`` and ``x2_i`` is not ``NaN``, the result is ``1``.
    - If ``abs(x1_i)`` is less than ``1`` and ``x2_i`` is ``+infinity``, the result is ``+0``.
    - If ``abs(x1_i)`` is less than ``1`` and ``x2_i`` is ``-infinity``, the result is ``+infinity``.
    - If ``x1_i`` is ``+infinity`` and ``x2_i`` is greater than ``0``, the result is ``+infinity``.
    - If ``x1_i`` is ``+infinity`` and ``x2_i`` is less than ``0``, the result is ``+0``.
    - If ``x1_i`` is ``-infinity``, ``x2_i`` is greater than ``0``, and ``x2_i`` is an odd integer value, the result is ``-infinity``.
    - If ``x1_i`` is ``-infinity``, ``x2_i`` is greater than ``0``, and ``x2_i`` is not an odd integer value, the result is ``+infinity``.
    - If ``x1_i`` is ``-infinity``, ``x2_i`` is less than ``0``, and ``x2_i`` is an odd integer value, the result is ``-0``.
    - If ``x1_i`` is ``-infinity``, ``x2_i`` is less than ``0``, and ``x2_i`` is not an odd integer value, the result is ``+0``.
    - If ``x1_i`` is ``+0`` and ``x2_i`` is greater than ``0``, the result is ``+0``.
    - If ``x1_i`` is ``+0`` and ``x2_i`` is less than ``0``, the result is ``+infinity``.
    - If ``x1_i`` is ``-0``, ``x2_i`` is greater than ``0``, and ``x2_i`` is an odd integer value, the result is ``-0``.
    - If ``x1_i`` is ``-0``, ``x2_i`` is greater than ``0``, and ``x2_i`` is not an odd integer value, the result is ``+0``.
    - If ``x1_i`` is ``-0``, ``x2_i`` is less than ``0``, and ``x2_i`` is an odd integer value, the result is ``-infinity``.
    - If ``x1_i`` is ``-0``, ``x2_i`` is less than ``0``, and ``x2_i`` is not an odd integer value, the result is ``+infinity``.
    - If ``x1_i`` is less than ``0``, ``x1_i`` is a finite number, ``x2_i`` is a finite number, and ``x2_i`` is not an integer value, the result is ``NaN``.

    Parameters
    ----------
    x1: array
        first input array whose elements correspond to the exponentiation base. Should have a real-valued data type.
    x2: array
        second input array whose elements correspond to the exponentiation exponent. Must be compatible with ``x1`` (see :ref:`broadcasting`). Should have a real-valued data type.

    Returns
    -------
    out: array
        an array containing the element-wise results. The returned array must have a data type determined by :ref:`type-promotion`.
    """
def remainder(x1: array, x2: array, /) -> array:
    """
    Returns the remainder of division for each element ``x1_i`` of the input array ``x1`` and the respective element ``x2_i`` of the input array ``x2``.

    .. note::
       This function is equivalent to the Python modulus operator ``x1_i % x2_i``.

    .. note::
       For input arrays which promote to an integer data type, the result of division by zero is unspecified and thus implementation-defined.

    **Special cases**

    .. note::
       In general, similar to Python's ``%`` operator, this function is **not** recommended for floating-point operands as semantics do not follow IEEE 754. That this function is specified to accept floating-point operands is primarily for reasons of backward compatibility.

    For floating-point operands,

    - If either ``x1_i`` or ``x2_i`` is ``NaN``, the result is ``NaN``.
    - If ``x1_i`` is either ``+infinity`` or ``-infinity`` and ``x2_i`` is either ``+infinity`` or ``-infinity``, the result is ``NaN``.
    - If ``x1_i`` is either ``+0`` or ``-0`` and ``x2_i`` is either ``+0`` or ``-0``, the result is ``NaN``.
    - If ``x1_i`` is ``+0`` and ``x2_i`` is greater than ``0``, the result is ``+0``.
    - If ``x1_i`` is ``-0`` and ``x2_i`` is greater than ``0``, the result is ``+0``.
    - If ``x1_i`` is ``+0`` and ``x2_i`` is less than ``0``, the result is ``-0``.
    - If ``x1_i`` is ``-0`` and ``x2_i`` is less than ``0``, the result is ``-0``.
    - If ``x1_i`` is greater than ``0`` and ``x2_i`` is ``+0``, the result is ``NaN``.
    - If ``x1_i`` is greater than ``0`` and ``x2_i`` is ``-0``, the result is ``NaN``.
    - If ``x1_i`` is less than ``0`` and ``x2_i`` is ``+0``, the result is ``NaN``.
    - If ``x1_i`` is less than ``0`` and ``x2_i`` is ``-0``, the result is ``NaN``.
    - If ``x1_i`` is ``+infinity`` and ``x2_i`` is a positive (i.e., greater than ``0``) finite number, the result is ``NaN``.
    - If ``x1_i`` is ``+infinity`` and ``x2_i`` is a negative (i.e., less than ``0``) finite number, the result is ``NaN``.
    - If ``x1_i`` is ``-infinity`` and ``x2_i`` is a positive (i.e., greater than ``0``) finite number, the result is ``NaN``.
    - If ``x1_i`` is ``-infinity`` and ``x2_i`` is a negative (i.e., less than ``0``) finite number, the result is ``NaN``.
    - If ``x1_i`` is a positive (i.e., greater than ``0``) finite number and ``x2_i`` is ``+infinity``, the result is ``x1_i``. (**note**: this result matches Python behavior.)
    - If ``x1_i`` is a positive (i.e., greater than ``0``) finite number and ``x2_i`` is ``-infinity``, the result is ``x2_i``. (**note**: this result matches Python behavior.)
    - If ``x1_i`` is a negative (i.e., less than ``0``) finite number and ``x2_i`` is ``+infinity``, the result is ``x2_i``. (**note**: this result matches Python behavior.)
    - If ``x1_i`` is a negative (i.e., less than ``0``) finite number and ``x2_i`` is ``-infinity``, the result is ``x1_i``. (**note**: this result matches Python behavior.)
    - In the remaining cases, the result must match that of the Python ``%`` operator.

    Parameters
    ----------
    x1: array
        dividend input array. Should have a real-valued data type.
    x2: array
        divisor input array. Must be compatible with ``x1`` (see :ref:`broadcasting`). Should have a real-valued data type.

    Returns
    -------
    out: array
        an array containing the element-wise results. Each element-wise result must have the same sign as the respective element ``x2_i``. The returned array must have a data type determined by :ref:`type-promotion`.
    """
def round(x: array, /) -> array:
    """
    Rounds each element ``x_i`` of the input array ``x`` to the nearest integer-valued number.

    **Special cases**

    - If ``x_i`` is already integer-valued, the result is ``x_i``.

    For floating-point operands,

    - If ``x_i`` is ``+infinity``, the result is ``+infinity``.
    - If ``x_i`` is ``-infinity``, the result is ``-infinity``.
    - If ``x_i`` is ``+0``, the result is ``+0``.
    - If ``x_i`` is ``-0``, the result is ``-0``.
    - If ``x_i`` is ``NaN``, the result is ``NaN``.
    - If two integers are equally close to ``x_i``, the result is the even integer closest to ``x_i`` (i.e., ties round to even, not away from zero).

    Parameters
    ----------
    x: array
        input array. Should have a real-valued data type.

    Returns
    -------
    out: array
        an array containing the rounded result for each element in ``x``. The returned array must have the same data type as ``x``.
    """
def sign(x: array, /) -> array:
    """
    Returns an indication of the sign of a number for each element ``x_i`` of the input array ``x``.

    **Special cases**

    - If ``x_i`` is less than ``0``, the result is ``-1``.
    - If ``x_i`` is either ``-0`` or ``+0``, the result is ``0``.
    - If ``x_i`` is greater than ``0``, the result is ``+1``.

    .. note::
       The behavior for ``NaN`` inputs is not specified here. (NOTE(review): later spec revisions define ``sign(NaN)`` as ``NaN`` — confirm against the targeted spec version.)

    Parameters
    ----------
    x: array
        input array. Should have a real-valued data type.

    Returns
    -------
    out: array
        an array containing the evaluated result for each element in ``x``. The returned array must have the same data type as ``x``.
    """
def sin(x: array, /) -> array:
    """
    Calculates an implementation-dependent approximation to the sine, having domain ``(-infinity, +infinity)`` and codomain ``[-1, +1]``, for each element ``x_i`` of the input array ``x``. Each element ``x_i`` is assumed to be expressed in radians.

    **Special cases**

    For floating-point operands,

    - If ``x_i`` is ``NaN``, the result is ``NaN``.
    - If ``x_i`` is ``+0``, the result is ``+0``.
    - If ``x_i`` is ``-0``, the result is ``-0``.
    - If ``x_i`` is either ``+infinity`` or ``-infinity``, the result is ``NaN``.

    Parameters
    ----------
    x: array
        input array whose elements are each expressed in radians. Should have a real-valued floating-point data type.

    Returns
    -------
    out: array
        an array containing the sine of each element in ``x``. The returned array must have a real-valued floating-point data type determined by :ref:`type-promotion`.
    """
def sinh(x: array, /) -> array:
    """
    Calculates an implementation-dependent approximation to the hyperbolic sine, having domain ``[-infinity, +infinity]`` and codomain ``[-infinity, +infinity]``, for each element ``x_i`` of the input array ``x``.

    **Special cases**

    For floating-point operands,

    - If ``x_i`` is ``NaN``, the result is ``NaN``.
    - If ``x_i`` is ``+0``, the result is ``+0``.
    - If ``x_i`` is ``-0``, the result is ``-0``.
    - If ``x_i`` is ``+infinity``, the result is ``+infinity``.
    - If ``x_i`` is ``-infinity``, the result is ``-infinity``.

    Parameters
    ----------
    x: array
        input array whose elements each represent a hyperbolic angle. Should have a real-valued floating-point data type.

    Returns
    -------
    out: array
        an array containing the hyperbolic sine of each element in ``x``. The returned array must have a real-valued floating-point data type determined by :ref:`type-promotion`.
    """
def square(x: array, /) -> array:
    """
    Squares (``x_i * x_i``) each element ``x_i`` of the input array ``x``.

    Parameters
    ----------
    x: array
        input array. Should have a real-valued data type.

    Returns
    -------
    out: array
        an array containing the evaluated result for each element in ``x``. The returned array must have a data type determined by :ref:`type-promotion`.
    """
def sqrt(x: array, /) -> array:
    """
    Calculates the square root, having domain ``[0, +infinity]`` and codomain ``[0, +infinity]``, for each element ``x_i`` of the input array ``x``. After rounding, each result must be indistinguishable from the infinitely precise result (as required by IEEE 754).

    **Special cases**

    For floating-point operands,

    - If ``x_i`` is ``NaN``, the result is ``NaN``.
    - If ``x_i`` is less than ``0``, the result is ``NaN``.
    - If ``x_i`` is ``+0``, the result is ``+0``.
    - If ``x_i`` is ``-0``, the result is ``-0``.
    - If ``x_i`` is ``+infinity``, the result is ``+infinity``.

    Parameters
    ----------
    x: array
        input array. Should have a real-valued floating-point data type.

    Returns
    -------
    out: array
        an array containing the square root of each element in ``x``. The returned array must have a real-valued floating-point data type determined by :ref:`type-promotion`.
    """
def subtract(x1: array, x2: array, /) -> array:
    """
    Calculates the difference for each element ``x1_i`` of the input array ``x1`` with the respective element ``x2_i`` of the input array ``x2``. The result of ``x1_i - x2_i`` must be the same as ``x1_i + (-x2_i)`` and must be governed by the same floating-point rules as addition (see :meth:`add`).

    Parameters
    ----------
    x1: array
        first input array. Should have a real-valued data type.
    x2: array
        second input array. Must be compatible with ``x1`` (see :ref:`broadcasting`). Should have a real-valued data type.

    Returns
    -------
    out: array
        an array containing the element-wise differences. The returned array must have a data type determined by :ref:`type-promotion`.
    """
def tan(x: array, /) -> array:
    """
    Calculates an implementation-dependent approximation to the tangent, having domain ``(-infinity, +infinity)`` and codomain ``(-infinity, +infinity)``, for each element ``x_i`` of the input array ``x``. Each element ``x_i`` is assumed to be expressed in radians.

    **Special cases**

    For floating-point operands,

    - If ``x_i`` is ``NaN``, the result is ``NaN``.
    - If ``x_i`` is ``+0``, the result is ``+0``.
    - If ``x_i`` is ``-0``, the result is ``-0``.
    - If ``x_i`` is either ``+infinity`` or ``-infinity``, the result is ``NaN``.

    Parameters
    ----------
    x: array
        input array whose elements are expressed in radians. Should have a real-valued floating-point data type.

    Returns
    -------
    out: array
        an array containing the tangent of each element in ``x``. The returned array must have a real-valued floating-point data type determined by :ref:`type-promotion`.
    """
def tanh(x: array, /) -> array:
    """
    Calculates an implementation-dependent approximation to the hyperbolic tangent, having domain ``[-infinity, +infinity]`` and codomain ``[-1, +1]``, for each element ``x_i`` of the input array ``x``.

    **Special cases**

    For floating-point operands,

    - If ``x_i`` is ``NaN``, the result is ``NaN``.
    - If ``x_i`` is ``+0``, the result is ``+0``.
    - If ``x_i`` is ``-0``, the result is ``-0``.
    - If ``x_i`` is ``+infinity``, the result is ``+1``.
    - If ``x_i`` is ``-infinity``, the result is ``-1``.

    Parameters
    ----------
    x: array
        input array whose elements each represent a hyperbolic angle. Should have a real-valued floating-point data type.

    Returns
    -------
    out: array
        an array containing the hyperbolic tangent of each element in ``x``. The returned array must have a real-valued floating-point data type determined by :ref:`type-promotion`.
    """
"""
def trunc(x: array, /) -> array:
    """
    Rounds each element ``x_i`` of the input array ``x`` to the nearest integer-valued number that is closer to zero than ``x_i`` (i.e., truncation toward zero).

    .. note::
       The earlier wording "closest to but no greater than ``x_i``" describes ``floor`` for negative inputs; truncation discards the fractional part regardless of sign, which is consistent with the sign-preserving special cases below.

    **Special cases**

    - If ``x_i`` is already integer-valued, the result is ``x_i``.

    For floating-point operands,

    - If ``x_i`` is ``+infinity``, the result is ``+infinity``.
    - If ``x_i`` is ``-infinity``, the result is ``-infinity``.
    - If ``x_i`` is ``+0``, the result is ``+0``.
    - If ``x_i`` is ``-0``, the result is ``-0``.
    - If ``x_i`` is ``NaN``, the result is ``NaN``.

    Parameters
    ----------
    x: array
        input array. Should have a real-valued data type.

    Returns
    -------
    out: array
        an array containing the rounded result for each element in ``x``. The returned array must have the same data type as ``x``.
    """
# Public API surface of this module: the element-wise functions defined above.
# Kept as an explicit list so `from <module> import *` and documentation tools
# expose exactly these names.
__all__ = ['abs', 'acos', 'acosh', 'add', 'asin', 'asinh', 'atan', 'atan2', 'atanh', 'bitwise_and', 'bitwise_left_shift', 'bitwise_invert', 'bitwise_or', 'bitwise_right_shift', 'bitwise_xor', 'ceil', 'cos', 'cosh', 'divide', 'equal', 'exp', 'expm1', 'floor', 'floor_divide', 'greater', 'greater_equal', 'isfinite', 'isinf', 'isnan', 'less', 'less_equal', 'log', 'log1p', 'log2', 'log10', 'logaddexp', 'logical_and', 'logical_not', 'logical_or', 'logical_xor', 'multiply', 'negative', 'not_equal', 'positive', 'pow', 'remainder', 'round', 'sign', 'sin', 'sinh', 'square', 'sqrt', 'subtract', 'tan', 'tanh', 'trunc']
def abs(x: array, /) -> array:
def acos(x: array, /) -> array:
def acosh(x: array, /) -> array:
def add(x1: array, x2: array, /) -> array:
def asin(x: array, /) -> array:
def asinh(x: array, /) -> array:
def atan(x: array, /) -> array:
def atan2(x1: array, x2: array, /) -> array:
def atanh(x: array, /) -> array:
def bitwise_and(x1: array, x2: array, /) -> array:
def bitwise_left_shift(x1: array, x2: array, /) -> array:
def bitwise_invert(x: array, /) -> array:
def bitwise_or(x1: array, x2: array, /) -> array:
def bitwise_right_shift(x1: array, x2: array, /) -> array:
def bitwise_xor(x1: array, x2: array, /) -> array:
def ceil(x: array, /) -> array:
def cos(x: array, /) -> array:
def cosh(x: array, /) -> array:
def divide(x1: array, x2: array, /) -> array:
def equal(x1: array, x2: array, /) -> array:
def exp(x: array, /) -> array:
def expm1(x: array, /) -> array:
def floor(x: array, /) -> array:
def floor_divide(x1: array, x2: array, /) -> array:
def greater(x1: array, x2: array, /) -> array:
def greater_equal(x1: array, x2: array, /) -> array:
def isfinite(x: array, /) -> array:
def isinf(x: array, /) -> array:
def isnan(x: array, /) -> array:
def less(x1: array, x2: array, /) -> array:
def less_equal(x1: array, x2: array, /) -> array:
def log(x: array, /) -> array:
def log1p(x: array, /) -> array:
def log2(x: array, /) -> array:
def log10(x: array, /) -> array:
def logaddexp(x1: array, x2: array, /) -> array:
def logical_and(x1: array, x2: array, /) -> array:
def logical_not(x: array, /) -> array:
def logical_or(x1: array, x2: array, /) -> array:
def logical_xor(x1: array, x2: array, /) -> array:
def multiply(x1: array, x2: array, /) -> array:
def negative(x: array, /) -> array:
def not_equal(x1: array, x2: array, /) -> array:
def positive(x: array, /) -> array:
def pow(x1: array, x2: array, /) -> array:
def remainder(x1: array, x2: array, /) -> array:
def round(x: array, /) -> array:
def sign(x: array, /) -> array:
def sin(x: array, /) -> array:
def sinh(x: array, /) -> array:
def square(x: array, /) -> array:
def sqrt(x: array, /) -> array:
def subtract(x1: array, x2: array, /) -> array:
def tan(x: array, /) -> array:
def tanh(x: array, /) -> array:
def trunc(x: array, /) -> array:
__all__ = ['abs', 'acos', 'acosh', 'add', 'asin', 'asinh', 'atan', 'atan2', 'atanh', 'bitwise_and', 'bitwise_left_shift', 'bitwise_invert', 'bitwise_or', 'bitwise_right_shift', 'bitwise_xor', 'ceil', 'cos', 'cosh', 'divide', 'equal', 'exp', 'expm1', 'floor', 'floor_divide', 'greater', 'greater_equal', 'isfinite', 'isinf', 'isnan', 'less', 'less_equal', 'log', 'log1p', 'log2', 'log10', 'logaddexp', 'logical_and', 'logical_not', 'logical_or', 'logical_xor', 'multiply', 'negative', 'not_equal', 'positive', 'pow', 'remainder', 'round', 'sign', 'sin', 'sinh', 'square', 'sqrt', 'subtract', 'tan', 'tanh', 'trunc'] | true | true |
f72af3b77a6c41b7fa62f8cf773835380670f57a | 130 | py | Python | cwlkernel/__main__.py | codacy-badger/CWLJNIKernel | 89c830d2ab300f3775e4e49cfc2d0fe894170f5e | [
"Apache-2.0"
] | null | null | null | cwlkernel/__main__.py | codacy-badger/CWLJNIKernel | 89c830d2ab300f3775e4e49cfc2d0fe894170f5e | [
"Apache-2.0"
] | null | null | null | cwlkernel/__main__.py | codacy-badger/CWLJNIKernel | 89c830d2ab300f3775e4e49cfc2d0fe894170f5e | [
"Apache-2.0"
] | null | null | null | from ipykernel.kernelapp import IPKernelApp
from .CWLKernel import CWLKernel
IPKernelApp.launch_instance(kernel_class=CWLKernel) | 26 | 51 | 0.876923 | from ipykernel.kernelapp import IPKernelApp
from .CWLKernel import CWLKernel
IPKernelApp.launch_instance(kernel_class=CWLKernel) | true | true |
f72af4bbb77cd40f08c0addf4a50faf422264aa8 | 7,875 | py | Python | TorchRay/torchray/benchmark/evaluate_imagenet_gradcam_energy_inside_bbox.py | UMBCvision/Consistent-Explanations-by-Contrastive-Learning | 589ff89cbcc96a1d8bd8d5b7bd7a785448ed2de3 | [
"MIT"
] | null | null | null | TorchRay/torchray/benchmark/evaluate_imagenet_gradcam_energy_inside_bbox.py | UMBCvision/Consistent-Explanations-by-Contrastive-Learning | 589ff89cbcc96a1d8bd8d5b7bd7a785448ed2de3 | [
"MIT"
] | null | null | null | TorchRay/torchray/benchmark/evaluate_imagenet_gradcam_energy_inside_bbox.py | UMBCvision/Consistent-Explanations-by-Contrastive-Learning | 589ff89cbcc96a1d8bd8d5b7bd7a785448ed2de3 | [
"MIT"
] | null | null | null | import argparse
import time
import numpy as np
import torch
import torch.nn as nn
import torch.nn.parallel
import torch.backends.cudnn as cudnn
import torch.optim
import torch.utils.data.distributed
import torchvision.transforms as transforms
import resnet_multigpu_cgc as resnet
import cv2
import datasets as pointing_datasets
"""
Here, we evaluate the content heatmap (Grad-CAM heatmap within object bounding box) on the imagenet dataset.
"""
model_names = ['resnet18', 'resnet50']
parser = argparse.ArgumentParser(description='PyTorch ImageNet Training')
parser.add_argument('data', metavar='DIR', help='path to dataset')
parser.add_argument('-a', '--arch', metavar='ARCH', default='resnet18',
choices=model_names,
help='model architecture: ' +
' | '.join(model_names) +
' (default: resnet18)')
parser.add_argument('-j', '--workers', default=16, type=int, metavar='N',
help='number of data loading workers (default: 16)')
parser.add_argument('-b', '--batch-size', default=256, type=int,
metavar='N', help='mini-batch size (default: 96)')
parser.add_argument('--pretrained', dest='pretrained', action='store_true',
help='use pre-trained model')
parser.add_argument('-g', '--num-gpus', default=1, type=int,
metavar='N', help='number of GPUs to match (default: 4)')
parser.add_argument('--resume', default='', type=str, metavar='PATH',
help='path to latest checkpoint (default: none)')
parser.add_argument('--input_resize', default=224, type=int,
metavar='N', help='Resize for smallest side of input (default: 224)')
def main():
global args
args = parser.parse_args()
if args.pretrained:
print("=> using pre-trained model '{}'".format(args.arch))
if args.arch.startswith('resnet'):
model = resnet.__dict__[args.arch](pretrained=True)
else:
assert False, 'Unsupported architecture: {}'.format(args.arch)
else:
print("=> creating model '{}'".format(args.arch))
if args.arch.startswith('resnet'):
model = resnet.__dict__[args.arch]()
model = torch.nn.DataParallel(model).cuda()
if args.resume:
print("=> loading checkpoint '{}'".format(args.resume))
checkpoint = torch.load(args.resume)
model.load_state_dict(checkpoint['state_dict'])
if (not args.resume) and (not args.pretrained):
assert False, "Please specify either the pre-trained model or checkpoint for evaluation"
cudnn.benchmark = True
normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])
# Here, we don't resize the images. We feed the full image and use AdaptivePooling before FC.
# We will resize Gradcam heatmap to image size and compare the actual bbox co-ordinates
val_dataset = pointing_datasets.ImageNetDetection(args.data,
transform=transforms.Compose([
transforms.Resize(args.input_resize),
transforms.ToTensor(),
normalize,
]))
# we set batch size=1 since we are loading full resolution images.
val_loader = torch.utils.data.DataLoader(
val_dataset, batch_size=1, shuffle=False,
num_workers=args.workers, pin_memory=True)
validate_multi(val_loader, val_dataset, model)
def validate_multi(val_loader, val_dataset, model):
batch_time = AverageMeter()
heatmap_inside_bbox = AverageMeter()
# switch to evaluate mode
model.eval()
end = time.time()
for i, (images, annotation, targets) in enumerate(val_loader):
images = images.cuda(non_blocking=True)
targets = targets.cuda(non_blocking=True)
# we assume batch size == 1 and unwrap the first elem of every list in annotation object
annotation = unwrap_dict(annotation)
image_size = val_dataset.as_image_size(annotation)
output, feats = model(images, vanilla_with_feats=True)
output_gradcam = compute_gradcam(output, feats, targets)
output_gradcam_np = output_gradcam.data.cpu().numpy()[0] # since we have batch size==1
resized_output_gradcam = cv2.resize(output_gradcam_np, image_size)
spatial_sum = resized_output_gradcam.sum()
if spatial_sum <= 0:
# We ignore images with zero Grad-CAM
continue
# resized_output_gradcam is now normalized and can be considered as probabilities
resized_output_gradcam = resized_output_gradcam / spatial_sum
mask = pointing_datasets.imagenet_as_mask(annotation, targets[0].item())
mask = mask.type(torch.ByteTensor)
mask = mask.cpu().data.numpy()
gcam_inside_gt_mask = mask * resized_output_gradcam
# Now we sum the heatmap inside the object bounding box
total_gcam_inside_gt_mask = gcam_inside_gt_mask.sum()
heatmap_inside_bbox.update(total_gcam_inside_gt_mask)
if i % 1000 == 0:
print('\nResults after {} examples: '.format(i+1))
print('Curr % of heatmap inside bbox: {:.4f} ({:.4f})'.format(heatmap_inside_bbox.val * 100,
heatmap_inside_bbox.avg * 100))
# measure elapsed time
batch_time.update(time.time() - end)
end = time.time()
print('\nFinal Results - ')
print('\n\n% of heatmap inside bbox: {:.4f}'.format(heatmap_inside_bbox.avg * 100))
return
def compute_gradcam(output, feats, target):
"""
Compute the gradcam for the top predicted category
:param output:
:param feats:
:param target:
:return:
"""
eps = 1e-8
relu = nn.ReLU(inplace=True)
target = target.cpu().numpy()
one_hot = np.zeros((output.shape[0], output.shape[-1]), dtype=np.float32)
indices_range = np.arange(output.shape[0])
one_hot[indices_range, target[indices_range]] = 1
one_hot = torch.from_numpy(one_hot)
one_hot.requires_grad = True
# Compute the Grad-CAM for the original image
one_hot_cuda = torch.sum(one_hot.cuda() * output)
dy_dz1, = torch.autograd.grad(one_hot_cuda, feats, grad_outputs=torch.ones(one_hot_cuda.size()).cuda(),
retain_graph=True, create_graph=True)
# Changing to dot product of grad and features to preserve grad spatial locations
gcam512_1 = dy_dz1 * feats
gradcam = gcam512_1.sum(dim=1)
gradcam = relu(gradcam)
spatial_sum1 = gradcam.sum(dim=[1, 2]).unsqueeze(-1).unsqueeze(-1)
gradcam = (gradcam / (spatial_sum1 + eps)) + eps
return gradcam
def unwrap_dict(dict_object):
new_dict = {}
for k, v in dict_object.items():
if k == 'object':
new_v_list = []
for elem in v:
new_v_list.append(unwrap_dict(elem))
new_dict[k] = new_v_list
continue
if isinstance(v, dict):
new_v = unwrap_dict(v)
elif isinstance(v, list) and len(v) == 1:
new_v = v[0]
else:
new_v = v
new_dict[k] = new_v
return new_dict
class AverageMeter(object):
"""Computes and stores the average and current value"""
def __init__(self):
self.reset()
def reset(self):
self.val = 0
self.avg = 0
self.sum = 0
self.count = 0
def update(self, val, n=1):
self.val = val
self.sum += val * n
self.count += n
self.avg = self.sum / self.count
if __name__ == '__main__':
main()
| 36.971831 | 112 | 0.616381 | import argparse
import time
import numpy as np
import torch
import torch.nn as nn
import torch.nn.parallel
import torch.backends.cudnn as cudnn
import torch.optim
import torch.utils.data.distributed
import torchvision.transforms as transforms
import resnet_multigpu_cgc as resnet
import cv2
import datasets as pointing_datasets
model_names = ['resnet18', 'resnet50']
parser = argparse.ArgumentParser(description='PyTorch ImageNet Training')
parser.add_argument('data', metavar='DIR', help='path to dataset')
parser.add_argument('-a', '--arch', metavar='ARCH', default='resnet18',
choices=model_names,
help='model architecture: ' +
' | '.join(model_names) +
' (default: resnet18)')
parser.add_argument('-j', '--workers', default=16, type=int, metavar='N',
help='number of data loading workers (default: 16)')
parser.add_argument('-b', '--batch-size', default=256, type=int,
metavar='N', help='mini-batch size (default: 96)')
parser.add_argument('--pretrained', dest='pretrained', action='store_true',
help='use pre-trained model')
parser.add_argument('-g', '--num-gpus', default=1, type=int,
metavar='N', help='number of GPUs to match (default: 4)')
parser.add_argument('--resume', default='', type=str, metavar='PATH',
help='path to latest checkpoint (default: none)')
parser.add_argument('--input_resize', default=224, type=int,
metavar='N', help='Resize for smallest side of input (default: 224)')
def main():
global args
args = parser.parse_args()
if args.pretrained:
print("=> using pre-trained model '{}'".format(args.arch))
if args.arch.startswith('resnet'):
model = resnet.__dict__[args.arch](pretrained=True)
else:
assert False, 'Unsupported architecture: {}'.format(args.arch)
else:
print("=> creating model '{}'".format(args.arch))
if args.arch.startswith('resnet'):
model = resnet.__dict__[args.arch]()
model = torch.nn.DataParallel(model).cuda()
if args.resume:
print("=> loading checkpoint '{}'".format(args.resume))
checkpoint = torch.load(args.resume)
model.load_state_dict(checkpoint['state_dict'])
if (not args.resume) and (not args.pretrained):
assert False, "Please specify either the pre-trained model or checkpoint for evaluation"
cudnn.benchmark = True
normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])
# We will resize Gradcam heatmap to image size and compare the actual bbox co-ordinates
val_dataset = pointing_datasets.ImageNetDetection(args.data,
transform=transforms.Compose([
transforms.Resize(args.input_resize),
transforms.ToTensor(),
normalize,
]))
# we set batch size=1 since we are loading full resolution images.
val_loader = torch.utils.data.DataLoader(
val_dataset, batch_size=1, shuffle=False,
num_workers=args.workers, pin_memory=True)
validate_multi(val_loader, val_dataset, model)
def validate_multi(val_loader, val_dataset, model):
batch_time = AverageMeter()
heatmap_inside_bbox = AverageMeter()
# switch to evaluate mode
model.eval()
end = time.time()
for i, (images, annotation, targets) in enumerate(val_loader):
images = images.cuda(non_blocking=True)
targets = targets.cuda(non_blocking=True)
# we assume batch size == 1 and unwrap the first elem of every list in annotation object
annotation = unwrap_dict(annotation)
image_size = val_dataset.as_image_size(annotation)
output, feats = model(images, vanilla_with_feats=True)
output_gradcam = compute_gradcam(output, feats, targets)
output_gradcam_np = output_gradcam.data.cpu().numpy()[0] # since we have batch size==1
resized_output_gradcam = cv2.resize(output_gradcam_np, image_size)
spatial_sum = resized_output_gradcam.sum()
if spatial_sum <= 0:
# We ignore images with zero Grad-CAM
continue
# resized_output_gradcam is now normalized and can be considered as probabilities
resized_output_gradcam = resized_output_gradcam / spatial_sum
mask = pointing_datasets.imagenet_as_mask(annotation, targets[0].item())
mask = mask.type(torch.ByteTensor)
mask = mask.cpu().data.numpy()
gcam_inside_gt_mask = mask * resized_output_gradcam
# Now we sum the heatmap inside the object bounding box
total_gcam_inside_gt_mask = gcam_inside_gt_mask.sum()
heatmap_inside_bbox.update(total_gcam_inside_gt_mask)
if i % 1000 == 0:
print('\nResults after {} examples: '.format(i+1))
print('Curr % of heatmap inside bbox: {:.4f} ({:.4f})'.format(heatmap_inside_bbox.val * 100,
heatmap_inside_bbox.avg * 100))
# measure elapsed time
batch_time.update(time.time() - end)
end = time.time()
print('\nFinal Results - ')
print('\n\n% of heatmap inside bbox: {:.4f}'.format(heatmap_inside_bbox.avg * 100))
return
def compute_gradcam(output, feats, target):
    """Compute a normalized Grad-CAM heatmap for the target classes.

    output: model logits, shape (batch, num_classes).
    feats:  convolutional feature maps the logits were computed from;
            presumably (batch, channels, H, W) — TODO confirm with caller.
    target: per-example class indices (tensor on any device).

    Returns a tensor of shape (batch, H, W) where each map is non-negative
    and sums (approximately, up to the eps terms) to 1.
    """
    eps = 1e-8
    relu = nn.ReLU(inplace=True)
    target = target.cpu().numpy()
    # One-hot selector over classes: picks out each example's target logit.
    one_hot = np.zeros((output.shape[0], output.shape[-1]), dtype=np.float32)
    indices_range = np.arange(output.shape[0])
    one_hot[indices_range, target[indices_range]] = 1
    one_hot = torch.from_numpy(one_hot)
    one_hot.requires_grad = True
    # Compute the Grad-CAM for the original image
    one_hot_cuda = torch.sum(one_hot.cuda() * output)
    # create_graph=True keeps the computation differentiable for any
    # downstream second-order use; retain_graph allows reuse of the graph.
    dy_dz1, = torch.autograd.grad(one_hot_cuda, feats, grad_outputs=torch.ones(one_hot_cuda.size()).cuda(),
                                  retain_graph=True, create_graph=True)
    # Changing to dot product of grad and features to preserve grad spatial locations
    gcam512_1 = dy_dz1 * feats
    gradcam = gcam512_1.sum(dim=1)
    gradcam = relu(gradcam)
    # Normalize each example's map by its spatial sum; eps guards div-by-zero.
    spatial_sum1 = gradcam.sum(dim=[1, 2]).unsqueeze(-1).unsqueeze(-1)
    gradcam = (gradcam / (spatial_sum1 + eps)) + eps
    return gradcam
def unwrap_dict(dict_object):
    """Recursively collapse DataLoader-style singleton lists in a nested dict.

    Values that are one-element lists become the bare element; nested dicts
    are unwrapped recursively. The 'object' key is special-cased: its value
    is a genuine list of annotation dicts, so each entry is unwrapped but
    the list itself is preserved.
    """
    unwrapped = {}
    for key, value in dict_object.items():
        if key == 'object':
            # Keep the list structure; unwrap each annotation dict inside it.
            unwrapped[key] = [unwrap_dict(entry) for entry in value]
        elif isinstance(value, dict):
            unwrapped[key] = unwrap_dict(value)
        elif isinstance(value, list) and len(value) == 1:
            unwrapped[key] = value[0]
        else:
            unwrapped[key] = value
    return unwrapped
class AverageMeter(object):
    """Track the latest value, running sum, sample count and mean of a metric."""

    def __init__(self):
        self.reset()

    def reset(self):
        """Zero out all tracked statistics."""
        self.val = 0    # most recently observed value
        self.sum = 0    # weighted running total
        self.count = 0  # number of samples seen
        self.avg = 0    # running mean (sum / count)

    def update(self, val, n=1):
        """Record ``val`` observed ``n`` times and refresh the running mean."""
        self.val = val
        self.sum = self.sum + val * n
        self.count = self.count + n
        self.avg = self.sum / self.count
if __name__ == '__main__':
main()
| true | true |
f72af5153bd9f88566e3d6863c7c6bad63faba5c | 558 | py | Python | var_global_local.py | Spy142/python_lesson_4 | 1539576301c2bf61be803be7846c9278f350a0f3 | [
"MIT"
] | null | null | null | var_global_local.py | Spy142/python_lesson_4 | 1539576301c2bf61be803be7846c9278f350a0f3 | [
"MIT"
] | null | null | null | var_global_local.py | Spy142/python_lesson_4 | 1539576301c2bf61be803be7846c9278f350a0f3 | [
"MIT"
] | 1 | 2020-09-09T09:27:06.000Z | 2020-09-09T09:27:06.000Z | global_var = 10
def function_example(local_var_1, local_var_2):
    """Demo: function parameters are local, while global_var resolves to the module scope."""
    values = (local_var_1, local_var_2, global_var)
    print(*values)
function_example(11, 12)
def function_example_1(local_var_1, local_var_2):
    """Demo: the ``global`` statement lets a function rebind a module-level name."""
    global global_var
    global_var = 20  # rebinds the module-level variable, not a local one
    details = (local_var_1, local_var_2, global_var, id(global_var))
    print(*details)
function_example_1(11, 12)
print(global_var, id(global_var))
# nonlocal
def counter():
    """Build an incrementing-counter closure (demonstrates ``nonlocal``)."""
    tally = 0

    def plus_one():
        """Advance the enclosed tally by one and return the new total."""
        nonlocal tally
        tally += 1
        return tally

    return plus_one
count = counter()
print(count)
print(count())
print(count())
| 16.909091 | 63 | 0.702509 | global_var = 10
def function_example(local_var_1, local_var_2):
print(local_var_1, local_var_2, global_var)
function_example(11, 12)
def function_example_1(local_var_1, local_var_2):
global global_var
global_var = 20
print(local_var_1, local_var_2, global_var, id(global_var))
function_example_1(11, 12)
print(global_var, id(global_var))
def counter():
num = 0
def plus_one():
nonlocal num
num+=1
return num
return plus_one
count = counter()
print(count)
print(count())
print(count())
| true | true |
f72af6a3f7871c38684b0e461069b71876226a9b | 157 | py | Python | tests/model_control/detailed/transf_None/model_control_one_enabled_None_MovingAverage_Seasonal_Second_MLP.py | shaido987/pyaf | b9afd089557bed6b90b246d3712c481ae26a1957 | [
"BSD-3-Clause"
] | 377 | 2016-10-13T20:52:44.000Z | 2022-03-29T18:04:14.000Z | tests/model_control/detailed/transf_None/model_control_one_enabled_None_MovingAverage_Seasonal_Second_MLP.py | ysdede/pyaf | b5541b8249d5a1cfdc01f27fdfd99b6580ed680b | [
"BSD-3-Clause"
] | 160 | 2016-10-13T16:11:53.000Z | 2022-03-28T04:21:34.000Z | tests/model_control/detailed/transf_None/model_control_one_enabled_None_MovingAverage_Seasonal_Second_MLP.py | ysdede/pyaf | b5541b8249d5a1cfdc01f27fdfd99b6580ed680b | [
"BSD-3-Clause"
] | 63 | 2017-03-09T14:51:18.000Z | 2022-03-27T20:52:57.000Z | import tests.model_control.test_ozone_custom_models_enabled as testmod
testmod.build_model( ['None'] , ['MovingAverage'] , ['Seasonal_Second'] , ['MLP'] ); | 39.25 | 84 | 0.751592 | import tests.model_control.test_ozone_custom_models_enabled as testmod
testmod.build_model( ['None'] , ['MovingAverage'] , ['Seasonal_Second'] , ['MLP'] ); | true | true |
f72af6e888f158710810a4b5ed837ab592f4f7f4 | 3,251 | py | Python | tests/toolkit/utils.py | Devtography/ibpy_native | e3e2a406a8db9bb338953be6dc195b8099379acb | [
"Apache-2.0"
] | 6 | 2020-07-09T20:55:41.000Z | 2022-01-22T15:43:29.000Z | tests/toolkit/utils.py | Devtography/ibpy_native | e3e2a406a8db9bb338953be6dc195b8099379acb | [
"Apache-2.0"
] | 1 | 2021-02-28T13:37:43.000Z | 2021-02-28T13:37:43.000Z | tests/toolkit/utils.py | Devtography/ibpy_native | e3e2a406a8db9bb338953be6dc195b8099379acb | [
"Apache-2.0"
] | 5 | 2020-05-24T19:15:06.000Z | 2022-01-22T15:43:35.000Z | """Utilities for making unittests easier to write."""
# pylint: disable=protected-access
import asyncio
import os
import queue
from typing import Dict, List, Optional, Union
from ibapi import wrapper
from ibpy_native import error
from ibpy_native import models
from ibpy_native.interfaces import delegates
from ibpy_native.interfaces import listeners
from ibpy_native.utils import finishable_queue as fq
#region - General utils
def async_test(fn):
    # pylint: disable=invalid-name
    """Decorator for testing the async functions.

    Runs the wrapped coroutine function to completion on a fresh event loop
    and returns its result.

    Fix: the previous version created a new event loop per call and never
    closed it, leaking one loop (and its selector file descriptor) per test
    and triggering "unclosed event loop" ResourceWarnings.
    """
    def fn_wrapper(*args, **kwargs):
        loop = asyncio.new_event_loop()
        try:
            return loop.run_until_complete(fn(*args, **kwargs))
        finally:
            loop.close()  # always release the loop's resources
    return fn_wrapper
#endregion - General utils
#region - ibpy_native specific
# Constants
IB_HOST: str = os.getenv("IB_HOST", "127.0.0.1")
IB_PORT: int = int(os.getenv("IB_PORT", "4002"))
IB_CLIENT_ID: int = int(os.getenv("IB_CLIENT_ID", "1001"))
IB_ACC_ID: str = os.getenv("IB_ACC_ID", "")
class MockConnectionListener(listeners.ConnectionListener):
    """Mock connection listener.

    Records the most recent connection-state callback so tests can assert
    whether ``on_connected`` / ``on_disconnected`` fired.
    """
    def __init__(self):
        # None until a callback fires; True after on_connected,
        # False after on_disconnected.
        self.connected: Optional[bool] = None

    def on_connected(self):
        """Record that the connection was established."""
        self.connected = True

    def on_disconnected(self):
        """Record that the connection was dropped."""
        self.connected = False
class MockNotificationListener(listeners.NotificationListener):
    """Mock notification listener.

    Captures the last notification so tests can assert on its code/message.
    """
    def __init__(self):
        # Sentinel defaults meaning "no notification received yet".
        self.msg_code: int = -1
        self.msg: str = ""

    def on_notify(self, msg_code: int, msg: str):
        """Mock callback implementation: store the latest notification."""
        self.msg_code = msg_code
        self.msg = msg
class MockAccountsManagementDelegate(delegates.AccountsManagementDelegate):
    """Mock accounts delegate.

    Implements the delegate interface with an in-memory account dict and a
    real ``FinishableQueue``; the subscribe/unsubscribe hooks are no-ops.
    """
    def __init__(self):
        # Account ID -> Account model, populated by on_account_list_update.
        self._account_list: Dict[str, models.Account] = {}
        self._account_updates_queue: fq.FinishableQueue = fq.FinishableQueue(
            queue_to_finish=queue.Queue()
        )

    @property
    def accounts(self) -> Dict[str, models.Account]:
        """The accounts received so far, keyed by account ID."""
        return self._account_list

    @property
    def account_updates_queue(self) -> fq.FinishableQueue:
        """Queue on which account update events would be delivered."""
        return self._account_updates_queue

    def on_account_list_update(self, account_list: List[str]):
        """Create/overwrite an Account model for each received account ID."""
        for account_id in account_list:
            self._account_list[account_id] = models.Account(account_id)

    async def sub_account_updates(self, account: models.Account):
        # No-op for tests: no real subscription is made.
        pass

    async def unsub_account_updates(self):
        # No-op for tests.
        pass

    def on_disconnected(self):
        # No-op for tests.
        pass
class MockLiveTicksListener(listeners.LiveTicksListener):
    """Mock live ticks listener.

    Accumulates every received tick and flags completion; errors are
    re-raised so the test fails loudly.
    """
    def __init__(self):
        # All ticks received, in arrival order.
        self.ticks: List[Union[wrapper.HistoricalTick,
                               wrapper.HistoricalTickBidAsk,
                               wrapper.HistoricalTickLast]] = []
        # True once on_finish has been called.
        self.finished = False

    def on_tick_receive(self, req_id: int,
                        tick: Union[wrapper.HistoricalTick,
                                    wrapper.HistoricalTickBidAsk,
                                    wrapper.HistoricalTickLast,]):
        """Store the received tick."""
        self.ticks.append(tick)

    def on_finish(self, req_id: int):
        """Mark the tick stream as complete."""
        self.finished = True

    def on_err(self, err: error.IBError):
        """Propagate errors directly so tests surface them."""
        raise err
#endregion - ibpy_native specific
| 30.669811 | 77 | 0.671486 |
import asyncio
import os
import queue
from typing import Dict, List, Optional, Union
from ibapi import wrapper
from ibpy_native import error
from ibpy_native import models
from ibpy_native.interfaces import delegates
from ibpy_native.interfaces import listeners
from ibpy_native.utils import finishable_queue as fq
def async_test(fn):
def fn_wrapper(*args, **kwargs):
loop = asyncio.new_event_loop()
return loop.run_until_complete(fn(*args, **kwargs))
return fn_wrapper
IB_HOST: str = os.getenv("IB_HOST", "127.0.0.1")
IB_PORT: int = int(os.getenv("IB_PORT", "4002"))
IB_CLIENT_ID: int = int(os.getenv("IB_CLIENT_ID", "1001"))
IB_ACC_ID: str = os.getenv("IB_ACC_ID", "")
class MockConnectionListener(listeners.ConnectionListener):
def __init__(self):
self.connected: Optional[bool] = None
def on_connected(self):
self.connected = True
def on_disconnected(self):
self.connected = False
class MockNotificationListener(listeners.NotificationListener):
def __init__(self):
self.msg_code = -1
self.msg = ""
def on_notify(self, msg_code: int, msg: str):
self.msg_code = msg_code
self.msg = msg
class MockAccountsManagementDelegate(delegates.AccountsManagementDelegate):
def __init__(self):
self._account_list: Dict[str, models.Account] = {}
self._account_updates_queue: fq.FinishableQueue = fq.FinishableQueue(
queue_to_finish=queue.Queue()
)
@property
def accounts(self) -> Dict[str, models.Account]:
return self._account_list
@property
def account_updates_queue(self) -> fq.FinishableQueue:
return self._account_updates_queue
def on_account_list_update(self, account_list: List[str]):
for account_id in account_list:
self._account_list[account_id] = models.Account(account_id)
async def sub_account_updates(self, account: models.Account):
pass
async def unsub_account_updates(self):
pass
def on_disconnected(self):
pass
class MockLiveTicksListener(listeners.LiveTicksListener):
def __init__(self):
self.ticks: List[Union[wrapper.HistoricalTick,
wrapper.HistoricalTickBidAsk,
wrapper.HistoricalTickLast]] = []
self.finished = False
def on_tick_receive(self, req_id: int,
tick: Union[wrapper.HistoricalTick,
wrapper.HistoricalTickBidAsk,
wrapper.HistoricalTickLast,]):
self.ticks.append(tick)
def on_finish(self, req_id: int):
self.finished = True
def on_err(self, err: error.IBError):
raise err
| true | true |
f72af7d6e7b04db16a0baa10f553c130371e0a1e | 1,561 | py | Python | __scraping__/comics.panini.it - scrapy/main-itemloader.py | whitmans-max/python-examples | 881a8f23f0eebc76816a0078e19951893f0daaaa | [
"MIT"
] | 140 | 2017-02-21T22:49:04.000Z | 2022-03-22T17:51:58.000Z | __scraping__/comics.panini.it - scrapy/main-itemloader.py | whitmans-max/python-examples | 881a8f23f0eebc76816a0078e19951893f0daaaa | [
"MIT"
] | 5 | 2017-12-02T19:55:00.000Z | 2021-09-22T23:18:39.000Z | __scraping__/comics.panini.it - scrapy/main-itemloader.py | whitmans-max/python-examples | 881a8f23f0eebc76816a0078e19951893f0daaaa | [
"MIT"
] | 79 | 2017-01-25T10:53:33.000Z | 2022-03-11T16:13:57.000Z | #!/usr/bin/env python3
# date: 2019.08.06
# https://stackoverflow.com/questions/57366488/how-to-pass-the-single-link-in-a-nested-url-scrape
import scrapy
from scrapy.loader import ItemLoader
from scrapy.loader.processors import MapCompose
def clean(text):
    """Normalize a scraped string.

    Replaces non-breaking spaces with regular spaces, then joins the
    stripped newline-separated fragments with single spaces.
    """
    fragments = text.replace('\xa0', ' ').strip().split('\n')
    return ' '.join(fragment.strip() for fragment in fragments)
class ComicscraperItem(scrapy.Item):
    """Scraped magazine entry: title and link come from the listing page,
    price from the per-magazine detail page."""
    title = scrapy.Field(input_processor=MapCompose(clean))  # whitespace-normalized
    link = scrapy.Field()  # detail-page URL, stored as extracted
    price = scrapy.Field(input_processor=MapCompose(clean))  # whitespace-normalized
class PaniniSpider(scrapy.Spider):
    """Spider for the Panini comics store: collects title/link from the
    magazine listing, then follows each link to pick up the price."""
    name = "spiderP"
    start_urls = ["http://comics.panini.it/store/pub_ita_it/magazines.html"]

    def parse(self, response):
        """Parse the listing page: one ItemLoader per magazine anchor."""
        for sel in response.xpath("//div[@class='list-group']//h3/a"):
            l = ItemLoader(item=ComicscraperItem(), selector=sel)
            l.add_xpath('title', './text()')
            l.add_xpath('link', './@href')
            # Follow the detail page; the partially-filled loader travels
            # along in request.meta so parse_isbn can finish it.
            request = scrapy.Request(sel.xpath('./@href').extract_first(), callback=self.parse_isbn, dont_filter=True)
            request.meta['l'] = l
            yield request

    def parse_isbn(self, response):
        """Parse the detail page: add the price and emit the finished item."""
        l = response.meta['l']
        l.add_value('price', response.xpath("//p[@class='special-price']//span/text()").get())
        return l.load_item()
return l.load_item()
from scrapy.crawler import CrawlerProcess
c = CrawlerProcess({
'USER_AGENT': 'Mozilla/5.0',
'FEED_FORMAT': 'csv', # csv, json, xml
'FEED_URI': 'output.csv', #
})
c.crawl(PaniniSpider)
c.start()
| 31.22 | 118 | 0.643177 |
import scrapy
from scrapy.loader import ItemLoader
from scrapy.loader.processors import MapCompose
def clean(text):
text = text.replace('\xa0', ' ')
text = text.strip().split('\n')
text = ' '.join(x.strip() for x in text)
return text
class ComicscraperItem(scrapy.Item):
title = scrapy.Field(input_processor=MapCompose(clean))
link = scrapy.Field()
price = scrapy.Field(input_processor=MapCompose(clean))
class PaniniSpider(scrapy.Spider):
name = "spiderP"
start_urls = ["http://comics.panini.it/store/pub_ita_it/magazines.html"]
def parse(self, response):
for sel in response.xpath("//div[@class='list-group']//h3/a"):
l = ItemLoader(item=ComicscraperItem(), selector=sel)
l.add_xpath('title', './text()')
l.add_xpath('link', './@href')
request = scrapy.Request(sel.xpath('./@href').extract_first(), callback=self.parse_isbn, dont_filter=True)
request.meta['l'] = l
yield request
def parse_isbn(self, response):
l = response.meta['l']
l.add_value('price', response.xpath("//p[@class='special-price']//span/text()").get())
return l.load_item()
from scrapy.crawler import CrawlerProcess
c = CrawlerProcess({
'USER_AGENT': 'Mozilla/5.0',
'FEED_FORMAT': 'csv',
'FEED_URI': 'output.csv',
})
c.crawl(PaniniSpider)
c.start()
| true | true |
f72af7e4a722a6457a4e5bb9862634b05fb4b74c | 3,915 | py | Python | sendSMSSkillLambda/package/ask_sdk_model/interfaces/geolocation/altitude.py | shneydor/aws-alexa-lambda-workshop | 0fa6b7067b04fc85c46b9ce1c2cc04554ed5baf4 | [
"Apache-2.0"
] | null | null | null | sendSMSSkillLambda/package/ask_sdk_model/interfaces/geolocation/altitude.py | shneydor/aws-alexa-lambda-workshop | 0fa6b7067b04fc85c46b9ce1c2cc04554ed5baf4 | [
"Apache-2.0"
] | null | null | null | sendSMSSkillLambda/package/ask_sdk_model/interfaces/geolocation/altitude.py | shneydor/aws-alexa-lambda-workshop | 0fa6b7067b04fc85c46b9ce1c2cc04554ed5baf4 | [
"Apache-2.0"
] | 1 | 2019-10-11T17:15:20.000Z | 2019-10-11T17:15:20.000Z | # coding: utf-8
#
# Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file
# except in compliance with the License. A copy of the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
# the specific language governing permissions and limitations under the License.
#
import pprint
import re # noqa: F401
import six
import typing
from enum import Enum
if typing.TYPE_CHECKING:
from typing import Dict, List, Optional, Union
from datetime import datetime
class Altitude(object):
    """
    An object containing the altitude information of the device.

    :param altitude_in_meters: A double representing the altitude of the device in meters.
    :type altitude_in_meters: (optional) float
    :param accuracy_in_meters: A double representing the accuracy of the altitude measurement in meters.
    :type accuracy_in_meters: (optional) float
    """
    deserialized_types = {
        'altitude_in_meters': 'float',
        'accuracy_in_meters': 'float'
    }  # type: Dict

    attribute_map = {
        'altitude_in_meters': 'altitudeInMeters',
        'accuracy_in_meters': 'accuracyInMeters'
    }  # type: Dict

    def __init__(self, altitude_in_meters=None, accuracy_in_meters=None):
        # type: (Optional[float], Optional[float]) -> None
        """An object containing the altitude information of the device.

        :param altitude_in_meters: A double representing the altitude of the device in meters.
        :type altitude_in_meters: (optional) float
        :param accuracy_in_meters: A double representing the accuracy of the altitude measurement in meters.
        :type accuracy_in_meters: (optional) float
        """
        self.__discriminator_value = None  # type: str
        self.altitude_in_meters = altitude_in_meters
        self.accuracy_in_meters = accuracy_in_meters

    @staticmethod
    def _plain(element):
        """Convert one contained element to plain data: model -> dict via
        ``to_dict``, Enum -> its value, anything else unchanged."""
        if hasattr(element, "to_dict"):
            return element.to_dict()
        if isinstance(element, Enum):
            return element.value
        return element

    def to_dict(self):
        # type: () -> Dict[str, object]
        """Returns the model properties as a dict"""
        result = {}  # type: Dict
        for attr in self.deserialized_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = [Altitude._plain(elem) for elem in value]
            elif isinstance(value, Enum):
                result[attr] = value.value
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = {key: Altitude._plain(val)
                                for key, val in value.items()}
            else:
                result[attr] = value
        return result

    def to_str(self):
        # type: () -> str
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        # type: () -> str
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        # type: (object) -> bool
        """Returns true if both objects are equal"""
        return isinstance(other, Altitude) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        # type: (object) -> bool
        """Returns true if both objects are not equal"""
        return not self == other
| 34.043478 | 108 | 0.61507 |
import pprint
import re
import six
import typing
from enum import Enum
if typing.TYPE_CHECKING:
from typing import Dict, List, Optional, Union
from datetime import datetime
class Altitude(object):
deserialized_types = {
'altitude_in_meters': 'float',
'accuracy_in_meters': 'float'
}
attribute_map = {
'altitude_in_meters': 'altitudeInMeters',
'accuracy_in_meters': 'accuracyInMeters'
}
def __init__(self, altitude_in_meters=None, accuracy_in_meters=None):
self.__discriminator_value = None
self.altitude_in_meters = altitude_in_meters
self.accuracy_in_meters = accuracy_in_meters
def to_dict(self):
result = {}
for attr, _ in six.iteritems(self.deserialized_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else
x.value if isinstance(x, Enum) else x,
value
))
elif isinstance(value, Enum):
result[attr] = value.value
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else
(item[0], item[1].value)
if isinstance(item[1], Enum) else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
return pprint.pformat(self.to_dict())
def __repr__(self):
return self.to_str()
def __eq__(self, other):
if not isinstance(other, Altitude):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not self == other
| true | true |
f72af7f234a3a7aaf0e57fc752f62d4dd0d648af | 38 | py | Python | frontend/GUI/ROOT_AND_MAIN/USER_WINDOW/USER_FRAME/callbacks.py | Lucianofc138/smart_scheduler_usm | 0ac50d71cfd1947b889a9551c31a3a67ecabfb88 | [
"MIT"
] | null | null | null | frontend/GUI/ROOT_AND_MAIN/USER_WINDOW/USER_FRAME/callbacks.py | Lucianofc138/smart_scheduler_usm | 0ac50d71cfd1947b889a9551c31a3a67ecabfb88 | [
"MIT"
] | null | null | null | frontend/GUI/ROOT_AND_MAIN/USER_WINDOW/USER_FRAME/callbacks.py | Lucianofc138/smart_scheduler_usm | 0ac50d71cfd1947b889a9551c31a3a67ecabfb88 | [
"MIT"
] | null | null | null | def new_user(user_stringvar):
pass | 19 | 29 | 0.763158 | def new_user(user_stringvar):
pass | true | true |
f72af8f3d31d026bd4517c8b3a0509701311dff5 | 4,016 | py | Python | netmiko/exercise4.py | Tes3awy/DevNet-DC | 03b4c7dc82221943bc25d0ab9d74ee2697fcc34c | [
"MIT"
] | null | null | null | netmiko/exercise4.py | Tes3awy/DevNet-DC | 03b4c7dc82221943bc25d0ab9d74ee2697fcc34c | [
"MIT"
] | null | null | null | netmiko/exercise4.py | Tes3awy/DevNet-DC | 03b4c7dc82221943bc25d0ab9d74ee2697fcc34c | [
"MIT"
] | null | null | null | # Export Nexus device show interface brief command output to
# an Excel file
import json
import xlsxwriter
from netmiko import ConnectHandler
# Devices to SSH into
devices = [
{
"device_type": "cisco_nxos",
"ip": "sbx-nxos-mgmt.cisco.com",
"username": "admin",
"password": "Admin_1234!",
"port": 8181,
"fast_cli": False,
"session_log": "nxos-exercise4.log",
},
{
"device_type": "cisco_nxos",
"ip": "192.168.90.46",
"username": "admin",
"password": "P@ssw0rd",
"fast_cli": False,
"session_log": "nxos-exercise4-1.log",
"verbose": True,
},
{
"device_type": "cisco_nxos",
"ip": "192.168.90.47",
"username": "admin",
"password": "P@ssw0rd",
"fast_cli": False,
"session_log": "nxos-exercise4-2.log",
"verbose": True,
},
]
# Create an Excel file
with xlsxwriter.Workbook(filename="Ex4-Nexus-Interfaces-Brief.xlsx") as workbook:
    # Loop over each device
    for device in devices:
        # Connect to each device
        with ConnectHandler(**device) as net_connect:
            # Parse hostname of each device
            hostname = net_connect.send_command(
                command_string="show hostname", use_textfsm=True
            )[0]["hostname"]
            # Parse show interface brief of each device
            intfs = net_connect.send_command(
                command_string="show interface brief", use_textfsm=True
            )
            # Export interfaces to a JSON file for readability (Comment out if you don't need it)
            with open(file=f"{hostname}-intfs-brief.json", mode="w") as outfile:
                json.dump(obj=intfs, fp=outfile, indent=4, sort_keys=True)
            # Create worksheets with the hostname of each device
            worksheet = workbook.add_worksheet(f"{hostname} Interface Brief")
            # Auto Filter for header line
            worksheet.autofilter("A1:L1")
            # Freeze top row and very left column only
            worksheet.freeze_panes(1, 1)
            # Header line (cell -> column title)
            header_line = {
                "A1": "Interface Name",  # 1
                "B1": "IP Address",  # 2
                "C1": "Interface Type",  # 3
                "D1": "Mode",  # 4
                "E1": "VLAN",  # 5
                "F1": "Port-Channel",  # 6
                "G1": "Speed",  # 7
                "H1": "Status",  # 8
                "I1": "MTU",  # 9
                "J1": "VRF",  # 10
                "K1": "Reason",  # 11
                "L1": "Description",  # 12
            }
            # Format header line text
            header_line_frmt = workbook.add_format(
                {
                    "bold": True,
                    "align": "center",
                    "valign": "vcenter",
                    "bg_color": "#0058a0",
                    "font_color": "#FFFFFF",
                }
            )
            # Write header line
            for key, value in header_line.items():
                worksheet.write(key, value, header_line_frmt)
            # Initial Values for row and col
            row = 1
            col = 0
            # Place data according to header line
            # (field names follow the ntc-templates TextFSM output keys)
            for intf in intfs:
                worksheet.write(row, col + 0, intf["interface"])  # Interface Name
                worksheet.write(row, col + 1, intf["ip"])  # IP
                worksheet.write(row, col + 2, intf["type"])  # Type
                worksheet.write(row, col + 3, intf["mode"])  # Mode
                worksheet.write(row, col + 4, intf["vlan"])  # VLAN
                worksheet.write(row, col + 5, intf["portch"])  # Port-Channel
                worksheet.write(row, col + 6, intf["speed"])  # Speed
                worksheet.write(row, col + 7, intf["status"])  # Status
                worksheet.write(row, col + 8, intf["mtu"])  # MTU
                worksheet.write(row, col + 9, intf["vrf"])  # VRF
                worksheet.write(row, col + 10, intf["reason"])  # Reason
                worksheet.write(row, col + 11, intf["description"])  # Description
                # Jump to next row
                row += 1
print("Done")
| 34.033898 | 93 | 0.528884 |
import json
import xlsxwriter
from netmiko import ConnectHandler
devices = [
{
"device_type": "cisco_nxos",
"ip": "sbx-nxos-mgmt.cisco.com",
"username": "admin",
"password": "Admin_1234!",
"port": 8181,
"fast_cli": False,
"session_log": "nxos-exercise4.log",
},
{
"device_type": "cisco_nxos",
"ip": "192.168.90.46",
"username": "admin",
"password": "P@ssw0rd",
"fast_cli": False,
"session_log": "nxos-exercise4-1.log",
"verbose": True,
},
{
"device_type": "cisco_nxos",
"ip": "192.168.90.47",
"username": "admin",
"password": "P@ssw0rd",
"fast_cli": False,
"session_log": "nxos-exercise4-2.log",
"verbose": True,
},
]
with xlsxwriter.Workbook(filename="Ex4-Nexus-Interfaces-Brief.xlsx") as workbook:
for device in devices:
with ConnectHandler(**device) as net_connect:
hostname = net_connect.send_command(
command_string="show hostname", use_textfsm=True
)[0]["hostname"]
intfs = net_connect.send_command(
command_string="show interface brief", use_textfsm=True
)
with open(file=f"{hostname}-intfs-brief.json", mode="w") as outfile:
json.dump(obj=intfs, fp=outfile, indent=4, sort_keys=True)
# Create worksheets with the hostname of each device
worksheet = workbook.add_worksheet(f"{hostname} Interface Brief")
# Auto Filter for header line
worksheet.autofilter("A1:L1")
# Freeze top row and very left column only
worksheet.freeze_panes(1, 1)
# Header line
header_line = {
"A1": "Interface Name", # 1
"B1": "IP Address", # 2
"C1": "Interface Type", # 3
"D1": "Mode", # 4
"E1": "VLAN", # 5
"F1": "Port-Channel", # 6
"G1": "Speed", # 7
"H1": "Status", # 8
"I1": "MTU", # 9
"J1": "VRF", # 10
"K1": "Reason", # 11
"L1": "Description", # 12
}
# Format header line text
header_line_frmt = workbook.add_format(
{
"bold": True,
"align": "center",
"valign": "vcenter",
"bg_color": "#0058a0",
"font_color": "#FFFFFF",
}
)
# Write header line
for key, value in header_line.items():
worksheet.write(key, value, header_line_frmt)
# Initial Values for row and col
row = 1
col = 0
# Place data according to header line
for intf in intfs:
worksheet.write(row, col + 0, intf["interface"]) # Interface Name
worksheet.write(row, col + 1, intf["ip"]) # IP
worksheet.write(row, col + 2, intf["type"]) # Type
worksheet.write(row, col + 3, intf["mode"]) # Mode
worksheet.write(row, col + 4, intf["vlan"]) # VLAN
worksheet.write(row, col + 5, intf["portch"]) # Port-Channel
worksheet.write(row, col + 6, intf["speed"]) # Speed
worksheet.write(row, col + 7, intf["status"]) # Status
worksheet.write(row, col + 8, intf["mtu"]) # MTU
worksheet.write(row, col + 9, intf["vrf"]) # VRF
worksheet.write(row, col + 10, intf["reason"]) # Reason
worksheet.write(row, col + 11, intf["description"]) # Description
# Jump to next row
row += 1
print("Done")
| true | true |
f72afbb1ae862f6cc33248e2ecf5c95000d6017c | 7,390 | py | Python | server/opendp_apps/dataset/dataset_formatter.py | opendifferentialprivacy/opendp-ux | 2669602d0a65f6a83d9e9916cbf753c38fd64c94 | [
"MIT"
] | null | null | null | server/opendp_apps/dataset/dataset_formatter.py | opendifferentialprivacy/opendp-ux | 2669602d0a65f6a83d9e9916cbf753c38fd64c94 | [
"MIT"
] | 82 | 2020-08-06T17:11:12.000Z | 2021-02-07T21:01:05.000Z | server/opendp_apps/dataset/dataset_formatter.py | opendifferentialprivacy/opendp-ux | 2669602d0a65f6a83d9e9916cbf753c38fd64c94 | [
"MIT"
] | 2 | 2020-10-16T22:03:24.000Z | 2020-11-15T22:45:19.000Z | """
Format a DataSetInfo for use in a JSON Release
"""
import json
from opendp_apps.dataset.models import DataSetInfo
from opendp_apps.dataset import static_vals as dstatic
from opendp_apps.model_helpers.basic_err_check import BasicErrCheck
from opendp_apps.model_helpers.basic_response import ok_resp, err_resp, BasicResponse
class DataSetFormatter(BasicErrCheck):
    """Format a DataSetInfo object into a dict for use in a JSON release.

    On any failure ``.has_error()`` is True and ``.get_err_msg()`` holds
    the reason; otherwise the result is available via
    ``.get_formatted_info()``.
    """

    def __init__(self, dataset_info: DataSetInfo):
        """Init with a DataSetInfo object and run the formatter immediately."""
        assert isinstance(dataset_info, DataSetInfo), '"dataset_info" must be a DataSetInfo instance.'
        self.dataset = dataset_info
        self.formatted_info = {}
        self.run_formatter()

    def run_formatter(self):
        """
        Format the dataset info, dispatching on the dataset's source type.
        """
        if self.dataset.source == DataSetInfo.SourceChoices.UserUpload:
            self.dataset = self.dataset.uploadfileinfo  # Get the UploadFileInfo object
            self.format_user_upload()
        elif self.dataset.source == DataSetInfo.SourceChoices.Dataverse:
            self.dataset = self.dataset.dataversefileinfo  # Get the DataverseFileInfo object
            self.format_dataverse_dataset()
        else:
            # Fix: the original message lacked the "f" prefix, so the
            # placeholder was emitted literally instead of interpolated.
            self.add_err_msg(f'Unknown dataset type: {self.dataset.source}')
        return

    def get_formatted_info(self, as_json=False):
        """
        Return the formatted data: a dict, or a JSON string if as_json=True.
        """
        assert self.has_error() is False,\
            "Do not call this method before checking if \".has_error()\" is False"
        if as_json:
            return json.dumps(self.formatted_info, indent=4)
        return self.formatted_info

    def format_user_upload(self):
        """Format a user-uploaded dataset into self.formatted_info."""
        if self.has_error():
            return
        self.formatted_info = {
            'type': self.dataset.source,
            'name': self.dataset.name,
            'creator': self.dataset.creator,
            'created': self.dataset.created,
        }

    def format_dataverse_dataset(self):
        """Format a Dataverse dataset into self.formatted_info.

        (Docstring fixed: the original said "UserUpload" via copy/paste.)
        """
        if self.has_error():
            return

        # Pull citation from self.dataset.dataset_schema_info
        citation_info = self.get_citation_from_dataset_schema_or_None()
        if not citation_info.success:
            self.add_err_msg(citation_info.message)
            return
        citation = citation_info.data

        # Validate that a name exists in the schema; the display name itself
        # comes from self.dataset.name, so the returned value is unused.
        name_info = self.get_name_from_dataset_schema()
        if not name_info.success:
            self.add_err_msg(name_info.message)
            return

        # Format info in self.dataset.file_schema_info
        file_info = self.get_file_info()
        if not file_info.success:
            self.add_err_msg(file_info.message)
            return
        file_dict = file_info.data

        self.formatted_info = {
            'type': self.dataset.source,
            'name': self.dataset.name,
            "citation": citation,
            "doi": self.dataset.dataset_doi,
            "identifier": self.get_dataset_identifier_or_none(),
            'release_deposit_info': {
                "deposited": False,
                # if True, add: "release_url": "some-url"
                # update with https://github.com/opendp/dpcreator/issues/34
                # "release_urls": {
                #    "release_json": "http://dataverse.edu/some.json",
                #    "release_pdf": "http://dataverse.edu/some.pdf"
                # }
            },
            'installation': {
                "name": self.dataset.dv_installation.name,
                "url": self.dataset.dv_installation.dataverse_url
            },
            "file_information": file_dict
        }

    def get_name_from_dataset_schema(self) -> BasicResponse:
        """
        Return the "name" text from self.dataset.dataset_schema_info.

        Success: ok_resp(name string); failure: err_resp(reason).
        """
        if self.has_error():
            # Shouldn't happen...
            return err_resp(self.get_err_msg())

        if not self.dataset.dataset_schema_info:
            return err_resp('".dataset_schema_info" is empty')

        if 'name' not in self.dataset.dataset_schema_info:
            # Fix: original message had a duplicated trailing "not found"
            return err_resp('"name" not found in ".dataset_schema_info"')

        ds_name = self.dataset.dataset_schema_info['name']
        if not ds_name:
            return err_resp('"name" within ".dataset_schema_info" is empty')

        return ok_resp(ds_name)

    def get_dataset_identifier_or_none(self):
        """Return the identifier within dataset_schema_info['@id'], or None.

        Fixes two defects in the original implementation:
        - membership was tested on the *value* (``... in schema['@id']``)
          instead of on the schema dict itself, raising KeyError/TypeError;
        - the return statement referenced the typo "elf" instead of "self",
          raising NameError whenever the branch was taken.
        Also guards against dataset_schema_info being empty/None.
        """
        schema_info = self.dataset.dataset_schema_info
        if schema_info and '@id' in schema_info:
            return schema_info['@id']
        return None

    def get_citation_from_dataset_schema_or_None(self):
        """
        Return the citation text from self.dataset.dataset_schema_info.

        Success: ok_resp(citation string, or None when no 'citation' key);
        failure: err_resp(reason). Expects the citation value shaped like
        ``[{'text': ...}, ...]``.
        """
        if self.has_error():
            # Shouldn't happen...
            return err_resp(self.get_err_msg())

        if not self.dataset.dataset_schema_info:
            return err_resp('".dataset_schema_info" is empty')

        if 'citation' not in self.dataset.dataset_schema_info:
            # A missing citation is allowed; treat it as None.
            return ok_resp(None)

        # If the citation key is found, then do error checking....
        if (not self.dataset.dataset_schema_info['citation']) or \
                (not isinstance(self.dataset.dataset_schema_info['citation'], list)):
            return err_resp('"citation" within ".dataset_schema_info" is empty or not a list')

        if 'text' not in self.dataset.dataset_schema_info['citation'][0]:
            return err_resp('"[\'citation\'][0][\'text\']" not found in ".dataset_schema_info"')

        return ok_resp(self.dataset.dataset_schema_info['citation'][0]['text'])

    def get_file_info(self):
        """
        Return information from the "DataverseFileInfo.file_schema_info" field.

        Ideal:
            {
              "name": "crisis.tab"
              "identifier": "https://doi.org/10.7910/DVN/OLD7MB/ZI4N3J",
              "fileFormat": "text/tab-separated-values",
            }
        "identifier" and "fileFormat" default to None when absent;
        a missing "name" is an error.
        """
        if self.has_error():
            # Shouldn't happen!
            return err_resp(self.get_err_msg())

        if not self.dataset.file_schema_info:
            return err_resp('".file_schema_info" is empty')

        if 'name' not in self.dataset.file_schema_info:
            # Fix: original message had a duplicated trailing "not found"
            return err_resp('"name" not found in ".file_schema_info"')

        file_dict = {
            'name': self.dataset.file_schema_info['name'],
            'identifier': self.dataset.file_schema_info.get('identifier'),
            'fileFormat': self.dataset.file_schema_info.get('fileFormat'),
        }
        return ok_resp(file_dict)
| 34.858491 | 102 | 0.604195 | import json
from opendp_apps.dataset.models import DataSetInfo
from opendp_apps.dataset import static_vals as dstatic
from opendp_apps.model_helpers.basic_err_check import BasicErrCheck
from opendp_apps.model_helpers.basic_response import ok_resp, err_resp, BasicResponse
class DataSetFormatter(BasicErrCheck):
    """Build a serializable dict describing a DataSetInfo object.

    Dispatches on the dataset source (user upload vs. Dataverse) and stores
    the result in ``self.formatted_info``.  Errors are reported through the
    BasicErrCheck machinery (``has_error()`` / ``get_err_msg()``).
    """

    def __init__(self, dataset_info: DataSetInfo):
        assert isinstance(dataset_info, DataSetInfo), '"dataset_info" must be a DataSetInfo instance.'
        self.dataset = dataset_info
        self.formatted_info = {}
        self.run_formatter()

    def run_formatter(self):
        """Dispatch to the formatter matching the dataset's source."""
        if self.dataset.source == DataSetInfo.SourceChoices.UserUpload:
            self.dataset = self.dataset.uploadfileinfo
            self.format_user_upload()
        elif self.dataset.source == DataSetInfo.SourceChoices.Dataverse:
            self.dataset = self.dataset.dataversefileinfo
            self.format_dataverse_dataset()
        else:
            # Bug fix: the original literal lacked the f-prefix, so the
            # placeholder text was emitted verbatim instead of the source.
            self.add_err_msg(f'Unknown dataset type: {self.dataset.source}')
            return

    def get_formatted_info(self, as_json=False):
        """Return the formatted dict, or a pretty-printed JSON string."""
        assert self.has_error() is False,\
            "Do not call this method before checking if \".has_error()\" is False"
        if as_json:
            return json.dumps(self.formatted_info, indent=4)
        return self.formatted_info

    def format_user_upload(self):
        """Format a user-uploaded dataset (name/creator/created only)."""
        if self.has_error():
            return
        self.formatted_info = {
            'type': self.dataset.source,
            'name': self.dataset.name,
            'creator': self.dataset.creator,
            'created': self.dataset.created,
        }

    def format_dataverse_dataset(self):
        """Format a Dataverse dataset, including citation and file info."""
        if self.has_error():
            return

        citation_info = self.get_citation_from_dataset_schema_or_None()
        if citation_info.success:
            citation = citation_info.data
        else:
            self.add_err_msg(citation_info.message)
            return

        # Validate that the schema carries a name; the value itself is not
        # used below (self.dataset.name is used), but a bad schema is fatal.
        name_info = self.get_name_from_dataset_schema()
        if not name_info.success:
            self.add_err_msg(name_info.message)
            return

        file_info = self.get_file_info()
        if file_info.success:
            file_dict = file_info.data
        else:
            self.add_err_msg(file_info.message)
            return

        self.formatted_info = {
            'type': self.dataset.source,
            'name': self.dataset.name,
            "citation": citation,
            "doi": self.dataset.dataset_doi,
            "identifier": self.get_dataset_identifier_or_none(),
            'release_deposit_info': {
                "deposited": False,
            },
            'installation': {
                "name": self.dataset.dv_installation.name,
                "url": self.dataset.dv_installation.dataverse_url
            },
            "file_information": file_dict
        }

    def get_name_from_dataset_schema(self) -> BasicResponse:
        """Return the dataset name from ``dataset_schema_info``."""
        if self.has_error():
            return err_resp(self.get_err_msg())

        schema_info = self.dataset.dataset_schema_info
        if not schema_info:
            return err_resp('".dataset_schema_info" is empty')

        if 'name' not in schema_info:
            # Bug fix: message previously read '... not found in ... not found'
            return err_resp('"name" not found in ".dataset_schema_info"')

        ds_name = schema_info['name']
        if not ds_name:
            return err_resp('"name" within ".dataset_schema_info" is empty')

        return ok_resp(ds_name)

    def get_dataset_identifier_or_none(self):
        """Return the '@id' value from ``dataset_schema_info``, or None.

        Bug fixes: the membership test was previously made against the
        '@id' *value* (``'@id' in ...['@id']``, a KeyError when absent)
        and the return line referenced the typo ``elf`` instead of ``self``.
        """
        if '@id' in self.dataset.dataset_schema_info:
            return self.dataset.dataset_schema_info['@id']
        return None

    def get_citation_from_dataset_schema_or_None(self):
        """Return citation text from ``dataset_schema_info['citation'][0]['text']``.

        A missing 'citation' key returns ok_resp(None); structural problems
        return an err_resp.
        """
        if self.has_error():
            # Shouldn't happen...
            return err_resp(self.get_err_msg())

        schema_info = self.dataset.dataset_schema_info
        if not schema_info:
            return err_resp('".dataset_schema_info" is empty')

        if 'citation' not in schema_info:
            return ok_resp(None)

        citation = schema_info['citation']
        if not citation or not isinstance(citation, list):
            return err_resp('"citation" within ".dataset_schema_info" is empty or not a list')

        if 'text' not in citation[0]:
            return err_resp('"[\'citation\'][0][\'text\']" not found in ".dataset_schema_info"')

        return ok_resp(citation[0]['text'])

    def get_file_info(self):
        """Return name/identifier/fileFormat from ``file_schema_info``.

        "name" is required; the other two keys default to None.
        """
        if self.has_error():
            return err_resp(self.get_err_msg())

        file_schema = self.dataset.file_schema_info
        if not file_schema:
            return err_resp('".file_schema_info" is empty')

        if 'name' not in file_schema:
            # Bug fix: message previously read
            # '"name" not found in ".file_schema_info" not found'
            return err_resp('"name" not found in ".file_schema_info"')

        file_dict = {
            'name': file_schema['name'],
            'identifier': file_schema.get('identifier'),
            'fileFormat': file_schema.get('fileFormat'),
        }
        return ok_resp(file_dict)
| true | true |
f72afc6fd07bcfad6b0ce2194a5a5dfd54a13f25 | 9,191 | py | Python | 04_test.py | 500kg/learn2branch | 693d6f68def3ce290a0f5f289820e708019c019a | [
"MIT"
] | 248 | 2019-01-10T21:58:46.000Z | 2022-03-30T07:55:34.000Z | 04_test.py | 500kg/learn2branch | 693d6f68def3ce290a0f5f289820e708019c019a | [
"MIT"
] | 17 | 2018-10-09T19:17:25.000Z | 2022-02-27T07:33:11.000Z | 04_test.py | 500kg/learn2branch | 693d6f68def3ce290a0f5f289820e708019c019a | [
"MIT"
] | 66 | 2019-06-08T12:18:43.000Z | 2022-03-29T07:44:18.000Z | import os
import sys
import importlib
import argparse
import csv
import numpy as np
import time
import pickle
import pathlib
import gzip
import tensorflow as tf
import tensorflow.contrib.eager as tfe
import svmrank
import utilities
from utilities_tf import load_batch_gcnn
def load_batch_flat(sample_files, feats_type, augment_feats, normalize_feats):
    """Load a batch of flat (non-graph) branching samples.

    Each sample file is parsed by ``utilities.load_flat_samples``; per-sample
    candidate features and scores are concatenated into flat arrays, with
    ``n_cands_per_sample`` recording each sample's candidate count.

    Returns (cand_features, n_cands_per_sample, cand_choices, cand_scoress).
    """
    features, choices, scores = [], [], []
    for filename in sample_files:
        states, sample_scores, choice = utilities.load_flat_samples(
            filename, feats_type, 'scores', augment_feats, normalize_feats)
        features.append(states)
        choices.append(choice)
        scores.append(sample_scores)

    # candidate counts must be captured before concatenation flattens them
    n_cands_per_sample = np.asarray(
        [feats.shape[0] for feats in features]).astype(np.int32, copy=False)
    cand_features = np.concatenate(features, axis=0).astype(np.float32, copy=False)
    cand_choices = np.asarray(choices).astype(np.int32, copy=False)
    cand_scoress = np.concatenate(scores, axis=0).astype(np.float32, copy=False)

    return cand_features, n_cands_per_sample, cand_choices, cand_scoress
def padding(output, n_vars_per_sample, fill=-1e8):
    """Pad per-sample score rows to a common length.

    ``output`` is a (1, total) tensor holding every sample's scores
    concatenated; ``n_vars_per_sample`` gives each sample's length.
    Returns a (batch, max_len) tensor, right-padded with ``fill`` — a very
    negative value so padded slots never win a top-k comparison.
    """
    n_vars_max = tf.reduce_max(n_vars_per_sample)
    # split the flat row back into one (1, n_i) piece per sample
    output = tf.split(
        value=output,
        num_or_size_splits=n_vars_per_sample,
        axis=1,
    )
    # right-pad every piece to n_vars_max, then stack along the batch axis
    output = tf.concat([
        tf.pad(
            x,
            paddings=[[0, 0], [0, n_vars_max - tf.shape(x)[1]]],
            mode='CONSTANT',
            constant_values=fill)
        for x in output
    ], axis=0)
    return output
def process(policy, dataloader, top_k):
    """Evaluate a branching policy and return its top-k accuracies.

    ``policy`` is a dict with 'type' ('gcnn' or 'ml-competitor'), 'model',
    and (for ml-competitor) 'feat_shift'/'feat_scale'.  ``dataloader`` yields
    batches whose layout depends on the policy type.  Returns an array of
    mean accuracies, one per entry of ``top_k`` (sample-weighted average).
    """
    mean_kacc = np.zeros(len(top_k))
    n_samples_processed = 0
    for batch in dataloader:
        if policy['type'] == 'gcnn':
            c, ei, ev, v, n_cs, n_vs, n_cands, cands, best_cands, cand_scores = batch
            pred_scores = policy['model']((c, ei, ev, v, tf.reduce_sum(n_cs, keepdims=True), tf.reduce_sum(n_vs, keepdims=True)), tf.convert_to_tensor(False))
            # filter candidate variables
            pred_scores = tf.expand_dims(tf.gather(tf.squeeze(pred_scores, 0), cands), 0)
        elif policy['type'] == 'ml-competitor':
            cand_feats, n_cands, best_cands, cand_scores = batch
            # move to numpy
            cand_feats = cand_feats.numpy()
            n_cands = n_cands.numpy()
            # feature normalization
            cand_feats = (cand_feats - policy['feat_shift']) / policy['feat_scale']
            pred_scores = policy['model'].predict(cand_feats)
            # move back to TF
            pred_scores = tf.convert_to_tensor(pred_scores.reshape((1, -1)), dtype=tf.float32)
        # padding: reshape the flat per-candidate rows into (batch, max_cands)
        pred_scores = padding(pred_scores, n_cands)
        true_scores = padding(tf.reshape(cand_scores, (1, -1)), n_cands)
        true_bestscore = tf.reduce_max(true_scores, axis=-1, keepdims=True)
        # sanity check: the recorded best candidate must carry the best score
        assert all(true_bestscore.numpy() == np.take_along_axis(true_scores.numpy(), best_cands.numpy().reshape((-1, 1)), axis=1))
        kacc = []
        for k in top_k:
            # accuracy@k: did any of the k highest-predicted candidates
            # achieve the true best score?
            pred_top_k = tf.nn.top_k(pred_scores, k=k)[1].numpy()
            pred_top_k_true_scores = np.take_along_axis(true_scores.numpy(), pred_top_k, axis=1)
            kacc.append(np.mean(np.any(pred_top_k_true_scores == true_bestscore.numpy(), axis=1)))
        kacc = np.asarray(kacc)
        # weight each batch by its sample count for the running mean
        batch_size = int(n_cands.shape[0])
        mean_kacc += kacc * batch_size
        n_samples_processed += batch_size
    mean_kacc /= n_samples_processed
    return mean_kacc
if __name__ == '__main__':
    # Evaluate trained branching policies (GCNN + ML competitors) on the
    # test set and write per-seed top-k accuracies to a CSV file.
    parser = argparse.ArgumentParser()
    parser.add_argument(
        'problem',
        help='MILP instance type to process.',
        choices=['setcover', 'cauctions', 'facilities', 'indset'],
    )
    parser.add_argument(
        '-g', '--gpu',
        help='CUDA GPU id (-1 for CPU).',
        type=int,
        default=0,
    )
    args = parser.parse_args()

    print(f"problem: {args.problem}")
    print(f"gpu: {args.gpu}")

    # Evaluation settings
    seeds = [0, 1, 2, 3, 4]
    gcnn_models = ['baseline']
    other_models = ['extratrees_gcnn_agg', 'lambdamart_khalil', 'svmrank_khalil']
    test_batch_size = 128
    top_k = [1, 3, 5, 10]

    problem_folders = {
        'setcover': 'setcover/500r_1000c_0.05d',
        'cauctions': 'cauctions/100_500',
        'facilities': 'facilities/100_100_5',
        'indset': 'indset/500_4',
    }
    problem_folder = problem_folders[args.problem]

    # ablation models exist only for the set covering problem
    if args.problem == 'setcover':
        gcnn_models += ['mean_convolution', 'no_prenorm']

    # Cleanup: removed a dead "..._validation_..." result_file assignment
    # and a duplicate os.makedirs call present in the original.
    os.makedirs('results', exist_ok=True)
    result_file = f"results/{args.problem}_test_{time.strftime('%Y%m%d-%H%M%S')}.csv"

    ### TENSORFLOW SETUP ###
    if args.gpu == -1:
        os.environ['CUDA_VISIBLE_DEVICES'] = ''
    else:
        os.environ['CUDA_VISIBLE_DEVICES'] = f'{args.gpu}'
    config = tf.ConfigProto()
    config.gpu_options.allow_growth = True
    tf.enable_eager_execution(config)
    tf.executing_eagerly()

    test_files = list(pathlib.Path(f"data/samples/{problem_folder}/test").glob('sample_*.pkl'))
    test_files = [str(x) for x in test_files]
    print(f"{len(test_files)} test samples")

    evaluated_policies = [['gcnn', model] for model in gcnn_models] + \
                         [['ml-competitor', model] for model in other_models]

    fieldnames = [
        'policy',
        'seed',
    ] + [
        f'acc@{k}' for k in top_k
    ]
    with open(result_file, 'w', newline='') as csvfile:
        writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
        writer.writeheader()
        for policy_type, policy_name in evaluated_policies:
            print(f"{policy_type}:{policy_name}...")
            for seed in seeds:
                rng = np.random.RandomState(seed)
                tf.set_random_seed(rng.randint(np.iinfo(int).max))

                policy = {}
                policy['name'] = policy_name
                policy['type'] = policy_type

                if policy['type'] == 'gcnn':
                    # load model code from the per-model directory, then its weights
                    sys.path.insert(0, os.path.abspath(f"models/{policy['name']}"))
                    import model
                    importlib.reload(model)
                    del sys.path[0]
                    policy['model'] = model.GCNPolicy()
                    policy['model'].restore_state(f"trained_models/{args.problem}/{policy['name']}/{seed}/best_params.pkl")
                    policy['model'].call = tfe.defun(policy['model'].call, input_signature=policy['model'].input_signature)
                    policy['batch_datatypes'] = [tf.float32, tf.int32, tf.float32,
                            tf.float32, tf.int32, tf.int32, tf.int32, tf.int32, tf.int32, tf.float32]
                    policy['batch_fun'] = load_batch_gcnn
                else:
                    # load feature normalization parameters (identity if absent)
                    try:
                        with open(f"trained_models/{args.problem}/{policy['name']}/{seed}/normalization.pkl", 'rb') as f:
                            policy['feat_shift'], policy['feat_scale'] = pickle.load(f)
                    except Exception:
                        # Bug fix: bare 'except:' also caught KeyboardInterrupt.
                        policy['feat_shift'], policy['feat_scale'] = 0, 1

                    # load model
                    if policy_name.startswith('svmrank'):
                        policy['model'] = svmrank.Model().read(f"trained_models/{args.problem}/{policy['name']}/{seed}/model.txt")
                    else:
                        with open(f"trained_models/{args.problem}/{policy['name']}/{seed}/model.pkl", 'rb') as f:
                            policy['model'] = pickle.load(f)

                    # load feature specifications
                    with open(f"trained_models/{args.problem}/{policy['name']}/{seed}/feat_specs.pkl", 'rb') as f:
                        feat_specs = pickle.load(f)

                    policy['batch_datatypes'] = [tf.float32, tf.int32, tf.int32, tf.float32]
                    policy['batch_fun'] = lambda x: load_batch_flat(x, feat_specs['type'], feat_specs['augment'], feat_specs['qbnorm'])

                test_data = tf.data.Dataset.from_tensor_slices(test_files)
                test_data = test_data.batch(test_batch_size)
                test_data = test_data.map(lambda x: tf.py_func(
                    policy['batch_fun'], [x], policy['batch_datatypes']))
                test_data = test_data.prefetch(2)

                test_kacc = process(policy, test_data, top_k)
                print(f"  {seed} " + " ".join([f"acc@{k}: {100*acc:4.1f}" for k, acc in zip(top_k, test_kacc)]))
                writer.writerow({
                    **{
                        'policy': f"{policy['type']}:{policy['name']}",
                        'seed': seed,
                    },
                    **{
                        f'acc@{k}': test_kacc[i] for i, k in enumerate(top_k)
                    },
                })
                csvfile.flush()
| 37.060484 | 158 | 0.586878 | import os
import sys
import importlib
import argparse
import csv
import numpy as np
import time
import pickle
import pathlib
import gzip
import tensorflow as tf
import tensorflow.contrib.eager as tfe
import svmrank
import utilities
from utilities_tf import load_batch_gcnn
def load_batch_flat(sample_files, feats_type, augment_feats, normalize_feats):
    """Load a batch of flat (non-graph) branching samples.

    Parses each file with ``utilities.load_flat_samples`` and concatenates
    the per-sample candidate features/scores into flat arrays, recording
    each sample's candidate count in ``n_cands_per_sample``.
    Returns (cand_features, n_cands_per_sample, cand_choices, cand_scoress).
    """
    cand_features = []
    cand_choices = []
    cand_scoress = []
    for i, filename in enumerate(sample_files):
        cand_states, cand_scores, cand_choice = utilities.load_flat_samples(filename, feats_type, 'scores', augment_feats, normalize_feats)
        cand_features.append(cand_states)
        cand_choices.append(cand_choice)
        cand_scoress.append(cand_scores)
    # per-sample candidate counts, captured before concatenation
    n_cands_per_sample = [v.shape[0] for v in cand_features]
    cand_features = np.concatenate(cand_features, axis=0).astype(np.float32, copy=False)
    cand_choices = np.asarray(cand_choices).astype(np.int32, copy=False)
    cand_scoress = np.concatenate(cand_scoress, axis=0).astype(np.float32, copy=False)
    n_cands_per_sample = np.asarray(n_cands_per_sample).astype(np.int32, copy=False)
    return cand_features, n_cands_per_sample, cand_choices, cand_scoress
def padding(output, n_vars_per_sample, fill=-1e8):
    """Pad per-sample score rows to a common length.

    ``output`` is a (1, total) tensor of concatenated per-sample scores;
    ``n_vars_per_sample`` gives each sample's length.  Returns a
    (batch, max_len) tensor right-padded with ``fill`` (very negative, so
    padded slots never win a top-k comparison).
    """
    n_vars_max = tf.reduce_max(n_vars_per_sample)
    # split the flat row back into one (1, n_i) piece per sample
    output = tf.split(
        value=output,
        num_or_size_splits=n_vars_per_sample,
        axis=1,
    )
    # right-pad every piece to n_vars_max, then stack along the batch axis
    output = tf.concat([
        tf.pad(
            x,
            paddings=[[0, 0], [0, n_vars_max - tf.shape(x)[1]]],
            mode='CONSTANT',
            constant_values=fill)
        for x in output
    ], axis=0)
    return output
def process(policy, dataloader, top_k):
    """Evaluate a branching policy and return its top-k accuracies.

    ``policy`` is a dict with 'type' ('gcnn' or 'ml-competitor'), 'model',
    and (for ml-competitor) 'feat_shift'/'feat_scale'.  Returns an array of
    mean accuracies, one per entry of ``top_k`` (sample-weighted average).
    """
    mean_kacc = np.zeros(len(top_k))
    n_samples_processed = 0
    for batch in dataloader:
        if policy['type'] == 'gcnn':
            c, ei, ev, v, n_cs, n_vs, n_cands, cands, best_cands, cand_scores = batch
            pred_scores = policy['model']((c, ei, ev, v, tf.reduce_sum(n_cs, keepdims=True), tf.reduce_sum(n_vs, keepdims=True)), tf.convert_to_tensor(False))
            # keep only the branching-candidate variables
            pred_scores = tf.expand_dims(tf.gather(tf.squeeze(pred_scores, 0), cands), 0)
        elif policy['type'] == 'ml-competitor':
            cand_feats, n_cands, best_cands, cand_scores = batch
            # competitor models are sklearn/svmrank-style: work in numpy
            cand_feats = cand_feats.numpy()
            n_cands = n_cands.numpy()
            cand_feats = (cand_feats - policy['feat_shift']) / policy['feat_scale']
            pred_scores = policy['model'].predict(cand_feats)
            pred_scores = tf.convert_to_tensor(pred_scores.reshape((1, -1)), dtype=tf.float32)
        # reshape flat per-candidate rows into (batch, max_cands) with padding
        pred_scores = padding(pred_scores, n_cands)
        true_scores = padding(tf.reshape(cand_scores, (1, -1)), n_cands)
        true_bestscore = tf.reduce_max(true_scores, axis=-1, keepdims=True)
        # sanity check: the recorded best candidate must carry the best score
        assert all(true_bestscore.numpy() == np.take_along_axis(true_scores.numpy(), best_cands.numpy().reshape((-1, 1)), axis=1))
        kacc = []
        for k in top_k:
            # accuracy@k: any of the k highest-predicted candidates is a true best
            pred_top_k = tf.nn.top_k(pred_scores, k=k)[1].numpy()
            pred_top_k_true_scores = np.take_along_axis(true_scores.numpy(), pred_top_k, axis=1)
            kacc.append(np.mean(np.any(pred_top_k_true_scores == true_bestscore.numpy(), axis=1)))
        kacc = np.asarray(kacc)
        batch_size = int(n_cands.shape[0])
        mean_kacc += kacc * batch_size
        n_samples_processed += batch_size
    mean_kacc /= n_samples_processed
    return mean_kacc
if __name__ == '__main__':
    # Evaluate trained branching policies (GCNN + ML competitors) on the
    # test set and write per-seed top-k accuracies to a CSV file.
    #
    # Bug fix: this copy was truncation-corrupted — the line
    # "if args.gpu == -1:" and the start of the CUDA_VISIBLE_DEVICES
    # assignment were missing; restored below.  Also removed a dead
    # "..._validation_..." result_file assignment and a duplicate makedirs.
    parser = argparse.ArgumentParser()
    parser.add_argument(
        'problem',
        help='MILP instance type to process.',
        choices=['setcover', 'cauctions', 'facilities', 'indset'],
    )
    parser.add_argument(
        '-g', '--gpu',
        help='CUDA GPU id (-1 for CPU).',
        type=int,
        default=0,
    )
    args = parser.parse_args()

    print(f"problem: {args.problem}")
    print(f"gpu: {args.gpu}")

    # Evaluation settings
    seeds = [0, 1, 2, 3, 4]
    gcnn_models = ['baseline']
    other_models = ['extratrees_gcnn_agg', 'lambdamart_khalil', 'svmrank_khalil']
    test_batch_size = 128
    top_k = [1, 3, 5, 10]

    problem_folders = {
        'setcover': 'setcover/500r_1000c_0.05d',
        'cauctions': 'cauctions/100_500',
        'facilities': 'facilities/100_100_5',
        'indset': 'indset/500_4',
    }
    problem_folder = problem_folders[args.problem]

    # ablation models exist only for the set covering problem
    if args.problem == 'setcover':
        gcnn_models += ['mean_convolution', 'no_prenorm']

    os.makedirs('results', exist_ok=True)
    result_file = f"results/{args.problem}_test_{time.strftime('%Y%m%d-%H%M%S')}.csv"

    ### TENSORFLOW SETUP ###
    if args.gpu == -1:
        os.environ['CUDA_VISIBLE_DEVICES'] = ''
    else:
        os.environ['CUDA_VISIBLE_DEVICES'] = f'{args.gpu}'
    config = tf.ConfigProto()
    config.gpu_options.allow_growth = True
    tf.enable_eager_execution(config)
    tf.executing_eagerly()

    test_files = list(pathlib.Path(f"data/samples/{problem_folder}/test").glob('sample_*.pkl'))
    test_files = [str(x) for x in test_files]
    print(f"{len(test_files)} test samples")

    evaluated_policies = [['gcnn', model] for model in gcnn_models] + \
                         [['ml-competitor', model] for model in other_models]

    fieldnames = [
        'policy',
        'seed',
    ] + [
        f'acc@{k}' for k in top_k
    ]
    with open(result_file, 'w', newline='') as csvfile:
        writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
        writer.writeheader()
        for policy_type, policy_name in evaluated_policies:
            print(f"{policy_type}:{policy_name}...")
            for seed in seeds:
                rng = np.random.RandomState(seed)
                tf.set_random_seed(rng.randint(np.iinfo(int).max))

                policy = {}
                policy['name'] = policy_name
                policy['type'] = policy_type

                if policy['type'] == 'gcnn':
                    # load model code from the per-model directory, then its weights
                    sys.path.insert(0, os.path.abspath(f"models/{policy['name']}"))
                    import model
                    importlib.reload(model)
                    del sys.path[0]
                    policy['model'] = model.GCNPolicy()
                    policy['model'].restore_state(f"trained_models/{args.problem}/{policy['name']}/{seed}/best_params.pkl")
                    policy['model'].call = tfe.defun(policy['model'].call, input_signature=policy['model'].input_signature)
                    policy['batch_datatypes'] = [tf.float32, tf.int32, tf.float32,
                            tf.float32, tf.int32, tf.int32, tf.int32, tf.int32, tf.int32, tf.float32]
                    policy['batch_fun'] = load_batch_gcnn
                else:
                    # load feature normalization parameters (identity if absent)
                    try:
                        with open(f"trained_models/{args.problem}/{policy['name']}/{seed}/normalization.pkl", 'rb') as f:
                            policy['feat_shift'], policy['feat_scale'] = pickle.load(f)
                    except Exception:
                        # Bug fix: bare 'except:' also caught KeyboardInterrupt.
                        policy['feat_shift'], policy['feat_scale'] = 0, 1

                    # load model
                    if policy_name.startswith('svmrank'):
                        policy['model'] = svmrank.Model().read(f"trained_models/{args.problem}/{policy['name']}/{seed}/model.txt")
                    else:
                        with open(f"trained_models/{args.problem}/{policy['name']}/{seed}/model.pkl", 'rb') as f:
                            policy['model'] = pickle.load(f)

                    # load feature specifications
                    with open(f"trained_models/{args.problem}/{policy['name']}/{seed}/feat_specs.pkl", 'rb') as f:
                        feat_specs = pickle.load(f)

                    policy['batch_datatypes'] = [tf.float32, tf.int32, tf.int32, tf.float32]
                    policy['batch_fun'] = lambda x: load_batch_flat(x, feat_specs['type'], feat_specs['augment'], feat_specs['qbnorm'])

                test_data = tf.data.Dataset.from_tensor_slices(test_files)
                test_data = test_data.batch(test_batch_size)
                test_data = test_data.map(lambda x: tf.py_func(
                    policy['batch_fun'], [x], policy['batch_datatypes']))
                test_data = test_data.prefetch(2)

                test_kacc = process(policy, test_data, top_k)
                print(f"  {seed} " + " ".join([f"acc@{k}: {100*acc:4.1f}" for k, acc in zip(top_k, test_kacc)]))
                writer.writerow({
                    **{
                        'policy': f"{policy['type']}:{policy['name']}",
                        'seed': seed,
                    },
                    **{
                        f'acc@{k}': test_kacc[i] for i, k in enumerate(top_k)
                    },
                })
                csvfile.flush()
| true | true |
f72afdb37d0bc3631c2708300be0110723f46ee0 | 4,090 | py | Python | src/python/pants/ivy/ivy_subsystem.py | SergeKireev/pants | cd92c65aeb3dfdcee3e0946f2b68a301ef2f4541 | [
"Apache-2.0"
] | 1 | 2020-08-26T03:30:31.000Z | 2020-08-26T03:30:31.000Z | src/python/pants/ivy/ivy_subsystem.py | SergeKireev/pants | cd92c65aeb3dfdcee3e0946f2b68a301ef2f4541 | [
"Apache-2.0"
] | 1 | 2021-09-02T21:06:31.000Z | 2021-09-02T21:06:31.000Z | src/python/pants/ivy/ivy_subsystem.py | SergeKireev/pants | cd92c65aeb3dfdcee3e0946f2b68a301ef2f4541 | [
"Apache-2.0"
] | null | null | null | # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
import os
import urllib
from pants.java.distribution.distribution import DistributionLocator
from pants.subsystem.subsystem import Subsystem
class IvySubsystem(Subsystem):
  """Common configuration items for ivy tasks.

  :API: public
  """
  options_scope = 'ivy'

  _DEFAULT_VERSION = '2.4.0'
  _DEFAULT_URL = ('https://repo1.maven.org/maven2/'
                  'org/apache/ivy/ivy/'
                  '{version}/ivy-{version}.jar'.format(version=_DEFAULT_VERSION))

  @classmethod
  def register_options(cls, register):
    super().register_options(register)
    register('--http-proxy', advanced=True,
             help='Specify a proxy URL for http requests.')
    register('--https-proxy', advanced=True,
             help='Specify a proxy URL for https requests.')
    register('--bootstrap-jar-url', advanced=True, default=cls._DEFAULT_URL,
             help='Location to download a bootstrap version of Ivy.')
    register('--bootstrap-fetch-timeout-secs', type=int, advanced=True, default=10,
             help='Timeout the fetch if the connection is idle for longer than this value.')
    register('--ivy-profile', advanced=True, default=cls._DEFAULT_VERSION,
             help='The version of ivy to fetch.')
    register('--cache-dir', advanced=True, default=os.path.expanduser('~/.ivy2/pants'),
             help='The default directory used for both the Ivy resolution and repository caches.'
                  'If you want to isolate the resolution cache from the repository cache, we '
                  'recommend setting both the --resolution-cache-dir and --repository-cache-dir '
                  'instead of using --cache-dir')
    register('--resolution-cache-dir', advanced=True,
             help='Directory to store Ivy resolution artifacts.')
    register('--repository-cache-dir', advanced=True,
             help='Directory to store Ivy repository artifacts.')
    register('--ivy-settings', advanced=True,
             help='Location of XML configuration file for Ivy settings.')
    register('--bootstrap-ivy-settings', advanced=True,
             help='Bootstrap Ivy XML configuration file.')

  @classmethod
  def subsystem_dependencies(cls):
    return super().subsystem_dependencies() + (DistributionLocator,)

  @staticmethod
  def _proxy_from_env(*env_vars):
    """Return the first non-empty value among the given env vars, else None."""
    for var in env_vars:
      value = os.getenv(var)
      if value:
        return value
    return None

  def http_proxy(self):
    """Set ivy to use an http proxy.

    Environment variables take precedence over the --http-proxy option.
    Expects a string of the form http://<host>:<port>
    """
    return self._proxy_from_env('HTTP_PROXY', 'http_proxy') or self.get_options().http_proxy

  def https_proxy(self):
    """Set ivy to use an https proxy.

    Environment variables take precedence over the --https-proxy option.
    Expects a string of the form http://<host>:<port>
    """
    return self._proxy_from_env('HTTPS_PROXY', 'https_proxy') or self.get_options().https_proxy

  def extra_jvm_options(self):
    """Return JVM -D flags propagating any configured HTTP(S) proxies."""
    extra_options = []
    for scheme, proxy in (('http', self.http_proxy()), ('https', self.https_proxy())):
      if proxy:
        host, port = self._parse_proxy_string(proxy)
        extra_options.extend([
          "-D{}.proxyHost={}".format(scheme, host),
          "-D{}.proxyPort={}".format(scheme, port),
        ])
    return extra_options

  def _parse_proxy_string(self, proxy_string):
    """Return (hostname, port) parsed from a proxy URL string."""
    # Bug fix: the module header only does 'import urllib', which does not
    # guarantee the 'urllib.parse' submodule is loaded; import it explicitly.
    import urllib.parse
    parse_result = urllib.parse.urlparse(proxy_string)
    return parse_result.hostname, parse_result.port

  def resolution_cache_dir(self):
    """Directory for Ivy resolution artifacts (falls back to --cache-dir)."""
    return self.get_options().resolution_cache_dir or self.get_options().cache_dir

  def repository_cache_dir(self):
    """Directory for Ivy repository artifacts (falls back to --cache-dir)."""
    return self.get_options().repository_cache_dir or self.get_options().cache_dir
| 37.181818 | 97 | 0.674817 |
import os
import urllib
from pants.java.distribution.distribution import DistributionLocator
from pants.subsystem.subsystem import Subsystem
class IvySubsystem(Subsystem):
  """Common configuration items for ivy tasks.

  :API: public
  """
  options_scope = 'ivy'
  _DEFAULT_VERSION = '2.4.0'
  _DEFAULT_URL = ('https://repo1.maven.org/maven2/'
                  'org/apache/ivy/ivy/'
                  '{version}/ivy-{version}.jar'.format(version=_DEFAULT_VERSION))
  @classmethod
  def register_options(cls, register):
    super().register_options(register)
    register('--http-proxy', advanced=True,
             help='Specify a proxy URL for http requests.')
    register('--https-proxy', advanced=True,
             help='Specify a proxy URL for https requests.')
    register('--bootstrap-jar-url', advanced=True, default=cls._DEFAULT_URL,
             help='Location to download a bootstrap version of Ivy.')
    register('--bootstrap-fetch-timeout-secs', type=int, advanced=True, default=10,
             help='Timeout the fetch if the connection is idle for longer than this value.')
    register('--ivy-profile', advanced=True, default=cls._DEFAULT_VERSION,
             help='The version of ivy to fetch.')
    register('--cache-dir', advanced=True, default=os.path.expanduser('~/.ivy2/pants'),
             help='The default directory used for both the Ivy resolution and repository caches.'
                  'If you want to isolate the resolution cache from the repository cache, we '
                  'recommend setting both the --resolution-cache-dir and --repository-cache-dir '
                  'instead of using --cache-dir')
    register('--resolution-cache-dir', advanced=True,
             help='Directory to store Ivy resolution artifacts.')
    register('--repository-cache-dir', advanced=True,
             help='Directory to store Ivy repository artifacts.')
    register('--ivy-settings', advanced=True,
             help='Location of XML configuration file for Ivy settings.')
    register('--bootstrap-ivy-settings', advanced=True,
             help='Bootstrap Ivy XML configuration file.')
  @classmethod
  def subsystem_dependencies(cls):
    return super().subsystem_dependencies() + (DistributionLocator,)
  def http_proxy(self):
    """Set ivy to use an http proxy.

    Environment variables take precedence over the --http-proxy option.
    Expects a string of the form http://<host>:<port>
    """
    if os.getenv('HTTP_PROXY'):
      return os.getenv('HTTP_PROXY')
    if os.getenv('http_proxy'):
      return os.getenv('http_proxy')
    return self.get_options().http_proxy
  def https_proxy(self):
    """Set ivy to use an https proxy.

    Environment variables take precedence over the --https-proxy option.
    Expects a string of the form http://<host>:<port>
    """
    if os.getenv('HTTPS_PROXY'):
      return os.getenv('HTTPS_PROXY')
    if os.getenv('https_proxy'):
      return os.getenv('https_proxy')
    return self.get_options().https_proxy
  def extra_jvm_options(self):
    """Return JVM -D flags propagating any configured HTTP(S) proxies."""
    extra_options = []
    http_proxy = self.http_proxy()
    if http_proxy:
      host, port = self._parse_proxy_string(http_proxy)
      extra_options.extend([
        "-Dhttp.proxyHost={}".format(host),
        "-Dhttp.proxyPort={}".format(port),
      ])
    https_proxy = self.https_proxy()
    if https_proxy:
      host, port = self._parse_proxy_string(https_proxy)
      extra_options.extend([
        "-Dhttps.proxyHost={}".format(host),
        "-Dhttps.proxyPort={}".format(port),
      ])
    return extra_options
  def _parse_proxy_string(self, proxy_string):
    """Return (hostname, port) parsed from a proxy URL string.

    NOTE(review): relies on 'urllib.parse' being importable although the
    module header only does 'import urllib' — confirm a sibling import
    loads the submodule.
    """
    parse_result = urllib.parse.urlparse(proxy_string)
    return parse_result.hostname, parse_result.port
  def resolution_cache_dir(self):
    """Directory for Ivy resolution artifacts (falls back to --cache-dir)."""
    if self.get_options().resolution_cache_dir:
      return self.get_options().resolution_cache_dir
    else:
      return self.get_options().cache_dir
  def repository_cache_dir(self):
    """Directory for Ivy repository artifacts (falls back to --cache-dir)."""
    if self.get_options().repository_cache_dir:
      return self.get_options().repository_cache_dir
    else:
      return self.get_options().cache_dir
| true | true |
f72afdfc03221196ea9ceaf1098c9e1569cc1366 | 808 | py | Python | sampling/text.py | YoannDupont/corpus-sampling | 20fd993bc967fd499e88444d882472ba7598c197 | [
"MIT"
] | null | null | null | sampling/text.py | YoannDupont/corpus-sampling | 20fd993bc967fd499e88444d882472ba7598c197 | [
"MIT"
] | null | null | null | sampling/text.py | YoannDupont/corpus-sampling | 20fd993bc967fd499e88444d882472ba7598c197 | [
"MIT"
] | null | null | null | from pathlib import Path
import nltk
from nltk.tokenize import sent_tokenize
tokenizer = nltk.RegexpTokenizer(r"([A-Z][A-Z0-9.]+|[0-9]+[,.][0-9]+|[cdjlmnst]'|qu'|[\w'-]+|\S)")
class Sentence:
    """One sentence from a plain-text corpus: the raw text plus its index.

    contains_pos/count_pos exist for interface compatibility with annotated
    corpora; plain text carries no POS tags, so they are constant.
    """

    def __init__(self, text, nth):
        """Store the raw text and the sentence's position in the corpus."""
        self.text = text
        self.nth = nth

    @property
    def id(self):
        """Sentence identifier: its 0-based position in the corpus."""
        return self.nth

    def __len__(self):
        """Token count, as produced by the module-level ``tokenizer``."""
        return len(tokenizer.tokenize(self.text))

    def contains_pos(self, postag):
        """Plain-text sentences carry no POS annotation, so always False."""
        return False

    def count_pos(self, postag):
        """Plain-text sentences carry no POS annotation, so always 0."""
        return 0
def read_corpus(path):
    """Read the text file at ``path`` and return a list of Sentence objects.

    The content is split with NLTK's ``sent_tokenize``; newlines inside each
    sentence are flattened to spaces, and sentences are numbered in order.
    """
    with open(path) as input_stream:
        content = input_stream.read()
    flattened = (item.replace("\n", " ") for item in sent_tokenize(content))
    return [Sentence(sent, nth) for nth, sent in enumerate(flattened)]
| 22.444444 | 98 | 0.62005 | from pathlib import Path
import nltk
from nltk.tokenize import sent_tokenize
tokenizer = nltk.RegexpTokenizer(r"([A-Z][A-Z0-9.]+|[0-9]+[,.][0-9]+|[cdjlmnst]'|qu'|[\w'-]+|\S)")
class Sentence:
    """One sentence from a plain-text corpus: the raw text plus its index."""
    def __init__(self, text, nth):
        self.text = text
        self.nth = nth
    def __len__(self):
        """Token count, as produced by the module-level ``tokenizer``."""
        return len(tokenizer.tokenize(self.text))
    @property
    def id(self):
        """Sentence identifier: its 0-based position in the corpus."""
        return self.nth
    def contains_pos(self, postag):
        """Plain-text sentences carry no POS annotation, so always False."""
        return False
    def count_pos(self, postag):
        """Plain-text sentences carry no POS annotation, so always 0."""
        return 0
def read_corpus(path):
    """Read the text file at ``path`` and return a list of Sentence objects.

    The content is split with NLTK's ``sent_tokenize``; newlines inside each
    sentence are flattened to spaces, and sentences are numbered in order.
    """
    corpus = []
    with open(path) as input_stream:
        content = input_stream.read()
    sents = [item.replace("\n", " ") for item in sent_tokenize(content)]
    for nth, sent in enumerate(sents):
        corpus.append(Sentence(sent, nth))
    return corpus
| true | true |
f72aff11df732c260aca806b126e282388a93204 | 4,897 | py | Python | seahub/api2/authentication.py | saukrIppl/newsea | 0fd5ab2ade9a8fb16b1e7b43ba13dac32eb39603 | [
"Apache-2.0"
] | 2 | 2017-06-21T09:46:55.000Z | 2018-05-30T10:07:32.000Z | seahub/api2/authentication.py | saukrIppl/newsea | 0fd5ab2ade9a8fb16b1e7b43ba13dac32eb39603 | [
"Apache-2.0"
] | null | null | null | seahub/api2/authentication.py | saukrIppl/newsea | 0fd5ab2ade9a8fb16b1e7b43ba13dac32eb39603 | [
"Apache-2.0"
] | 1 | 2020-10-01T04:11:41.000Z | 2020-10-01T04:11:41.000Z | import datetime
import logging
from rest_framework import status
from rest_framework.authentication import BaseAuthentication
from rest_framework.exceptions import APIException
import seaserv
from seahub.base.accounts import User
from seahub.constants import GUEST_USER
from seahub.api2.models import Token, TokenV2
from seahub.api2.utils import get_client_ip
from seahub.utils import within_time_range
# MULTI_TENANCY is an optional deployment flag; default to single-tenant
# mode when it is not defined in seahub.settings.
try:
    from seahub.settings import MULTI_TENANCY
except ImportError:
    MULTI_TENANCY = False
logger = logging.getLogger(__name__)
# Request headers sent by Seafile clients; recorded on per-device TokenV2 rows.
HEADER_CLIENT_VERSION = 'HTTP_X_SEAFILE_CLIENT_VERSION'
HEADER_PLATFORM_VERSION = 'HTTP_X_SEAFILE_PLATFORM_VERSION'
class AuthenticationFailed(APIException):
    """Raised when token credentials are missing, malformed or invalid (HTTP 401)."""
    status_code = status.HTTP_401_UNAUTHORIZED
    default_detail = 'Incorrect authentication credentials.'

    def __init__(self, detail=None):
        # Fall back to the default message for a missing/empty detail.
        self.detail = detail if detail else self.default_detail
class TokenAuthentication(BaseAuthentication):
    """
    Simple token based authentication.

    Clients should authenticate by passing the token key in the "Authorization"
    HTTP header, prepended with the string "Token ".  For example:

        Authorization: Token 401f7ac837da42b97f613d789819ff93537bee6a

    A custom token model may be used, but must have the following properties.

    * key -- The string identifying the token
    * user -- The user to which the token belongs

    Per-device (v2) tokens are tried first, then legacy (v1) tokens.
    """

    def authenticate(self, request):
        """Return (user, token) for a valid 'Token <key>' header; None if no header."""
        auth = request.META.get('HTTP_AUTHORIZATION', '').split()
        if not auth or auth[0].lower() != 'token':
            return None

        if len(auth) == 1:
            msg = 'Invalid token header. No credentials provided.'
            raise AuthenticationFailed(msg)
        elif len(auth) > 2:
            msg = 'Invalid token header. Token string should not contain spaces.'
            raise AuthenticationFailed(msg)

        key = auth[1]
        ret = self.authenticate_v2(request, key)
        if ret:
            return ret

        return self.authenticate_v1(request, key)

    def _populate_user_permissions(self, user):
        """Disable some operations if ``user`` is a guest.
        """
        if user.role == GUEST_USER:
            user.permissions.can_add_repo = lambda: False
            user.permissions.can_add_group = lambda: False
            user.permissions.can_view_org = lambda: False
            user.permissions.can_use_global_address_book = lambda: False
            user.permissions.can_generate_shared_link = lambda: False

    def authenticate_v1(self, request, key):
        """Authenticate a legacy (v1) token; raises AuthenticationFailed on bad key/user."""
        try:
            token = Token.objects.get(key=key)
        except Token.DoesNotExist:
            raise AuthenticationFailed('Invalid token')

        try:
            user = User.objects.get(email=token.user)
        except User.DoesNotExist:
            raise AuthenticationFailed('User inactive or deleted')

        if MULTI_TENANCY:
            orgs = seaserv.get_orgs_by_user(token.user)
            if orgs:
                user.org = orgs[0]

        self._populate_user_permissions(user)

        if user.is_active:
            return (user, token)
        # Inactive user: fall through (None) so DRF can try other authenticators,
        # matching the original implicit-return behavior.
        return None

    def authenticate_v2(self, request, key):
        """Authenticate a per-device (v2) token, updating device metadata.

        Returns None (instead of raising) when the key is not a v2 token so
        that v1 authentication can be attempted next.
        """
        try:
            token = TokenV2.objects.get(key=key)
        except TokenV2.DoesNotExist:
            return None  # Continue authentication in token v1

        try:
            user = User.objects.get(email=token.user)
        except User.DoesNotExist:
            raise AuthenticationFailed('User inactive or deleted')

        if MULTI_TENANCY:
            orgs = seaserv.get_orgs_by_user(token.user)
            if orgs:
                user.org = orgs[0]

        self._populate_user_permissions(user)

        if user.is_active:
            need_save = False
            # We update the device's last_login_ip, client_version, platform_version if changed
            ip = get_client_ip(request)
            if ip and ip != token.last_login_ip:
                token.last_login_ip = ip
                need_save = True
            client_version = request.META.get(HEADER_CLIENT_VERSION, '')
            if client_version and client_version != token.client_version:
                token.client_version = client_version
                need_save = True
            platform_version = request.META.get(HEADER_PLATFORM_VERSION, '')
            if platform_version and platform_version != token.platform_version:
                token.platform_version = platform_version
                need_save = True
            if not within_time_range(token.last_accessed, datetime.datetime.now(), 10 * 60):
                # We only need 10min precision for the last_accessed field
                need_save = True
            if need_save:
                try:
                    token.save()
                except Exception:
                    # Bug fix: was a bare 'except:' that also swallowed
                    # SystemExit/KeyboardInterrupt; the save stays best-effort.
                    logger.exception('error when save token v2:')
            return (user, token)
        return None
| 33.772414 | 95 | 0.647131 | import datetime
import logging
from rest_framework import status
from rest_framework.authentication import BaseAuthentication
from rest_framework.exceptions import APIException
import seaserv
from seahub.base.accounts import User
from seahub.constants import GUEST_USER
from seahub.api2.models import Token, TokenV2
from seahub.api2.utils import get_client_ip
from seahub.utils import within_time_range
try:
from seahub.settings import MULTI_TENANCY
except ImportError:
MULTI_TENANCY = False
logger = logging.getLogger(__name__)
HEADER_CLIENT_VERSION = 'HTTP_X_SEAFILE_CLIENT_VERSION'
HEADER_PLATFORM_VERSION = 'HTTP_X_SEAFILE_PLATFORM_VERSION'
class AuthenticationFailed(APIException):
status_code = status.HTTP_401_UNAUTHORIZED
default_detail = 'Incorrect authentication credentials.'
def __init__(self, detail=None):
self.detail = detail or self.default_detail
class TokenAuthentication(BaseAuthentication):
def authenticate(self, request):
auth = request.META.get('HTTP_AUTHORIZATION', '').split()
if not auth or auth[0].lower() != 'token':
return None
if len(auth) == 1:
msg = 'Invalid token header. No credentials provided.'
raise AuthenticationFailed(msg)
elif len(auth) > 2:
msg = 'Invalid token header. Token string should not contain spaces.'
raise AuthenticationFailed(msg)
key = auth[1]
ret = self.authenticate_v2(request, key)
if ret:
return ret
return self.authenticate_v1(request, key)
def _populate_user_permissions(self, user):
if user.role == GUEST_USER:
user.permissions.can_add_repo = lambda: False
user.permissions.can_add_group = lambda: False
user.permissions.can_view_org = lambda: False
user.permissions.can_use_global_address_book = lambda: False
user.permissions.can_generate_shared_link = lambda: False
def authenticate_v1(self, request, key):
try:
token = Token.objects.get(key=key)
except Token.DoesNotExist:
raise AuthenticationFailed('Invalid token')
try:
user = User.objects.get(email=token.user)
except User.DoesNotExist:
raise AuthenticationFailed('User inactive or deleted')
if MULTI_TENANCY:
orgs = seaserv.get_orgs_by_user(token.user)
if orgs:
user.org = orgs[0]
self._populate_user_permissions(user)
if user.is_active:
return (user, token)
def authenticate_v2(self, request, key):
try:
token = TokenV2.objects.get(key=key)
except TokenV2.DoesNotExist:
return None
try:
user = User.objects.get(email=token.user)
except User.DoesNotExist:
raise AuthenticationFailed('User inactive or deleted')
if MULTI_TENANCY:
orgs = seaserv.get_orgs_by_user(token.user)
if orgs:
user.org = orgs[0]
self._populate_user_permissions(user)
if user.is_active:
need_save = False
ip = get_client_ip(request)
if ip and ip != token.last_login_ip:
token.last_login_ip = ip
need_save = True
client_version = request.META.get(HEADER_CLIENT_VERSION, '')
if client_version and client_version != token.client_version:
token.client_version = client_version
need_save = True
platform_version = request.META.get(HEADER_PLATFORM_VERSION, '')
if platform_version and platform_version != token.platform_version:
token.platform_version = platform_version
need_save = True
if not within_time_range(token.last_accessed, datetime.datetime.now(), 10 * 60):
# We only need 10min precision for the last_accessed field
need_save = True
if need_save:
try:
token.save()
except:
logger.exception('error when save token v2:')
return (user, token)
| true | true |
f72affbaf63edad2e1efdfe81604b7c4734c0339 | 405 | py | Python | setup.py | mstroud/python-matrix-gfyrslf | 0375bfb12d1cd50611f01101917d2cd2123543e4 | [
"MIT"
] | null | null | null | setup.py | mstroud/python-matrix-gfyrslf | 0375bfb12d1cd50611f01101917d2cd2123543e4 | [
"MIT"
] | null | null | null | setup.py | mstroud/python-matrix-gfyrslf | 0375bfb12d1cd50611f01101917d2cd2123543e4 | [
"MIT"
] | null | null | null | from distutils.core import setup
# Packaging metadata for the python-matrix-gfyrslf chatbot.
DESC = 'A simple, extensible chatbot for Matrix'

setup(
    name='python-matrix-gfyrslf',
    version='0.1',
    author='Matt Stroud',
    author_email='see github',
    url='https://github.com/mstroud/python-matrix-gfyrslf',
    packages=['python-matrix-gfyrslf'],
    # NOTE(review): ``distutils.core.setup`` ignores ``install_requires``;
    # switching the import to ``from setuptools import setup`` would make
    # this dependency effective. Confirm before changing the import.
    install_requires=['matrix_client'],
    license='MIT',
    # ``summary`` is not a recognised setup() keyword and triggered an
    # "Unknown distribution option" warning; ``description`` is the
    # short-summary field it was meant to be.
    description=DESC,
    long_description=DESC,
)
| 23.823529 | 59 | 0.688889 | from distutils.core import setup
DESC='A simple, extensible chatbot for Matrix'
setup(
name='python-matrix-gfyrslf',
version='0.1',
author='Matt Stroud',
author_email='see github',
url='https://github.com/mstroud/python-matrix-gfyrslf',
packages=['python-matrix-gfyrslf'],
install_requires=['matrix_client'],
license='MIT',
summary=DESC,
long_description=DESC,
)
| true | true |
f72b00a5286e87e05ac8c588aa0072278e0c0565 | 30 | py | Python | bot/__init__.py | Sc2-AI-Cup/example-bot-workerrush | 6a4ddcc4c22018bcd64d07ba405b7ef13ed634f2 | [
"MIT"
] | null | null | null | bot/__init__.py | Sc2-AI-Cup/example-bot-workerrush | 6a4ddcc4c22018bcd64d07ba405b7ef13ed634f2 | [
"MIT"
] | null | null | null | bot/__init__.py | Sc2-AI-Cup/example-bot-workerrush | 6a4ddcc4c22018bcd64d07ba405b7ef13ed634f2 | [
"MIT"
] | null | null | null | from .bot import WorkerRushBot | 30 | 30 | 0.866667 | from .bot import WorkerRushBot | true | true |
f72b00c52fc98e9202a373c7817029e4bb84f7b4 | 8,185 | py | Python | controllers.py | Yoshiyuki-Su/FastAPITodo | d9efcc2793eb5191f70923eb669eb9a1a3fcc427 | [
"MIT"
] | null | null | null | controllers.py | Yoshiyuki-Su/FastAPITodo | d9efcc2793eb5191f70923eb669eb9a1a3fcc427 | [
"MIT"
] | 6 | 2020-11-23T14:38:55.000Z | 2021-01-10T16:55:57.000Z | controllers.py | Yoshiyuki-Su/FastAPITodo | d9efcc2793eb5191f70923eb669eb9a1a3fcc427 | [
"MIT"
] | null | null | null | from fastapi import FastAPI, Depends, Form
from fastapi.security import HTTPBasic, HTTPBasicCredentials
from starlette.templating import Jinja2Templates
from starlette.requests import Request
from starlette.responses import RedirectResponse
from datetime import datetime, timedelta
import db
import hashlib
from mycalendar import MyCalendar
import re
from auth import auth
from models import User, Task
# Application, authentication, templating and input-validation setup.
app = FastAPI(
    title='FastAPIでつくるToDoアプリケーション',
    description='FastAPIチュートリアル:FastAPI(とstarlette)でシンプルなToDoアプリの作成',
    version='0.0.1'
)

security = HTTPBasic()
templates = Jinja2Templates(directory="templates")
jinja_env = templates.env

pattern = re.compile(r'\w{4,20}')  # matches any 4-20 alphanumeric characters
pattern_pw = re.compile(r'\w{6,20}')  # matches any 6-20 alphanumeric characters
pattern_mail = re.compile(r'^\w+([-+.]\w+)*@\w+([-.]\w+)*\.\w+([-.]\w+)*$')  # e-mail regex
def index(request: Request):
    """Render the top page."""
    context = {'request': request}
    return templates.TemplateResponse('index.html', context)
def admin(request: Request, credentials: HTTPBasicCredentials = Depends(security)):
    """Render the admin dashboard: upcoming tasks plus a yearly calendar."""
    username = auth(credentials)
    user = db.session.query(User).filter(User.username == username).first()
    task = db.session.query(Task).filter(Task.user_id == user.id).all()
    db.session.close()
    """ [new] 今日の日付と来週の日付"""
    # Today's date and the date one week from now.
    today = datetime.now()
    next_w = today + timedelta(days=7)  # the date one week ahead
    """ [new] カレンダー関連 """
    # Build the calendar as HTML, passing the dates that have tasks as keys.
    cal = MyCalendar(username,
                     {t.deadline.strftime('%Y%m%d'): t.done for t in task})  # keyed by dates that have tasks
    cal = cal.formatyear(today.year, 4)  # get the calendar as HTML
    # Only the upcoming tasks are needed, so rewrite the list.
    task = [t for t in task if today <= t.deadline <= next_w]
    links = [t.deadline.strftime('/todo/'+username+'/%Y/%m/%d') for t in task]  # links to upcoming tasks
    return templates.TemplateResponse('admin.html',
                                      {'request': request,
                                       'user': user,
                                       'task': task,
                                       'links': links,
                                       'calender': cal})
async def register(request: Request):
    """Handle user registration: show the form on GET, validate and create on POST."""
    if request.method == 'GET':
        return templates.TemplateResponse('register.html',
                                          {'request': request,
                                           'username': '',
                                           'error': []})
    if request.method == 'POST':
        data = await request.form()
        username = data.get('username')
        password = data.get('password')
        password_tmp = data.get('password_tmp')
        mail = data.get('mail')
        error = []
        tmp_user = db.session.query(User).filter(User.username == username).first()
        # Validation: uniqueness, password confirmation, and format checks.
        if tmp_user is not None:
            error.append('同じユーザ名のユーザが存在します。')
        if password != password_tmp:
            error.append('入力したパスワードが一致しません。')
        if pattern.match(username) is None:
            error.append('ユーザ名は4~20文字の半角英数字にしてください。')
        if pattern_pw.match(password) is None:
            error.append('パスワードは6~20文字の半角英数字にしてください。')
        if pattern_mail.match(mail) is None:
            error.append('正しくメールアドレスを入力してください。')
        # If there were errors, send the user back to the registration page.
        if error:
            return templates.TemplateResponse('register.html',
                                              {'request': request,
                                               'username': username,
                                               'error': error})
        # No problems, so register the user.
        user = User(username, password, mail)
        db.session.add(user)
        db.session.commit()
        db.session.close()
        return templates.TemplateResponse('complete.html',
                                          {'request': request,
                                           'username': username})
def detail(request: Request, username, year, month, day,
           credentials: HTTPBasicCredentials = Depends(security)):
    """Show the authenticated user's tasks for a single day."""
    username_tmp = auth(credentials)
    if username_tmp != username:  # reject visits by any other user
        return RedirectResponse('/')
    # Fetch the logged-in user.
    user = db.session.query(User).filter(User.username == username).first()
    # Fetch the logged-in user's tasks.
    task = db.session.query(Task).filter(Task.user_id == user.id).all()
    db.session.close()
    # Keep only the tasks that match the requested date.
    theday = f'{year}{month.zfill(2)}{day.zfill(2)}'  # zero-pad month and day
    task = [t for t in task if t.deadline.strftime('%Y%m%d') == theday]
    return templates.TemplateResponse('detail.html',
                                      {'request': request,
                                       'username': username,
                                       'task': task,
                                       'year': year,
                                       'month': month,
                                       'day': day})
async def done(request: Request, credentials: HTTPBasicCredentials = Depends(security)):
    """Mark the tasks checked in the submitted form as finished."""
    username = auth(credentials)
    # Fetch the user.
    user = db.session.query(User).filter(User.username == username).first()
    # Fetch the logged-in user's tasks.
    task = db.session.query(Task).filter(Task.user_id == user.id).all()
    # Inspect the completion flags received from the form and update the tasks.
    data = await request.form()
    t_dones = data.getlist('done[]')  # fetched as a list
    for t in task:
        if str(t.id) in t_dones:  # if the id matches, treat the task as done
            t.done = True
    db.session.commit()  # update!!
    db.session.close()
    return RedirectResponse('/admin')
async def add(request: Request, credentials: HTTPBasicCredentials = Depends(security)):
    """Create a new task for the authenticated user from the submitted form.

    The form must carry ``content`` plus ``year``/``month``/``day``/``hour``/
    ``minute`` fields describing the deadline.
    """
    username = auth(credentials)
    user = db.session.query(User).filter(User.username == username).first()
    # Read the task fields from the form. (A leftover debug ``print(data)``
    # was removed here.)
    data = await request.form()
    year = int(data['year'])
    month = int(data['month'])
    day = int(data['day'])
    hour = int(data['hour'])
    minute = int(data['minute'])
    deadline = datetime(year=year, month=month, day=day,
                        hour=hour, minute=minute)
    # Create the new task and commit it.
    task = Task(user.id, data['content'], deadline)
    db.session.add(task)
    db.session.commit()
    db.session.close()
    return RedirectResponse('/admin')
def delete(request: Request, t_id, credentials: HTTPBasicCredentials = Depends(security)):
    """Delete task ``t_id``, but only when it belongs to the authenticated user."""
    username = auth(credentials)
    user = db.session.query(User).filter(User.username == username).first()
    task = db.session.query(Task).filter(Task.id == t_id).first()
    # If the task belongs to a different user, redirect without deleting.
    # The session must be closed on this path too (the original leaked it).
    if task.user_id != user.id:
        db.session.close()
        return RedirectResponse('/admin')
    # Delete and commit.
    db.session.delete(task)
    db.session.commit()
    db.session.close()
    return RedirectResponse('/admin')
def get(request: Request, credentials: HTTPBasicCredentials = Depends(security)):
    """Return all of the authenticated user's tasks as JSON-serialisable dicts."""
    username = auth(credentials)
    user = db.session.query(User).filter(User.username == username).first()
    rows = db.session.query(Task).filter(Task.user_id == user.id).all()
    db.session.close()
    # Serialise each task into a plain dict (JSON format).
    serialised = []
    for row in rows:
        serialised.append({
            'id': row.id,
            'content': row.content,
            'deadline': row.deadline.strftime('%Y-%m-%d %H:%M:%S'),
            'published': row.date.strftime('%Y-%m-%d %H:%M:%S'),
            'done': row.done,
        })
    return serialised
async def insert(request: Request,
                 content: str = Form(...), deadline: str = Form(...),
                 credentials: HTTPBasicCredentials = Depends(security)):
    """
    Add a task and return the new task as JSON. ``deadline`` uses the
    %Y-%m-%d_%H:%M:%S format (e.g. 2019-11-03_12:30:00).
    """
    username = auth(credentials)
    user = db.session.query(User).filter(User.username == username).first()
    task = Task(user.id, content, datetime.strptime(deadline, '%Y-%m-%d_%H:%M:%S'))
    db.session.add(task)
    db.session.commit()
    # Fetch the task that was just added from the table.
    task = db.session.query(Task).all()[-1]
    db.session.close()
    # Return the new task as JSON.
    return {
        'id': task.id,
        'content': task.content,
        'deadline': task.deadline.strftime('%Y-%m-%d %H:%M:%S'),
        'published': task.date.strftime('%Y-%m-%d %H:%M:%S'),
        'done': task.done,
    }
| 32.871486 | 94 | 0.579475 | from fastapi import FastAPI, Depends, Form
from fastapi.security import HTTPBasic, HTTPBasicCredentials
from starlette.templating import Jinja2Templates
from starlette.requests import Request
from starlette.responses import RedirectResponse
from datetime import datetime, timedelta
import db
import hashlib
from mycalendar import MyCalendar
import re
from auth import auth
from models import User, Task
app = FastAPI(
title='FastAPIでつくるToDoアプリケーション',
description='FastAPIチュートリアル:FastAPI(とstarlette)でシンプルなToDoアプリの作成',
version='0.0.1'
)
security = HTTPBasic()
templates = Jinja2Templates(directory="templates")
jinja_env = templates.env
pattern = re.compile(r'\w{4,20}')
pattern_pw = re.compile(r'\w{6,20}')
pattern_mail = re.compile(r'^\w+([-+.]\w+)*@\w+([-.]\w+)*\.\w+([-.]\w+)*$')
def index(request: Request):
return templates.TemplateResponse('index.html',
{'request': request})
def admin(request: Request, credentials: HTTPBasicCredentials = Depends(security)):
username = auth(credentials)
user = db.session.query(User).filter(User.username == username).first()
task = db.session.query(Task).filter(Task.user_id == user.id).all()
db.session.close()
today = datetime.now()
next_w = today + timedelta(days=7)
cal = MyCalendar(username,
{t.deadline.strftime('%Y%m%d'): t.done for t in task})
cal = cal.formatyear(today.year, 4)
task = [t for t in task if today <= t.deadline <= next_w]
links = [t.deadline.strftime('/todo/'+username+'/%Y/%m/%d') for t in task]
return templates.TemplateResponse('admin.html',
{'request': request,
'user': user,
'task': task,
'links': links,
'calender': cal})
async def register(request: Request):
if request.method == 'GET':
return templates.TemplateResponse('register.html',
{'request': request,
'username': '',
'error': []})
if request.method == 'POST':
data = await request.form()
username = data.get('username')
password = data.get('password')
password_tmp = data.get('password_tmp')
mail = data.get('mail')
error = []
tmp_user = db.session.query(User).filter(User.username == username).first()
if tmp_user is not None:
error.append('同じユーザ名のユーザが存在します。')
if password != password_tmp:
error.append('入力したパスワードが一致しません。')
if pattern.match(username) is None:
error.append('ユーザ名は4~20文字の半角英数字にしてください。')
if pattern_pw.match(password) is None:
error.append('パスワードは6~20文字の半角英数字にしてください。')
if pattern_mail.match(mail) is None:
error.append('正しくメールアドレスを入力してください。')
if error:
return templates.TemplateResponse('register.html',
{'request': request,
'username': username,
'error': error})
user = User(username, password, mail)
db.session.add(user)
db.session.commit()
db.session.close()
return templates.TemplateResponse('complete.html',
{'request': request,
'username': username})
def detail(request: Request, username, year, month, day,
credentials: HTTPBasicCredentials = Depends(security)):
username_tmp = auth(credentials)
if username_tmp != username:
return RedirectResponse('/')
user = db.session.query(User).filter(User.username == username).first()
task = db.session.query(Task).filter(Task.user_id == user.id).all()
db.session.close()
theday = f'{year}{month.zfill(2)}{day.zfill(2)}'
task = [t for t in task if t.deadline.strftime('%Y%m%d') == theday]
return templates.TemplateResponse('detail.html',
{'request': request,
'username': username,
'task': task,
'year': year,
'month': month,
'day': day})
async def done(request: Request, credentials: HTTPBasicCredentials = Depends(security)):
username = auth(credentials)
user = db.session.query(User).filter(User.username == username).first()
task = db.session.query(Task).filter(Task.user_id == user.id).all()
data = await request.form()
t_dones = data.getlist('done[]')
for t in task:
if str(t.id) in t_dones:
t.done = True
db.session.commit()
db.session.close()
return RedirectResponse('/admin')
async def add(request: Request, credentials: HTTPBasicCredentials = Depends(security)):
username = auth(credentials)
user = db.session.query(User).filter(User.username == username).first()
data = await request.form()
print(data)
year = int(data['year'])
month = int(data['month'])
day = int(data['day'])
hour = int(data['hour'])
minute = int(data['minute'])
deadline = datetime(year=year, month=month, day=day,
hour=hour, minute=minute)
task = Task(user.id, data['content'], deadline)
db.session.add(task)
db.session.commit()
db.session.close()
return RedirectResponse('/admin')
def delete(request: Request, t_id, credentials: HTTPBasicCredentials = Depends(security)):
username = auth(credentials)
user = db.session.query(User).filter(User.username == username).first()
task = db.session.query(Task).filter(Task.id == t_id).first()
if task.user_id != user.id:
return RedirectResponse('/admin')
db.session.delete(task)
db.session.commit()
db.session.close()
return RedirectResponse('/admin')
def get(request: Request, credentials: HTTPBasicCredentials = Depends(security)):
username = auth(credentials)
user = db.session.query(User).filter(User.username == username).first()
task = db.session.query(Task).filter(Task.user_id == user.id).all()
db.session.close()
task = [{
'id': t.id,
'content': t.content,
'deadline': t.deadline.strftime('%Y-%m-%d %H:%M:%S'),
'published': t.date.strftime('%Y-%m-%d %H:%M:%S'),
'done': t.done,
} for t in task]
return task
async def insert(request: Request,
content: str = Form(...), deadline: str = Form(...),
credentials: HTTPBasicCredentials = Depends(security)):
username = auth(credentials)
user = db.session.query(User).filter(User.username == username).first()
task = Task(user.id, content, datetime.strptime(deadline, '%Y-%m-%d_%H:%M:%S'))
db.session.add(task)
db.session.commit()
task = db.session.query(Task).all()[-1]
db.session.close()
return {
'id': task.id,
'content': task.content,
'deadline': task.deadline.strftime('%Y-%m-%d %H:%M:%S'),
'published': task.date.strftime('%Y-%m-%d %H:%M:%S'),
'done': task.done,
}
| true | true |
f72b01644b9c24e4ff1dde34645ffd6b1aec9355 | 2,765 | py | Python | Contrib/LEF/ClusterFps.py | kazuyaujihara/rdkit | 06027dcd05674787b61f27ba46ec0d42a6037540 | [
"BSD-3-Clause"
] | 1,609 | 2015-01-05T02:41:13.000Z | 2022-03-30T21:57:24.000Z | Contrib/LEF/ClusterFps.py | kazuyaujihara/rdkit | 06027dcd05674787b61f27ba46ec0d42a6037540 | [
"BSD-3-Clause"
] | 3,412 | 2015-01-06T12:13:33.000Z | 2022-03-31T17:25:41.000Z | Contrib/LEF/ClusterFps.py | kazuyaujihara/rdkit | 06027dcd05674787b61f27ba46ec0d42a6037540 | [
"BSD-3-Clause"
] | 811 | 2015-01-11T03:33:48.000Z | 2022-03-28T11:57:49.000Z | #
# Copyright (c) 2009, Novartis Institutes for BioMedical Research Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Novartis Institutes for BioMedical Research Inc.
# nor the names of its contributors may be used to endorse or promote
# products derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Created by Greg Landrum and Anna Vulpetti, March 2009
from rdkit.ML.Cluster import Butina
from rdkit import DataStructs
import sys, pickle
# sims is the list of similarity thresholds used to generate clusters
sims = [.9, .8, .7, .6]

smis = []   # SMILES already seen, used to de-duplicate molecules
uniq = []   # one row per unique molecule: [fp, name, smiles, fp label, extras...]
uFps = []   # unique fingerprints
for fileN in sys.argv[1:]:
    # Each input file holds two pickles: the column names, then the
    # fingerprint rows. Fixes: open the loop's file (the original always
    # re-opened sys.argv[1]), use open() in binary mode (the Python 2
    # ``file`` builtin no longer exists and pickle requires bytes), and
    # close the file via the context manager.
    with open(fileN, 'rb') as inF:
        cols = pickle.load(inF)
        fps = pickle.load(inF)
    for row in fps:
        nm, smi, fp = row[:3]
        if smi not in smis:
            try:
                fpIdx = uFps.index(fp)
            except ValueError:
                fpIdx = len(uFps)
                uFps.append(fp)
            uniq.append([fp, nm, smi, 'FP_%d' % fpIdx] + row[3:])
            smis.append(smi)


def distFunc(a, b):
    """Distance between two rows: 1 - Dice similarity of their fingerprints."""
    return 1. - DataStructs.DiceSimilarity(a[0], b[0])


for sim in sims:
    clusters = Butina.ClusterData(uniq, len(uniq), 1. - sim, False, distFunc)
    print('Sim: %.2f, nClusters: %d' % (sim, len(clusters)), file=sys.stderr)
    # Tag every row with the (1-based) id of the cluster it fell into.
    for i, cluster in enumerate(clusters):
        for pt in cluster:
            uniq[pt].append(str(i + 1))
    cols.append('cluster_thresh_%d' % (int(100 * sim)))
    print(' '.join(cols))
for row in uniq:
    print(' '.join(row[1:]))
| 37.364865 | 86 | 0.707052 |
from rdkit.ML.Cluster import Butina
from rdkit import DataStructs
import sys, pickle
sims = [.9, .8, .7, .6]
smis = []
uniq = []
uFps = []
for fileN in sys.argv[1:]:
inF = file(sys.argv[1], 'r')
cols = pickle.load(inF)
fps = pickle.load(inF)
for row in fps:
nm, smi, fp = row[:3]
if smi not in smis:
try:
fpIdx = uFps.index(fp)
except ValueError:
fpIdx = len(uFps)
uFps.append(fp)
uniq.append([fp, nm, smi, 'FP_%d' % fpIdx] + row[3:])
smis.append(smi)
def distFunc(a, b):
return 1. - DataStructs.DiceSimilarity(a[0], b[0])
for sim in sims:
clusters = Butina.ClusterData(uniq, len(uniq), 1. - sim, False, distFunc)
print('Sim: %.2f, nClusters: %d' % (sim, len(clusters)), file=sys.stderr)
for i, cluster in enumerate(clusters):
for pt in cluster:
uniq[pt].append(str(i + 1))
cols.append('cluster_thresh_%d' % (int(100 * sim)))
print(' '.join(cols))
for row in uniq:
print(' '.join(row[1:]))
| true | true |
f72b01a7f0fb8665343e290a8c45dfabc5c03f99 | 801 | py | Python | predictability_utils/utils/helpers.py | marpyr/forecast_predictability | 2285b37e20095ae6f67533595bcb0580882924a2 | [
"MIT"
] | 2 | 2020-10-23T08:58:18.000Z | 2021-05-03T17:30:03.000Z | predictability_utils/utils/helpers.py | marpyr/forecast_predictability | 2285b37e20095ae6f67533595bcb0580882924a2 | [
"MIT"
] | null | null | null | predictability_utils/utils/helpers.py | marpyr/forecast_predictability | 2285b37e20095ae6f67533595bcb0580882924a2 | [
"MIT"
] | 1 | 2020-10-23T09:07:19.000Z | 2020-10-23T09:07:19.000Z | import numpy as np
def compute_anomaly_corrs(out_true, out_pred):
    """Per-column Pearson correlation between predictions and truth.

    Args:
        out_true: array of shape (n_samples, n_targets) with observed values.
        out_pred: array of shape (n_samples, n_targets) with predicted values.

    Returns:
        1-D array of length n_targets; entry i is the correlation of
        out_pred[:, i] with out_true[:, i].
    """
    n_targets = out_pred.shape[1]
    corrs = np.empty(n_targets)
    for col in range(n_targets):
        corrs[col] = np.corrcoef(out_pred[:, col], out_true[:, col])[0, 1]
    return corrs
def split_train_data(train_months, test_months, train_years, test_years):
    """Build flat month indices (month + 12 * year) for the four
    source/target x train/test combinations.

    Returns:
        tuple: four arrays of shape (n_months, n_years) in the order
        (source_train, target_train, source_test, target_test).
    """
    def make_idx(months, years):
        # Broadcast a column of months against a row of year offsets.
        month_col = np.asarray(months).reshape(-1, 1)
        year_row = 12 * np.asarray(years).flatten()
        return month_col + year_row

    return (make_idx(train_months, train_years),
            make_idx(test_months, train_years),
            make_idx(train_months, test_years),
            make_idx(test_months, test_years))
return idx_source_train, idx_target_train, idx_source_test, idx_target_test | 36.409091 | 80 | 0.740325 | import numpy as np
def compute_anomaly_corrs(out_true, out_pred):
anomaly_corrs = np.zeros(out_pred.shape[1])
for i in range(anomaly_corrs.size):
anomaly_corrs[i] = np.corrcoef(out_pred[:,i], out_true[:,i])[0,1]
return anomaly_corrs
def split_train_data(train_months, test_months, train_years, test_years):
def make_idx(months, years):
return np.asarray(months).reshape(-1,1)+(12*np.asarray(years).flatten())
idx_source_train = make_idx(train_months, train_years)
idx_target_train = make_idx(test_months, train_years)
idx_source_test = make_idx(train_months, test_years)
idx_target_test = make_idx(test_months, test_years)
return idx_source_train, idx_target_train, idx_source_test, idx_target_test | true | true |
f72b01c050db440e10771a348c74c4d89b91660f | 19,971 | py | Python | dfvfs/lib/gzipfile.py | dfjxs/dfvfs | a4154b07bb08c3c86afa2847f3224189dd80c138 | [
"Apache-2.0"
] | 176 | 2015-01-02T13:55:39.000Z | 2022-03-12T11:44:37.000Z | dfvfs/lib/gzipfile.py | dfjxs/dfvfs | a4154b07bb08c3c86afa2847f3224189dd80c138 | [
"Apache-2.0"
] | 495 | 2015-01-13T06:47:06.000Z | 2022-03-12T11:07:03.000Z | dfvfs/lib/gzipfile.py | dfjxs/dfvfs | a4154b07bb08c3c86afa2847f3224189dd80c138 | [
"Apache-2.0"
] | 62 | 2015-02-23T08:19:38.000Z | 2022-03-18T06:01:22.000Z | # -*- coding: utf-8 -*-
"""Gzip compressed stream file."""
# Note: do not rename file to gzip.py this can cause the exception:
# AttributeError: 'module' object has no attribute 'GzipFile'
# when using pip.
import collections
import os
from dtfabric.runtime import fabric as dtfabric_fabric
from dfvfs.compression import zlib_decompressor
from dfvfs.lib import data_format
from dfvfs.lib import errors
class _GzipDecompressorState(object):
  """Wraps a deflate decompressor while reading one gzip member.

  The wrapper remembers where in the containing file object the next
  compressed bytes should be read from, so reads can resume across calls.

  Attributes:
    uncompressed_offset (int): offset into the uncompressed data in a gzip
        member last emitted by the state object.
  """

  _MAXIMUM_READ_SIZE = 16 * 1024 * 1024

  def __init__(self, stream_start):
    """Initializes a gzip member decompressor wrapper.

    Args:
      stream_start (int): offset to the compressed stream within the
          containing file object.
    """
    self._compressed_data = b''
    self._decompressor = zlib_decompressor.DeflateDecompressor()
    self._last_read = stream_start
    self.uncompressed_offset = 0

  def Read(self, file_object):
    """Reads and decompresses the next chunk of the gzip stream.

    Args:
      file_object (FileIO): file object that contains the compressed stream.

    Returns:
      bytes: next uncompressed data from the compressed stream.
    """
    file_object.seek(self._last_read, os.SEEK_SET)
    chunk = file_object.read(self._MAXIMUM_READ_SIZE)
    self._last_read = file_object.get_offset()

    # Prepend any compressed bytes left over from the previous call.
    buffered = self._compressed_data + chunk
    uncompressed, remainder = self._decompressor.Decompress(buffered)
    self._compressed_data = remainder

    self.uncompressed_offset += len(uncompressed)
    return uncompressed

  def GetUnusedData(self):
    """Retrieves any bytes past the end of the compressed data.

    See https://docs.python.org/2/library/zlib.html#zlib.Decompress.unused_data

    Unused data can be any bytes after a Deflate compressed block (or chunk).

    Returns:
      bytes: data past the end of the compressed data, if any has been read
          from the gzip file.
    """
    return self._decompressor.unused_data
class GzipMember(data_format.DataFormat):
"""Gzip member.
Gzip files have no index of members, so each member must be read
sequentially before metadata and random seeks are possible. This class
provides caching of gzip member data during the initial read of each member.
Attributes:
comment (str): comment stored in the member.
member_end_offset (int): offset to the end of the member in the parent file
object.
member_start_offset (int): offset to the start of the member in the parent
file object.
operating_system (int): type of file system on which the compression
took place.
original_filename (str): original filename of the uncompressed file.
uncompressed_data_offset (int): offset of the start of the uncompressed
data in this member relative to the whole gzip file's uncompressed data.
uncompressed_data_size (int): total size of the data in this gzip member
after decompression.
"""
_DATA_TYPE_FABRIC_DEFINITION_FILE = os.path.join(
os.path.dirname(__file__), 'gzipfile.yaml')
with open(_DATA_TYPE_FABRIC_DEFINITION_FILE, 'rb') as file_object:
_DATA_TYPE_FABRIC_DEFINITION = file_object.read()
_DATA_TYPE_FABRIC = dtfabric_fabric.DataTypeFabric(
yaml_definition=_DATA_TYPE_FABRIC_DEFINITION)
_MEMBER_HEADER = _DATA_TYPE_FABRIC.CreateDataTypeMap(
'gzip_member_header')
_MEMBER_HEADER_SIZE = _MEMBER_HEADER.GetByteSize()
_MEMBER_FOOTER = _DATA_TYPE_FABRIC.CreateDataTypeMap(
'gzip_member_footer')
_MEMBER_FOOTER_SIZE = _MEMBER_FOOTER.GetByteSize()
_UINT16LE = _DATA_TYPE_FABRIC.CreateDataTypeMap('uint16le')
_UINT16LE_SIZE = _UINT16LE.GetByteSize()
_CSTRING = _DATA_TYPE_FABRIC.CreateDataTypeMap('cstring')
_GZIP_SIGNATURE = 0x8b1f
_COMPRESSION_METHOD_DEFLATE = 8
_FLAG_FTEXT = 0x01
_FLAG_FHCRC = 0x02
_FLAG_FEXTRA = 0x04
_FLAG_FNAME = 0x08
_FLAG_FCOMMENT = 0x10
# The maximum size of the uncompressed data cache.
_UNCOMPRESSED_DATA_CACHE_SIZE = 2 * 1024 * 1024
def __init__(
self, file_object, member_start_offset, uncompressed_data_offset):
"""Initializes a gzip member.
Args:
file_object (FileIO): file-like object, containing the gzip member.
member_start_offset (int): offset to the beginning of the gzip member
in the containing file.
uncompressed_data_offset (int): offset of the start of the uncompressed
data in this member relative to the whole gzip file's uncompressed
data.
"""
self._cache = b''
# End offset of the cached uncompressed data of the member.
self._cache_end_offset = None
# Start offset of the cached uncompressed data of the member.
self._cache_start_offset = None
self.comment = None
self.modification_time = None
self.operating_system = None
self.original_filename = None
file_size = file_object.get_size()
file_object.seek(member_start_offset, os.SEEK_SET)
self._ReadMemberHeader(file_object)
data_offset = 0
uncompressed_data_size = 0
compressed_data_offset = file_object.get_offset()
decompressor_state = _GzipDecompressorState(compressed_data_offset)
# Read the member data to determine the uncompressed data size and
# the offset of the member footer.
file_offset = compressed_data_offset
while file_offset < file_size:
data_offset += uncompressed_data_size
decompressed_data = decompressor_state.Read(file_object)
uncompressed_data_size += len(decompressed_data)
# Note that unused data will be set when the decompressor reads beyond
# the end of the compressed data stream.
unused_data = decompressor_state.GetUnusedData()
if unused_data:
file_object.seek(-len(unused_data), os.SEEK_CUR)
file_offset = file_object.get_offset()
break
file_offset = file_object.get_offset()
# Do not read the the last member footer if it is missing, which is
# a common corruption scenario.
if file_offset < file_size:
self._ReadStructure(
file_object, file_offset, self._MEMBER_FOOTER_SIZE,
self._MEMBER_FOOTER, 'member footer')
member_end_offset = file_object.get_offset()
# Initialize the member with data.
self._file_object = file_object
self._file_object.seek(member_start_offset, os.SEEK_SET)
# Cache uncompressed data of gzip files that fit entirely in the cache.
if (data_offset == 0 and
uncompressed_data_size < self._UNCOMPRESSED_DATA_CACHE_SIZE):
self._cache = decompressed_data
self._cache_start_offset = 0
self._cache_end_offset = uncompressed_data_size
# Offset to the beginning of the compressed data in the file object.
self._compressed_data_start = compressed_data_offset
self._decompressor_state = _GzipDecompressorState(compressed_data_offset)
# Offset to the start of the member in the parent file object.
self.member_start_offset = member_start_offset
# Offset to the end of the member in the parent file object.
self.member_end_offset = member_end_offset
# Total size of the data in this gzip member after decompression.
self.uncompressed_data_size = uncompressed_data_size
# Offset of the start of the uncompressed data in this member relative to
# the whole gzip file's uncompressed data.
self.uncompressed_data_offset = uncompressed_data_offset
def _GetCacheSize(self):
"""Determines the size of the uncompressed cached data.
Returns:
int: number of cached bytes.
"""
if None in (self._cache_start_offset, self._cache_end_offset):
return 0
return self._cache_end_offset - self._cache_start_offset
def _IsCacheFull(self):
"""Checks whether the uncompressed data cache is full.
Returns:
bool: True if the cache is full.
"""
return self._GetCacheSize() >= self._UNCOMPRESSED_DATA_CACHE_SIZE
  def _LoadDataIntoCache(self, file_object, minimum_offset):
    """Reads and decompresses the data in the member.

    This function already loads as much data as possible in the cache, up to
    UNCOMPRESSED_DATA_CACHE_SIZE bytes.

    Args:
      file_object (FileIO): file-like object.
      minimum_offset (int): offset into this member's uncompressed data at
          which the cache should start.
    """
    # Decompression can only be performed from beginning to end of the stream.
    # So, if data before the current position of the decompressor in the stream
    # is required, it's necessary to throw away the current decompression
    # state and start again.
    if minimum_offset < self._decompressor_state.uncompressed_offset:
      self._ResetDecompressorState()
    cache_is_full = self._IsCacheFull()
    while not cache_is_full:
      decompressed_data = self._decompressor_state.Read(file_object)
      # Note that decompressed_data will be empty if there is no data left
      # to read and decompress.
      if not decompressed_data:
        break
      decompressed_data_length = len(decompressed_data)
      # The decompressor has already advanced past the chunk just read, so
      # the chunk covers [end - length, end) of the uncompressed data.
      decompressed_end_offset = self._decompressor_state.uncompressed_offset
      decompressed_start_offset = (
          decompressed_end_offset - decompressed_data_length)
      data_to_add = decompressed_data
      added_data_start_offset = decompressed_start_offset
      # Drop chunks that lie entirely before minimum_offset; a chunk that
      # straddles minimum_offset is trimmed below so that caching starts at
      # exactly that offset.
      if decompressed_start_offset < minimum_offset:
        data_to_add = None
      if decompressed_start_offset < minimum_offset < decompressed_end_offset:
        data_add_offset = decompressed_end_offset - minimum_offset
        data_to_add = decompressed_data[-data_add_offset:]
        added_data_start_offset = decompressed_end_offset - data_add_offset
      if data_to_add and not cache_is_full:
        self._cache = b''.join([self._cache, data_to_add])
        if self._cache_start_offset is None:
          self._cache_start_offset = added_data_start_offset
        if self._cache_end_offset is None:
          self._cache_end_offset = self._cache_start_offset + len(data_to_add)
        else:
          self._cache_end_offset += len(data_to_add)
        cache_is_full = self._IsCacheFull()
      # If there's no more data in the member, the unused_data value is
      # populated in the decompressor. When this situation arises, we rewind
      # to the end of the compressed_data section.
      unused_data = self._decompressor_state.GetUnusedData()
      if unused_data:
        seek_offset = -len(unused_data)
        file_object.seek(seek_offset, os.SEEK_CUR)
        self._ResetDecompressorState()
        break
  def _ReadMemberHeader(self, file_object):
    """Reads a member header.

    Also reads the optional extra field, original filename, comment and
    header CRC that follow the fixed-size header, depending on the flags.

    Args:
      file_object (FileIO): file-like object to read from.

    Raises:
      FileFormatError: if the member header cannot be read.
    """
    file_offset = file_object.get_offset()
    member_header = self._ReadStructure(
        file_object, file_offset, self._MEMBER_HEADER_SIZE,
        self._MEMBER_HEADER, 'member header')
    if member_header.signature != self._GZIP_SIGNATURE:
      raise errors.FileFormatError(
          'Unsupported signature: 0x{0:04x}.'.format(member_header.signature))
    if member_header.compression_method != self._COMPRESSION_METHOD_DEFLATE:
      raise errors.FileFormatError(
          'Unsupported compression method: {0:d}.'.format(
              member_header.compression_method))
    self.modification_time = member_header.modification_time
    self.operating_system = member_header.operating_system
    # Optional extra field: read its 16-bit size and skip over the data.
    if member_header.flags & self._FLAG_FEXTRA:
      file_offset = file_object.get_offset()
      extra_field_data_size = self._ReadStructure(
          file_object, file_offset, self._UINT16LE_SIZE,
          self._UINT16LE, 'extra field data size')
      file_object.seek(extra_field_data_size, os.SEEK_CUR)
    # Optional NUL-terminated original filename.
    if member_header.flags & self._FLAG_FNAME:
      file_offset = file_object.get_offset()
      string_value = self._ReadString(
          file_object, file_offset, self._CSTRING, 'original filename')
      self.original_filename = string_value.rstrip('\x00')
    # Optional NUL-terminated comment.
    if member_header.flags & self._FLAG_FCOMMENT:
      file_offset = file_object.get_offset()
      string_value = self._ReadString(
          file_object, file_offset, self._CSTRING, 'comment')
      self.comment = string_value.rstrip('\x00')
    # Optional 16-bit header CRC; skipped without validation here.
    if member_header.flags & self._FLAG_FHCRC:
      file_object.read(2)
  def _ResetDecompressorState(self):
    """Resets the state of the internal decompression object."""
    # A fresh decompressor state restarts reading at the beginning of this
    # member's compressed data.
    self._decompressor_state = _GzipDecompressorState(
        self._compressed_data_start)
def FlushCache(self):
"""Empties the cache that holds cached decompressed data."""
self._cache = b''
self._cache_start_offset = None
self._cache_end_offset = None
self._ResetDecompressorState()
def ReadAtOffset(self, offset, size=None):
"""Reads a byte string from the gzip member at the specified offset.
The function will read a byte string of the specified size or
all of the remaining data if no size was specified.
Args:
offset (int): offset within the uncompressed data in this member to
read from.
size (Optional[int]): maximum number of bytes to read, where None
represents all remaining data, to a maximum of the uncompressed
cache size.
Returns:
bytes: data read.
Raises:
IOError: if the read failed.
ValueError: if a negative read size or offset is specified.
"""
if size is not None and size < 0:
raise ValueError('Invalid size value {0!s}'.format(size))
if offset < 0:
raise ValueError('Invalid offset value {0!s}'.format(offset))
if size == 0 or offset >= self.uncompressed_data_size:
return b''
if self._cache_start_offset is None:
self._LoadDataIntoCache(self._file_object, offset)
if offset > self._cache_end_offset or offset < self._cache_start_offset:
self.FlushCache()
self._LoadDataIntoCache(self._file_object, offset)
cache_offset = offset - self._cache_start_offset
if not size:
return self._cache[cache_offset:]
data_end_offset = cache_offset + size
if data_end_offset > self._cache_end_offset:
return self._cache[cache_offset:]
return self._cache[cache_offset:data_end_offset]
class GzipCompressedStream(object):
  """File-like object of a gzip compressed stream (file).

  The gzip file format is defined in RFC1952: http://www.zlib.org/rfc-gzip.html

  Attributes:
    uncompressed_data_size (int): total size of the decompressed data stored
        in the gzip file.
  """

  def __init__(self):
    """Initializes a file-like object."""
    super(GzipCompressedStream, self).__init__()
    self._compressed_data_size = -1
    self._current_offset = 0
    self._file_object = None
    # Maps the (exclusive) end offset of each member within the uncompressed
    # data to the corresponding GzipMember. Members are inserted in file
    # order, which _GetMemberForOffset relies on for its linear scan.
    self._members_by_end_offset = collections.OrderedDict()

    self.uncompressed_data_size = 0

  @property
  def members(self):
    """list(GzipMember): members in the gzip file."""
    return list(self._members_by_end_offset.values())

  def _GetMemberForOffset(self, offset):
    """Finds the member whose data includes the provided offset.

    Args:
      offset (int): offset in the uncompressed data to find the
          containing member for.

    Returns:
      GzipMember: gzip file member or None if not available.

    Raises:
      ValueError: if the provided offset is outside of the bounds of the
          uncompressed data.
    """
    if offset < 0 or offset >= self.uncompressed_data_size:
      raise ValueError('Offset {0:d} is larger than file size {1:d}.'.format(
          offset, self.uncompressed_data_size))

    # End offsets are exclusive and ascending, so the first end offset larger
    # than the requested offset identifies the containing member.
    for end_offset, member in self._members_by_end_offset.items():
      if offset < end_offset:
        return member

    return None

  def Open(self, file_object):
    """Opens the file-like object defined by path specification.

    Args:
      file_object (FileIO): file-like object that contains the gzip
          compressed stream.

    Raises:
      IOError: if the file-like object could not be opened.
      OSError: if the file-like object could not be opened.
    """
    file_size = file_object.get_size()

    file_object.seek(0, os.SEEK_SET)

    uncompressed_data_offset = 0
    next_member_offset = 0

    # Members are stored back-to-back; walk them until the end of the file.
    while next_member_offset < file_size:
      member = GzipMember(
          file_object, next_member_offset, uncompressed_data_offset)
      uncompressed_data_offset = (
          uncompressed_data_offset + member.uncompressed_data_size)
      self._members_by_end_offset[uncompressed_data_offset] = member
      self.uncompressed_data_size += member.uncompressed_data_size
      next_member_offset = member.member_end_offset

    self._file_object = file_object

  # Note: that the following functions do not follow the style guide
  # because they are part of the file-like object interface.
  # pylint: disable=invalid-name

  def close(self):
    """Closes the file-like object."""
    # Reset to an empty OrderedDict instead of a list so that the members
    # property keeps working on a closed stream.
    self._members_by_end_offset = collections.OrderedDict()
    if self._file_object:
      self._file_object = None

  def read(self, size=None):
    """Reads a byte string from the gzip file at the current offset.

    The function will read a byte string up to the specified size or
    all of the remaining data if no size was specified.

    Args:
      size (Optional[int]): number of bytes to read, where None is all
          remaining data.

    Returns:
      bytes: data read.

    Raises:
      IOError: if the read failed.
      OSError: if the read failed.
    """
    data = b''
    # With size=None the loop must still run until the end of the data;
    # testing the truthiness of size ("size and ...") would skip the loop
    # entirely and incorrectly return an empty byte string.
    while ((size is None or len(data) < size) and
           self._current_offset < self.uncompressed_data_size):
      member = self._GetMemberForOffset(self._current_offset)
      member_offset = self._current_offset - member.uncompressed_data_offset
      if size is None:
        read_size = None
      else:
        # Only request the bytes that are still missing, otherwise a read
        # that spans multiple members could return more than size bytes.
        read_size = size - len(data)
      data_read = member.ReadAtOffset(member_offset, read_size)
      if not data_read:
        break

      self._current_offset += len(data_read)
      data = b''.join([data, data_read])

    return data

  def seek(self, offset, whence=os.SEEK_SET):
    """Seeks to an offset within the file-like object.

    Args:
      offset (int): offset to seek to.
      whence (Optional(int)): value that indicates whether offset is an
          absolute or relative position within the file.

    Raises:
      IOError: if the seek failed or the file has not been opened.
      OSError: if the seek failed or the file has not been opened.
    """
    if not self._file_object:
      raise IOError('Not opened.')

    if whence == os.SEEK_CUR:
      offset += self._current_offset
    elif whence == os.SEEK_END:
      offset += self.uncompressed_data_size
    elif whence != os.SEEK_SET:
      raise IOError('Unsupported whence.')

    if offset < 0:
      raise IOError('Invalid offset value less than zero.')

    self._current_offset = offset

  def get_offset(self):
    """Retrieves the current offset into the file-like object.

    Returns:
      int: current offset into the file-like object.

    Raises:
      IOError: if the file-like object has not been opened.
      OSError: if the file-like object has not been opened.
    """
    if not self._file_object:
      raise IOError('Not opened.')

    return self._current_offset

  def get_size(self):
    """Retrieves the size of the file-like object.

    Returns:
      int: size of the file-like object data.

    Raises:
      IOError: if the file-like object has not been opened.
      OSError: if the file-like object has not been opened.
    """
    if not self._file_object:
      raise IOError('Not opened.')

    return self.uncompressed_data_size
| 33.452261 | 80 | 0.714286 |
import collections
import os
from dtfabric.runtime import fabric as dtfabric_fabric
from dfvfs.compression import zlib_decompressor
from dfvfs.lib import data_format
from dfvfs.lib import errors
class _GzipDecompressorState(object):
  """Deflate decompressor wrapper that tracks its position in the stream.

  Attributes:
    uncompressed_offset (int): offset into the uncompressed data of the
        member, directly after the data last returned by Read().
  """

  # Maximum number of compressed bytes read from the file per Read() call.
  _MAXIMUM_READ_SIZE = 16 * 1024 * 1024

  def __init__(self, stream_start):
    """Initializes the decompressor state.

    Args:
      stream_start (int): offset in the file object where the compressed
          data stream starts.
    """
    self._compressed_data = b''
    self._decompressor = zlib_decompressor.DeflateDecompressor()
    self._last_read = stream_start
    self.uncompressed_offset = 0

  def Read(self, file_object):
    """Reads and decompresses the next chunk of compressed data.

    Args:
      file_object (FileIO): file-like object to read the compressed data
          from.

    Returns:
      bytes: decompressed data, which is empty when the end of the
          compressed data has been reached.
    """
    # Seek back to where the previous read stopped; compressed bytes the
    # decompressor did not consume are carried over in self._compressed_data.
    file_object.seek(self._last_read, os.SEEK_SET)
    read_data = file_object.read(self._MAXIMUM_READ_SIZE)
    self._last_read = file_object.get_offset()
    compressed_data = b''.join([self._compressed_data, read_data])
    decompressed_data, remaining_compressed_data = (
        self._decompressor.Decompress(compressed_data))
    self._compressed_data = remaining_compressed_data
    self.uncompressed_offset += len(decompressed_data)
    return decompressed_data

  def GetUnusedData(self):
    """Retrieves the data past the end of the compressed data stream.

    Note that unused data is only set once the decompressor has read beyond
    the end of the compressed data stream.

    Returns:
      bytes: data past the end of the compressed data.
    """
    return self._decompressor.unused_data
class GzipMember(data_format.DataFormat):
  """Gzip member.

  A gzip file consists of one or more members: independently compressed
  deflate streams, each preceded by a member header and followed by a
  member footer.

  Attributes:
    comment (str): comment stored in the member header.
    member_end_offset (int): offset to the end of the member in the parent
        file object.
    member_start_offset (int): offset to the start of the member in the
        parent file object.
    modification_time (int): modification time stored in the member header.
    operating_system (int): operating system value stored in the member
        header.
    original_filename (str): original filename stored in the member header.
    uncompressed_data_offset (int): offset of the start of the uncompressed
        data in this member relative to the whole gzip file's uncompressed
        data.
    uncompressed_data_size (int): total size of the data in this gzip
        member after decompression.
  """

  # dtfabric data type definitions of the gzip member structures.
  _DATA_TYPE_FABRIC_DEFINITION_FILE = os.path.join(
      os.path.dirname(__file__), 'gzipfile.yaml')

  with open(_DATA_TYPE_FABRIC_DEFINITION_FILE, 'rb') as file_object:
    _DATA_TYPE_FABRIC_DEFINITION = file_object.read()

  _DATA_TYPE_FABRIC = dtfabric_fabric.DataTypeFabric(
      yaml_definition=_DATA_TYPE_FABRIC_DEFINITION)

  _MEMBER_HEADER = _DATA_TYPE_FABRIC.CreateDataTypeMap(
      'gzip_member_header')
  _MEMBER_HEADER_SIZE = _MEMBER_HEADER.GetByteSize()

  _MEMBER_FOOTER = _DATA_TYPE_FABRIC.CreateDataTypeMap(
      'gzip_member_footer')
  _MEMBER_FOOTER_SIZE = _MEMBER_FOOTER.GetByteSize()

  _UINT16LE = _DATA_TYPE_FABRIC.CreateDataTypeMap('uint16le')
  _UINT16LE_SIZE = _UINT16LE.GetByteSize()

  _CSTRING = _DATA_TYPE_FABRIC.CreateDataTypeMap('cstring')

  # Expected value of the first 2 bytes of a member header.
  _GZIP_SIGNATURE = 0x8b1f

  _COMPRESSION_METHOD_DEFLATE = 8

  # Flag bits of the member header flags value.
  _FLAG_FTEXT = 0x01
  _FLAG_FHCRC = 0x02
  _FLAG_FEXTRA = 0x04
  _FLAG_FNAME = 0x08
  _FLAG_FCOMMENT = 0x10

  # Maximum number of uncompressed bytes cached per member.
  _UNCOMPRESSED_DATA_CACHE_SIZE = 2 * 1024 * 1024

  def __init__(
      self, file_object, member_start_offset, uncompressed_data_offset):
    """Initializes a gzip member.

    Reading the member scans its compressed data once to determine the
    uncompressed data size and the offset of the member footer.

    Args:
      file_object (FileIO): file-like object that contains the gzip file.
      member_start_offset (int): offset to the beginning of this member in
          the file object.
      uncompressed_data_offset (int): offset of the start of the
          uncompressed data in this member relative to the whole gzip
          file's uncompressed data.
    """
    self._cache = b''
    self._cache_end_offset = None
    self._cache_start_offset = None
    self.comment = None
    self.modification_time = None
    self.operating_system = None
    self.original_filename = None
    file_size = file_object.get_size()
    file_object.seek(member_start_offset, os.SEEK_SET)
    self._ReadMemberHeader(file_object)
    # Read the member data to determine the uncompressed data size and
    # the offset of the member footer.
    data_offset = 0
    uncompressed_data_size = 0
    compressed_data_offset = file_object.get_offset()
    decompressor_state = _GzipDecompressorState(compressed_data_offset)
    file_offset = compressed_data_offset
    while file_offset < file_size:
      data_offset += uncompressed_data_size
      decompressed_data = decompressor_state.Read(file_object)
      uncompressed_data_size += len(decompressed_data)
      # Note that unused data will be set when the decompressor reads beyond
      # the end of the compressed data stream.
      unused_data = decompressor_state.GetUnusedData()
      if unused_data:
        file_object.seek(-len(unused_data), os.SEEK_CUR)
        file_offset = file_object.get_offset()
        break
      file_offset = file_object.get_offset()
    # Do not read the last member footer if it is missing, which is
    # a common corruption scenario.
    if file_offset < file_size:
      self._ReadStructure(
          file_object, file_offset, self._MEMBER_FOOTER_SIZE,
          self._MEMBER_FOOTER, 'member footer')
    member_end_offset = file_object.get_offset()
    self._file_object = file_object
    self._file_object.seek(member_start_offset, os.SEEK_SET)
    # Cache the uncompressed data of members that fit entirely in the cache;
    # data_offset == 0 means a single Read() chunk covered the whole member.
    # NOTE(review): if the while loop body never ran (member header ends at
    # end-of-file), decompressed_data is undefined here — confirm truncated
    # members cannot reach this branch.
    if (data_offset == 0 and
        uncompressed_data_size < self._UNCOMPRESSED_DATA_CACHE_SIZE):
      self._cache = decompressed_data
      self._cache_start_offset = 0
      self._cache_end_offset = uncompressed_data_size
    # Offset to the beginning of the compressed data in the file object.
    self._compressed_data_start = compressed_data_offset
    self._decompressor_state = _GzipDecompressorState(compressed_data_offset)
    # Offset to the start of the member in the parent file object.
    self.member_start_offset = member_start_offset
    # Offset to the end of the member in the parent file object.
    self.member_end_offset = member_end_offset
    # Total size of the data in this gzip member after decompression.
    self.uncompressed_data_size = uncompressed_data_size
    # Offset of the start of the uncompressed data in this member relative
    # to the whole gzip file's uncompressed data.
    self.uncompressed_data_offset = uncompressed_data_offset

  def _GetCacheSize(self):
    """Determines the size of the uncompressed cached data.

    Returns:
      int: number of cached bytes.
    """
    if None in (self._cache_start_offset, self._cache_end_offset):
      return 0
    return self._cache_end_offset - self._cache_start_offset

  def _IsCacheFull(self):
    """Checks whether the uncompressed data cache is full.

    Returns:
      bool: True if the cache is full.
    """
    return self._GetCacheSize() >= self._UNCOMPRESSED_DATA_CACHE_SIZE

  def _LoadDataIntoCache(self, file_object, minimum_offset):
    """Reads and decompresses the data in the member.

    This function loads as much data as possible into the cache, up to
    _UNCOMPRESSED_DATA_CACHE_SIZE bytes.

    Args:
      file_object (FileIO): file-like object.
      minimum_offset (int): offset into this member's uncompressed data at
          which the cache should start.
    """
    # Decompression can only be performed from beginning to end of the
    # stream. So, if data before the current position of the decompressor
    # in the stream is required, it's necessary to throw away the current
    # decompression state and start again.
    if minimum_offset < self._decompressor_state.uncompressed_offset:
      self._ResetDecompressorState()
    cache_is_full = self._IsCacheFull()
    while not cache_is_full:
      decompressed_data = self._decompressor_state.Read(file_object)
      # decompressed_data is empty when there is no data left to read and
      # decompress.
      if not decompressed_data:
        break
      decompressed_data_length = len(decompressed_data)
      # The decompressor already advanced past this chunk, so the chunk
      # covers [end - length, end) of the uncompressed data.
      decompressed_end_offset = self._decompressor_state.uncompressed_offset
      decompressed_start_offset = (
          decompressed_end_offset - decompressed_data_length)
      data_to_add = decompressed_data
      added_data_start_offset = decompressed_start_offset
      # Drop chunks entirely before minimum_offset and trim a chunk that
      # straddles it so that caching starts at minimum_offset.
      if decompressed_start_offset < minimum_offset:
        data_to_add = None
      if decompressed_start_offset < minimum_offset < decompressed_end_offset:
        data_add_offset = decompressed_end_offset - minimum_offset
        data_to_add = decompressed_data[-data_add_offset:]
        added_data_start_offset = decompressed_end_offset - data_add_offset
      if data_to_add and not cache_is_full:
        self._cache = b''.join([self._cache, data_to_add])
        if self._cache_start_offset is None:
          self._cache_start_offset = added_data_start_offset
        if self._cache_end_offset is None:
          self._cache_end_offset = self._cache_start_offset + len(data_to_add)
        else:
          self._cache_end_offset += len(data_to_add)
        cache_is_full = self._IsCacheFull()
      # When there is no more data in the member, the unused_data value is
      # populated in the decompressor. When this situation arises, we rewind
      # to the end of the compressed_data section.
      unused_data = self._decompressor_state.GetUnusedData()
      if unused_data:
        seek_offset = -len(unused_data)
        file_object.seek(seek_offset, os.SEEK_CUR)
        self._ResetDecompressorState()
        break

  def _ReadMemberHeader(self, file_object):
    """Reads a member header.

    Also reads the optional extra field, original filename, comment and
    header CRC that follow the fixed-size header, depending on the flags.

    Args:
      file_object (FileIO): file-like object to read from.

    Raises:
      FileFormatError: if the member header cannot be read.
    """
    file_offset = file_object.get_offset()
    member_header = self._ReadStructure(
        file_object, file_offset, self._MEMBER_HEADER_SIZE,
        self._MEMBER_HEADER, 'member header')
    if member_header.signature != self._GZIP_SIGNATURE:
      raise errors.FileFormatError(
          'Unsupported signature: 0x{0:04x}.'.format(member_header.signature))
    if member_header.compression_method != self._COMPRESSION_METHOD_DEFLATE:
      raise errors.FileFormatError(
          'Unsupported compression method: {0:d}.'.format(
              member_header.compression_method))
    self.modification_time = member_header.modification_time
    self.operating_system = member_header.operating_system
    # Optional extra field: read its 16-bit size and skip over the data.
    if member_header.flags & self._FLAG_FEXTRA:
      file_offset = file_object.get_offset()
      extra_field_data_size = self._ReadStructure(
          file_object, file_offset, self._UINT16LE_SIZE,
          self._UINT16LE, 'extra field data size')
      file_object.seek(extra_field_data_size, os.SEEK_CUR)
    # Optional NUL-terminated original filename.
    if member_header.flags & self._FLAG_FNAME:
      file_offset = file_object.get_offset()
      string_value = self._ReadString(
          file_object, file_offset, self._CSTRING, 'original filename')
      self.original_filename = string_value.rstrip('\x00')
    # Optional NUL-terminated comment.
    if member_header.flags & self._FLAG_FCOMMENT:
      file_offset = file_object.get_offset()
      string_value = self._ReadString(
          file_object, file_offset, self._CSTRING, 'comment')
      self.comment = string_value.rstrip('\x00')
    # Optional 16-bit header CRC; skipped without validation here.
    if member_header.flags & self._FLAG_FHCRC:
      file_object.read(2)

  def _ResetDecompressorState(self):
    """Resets the state of the internal decompression object."""
    # A fresh decompressor state restarts reading at the beginning of this
    # member's compressed data.
    self._decompressor_state = _GzipDecompressorState(
        self._compressed_data_start)

  def FlushCache(self):
    """Empties the cache that holds cached decompressed data."""
    self._cache = b''
    self._cache_start_offset = None
    self._cache_end_offset = None
    self._ResetDecompressorState()

  def ReadAtOffset(self, offset, size=None):
    """Reads a byte string from the gzip member at the specified offset.

    The function will read a byte string of the specified size or
    all of the remaining data if no size was specified.

    Args:
      offset (int): offset within the uncompressed data in this member to
          read from.
      size (Optional[int]): maximum number of bytes to read, where None
          represents all remaining data, to a maximum of the uncompressed
          cache size.

    Returns:
      bytes: data read.

    Raises:
      IOError: if the read failed.
      ValueError: if a negative read size or offset is specified.
    """
    if size is not None and size < 0:
      raise ValueError('Invalid size value {0!s}'.format(size))
    if offset < 0:
      raise ValueError('Invalid offset value {0!s}'.format(offset))
    if size == 0 or offset >= self.uncompressed_data_size:
      return b''
    if self._cache_start_offset is None:
      self._LoadDataIntoCache(self._file_object, offset)
    # NOTE(review): _cache_end_offset is exclusive, so a read starting
    # exactly at the cache end is not reloaded by this check and falls
    # through to return an empty string — confirm whether >= is intended.
    if offset > self._cache_end_offset or offset < self._cache_start_offset:
      self.FlushCache()
      self._LoadDataIntoCache(self._file_object, offset)
    cache_offset = offset - self._cache_start_offset
    if not size:
      return self._cache[cache_offset:]
    data_end_offset = cache_offset + size
    # NOTE(review): data_end_offset is cache-relative while _cache_end_offset
    # is absolute; slicing clamps, so the returned bytes are the same either
    # way, but the comparison reads as if both were in the same coordinates.
    if data_end_offset > self._cache_end_offset:
      return self._cache[cache_offset:]
    return self._cache[cache_offset:data_end_offset]
class GzipCompressedStream(object):
  """File-like object of a gzip compressed stream (file).

  The gzip file format is defined in RFC1952: http://www.zlib.org/rfc-gzip.html

  Attributes:
    uncompressed_data_size (int): total size of the decompressed data stored
        in the gzip file.
  """

  def __init__(self):
    """Initializes a file-like object."""
    super(GzipCompressedStream, self).__init__()
    self._compressed_data_size = -1
    self._current_offset = 0
    self._file_object = None
    # Maps the (exclusive) end offset of each member within the uncompressed
    # data to the corresponding GzipMember. Members are inserted in file
    # order, which _GetMemberForOffset relies on for its linear scan.
    self._members_by_end_offset = collections.OrderedDict()

    self.uncompressed_data_size = 0

  @property
  def members(self):
    """list(GzipMember): members in the gzip file."""
    return list(self._members_by_end_offset.values())

  def _GetMemberForOffset(self, offset):
    """Finds the member whose data includes the provided offset.

    Args:
      offset (int): offset in the uncompressed data to find the
          containing member for.

    Returns:
      GzipMember: gzip file member or None if not available.

    Raises:
      ValueError: if the provided offset is outside of the bounds of the
          uncompressed data.
    """
    if offset < 0 or offset >= self.uncompressed_data_size:
      raise ValueError('Offset {0:d} is larger than file size {1:d}.'.format(
          offset, self.uncompressed_data_size))

    # End offsets are exclusive and ascending, so the first end offset larger
    # than the requested offset identifies the containing member.
    for end_offset, member in self._members_by_end_offset.items():
      if offset < end_offset:
        return member

    return None

  def Open(self, file_object):
    """Opens the file-like object defined by path specification.

    Args:
      file_object (FileIO): file-like object that contains the gzip
          compressed stream.

    Raises:
      IOError: if the file-like object could not be opened.
      OSError: if the file-like object could not be opened.
    """
    file_size = file_object.get_size()

    file_object.seek(0, os.SEEK_SET)

    uncompressed_data_offset = 0
    next_member_offset = 0

    # Members are stored back-to-back; walk them until the end of the file.
    while next_member_offset < file_size:
      member = GzipMember(
          file_object, next_member_offset, uncompressed_data_offset)
      uncompressed_data_offset = (
          uncompressed_data_offset + member.uncompressed_data_size)
      self._members_by_end_offset[uncompressed_data_offset] = member
      self.uncompressed_data_size += member.uncompressed_data_size
      next_member_offset = member.member_end_offset

    self._file_object = file_object

  # Note: that the following functions do not follow the style guide
  # because they are part of the file-like object interface.
  # pylint: disable=invalid-name

  def close(self):
    """Closes the file-like object."""
    # Reset to an empty OrderedDict instead of a list so that the members
    # property keeps working on a closed stream.
    self._members_by_end_offset = collections.OrderedDict()
    if self._file_object:
      self._file_object = None

  def read(self, size=None):
    """Reads a byte string from the gzip file at the current offset.

    The function will read a byte string up to the specified size or
    all of the remaining data if no size was specified.

    Args:
      size (Optional[int]): number of bytes to read, where None is all
          remaining data.

    Returns:
      bytes: data read.

    Raises:
      IOError: if the read failed.
      OSError: if the read failed.
    """
    data = b''
    # With size=None the loop must still run until the end of the data;
    # testing the truthiness of size ("size and ...") would skip the loop
    # entirely and incorrectly return an empty byte string.
    while ((size is None or len(data) < size) and
           self._current_offset < self.uncompressed_data_size):
      member = self._GetMemberForOffset(self._current_offset)
      member_offset = self._current_offset - member.uncompressed_data_offset
      if size is None:
        read_size = None
      else:
        # Only request the bytes that are still missing, otherwise a read
        # that spans multiple members could return more than size bytes.
        read_size = size - len(data)
      data_read = member.ReadAtOffset(member_offset, read_size)
      if not data_read:
        break

      self._current_offset += len(data_read)
      data = b''.join([data, data_read])

    return data

  def seek(self, offset, whence=os.SEEK_SET):
    """Seeks to an offset within the file-like object.

    Args:
      offset (int): offset to seek to.
      whence (Optional(int)): value that indicates whether offset is an
          absolute or relative position within the file.

    Raises:
      IOError: if the seek failed or the file has not been opened.
      OSError: if the seek failed or the file has not been opened.
    """
    if not self._file_object:
      raise IOError('Not opened.')

    if whence == os.SEEK_CUR:
      offset += self._current_offset
    elif whence == os.SEEK_END:
      offset += self.uncompressed_data_size
    elif whence != os.SEEK_SET:
      raise IOError('Unsupported whence.')

    if offset < 0:
      raise IOError('Invalid offset value less than zero.')

    self._current_offset = offset

  def get_offset(self):
    """Retrieves the current offset into the file-like object.

    Returns:
      int: current offset into the file-like object.

    Raises:
      IOError: if the file-like object has not been opened.
      OSError: if the file-like object has not been opened.
    """
    if not self._file_object:
      raise IOError('Not opened.')

    return self._current_offset

  def get_size(self):
    """Retrieves the size of the file-like object.

    Returns:
      int: size of the file-like object data.

    Raises:
      IOError: if the file-like object has not been opened.
      OSError: if the file-like object has not been opened.
    """
    if not self._file_object:
      raise IOError('Not opened.')

    return self.uncompressed_data_size
| true | true |
f72b027333bbe2d8bc09150e018d4e2a3f9db7df | 11,472 | py | Python | vspk/v4_0/nustaticroute.py | mohaimenhasan/vspk-python | 4c7b297427048340b250cc3c74d9214dc0d4bde1 | [
"BSD-3-Clause"
] | null | null | null | vspk/v4_0/nustaticroute.py | mohaimenhasan/vspk-python | 4c7b297427048340b250cc3c74d9214dc0d4bde1 | [
"BSD-3-Clause"
] | null | null | null | vspk/v4_0/nustaticroute.py | mohaimenhasan/vspk-python | 4c7b297427048340b250cc3c74d9214dc0d4bde1 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
#
# Copyright (c) 2015, Alcatel-Lucent Inc, 2017 Nokia
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from .fetchers import NUMetadatasFetcher
from .fetchers import NUGlobalMetadatasFetcher
from .fetchers import NUEventLogsFetcher
from bambou import NURESTObject
class NUStaticRoute(NURESTObject):
    """ Represents a StaticRoute in the VSD

        Notes:
            Static routes allow end users to define how traffic is routed through the dVRS in addition to the routes learned by VSC through VM activation. By using static routes, end users can define for example that all traffic with a destination address towards a specific subnet must be forwarded to a specific VM attached in the dVRS and this VM could be a firewall
    """

    __rest_name__ = "staticroute"
    __resource_name__ = "staticroutes"

    ## Constants
    # Allowed values for the entity_scope, type and ip_type attributes; they
    # mirror the choices declared in the expose_attribute calls below.
    CONST_ENTITY_SCOPE_GLOBAL = "GLOBAL"

    CONST_TYPE_OVERLAY = "OVERLAY"

    CONST_ENTITY_SCOPE_ENTERPRISE = "ENTERPRISE"

    CONST_IP_TYPE_IPV6 = "IPV6"

    CONST_IP_TYPE_IPV4 = "IPV4"

    CONST_TYPE_EXIT_DOMAIN = "EXIT_DOMAIN"

    CONST_IP_TYPE_DUALSTACK = "DUALSTACK"

    def __init__(self, **kwargs):
        """ Initializes a StaticRoute instance

            Notes:
                You can specify all parameters while calling this method.
                A special argument named `data` will enable you to load the
                object from a Python dictionary.

            Examples:
                >>> staticroute = NUStaticRoute(id=u'xxxx-xxx-xxx-xxx', name=u'StaticRoute')
                >>> staticroute = NUStaticRoute(data=my_dict)
        """

        super(NUStaticRoute, self).__init__()

        # Read/Write Attributes
        
        self._ip_type = None
        self._ipv6_address = None
        self._last_updated_by = None
        self._address = None
        self._netmask = None
        self._next_hop_ip = None
        self._entity_scope = None
        self._route_distinguisher = None
        self._external_id = None
        self._type = None
        
        # Each expose_attribute call maps a local Python attribute to its
        # remote VSD API field name and declares its constraints (required,
        # unique, allowed choices).
        self.expose_attribute(local_name="ip_type", remote_name="IPType", attribute_type=str, is_required=False, is_unique=False, choices=[u'DUALSTACK', u'IPV4', u'IPV6'])
        self.expose_attribute(local_name="ipv6_address", remote_name="IPv6Address", attribute_type=str, is_required=False, is_unique=False)
        self.expose_attribute(local_name="last_updated_by", remote_name="lastUpdatedBy", attribute_type=str, is_required=False, is_unique=False)
        self.expose_attribute(local_name="address", remote_name="address", attribute_type=str, is_required=True, is_unique=False)
        self.expose_attribute(local_name="netmask", remote_name="netmask", attribute_type=str, is_required=True, is_unique=False)
        self.expose_attribute(local_name="next_hop_ip", remote_name="nextHopIp", attribute_type=str, is_required=True, is_unique=False)
        self.expose_attribute(local_name="entity_scope", remote_name="entityScope", attribute_type=str, is_required=False, is_unique=False, choices=[u'ENTERPRISE', u'GLOBAL'])
        self.expose_attribute(local_name="route_distinguisher", remote_name="routeDistinguisher", attribute_type=str, is_required=False, is_unique=False)
        self.expose_attribute(local_name="external_id", remote_name="externalID", attribute_type=str, is_required=False, is_unique=True)
        self.expose_attribute(local_name="type", remote_name="type", attribute_type=str, is_required=False, is_unique=False, choices=[u'EXIT_DOMAIN', u'OVERLAY'])
        

        # Fetchers
        
        self.metadatas = NUMetadatasFetcher.fetcher_with_object(parent_object=self, relationship="child")
        
        self.global_metadatas = NUGlobalMetadatasFetcher.fetcher_with_object(parent_object=self, relationship="child")
        
        self.event_logs = NUEventLogsFetcher.fetcher_with_object(parent_object=self, relationship="child")
        

        self._compute_args(**kwargs)

    # Properties
    
    @property
    def ip_type(self):
        """ Get ip_type value.

            Notes:
                IPv4 or IPv6

                This attribute is named `IPType` in VSD API.

        """
        return self._ip_type

    @ip_type.setter
    def ip_type(self, value):
        """ Set ip_type value.

            Notes:
                IPv4 or IPv6

                This attribute is named `IPType` in VSD API.

        """
        self._ip_type = value

    @property
    def ipv6_address(self):
        """ Get ipv6_address value.

            Notes:
                IPv6 address of the route

                This attribute is named `IPv6Address` in VSD API.

        """
        return self._ipv6_address

    @ipv6_address.setter
    def ipv6_address(self, value):
        """ Set ipv6_address value.

            Notes:
                IPv6 address of the route

                This attribute is named `IPv6Address` in VSD API.

        """
        self._ipv6_address = value

    @property
    def last_updated_by(self):
        """ Get last_updated_by value.

            Notes:
                ID of the user who last updated the object.

                This attribute is named `lastUpdatedBy` in VSD API.

        """
        return self._last_updated_by

    @last_updated_by.setter
    def last_updated_by(self, value):
        """ Set last_updated_by value.

            Notes:
                ID of the user who last updated the object.

                This attribute is named `lastUpdatedBy` in VSD API.

        """
        self._last_updated_by = value

    @property
    def address(self):
        """ Get address value.

            Notes:
                IP address of the route

        """
        return self._address

    @address.setter
    def address(self, value):
        """ Set address value.

            Notes:
                IP address of the route

        """
        self._address = value

    @property
    def netmask(self):
        """ Get netmask value.

            Notes:
                Netmask associated with the route

        """
        return self._netmask

    @netmask.setter
    def netmask(self, value):
        """ Set netmask value.

            Notes:
                Netmask associated with the route

        """
        self._netmask = value

    @property
    def next_hop_ip(self):
        """ Get next_hop_ip value.

            Notes:
                IP address of the next hop. This must be a VM attached to the dVRS

                This attribute is named `nextHopIp` in VSD API.

        """
        return self._next_hop_ip

    @next_hop_ip.setter
    def next_hop_ip(self, value):
        """ Set next_hop_ip value.

            Notes:
                IP address of the next hop. This must be a VM attached to the dVRS

                This attribute is named `nextHopIp` in VSD API.

        """
        self._next_hop_ip = value

    @property
    def entity_scope(self):
        """ Get entity_scope value.

            Notes:
                Specify if scope of entity is Data center or Enterprise level

                This attribute is named `entityScope` in VSD API.

        """
        return self._entity_scope

    @entity_scope.setter
    def entity_scope(self, value):
        """ Set entity_scope value.

            Notes:
                Specify if scope of entity is Data center or Enterprise level

                This attribute is named `entityScope` in VSD API.

        """
        self._entity_scope = value

    @property
    def route_distinguisher(self):
        """ Get route_distinguisher value.

            Notes:
                Route distinguisher associated with the nexthop. System generates this identifier automatically

                This attribute is named `routeDistinguisher` in VSD API.

        """
        return self._route_distinguisher

    @route_distinguisher.setter
    def route_distinguisher(self, value):
        """ Set route_distinguisher value.

            Notes:
                Route distinguisher associated with the nexthop. System generates this identifier automatically

                This attribute is named `routeDistinguisher` in VSD API.

        """
        self._route_distinguisher = value

    @property
    def external_id(self):
        """ Get external_id value.

            Notes:
                External object ID. Used for integration with third party systems

                This attribute is named `externalID` in VSD API.

        """
        return self._external_id

    @external_id.setter
    def external_id(self, value):
        """ Set external_id value.

            Notes:
                External object ID. Used for integration with third party systems

                This attribute is named `externalID` in VSD API.

        """
        self._external_id = value

    @property
    def type(self):
        """ Get type value.

            Notes:
                Type flag for static-route provisioning for exit-domain (break-to-underlay) prefixes.

        """
        return self._type

    @type.setter
    def type(self, value):
        """ Set type value.

            Notes:
                Type flag for static-route provisioning for exit-domain (break-to-underlay) prefixes.

        """
        self._type = value
| 29.720207 | 369 | 0.602772 |
from .fetchers import NUMetadatasFetcher
from .fetchers import NUGlobalMetadatasFetcher
from .fetchers import NUEventLogsFetcher
from bambou import NURESTObject
class NUStaticRoute(NURESTObject):
__rest_name__ = "staticroute"
__resource_name__ = "staticroutes"
ONST_ENTITY_SCOPE_GLOBAL = "GLOBAL"
CONST_TYPE_OVERLAY = "OVERLAY"
CONST_ENTITY_SCOPE_ENTERPRISE = "ENTERPRISE"
CONST_IP_TYPE_IPV6 = "IPV6"
CONST_IP_TYPE_IPV4 = "IPV4"
CONST_TYPE_EXIT_DOMAIN = "EXIT_DOMAIN"
CONST_IP_TYPE_DUALSTACK = "DUALSTACK"
def __init__(self, **kwargs):
super(NUStaticRoute, self).__init__()
self._ip_type = None
self._ipv6_address = None
self._last_updated_by = None
self._address = None
self._netmask = None
self._next_hop_ip = None
self._entity_scope = None
self._route_distinguisher = None
self._external_id = None
self._type = None
self.expose_attribute(local_name="ip_type", remote_name="IPType", attribute_type=str, is_required=False, is_unique=False, choices=[u'DUALSTACK', u'IPV4', u'IPV6'])
self.expose_attribute(local_name="ipv6_address", remote_name="IPv6Address", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="last_updated_by", remote_name="lastUpdatedBy", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="address", remote_name="address", attribute_type=str, is_required=True, is_unique=False)
self.expose_attribute(local_name="netmask", remote_name="netmask", attribute_type=str, is_required=True, is_unique=False)
self.expose_attribute(local_name="next_hop_ip", remote_name="nextHopIp", attribute_type=str, is_required=True, is_unique=False)
self.expose_attribute(local_name="entity_scope", remote_name="entityScope", attribute_type=str, is_required=False, is_unique=False, choices=[u'ENTERPRISE', u'GLOBAL'])
self.expose_attribute(local_name="route_distinguisher", remote_name="routeDistinguisher", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="external_id", remote_name="externalID", attribute_type=str, is_required=False, is_unique=True)
self.expose_attribute(local_name="type", remote_name="type", attribute_type=str, is_required=False, is_unique=False, choices=[u'EXIT_DOMAIN', u'OVERLAY'])
self.metadatas = NUMetadatasFetcher.fetcher_with_object(parent_object=self, relationship="child")
self.global_metadatas = NUGlobalMetadatasFetcher.fetcher_with_object(parent_object=self, relationship="child")
self.event_logs = NUEventLogsFetcher.fetcher_with_object(parent_object=self, relationship="child")
self._compute_args(**kwargs)
    @property
    def ip_type(self):
        """Return the cached `IPType` value (declared choices: DUALSTACK, IPV4, IPV6)."""
        return self._ip_type
    @ip_type.setter
    def ip_type(self, value):
        """Store `value` as the new `IPType`; no local validation is performed."""
        self._ip_type = value
    @property
    def ipv6_address(self):
        """Return the cached `IPv6Address` value (optional attribute)."""
        return self._ipv6_address
    @ipv6_address.setter
    def ipv6_address(self, value):
        """Store `value` as the new `IPv6Address`; no local validation is performed."""
        self._ipv6_address = value
    @property
    def last_updated_by(self):
        """Return the cached `lastUpdatedBy` value."""
        return self._last_updated_by
    @last_updated_by.setter
    def last_updated_by(self, value):
        """Store `value` as the new `lastUpdatedBy`; no local validation is performed."""
        self._last_updated_by = value
    @property
    def address(self):
        """Return the cached `address` value (declared required in `__init__`)."""
        return self._address
    @address.setter
    def address(self, value):
        """Store `value` as the new `address`; no local validation is performed."""
        self._address = value
    @property
    def netmask(self):
        """Return the cached `netmask` value (declared required in `__init__`)."""
        return self._netmask
    @netmask.setter
    def netmask(self, value):
        """Store `value` as the new `netmask`; no local validation is performed."""
        self._netmask = value
    @property
    def next_hop_ip(self):
        """Return the cached `nextHopIp` value (declared required in `__init__`)."""
        return self._next_hop_ip
    @next_hop_ip.setter
    def next_hop_ip(self, value):
        """Store `value` as the new `nextHopIp`; no local validation is performed."""
        self._next_hop_ip = value
    @property
    def entity_scope(self):
        """Return the cached `entityScope` value (declared choices: ENTERPRISE, GLOBAL)."""
        return self._entity_scope
    @entity_scope.setter
    def entity_scope(self, value):
        """Store `value` as the new `entityScope`; no local validation is performed."""
        self._entity_scope = value
    @property
    def route_distinguisher(self):
        """Return the cached `routeDistinguisher` value."""
        return self._route_distinguisher
    @route_distinguisher.setter
    def route_distinguisher(self, value):
        """Store `value` as the new `routeDistinguisher`; no local validation is performed."""
        self._route_distinguisher = value
    @property
    def external_id(self):
        """Return the cached `externalID` value (declared unique in `__init__`)."""
        return self._external_id
    @external_id.setter
    def external_id(self, value):
        """Store `value` as the new `externalID`; uniqueness is not checked locally."""
        self._external_id = value
    @property
    def type(self):
        """Return the cached `type` value (declared choices: EXIT_DOMAIN, OVERLAY)."""
        return self._type
    @type.setter
    def type(self, value):
        """Store `value` as the new `type`; no local validation is performed."""
        self._type = value
| true | true |
f72b045654dc44f3155f6d877133a3202b759449 | 5,054 | py | Python | python-lib/dku_error_analysis_mpp/dku_error_visualizer.py | dataiku/dss-plugin-model-error-analysis | 4c0f42a5c0aa1710005db3d81ca9bd9d7f829e6b | [
"Apache-2.0"
] | null | null | null | python-lib/dku_error_analysis_mpp/dku_error_visualizer.py | dataiku/dss-plugin-model-error-analysis | 4c0f42a5c0aa1710005db3d81ca9bd9d7f829e6b | [
"Apache-2.0"
] | 2 | 2021-09-29T15:08:25.000Z | 2022-01-13T11:20:58.000Z | python-lib/dku_error_analysis_mpp/dku_error_visualizer.py | dataiku/dss-plugin-model-error-analysis | 4c0f42a5c0aa1710005db3d81ca9bd9d7f829e6b | [
"Apache-2.0"
] | 1 | 2021-09-10T12:25:08.000Z | 2021-09-10T12:25:08.000Z | # -*- coding: utf-8 -*-
import numpy as np
from graphviz import Source
import matplotlib
matplotlib.use('agg')
import matplotlib.pyplot as plt
from dku_error_analysis_mpp.dku_error_analyzer import DkuErrorAnalyzer
from mealy import _BaseErrorVisualizer, ErrorAnalyzerConstants
from dku_error_analysis_utils import safe_str, format_float
import logging
# Module-level logger; basicConfig prefixes every record with the plugin name.
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO, format='Error Analysis Plugin | %(levelname)s - %(message)s')
# Global matplotlib defaults shared by every plot produced in this module.
plt.rc('font', family="sans-serif")
# Font sizes in points, from tick labels up to axis titles.
SMALL_SIZE, MEDIUM_SIZE, BIGGER_SIZE = 8, 10, 12
plt.rc('axes', titlesize=BIGGER_SIZE, labelsize=MEDIUM_SIZE)
plt.rc('xtick', labelsize=SMALL_SIZE)
plt.rc('ytick', labelsize=SMALL_SIZE)
plt.rc('legend', fontsize=SMALL_SIZE)
# Default hatch styling (white, thick lines).
plt.rc("hatch", color="white", linewidth=4)
class DkuErrorVisualizer(_BaseErrorVisualizer):
    """Visualization helpers for the error classifier held by a DkuErrorAnalyzer.

    Offers a graphviz rendering of the error decision tree and matplotlib
    plots comparing per-leaf feature distributions with the root baseline.
    """

    def __init__(self, error_analyzer):
        # The interactive tree used below is specific to DkuErrorAnalyzer,
        # so reject any other analyzer type up front.
        if not isinstance(error_analyzer, DkuErrorAnalyzer):
            raise TypeError('You need to input a DkuErrorAnalyzer object.')
        super(DkuErrorVisualizer, self).__init__(error_analyzer)
        self._tree = error_analyzer.tree

    def plot_error_tree(self, size=(50, 50)):
        """Render the error decision tree as a graphviz ``Source`` object.

        Args:
            size (tuple): Size of the output plot as (width, length), in inches.
        """
        return Source(self._tree.to_dot_string(size))

    def plot_feature_distributions_on_leaves(self, leaf_selector=None, top_k_features=ErrorAnalyzerConstants.TOP_K_FEATURES,
                                             show_global=True, show_class=False, rank_leaves_by="total_error_fraction", nr_bins=10, figsize=(15, 10)):
        """Plot, for every selected leaf, the distribution of the top-ranked
        features, optionally alongside the global (root node) baseline.
        """
        selected_leaves = self._get_ranked_leaf_ids(leaf_selector, rank_leaves_by)
        top_features = self._tree.ranked_features[:top_k_features]
        nr_leaves, nr_features = len(selected_leaves), len(top_features)
        logger.info("{} lea{} selected: {}".format(nr_leaves,
                                                   "f" if nr_leaves == 1 else "ves",
                                                   selected_leaves))
        logger.info("{} feature distribution{} plotted: {}".format(nr_features,
                                                                   "" if nr_features == 1 else "s",
                                                                   [f["name"] for f in top_features]))

        for leaf_id in selected_leaves:
            leaf = self._tree.get_node(leaf_id)
            # Figure title: leaf id followed by its two class probabilities.
            proba_first, proba_second = leaf.probabilities[0], leaf.probabilities[1]
            suptitle = 'Leaf {} ({}: {}, {}: {})'.format(
                leaf.id,
                proba_first[0], format_float(proba_first[1], 3),
                proba_second[0], format_float(proba_second[1], 3))

            for feature in top_features:
                feature_name = feature["name"]
                is_numerical = feature["numerical"]
                leaf_stats = self._tree.get_stats(leaf.id, feature_name, nr_bins)
                bins = leaf_stats["bin_edge" if is_numerical else "bin_value"]

                root_hist_data = None
                if show_global:
                    root = self._tree.get_node(0)
                    nr_root_samples = root.samples[0]
                    # Reuse the leaf's bins so both histograms share one x-axis.
                    root_stats = self._tree.get_stats(0, feature_name, nr_bins, bins)  # TODO: optimize
                    if show_class:
                        root_hist_data = {class_value: np.array(heights) / nr_root_samples
                                          for class_value, heights in root_stats["target_distrib"].items()}
                    else:
                        root_hist_data = {root.prediction: np.array(root_stats["count"]) / nr_root_samples}

                if not bins:
                    leaf_hist_data = None
                    logger.info("No values for the feature {} at the leaf {}".format(feature_name, leaf.id))
                elif show_class:
                    leaf_hist_data = {class_value: np.array(heights) / leaf.samples[0]
                                      for class_value, heights in leaf_stats["target_distrib"].items()}
                else:
                    leaf_hist_data = {leaf.prediction: np.array(leaf_stats["count"]) / leaf.samples[0]}

                if show_global:
                    bins = root_stats["bin_edge" if is_numerical else "bin_value"]

                x_ticks = range(len(bins))
                _BaseErrorVisualizer._add_new_plot(figsize, bins, x_ticks, feature_name, suptitle)
                _BaseErrorVisualizer._plot_feature_distribution(x_ticks, is_numerical, leaf_hist_data, root_hist_data)

        plt.show()
| 49.54902 | 149 | 0.609616 |
import numpy as np
from graphviz import Source
import matplotlib
matplotlib.use('agg')
import matplotlib.pyplot as plt
from dku_error_analysis_mpp.dku_error_analyzer import DkuErrorAnalyzer
from mealy import _BaseErrorVisualizer, ErrorAnalyzerConstants
from dku_error_analysis_utils import safe_str, format_float
import logging
# Module-level logger; basicConfig prefixes every record with the plugin name.
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO, format='Error Analysis Plugin | %(levelname)s - %(message)s')
# Global matplotlib defaults shared by every plot produced in this module.
plt.rc('font', family="sans-serif")
# Font sizes in points, from tick labels up to axis titles.
SMALL_SIZE, MEDIUM_SIZE, BIGGER_SIZE = 8, 10, 12
plt.rc('axes', titlesize=BIGGER_SIZE, labelsize=MEDIUM_SIZE)
plt.rc('xtick', labelsize=SMALL_SIZE)
plt.rc('ytick', labelsize=SMALL_SIZE)
plt.rc('legend', fontsize=SMALL_SIZE)
# Default hatch styling (white, thick lines).
plt.rc("hatch", color="white", linewidth=4)
class DkuErrorVisualizer(_BaseErrorVisualizer):
    """Visual utilities for the error classifier wrapped by a DkuErrorAnalyzer:
    a graphviz tree rendering and per-leaf feature-distribution plots.
    """
    def __init__(self, error_analyzer):
        # Only DkuErrorAnalyzer exposes the `tree` attribute used below.
        if not isinstance(error_analyzer, DkuErrorAnalyzer):
            raise TypeError('You need to input a DkuErrorAnalyzer object.')
        super(DkuErrorVisualizer, self).__init__(error_analyzer)
        self._tree = error_analyzer.tree
    def plot_error_tree(self, size=(50, 50)):
        """Return a graphviz ``Source`` for the error decision tree.

        Args:
            size (tuple): Size of the output plot as (width, length), in inches.
        """
        return Source(self._tree.to_dot_string(size))
    def plot_feature_distributions_on_leaves(self, leaf_selector=None, top_k_features=ErrorAnalyzerConstants.TOP_K_FEATURES,
                                             show_global=True, show_class=False, rank_leaves_by="total_error_fraction", nr_bins=10, figsize=(15, 10)):
        """Plot the feature distributions of selected error-tree leaves,
        optionally compared with the global (root node) baseline.
        """
        leaf_nodes = self._get_ranked_leaf_ids(leaf_selector, rank_leaves_by)
        ranked_features = self._tree.ranked_features[:top_k_features]
        nr_leaves, nr_features = len(leaf_nodes), len(ranked_features)
        logger.info("{} lea{} selected: {}".format(nr_leaves,
                                                   "f" if nr_leaves == 1 else "ves",
                                                   leaf_nodes))
        logger.info("{} feature distribution{} plotted: {}".format(nr_features,
                                                                   "" if nr_features == 1 else "s",
                                                                   [f["name"] for f in ranked_features]))
        for leaf_id in leaf_nodes:
            leaf = self._tree.get_node(leaf_id)
            # Figure title: leaf id followed by its two class probabilities.
            suptitle = 'Leaf {} ({}: {}'.format(leaf.id, leaf.probabilities[0][0], format_float(leaf.probabilities[0][1], 3))
            suptitle += ', {}: {})'.format(leaf.probabilities[1][0], format_float(leaf.probabilities[1][1], 3))
            for feature in ranked_features:
                feature_name = feature["name"]
                leaf_stats = self._tree.get_stats(leaf.id, feature_name, nr_bins)
                feature_is_numerical = feature["numerical"]
                bins = leaf_stats["bin_edge"] if feature_is_numerical else leaf_stats["bin_value"]
                if show_global:
                    root_samples = self._tree.get_node(0).samples[0]
                    # Reuse the leaf's bins so both histograms share one x-axis.
                    root_stats = self._tree.get_stats(0, feature_name, nr_bins, bins)
                    if show_class:
                        root_hist_data = {}
                        for class_value, bar_heights in root_stats["target_distrib"].items():
                            root_hist_data[class_value] = np.array(bar_heights)/root_samples
                    else:
                        root_hist_data, root_prediction = {}, self._tree.get_node(0).prediction
                        root_hist_data[root_prediction] = np.array(root_stats["count"])/root_samples
                else:
                    root_hist_data = None
                if bins:
                    leaf_hist_data = {}
                    if show_class:
                        for class_value, bar_heights in leaf_stats["target_distrib"].items():
                            leaf_hist_data[class_value] = np.array(bar_heights)/leaf.samples[0]
                    else:
                        leaf_hist_data = {leaf.prediction: np.array(leaf_stats["count"])/leaf.samples[0]}
                else:
                    leaf_hist_data = None
                    logger.info("No values for the feature {} at the leaf {}".format(feature_name, leaf.id))
                if show_global:
                    bins = root_stats["bin_edge"] if feature_is_numerical else root_stats["bin_value"]
                x_ticks = range(len(bins))
                _BaseErrorVisualizer._add_new_plot(figsize, bins, x_ticks, feature_name, suptitle)
                _BaseErrorVisualizer._plot_feature_distribution(x_ticks, feature_is_numerical, leaf_hist_data, root_hist_data)
        plt.show()
| true | true |
f72b04ab534d3991395505fbd9524526beed8f88 | 5,288 | py | Python | seahub/api2/endpoints/draft_reviewer.py | odontomachus/seahub | 5b6f2153921da21a473d9ff20ce443d40efc93ab | [
"Apache-2.0"
] | null | null | null | seahub/api2/endpoints/draft_reviewer.py | odontomachus/seahub | 5b6f2153921da21a473d9ff20ce443d40efc93ab | [
"Apache-2.0"
] | 6 | 2019-12-13T09:55:45.000Z | 2022-03-11T23:47:29.000Z | seahub/api2/endpoints/draft_reviewer.py | odontomachus/seahub | 5b6f2153921da21a473d9ff20ce443d40efc93ab | [
"Apache-2.0"
] | 1 | 2019-05-16T06:58:16.000Z | 2019-05-16T06:58:16.000Z | # Copyright (c) 2012-2016 Seafile Ltd.
import posixpath
from rest_framework import status
from rest_framework.authentication import SessionAuthentication
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework.views import APIView
from django.utils.translation import ugettext as _
from seaserv import seafile_api
from seahub.api2.authentication import TokenAuthentication
from seahub.api2.throttling import UserRateThrottle
from seahub.api2.utils import api_error, user_to_dict
from seahub.base.templatetags.seahub_tags import email2nickname
from seahub.base.accounts import User
from seahub.tags.models import FileUUIDMap
from seahub.views import check_folder_permission
from seahub.utils import is_valid_username
from seahub.drafts.models import Draft, DraftReviewer
from seahub.drafts.signals import request_reviewer_successful
class DraftReviewerView(APIView):
    """API endpoints to list, add and remove the reviewers of a draft."""
    authentication_classes = (TokenAuthentication, SessionAuthentication)
    permission_classes = (IsAuthenticated, )
    throttle_classes = (UserRateThrottle, )
    def get(self, request, pk, format=None):
        """List the reviewers of draft `pk` as serialized user dicts."""
        try:
            d = Draft.objects.get(pk=pk)
        except Draft.DoesNotExist:
            return api_error(status.HTTP_404_NOT_FOUND,
                             'Draft %s not found' % pk)
        # format user result
        # avatar_size is a client hint; fall back to 32 on non-integer input.
        try:
            avatar_size = int(request.GET.get('avatar_size', 32))
        except ValueError:
            avatar_size = 32
        # get reviewer list
        reviewers = []
        for x in d.draftreviewer_set.all():
            reviewer = user_to_dict(x.reviewer, request=request, avatar_size=avatar_size)
            reviewers.append(reviewer)
        return Response({'reviewers': reviewers})
    def post(self, request, pk, format=None):
        """add draft reviewer

        Accepts a list of `reviewer` usernames in the form data; each entry is
        validated independently and reported under `success` or `failed`.
        """
        try:
            d = Draft.objects.get(pk=pk)
        except Draft.DoesNotExist:
            return api_error(status.HTTP_404_NOT_FOUND,
                             'Draft %s not found' % pk)
        result = {}
        result['failed'] = []
        result['success'] = []
        reviewers = request.data.getlist('reviewer')
        for reviewer in reviewers:
            # Syntactic check of the username/email format.
            if not is_valid_username(reviewer):
                result['failed'].append({
                    'email': reviewer,
                    'error_msg': _(u'username invalid.')
                })
                continue
            # The target user must exist.
            try:
                User.objects.get(email=reviewer)
            except User.DoesNotExist:
                result['failed'].append({
                    'email': reviewer,
                    'error_msg': _(u'User %s not found.') % reviewer
                })
                continue
            # can't share to owner
            if reviewer == d.username:
                error_msg = 'Draft can not be asked owner to review.'
                result['failed'].append({
                    'email': reviewer,
                    'error_msg': error_msg
                })
                continue
            uuid = FileUUIDMap.objects.get_fileuuidmap_by_uuid(d.origin_file_uuid)
            origin_file_path = posixpath.join(uuid.parent_path, uuid.filename)
            # check perm: reviewer needs read-write access to the origin file.
            if seafile_api.check_permission_by_path(d.origin_repo_id, origin_file_path, reviewer) != 'rw':
                error_msg = _(u'Permission denied.')
                result['failed'].append({
                    'email': reviewer,
                    'error_msg': error_msg
                })
                continue
            # Skip users who are already reviewers of this draft.
            if DraftReviewer.objects.filter(draft=d, reviewer=reviewer):
                error_msg = u'Reviewer %s has existed.' % reviewer
                result['failed'].append({
                    'email': reviewer,
                    'error_msg': error_msg
                })
                continue
            result['success'].append({
                "user_info": {
                    "name": reviewer,
                    "nickname": email2nickname(reviewer)
                }
            })
            DraftReviewer.objects.add(reviewer, d)
            # Notify the invited reviewer via the signal handler.
            request_reviewer_successful.send(sender=None, from_user=request.user.username,
                                             to_user=reviewer, draft_id=d.id)
        return Response(result)
    def delete(self, request, pk):
        """Delete a reviewer

        Removes the reviewer named by the `username` query parameter; deleting
        a user who is not a reviewer still returns 200 (idempotent).
        """
        try:
            d = Draft.objects.get(pk=pk)
        except Draft.DoesNotExist:
            return api_error(status.HTTP_404_NOT_FOUND,
                             'Draft %s not found' % pk)
        perm = check_folder_permission(request, d.origin_repo_id, '/')
        if perm is None:
            error_msg = 'Permission denied.'
            return api_error(status.HTTP_403_FORBIDDEN, error_msg)
        reviewer = request.GET.get('username')
        if reviewer is None:
            return api_error(status.HTTP_400_BAD_REQUEST, 'Email %s invalid.' % reviewer)
        try:
            reviewer = DraftReviewer.objects.get(reviewer=reviewer, draft=d)
        except DraftReviewer.DoesNotExist:
            # NOTE(review): Response(status.HTTP_200_OK) passes 200 as the response
            # *body*, not the HTTP status (that would be status=...) — confirm intent.
            return Response(status.HTTP_200_OK)
        reviewer.delete()
        return Response(status.HTTP_200_OK)
| 34.562092 | 106 | 0.587368 |
import posixpath
from rest_framework import status
from rest_framework.authentication import SessionAuthentication
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework.views import APIView
from django.utils.translation import ugettext as _
from seaserv import seafile_api
from seahub.api2.authentication import TokenAuthentication
from seahub.api2.throttling import UserRateThrottle
from seahub.api2.utils import api_error, user_to_dict
from seahub.base.templatetags.seahub_tags import email2nickname
from seahub.base.accounts import User
from seahub.tags.models import FileUUIDMap
from seahub.views import check_folder_permission
from seahub.utils import is_valid_username
from seahub.drafts.models import Draft, DraftReviewer
from seahub.drafts.signals import request_reviewer_successful
class DraftReviewerView(APIView):
    """API endpoints to list, add and remove the reviewers of a draft."""
    authentication_classes = (TokenAuthentication, SessionAuthentication)
    permission_classes = (IsAuthenticated, )
    throttle_classes = (UserRateThrottle, )
    def get(self, request, pk, format=None):
        """List the reviewers of draft `pk` as serialized user dicts."""
        try:
            d = Draft.objects.get(pk=pk)
        except Draft.DoesNotExist:
            return api_error(status.HTTP_404_NOT_FOUND,
                             'Draft %s not found' % pk)
        # avatar_size is a client hint; fall back to 32 on non-integer input.
        try:
            avatar_size = int(request.GET.get('avatar_size', 32))
        except ValueError:
            avatar_size = 32
        reviewers = []
        for x in d.draftreviewer_set.all():
            reviewer = user_to_dict(x.reviewer, request=request, avatar_size=avatar_size)
            reviewers.append(reviewer)
        return Response({'reviewers': reviewers})
    def post(self, request, pk, format=None):
        """Add reviewers to draft `pk`.

        Accepts a list of `reviewer` usernames in the form data; each entry is
        validated independently and reported under `success` or `failed`.
        """
        try:
            d = Draft.objects.get(pk=pk)
        except Draft.DoesNotExist:
            return api_error(status.HTTP_404_NOT_FOUND,
                             'Draft %s not found' % pk)
        result = {}
        result['failed'] = []
        result['success'] = []
        reviewers = request.data.getlist('reviewer')
        for reviewer in reviewers:
            # Syntactic check of the username/email format.
            if not is_valid_username(reviewer):
                result['failed'].append({
                    'email': reviewer,
                    'error_msg': _(u'username invalid.')
                })
                continue
            # The target user must exist.
            try:
                User.objects.get(email=reviewer)
            except User.DoesNotExist:
                result['failed'].append({
                    'email': reviewer,
                    'error_msg': _(u'User %s not found.') % reviewer
                })
                continue
            # The draft owner cannot review their own draft.
            if reviewer == d.username:
                error_msg = 'Draft can not be asked owner to review.'
                result['failed'].append({
                    'email': reviewer,
                    'error_msg': error_msg
                })
                continue
            uuid = FileUUIDMap.objects.get_fileuuidmap_by_uuid(d.origin_file_uuid)
            origin_file_path = posixpath.join(uuid.parent_path, uuid.filename)
            # check perm: reviewer needs read-write access to the origin file.
            if seafile_api.check_permission_by_path(d.origin_repo_id, origin_file_path, reviewer) != 'rw':
                error_msg = _(u'Permission denied.')
                result['failed'].append({
                    'email': reviewer,
                    'error_msg': error_msg
                })
                continue
            # Skip users who are already reviewers of this draft.
            if DraftReviewer.objects.filter(draft=d, reviewer=reviewer):
                error_msg = u'Reviewer %s has existed.' % reviewer
                result['failed'].append({
                    'email': reviewer,
                    'error_msg': error_msg
                })
                continue
            result['success'].append({
                "user_info": {
                    "name": reviewer,
                    "nickname": email2nickname(reviewer)
                }
            })
            DraftReviewer.objects.add(reviewer, d)
            # Notify the invited reviewer via the signal handler.
            request_reviewer_successful.send(sender=None, from_user=request.user.username,
                                             to_user=reviewer, draft_id=d.id)
        return Response(result)
    def delete(self, request, pk):
        """Remove the reviewer named by the `username` query parameter.

        Deleting a user who is not a reviewer still returns 200 (idempotent).
        """
        try:
            d = Draft.objects.get(pk=pk)
        except Draft.DoesNotExist:
            return api_error(status.HTTP_404_NOT_FOUND,
                             'Draft %s not found' % pk)
        perm = check_folder_permission(request, d.origin_repo_id, '/')
        if perm is None:
            error_msg = 'Permission denied.'
            return api_error(status.HTTP_403_FORBIDDEN, error_msg)
        reviewer = request.GET.get('username')
        if reviewer is None:
            return api_error(status.HTTP_400_BAD_REQUEST, 'Email %s invalid.' % reviewer)
        try:
            reviewer = DraftReviewer.objects.get(reviewer=reviewer, draft=d)
        except DraftReviewer.DoesNotExist:
            # NOTE(review): Response(status.HTTP_200_OK) passes 200 as the response
            # *body*, not the HTTP status (that would be status=...) — confirm intent.
            return Response(status.HTTP_200_OK)
        reviewer.delete()
        return Response(status.HTTP_200_OK)
| true | true |
f72b04c22d26af35d88e3f843c7d2b7c9e606c26 | 120 | py | Python | module_2/lab2_1_1_7.py | dzooli/pcep_prepare | ddf34991a2d6ef2cfe3bda706ec333e9caa2aea5 | [
"MIT"
] | null | null | null | module_2/lab2_1_1_7.py | dzooli/pcep_prepare | ddf34991a2d6ef2cfe3bda706ec333e9caa2aea5 | [
"MIT"
] | null | null | null | module_2/lab2_1_1_7.py | dzooli/pcep_prepare | ddf34991a2d6ef2cfe3bda706ec333e9caa2aea5 | [
"MIT"
] | null | null | null | print("Hello, Python!")
# Demonstrates Python 3 print() with different quoting styles.
print("Zoltan")
# Broken variants deliberately kept commented out:
#print(Zoltan)   <- NameError: Zoltan without quotes is an undefined name
#print "Zoltan"  <- SyntaxError in Python 3: print is a function
print('Zoltan')
# A triple-quoted literal prints its embedded newlines verbatim.
print('''
Alma
on the
tree
'''
)
| 10 | 23 | 0.65 | print("Hello, Python!")
# Same quoting demo, without the commented-out broken variants.
print("Zoltan")
print('Zoltan')
# A triple-quoted literal prints its embedded newlines verbatim.
print('''
Alma
on the
tree
'''
)
| true | true |
f72b058123386b2f12effdfae7010abf516ca956 | 13,314 | py | Python | Lib/json/__init__.py | Hadron/python | 73137f499ed658169f49273eee46845e3b53e800 | [
"PSF-2.0"
] | null | null | null | Lib/json/__init__.py | Hadron/python | 73137f499ed658169f49273eee46845e3b53e800 | [
"PSF-2.0"
] | null | null | null | Lib/json/__init__.py | Hadron/python | 73137f499ed658169f49273eee46845e3b53e800 | [
"PSF-2.0"
] | null | null | null | r"""JSON (JavaScript Object Notation) <http://json.org> is a subset of
JavaScript syntax (ECMA-262 3rd edition) used as a lightweight data
interchange format.
:mod:`json` exposes an API familiar to users of the standard library
:mod:`marshal` and :mod:`pickle` modules. It is derived from a
version of the externally maintained simplejson library.
Encoding basic Python object hierarchies::
>>> import json
>>> json.dumps(['foo', {'bar': ('baz', None, 1.0, 2)}])
'["foo", {"bar": ["baz", null, 1.0, 2]}]'
>>> print(json.dumps("\"foo\bar"))
"\"foo\bar"
>>> print(json.dumps('\u1234'))
"\u1234"
>>> print(json.dumps('\\'))
"\\"
>>> print(json.dumps({"c": 0, "b": 0, "a": 0}, sort_keys=True))
{"a": 0, "b": 0, "c": 0}
>>> from io import StringIO
>>> io = StringIO()
>>> json.dump(['streaming API'], io)
>>> io.getvalue()
'["streaming API"]'
Compact encoding::
>>> import json
>>> from collections import OrderedDict
>>> mydict = OrderedDict([('4', 5), ('6', 7)])
>>> json.dumps([1,2,3,mydict], separators=(',', ':'))
'[1,2,3,{"4":5,"6":7}]'
Pretty printing::
>>> import json
>>> print(json.dumps({'4': 5, '6': 7}, sort_keys=True, indent=4))
{
"4": 5,
"6": 7
}
Decoding JSON::
>>> import json
>>> obj = ['foo', {'bar': ['baz', None, 1.0, 2]}]
>>> json.loads('["foo", {"bar":["baz", null, 1.0, 2]}]') == obj
True
>>> json.loads('"\\"foo\\bar"') == '"foo\x08ar'
True
>>> from io import StringIO
>>> io = StringIO('["streaming API"]')
>>> json.load(io)[0] == 'streaming API'
True
Specializing JSON object decoding::
>>> import json
>>> def as_complex(dct):
... if '__complex__' in dct:
... return complex(dct['real'], dct['imag'])
... return dct
...
>>> json.loads('{"__complex__": true, "real": 1, "imag": 2}',
... object_hook=as_complex)
(1+2j)
>>> from decimal import Decimal
>>> json.loads('1.1', parse_float=Decimal) == Decimal('1.1')
True
Specializing JSON object encoding::
>>> import json
>>> def encode_complex(obj):
... if isinstance(obj, complex):
... return [obj.real, obj.imag]
... raise TypeError(repr(o) + " is not JSON serializable")
...
>>> json.dumps(2 + 1j, default=encode_complex)
'[2.0, 1.0]'
>>> json.JSONEncoder(default=encode_complex).encode(2 + 1j)
'[2.0, 1.0]'
>>> ''.join(json.JSONEncoder(default=encode_complex).iterencode(2 + 1j))
'[2.0, 1.0]'
Using json.tool from the shell to validate and pretty-print::
$ echo '{"json":"obj"}' | python -m json.tool
{
"json": "obj"
}
$ echo '{ 1.2:3.4}' | python -m json.tool
Expecting property name enclosed in double quotes: line 1 column 3 (char 2)
"""
__version__ = '2.0.9'
__all__ = [
'dump', 'dumps', 'load', 'loads',
'JSONDecoder', 'JSONDecodeError', 'JSONEncoder',
]
__author__ = 'Bob Ippolito <bob@redivi.com>'
from .decoder import JSONDecoder, JSONDecodeError
from .encoder import JSONEncoder
_default_encoder = JSONEncoder(
skipkeys=False,
ensure_ascii=True,
check_circular=True,
allow_nan=True,
indent=None,
separators=None,
default=None,
)
def dump(obj, fp, skipkeys=False, ensure_ascii=True, check_circular=True,
allow_nan=True, cls=None, indent=None, separators=None,
default=None, sort_keys=False, **kw):
"""Serialize ``obj`` as a JSON formatted stream to ``fp`` (a
``.write()``-supporting file-like object).
If ``skipkeys`` is true then ``dict`` keys that are not basic types
(``str``, ``int``, ``float``, ``bool``, ``None``) will be skipped
instead of raising a ``TypeError``.
If ``ensure_ascii`` is false, then the strings written to ``fp`` can
contain non-ASCII characters if they appear in strings contained in
``obj``. Otherwise, all such characters are escaped in JSON strings.
If ``check_circular`` is false, then the circular reference check
for container types will be skipped and a circular reference will
result in an ``OverflowError`` (or worse).
If ``allow_nan`` is false, then it will be a ``ValueError`` to
serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``)
in strict compliance of the JSON specification, instead of using the
JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``).
If ``indent`` is a non-negative integer, then JSON array elements and
object members will be pretty-printed with that indent level. An indent
level of 0 will only insert newlines. ``None`` is the most compact
representation.
If specified, ``separators`` should be an ``(item_separator, key_separator)``
tuple. The default is ``(', ', ': ')`` if *indent* is ``None`` and
``(',', ': ')`` otherwise. To get the most compact JSON representation,
you should specify ``(',', ':')`` to eliminate whitespace.
``default(obj)`` is a function that should return a serializable version
of obj or raise TypeError. The default simply raises TypeError.
If *sort_keys* is true (default: ``False``), then the output of
dictionaries will be sorted by key.
To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the
``.default()`` method to serialize additional types), specify it with
the ``cls`` kwarg; otherwise ``JSONEncoder`` is used.
"""
# cached encoder
if (not skipkeys and ensure_ascii and
check_circular and allow_nan and
cls is None and indent is None and separators is None and
default is None and not sort_keys and not kw):
iterable = _default_encoder.iterencode(obj)
else:
if cls is None:
cls = JSONEncoder
iterable = cls(skipkeys=skipkeys, ensure_ascii=ensure_ascii,
check_circular=check_circular, allow_nan=allow_nan, indent=indent,
separators=separators,
default=default, sort_keys=sort_keys, **kw).iterencode(obj)
# could accelerate with writelines in some versions of Python, at
# a debuggability cost
for chunk in iterable:
fp.write(chunk)
def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True,
allow_nan=True, cls=None, indent=None, separators=None,
default=None, sort_keys=False, **kw):
"""Serialize ``obj`` to a JSON formatted ``str``.
If ``skipkeys`` is true then ``dict`` keys that are not basic types
(``str``, ``int``, ``float``, ``bool``, ``None``) will be skipped
instead of raising a ``TypeError``.
If ``ensure_ascii`` is false, then the return value can contain non-ASCII
characters if they appear in strings contained in ``obj``. Otherwise, all
such characters are escaped in JSON strings.
If ``check_circular`` is false, then the circular reference check
for container types will be skipped and a circular reference will
result in an ``OverflowError`` (or worse).
If ``allow_nan`` is false, then it will be a ``ValueError`` to
serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``) in
strict compliance of the JSON specification, instead of using the
JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``).
If ``indent`` is a non-negative integer, then JSON array elements and
object members will be pretty-printed with that indent level. An indent
level of 0 will only insert newlines. ``None`` is the most compact
representation.
If specified, ``separators`` should be an ``(item_separator, key_separator)``
tuple. The default is ``(', ', ': ')`` if *indent* is ``None`` and
``(',', ': ')`` otherwise. To get the most compact JSON representation,
you should specify ``(',', ':')`` to eliminate whitespace.
``default(obj)`` is a function that should return a serializable version
of obj or raise TypeError. The default simply raises TypeError.
If *sort_keys* is true (default: ``False``), then the output of
dictionaries will be sorted by key.
To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the
``.default()`` method to serialize additional types), specify it with
the ``cls`` kwarg; otherwise ``JSONEncoder`` is used.
"""
# cached encoder
if (not skipkeys and ensure_ascii and
check_circular and allow_nan and
cls is None and indent is None and separators is None and
default is None and not sort_keys and not kw):
return _default_encoder.encode(obj)
if cls is None:
cls = JSONEncoder
return cls(
skipkeys=skipkeys, ensure_ascii=ensure_ascii,
check_circular=check_circular, allow_nan=allow_nan, indent=indent,
separators=separators, default=default, sort_keys=sort_keys,
**kw).encode(obj)
_default_decoder = JSONDecoder(object_hook=None, object_pairs_hook=None)
def load(fp, cls=None, object_hook=None, parse_float=None,
parse_int=None, parse_constant=None, object_pairs_hook=None, **kw):
"""Deserialize ``fp`` (a ``.read()``-supporting file-like object containing
a JSON document) to a Python object.
``object_hook`` is an optional function that will be called with the
result of any object literal decode (a ``dict``). The return value of
``object_hook`` will be used instead of the ``dict``. This feature
can be used to implement custom decoders (e.g. JSON-RPC class hinting).
``object_pairs_hook`` is an optional function that will be called with the
result of any object literal decoded with an ordered list of pairs. The
return value of ``object_pairs_hook`` will be used instead of the ``dict``.
This feature can be used to implement custom decoders that rely on the
order that the key and value pairs are decoded (for example,
collections.OrderedDict will remember the order of insertion). If
``object_hook`` is also defined, the ``object_pairs_hook`` takes priority.
To use a custom ``JSONDecoder`` subclass, specify it with the ``cls``
kwarg; otherwise ``JSONDecoder`` is used.
"""
return loads(fp.read(),
cls=cls, object_hook=object_hook,
parse_float=parse_float, parse_int=parse_int,
parse_constant=parse_constant, object_pairs_hook=object_pairs_hook, **kw)
def loads(s, encoding=None, cls=None, object_hook=None, parse_float=None,
parse_int=None, parse_constant=None, object_pairs_hook=None, **kw):
"""Deserialize ``s`` (a ``str`` instance containing a JSON
document) to a Python object.
``object_hook`` is an optional function that will be called with the
result of any object literal decode (a ``dict``). The return value of
``object_hook`` will be used instead of the ``dict``. This feature
can be used to implement custom decoders (e.g. JSON-RPC class hinting).
``object_pairs_hook`` is an optional function that will be called with the
result of any object literal decoded with an ordered list of pairs. The
return value of ``object_pairs_hook`` will be used instead of the ``dict``.
This feature can be used to implement custom decoders that rely on the
order that the key and value pairs are decoded (for example,
collections.OrderedDict will remember the order of insertion). If
``object_hook`` is also defined, the ``object_pairs_hook`` takes priority.
``parse_float``, if specified, will be called with the string
of every JSON float to be decoded. By default this is equivalent to
float(num_str). This can be used to use another datatype or parser
for JSON floats (e.g. decimal.Decimal).
``parse_int``, if specified, will be called with the string
of every JSON int to be decoded. By default this is equivalent to
int(num_str). This can be used to use another datatype or parser
for JSON integers (e.g. float).
``parse_constant``, if specified, will be called with one of the
following strings: -Infinity, Infinity, NaN, null, true, false.
This can be used to raise an exception if invalid JSON numbers
are encountered.
To use a custom ``JSONDecoder`` subclass, specify it with the ``cls``
kwarg; otherwise ``JSONDecoder`` is used.
The ``encoding`` argument is ignored and deprecated.
"""
if not isinstance(s, str):
raise TypeError('the JSON object must be str, not {!r}'.format(
s.__class__.__name__))
if s.startswith(u'\ufeff'):
raise JSONDecodeError("Unexpected UTF-8 BOM (decode using utf-8-sig)",
s, 0)
if (cls is None and object_hook is None and
parse_int is None and parse_float is None and
parse_constant is None and object_pairs_hook is None and not kw):
return _default_decoder.decode(s)
if cls is None:
cls = JSONDecoder
if object_hook is not None:
kw['object_hook'] = object_hook
if object_pairs_hook is not None:
kw['object_pairs_hook'] = object_pairs_hook
if parse_float is not None:
kw['parse_float'] = parse_float
if parse_int is not None:
kw['parse_int'] = parse_int
if parse_constant is not None:
kw['parse_constant'] = parse_constant
return cls(**kw).decode(s)
| 39.981982 | 81 | 0.653372 | __version__ = '2.0.9'
__all__ = [
'dump', 'dumps', 'load', 'loads',
'JSONDecoder', 'JSONDecodeError', 'JSONEncoder',
]
__author__ = 'Bob Ippolito <bob@redivi.com>'
from .decoder import JSONDecoder, JSONDecodeError
from .encoder import JSONEncoder
_default_encoder = JSONEncoder(
skipkeys=False,
ensure_ascii=True,
check_circular=True,
allow_nan=True,
indent=None,
separators=None,
default=None,
)
def dump(obj, fp, skipkeys=False, ensure_ascii=True, check_circular=True,
allow_nan=True, cls=None, indent=None, separators=None,
default=None, sort_keys=False, **kw):
if (not skipkeys and ensure_ascii and
check_circular and allow_nan and
cls is None and indent is None and separators is None and
default is None and not sort_keys and not kw):
iterable = _default_encoder.iterencode(obj)
else:
if cls is None:
cls = JSONEncoder
iterable = cls(skipkeys=skipkeys, ensure_ascii=ensure_ascii,
check_circular=check_circular, allow_nan=allow_nan, indent=indent,
separators=separators,
default=default, sort_keys=sort_keys, **kw).iterencode(obj)
for chunk in iterable:
fp.write(chunk)
def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True,
        allow_nan=True, cls=None, indent=None, separators=None,
        default=None, sort_keys=False, **kw):
    """Serialize ``obj`` to a JSON formatted ``str``.

    When every option is at its default value the shared module-level
    encoder is reused; otherwise a one-off encoder (``cls`` or
    ``JSONEncoder``) is constructed with the requested settings.
    """
    all_defaults = (not skipkeys and ensure_ascii and check_circular
                    and allow_nan and cls is None and indent is None
                    and separators is None and default is None
                    and not sort_keys and not kw)
    if all_defaults:
        return _default_encoder.encode(obj)
    encoder_cls = JSONEncoder if cls is None else cls
    return encoder_cls(
        skipkeys=skipkeys, ensure_ascii=ensure_ascii,
        check_circular=check_circular, allow_nan=allow_nan,
        indent=indent, separators=separators, default=default,
        sort_keys=sort_keys, **kw).encode(obj)
_default_decoder = JSONDecoder(object_hook=None, object_pairs_hook=None)
def load(fp, cls=None, object_hook=None, parse_float=None,
        parse_int=None, parse_constant=None, object_pairs_hook=None, **kw):
    """Deserialize ``fp`` (a ``.read()``-supporting file-like object
    containing a JSON document) to a Python object.

    The file's full contents are read eagerly and handed to :func:`loads`,
    which does the actual decoding; all keyword arguments are forwarded.
    """
    return loads(
        fp.read(),
        cls=cls,
        object_hook=object_hook,
        parse_float=parse_float,
        parse_int=parse_int,
        parse_constant=parse_constant,
        object_pairs_hook=object_pairs_hook,
        **kw)
def loads(s, encoding=None, cls=None, object_hook=None, parse_float=None,
        parse_int=None, parse_constant=None, object_pairs_hook=None, **kw):
    """Deserialize ``s`` (a ``str`` containing a JSON document) to a
    Python object.

    ``encoding`` is ignored and kept only for backward compatibility
    (it is deprecated).  When every option is at its default value the
    shared module-level decoder is reused; otherwise a one-off decoder
    (``cls`` or ``JSONDecoder``) is built with the requested hooks.
    """
    if not isinstance(s, str):
        raise TypeError('the JSON object must be str, not {!r}'.format(
            s.__class__.__name__))
    if s.startswith(u'\ufeff'):
        raise JSONDecodeError("Unexpected UTF-8 BOM (decode using utf-8-sig)",
                              s, 0)
    all_defaults = (cls is None and object_hook is None
                    and parse_int is None and parse_float is None
                    and parse_constant is None
                    and object_pairs_hook is None and not kw)
    if all_defaults:
        return _default_decoder.decode(s)
    decoder_cls = JSONDecoder if cls is None else cls
    # Forward only the hooks the caller actually supplied.
    hooks = {
        'object_hook': object_hook,
        'object_pairs_hook': object_pairs_hook,
        'parse_float': parse_float,
        'parse_int': parse_int,
        'parse_constant': parse_constant,
    }
    for key, value in hooks.items():
        if value is not None:
            kw[key] = value
    return decoder_cls(**kw).decode(s)
| true | true |
f72b05a1e16676d1178d4682bdc7c44175562994 | 3,192 | py | Python | scripts/loadelastic-aurora.py | dbmi-pitt/aurora-meta | a0d3d3963fce2639081cb55715b5357cd0e21902 | [
"Apache-2.0"
] | null | null | null | scripts/loadelastic-aurora.py | dbmi-pitt/aurora-meta | a0d3d3963fce2639081cb55715b5357cd0e21902 | [
"Apache-2.0"
] | null | null | null | scripts/loadelastic-aurora.py | dbmi-pitt/aurora-meta | a0d3d3963fce2639081cb55715b5357cd0e21902 | [
"Apache-2.0"
] | null | null | null | import requests, json, os
import argparse
import pandas as pd
import ijson
import time
# Elasticsearch python libs
from elasticsearch import Elasticsearch
from elasticsearch import helpers
directory = ""
indexName = "aurora-meta2"
typeName = "patient"
THRESHOLD = 10000 # this regulates how much data gets loaded then is processed in a bulk group
PK = "ID"
json_root = "item"
errors = []
def loadit():
    """Bulk-load every ``*.json`` file in ``directory`` into Elasticsearch.

    Each file is streamed with ``ijson`` (root prefix ``json_root``); every
    record is indexed into ``indexName`` using ``rec['clin'][PK]`` as the
    document id, in batches of ``THRESHOLD`` actions.  Reads the
    module-level configuration globals (``directory``, ``indexName``,
    ``PK``, ``json_root``, ``THRESHOLD``) and appends the names of files
    that fail to bulk-load to the module-level ``errors`` list.

    Fixes over the original implementation:
    * the tail batch is now always flushed — previously the final flush was
      guarded by ``if i < THRESHOLD``, so any file containing more records
      than THRESHOLD silently dropped the records accumulated after the
      last full batch;
    * per-record debug prints (iterator, record, primary key) removed;
    * the confusing ``i``/``batchCtr``/``bulkCount`` counters are replaced
      by the batch list length.
    """
    def _flush(es, actions, source_name):
        # Send one bulk batch; on failure record the file and keep going.
        try:
            rtn_status = helpers.bulk(es, actions)
            if rtn_status:
                print(rtn_status)
        except Exception as ex:
            print("Loading failed for " + source_name)
            errors.append(source_name)
            print('Error:' + str(ex))

    es = Elasticsearch([{'host': 'localhost', 'port': '9200'}])
    for filename in os.listdir(directory):
        if not filename.endswith(".json"):
            continue
        json_filename = directory + filename
        print("Loading " + json_filename)
        with open(json_filename, 'r') as input_file:
            bulk_action = []
            for rec in ijson.items(input_file, json_root):
                pk = rec['clin'][PK]
                bulk_action.append({
                    "_index": indexName,
                    # "_type" omitted: deprecated in Elasticsearch 7+.
                    "_id": pk,
                    "_source": rec,
                })
                if len(bulk_action) >= THRESHOLD:
                    _flush(es, bulk_action, json_filename)
                    bulk_action = []
            # Always flush whatever remains after the last full batch.
            if bulk_action:
                _flush(es, bulk_action, json_filename)
if __name__ == "__main__":
    # Command-line options; each one overrides a module-level default.
    parser = argparse.ArgumentParser()
    parser.add_argument("-d", required=True, help="dir path to json file(s)")
    parser.add_argument("-thres", help="set the batch threshold")
    parser.add_argument("-i", help="set the index name")
    parser.add_argument("-t", help="set the type")
    parser.add_argument("-pk", help="primary key of the record, default 'ID'")
    parser.add_argument("-r", help="json root node, default 'item', passing 'NOROOT' will ignore the root item")
    args = parser.parse_args()
    print("Args:")
    print(args)

    if args.d:
        directory = args.d
        # Normalise the directory path to end with a slash.
        if directory[-1] != '/':
            directory += '/'
    if args.thres:
        THRESHOLD = int(args.thres)
        print("Batch threshold: " + str(THRESHOLD))
        print(type(THRESHOLD))
    if args.i:
        indexName = args.i
    if args.t:
        typeName = args.t
    if args.pk:
        PK = args.pk
    if args.r:
        # "NOROOT" means documents sit at the top level of the file.
        json_root = "" if args.r == "NOROOT" else args.r

    start = time.time()
    loadit()
    end = time.time()
    print("Elapsed time: {}".format((end - start)))
    if len(errors) > 0:
        print("The following files failed:")
        print(errors)
| 25.95122 | 109 | 0.628446 | import requests, json, os
import argparse
import pandas as pd
import ijson
import time
from elasticsearch import Elasticsearch
from elasticsearch import helpers
directory = ""
indexName = "aurora-meta2"
typeName = "patient"
THRESHOLD = 10000
PK = "ID"
json_root = "item"
errors = []
def loadit():
es = Elasticsearch([{'host': 'localhost', 'port': '9200'}])
for filename in os.listdir(directory):
if filename.endswith(".json"):
json_filename = directory+filename
print("Loading " + json_filename)
with open(json_filename, 'r') as input_file:
i = 1
batchCtr = 1
bulk_action = []
bulkCount = 0
ij = ijson.items(input_file, json_root)
print(ij)
for rec in ij:
print(rec)
pk = rec['clin'][PK]
print(pk)
bulk = {
"_index" : indexName,
"_id" : pk,
"_source" : rec,
}
bulk_action.append(bulk)
i = i + 1
batchCtr = batchCtr + 1
if batchCtr > THRESHOLD:
try:
bulkCount = bulkCount + batchCtr
rtn_status = helpers.bulk(es, bulk_action)
if rtn_status:
print(rtn_status)
batchCtr = 1
bulk_action = []
except Exception as ex:
print ("Loading failed for " + json_filename)
errors.append(json_filename)
print ('Error:' + str(ex))
if i < THRESHOLD:
try:
rtn_status = helpers.bulk(es, bulk_action)
if rtn_status:
print(rtn_status)
batchCtr = 1
bulk_action = []
except Exception as ex:
print ('Error:' + str(ex))
print ("Loading failed for " + json_filename)
errors.append(json_filename)
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("-d", required=True, help="dir path to json file(s)")
parser.add_argument("-thres", help="set the batch threshold")
parser.add_argument("-i", help="set the index name")
parser.add_argument("-t", help="set the type")
parser.add_argument("-pk", help="primary key of the record, default 'ID'")
parser.add_argument("-r", help="json root node, default 'item', passing 'NOROOT' will ignore the root item")
args = parser.parse_args()
print("Args:")
print(args)
if args.d:
directory = args.d
if directory[-1] != '/':
directory = directory + '/'
if args.thres:
THRESHOLD = int(args.thres)
print ("Batch threshold: " + str(THRESHOLD))
print(type(THRESHOLD))
if args.i:
indexName = args.i
if args.t:
typeName = args.t
if args.pk:
PK = args.pk
if args.r:
if args.r == "NOROOT":
json_root = ""
else:
json_root = args.r
start = time.time()
loadit()
end = time.time()
print("Elapsed time: {}".format((end-start)))
if len(errors) > 0:
print("The following files failed:")
print(errors)
| true | true |
f72b05a397836379cf15a5545dc470a6f2762a91 | 5,781 | py | Python | smoke/data/build.py | SmallMunich/Smoke | 591a03bdb5cad962999914c9a97c7a8bed9e529b | [
"MIT"
] | 2 | 2022-03-08T02:54:57.000Z | 2022-03-10T09:09:40.000Z | smoke/data/build.py | SmallMunich/Smoke | 591a03bdb5cad962999914c9a97c7a8bed9e529b | [
"MIT"
] | null | null | null | smoke/data/build.py | SmallMunich/Smoke | 591a03bdb5cad962999914c9a97c7a8bed9e529b | [
"MIT"
] | null | null | null | import logging
import copy
import bisect
import numpy as np
import torch.utils.data
from smoke.utils.comm import get_world_size
from smoke.utils.imports import import_file
from smoke.utils.envs import seed_all_rng
from . import datasets as D
from . import samplers
from .transforms import build_transforms
from .collate_batch import BatchCollator
def build_dataset(cfg, transforms, dataset_catalog, is_train=True):
    """Instantiate the datasets named in the config.

    Args:
        cfg: config node; ``DATASETS.TRAIN`` / ``DATASETS.TEST`` lists the
            dataset names to build.
        transforms (callable): transforms applied to each (image, target).
        dataset_catalog: maps a dataset name to its factory name and kwargs.
        is_train (bool): training (concatenate) vs. testing (keep separate).

    Returns:
        A one-element list holding the (possibly concatenated) training
        dataset, or — for testing — a list with one dataset per name.
    """
    names = cfg.DATASETS.TRAIN if is_train else cfg.DATASETS.TEST
    if not isinstance(names, (list, tuple)):
        raise RuntimeError(
            "dataset_list should be a list of strings, got {}".format(names)
        )
    built = []
    for name in names:
        entry = dataset_catalog.get(name)
        factory = getattr(D, entry["factory"])
        kwargs = entry["args"]
        kwargs.update(cfg=cfg, is_train=is_train, transforms=transforms)
        built.append(factory(**kwargs))
    if not is_train:
        # Testing: one dataset object per requested name.
        return built
    # Training: merge everything into a single dataset.
    merged = built[0] if len(built) == 1 else D.ConcatDataset(built)
    return [merged]
def make_data_loader(cfg, is_train=True):
    """Create the DataLoader(s) for training or evaluation.

    The global batch size is split evenly across GPUs.  For training, a
    single DataLoader over the (possibly concatenated) dataset is returned;
    otherwise a list with one DataLoader per test dataset is returned.
    """
    num_gpus = get_world_size()
    total_batch = cfg.SOLVER.IMS_PER_BATCH if is_train else cfg.TEST.IMS_PER_BATCH
    assert total_batch % num_gpus == 0, \
        "SOLVER.IMS_PER_BATCH ({}) must be divisible by the number of GPUs ({}) used." \
        .format(total_batch, num_gpus)
    images_per_gpu = total_batch // num_gpus

    # Retained from the original for parity; aspect-ratio grouping is not
    # actually applied anywhere below.
    aspect_grouping = [1] if cfg.DATALOADER.ASPECT_RATIO_GROUPING else []

    paths_module = import_file(
        "smoke.config.paths_catalog", cfg.PATHS_CATALOG, True
    )
    transforms = build_transforms(cfg, is_train)
    datasets = build_dataset(cfg, transforms, paths_module.DatasetCatalog, is_train)

    data_loaders = []
    for ds in datasets:
        batch_sampler = torch.utils.data.sampler.BatchSampler(
            samplers.TrainingSampler(len(ds)), images_per_gpu, drop_last=True
        )
        data_loaders.append(
            torch.utils.data.DataLoader(
                ds,
                num_workers=cfg.DATALOADER.NUM_WORKERS,
                batch_sampler=batch_sampler,
                collate_fn=BatchCollator(cfg.DATALOADER.SIZE_DIVISIBILITY),
                worker_init_fn=worker_init_reset_seed,
            )
        )
    if is_train:
        # During training a single (possibly concatenated) loader is expected.
        assert len(data_loaders) == 1
        return data_loaders[0]
    return data_loaders
def build_test_loader(cfg, is_train=False):
    """Build evaluation DataLoaders: batch size 1, in-order sampling.

    NOTE(review): like the original, this returns only the loader of the
    *last* dataset (the original in-code comment suggests ``data_loaders``
    was intended).  Behaviour is kept unchanged so existing callers are
    unaffected — confirm before changing the return value.
    """
    paths_module = import_file(
        "smoke.config.paths_catalog", cfg.PATHS_CATALOG, True
    )
    transforms = build_transforms(cfg, is_train)
    datasets = build_dataset(cfg, transforms, paths_module.DatasetCatalog, is_train)

    data_loaders = []
    for ds in datasets:
        batch_sampler = torch.utils.data.sampler.BatchSampler(
            samplers.InferenceSampler(len(ds)), 1, drop_last=False
        )
        data_loader = torch.utils.data.DataLoader(
            ds,
            num_workers=cfg.DATALOADER.NUM_WORKERS,
            batch_sampler=batch_sampler,
            collate_fn=BatchCollator(cfg.DATALOADER.SIZE_DIVISIBILITY),
        )
        data_loaders.append(data_loader)
    return data_loader
def trivial_batch_collator(batch):
    """Identity collate function: hand the list of samples back unchanged."""
    return batch
def worker_init_reset_seed(worker_id):
    """Give each DataLoader worker process its own RNG seed.

    A random base is drawn and offset by ``worker_id`` so workers do not
    produce identical augmentation streams.
    """
    base = np.random.randint(2 ** 31)
    seed_all_rng(base + worker_id)
| 34.825301 | 145 | 0.669088 | import logging
import copy
import bisect
import numpy as np
import torch.utils.data
from smoke.utils.comm import get_world_size
from smoke.utils.imports import import_file
from smoke.utils.envs import seed_all_rng
from . import datasets as D
from . import samplers
from .transforms import build_transforms
from .collate_batch import BatchCollator
def build_dataset(cfg, transforms, dataset_catalog, is_train=True):
dataset_list = cfg.DATASETS.TRAIN if is_train else cfg.DATASETS.TEST
if not isinstance(dataset_list, (list, tuple)):
raise RuntimeError(
"dataset_list should be a list of strings, got {}".format(dataset_list)
)
datasets = []
for dataset_name in dataset_list:
data = dataset_catalog.get(dataset_name)
factory = getattr(D, data["factory"])
args = data["args"]
args["cfg"] = cfg
args["is_train"] = is_train
args["transforms"] = transforms
dataset = factory(**args)
datasets.append(dataset)
if not is_train:
return datasets
dataset = datasets[0]
if len(datasets) > 1:
dataset = D.ConcatDataset(datasets)
return [dataset]
def make_data_loader(cfg, is_train=True):
num_gpus = get_world_size()
if is_train:
images_per_batch = cfg.SOLVER.IMS_PER_BATCH
assert images_per_batch % num_gpus == 0, \
"SOLVER.IMS_PER_BATCH ({}) must be divisible by the number of GPUs ({}) used." \
.format(images_per_batch, num_gpus)
images_per_gpu = images_per_batch // num_gpus
else:
images_per_batch = cfg.TEST.IMS_PER_BATCH
assert images_per_batch % num_gpus == 0, \
"SOLVER.IMS_PER_BATCH ({}) must be divisible by the number of GPUs ({}) used." \
.format(images_per_batch, num_gpus)
images_per_gpu = images_per_batch // num_gpus
aspect_grouping = [1] if cfg.DATALOADER.ASPECT_RATIO_GROUPING else []
path_catalog = import_file(
"smoke.config.paths_catalog", cfg.PATHS_CATALOG, True
)
DatasetCatalog = path_catalog.DatasetCatalog
transforms = build_transforms(cfg, is_train)
datasets = build_dataset(cfg, transforms, DatasetCatalog, is_train)
data_loaders = []
for dataset in datasets:
sampler = samplers.TrainingSampler(len(dataset))
batch_sampler = torch.utils.data.sampler.BatchSampler(
sampler, images_per_gpu, drop_last=True
)
collator = BatchCollator(cfg.DATALOADER.SIZE_DIVISIBILITY)
num_workers = cfg.DATALOADER.NUM_WORKERS
data_loader = torch.utils.data.DataLoader(
dataset,
num_workers=num_workers,
batch_sampler=batch_sampler,
collate_fn=collator,
worker_init_fn=worker_init_reset_seed,
)
data_loaders.append(data_loader)
if is_train:
assert len(data_loaders) == 1
return data_loaders[0]
return data_loaders
def build_test_loader(cfg, is_train=False):
path_catalog = import_file(
"smoke.config.paths_catalog", cfg.PATHS_CATALOG, True
)
DatasetCatalog = path_catalog.DatasetCatalog
transforms = build_transforms(cfg, is_train)
datasets = build_dataset(cfg, transforms, DatasetCatalog, is_train)
data_loaders = []
for dataset in datasets:
sampler = samplers.InferenceSampler(len(dataset))
batch_sampler = torch.utils.data.sampler.BatchSampler(
sampler, 1, drop_last=False
)
collator = BatchCollator(cfg.DATALOADER.SIZE_DIVISIBILITY)
num_workers = cfg.DATALOADER.NUM_WORKERS
data_loader = torch.utils.data.DataLoader(
dataset,
num_workers=num_workers,
batch_sampler=batch_sampler,
collate_fn=collator,
)
data_loaders.append(data_loader)
return data_loader
def trivial_batch_collator(batch):
return batch
def worker_init_reset_seed(worker_id):
seed_all_rng(np.random.randint(2 ** 31) + worker_id)
| true | true |
f72b0684f170d3fddc3fc47d05fff76101d188b3 | 1,072 | py | Python | i3wsgroups/cli.py | damani42/i3-workspace-groups | 13fe8e22e829166eb22df031b4c39f3501dfb362 | [
"MIT"
] | null | null | null | i3wsgroups/cli.py | damani42/i3-workspace-groups | 13fe8e22e829166eb22df031b4c39f3501dfb362 | [
"MIT"
] | null | null | null | i3wsgroups/cli.py | damani42/i3-workspace-groups | 13fe8e22e829166eb22df031b4c39f3501dfb362 | [
"MIT"
] | null | null | null | import argparse
def add_common_args(parser: argparse.ArgumentParser):
    """Register the flags shared by every command on *parser*."""
    parser.add_argument(
        '--dry-run',
        default=False,
        action='store_true',
        help='If true, will not actually do any changes to i3 workspaces.')
    parser.add_argument(
        '--log-level',
        default='warning',
        choices=('debug', 'info', 'warning', 'error', 'critical'),
        help='Logging level for stderr and syslog.')
def add_workspace_naming_args(parser: argparse.ArgumentParser) -> None:
    """Register the flags that control how workspaces are named on *parser*."""
    parser.add_argument(
        '--window-icons-all-groups',
        default=False,
        action='store_true',
        help='If true, will add the icons of the open windows to workspaces'
        ' in all groups, and not just the active group. Also implies '
        '--window-icons.')
    parser.add_argument(
        '--renumber-workspaces',
        default=False,
        action='store_true',
        help='If true, will renumber workspaces in every groups so that they '
        'are in numerical order, similar to tmux\'s renumber-windows option.')
| 34.580645 | 78 | 0.636194 | import argparse
def add_common_args(parser: argparse.ArgumentParser):
parser.add_argument(
'--dry-run',
action='store_true',
default=False,
help='If true, will not actually do any changes to i3 workspaces.')
parser.add_argument(
'--log-level',
choices=('debug', 'info', 'warning', 'error', 'critical'),
default='warning',
help='Logging level for stderr and syslog.')
def add_workspace_naming_args(parser: argparse.ArgumentParser) -> None:
parser.add_argument(
'--window-icons-all-groups',
action='store_true',
default=False,
help='If true, will add the icons of the open windows to workspaces'
' in all groups, and not just the active group. Also implies '
'--window-icons.')
parser.add_argument(
'--renumber-workspaces',
action='store_true',
default=False,
help='If true, will renumber workspaces in every groups so that they '
'are in numerical order, similar to tmux\'s renumber-windows option.')
| true | true |
f72b073f2c249ce06aea52ce2b03bad057fb64ac | 10,626 | py | Python | src/neqsim/process/processTools.py | kwafafoa/neqsimpython | 2a540297552b39dac2666bbfb7c76eda0f5779db | [
"Apache-2.0"
] | null | null | null | src/neqsim/process/processTools.py | kwafafoa/neqsimpython | 2a540297552b39dac2666bbfb7c76eda0f5779db | [
"Apache-2.0"
] | null | null | null | src/neqsim/process/processTools.py | kwafafoa/neqsimpython | 2a540297552b39dac2666bbfb7c76eda0f5779db | [
"Apache-2.0"
] | null | null | null | import jpype
import jpype.imports
from jpype.types import *
from neqsim.neqsimpython import neqsim
processoperations = neqsim.processSimulation.processSystem.ProcessSystem()
def stream(thermoSystem, name="stream ?", t=0, p=0):
    """Create a Stream from *thermoSystem*, register it and return it.

    A non-zero *t* / *p* overrides the fluid's temperature / pressure
    before the stream is created; a value of exactly 0 means "leave the
    fluid's current condition unchanged".
    """
    if t != 0:
        thermoSystem.setTemperature(t)
    if p != 0:
        thermoSystem.setPressure(p)
    stream = neqsim.processSimulation.processEquipment.stream.Stream(thermoSystem)
    stream.setName(name)
    processoperations.add(stream)
    return stream


def neqstream(thermoSystem, name="stream ?", t=0, p=0):
    """Create a NeqStream (non-equilibrium stream) from *thermoSystem*.

    Same temperature/pressure override convention as :func:`stream`.
    """
    if t != 0:
        thermoSystem.setTemperature(t)
    if p != 0:
        thermoSystem.setPressure(p)
    stream = neqsim.processSimulation.processEquipment.stream.NeqStream(thermoSystem)
    stream.setName(name)
    processoperations.add(stream)
    return stream


def recycle(teststream, name="recycle ?"):
    """Create a Recycle unit fed by *teststream*, register it and return it."""
    recycle1 = neqsim.processSimulation.processEquipment.util.Recycle()
    # Fix: the name parameter was previously accepted but ignored.
    recycle1.setName(name)
    recycle1.addStream(teststream)
    processoperations.add(recycle1)
    return recycle1


def saturator(teststream, name="water saturator"):
    """Create a StreamSaturatorUtil on *teststream*, register and return it."""
    streamsaturator = neqsim.processSimulation.processEquipment.util.StreamSaturatorUtil(teststream)
    # Fix: the name parameter was previously accepted but ignored.
    streamsaturator.setName(name)
    processoperations.add(streamsaturator)
    return streamsaturator


def glycoldehydrationlmodule(teststream, name="TEG process"):
    """Create a GlycolDehydrationlModule with *teststream* as absorber feed."""
    dehydrationlmodule = neqsim.processSimulation.processSystem.processModules.GlycolDehydrationlModule()
    dehydrationlmodule.setName(name)
    dehydrationlmodule.addInputStream("gasStreamToAbsorber", teststream)
    processoperations.add(dehydrationlmodule)
    return dehydrationlmodule


def openprocess(filename):
    """Open a saved process from *filename* and make it the active process.

    Fix: the original assigned to a local variable that shadowed the
    module-level ``processoperations``, so helpers such as ``run()`` and
    ``view()`` kept operating on the previous (empty) process.  The opened
    process is still returned, so existing callers are unaffected.
    """
    global processoperations
    processoperations = neqsim.processSimulation.processSystem.ProcessSystem.open(filename)
    return processoperations
def separator(teststream, name="separator ?"):
    """Create a two-phase Separator on *teststream*, register and return it."""
    unit = neqsim.processSimulation.processEquipment.separator.Separator(teststream)
    unit.setName(name)
    processoperations.add(unit)
    return unit


def GORfitter(teststream, name="GOR fitter ?"):
    """Create a GORfitter unit on *teststream*, register and return it."""
    unit = neqsim.processSimulation.processEquipment.util.GORfitter(name, teststream)
    unit.setName(name)
    processoperations.add(unit)
    return unit


def simpleTEGAbsorber(name="TEG absorber ?"):
    """Create a SimpleTEGAbsorber (streams attached later), register and return it."""
    unit = neqsim.processSimulation.processEquipment.absorber.SimpleTEGAbsorber()
    unit.setName(name)
    processoperations.add(unit)
    return unit


def waterStripperColumn(name="water stripper ?"):
    """Create a WaterStripperColumn, register and return it."""
    unit = neqsim.processSimulation.processEquipment.absorber.WaterStripperColumn()
    unit.setName(name)
    processoperations.add(unit)
    return unit


def gasscrubber(teststream, name="scrubber ?"):
    """Create a GasScrubber on *teststream*, register and return it."""
    unit = neqsim.processSimulation.processEquipment.separator.GasScrubber(teststream)
    unit.setName(name)
    processoperations.add(unit)
    return unit


def separator3phase(teststream, name="separator ?"):
    """Create a ThreePhaseSeparator on *teststream*, register and return it."""
    unit = neqsim.processSimulation.processEquipment.separator.ThreePhaseSeparator(teststream)
    unit.setName(name)
    processoperations.add(unit)
    return unit


def valve(teststream, p=1.0, name="valve ?"):
    """Create a ThrottlingValve on *teststream* with outlet pressure *p*."""
    unit = neqsim.processSimulation.processEquipment.valve.ThrottlingValve(teststream)
    unit.setOutletPressure(p)
    unit.setName(name)
    processoperations.add(unit)
    return unit


def recycle2(name="recycle ?"):
    """Create an empty named Recycle unit (streams are attached later)."""
    unit = neqsim.processSimulation.processEquipment.util.Recycle(name)
    processoperations.add(unit)
    return unit


def calculator(name="calculator ?"):
    """Create a Calculator utility unit, register and return it."""
    unit = neqsim.processSimulation.processEquipment.util.Calculator(name)
    processoperations.add(unit)
    return unit


def setpoint(name1, unit1, name2, unit2):
    """Create a SetPoint linking variable *name1*/*unit1* to *name2*/*unit2*."""
    sp = neqsim.processSimulation.processEquipment.util.SetPoint(name1, unit1, name2, unit2)
    processoperations.add(sp)
    return sp


def filters(teststream):
    """Create a Filter on *teststream*, register and return it."""
    unit = neqsim.processSimulation.processEquipment.filter.Filter(teststream)
    processoperations.add(unit)
    return unit


def compressor(teststream, pres=10.0, name="compressor ?"):
    """Create a Compressor on *teststream* with outlet pressure *pres*."""
    unit = neqsim.processSimulation.processEquipment.compressor.Compressor(teststream)
    unit.setOutletPressure(pres)
    unit.setName(name)
    processoperations.add(unit)
    return unit


def compressorChart(compressor, curveConditions, speed, flow, head, polyEff):
    """Attach speed/flow/head/efficiency performance curves to *compressor*."""
    compressor.getCompressorChart().setCurves(
        JDouble[:](curveConditions), JDouble[:](speed),
        JDouble[:][:](flow), JDouble[:][:](head), JDouble[:][:](polyEff))


def compressorSurgeCurve(compressor, curveConditions, surgeflow, surgehead):
    """Attach a surge curve (flow/head limit) to *compressor*."""
    compressor.getCompressorChart().getSurgeCurve().setCurve(
        JDouble[:](curveConditions), JDouble[:](surgeflow), JDouble[:](surgehead))


def compressorStoneWallCurve(compressor, curveConditions, stoneWallflow, stoneWallHead):
    """Attach a stone-wall (choke) curve to *compressor*."""
    compressor.getCompressorChart().getStoneWallCurve().setCurve(
        JDouble[:](curveConditions), JDouble[:](stoneWallflow), JDouble[:](stoneWallHead))
def pump(teststream, p=1.0, name="pump ?"):
    """Create a Pump on *teststream* with outlet pressure *p*."""
    unit = neqsim.processSimulation.processEquipment.pump.Pump(teststream)
    unit.setOutletPressure(p)
    unit.setName(name)
    processoperations.add(unit)
    return unit


def expander(teststream, p, name="expander ?"):
    """Create an Expander on *teststream* with outlet pressure *p*."""
    unit = neqsim.processSimulation.processEquipment.expander.Expander(teststream)
    unit.setOutletPressure(p)
    unit.setName(name)
    processoperations.add(unit)
    return unit


def mixer(name=""):
    """Create an empty StaticMixer (streams are attached later)."""
    unit = neqsim.processSimulation.processEquipment.mixer.StaticMixer()
    unit.setName(name)
    processoperations.add(unit)
    return unit


def phasemixer(name=""):
    """Create an empty StaticPhaseMixer (streams are attached later)."""
    unit = neqsim.processSimulation.processEquipment.mixer.StaticPhaseMixer()
    unit.setName(name)
    processoperations.add(unit)
    return unit


def nequnit(teststream, equipment="pipeline", flowpattern="stratified", numberOfNodes=100):
    """Create a non-equilibrium NeqSimUnit on *teststream*."""
    unit = neqsim.processSimulation.processEquipment.util.NeqSimUnit(teststream, equipment, flowpattern)
    unit.setNumberOfNodes(numberOfNodes)
    processoperations.add(unit)
    return unit


def splitter(teststream, splitfactors, name=""):
    """Create a Splitter on *teststream*; *splitfactors* sets branch fractions."""
    unit = neqsim.processSimulation.processEquipment.splitter.Splitter(teststream)
    unit.setSplitNumber(len(splitfactors))
    unit.setSplitFactors(JDouble[:](splitfactors))
    unit.setName(name)
    processoperations.add(unit)
    return unit


def heater(teststream, name=""):
    """Create a Heater on *teststream*, register and return it."""
    unit = neqsim.processSimulation.processEquipment.heatExchanger.Heater(teststream)
    unit.setName(name)
    processoperations.add(unit)
    return unit


def simplereservoir(fluid, name="Reservoir 1", gasvolume=10.0 * 1e7, oilvolume=120.0 * 1e6, watervolume=10.0e6):
    """Create a SimpleReservoir holding *fluid* with the given phase volumes."""
    unit = neqsim.processSimulation.processEquipment.reservoir.SimpleReservoir(name)
    unit.setReservoirFluid(fluid, gasvolume, oilvolume, watervolume)
    processoperations.add(unit)
    return unit


def cooler(teststream, name=""):
    """Create a Cooler on *teststream*, register and return it."""
    unit = neqsim.processSimulation.processEquipment.heatExchanger.Cooler(teststream)
    unit.setName(name)
    processoperations.add(unit)
    return unit


def heatExchanger(stream1, stream2=None, name=""):
    """Create a HeatExchanger; one-stream or two-stream form depending on *stream2*."""
    if stream2 is None:
        unit = neqsim.processSimulation.processEquipment.heatExchanger.HeatExchanger(stream1)
    else:
        unit = neqsim.processSimulation.processEquipment.heatExchanger.HeatExchanger(stream1, stream2)
    unit.setName(name)
    processoperations.add(unit)
    return unit


def distillationColumn(trays=5, reboil=True, condenser=True, name="destColumn"):
    """Create a DistillationColumn with *trays* trays and optional reboiler/condenser."""
    unit = neqsim.processSimulation.processEquipment.distillation.DistillationColumn(trays, reboil, condenser)
    unit.setName(name)
    processoperations.add(unit)
    return unit


def neqheater(teststream, name=""):
    """Create a NeqHeater on *teststream*, register and return it."""
    unit = neqsim.processSimulation.processEquipment.heatExchanger.NeqHeater(teststream)
    unit.setName(name)
    processoperations.add(unit)
    return unit
def twophasepipe(teststream, position, diameter, height, outTemp, rough):
    """Create a transient TwoPhasePipeLine over the given geometry profile.

    *position*, *diameter*, *height*, *outTemp* and *rough* describe the
    pipe leg by leg; one leg is created between each pair of positions.
    """
    unit = neqsim.processSimulation.processEquipment.pipeline.TwoPhasePipeLine(teststream)
    unit.setOutputFileName("c:/tempNew20.nc")
    unit.setInitialFlowPattern("annular")
    unit.setNumberOfLegs(len(position) - 1)
    unit.setNumberOfNodesInLeg(60)
    unit.setLegPositions(position)
    unit.setHeightProfile(height)
    unit.setPipeDiameters(diameter)
    unit.setPipeWallRoughness(rough)
    unit.setOuterTemperatures(outTemp)
    unit.setEquilibriumMassTransfer(0)
    unit.setEquilibriumHeatTransfer(1)
    processoperations.add(unit)
    return unit


def pipe(teststream, length, deltaElevation, diameter, rough):
    """Create a single AdiabaticPipe segment on *teststream*."""
    unit = neqsim.processSimulation.processEquipment.pipeline.AdiabaticPipe(teststream)
    unit.setDiameter(diameter)
    unit.setLength(length)
    unit.setPipeWallRoughness(rough)
    unit.setInletElevation(0.0)
    unit.setOutletElevation(deltaElevation)
    processoperations.add(unit)
    return unit


def pipeline(teststream, position, diameter, height, outTemp, rough, outerHeatTransferCoefficients, pipeWallHeatTransferCoefficients, numberOfNodesInLeg=50):
    """Create a OnePhasePipeLine over the given geometry and heat-transfer profile."""
    unit = neqsim.processSimulation.processEquipment.pipeline.OnePhasePipeLine(teststream)
    unit.setOutputFileName("c:/tempNew20.nc")
    unit.setNumberOfLegs(len(position) - 1)
    unit.setNumberOfNodesInLeg(numberOfNodesInLeg)
    unit.setLegPositions(JDouble[:](position))
    unit.setHeightProfile(JDouble[:](height))
    unit.setPipeDiameters(JDouble[:](diameter))
    unit.setPipeWallRoughness(JDouble[:](rough))
    unit.setPipeOuterHeatTransferCoefficients(JDouble[:](outerHeatTransferCoefficients))
    unit.setPipeWallHeatTransferCoefficients(JDouble[:](pipeWallHeatTransferCoefficients))
    unit.setOuterTemperatures(JDouble[:](outTemp))
    processoperations.add(unit)
    return unit
def clear():
    """Remove every unit from the module-level process."""
    processoperations.clearAll()


def run():
    """Solve the module-level process flowsheet."""
    processoperations.run()


def clearProcess():
    """Alias of :func:`clear` kept for backward compatibility."""
    processoperations.clearAll()


def runProcess():
    """Alias of :func:`run` kept for backward compatibility."""
    processoperations.run()


def runProcessAsThread(process):
    """Run *process* on a background Java thread and return the thread.

    Fixes two defects in the original implementation:
    * ``jpype.JPackage('java.lang.Thread')`` yields a *package* object, not
      the ``java.lang.Thread`` class — ``jpype.JClass`` is required;
    * ``Thread.run()`` executes the runnable synchronously on the calling
      thread; ``start()`` is needed to actually run it in the background.
    """
    Thread = jpype.JClass('java.lang.Thread')
    threadProcess = Thread(process)
    threadProcess.start()
    return threadProcess


def getProcess():
    """Return the module-level ProcessSystem holding all registered units."""
    return processoperations


def runtrans():
    """Run a transient (time-stepping) simulation of the process."""
    processoperations.runTransient()


def view():
    """Display the results of the module-level process."""
    processoperations.displayResult()


def viewProcess():
    """Alias of :func:`view` kept for backward compatibility."""
    processoperations.displayResult()
| 36.768166 | 159 | 0.769245 | import jpype
import jpype.imports
from jpype.types import *
from neqsim.neqsimpython import neqsim
processoperations = neqsim.processSimulation.processSystem.ProcessSystem()
def stream(thermoSystem, name="stream ?", t=0, p=0):
if t != 0:
thermoSystem.setTemperature(t)
if p != 0:
thermoSystem.setPressure(p)
stream = neqsim.processSimulation.processEquipment.stream.Stream(thermoSystem)
stream.setName(name)
processoperations.add(stream)
return stream
def neqstream(thermoSystem, name="stream ?", t=0, p=0):
if t != 0:
thermoSystem.setTemperature(t)
if p != 0:
thermoSystem.setPressure(p)
stream = neqsim.processSimulation.processEquipment.stream.NeqStream(thermoSystem)
stream.setName(name)
processoperations.add(stream)
return stream
def recycle(teststream, name="recycle ?"):
recycle1 = neqsim.processSimulation.processEquipment.util.Recycle()
recycle1.addStream(teststream)
processoperations.add(recycle1)
return recycle1
def saturator(teststream, name="water saturator"):
streamsaturator = neqsim.processSimulation.processEquipment.util.StreamSaturatorUtil(teststream)
processoperations.add(streamsaturator)
return streamsaturator
def glycoldehydrationlmodule(teststream, name="TEG process"):
dehydrationlmodule = neqsim.processSimulation.processSystem.processModules.GlycolDehydrationlModule()
dehydrationlmodule.setName(name)
dehydrationlmodule.addInputStream("gasStreamToAbsorber", teststream)
processoperations.add(dehydrationlmodule)
return dehydrationlmodule
def openprocess(filename):
processoperations = neqsim.processSimulation.processSystem.ProcessSystem.open(filename)
return processoperations
def separator(teststream, name="separator ?"):
separator = neqsim.processSimulation.processEquipment.separator.Separator(teststream)
separator.setName(name)
processoperations.add(separator)
return separator
def GORfitter(teststream, name="GOR fitter ?"):
GORfitter1 = neqsim.processSimulation.processEquipment.util.GORfitter(name, teststream)
GORfitter1.setName(name)
processoperations.add(GORfitter1)
return GORfitter1
def simpleTEGAbsorber(name="TEG absorber ?"):
absorber = neqsim.processSimulation.processEquipment.absorber.SimpleTEGAbsorber()
absorber.setName(name)
processoperations.add(absorber)
return absorber
def waterStripperColumn(name="water stripper ?"):
stripper = neqsim.processSimulation.processEquipment.absorber.WaterStripperColumn()
stripper.setName(name)
processoperations.add(stripper)
return stripper
def gasscrubber(teststream, name="scrubber ?"):
separator = neqsim.processSimulation.processEquipment.separator.GasScrubber(teststream)
separator.setName(name)
processoperations.add(separator)
return separator
def separator3phase(teststream, name="separator ?"):
separator = neqsim.processSimulation.processEquipment.separator.ThreePhaseSeparator(teststream)
separator.setName(name)
processoperations.add(separator)
return separator
def valve(teststream, p=1.0, name="valve ?"):
    """Create a throttling valve on `teststream` with outlet pressure `p`."""
    throttling_valve = neqsim.processSimulation.processEquipment.valve.ThrottlingValve(teststream)
    throttling_valve.setOutletPressure(p)
    throttling_valve.setName(name)
    processoperations.add(throttling_valve)
    return throttling_valve
def recycle2(name="recycle ?"):
    """Create a named recycle unit, register and return it."""
    recycle_unit = neqsim.processSimulation.processEquipment.util.Recycle(name)
    processoperations.add(recycle_unit)
    return recycle_unit
def calculator(name="calculator ?"):
    """Create a named calculator unit, register and return it."""
    calc = neqsim.processSimulation.processEquipment.util.Calculator(name)
    processoperations.add(calc)
    return calc
def setpoint(name1, unit1, name2, unit2):
    """Create a SetPoint linking (name1, unit1) to (name2, unit2) and register it."""
    sp = neqsim.processSimulation.processEquipment.util.SetPoint(name1, unit1, name2, unit2)
    processoperations.add(sp)
    return sp
def filters(teststream):
    """Create a filter unit on `teststream`, register and return it."""
    filter_unit = neqsim.processSimulation.processEquipment.filter.Filter(teststream)
    processoperations.add(filter_unit)
    return filter_unit
def compressor(teststream, pres=10.0, name="compressor ?"):
    """Create a compressor on `teststream` with outlet pressure `pres`."""
    comp = neqsim.processSimulation.processEquipment.compressor.Compressor(teststream)
    comp.setOutletPressure(pres)
    comp.setName(name)
    processoperations.add(comp)
    return comp
def compressorChart(compressor, curveConditions, speed, flow, head, polyEff ):
    """Attach performance curves to `compressor`.

    `JDouble[:]` / `JDouble[:][:]` convert the Python sequences into Java
    double[] / double[][] arrays as required by setCurves.
    """
    compressor.getCompressorChart().setCurves(JDouble[:](curveConditions), JDouble[:](speed), JDouble[:][:](flow), JDouble[:][:](head), JDouble[:][:](polyEff))
def compressorSurgeCurve(compressor, curveConditions, surgeflow, surgehead):
    """Set the surge curve on `compressor`'s chart (sequences marshalled to Java double[])."""
    compressor.getCompressorChart().getSurgeCurve().setCurve(JDouble[:](curveConditions), JDouble[:](surgeflow), JDouble[:](surgehead))
def compressorStoneWallCurve(compressor, curveConditions, stoneWallflow, stoneWallHead):
    """Set the stone-wall (choke) curve on `compressor`'s chart (sequences marshalled to Java double[])."""
    compressor.getCompressorChart().getStoneWallCurve().setCurve(JDouble[:](curveConditions), JDouble[:](stoneWallflow), JDouble[:](stoneWallHead))
def pump(teststream, p=1.0, name="pump ?"):
    """Create a pump on `teststream` with outlet pressure `p`."""
    pump_unit = neqsim.processSimulation.processEquipment.pump.Pump(teststream)
    pump_unit.setOutletPressure(p)
    pump_unit.setName(name)
    processoperations.add(pump_unit)
    return pump_unit
def expander(teststream, p, name="expander ?"):
    """Create an expander on `teststream` with outlet pressure `p`."""
    expander_unit = neqsim.processSimulation.processEquipment.expander.Expander(teststream)
    expander_unit.setOutletPressure(p)
    expander_unit.setName(name)
    processoperations.add(expander_unit)
    return expander_unit
def mixer(name=""):
    """Create a static mixer, register and return it."""
    mix = neqsim.processSimulation.processEquipment.mixer.StaticMixer()
    mix.setName(name)
    processoperations.add(mix)
    return mix
def phasemixer(name=""):
    """Create a static phase mixer, register and return it."""
    phase_mix = neqsim.processSimulation.processEquipment.mixer.StaticPhaseMixer()
    phase_mix.setName(name)
    processoperations.add(phase_mix)
    return phase_mix
def nequnit(teststream, equipment="pipeline", flowpattern="stratified", numberOfNodes=100):
    """Create a non-equilibrium unit of the given type/flow pattern on `teststream`."""
    unit = neqsim.processSimulation.processEquipment.util.NeqSimUnit(teststream, equipment, flowpattern)
    unit.setNumberOfNodes(numberOfNodes)
    processoperations.add(unit)
    return unit
def splitter(teststream, splitfactors, name=""):
    """Create a stream splitter on `teststream`.

    One outlet is created per entry in `splitfactors`; the factors are
    marshalled to a Java double[] via JDouble[:].
    """
    split_unit = neqsim.processSimulation.processEquipment.splitter.Splitter(teststream)
    split_unit.setSplitNumber(len(splitfactors))
    split_unit.setSplitFactors(JDouble[:](splitfactors))
    split_unit.setName(name)
    processoperations.add(split_unit)
    return split_unit
def heater(teststream, name=""):
    """Create a heater on `teststream`, register and return it."""
    heater_unit = neqsim.processSimulation.processEquipment.heatExchanger.Heater(teststream)
    heater_unit.setName(name)
    processoperations.add(heater_unit)
    return heater_unit
def simplereservoir(fluid, name="Reservoir 1", gasvolume=10.0 * 1e7, oilvolume=120.0 * 1e6, watervolume=10.0e6):
    """Create a simple reservoir holding `fluid` with the given phase volumes."""
    reservoir = neqsim.processSimulation.processEquipment.reservoir.SimpleReservoir(name)
    reservoir.setReservoirFluid(fluid, gasvolume, oilvolume, watervolume)
    processoperations.add(reservoir)
    return reservoir
def cooler(teststream, name=""):
    """Create a cooler on `teststream`, register and return it."""
    cooler_unit = neqsim.processSimulation.processEquipment.heatExchanger.Cooler(teststream)
    cooler_unit.setName(name)
    processoperations.add(cooler_unit)
    return cooler_unit
def heatExchanger(stream1, stream2=None, name=""):
    """Create a heat exchanger, register it with the process and return it.

    With only `stream1` given a single-stream exchanger is built; when
    `stream2` is also supplied a two-stream exchanger is built.

    Fix: None must be tested with `is None`, not `== None` (PEP 8; the
    original comparison also risks surprises if the stream type overloads
    equality).
    """
    if stream2 is None:
        heater = neqsim.processSimulation.processEquipment.heatExchanger.HeatExchanger(stream1)
    else:
        heater = neqsim.processSimulation.processEquipment.heatExchanger.HeatExchanger(stream1, stream2)
    heater.setName(name)
    processoperations.add(heater)
    return heater
def distillationColumn(trays=5, reboil=True, condenser=True, name="destColumn"):
    """Create a distillation column with `trays` trays and optional reboiler/condenser."""
    column = neqsim.processSimulation.processEquipment.distillation.DistillationColumn(trays, reboil, condenser)
    column.setName(name)
    processoperations.add(column)
    return column
def neqheater(teststream, name=""):
    """Create a non-equilibrium heater on `teststream`, register and return it."""
    neq_heater = neqsim.processSimulation.processEquipment.heatExchanger.NeqHeater(teststream)
    neq_heater.setName(name)
    processoperations.add(neq_heater)
    return neq_heater
def twophasepipe(teststream, position, diameter, height, outTemp, rough):
    """Create a two-phase pipeline on `teststream` from per-leg geometry arrays.

    `position`, `diameter`, `height` and `outTemp` describe the legs; one
    leg is created per pair of consecutive positions, each discretised
    into 60 nodes.

    NOTE(review): the output file path is hard-coded to a Windows
    location ("c:/tempNew20.nc") — consider parameterising it.
    """
    pipe = neqsim.processSimulation.processEquipment.pipeline.TwoPhasePipeLine(teststream)
    pipe.setOutputFileName("c:/tempNew20.nc")
    pipe.setInitialFlowPattern("annular")
    numberOfLegs = len(position) - 1
    numberOfNodesInLeg = 60
    pipe.setNumberOfLegs(numberOfLegs)
    pipe.setNumberOfNodesInLeg(numberOfNodesInLeg)
    pipe.setLegPositions(position)
    pipe.setHeightProfile(height)
    pipe.setPipeDiameters(diameter)
    pipe.setPipeWallRoughness(rough)
    pipe.setOuterTemperatures(outTemp)
    # 0/1 flags: mass transfer treated as non-equilibrium, heat transfer
    # as equilibrium.
    pipe.setEquilibriumMassTransfer(0)
    pipe.setEquilibriumHeatTransfer(1)
    processoperations.add(pipe)
    return pipe
def pipe(teststream, length, deltaElevation, diameter, rough):
    """Create an adiabatic pipe segment on `teststream`.

    The inlet elevation is fixed at 0.0 so `deltaElevation` is the net
    rise (or fall, if negative) over `length`.
    """
    adiabatic_pipe = neqsim.processSimulation.processEquipment.pipeline.AdiabaticPipe(teststream)
    adiabatic_pipe.setDiameter(diameter)
    adiabatic_pipe.setLength(length)
    adiabatic_pipe.setPipeWallRoughness(rough)
    adiabatic_pipe.setInletElevation(0.0)
    adiabatic_pipe.setOutletElevation(deltaElevation)
    processoperations.add(adiabatic_pipe)
    return adiabatic_pipe
def pipeline(teststream, position, diameter, height, outTemp, rough, outerHeatTransferCoefficients, pipeWallHeatTransferCoefficients, numberOfNodesInLeg = 50):
    """Create a single-phase pipeline on `teststream` from per-leg geometry arrays.

    All sequence arguments are marshalled to Java double[] via JDouble[:]
    (unlike `twophasepipe`, which passes the Python sequences through —
    NOTE(review): confirm whether that asymmetry is intentional).  The
    output file path is hard-coded ("c:/tempNew20.nc").
    """
    pipe = neqsim.processSimulation.processEquipment.pipeline.OnePhasePipeLine(teststream)
    pipe.setOutputFileName("c:/tempNew20.nc")
    numberOfLegs = len(position) - 1
    pipe.setNumberOfLegs(numberOfLegs)
    pipe.setNumberOfNodesInLeg(numberOfNodesInLeg)
    pipe.setLegPositions(JDouble[:](position))
    pipe.setHeightProfile(JDouble[:](height))
    pipe.setPipeDiameters(JDouble[:](diameter))
    pipe.setPipeWallRoughness(JDouble[:](rough))
    pipe.setPipeOuterHeatTransferCoefficients(JDouble[:](outerHeatTransferCoefficients))
    pipe.setPipeWallHeatTransferCoefficients(JDouble[:](pipeWallHeatTransferCoefficients))
    pipe.setOuterTemperatures(JDouble[:](outTemp))
    processoperations.add(pipe)
    return pipe
def clear():
    """Remove every unit operation from the active process (same as clearProcess)."""
    processoperations.clearAll()
def run():
    """Solve the active process to steady state (same as runProcess)."""
    processoperations.run()
def clearProcess():
    """Remove every unit operation from the active process (same as clear)."""
    processoperations.clearAll()
def runProcess():
    """Solve the active process to steady state (same as run)."""
    processoperations.run()
def runProcessAsThread(process):
    """Run `process` on a background java.lang.Thread and return the thread.

    Callers can join/monitor the returned thread object.

    Bug fixes:
    * ``jpype.JPackage('java.lang.Thread')`` yields a *package* proxy, not
      the Thread class, so instantiating it fails; ``jpype.JClass`` is the
      correct way to obtain the class.
    * ``Thread.run()`` executes the runnable synchronously on the calling
      thread; ``start()`` is required for the asynchronous behaviour the
      function name advertises.
    """
    Thread = jpype.JClass('java.lang.Thread')
    threadProcess = Thread(process)
    threadProcess.start()
    return threadProcess
def getProcess():
    """Return the module-level ProcessSystem being assembled."""
    return processoperations
def runtrans():
    """Run a transient (time-stepping) simulation of the active process."""
    processoperations.runTransient()
def view():
    """Display the results of the active process (same as viewProcess)."""
    processoperations.displayResult()
def viewProcess():
    """Display the results of the active process (same as view)."""
    processoperations.displayResult()
| true | true |
f72b0759efafb83d0661f521221014ba2f8d3aab | 7,021 | py | Python | tests/graph/test_floyd_warshall.py | aalekhpatel07/retworkx | ae93fcab17d55bc259476c65a677221b4177870a | [
"Apache-2.0"
] | 1 | 2021-11-29T23:15:07.000Z | 2021-11-29T23:15:07.000Z | tests/graph/test_floyd_warshall.py | aalekhpatel07/retworkx | ae93fcab17d55bc259476c65a677221b4177870a | [
"Apache-2.0"
] | 40 | 2020-08-31T06:09:06.000Z | 2022-03-18T19:02:34.000Z | tests/graph/test_floyd_warshall.py | aalekhpatel07/retworkx | ae93fcab17d55bc259476c65a677221b4177870a | [
"Apache-2.0"
] | null | null | null | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import unittest
import numpy
import retworkx
class TestFloydWarshall(unittest.TestCase):
    """Floyd-Warshall shortest-path tests for undirected retworkx.PyGraph.

    ``parallel_threshold`` is forwarded to retworkx on every call; 300 is
    above the node counts used here, so this class exercises the serial
    code path (TestParallelFloydWarshall overrides it to force the
    parallel path).
    """

    parallel_threshold = 300

    def test_vs_dijkstra_all_pairs(self):
        """Floyd-Warshall lengths agree with all-pairs Dijkstra lengths."""
        graph = retworkx.PyGraph()
        a = graph.add_node("A")
        b = graph.add_node("B")
        c = graph.add_node("C")
        d = graph.add_node("D")
        e = graph.add_node("E")
        f = graph.add_node("F")
        edge_list = [
            (a, b, 7),
            (c, a, 9),
            (a, d, 14),
            (b, c, 10),
            (d, c, 2),
            (d, e, 9),
            (b, f, 15),
            (c, f, 11),
            (e, f, 6),
        ]
        graph.add_edges_from(edge_list)
        dijkstra_lengths = retworkx.graph_all_pairs_dijkstra_path_lengths(
            graph, float
        )
        # Dijkstra omits each node's zero-length path to itself;
        # Floyd-Warshall includes it, so add it to the expected mapping.
        expected = {k: {**v, k: 0.0} for k, v in dijkstra_lengths.items()}
        result = retworkx.graph_floyd_warshall(
            graph, float, parallel_threshold=self.parallel_threshold
        )
        self.assertEqual(result, expected)

    def test_vs_dijkstra_all_pairs_with_node_removal(self):
        """Agreement with Dijkstra also holds on a graph with a node hole."""
        graph = retworkx.PyGraph()
        a = graph.add_node("A")
        b = graph.add_node("B")
        c = graph.add_node("C")
        d = graph.add_node("D")
        e = graph.add_node("E")
        f = graph.add_node("F")
        edge_list = [
            (a, b, 7),
            (c, a, 9),
            (a, d, 14),
            (b, c, 10),
            (d, c, 2),
            (d, e, 9),
            (b, f, 15),
            (c, f, 11),
            (e, f, 6),
        ]
        graph.add_edges_from(edge_list)
        # Removing D leaves a gap in the node indices.
        graph.remove_node(d)
        dijkstra_lengths = retworkx.graph_all_pairs_dijkstra_path_lengths(
            graph, float
        )
        expected = {k: {**v, k: 0.0} for k, v in dijkstra_lengths.items()}
        result = retworkx.graph_floyd_warshall(
            graph, float, parallel_threshold=self.parallel_threshold
        )
        self.assertEqual(result, expected)

    def test_floyd_warshall_empty_graph(self):
        """An empty graph yields an empty distance mapping."""
        graph = retworkx.PyGraph()
        self.assertEqual({}, retworkx.graph_floyd_warshall(graph, float))

    def test_floyd_warshall_graph_no_edges(self):
        """With no edges every node maps to an empty neighbour dict."""
        graph = retworkx.PyGraph()
        graph.add_nodes_from(list(range(1000)))
        expected = {x: {} for x in range(1000)}
        self.assertEqual(
            expected,
            retworkx.graph_floyd_warshall(graph, float),
        )

    def test_floyd_warshall_numpy_three_edges(self):
        """Weighted cycle: 0-3 shortest path follows the chain (2+12+1=15)."""
        graph = retworkx.PyGraph()
        graph.add_nodes_from(list(range(6)))
        weights = [2, 12, 1, 5, 1]
        graph.add_edges_from([(i, i + 1, weights[i]) for i in range(5)])
        graph.add_edge(5, 0, 10)
        dist = retworkx.graph_floyd_warshall_numpy(
            graph, lambda x: x, parallel_threshold=self.parallel_threshold
        )
        # The matrix is symmetric because the graph is undirected.
        self.assertEqual(dist[0, 3], 15)
        self.assertEqual(dist[3, 0], 15)

    def test_weighted_numpy_two_edges(self):
        """Weighted 8-cycle: 0-2 via node 1 (2+2=4) beats the long way round (6)."""
        graph = retworkx.PyGraph()
        graph.add_nodes_from(list(range(8)))
        graph.add_edges_from(
            [
                (0, 1, 2),
                (1, 2, 2),
                (2, 3, 1),
                (3, 4, 1),
                (4, 5, 1),
                (5, 6, 1),
                (6, 7, 1),
                (7, 0, 1),
            ]
        )
        dist = retworkx.graph_floyd_warshall_numpy(
            graph, lambda x: x, parallel_threshold=self.parallel_threshold
        )
        self.assertEqual(dist[0, 2], 4)
        self.assertEqual(dist[2, 0], 4)

    def test_weighted_numpy_negative_cycle(self):
        """A negative cycle drives every diagonal entry below zero."""
        graph = retworkx.PyGraph()
        graph.add_nodes_from(list(range(4)))
        graph.add_edges_from(
            [
                (0, 1, 1),
                (1, 2, -1),
                (2, 3, -1),
                (3, 0, -1),
            ]
        )
        dist = retworkx.graph_floyd_warshall_numpy(
            graph, lambda x: x, parallel_threshold=self.parallel_threshold
        )
        self.assertTrue(numpy.all(numpy.diag(dist) < 0))

    def test_floyd_warshall_numpy_cycle(self):
        """Unweighted 7-cycle: hop counts from node 0 match the shorter arc."""
        graph = retworkx.PyGraph()
        graph.add_nodes_from(list(range(7)))
        graph.add_edges_from_no_data(
            [(0, 1), (0, 6), (1, 2), (2, 3), (3, 4), (4, 5), (5, 6)]
        )
        dist = retworkx.graph_floyd_warshall_numpy(
            graph, lambda x: 1, parallel_threshold=self.parallel_threshold
        )
        self.assertEqual(dist[0, 3], 3)
        self.assertEqual(dist[0, 4], 3)

    def test_numpy_no_edges(self):
        """No edges: all off-diagonal distances are inf, the diagonal is 0."""
        graph = retworkx.PyGraph()
        graph.add_nodes_from(list(range(4)))
        dist = retworkx.graph_floyd_warshall_numpy(
            graph, lambda x: x, parallel_threshold=self.parallel_threshold
        )
        expected = numpy.full((4, 4), numpy.inf)
        numpy.fill_diagonal(expected, 0)
        self.assertTrue(numpy.array_equal(dist, expected))

    def test_floyd_warshall_numpy_graph_cycle_with_removals(self):
        """Cycle distances are correct when node indices are non-contiguous."""
        graph = retworkx.PyGraph()
        graph.add_nodes_from(list(range(8)))
        graph.remove_node(0)
        graph.add_edges_from_no_data(
            [(1, 2), (1, 7), (2, 3), (3, 4), (4, 5), (5, 6), (6, 7)]
        )
        dist = retworkx.graph_floyd_warshall_numpy(
            graph, lambda x: 1, parallel_threshold=self.parallel_threshold
        )
        self.assertEqual(dist[0, 3], 3)
        self.assertEqual(dist[0, 4], 3)

    def test_floyd_warshall_numpy_graph_cycle_no_weight_fn(self):
        """Omitting weight_fn falls back to unit edge weights."""
        graph = retworkx.PyGraph()
        graph.add_nodes_from(list(range(8)))
        graph.remove_node(0)
        graph.add_edges_from_no_data(
            [(1, 2), (1, 7), (2, 3), (3, 4), (4, 5), (5, 6), (6, 7)]
        )
        dist = retworkx.graph_floyd_warshall_numpy(graph)
        self.assertEqual(dist[0, 3], 3)
        self.assertEqual(dist[0, 4], 3)

    def test_floyd_warshall_numpy_graph_cycle_default_weight(self):
        """default_weight=2 doubles every hop-count distance."""
        graph = retworkx.PyGraph()
        graph.add_nodes_from(list(range(8)))
        graph.remove_node(0)
        graph.add_edges_from_no_data(
            [(1, 2), (1, 7), (2, 3), (3, 4), (4, 5), (5, 6), (6, 7)]
        )
        dist = retworkx.graph_floyd_warshall_numpy(
            graph, default_weight=2, parallel_threshold=self.parallel_threshold
        )
        self.assertEqual(dist[0, 3], 6)
        self.assertEqual(dist[0, 4], 6)
class TestParallelFloydWarshall(TestFloydWarshall):
    """Re-run the whole suite with parallel_threshold=0 to force the parallel path."""

    parallel_threshold = 0
| 32.808411 | 79 | 0.565874 |
import unittest
import numpy
import retworkx
class TestFloydWarshall(unittest.TestCase):
parallel_threshold = 300
def test_vs_dijkstra_all_pairs(self):
graph = retworkx.PyGraph()
a = graph.add_node("A")
b = graph.add_node("B")
c = graph.add_node("C")
d = graph.add_node("D")
e = graph.add_node("E")
f = graph.add_node("F")
edge_list = [
(a, b, 7),
(c, a, 9),
(a, d, 14),
(b, c, 10),
(d, c, 2),
(d, e, 9),
(b, f, 15),
(c, f, 11),
(e, f, 6),
]
graph.add_edges_from(edge_list)
dijkstra_lengths = retworkx.graph_all_pairs_dijkstra_path_lengths(
graph, float
)
expected = {k: {**v, k: 0.0} for k, v in dijkstra_lengths.items()}
result = retworkx.graph_floyd_warshall(
graph, float, parallel_threshold=self.parallel_threshold
)
self.assertEqual(result, expected)
def test_vs_dijkstra_all_pairs_with_node_removal(self):
graph = retworkx.PyGraph()
a = graph.add_node("A")
b = graph.add_node("B")
c = graph.add_node("C")
d = graph.add_node("D")
e = graph.add_node("E")
f = graph.add_node("F")
edge_list = [
(a, b, 7),
(c, a, 9),
(a, d, 14),
(b, c, 10),
(d, c, 2),
(d, e, 9),
(b, f, 15),
(c, f, 11),
(e, f, 6),
]
graph.add_edges_from(edge_list)
graph.remove_node(d)
dijkstra_lengths = retworkx.graph_all_pairs_dijkstra_path_lengths(
graph, float
)
expected = {k: {**v, k: 0.0} for k, v in dijkstra_lengths.items()}
result = retworkx.graph_floyd_warshall(
graph, float, parallel_threshold=self.parallel_threshold
)
self.assertEqual(result, expected)
def test_floyd_warshall_empty_graph(self):
graph = retworkx.PyGraph()
self.assertEqual({}, retworkx.graph_floyd_warshall(graph, float))
def test_floyd_warshall_graph_no_edges(self):
graph = retworkx.PyGraph()
graph.add_nodes_from(list(range(1000)))
expected = {x: {} for x in range(1000)}
self.assertEqual(
expected,
retworkx.graph_floyd_warshall(graph, float),
)
def test_floyd_warshall_numpy_three_edges(self):
graph = retworkx.PyGraph()
graph.add_nodes_from(list(range(6)))
weights = [2, 12, 1, 5, 1]
graph.add_edges_from([(i, i + 1, weights[i]) for i in range(5)])
graph.add_edge(5, 0, 10)
dist = retworkx.graph_floyd_warshall_numpy(
graph, lambda x: x, parallel_threshold=self.parallel_threshold
)
self.assertEqual(dist[0, 3], 15)
self.assertEqual(dist[3, 0], 15)
def test_weighted_numpy_two_edges(self):
graph = retworkx.PyGraph()
graph.add_nodes_from(list(range(8)))
graph.add_edges_from(
[
(0, 1, 2),
(1, 2, 2),
(2, 3, 1),
(3, 4, 1),
(4, 5, 1),
(5, 6, 1),
(6, 7, 1),
(7, 0, 1),
]
)
dist = retworkx.graph_floyd_warshall_numpy(
graph, lambda x: x, parallel_threshold=self.parallel_threshold
)
self.assertEqual(dist[0, 2], 4)
self.assertEqual(dist[2, 0], 4)
def test_weighted_numpy_negative_cycle(self):
graph = retworkx.PyGraph()
graph.add_nodes_from(list(range(4)))
graph.add_edges_from(
[
(0, 1, 1),
(1, 2, -1),
(2, 3, -1),
(3, 0, -1),
]
)
dist = retworkx.graph_floyd_warshall_numpy(
graph, lambda x: x, parallel_threshold=self.parallel_threshold
)
self.assertTrue(numpy.all(numpy.diag(dist) < 0))
def test_floyd_warshall_numpy_cycle(self):
graph = retworkx.PyGraph()
graph.add_nodes_from(list(range(7)))
graph.add_edges_from_no_data(
[(0, 1), (0, 6), (1, 2), (2, 3), (3, 4), (4, 5), (5, 6)]
)
dist = retworkx.graph_floyd_warshall_numpy(
graph, lambda x: 1, parallel_threshold=self.parallel_threshold
)
self.assertEqual(dist[0, 3], 3)
self.assertEqual(dist[0, 4], 3)
def test_numpy_no_edges(self):
graph = retworkx.PyGraph()
graph.add_nodes_from(list(range(4)))
dist = retworkx.graph_floyd_warshall_numpy(
graph, lambda x: x, parallel_threshold=self.parallel_threshold
)
expected = numpy.full((4, 4), numpy.inf)
numpy.fill_diagonal(expected, 0)
self.assertTrue(numpy.array_equal(dist, expected))
def test_floyd_warshall_numpy_graph_cycle_with_removals(self):
graph = retworkx.PyGraph()
graph.add_nodes_from(list(range(8)))
graph.remove_node(0)
graph.add_edges_from_no_data(
[(1, 2), (1, 7), (2, 3), (3, 4), (4, 5), (5, 6), (6, 7)]
)
dist = retworkx.graph_floyd_warshall_numpy(
graph, lambda x: 1, parallel_threshold=self.parallel_threshold
)
self.assertEqual(dist[0, 3], 3)
self.assertEqual(dist[0, 4], 3)
def test_floyd_warshall_numpy_graph_cycle_no_weight_fn(self):
graph = retworkx.PyGraph()
graph.add_nodes_from(list(range(8)))
graph.remove_node(0)
graph.add_edges_from_no_data(
[(1, 2), (1, 7), (2, 3), (3, 4), (4, 5), (5, 6), (6, 7)]
)
dist = retworkx.graph_floyd_warshall_numpy(graph)
self.assertEqual(dist[0, 3], 3)
self.assertEqual(dist[0, 4], 3)
def test_floyd_warshall_numpy_graph_cycle_default_weight(self):
graph = retworkx.PyGraph()
graph.add_nodes_from(list(range(8)))
graph.remove_node(0)
graph.add_edges_from_no_data(
[(1, 2), (1, 7), (2, 3), (3, 4), (4, 5), (5, 6), (6, 7)]
)
dist = retworkx.graph_floyd_warshall_numpy(
graph, default_weight=2, parallel_threshold=self.parallel_threshold
)
self.assertEqual(dist[0, 3], 6)
self.assertEqual(dist[0, 4], 6)
class TestParallelFloydWarshall(TestFloydWarshall):
parallel_threshold = 0
| true | true |
f72b08276373a7b8064dc7eb363bb32779d3d0ce | 9,830 | py | Python | anima/ui/widgets/task_dashboard.py | MehmetErer/anima | f92ae599b5a4c181fc8e131a9ccdde537e635303 | [
"MIT"
] | 101 | 2015-02-08T22:20:11.000Z | 2022-03-21T18:56:42.000Z | anima/ui/widgets/task_dashboard.py | MehmetErer/anima | f92ae599b5a4c181fc8e131a9ccdde537e635303 | [
"MIT"
] | 23 | 2016-11-30T08:33:21.000Z | 2021-01-26T12:11:12.000Z | anima/ui/widgets/task_dashboard.py | MehmetErer/anima | f92ae599b5a4c181fc8e131a9ccdde537e635303 | [
"MIT"
] | 27 | 2015-01-03T06:49:45.000Z | 2021-12-28T03:30:54.000Z | # -*- coding: utf-8 -*-
from anima.ui.lib import QtCore, QtWidgets
class TaskDashboardWidget(QtWidgets.QWidget):
    """A widget that displays task related information.

    Shows a Stalker Task's thumbnail, detail/timing panels, description
    editor (persisted to the database on edit), responsible/resource
    panels, version-usage info, status label, progress bar and notes.
    Assigning to :attr:`task` refreshes every child widget.
    """

    def __init__(self, task=None, parent=None, **kwargs):
        """Pre-declare child-widget slots, build the UI and assign the initial task."""
        self._task = None
        self.parent = parent
        super(TaskDashboardWidget, self).__init__(parent=parent)

        # storage for UI stuff
        self.vertical_layout = None
        self.widget_label = None
        self.task_thumbnail_widget = None
        self.schedule_info_form_layout = None
        self.task_detail_widget = None
        self.task_timing_widget = None
        self.description_label = None
        self.description_field = None
        # Guard flag: True while the description field is being updated
        # programmatically, so the textChanged handler does not re-enter.
        self.description_field_is_updating = False

        self.responsible_info_widget = None
        self.resource_info_widget = None
        self.task_versions_usage_info_widget = None
        self.watch_task_button = None
        self.fix_task_status_button = None
        self.task_status_label = None
        self.task_progress = None
        self.task_notes_widget = None

        self._setup_ui()

        # Triggers the `task` property setter, which populates the UI.
        self.task = task

    def _setup_ui(self):
        """create the UI widgets
        """
        # we need a main layout
        # may be a vertical one
        # or a form layout
        self.vertical_layout = QtWidgets.QVBoxLayout(self)

        # -------------------------
        # Dialog Label and buttons
        horizontal_layout3 = QtWidgets.QHBoxLayout()
        self.vertical_layout.addLayout(horizontal_layout3)

        self.widget_label = QtWidgets.QLabel(self)
        self.widget_label.setStyleSheet(
            "color: rgb(71, 143, 202);\nfont: 18pt;"
        )
        horizontal_layout3.addWidget(self.widget_label)
        horizontal_layout3.addStretch(1)

        # Add Watch Task button
        # NOTE(review): no slot is connected to this button here — confirm
        # whether watching is wired up elsewhere or still unimplemented.
        self.watch_task_button = QtWidgets.QPushButton(self)
        self.watch_task_button.setMaximumWidth(24)
        self.watch_task_button.setMaximumHeight(24)
        self.watch_task_button.setText("W")
        self.watch_task_button.setToolTip("Watch Task")

        self.fix_task_status_button = QtWidgets.QPushButton(self)
        self.fix_task_status_button.setMaximumWidth(24)
        self.fix_task_status_button.setMaximumHeight(24)
        self.fix_task_status_button.setText("F")
        self.fix_task_status_button.setToolTip("Fix Task Status")

        horizontal_layout3.addWidget(self.watch_task_button)
        horizontal_layout3.addWidget(self.fix_task_status_button)

        # Old-style signal connection (kept for the Qt bindings in use).
        QtCore.QObject.connect(
            self.fix_task_status_button,
            QtCore.SIGNAL("clicked()"),
            self.fix_task_status
        )

        # Add Status Label
        vertical_layout3 = QtWidgets.QVBoxLayout()
        from anima.ui.widgets.task_status_label import TaskStatusLabel
        self.task_status_label = TaskStatusLabel(task=self.task)
        self.task_status_label.setMaximumHeight(12)
        vertical_layout3.addWidget(self.task_status_label)

        # Add ProgressBar
        self.task_progress = QtWidgets.QProgressBar(self)
        self.task_progress.setMinimum(0)
        self.task_progress.setMaximum(100)
        self.task_progress.setValue(50)
        self.task_progress.setAlignment(QtCore.Qt.AlignCenter)
        self.task_progress.setMaximumHeight(12)
        self.task_progress.setStyleSheet("""
            QProgressBar::chunk {
                background-color: #3add36;
                width: 1px;
            }
        """)
        vertical_layout3.addWidget(self.task_progress)

        # set items closer to each other
        vertical_layout3.setSpacing(0)

        horizontal_layout3.addLayout(vertical_layout3)

        # Add divider
        line = QtWidgets.QFrame(self)
        line.setFrameShape(QtWidgets.QFrame.HLine)
        line.setFrameShadow(QtWidgets.QFrame.Sunken)
        self.vertical_layout.addWidget(line)

        horizontal_layout1 = QtWidgets.QHBoxLayout()
        self.vertical_layout.addLayout(horizontal_layout1)

        vertical_layout1 = QtWidgets.QVBoxLayout()
        vertical_layout2 = QtWidgets.QVBoxLayout()

        horizontal_layout1.addLayout(vertical_layout1)
        horizontal_layout1.addLayout(vertical_layout2)

        # --------------------------
        # Horizontal Layout for thumbnail and detail widgets
        horizontal_layout2 = QtWidgets.QHBoxLayout()
        vertical_layout1.addLayout(horizontal_layout2)

        # --------------------------
        # Task Thumbnail
        from anima.ui.widgets.entity_thumbnail import EntityThumbnailWidget
        self.task_thumbnail_widget = EntityThumbnailWidget(task=self.task, parent=self)

        horizontal_layout2.addWidget(self.task_thumbnail_widget)

        # --------------------------
        # Task Detail Info
        from anima.ui.widgets.task_detail import TaskDetailWidget
        self.task_detail_widget = TaskDetailWidget(task=self.task, parent=self)

        horizontal_layout2.addWidget(self.task_detail_widget)

        # --------------------------
        # Task Timing Info
        from anima.ui.widgets.task_timing import TaskTimingInfoWidget
        self.task_timing_widget = TaskTimingInfoWidget(task=self.task, parent=self)

        horizontal_layout2.addWidget(self.task_timing_widget)

        # --------------------------
        # Description field
        self.description_label = QtWidgets.QLabel(self)
        self.description_label.setStyleSheet("""
            background-color: gray;
            color: white;
            font-weight: bold;
            padding: 0.5em;
        """)
        self.description_label.setText("Description")

        self.description_field = QtWidgets.QTextEdit(self)
        self.description_field.setAcceptRichText(True)
        vertical_layout1.addWidget(self.description_label)
        vertical_layout1.addWidget(self.description_field)

        # add stretcher
        vertical_layout1.addStretch(1)

        # connect signal
        self.description_field.textChanged.connect(self.update_description)

        # ---------------------------
        # Responsible Info
        from anima.ui.widgets.responsible_info import ResponsibleInfoWidget
        self.responsible_info_widget = ResponsibleInfoWidget(
            task=self.task, parent=self
        )
        vertical_layout2.addWidget(self.responsible_info_widget)

        # ---------------------------
        # Resource Info
        from anima.ui.widgets.resource_info import ResourceInfoWidget
        self.resource_info_widget = ResourceInfoWidget(
            task=self.task, parent=self
        )
        vertical_layout2.addWidget(self.resource_info_widget)

        # ---------------------------
        # Task Versions Usage Info
        from anima.ui.widgets.task_version_usage_info import \
            TaskVersionUsageInfoWidget
        self.task_versions_usage_info_widget = TaskVersionUsageInfoWidget(
            task=self.task, parent=self
        )
        vertical_layout2.addWidget(self.task_versions_usage_info_widget)

        vertical_layout2.addStretch(1)

        # Left column gets twice the width of the right column.
        horizontal_layout1.setStretch(0, 2)
        horizontal_layout1.setStretch(1, 1)

        # ---------------------------
        # Task Notes
        from anima.ui.widgets.entity_notes import EntityNotesWidgets
        self.task_notes_widget = EntityNotesWidgets(entity=self.task, parent=self)
        self.vertical_layout.addWidget(self.task_notes_widget)

    @property
    def task(self):
        """getter for the _task attribute
        """
        return self._task

    @task.setter
    def task(self, task):
        """setter for the task attribute

        Accepts a stalker Task (anything else resets the widget to its
        empty state) and refreshes all child widgets.
        """
        from stalker import Task
        if isinstance(task, Task):
            self._task = task
        else:
            self._task = None

        if self._task:
            # Raise the guard so setText() below does not trigger
            # update_description via the textChanged signal.
            self.description_field_is_updating = True
            self.description_field.setText(self._task.description)
            self.description_field_is_updating = False
            self.task_progress.setValue(self._task.percent_complete)
        else:
            self.description_field_is_updating = True
            self.description_field.setText('')
            self.description_field_is_updating = False
            self.task_progress.setValue(0)

        self.widget_label.setText(self._task.name if self._task else 'Task Name')
        self.task_thumbnail_widget.task = self._task
        self.task_detail_widget.task = self._task
        self.task_timing_widget.task = self._task
        self.task_status_label.task = self._task
        self.task_notes_widget.task = self._task

    def fix_task_status(self):
        """fix current task status

        Recomputes the task's status and computed times and commits the
        change.  NOTE(review): the `assert` is stripped under `python -O`;
        consider an explicit type check.
        """
        from stalker import Task
        assert isinstance(self.task, Task)

        from anima import utils
        utils.fix_task_statuses(self.task)
        utils.fix_task_computed_time(self.task)

        from stalker.db.session import DBSession
        DBSession.add(self.task)
        DBSession.commit()

    def update_description(self):
        """runs when description field has changed

        Persists the edited description to the database.  The guard flag
        skips programmatic setText() calls and prevents re-entry while
        the commit is in flight.  Note this commits on *every* text
        change (i.e. per keystroke).
        """
        if self.description_field_is_updating:
            return
        self.description_field_is_updating = True
        self.task.description = self.description_field.toPlainText()
        from stalker.db.session import DBSession
        DBSession.add(self.task)
        DBSession.commit()
        self.description_field_is_updating = False
| 35.487365 | 87 | 0.649135 |
from anima.ui.lib import QtCore, QtWidgets
class TaskDashboardWidget(QtWidgets.QWidget):
def __init__(self, task=None, parent=None, **kwargs):
self._task = None
self.parent = parent
super(TaskDashboardWidget, self).__init__(parent=parent)
self.vertical_layout = None
self.widget_label = None
self.task_thumbnail_widget = None
self.schedule_info_form_layout = None
self.task_detail_widget = None
self.task_timing_widget = None
self.description_label = None
self.description_field = None
self.description_field_is_updating = False
self.responsible_info_widget = None
self.resource_info_widget = None
self.task_versions_usage_info_widget = None
self.watch_task_button = None
self.fix_task_status_button = None
self.task_status_label = None
self.task_progress = None
self.task_notes_widget = None
self._setup_ui()
self.task = task
def _setup_ui(self):
self.vertical_layout = QtWidgets.QVBoxLayout(self)
horizontal_layout3 = QtWidgets.QHBoxLayout()
self.vertical_layout.addLayout(horizontal_layout3)
self.widget_label = QtWidgets.QLabel(self)
self.widget_label.setStyleSheet(
"color: rgb(71, 143, 202);\nfont: 18pt;"
)
horizontal_layout3.addWidget(self.widget_label)
horizontal_layout3.addStretch(1)
self.watch_task_button = QtWidgets.QPushButton(self)
self.watch_task_button.setMaximumWidth(24)
self.watch_task_button.setMaximumHeight(24)
self.watch_task_button.setText("W")
self.watch_task_button.setToolTip("Watch Task")
self.fix_task_status_button = QtWidgets.QPushButton(self)
self.fix_task_status_button.setMaximumWidth(24)
self.fix_task_status_button.setMaximumHeight(24)
self.fix_task_status_button.setText("F")
self.fix_task_status_button.setToolTip("Fix Task Status")
horizontal_layout3.addWidget(self.watch_task_button)
horizontal_layout3.addWidget(self.fix_task_status_button)
QtCore.QObject.connect(
self.fix_task_status_button,
QtCore.SIGNAL("clicked()"),
self.fix_task_status
)
vertical_layout3 = QtWidgets.QVBoxLayout()
from anima.ui.widgets.task_status_label import TaskStatusLabel
self.task_status_label = TaskStatusLabel(task=self.task)
self.task_status_label.setMaximumHeight(12)
vertical_layout3.addWidget(self.task_status_label)
self.task_progress = QtWidgets.QProgressBar(self)
self.task_progress.setMinimum(0)
self.task_progress.setMaximum(100)
self.task_progress.setValue(50)
self.task_progress.setAlignment(QtCore.Qt.AlignCenter)
self.task_progress.setMaximumHeight(12)
self.task_progress.setStyleSheet("""
QProgressBar::chunk {
background-color: #3add36;
width: 1px;
}
""")
vertical_layout3.addWidget(self.task_progress)
vertical_layout3.setSpacing(0)
horizontal_layout3.addLayout(vertical_layout3)
line = QtWidgets.QFrame(self)
line.setFrameShape(QtWidgets.QFrame.HLine)
line.setFrameShadow(QtWidgets.QFrame.Sunken)
self.vertical_layout.addWidget(line)
horizontal_layout1 = QtWidgets.QHBoxLayout()
self.vertical_layout.addLayout(horizontal_layout1)
vertical_layout1 = QtWidgets.QVBoxLayout()
vertical_layout2 = QtWidgets.QVBoxLayout()
horizontal_layout1.addLayout(vertical_layout1)
horizontal_layout1.addLayout(vertical_layout2)
horizontal_layout2 = QtWidgets.QHBoxLayout()
vertical_layout1.addLayout(horizontal_layout2)
from anima.ui.widgets.entity_thumbnail import EntityThumbnailWidget
self.task_thumbnail_widget = EntityThumbnailWidget(task=self.task, parent=self)
horizontal_layout2.addWidget(self.task_thumbnail_widget)
from anima.ui.widgets.task_detail import TaskDetailWidget
self.task_detail_widget = TaskDetailWidget(task=self.task, parent=self)
horizontal_layout2.addWidget(self.task_detail_widget)
from anima.ui.widgets.task_timing import TaskTimingInfoWidget
self.task_timing_widget = TaskTimingInfoWidget(task=self.task, parent=self)
horizontal_layout2.addWidget(self.task_timing_widget)
self.description_label = QtWidgets.QLabel(self)
self.description_label.setStyleSheet("""
background-color: gray;
color: white;
font-weight: bold;
padding: 0.5em;
""")
self.description_label.setText("Description")
self.description_field = QtWidgets.QTextEdit(self)
self.description_field.setAcceptRichText(True)
vertical_layout1.addWidget(self.description_label)
vertical_layout1.addWidget(self.description_field)
vertical_layout1.addStretch(1)
self.description_field.textChanged.connect(self.update_description)
from anima.ui.widgets.responsible_info import ResponsibleInfoWidget
self.responsible_info_widget = ResponsibleInfoWidget(
task=self.task, parent=self
)
vertical_layout2.addWidget(self.responsible_info_widget)
from anima.ui.widgets.resource_info import ResourceInfoWidget
self.resource_info_widget = ResourceInfoWidget(
task=self.task, parent=self
)
vertical_layout2.addWidget(self.resource_info_widget)
from anima.ui.widgets.task_version_usage_info import \
TaskVersionUsageInfoWidget
self.task_versions_usage_info_widget = TaskVersionUsageInfoWidget(
task=self.task, parent=self
)
vertical_layout2.addWidget(self.task_versions_usage_info_widget)
vertical_layout2.addStretch(1)
horizontal_layout1.setStretch(0, 2)
horizontal_layout1.setStretch(1, 1)
from anima.ui.widgets.entity_notes import EntityNotesWidgets
self.task_notes_widget = EntityNotesWidgets(entity=self.task, parent=self)
self.vertical_layout.addWidget(self.task_notes_widget)
@property
def task(self):
return self._task
@task.setter
def task(self, task):
from stalker import Task
if isinstance(task, Task):
self._task = task
else:
self._task = None
if self._task:
self.description_field_is_updating = True
self.description_field.setText(self._task.description)
self.description_field_is_updating = False
self.task_progress.setValue(self._task.percent_complete)
else:
self.description_field_is_updating = True
self.description_field.setText('')
self.description_field_is_updating = False
self.task_progress.setValue(0)
self.widget_label.setText(self._task.name if self._task else 'Task Name')
self.task_thumbnail_widget.task = self._task
self.task_detail_widget.task = self._task
self.task_timing_widget.task = self._task
self.task_status_label.task = self._task
self.task_notes_widget.task = self._task
def fix_task_status(self):
from stalker import Task
assert isinstance(self.task, Task)
from anima import utils
utils.fix_task_statuses(self.task)
utils.fix_task_computed_time(self.task)
from stalker.db.session import DBSession
DBSession.add(self.task)
DBSession.commit()
def update_description(self):
if self.description_field_is_updating:
return
self.description_field_is_updating = True
self.task.description = self.description_field.toPlainText()
from stalker.db.session import DBSession
DBSession.add(self.task)
DBSession.commit()
self.description_field_is_updating = False
| true | true |
f72b08b59e5cb86bba78fc94a90a6d1fa03c18e3 | 6,363 | py | Python | lsdr/envs/analysis.py | melfm/lsdr | 36b0a85e970fdcaae828eeff6c147432aa767c93 | [
"MIT"
] | 3 | 2019-09-20T19:10:50.000Z | 2021-12-30T02:55:21.000Z | lsdr/envs/analysis.py | melfm/lsdr | 36b0a85e970fdcaae828eeff6c147432aa767c93 | [
"MIT"
] | null | null | null | lsdr/envs/analysis.py | melfm/lsdr | 36b0a85e970fdcaae828eeff6c147432aa767c93 | [
"MIT"
] | 1 | 2020-08-01T21:28:12.000Z | 2020-08-01T21:28:12.000Z | import numpy as np
import torch
import matplotlib.pyplot as plt
import os
import math
import scipy.stats as stats
import lsdr.envs.environment_sampler as env_sampler
from enum import IntEnum
############################
# Optimization Loss Opt
############################
class Objectives(IntEnum):
    """Selects which term(s) the distribution optimizer minimizes."""
    REWARDS = 1  # maximize expected reward only (score-function estimator)
    KL_OPT = 2  # minimize the Monte Carlo KL(p_test || p_train) only
    REW_AND_KL = 3  # combined objective: -reward + alpha * KL
def reward_function(x):
    """Gaussian-shaped reward, peaked at x = 20 with unit width."""
    offset = x - 20
    return np.exp(-offset * offset)
def reward_function_v2(x):
    """Sinusoidal reward sin(|x|).

    Uses ``np.abs`` instead of the previous ``np.sqrt(x**2)``: the two are
    mathematically identical for real inputs, but ``x**2`` overflows to inf
    for |x| > ~1e154 (float64), which turned the result into NaN.
    """
    return np.sin(np.abs(x))
def calculate_reward(x):
    """Evaluate the active reward function (currently ``reward_function``)."""
    return reward_function(x)
def setup_distributions():
    """Build the environment sampler plus train/test Gaussian distributions.

    Both distributions are moment-matched to the same uniform ranges in
    ``test_params``, so they start out identical; edit ``test_params`` (or
    pass different ranges per distribution) to make them differ. The test
    distribution is fixed (no gradients); the train distribution is owned
    and updated by the sampler.

    :returns: ``(sampler, p_train, p_test)``.
    """
    # Uniform parameter ranges the initial Gaussians are matched to.
    test_params = [
        np.array([-30.0, 50.0])
    ]

    def _moments_from_ranges(params):
        # Mean and Cholesky factor of a Gaussian moment-matched to
        # independent uniforms over [lo, hi]: var = (hi - lo)^2 / 12.
        ranges = np.asarray(params)
        mean = ranges.mean(-1)
        covar = (((ranges[:, 1] - ranges[:, 0]) ** 2.0) / 12.0) * np.eye(
            ranges.shape[0])
        return mean, np.linalg.cholesky(covar)

    # Train distribution parameters (previously computed twice verbatim).
    mu_train, L_train = _moments_from_ranges(test_params)
    dist_params = [mu_train, L_train]

    sampler = env_sampler.init_env_sampler(
        'hopper',
        seed=0,
        experiment_id='test_kl_div_loss_0',
        init_dist_params=dist_params,
        dist_type='gaussian',
        test_dist_params=None)

    # The sampler owns (and will optimize) the train distribution.
    p_train = sampler.train_dist

    # Fixed test distribution with the same initial moments.
    mu_test, L_test = _moments_from_ranges(test_params)
    mu_test = torch.tensor(mu_test).float().detach().requires_grad_(False)
    L_test = torch.tensor(L_test).float().detach().requires_grad_(False)
    p_test = torch.distributions.MultivariateNormal(mu_test,
                                                    scale_tril=L_test)

    train_mean = p_train.mean.detach()
    train_std = (p_train._unbroadcasted_scale_tril).diag().detach()
    test_mean = p_test.mean.detach()
    test_std = (p_test._unbroadcasted_scale_tril).diag().detach()

    print('Initial Distributions')
    print('Train Distribution Mean ', train_mean)
    print('Train Distribution STD ', train_std)
    print('Test Distribution Mean ', test_mean)
    print('Test Distribution STD ', test_std)

    plot_distrs(train_mean, train_std,
                test_mean, test_std,
                plot_name='initial_train_distr')

    return sampler, p_train, p_test
def plot_distrs(train_mean, train_var,
                test_mean, test_var,
                plot_name='distributions'):
    """Plot the train/test Gaussian densities and the reward curve to a PNG.

    ``train_var`` / ``test_var`` are *standard deviations*: both call sites
    in this module pass the diagonal of the Cholesky factor of the
    covariance. The previous implementation applied ``math.sqrt`` to them
    as if they were variances, plotting densities with sqrt(std) spread.
    NOTE(review): if an external caller really passes a variance, restore
    the sqrt at that call site instead.

    :param plot_name: output file name (no extension) under grad_analysis/.
    """
    plt.figure()

    mu = train_mean
    sigma = float(train_var)  # bug fix: already a std dev, no sqrt
    x = np.linspace(mu - 3 * sigma, mu + 3 * sigma, 100)
    # Raw strings: the labels contain backslashes that are invalid
    # string-literal escapes otherwise; the rendered text is unchanged.
    plt.plot(x, stats.norm.pdf(x, mu, sigma), color='green',
             label=r'$p_{\phi}(z)$',
             linestyle='-.')

    mu = test_mean
    sigma = float(test_var)  # bug fix: already a std dev, no sqrt
    x = np.linspace(mu - 3 * sigma, mu + 3 * sigma, 100)
    plt.plot(x, stats.norm.pdf(x, mu, sigma), color='red', label=r'$p(z)$')

    rew_func_range = np.arange(-20, 50, 1)
    plt.plot(rew_func_range, calculate_reward(rew_func_range),
             color='orange',
             label=r'$R(\Theta, z)$')
    plt.legend(loc='upper left')

    res_dir = 'grad_analysis'
    os.makedirs(res_dir, exist_ok=True)
    plotname = res_dir + '/' + plot_name + '.png'
    plt.savefig(plotname)
def optimize_distribution(sampler, p_train, p_test, objective_opt):
    """Optimize the train distribution's parameters against ``objective_opt``.

    Runs Adam on ``sampler.params`` (the train distribution's mean and
    Cholesky factor), then prints the final moments and saves a plot of
    the resulting densities.

    Removed: ``mu_grads``/``var_grads`` and the two ``store_*_grad_rew``
    hook closures, which were never registered on any tensor and never
    read (dead code).

    :param objective_opt: one of the ``Objectives`` members.
    """
    epochs, n_samples = 10000, 1000
    alpha = 1e-5  # weight of the KL term in the combined objective
    opt = torch.optim.Adam(sampler.params, 1e-2)

    for _ in range(epochs):
        opt.zero_grad()

        # z ~ p_test feeds the KL estimate; contexts ~ p_train feed the
        # score-function (REINFORCE) reward estimator.
        z = p_test.sample(torch.Size([n_samples]))
        contexts = p_train.sample(torch.Size([n_samples]))

        log_p_train = p_train.log_prob(z)
        log_p_test = p_test.log_prob(z)

        # Monte Carlo estimate of KL(p_test || p_train).
        kl_samples = log_p_test - log_p_train
        kl_loss = kl_samples.mean(0)

        # Score-function estimator of the expected reward under p_train.
        log_probs_context = p_train.log_prob(contexts)
        reward_loss = (calculate_reward(contexts) * log_probs_context).mean(0)

        if objective_opt == Objectives.REWARDS:
            # For this to converge to the reward function,
            # need to change `z` sampling to be from train
            # distribution.
            total_loss = - reward_loss
        elif objective_opt == Objectives.KL_OPT:
            total_loss = kl_loss
        elif objective_opt == Objectives.REW_AND_KL:
            total_loss = (-(reward_loss) + (alpha*kl_loss))
        else:
            raise ValueError('Invalid op')

        total_loss.mean().backward()
        opt.step()

    train_mean = p_train.mean.detach()
    train_std = (p_train._unbroadcasted_scale_tril).diag().detach()
    test_mean = p_test.mean.detach()
    test_std = (p_test._unbroadcasted_scale_tril).diag().detach()

    print('Updated Distributions')
    print('######################')
    print('Train Distribution Mean ', train_mean)
    print('Train Distribution STD ', train_std)
    print('Test Distribution Mean ', test_mean)
    print('Test Distribution STD ', test_std)

    plot_distrs(train_mean, train_std,
                test_mean, test_std,
                plot_name='final_distributions')
if __name__ == '__main__':
    sampler, p_train, p_test = setup_distributions()
    # Pick the objective to optimize; the alternatives are kept here
    # commented out for quick manual experimentation.
    # objective_opt = Objectives.REWARDS
    # objective_opt = Objectives.KL_OPT
    objective_opt = Objectives.REW_AND_KL
    optimize_distribution(sampler,
                          p_train,
                          p_test,
                          objective_opt)
| 28.28 | 78 | 0.573157 | import numpy as np
import torch
import matplotlib.pyplot as plt
import os
import math
import scipy.stats as stats
import lsdr.envs.environment_sampler as env_sampler
from enum import IntEnum
objective_opt)
| true | true |
f72b09030b2c9ba7bc22260ba632e1a45e870da9 | 1,020 | py | Python | examples/pitz_daily/pitz_daily_runner.py | ImperialCollegeLondon/al_cfd_benchmark | 03b51d7e7d4def804e2ac18084deee8401636851 | [
"MIT"
] | 6 | 2020-09-27T00:14:48.000Z | 2021-11-23T03:35:09.000Z | examples/pitz_daily/pitz_daily_runner.py | ImperialCollegeLondon/al_cfd_benchmark | 03b51d7e7d4def804e2ac18084deee8401636851 | [
"MIT"
] | null | null | null | examples/pitz_daily/pitz_daily_runner.py | ImperialCollegeLondon/al_cfd_benchmark | 03b51d7e7d4def804e2ac18084deee8401636851 | [
"MIT"
] | 2 | 2020-09-27T17:40:33.000Z | 2021-12-13T02:31:49.000Z | # -*- coding: utf-8 -*-
"""Pitz Daily
This case uses the pitzDaily example from the OpenFOAM tutorials
and varies two parameters: Reynolds number and height of the inlet.
It returns the pressure difference between inlet and outlet.
"""
import numpy as np
from active_learning_cfd.cfd_case import CFDCase
import os
class PitzDaily(CFDCase):
mesher = "blockMesh"
solver = "simpleFoam"
template = "pitzDaily"
parameter_names = ("reynolds", "entryHeight")
output_list = (("deltaP", "subtract\(p\) = (.+)"),)
def __call__(self, parameters):
assert len(parameters) == len(self.parameter_names)
parameter_dict = dict(zip(self.parameter_names, parameters))
parameter_dict["reynolds"] = np.power(10, parameter_dict["reynolds"])
self.solve(parameter_dict)
return self.results["deltaP"]
if __name__ == "__main__":
case = PitzDaily()
reynolds = 50800.0
entryHeight = 25.4
print("deltaP = {}".format(case([np.log10(reynolds), entryHeight])))
| 28.333333 | 77 | 0.683333 |
import numpy as np
from active_learning_cfd.cfd_case import CFDCase
import os
class PitzDaily(CFDCase):
mesher = "blockMesh"
solver = "simpleFoam"
template = "pitzDaily"
parameter_names = ("reynolds", "entryHeight")
output_list = (("deltaP", "subtract\(p\) = (.+)"),)
def __call__(self, parameters):
assert len(parameters) == len(self.parameter_names)
parameter_dict = dict(zip(self.parameter_names, parameters))
parameter_dict["reynolds"] = np.power(10, parameter_dict["reynolds"])
self.solve(parameter_dict)
return self.results["deltaP"]
if __name__ == "__main__":
case = PitzDaily()
reynolds = 50800.0
entryHeight = 25.4
print("deltaP = {}".format(case([np.log10(reynolds), entryHeight])))
| true | true |
f72b091c4068f3540061214d903965fad918e1a4 | 5,557 | py | Python | cogdl/oag/dual_position_bert_model.py | li-ziang/cogdl | 60022d3334e3abae2d2a505e6e049a26acf10f39 | [
"MIT"
] | 6 | 2020-07-09T02:48:41.000Z | 2021-06-16T09:04:14.000Z | cogdl/oag/dual_position_bert_model.py | li-ziang/cogdl | 60022d3334e3abae2d2a505e6e049a26acf10f39 | [
"MIT"
] | null | null | null | cogdl/oag/dual_position_bert_model.py | li-ziang/cogdl | 60022d3334e3abae2d2a505e6e049a26acf10f39 | [
"MIT"
] | 1 | 2020-05-19T11:45:45.000Z | 2020-05-19T11:45:45.000Z | import torch
from torch import nn
from torch.nn import CrossEntropyLoss
import logging
from .bert_model import BertPreTrainedModel, BertPreTrainingHeads, BertModel, BertEncoder, BertPooler, BertLayerNorm
logger = logging.getLogger(__name__)
class DualPositionBertEmbeddings(nn.Module):
    """Construct the embeddings from word, token_type and *two* position-id
    sequences, summed together."""

    def __init__(self, config):
        super(DualPositionBertEmbeddings, self).__init__()
        self.word_embeddings = nn.Embedding(config.vocab_size, config.hidden_size)
        self.position_embeddings = nn.Embedding(config.max_position_embeddings, config.hidden_size)
        self.position_embeddings_second = nn.Embedding(config.max_position_embeddings, config.hidden_size)
        self.token_type_embeddings = nn.Embedding(config.type_vocab_size, config.hidden_size)

        # self.LayerNorm is not snake-cased to stick with TensorFlow model variable name and be able to load
        # any TensorFlow checkpoint file
        self.LayerNorm = BertLayerNorm(config.hidden_size, eps=1e-12)
        self.dropout = nn.Dropout(config.hidden_dropout_prob)

    def forward(self, input_ids, token_type_ids, position_ids, position_ids_second):
        if token_type_ids is None:
            token_type_ids = torch.zeros_like(input_ids)
        words_embeddings = self.word_embeddings(input_ids)
        position_embeddings = self.position_embeddings(position_ids)
        # NOTE(review): position_ids_second is looked up in
        # self.position_embeddings rather than
        # self.position_embeddings_second, leaving the second table unused
        # (it receives no gradient). This looks like a bug, but "fixing" it
        # would change outputs under any checkpoint pretrained with this
        # code -- confirm against the released weights before changing.
        position_embeddings_second = self.position_embeddings(position_ids_second)
        token_type_embeddings = self.token_type_embeddings(token_type_ids)

        embeddings = words_embeddings + position_embeddings + position_embeddings_second + token_type_embeddings
        embeddings = self.LayerNorm(embeddings)
        embeddings = self.dropout(embeddings)
        return embeddings
class DualPositionBertModel(BertModel):
    """``BertModel`` variant whose embedding layer takes two position-id
    sequences; encoder and pooler are the standard BERT components."""

    def __init__(self, config):
        super(DualPositionBertModel, self).__init__(config)
        self.embeddings = DualPositionBertEmbeddings(config)
        self.encoder = BertEncoder(config)
        self.pooler = BertPooler(config)
        self.apply(self.init_bert_weights)
        logger.info("Init BERT pretrain model")

    def forward(
        self,
        input_ids,
        token_type_ids=None,
        attention_mask=None,
        output_all_encoded_layers=True,
        checkpoint_activations=False,
        position_ids=None,
        position_ids_second=None,
    ):
        """Encode ``input_ids`` and return ``(encoded_layers, pooled_output)``.

        ``attention_mask`` may be 2-D ``(batch, seq)`` or 3-D
        ``(batch, seq, seq)``; any other rank raises ``ValueError``.
        """
        if attention_mask is None:
            attention_mask = torch.ones_like(input_ids)
        if token_type_ids is None:
            token_type_ids = torch.zeros_like(input_ids)

        # Broadcast the mask so it can be added to the attention scores.
        if len(attention_mask.shape) == 2:
            extended_attention_mask = attention_mask.unsqueeze(1).unsqueeze(2)
        elif len(attention_mask.shape) == 3:
            extended_attention_mask = attention_mask.unsqueeze(1)
        else:
            # Bug fix: the old `"%s" % (attention_mask.shape)` passed a
            # torch.Size (a tuple) as the %-argument list, so any
            # multi-dimensional mask reaching this branch raised a
            # TypeError instead of the intended message. ValueError is also
            # more precise than the bare Exception raised before and stays
            # catchable by existing `except Exception` handlers.
            raise ValueError("invalid attention mask shape! shape: %s" % (attention_mask.shape,))

        extended_attention_mask = extended_attention_mask.to(dtype=next(self.parameters()).dtype)  # fp16 compatibility
        # Masked positions become a large negative additive bias.
        extended_attention_mask = (1.0 - extended_attention_mask) * -10000.0

        embedding_output = self.embeddings(input_ids, token_type_ids, position_ids, position_ids_second)
        encoded_layers = self.encoder(
            embedding_output,
            extended_attention_mask,
            output_all_encoded_layers=output_all_encoded_layers,
            checkpoint_activations=checkpoint_activations,
        )
        sequence_output = encoded_layers[-1]
        pooled_output = self.pooler(sequence_output)
        if not output_all_encoded_layers:
            encoded_layers = encoded_layers[-1]
        return encoded_layers, pooled_output
class DualPositionBertForPreTrainingPreLN(BertPreTrainedModel):
    """BERT model with pre-training heads and dual position embeddings.

    Params:
        config: a BertConfig class instance with the configuration to build a new model.
    """

    def __init__(self, config):
        super(DualPositionBertForPreTrainingPreLN, self).__init__(config)
        self.bert = DualPositionBertModel(config)
        # The LM head shares (ties) the input word-embedding matrix.
        self.cls = BertPreTrainingHeads(config, self.bert.embeddings.word_embeddings.weight)
        self.apply(self.init_bert_weights)

    def forward(
        self,
        input_ids,
        token_type_ids=None,
        attention_mask=None,
        masked_lm_labels=None,
        position_ids=None,
        position_ids_second=None,
        log=True,
    ):
        """Run masked-LM pre-training.

        Returns the masked-LM loss when ``masked_lm_labels`` is given,
        otherwise the raw prediction scores.
        """
        sequence_output, pooled_output = self.bert(
            input_ids=input_ids,
            token_type_ids=token_type_ids,
            attention_mask=attention_mask,
            output_all_encoded_layers=False,
            checkpoint_activations=False,
            position_ids=position_ids,
            position_ids_second=position_ids_second,
        )

        if masked_lm_labels is not None:
            # filter out all masked labels.
            # Labels of -1 mean "ignore"; adding 1 turns them into 0 so
            # nonzero() keeps only the positions carrying a real label.
            masked_token_indexes = torch.nonzero((masked_lm_labels + 1).view(-1)).view(-1)
            prediction_scores, _ = self.cls(sequence_output, pooled_output, masked_token_indexes)
            # Gather the gold token ids at the same kept positions.
            target = torch.index_select(masked_lm_labels.view(-1), 0, masked_token_indexes)
            loss_fct = CrossEntropyLoss(ignore_index=-1)
            masked_lm_loss = loss_fct(prediction_scores.view(-1, self.config.vocab_size), target)
            return masked_lm_loss
        else:
            prediction_scores, _ = self.cls(sequence_output, pooled_output)
            return prediction_scores
| 41.781955 | 119 | 0.703617 | import torch
from torch import nn
from torch.nn import CrossEntropyLoss
import logging
from .bert_model import BertPreTrainedModel, BertPreTrainingHeads, BertModel, BertEncoder, BertPooler, BertLayerNorm
logger = logging.getLogger(__name__)
class DualPositionBertEmbeddings(nn.Module):
def __init__(self, config):
super(DualPositionBertEmbeddings, self).__init__()
self.word_embeddings = nn.Embedding(config.vocab_size, config.hidden_size)
self.position_embeddings = nn.Embedding(config.max_position_embeddings, config.hidden_size)
self.position_embeddings_second = nn.Embedding(config.max_position_embeddings, config.hidden_size)
self.token_type_embeddings = nn.Embedding(config.type_vocab_size, config.hidden_size)
self.LayerNorm = BertLayerNorm(config.hidden_size, eps=1e-12)
self.dropout = nn.Dropout(config.hidden_dropout_prob)
def forward(self, input_ids, token_type_ids, position_ids, position_ids_second):
if token_type_ids is None:
token_type_ids = torch.zeros_like(input_ids)
words_embeddings = self.word_embeddings(input_ids)
position_embeddings = self.position_embeddings(position_ids)
position_embeddings_second = self.position_embeddings(position_ids_second)
token_type_embeddings = self.token_type_embeddings(token_type_ids)
embeddings = words_embeddings + position_embeddings + position_embeddings_second + token_type_embeddings
embeddings = self.LayerNorm(embeddings)
embeddings = self.dropout(embeddings)
return embeddings
class DualPositionBertModel(BertModel):
def __init__(self, config):
super(DualPositionBertModel, self).__init__(config)
self.embeddings = DualPositionBertEmbeddings(config)
self.encoder = BertEncoder(config)
self.pooler = BertPooler(config)
self.apply(self.init_bert_weights)
logger.info("Init BERT pretrain model")
def forward(
self,
input_ids,
token_type_ids=None,
attention_mask=None,
output_all_encoded_layers=True,
checkpoint_activations=False,
position_ids=None,
position_ids_second=None,
):
if attention_mask is None:
attention_mask = torch.ones_like(input_ids)
if token_type_ids is None:
token_type_ids = torch.zeros_like(input_ids)
if len(attention_mask.shape) == 2:
extended_attention_mask = attention_mask.unsqueeze(1).unsqueeze(2)
elif len(attention_mask.shape) == 3:
extended_attention_mask = attention_mask.unsqueeze(1)
else:
raise Exception("invalid attention mask shape! shape: %s" % (attention_mask.shape))
extended_attention_mask = extended_attention_mask.to(dtype=next(self.parameters()).dtype)
extended_attention_mask = (1.0 - extended_attention_mask) * -10000.0
embedding_output = self.embeddings(input_ids, token_type_ids, position_ids, position_ids_second)
encoded_layers = self.encoder(
embedding_output,
extended_attention_mask,
output_all_encoded_layers=output_all_encoded_layers,
checkpoint_activations=checkpoint_activations,
)
sequence_output = encoded_layers[-1]
pooled_output = self.pooler(sequence_output)
if not output_all_encoded_layers:
encoded_layers = encoded_layers[-1]
return encoded_layers, pooled_output
class DualPositionBertForPreTrainingPreLN(BertPreTrainedModel):
def __init__(self, config):
super(DualPositionBertForPreTrainingPreLN, self).__init__(config)
self.bert = DualPositionBertModel(config)
self.cls = BertPreTrainingHeads(config, self.bert.embeddings.word_embeddings.weight)
self.apply(self.init_bert_weights)
def forward(
self,
input_ids,
token_type_ids=None,
attention_mask=None,
masked_lm_labels=None,
position_ids=None,
position_ids_second=None,
log=True,
):
sequence_output, pooled_output = self.bert(
input_ids=input_ids,
token_type_ids=token_type_ids,
attention_mask=attention_mask,
output_all_encoded_layers=False,
checkpoint_activations=False,
position_ids=position_ids,
position_ids_second=position_ids_second,
)
if masked_lm_labels is not None:
masked_token_indexes = torch.nonzero((masked_lm_labels + 1).view(-1)).view(-1)
prediction_scores, _ = self.cls(sequence_output, pooled_output, masked_token_indexes)
target = torch.index_select(masked_lm_labels.view(-1), 0, masked_token_indexes)
loss_fct = CrossEntropyLoss(ignore_index=-1)
masked_lm_loss = loss_fct(prediction_scores.view(-1, self.config.vocab_size), target)
return masked_lm_loss
else:
prediction_scores, _ = self.cls(sequence_output, pooled_output)
return prediction_scores
| true | true |
f72b094590d5184ffbaf3cd4a122b4c8a53db388 | 7,097 | py | Python | sdk/containerregistry/azure-mgmt-containerregistry/azure/mgmt/containerregistry/v2020_11_01_preview/_container_registry_management_client.py | vincenttran-msft/azure-sdk-for-python | 348b56f9f03eeb3f7b502eed51daf494ffff874d | [
"MIT"
] | 1 | 2021-09-07T18:39:05.000Z | 2021-09-07T18:39:05.000Z | sdk/containerregistry/azure-mgmt-containerregistry/azure/mgmt/containerregistry/v2020_11_01_preview/_container_registry_management_client.py | vincenttran-msft/azure-sdk-for-python | 348b56f9f03eeb3f7b502eed51daf494ffff874d | [
"MIT"
] | null | null | null | sdk/containerregistry/azure-mgmt-containerregistry/azure/mgmt/containerregistry/v2020_11_01_preview/_container_registry_management_client.py | vincenttran-msft/azure-sdk-for-python | 348b56f9f03eeb3f7b502eed51daf494ffff874d | [
"MIT"
] | 1 | 2022-03-04T06:21:56.000Z | 2022-03-04T06:21:56.000Z | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from copy import deepcopy
from typing import Any, Optional, TYPE_CHECKING
from azure.core.rest import HttpRequest, HttpResponse
from azure.mgmt.core import ARMPipelineClient
from msrest import Deserializer, Serializer
from . import models
from ._configuration import ContainerRegistryManagementClientConfiguration
from .operations import ConnectedRegistriesOperations, ExportPipelinesOperations, ImportPipelinesOperations, Operations, PipelineRunsOperations, PrivateEndpointConnectionsOperations, RegistriesOperations, ReplicationsOperations, ScopeMapsOperations, TokensOperations, WebhooksOperations
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials import TokenCredential
class ContainerRegistryManagementClient:
    """ContainerRegistryManagementClient.

    NOTE: this class is generated by AutoRest; limit manual edits to
    comments, as code changes are lost on regeneration.

    :ivar connected_registries: ConnectedRegistriesOperations operations
    :vartype connected_registries:
     azure.mgmt.containerregistry.v2020_11_01_preview.operations.ConnectedRegistriesOperations
    :ivar export_pipelines: ExportPipelinesOperations operations
    :vartype export_pipelines:
     azure.mgmt.containerregistry.v2020_11_01_preview.operations.ExportPipelinesOperations
    :ivar registries: RegistriesOperations operations
    :vartype registries:
     azure.mgmt.containerregistry.v2020_11_01_preview.operations.RegistriesOperations
    :ivar import_pipelines: ImportPipelinesOperations operations
    :vartype import_pipelines:
     azure.mgmt.containerregistry.v2020_11_01_preview.operations.ImportPipelinesOperations
    :ivar operations: Operations operations
    :vartype operations: azure.mgmt.containerregistry.v2020_11_01_preview.operations.Operations
    :ivar pipeline_runs: PipelineRunsOperations operations
    :vartype pipeline_runs:
     azure.mgmt.containerregistry.v2020_11_01_preview.operations.PipelineRunsOperations
    :ivar private_endpoint_connections: PrivateEndpointConnectionsOperations operations
    :vartype private_endpoint_connections:
     azure.mgmt.containerregistry.v2020_11_01_preview.operations.PrivateEndpointConnectionsOperations
    :ivar replications: ReplicationsOperations operations
    :vartype replications:
     azure.mgmt.containerregistry.v2020_11_01_preview.operations.ReplicationsOperations
    :ivar scope_maps: ScopeMapsOperations operations
    :vartype scope_maps:
     azure.mgmt.containerregistry.v2020_11_01_preview.operations.ScopeMapsOperations
    :ivar tokens: TokensOperations operations
    :vartype tokens: azure.mgmt.containerregistry.v2020_11_01_preview.operations.TokensOperations
    :ivar webhooks: WebhooksOperations operations
    :vartype webhooks:
     azure.mgmt.containerregistry.v2020_11_01_preview.operations.WebhooksOperations
    :param credential: Credential needed for the client to connect to Azure.
    :type credential: ~azure.core.credentials.TokenCredential
    :param subscription_id: The Microsoft Azure subscription ID.
    :type subscription_id: str
    :param base_url: Service URL. Default value is 'https://management.azure.com'.
    :type base_url: str
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
     Retry-After header is present.
    """

    def __init__(
        self,
        credential: "TokenCredential",
        subscription_id: str,
        base_url: str = "https://management.azure.com",
        **kwargs: Any
    ) -> None:
        self._config = ContainerRegistryManagementClientConfiguration(credential=credential, subscription_id=subscription_id, **kwargs)
        self._client = ARMPipelineClient(base_url=base_url, config=self._config, **kwargs)

        # Map of model-class name -> class, consumed by msrest for
        # (de)serialization of request/response bodies.
        client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
        self._serialize = Serializer(client_models)
        self._deserialize = Deserializer(client_models)
        self._serialize.client_side_validation = False
        # One operations group per service area, all sharing the pipeline.
        self.connected_registries = ConnectedRegistriesOperations(self._client, self._config, self._serialize, self._deserialize)
        self.export_pipelines = ExportPipelinesOperations(self._client, self._config, self._serialize, self._deserialize)
        self.registries = RegistriesOperations(self._client, self._config, self._serialize, self._deserialize)
        self.import_pipelines = ImportPipelinesOperations(self._client, self._config, self._serialize, self._deserialize)
        self.operations = Operations(self._client, self._config, self._serialize, self._deserialize)
        self.pipeline_runs = PipelineRunsOperations(self._client, self._config, self._serialize, self._deserialize)
        self.private_endpoint_connections = PrivateEndpointConnectionsOperations(self._client, self._config, self._serialize, self._deserialize)
        self.replications = ReplicationsOperations(self._client, self._config, self._serialize, self._deserialize)
        self.scope_maps = ScopeMapsOperations(self._client, self._config, self._serialize, self._deserialize)
        self.tokens = TokensOperations(self._client, self._config, self._serialize, self._deserialize)
        self.webhooks = WebhooksOperations(self._client, self._config, self._serialize, self._deserialize)


    def _send_request(
        self,
        request,  # type: HttpRequest
        **kwargs: Any
    ) -> HttpResponse:
        """Runs the network request through the client's chained policies.

        >>> from azure.core.rest import HttpRequest
        >>> request = HttpRequest("GET", "https://www.example.org/")
        <HttpRequest [GET], url: 'https://www.example.org/'>
        >>> response = client._send_request(request)
        <HttpResponse: 200 OK>

        For more information on this code flow, see https://aka.ms/azsdk/python/protocol/quickstart

        :param request: The network request you want to make. Required.
        :type request: ~azure.core.rest.HttpRequest
        :keyword bool stream: Whether the response payload will be streamed. Defaults to False.
        :return: The response of your network call. Does not do error handling on your response.
        :rtype: ~azure.core.rest.HttpResponse
        """
        # Copy so the caller's request object is never mutated.
        request_copy = deepcopy(request)
        request_copy.url = self._client.format_url(request_copy.url)
        return self._client.send_request(request_copy, **kwargs)

    def close(self):
        # type: () -> None
        self._client.close()

    def __enter__(self):
        # type: () -> ContainerRegistryManagementClient
        self._client.__enter__()
        return self

    def __exit__(self, *exc_details):
        # type: (Any) -> None
        self._client.__exit__(*exc_details)
| 53.360902 | 286 | 0.748908 |
from copy import deepcopy
from typing import Any, Optional, TYPE_CHECKING
from azure.core.rest import HttpRequest, HttpResponse
from azure.mgmt.core import ARMPipelineClient
from msrest import Deserializer, Serializer
from . import models
from ._configuration import ContainerRegistryManagementClientConfiguration
from .operations import ConnectedRegistriesOperations, ExportPipelinesOperations, ImportPipelinesOperations, Operations, PipelineRunsOperations, PrivateEndpointConnectionsOperations, RegistriesOperations, ReplicationsOperations, ScopeMapsOperations, TokensOperations, WebhooksOperations
if TYPE_CHECKING:
from azure.core.credentials import TokenCredential
class ContainerRegistryManagementClient:
def __init__(
self,
credential: "TokenCredential",
subscription_id: str,
base_url: str = "https://management.azure.com",
**kwargs: Any
) -> None:
self._config = ContainerRegistryManagementClientConfiguration(credential=credential, subscription_id=subscription_id, **kwargs)
self._client = ARMPipelineClient(base_url=base_url, config=self._config, **kwargs)
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
self._serialize = Serializer(client_models)
self._deserialize = Deserializer(client_models)
self._serialize.client_side_validation = False
self.connected_registries = ConnectedRegistriesOperations(self._client, self._config, self._serialize, self._deserialize)
self.export_pipelines = ExportPipelinesOperations(self._client, self._config, self._serialize, self._deserialize)
self.registries = RegistriesOperations(self._client, self._config, self._serialize, self._deserialize)
self.import_pipelines = ImportPipelinesOperations(self._client, self._config, self._serialize, self._deserialize)
self.operations = Operations(self._client, self._config, self._serialize, self._deserialize)
self.pipeline_runs = PipelineRunsOperations(self._client, self._config, self._serialize, self._deserialize)
self.private_endpoint_connections = PrivateEndpointConnectionsOperations(self._client, self._config, self._serialize, self._deserialize)
self.replications = ReplicationsOperations(self._client, self._config, self._serialize, self._deserialize)
self.scope_maps = ScopeMapsOperations(self._client, self._config, self._serialize, self._deserialize)
self.tokens = TokensOperations(self._client, self._config, self._serialize, self._deserialize)
self.webhooks = WebhooksOperations(self._client, self._config, self._serialize, self._deserialize)
def _send_request(
self,
request,
**kwargs: Any
) -> HttpResponse:
request_copy = deepcopy(request)
request_copy.url = self._client.format_url(request_copy.url)
return self._client.send_request(request_copy, **kwargs)
def close(self):
self._client.close()
def __enter__(self):
self._client.__enter__()
return self
def __exit__(self, *exc_details):
self._client.__exit__(*exc_details)
| true | true |
f72b097de1b2982d94f31803515377aa94536b9a | 1,869 | py | Python | authentik/stages/deny/tests.py | BeryJu/passbook | 350f0d836580f4411524614f361a76c4f27b8a2d | [
"MIT"
] | 15 | 2020-01-05T09:09:57.000Z | 2020-11-28T05:27:39.000Z | authentik/stages/deny/tests.py | BeryJu/passbook | 350f0d836580f4411524614f361a76c4f27b8a2d | [
"MIT"
] | 302 | 2020-01-21T08:03:59.000Z | 2020-12-04T05:04:57.000Z | authentik/stages/deny/tests.py | BeryJu/passbook | 350f0d836580f4411524614f361a76c4f27b8a2d | [
"MIT"
] | 3 | 2020-03-04T08:21:59.000Z | 2020-08-01T20:37:18.000Z | """deny tests"""
from django.urls import reverse
from authentik.core.tests.utils import create_test_admin_user, create_test_flow
from authentik.flows.markers import StageMarker
from authentik.flows.models import FlowDesignation, FlowStageBinding
from authentik.flows.planner import FlowPlan
from authentik.flows.tests import FlowTestCase
from authentik.flows.views.executor import SESSION_KEY_PLAN
from authentik.stages.deny.models import DenyStage
class TestUserDenyStage(FlowTestCase):
    """Deny-stage tests: any flow reaching the stage ends in access-denied.

    Fixes: the two test methods previously duplicated the plan-seeding
    block verbatim and carried copy-pasted docstrings ("valid pending user
    and backend") that did not describe a deny stage.
    """

    def setUp(self):
        super().setUp()
        self.user = create_test_admin_user()
        self.flow = create_test_flow(FlowDesignation.AUTHENTICATION)
        self.stage = DenyStage.objects.create(name="logout")
        self.binding = FlowStageBinding.objects.create(target=self.flow, stage=self.stage, order=2)

    def _seed_plan(self):
        """Put a flow plan containing the deny-stage binding into the session."""
        plan = FlowPlan(flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()])
        session = self.client.session
        session[SESSION_KEY_PLAN] = plan
        session.save()

    def _executor_url(self):
        """URL of the flow executor endpoint for the test flow."""
        return reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug})

    def test_valid_get(self):
        """GET against the executor is answered with access-denied."""
        self._seed_plan()
        response = self.client.get(self._executor_url())
        self.assertStageResponse(response, self.flow, component="ak-stage-access-denied")

    def test_valid_post(self):
        """POST against the executor is answered with access-denied."""
        self._seed_plan()
        response = self.client.post(self._executor_url())
        self.assertStageResponse(response, self.flow, component="ak-stage-access-denied")
| 38.9375 | 99 | 0.70626 | from django.urls import reverse
from authentik.core.tests.utils import create_test_admin_user, create_test_flow
from authentik.flows.markers import StageMarker
from authentik.flows.models import FlowDesignation, FlowStageBinding
from authentik.flows.planner import FlowPlan
from authentik.flows.tests import FlowTestCase
from authentik.flows.views.executor import SESSION_KEY_PLAN
from authentik.stages.deny.models import DenyStage
class TestUserDenyStage(FlowTestCase):
def setUp(self):
super().setUp()
self.user = create_test_admin_user()
self.flow = create_test_flow(FlowDesignation.AUTHENTICATION)
self.stage = DenyStage.objects.create(name="logout")
self.binding = FlowStageBinding.objects.create(target=self.flow, stage=self.stage, order=2)
def test_valid_get(self):
plan = FlowPlan(flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()])
session = self.client.session
session[SESSION_KEY_PLAN] = plan
session.save()
response = self.client.get(
reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug})
)
self.assertStageResponse(response, self.flow, component="ak-stage-access-denied")
def test_valid_post(self):
plan = FlowPlan(flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()])
session = self.client.session
session[SESSION_KEY_PLAN] = plan
session.save()
response = self.client.post(
reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug})
)
self.assertStageResponse(response, self.flow, component="ak-stage-access-denied")
| true | true |
f72b09d34e7b78c00c0b504b76cded6aa3b45a39 | 1,425 | py | Python | models/vasilyev2020/src/score.py | leoribeiro/repro | 7dc2ad611925542b4deb62fd1e30761ba56a7f60 | [
"Apache-2.0"
] | 15 | 2021-07-28T19:52:03.000Z | 2022-03-28T15:55:17.000Z | models/vasilyev2020/src/score.py | leoribeiro/repro | 7dc2ad611925542b4deb62fd1e30761ba56a7f60 | [
"Apache-2.0"
] | 3 | 2021-11-19T17:09:34.000Z | 2022-02-14T19:40:48.000Z | models/vasilyev2020/src/score.py | leoribeiro/repro | 7dc2ad611925542b4deb62fd1e30761ba56a7f60 | [
"Apache-2.0"
] | null | null | null | import argparse
import json
import os
from blanc import BlancHelp, BlancTune
def main(args):
kwargs = json.loads(args.kwargs)
device = "cpu" if args.device == -1 else "cuda"
if args.type == "tune":
blanc = BlancTune(device=device, random_seed=args.random_seed, **kwargs)
elif args.type == "help":
blanc = BlancHelp(device=device, **kwargs)
else:
raise Exception(f"Unknown BLANC type: {args.type}")
documents = []
summaries_list = []
with open(args.input_file, "r") as f:
for line in f:
data = json.loads(line)
documents.append(data["document"])
summaries_list.append(data["summaries"])
scores_list = blanc.eval_summaries_for_docs(documents, summaries_list)
dirname = os.path.dirname(args.output_file)
if dirname:
os.makedirs(dirname, exist_ok=True)
with open(args.output_file, "w") as out:
out.write(json.dumps(scores_list))
if __name__ == "__main__":
argp = argparse.ArgumentParser()
argp.add_argument("--input-file", required=True)
argp.add_argument("--type", required=True, choices=["help", "tune"])
argp.add_argument("--device", required=True, type=int)
argp.add_argument("--random-seed", required=True, type=int)
argp.add_argument("--kwargs", required=True)
argp.add_argument("--output-file", required=True)
args = argp.parse_args()
main(args)
| 31.666667 | 80 | 0.655439 | import argparse
import json
import os
from blanc import BlancHelp, BlancTune
def main(args):
kwargs = json.loads(args.kwargs)
device = "cpu" if args.device == -1 else "cuda"
if args.type == "tune":
blanc = BlancTune(device=device, random_seed=args.random_seed, **kwargs)
elif args.type == "help":
blanc = BlancHelp(device=device, **kwargs)
else:
raise Exception(f"Unknown BLANC type: {args.type}")
documents = []
summaries_list = []
with open(args.input_file, "r") as f:
for line in f:
data = json.loads(line)
documents.append(data["document"])
summaries_list.append(data["summaries"])
scores_list = blanc.eval_summaries_for_docs(documents, summaries_list)
dirname = os.path.dirname(args.output_file)
if dirname:
os.makedirs(dirname, exist_ok=True)
with open(args.output_file, "w") as out:
out.write(json.dumps(scores_list))
if __name__ == "__main__":
argp = argparse.ArgumentParser()
argp.add_argument("--input-file", required=True)
argp.add_argument("--type", required=True, choices=["help", "tune"])
argp.add_argument("--device", required=True, type=int)
argp.add_argument("--random-seed", required=True, type=int)
argp.add_argument("--kwargs", required=True)
argp.add_argument("--output-file", required=True)
args = argp.parse_args()
main(args)
| true | true |
f72b0a2e2db8a201933a779f2d9eaf3fc70eda33 | 9,937 | py | Python | python/tvm/tensor_graph/testing/relay_examples/lenet.py | QinHan-Erin/AMOS | 634bf48edf4015e4a69a8c32d49b96bce2b5f16f | [
"Apache-2.0"
] | 22 | 2022-03-18T07:29:31.000Z | 2022-03-23T14:54:32.000Z | python/tvm/tensor_graph/testing/relay_examples/lenet.py | QinHan-Erin/AMOS | 634bf48edf4015e4a69a8c32d49b96bce2b5f16f | [
"Apache-2.0"
] | null | null | null | python/tvm/tensor_graph/testing/relay_examples/lenet.py | QinHan-Erin/AMOS | 634bf48edf4015e4a69a8c32d49b96bce2b5f16f | [
"Apache-2.0"
] | 2 | 2022-03-18T08:26:34.000Z | 2022-03-20T06:02:48.000Z | import tvm
import numpy as np
from tvm import relay
from tvm.relay.testing import run_infer_type, gradient
def get_lenet(batch_size,
num_classes=10,
image_shape=(1, 28, 28),
dtype="float32"):
"""Get lenet funciton
Parameters
----------
batch_size : int
The batch size used in the model
num_classes : int, optional
Number of claseses
image_shape : tuple, optional
The input image shape
dtype : str, optional
The data type
Returns
-------
net : relay.Function
The dataflow.
"""
data_shape = (batch_size,) + image_shape
data = relay.TensorType(data_shape, dtype=dtype)
data = relay.var("data", data)
conv_w1 = relay.var('c1.weight')
c1 = relay.nn.conv2d(data=data, weight=conv_w1, channels=6, kernel_size=(5, 5),
strides=(1, 1), padding=(2, 2))
conv_b1 = relay.var('c1.bias', dtype=dtype)
c1 = relay.nn.bias_add(c1, conv_b1, axis=-1)
act_c1 = relay.nn.relu(data=c1)
# Max-pooling
# [64, 6, 14, 14]
conv_w2 = relay.var('c2.weight', dtype=dtype)
conv_b2 = relay.var('c2.bias', dtype=dtype)
p1 = relay.nn.conv2d(data=act_c1, weight=conv_w2, channels=6, kernel_size=(2, 2),
strides=(2, 2), padding=(0, 0))
p1 = relay.nn.bias_add(p1, conv_b2, axis=-1)
# Convolution
conv_w3 = relay.var('c3.weight', dtype=dtype)
conv_b3 = relay.var('c3.bias', dtype=dtype)
c2 = relay.nn.conv2d(data=p1, weight=conv_w3, channels=6, kernel_size=(5, 5),
strides=(1, 1), padding=(0, 0))
c2 = relay.nn.bias_add(c2, conv_b3, axis=-1)
# [64, 6, 28, 28]conv2d(p1, 16, (5, 5), (1, 1), (0, 0), 'c2') # [64, 16, 10, 10]
act_c2 = relay.nn.relu(data=c2)
# Max-pooling
# [64, 16, 5, 5]
conv_w4 = relay.var('c4.weight', dtype=dtype)
conv_b4 = relay.var('c4.bias', dtype=dtype)
p2 = relay.nn.conv2d(data=act_c2, weight=conv_w4, channels=6, kernel_size=(2, 2),
strides=(2, 2), padding=(0, 0))
p2 = relay.nn.bias_add(p2, conv_b4, axis=-1)
# reshape
r1 = relay.nn.batch_flatten(data=p2)
w1 = relay.var('fc1.weight', dtype=dtype)
b1 = relay.var('fc1.bias', dtype=dtype)
fc1 = relay.nn.dense(data=r1, weight=w1, units=128)
fc1 = relay.nn.bias_add(fc1, b1, axis=-1)
act1 = relay.nn.relu(data=fc1)
w2 = relay.var('fc2.weight', dtype=dtype)
b2 = relay.var('fc2.bias', dtype=dtype)
fc2 = relay.nn.dense(data=act1, weight=w2, units=64)
fc2 = relay.nn.bias_add(fc2, b2, axis=-1)
act2 = relay.nn.relu(data=fc2)
w3 = relay.var('fc3.weight', dtype=dtype)
b3 = relay.var('fc3.bias', dtype=dtype)
fc3 = relay.nn.dense(data=act2, weight=w3, units=num_classes)
fc3 = relay.nn.bias_add(fc3, b3, axis=-1)
lenet = relay.nn.softmax(data=fc3)
argu_list = [conv_w1, conv_b1, conv_w2, conv_b2, w1, b1, w2, b2, w3, b3]
return relay.Function(relay.analysis.free_vars(lenet), lenet), argu_list
def make_sgd_update_net(loss_function, var, lr=0.002, scale=1.0, wd=0.0, clip=None):
type_loss_function = run_infer_type(loss_function)
grad_func = run_infer_type(gradient(type_loss_function))
grads = relay.TupleWrapper(relay.TupleGetItem(grad_func.body, 1), len(loss_function.params))
useful_grad = []
type_var = []
for var_item in var:
for index, value_item in enumerate(type_loss_function.params):
if var_item.name_hint == value_item.name_hint:
useful_grad.append(grads[index])
type_var.append(value_item)
break
else:
raise("can't get required params from loss function, internal error")
updates = []
for i, v in enumerate(type_var):
g = useful_grad[i]
g = relay.multiply(g, relay.const(scale, "float32"))
if clip is not None:
g = relay.clip(g, a_min=-1 * clip, a_max=clip)
g = relay.subtract(v,
relay.multiply(relay.const(lr, "float32"),
relay.add(g,
relay.multiply(relay.const(wd, "float32"),
v))))
updates.append(g)
sgd_body = relay.Tuple(updates)
return relay.Function(relay.analysis.free_vars(sgd_body), sgd_body)
def make_adam_update_net(loss_function, var, lr=0.001, beta1=0.9, beta2=0.99, scale=1.0, wd=0.0, clip=None, name="adam", dtype='float32'):
type_loss_function = run_infer_type(loss_function)
grad_func = run_infer_type(gradient(type_loss_function))
grads = relay.TupleWrapper(relay.TupleGetItem(grad_func.body, 1), len(loss_function.params))
useful_grad = []
type_var = []
for var_item in var:
for index, value_item in enumerate(type_loss_function.params):
if var_item.name_hint == value_item.name_hint:
useful_grad.append(grads[index])
type_var.append(value_item)
break
else:
raise("can't get required params from loss function, internal error")
print(type_var)
updates = []
m = []
t = relay.zeros(shape=[1], dtype=dtype)
epsilon = 1e-04
const_1 = relay.const(1, dtype=dtype)
const_beta1 = relay.const(beta1, dtype=dtype)
const_beta2 = relay.const(beta2, dtype=dtype)
for i, va in enumerate(type_var):
m.append(relay.zeros_like(va))
update_t = relay.add(t, const_1)
rate = relay.divide(relay.sqrt(relay.subtract(const_1, relay.power(const_beta2, update_t))),
relay.subtract(const_1, relay.power(const_beta1, update_t)))
lr_t = relay.multiply(relay.const(lr, dtype=dtype), rate)
for var, g, m in zip(type_var, useful_grad, m):
update_m = relay.add(relay.multiply(const_beta1, m),
relay.multiply(relay.subtract(const_1, const_beta1), g))
update_v = relay.add(relay.multiply(const_beta2, m),
relay.multiply(relay.subtract(const_1, const_beta2),
relay.multiply(g, g)))
update_var = relay.subtract(var,
relay.divide(relay.multiply(lr_t, update_m),
relay.add(relay.sqrt(update_v),
relay.const(epsilon, dtype="float32"))))
updates.append(update_var)
adam_body = relay.Tuple(updates)
return relay.Function(relay.analysis.free_vars(adam_body), adam_body)
def mse_loss(lenet_function, target):
sub = relay.subtract(lenet_function.body, target)
loss_body = relay.sum(relay.multiply(sub, sub))
return relay.Function(relay.analysis.free_vars(loss_body), loss_body)
# return sum((predict - target)**2) / 2.0
def cross_entropy_loss(lenet_function, target):
loss_body = relay.negative(relay.sum(relay.multiply(relay.log(relay.add(lenet_function.body,
relay.const(1e-5, dtype="float32"))),
target)))
return relay.Function(relay.analysis.free_vars(loss_body), loss_body)
def make_loss_net(lenet_function, target, optim="CROSS"):
"""Get loss funtion for lenet
Parameters
----------
lenet_function : relay.Function
target : relay.Expr
optim : str, optional
loss_function strategy, "CROSS" or "MSE"
Returns
-------
net : relay.Function
The dataflow.
"""
if optim == "CROSS":
return cross_entropy_loss(lenet_function, target)
if optim == "MSE":
return mse_loss(lenet_function, target)
raise("unknown optim, use 'CROSS' or 'MSE'.")
def make_grad_net(loss_function):
"""Get updated funtion for lenet
Parameters
----------
loss_function : relay.Function
Returns
-------
net : relay.Function
The dataflow.
"""
type_loss_function = run_infer_type(loss_function)
grad_func = run_infer_type(gradient(type_loss_function))
return grad_func
def make_update_net(loss_function, weights, optim="SGD"):
"""Get updated funtion for lenet
Parameters
----------
loss_function : relay.Function
weights : [relay.var]
vars to compute gradient
optim : str, optional
updated_function strategy, "ADAM" or "SGD"
Returns
-------
net : relay.Function
The dataflow.
"""
if optim == "ADAM":
return make_adam_update_net(loss_function, weights)
if optim == "SGD":
return make_sgd_update_net(loss_function, weights)
raise("unknown optim, use 'ADAM' or 'SGD'.")
def create_workload(net, initializer=None, seed=0):
"""Helper function to create benchmark image classification workload.
Parameters
----------
net : tvm.relay.Function
The selected function of the network.
initializer : Initializer
The initializer used
seed : int
The seed used in initialization.
Returns
-------
mod : tvm.IRModule
The created relay module.
params : dict of str to NDArray
The parameters.
"""
mod = tvm.IRModule.from_expr(net)
mod = relay.transform.InferType()(mod)
shape_dict = {
v.name_hint : v.checked_type for v in mod["main"].params}
np.random.seed(seed)
initializer = initializer if initializer else Xavier()
params = {}
for k, v in shape_dict.items():
# modify here, skip "label" as well
if k == "data" or k == "label":
continue
init_value = np.zeros(v.concrete_shape).astype(v.dtype)
initializer(k, init_value)
params[k] = tvm.nd.array(init_value, ctx=tvm.cpu(0))
return mod, params
| 36.399267 | 138 | 0.600986 | import tvm
import numpy as np
from tvm import relay
from tvm.relay.testing import run_infer_type, gradient
def get_lenet(batch_size,
num_classes=10,
image_shape=(1, 28, 28),
dtype="float32"):
data_shape = (batch_size,) + image_shape
data = relay.TensorType(data_shape, dtype=dtype)
data = relay.var("data", data)
conv_w1 = relay.var('c1.weight')
c1 = relay.nn.conv2d(data=data, weight=conv_w1, channels=6, kernel_size=(5, 5),
strides=(1, 1), padding=(2, 2))
conv_b1 = relay.var('c1.bias', dtype=dtype)
c1 = relay.nn.bias_add(c1, conv_b1, axis=-1)
act_c1 = relay.nn.relu(data=c1)
conv_w2 = relay.var('c2.weight', dtype=dtype)
conv_b2 = relay.var('c2.bias', dtype=dtype)
p1 = relay.nn.conv2d(data=act_c1, weight=conv_w2, channels=6, kernel_size=(2, 2),
strides=(2, 2), padding=(0, 0))
p1 = relay.nn.bias_add(p1, conv_b2, axis=-1)
conv_w3 = relay.var('c3.weight', dtype=dtype)
conv_b3 = relay.var('c3.bias', dtype=dtype)
c2 = relay.nn.conv2d(data=p1, weight=conv_w3, channels=6, kernel_size=(5, 5),
strides=(1, 1), padding=(0, 0))
c2 = relay.nn.bias_add(c2, conv_b3, axis=-1)
y.nn.relu(data=c2)
conv_w4 = relay.var('c4.weight', dtype=dtype)
conv_b4 = relay.var('c4.bias', dtype=dtype)
p2 = relay.nn.conv2d(data=act_c2, weight=conv_w4, channels=6, kernel_size=(2, 2),
strides=(2, 2), padding=(0, 0))
p2 = relay.nn.bias_add(p2, conv_b4, axis=-1)
r1 = relay.nn.batch_flatten(data=p2)
w1 = relay.var('fc1.weight', dtype=dtype)
b1 = relay.var('fc1.bias', dtype=dtype)
fc1 = relay.nn.dense(data=r1, weight=w1, units=128)
fc1 = relay.nn.bias_add(fc1, b1, axis=-1)
act1 = relay.nn.relu(data=fc1)
w2 = relay.var('fc2.weight', dtype=dtype)
b2 = relay.var('fc2.bias', dtype=dtype)
fc2 = relay.nn.dense(data=act1, weight=w2, units=64)
fc2 = relay.nn.bias_add(fc2, b2, axis=-1)
act2 = relay.nn.relu(data=fc2)
w3 = relay.var('fc3.weight', dtype=dtype)
b3 = relay.var('fc3.bias', dtype=dtype)
fc3 = relay.nn.dense(data=act2, weight=w3, units=num_classes)
fc3 = relay.nn.bias_add(fc3, b3, axis=-1)
lenet = relay.nn.softmax(data=fc3)
argu_list = [conv_w1, conv_b1, conv_w2, conv_b2, w1, b1, w2, b2, w3, b3]
return relay.Function(relay.analysis.free_vars(lenet), lenet), argu_list
def make_sgd_update_net(loss_function, var, lr=0.002, scale=1.0, wd=0.0, clip=None):
type_loss_function = run_infer_type(loss_function)
grad_func = run_infer_type(gradient(type_loss_function))
grads = relay.TupleWrapper(relay.TupleGetItem(grad_func.body, 1), len(loss_function.params))
useful_grad = []
type_var = []
for var_item in var:
for index, value_item in enumerate(type_loss_function.params):
if var_item.name_hint == value_item.name_hint:
useful_grad.append(grads[index])
type_var.append(value_item)
break
else:
raise("can't get required params from loss function, internal error")
updates = []
for i, v in enumerate(type_var):
g = useful_grad[i]
g = relay.multiply(g, relay.const(scale, "float32"))
if clip is not None:
g = relay.clip(g, a_min=-1 * clip, a_max=clip)
g = relay.subtract(v,
relay.multiply(relay.const(lr, "float32"),
relay.add(g,
relay.multiply(relay.const(wd, "float32"),
v))))
updates.append(g)
sgd_body = relay.Tuple(updates)
return relay.Function(relay.analysis.free_vars(sgd_body), sgd_body)
def make_adam_update_net(loss_function, var, lr=0.001, beta1=0.9, beta2=0.99, scale=1.0, wd=0.0, clip=None, name="adam", dtype='float32'):
type_loss_function = run_infer_type(loss_function)
grad_func = run_infer_type(gradient(type_loss_function))
grads = relay.TupleWrapper(relay.TupleGetItem(grad_func.body, 1), len(loss_function.params))
useful_grad = []
type_var = []
for var_item in var:
for index, value_item in enumerate(type_loss_function.params):
if var_item.name_hint == value_item.name_hint:
useful_grad.append(grads[index])
type_var.append(value_item)
break
else:
raise("can't get required params from loss function, internal error")
print(type_var)
updates = []
m = []
t = relay.zeros(shape=[1], dtype=dtype)
epsilon = 1e-04
const_1 = relay.const(1, dtype=dtype)
const_beta1 = relay.const(beta1, dtype=dtype)
const_beta2 = relay.const(beta2, dtype=dtype)
for i, va in enumerate(type_var):
m.append(relay.zeros_like(va))
update_t = relay.add(t, const_1)
rate = relay.divide(relay.sqrt(relay.subtract(const_1, relay.power(const_beta2, update_t))),
relay.subtract(const_1, relay.power(const_beta1, update_t)))
lr_t = relay.multiply(relay.const(lr, dtype=dtype), rate)
for var, g, m in zip(type_var, useful_grad, m):
update_m = relay.add(relay.multiply(const_beta1, m),
relay.multiply(relay.subtract(const_1, const_beta1), g))
update_v = relay.add(relay.multiply(const_beta2, m),
relay.multiply(relay.subtract(const_1, const_beta2),
relay.multiply(g, g)))
update_var = relay.subtract(var,
relay.divide(relay.multiply(lr_t, update_m),
relay.add(relay.sqrt(update_v),
relay.const(epsilon, dtype="float32"))))
updates.append(update_var)
adam_body = relay.Tuple(updates)
return relay.Function(relay.analysis.free_vars(adam_body), adam_body)
def mse_loss(lenet_function, target):
sub = relay.subtract(lenet_function.body, target)
loss_body = relay.sum(relay.multiply(sub, sub))
return relay.Function(relay.analysis.free_vars(loss_body), loss_body)
def cross_entropy_loss(lenet_function, target):
loss_body = relay.negative(relay.sum(relay.multiply(relay.log(relay.add(lenet_function.body,
relay.const(1e-5, dtype="float32"))),
target)))
return relay.Function(relay.analysis.free_vars(loss_body), loss_body)
def make_loss_net(lenet_function, target, optim="CROSS"):
if optim == "CROSS":
return cross_entropy_loss(lenet_function, target)
if optim == "MSE":
return mse_loss(lenet_function, target)
raise("unknown optim, use 'CROSS' or 'MSE'.")
def make_grad_net(loss_function):
type_loss_function = run_infer_type(loss_function)
grad_func = run_infer_type(gradient(type_loss_function))
return grad_func
def make_update_net(loss_function, weights, optim="SGD"):
if optim == "ADAM":
return make_adam_update_net(loss_function, weights)
if optim == "SGD":
return make_sgd_update_net(loss_function, weights)
raise("unknown optim, use 'ADAM' or 'SGD'.")
def create_workload(net, initializer=None, seed=0):
mod = tvm.IRModule.from_expr(net)
mod = relay.transform.InferType()(mod)
shape_dict = {
v.name_hint : v.checked_type for v in mod["main"].params}
np.random.seed(seed)
initializer = initializer if initializer else Xavier()
params = {}
for k, v in shape_dict.items():
if k == "data" or k == "label":
continue
init_value = np.zeros(v.concrete_shape).astype(v.dtype)
initializer(k, init_value)
params[k] = tvm.nd.array(init_value, ctx=tvm.cpu(0))
return mod, params
| true | true |
f72b0a4f41647e949ba4e6202d2c7f3980d53dab | 575 | py | Python | M5_assgmnt.py | AVNEETK99/FANTASY-CRICKET-LEAGUE | 17fc188e48a51c6f3937a9965f1edcead2a8d0b8 | [
"CC0-1.0"
] | 23 | 2018-07-18T10:47:12.000Z | 2021-07-31T21:53:17.000Z | M5_assgmnt.py | RupinSamria/Summer-Training-Python-development | 4fa38344d6aa71581b004c16eddeec22f9f739f4 | [
"CC0-1.0"
] | 3 | 2018-11-18T07:11:05.000Z | 2020-04-30T20:16:51.000Z | M5_assgmnt.py | RupinSamria/Summer-Training-Python-development | 4fa38344d6aa71581b004c16eddeec22f9f739f4 | [
"CC0-1.0"
] | 53 | 2018-10-04T05:49:30.000Z | 2021-12-12T15:52:17.000Z | import sqlite3
mystore=sqlite3.connect('bookstores.db')
mycursor=mystore.cursor()
sql=''' create table book (id integer primary key not null,title text(20),
author text(20),price real);'''
mycursor.execute(sql)
sql='''insert into book
values(1,'think java','rhooney',550.0);'''
mycursor.execute(sql)
mystore.commit()
sql='''insert into book
values(2,'think python','allen',450.0);'''
mycursor.execute(sql)
mystore.commit()
sql='''insert into book
values(3,'think c++','booty',375.0);'''
mycursor.execute(sql)
mystore.commit()
mystore.close()
| 21.296296 | 75 | 0.683478 | import sqlite3
mystore=sqlite3.connect('bookstores.db')
mycursor=mystore.cursor()
sql=''' create table book (id integer primary key not null,title text(20),
author text(20),price real);'''
mycursor.execute(sql)
sql='''insert into book
values(1,'think java','rhooney',550.0);'''
mycursor.execute(sql)
mystore.commit()
sql='''insert into book
values(2,'think python','allen',450.0);'''
mycursor.execute(sql)
mystore.commit()
sql='''insert into book
values(3,'think c++','booty',375.0);'''
mycursor.execute(sql)
mystore.commit()
mystore.close()
| true | true |
f72b0a5531db17b2a97a3179af5c86bd986dd358 | 12,137 | py | Python | test/data_join/test_data_block_dumper.py | chen1i/fedlearner | 981514dadbd0aa49ae87d185dd247d310e35605c | [
"Apache-2.0"
] | null | null | null | test/data_join/test_data_block_dumper.py | chen1i/fedlearner | 981514dadbd0aa49ae87d185dd247d310e35605c | [
"Apache-2.0"
] | null | null | null | test/data_join/test_data_block_dumper.py | chen1i/fedlearner | 981514dadbd0aa49ae87d185dd247d310e35605c | [
"Apache-2.0"
] | null | null | null | # Copyright 2020 The FedLearner Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
import unittest
import os
import tensorflow.compat.v1 as tf
tf.enable_eager_execution()
from google.protobuf import text_format, timestamp_pb2
import tensorflow_io
from tensorflow.compat.v1 import gfile
from fedlearner.common import db_client
from fedlearner.common import common_pb2 as common_pb
from fedlearner.common import data_join_service_pb2 as dj_pb
from fedlearner.data_join import (
data_block_manager, common, data_block_dumper,
raw_data_manifest_manager, raw_data_visitor, visitor
)
from fedlearner.data_join.data_block_manager import DataBlockBuilder
from fedlearner.data_join.raw_data_iter_impl.tf_record_iter import TfExampleItem
class TestDataBlockDumper(unittest.TestCase):
    """End-to-end test for DataBlockDumperManager.

    Scenario: a "follower" data source dumps 5 data blocks of joined
    examples; a "leader" data source only has raw data.  The dumper on the
    leader side replays the follower's synced data block metas against the
    leader's raw data and must reproduce byte-identical data blocks (same
    metas, same example_id ordering) under the leader's output directory.
    """

    def setUp(self):
        """Create fresh follower/leader data sources and wipe prior state.

        Side effects: deletes previous output/raw-data directories and the
        leader's kvstore prefix so each run starts clean.
        """
        # Follower data source: the side that already has joined data blocks.
        data_source_f = common_pb.DataSource()
        data_source_f.data_source_meta.name = "milestone"
        data_source_f.data_source_meta.partition_num = 1
        data_source_f.output_base_dir = "./output-f"
        self.data_source_f = data_source_f
        if gfile.Exists(self.data_source_f.output_base_dir):
            gfile.DeleteRecursively(self.data_source_f.output_base_dir)
        # Leader data source: same name/partitioning, separate output dir;
        # its input is raw data written under self.raw_data_dir_l.
        data_source_l = common_pb.DataSource()
        data_source_l.data_source_meta.name = "milestone"
        data_source_l.data_source_meta.partition_num = 1
        data_source_l.output_base_dir = "./output-l"
        self.raw_data_dir_l = "./raw_data-l"
        self.data_source_l = data_source_l
        if gfile.Exists(self.data_source_l.output_base_dir):
            gfile.DeleteRecursively(self.data_source_l.output_base_dir)
        if gfile.Exists(self.raw_data_dir_l):
            gfile.DeleteRecursively(self.raw_data_dir_l)
        # NOTE(review): DBClient('etcd', True) — the True flag presumably
        # selects a mock/in-process etcd backend; confirm against db_client.
        self.kvstore = db_client.DBClient('etcd', True)
        # Clear any kvstore entries left over from a previous run.
        self.kvstore.delete_prefix(common.data_source_kvstore_base_dir(self.data_source_l.data_source_meta.name))
        self.manifest_manager = raw_data_manifest_manager.RawDataManifestManager(
            self.kvstore, self.data_source_l)

    def generate_follower_data_block(self):
        """Dump 5 data blocks (1024 examples each) on the follower side.

        Records the resulting metas in self.dumped_metas and the final
        leader index in self.leader_end_index.  Follower example ids are
        the strings "0".."5119"; leader_index advances by 3 per example,
        so only every third leader raw record corresponds to a joined
        example (mirrored by generate_leader_raw_data).
        """
        dbm = data_block_manager.DataBlockManager(self.data_source_f, 0)
        self.assertEqual(dbm.get_dumped_data_block_count(), 0)
        self.assertEqual(dbm.get_lastest_data_block_meta(), None)
        leader_index = 0
        # Follower indices start away from 0 to keep the two index spaces
        # visibly distinct in the dumped records.
        follower_index = 65536
        self.dumped_metas = []
        for i in range(5):
            builder = DataBlockBuilder(
                common.data_source_data_block_dir(self.data_source_f),
                self.data_source_f.data_source_meta.name,
                0, i, dj_pb.WriterOptions(output_writer='TF_RECORD'), None
            )
            builder.set_data_block_manager(dbm)
            for j in range(1024):
                feat = {}
                # example_id is the global example ordinal as a byte string.
                example_id = '{}'.format(i * 1024 + j).encode()
                feat['example_id'] = tf.train.Feature(
                    bytes_list=tf.train.BytesList(value=[example_id]))
                event_time = 150000000 + i * 1024 + j
                feat['event_time'] = tf.train.Feature(
                    int64_list=tf.train.Int64List(value=[event_time]))
                feat['leader_index'] = tf.train.Feature(
                    int64_list=tf.train.Int64List(value=[leader_index]))
                feat['follower_index'] = tf.train.Feature(
                    int64_list=tf.train.Int64List(value=[follower_index]))
                example = tf.train.Example(features=tf.train.Features(feature=feat))
                builder.append_item(TfExampleItem(example.SerializeToString()),
                                    leader_index, follower_index)
                # Leader indices are spaced 3 apart; follower ones contiguous.
                leader_index += 3
                follower_index += 1
            meta = builder.finish_data_block()
            self.dumped_metas.append(meta)
        self.leader_start_index = 0
        self.leader_end_index = leader_index
        self.assertEqual(dbm.get_dumped_data_block_count(), 5)
        for (idx, meta) in enumerate(self.dumped_metas):
            self.assertEqual(dbm.get_data_block_meta_by_index(idx), meta)

    def generate_leader_raw_data(self):
        """Write the leader's raw data files and register them as raw data.

        Emits one record per leader index in [0, leader_end_index + 2],
        rolling over to a new file every 2048 records.  Every third record
        (i % 3 == 0) carries example_id i // 3, matching a follower example;
        the rest get a huge non-matching id so the dumper must skip them.
        Must be called after generate_follower_data_block().
        """
        dbm = data_block_manager.DataBlockManager(self.data_source_l, 0)
        raw_data_dir = os.path.join(self.raw_data_dir_l, common.partition_repr(0))
        if gfile.Exists(raw_data_dir):
            gfile.DeleteRecursively(raw_data_dir)
        gfile.MakeDirs(raw_data_dir)
        # NOTE(review): rdm is never used below; constructing it may still
        # touch the kvstore — confirm before removing.
        rdm = raw_data_visitor.RawDataManager(self.kvstore, self.data_source_l, 0)
        block_index = 0
        builder = DataBlockBuilder(
            self.raw_data_dir_l,
            self.data_source_l.data_source_meta.name,
            0, block_index, dj_pb.WriterOptions(output_writer='TF_RECORD'), None
        )
        process_index = 0
        start_index = 0
        for i in range(0, self.leader_end_index + 3):
            # Roll the file every 2048 records, plus once near the end so
            # the final partial block is flushed and registered.
            if (i > 0 and i % 2048 == 0) or (i == self.leader_end_index + 2):
                meta = builder.finish_data_block()
                if meta is not None:
                    ofname = common.encode_data_block_fname(
                            self.data_source_l.data_source_meta.name,
                            meta
                        )
                    fpath = os.path.join(raw_data_dir, ofname)
                    # Register the finished file as leader raw data input.
                    self.manifest_manager.add_raw_data(
                            0,
                            [dj_pb.RawDataMeta(file_path=fpath,
                                               timestamp=timestamp_pb2.Timestamp(seconds=3))],
                            False)
                    process_index += 1
                    start_index += len(meta.example_ids)
                block_index += 1
                builder = DataBlockBuilder(
                        self.raw_data_dir_l,
                        self.data_source_l.data_source_meta.name,
                        0, block_index, dj_pb.WriterOptions(output_writer='TF_RECORD'), None
                    )
            feat = {}
            # Precedence note: this is (i + 1) << 30 — a huge id guaranteed
            # not to collide with any follower example_id (0..5119).
            pt = i + 1 << 30
            if i % 3 == 0:
                # Every third leader record matches a follower example.
                pt = i // 3
            example_id = '{}'.format(pt).encode()
            feat['example_id'] = tf.train.Feature(
                bytes_list=tf.train.BytesList(value=[example_id]))
            event_time = 150000000 + pt
            feat['event_time'] = tf.train.Feature(
                int64_list=tf.train.Int64List(value=[event_time]))
            example = tf.train.Example(features=tf.train.Features(feature=feat))
            builder.append_item(TfExampleItem(example.SerializeToString()), i, i)
        # Keep only the data block payload files in the raw data dir; the
        # builder's companion files (e.g. meta files) are not raw data input.
        fpaths = [os.path.join(raw_data_dir, f)
                    for f in gfile.ListDirectory(raw_data_dir)
                    if not gfile.IsDirectory(os.path.join(raw_data_dir, f))]
        for fpath in fpaths:
            if not fpath.endswith(common.DataBlockSuffix):
                gfile.Remove(fpath)

    def test_data_block_dumper(self):
        """Replay follower metas through the dumper and verify the output.

        Checks that the leader-side dumper produces data blocks whose metas
        and record-by-record example_id ordering exactly match the
        follower's originals, both via the DataBlockManager API and by
        reading the meta/data files back from disk.
        """
        self.generate_follower_data_block()
        self.generate_leader_raw_data()
        dbd = data_block_dumper.DataBlockDumperManager(
                self.kvstore, self.data_source_l, 0,
                dj_pb.RawDataOptions(raw_data_iter='TF_RECORD', read_ahead_size=1<<20, read_batch_size=128),
                dj_pb.WriterOptions(output_writer='TF_RECORD')
            )
        self.assertEqual(dbd.get_next_data_block_index(), 0)
        # Feed all synced follower metas; next index must advance one by one.
        for (idx, meta) in enumerate(self.dumped_metas):
            success, next_index = dbd.add_synced_data_block_meta(meta)
            self.assertTrue(success)
            self.assertEqual(next_index, idx + 1)
        self.assertTrue(dbd.need_dump())
        self.assertEqual(dbd.get_next_data_block_index(), len(self.dumped_metas))
        with dbd.make_data_block_dumper() as dumper:
            dumper()
        dbm_f = data_block_manager.DataBlockManager(self.data_source_f, 0)
        dbm_l = data_block_manager.DataBlockManager(self.data_source_l, 0)
        self.assertEqual(dbm_f.get_dumped_data_block_count(), len(self.dumped_metas))
        self.assertEqual(dbm_f.get_dumped_data_block_count(),
                          dbm_l.get_dumped_data_block_count())
        for (idx, meta) in enumerate(self.dumped_metas):
            self.assertEqual(meta.data_block_index, idx)
            self.assertEqual(dbm_l.get_data_block_meta_by_index(idx), meta)
            self.assertEqual(dbm_f.get_data_block_meta_by_index(idx), meta)
            # Cross-check the meta files persisted on disk for both sides.
            meta_fpth_l = os.path.join(
                    common.data_source_data_block_dir(self.data_source_l),
                    common.partition_repr(0),
                    common.encode_data_block_meta_fname(
                        self.data_source_l.data_source_meta.name,
                        0, meta.data_block_index
                    )
                )
            mitr = tf.io.tf_record_iterator(meta_fpth_l)
            # Meta files store the DataBlockMeta proto in text format.
            meta_l = text_format.Parse(next(mitr), dj_pb.DataBlockMeta())
            self.assertEqual(meta_l, meta)
            meta_fpth_f = os.path.join(
                    common.data_source_data_block_dir(self.data_source_f),
                    common.partition_repr(0),
                    common.encode_data_block_meta_fname(
                        self.data_source_f.data_source_meta.name,
                        0, meta.data_block_index
                    )
                )
            mitr = tf.io.tf_record_iterator(meta_fpth_f)
            meta_f = text_format.Parse(next(mitr), dj_pb.DataBlockMeta())
            self.assertEqual(meta_f, meta)
            # Verify the leader data block records match the meta's
            # example_ids in order and count.
            data_fpth_l = os.path.join(
                    common.data_source_data_block_dir(self.data_source_l),
                    common.partition_repr(0),
                    common.encode_data_block_fname(
                        self.data_source_l.data_source_meta.name,
                        meta_l
                    )
                )
            for (iidx, record) in enumerate(tf.io.tf_record_iterator(data_fpth_l)):
                example = tf.train.Example()
                example.ParseFromString(record)
                feat = example.features.feature
                self.assertEqual(feat['example_id'].bytes_list.value[0],
                                 meta.example_ids[iidx])
            self.assertEqual(len(meta.example_ids), iidx + 1)
            # Same verification for the follower's data block.  Using the
            # leader's data source name here is harmless: both sides share
            # the name "milestone".
            data_fpth_f = os.path.join(
                    common.data_source_data_block_dir(self.data_source_f),
                    common.partition_repr(0),
                    common.encode_data_block_fname(
                        self.data_source_l.data_source_meta.name,
                        meta_f
                    )
                )
            for (iidx, record) in enumerate(tf.io.tf_record_iterator(data_fpth_f)):
                example = tf.train.Example()
                example.ParseFromString(record)
                feat = example.features.feature
                self.assertEqual(feat['example_id'].bytes_list.value[0],
                                 meta.example_ids[iidx])
            self.assertEqual(len(meta.example_ids), iidx +1)

    def tearDown(self):
        """Remove all output/raw-data directories and kvstore entries."""
        if gfile.Exists(self.data_source_f.output_base_dir):
            gfile.DeleteRecursively(self.data_source_f.output_base_dir)
        if gfile.Exists(self.data_source_l.output_base_dir):
            gfile.DeleteRecursively(self.data_source_l.output_base_dir)
        if gfile.Exists(self.raw_data_dir_l):
            gfile.DeleteRecursively(self.raw_data_dir_l)
        self.kvstore.delete_prefix(common.data_source_kvstore_base_dir(self.data_source_l.data_source_meta.name))
if __name__ == '__main__':
    # Discover and run the test case when this module is executed directly.
    unittest.main()
| 49.538776 | 113 | 0.616215 |
import unittest
import os
import tensorflow.compat.v1 as tf
tf.enable_eager_execution()
from google.protobuf import text_format, timestamp_pb2
import tensorflow_io
from tensorflow.compat.v1 import gfile
from fedlearner.common import db_client
from fedlearner.common import common_pb2 as common_pb
from fedlearner.common import data_join_service_pb2 as dj_pb
from fedlearner.data_join import (
data_block_manager, common, data_block_dumper,
raw_data_manifest_manager, raw_data_visitor, visitor
)
from fedlearner.data_join.data_block_manager import DataBlockBuilder
from fedlearner.data_join.raw_data_iter_impl.tf_record_iter import TfExampleItem
class TestDataBlockDumper(unittest.TestCase):
    def setUp(self):
        """Create fresh follower/leader data sources and wipe prior state.

        Deletes any previous output/raw-data directories and clears the
        leader's kvstore prefix so each run starts from a clean slate.
        """
        # Follower data source: the side that already has joined data blocks.
        data_source_f = common_pb.DataSource()
        data_source_f.data_source_meta.name = "milestone"
        data_source_f.data_source_meta.partition_num = 1
        data_source_f.output_base_dir = "./output-f"
        self.data_source_f = data_source_f
        if gfile.Exists(self.data_source_f.output_base_dir):
            gfile.DeleteRecursively(self.data_source_f.output_base_dir)
        # Leader data source: same name/partitioning, separate output dir;
        # its input is raw data written under self.raw_data_dir_l.
        data_source_l = common_pb.DataSource()
        data_source_l.data_source_meta.name = "milestone"
        data_source_l.data_source_meta.partition_num = 1
        data_source_l.output_base_dir = "./output-l"
        self.raw_data_dir_l = "./raw_data-l"
        self.data_source_l = data_source_l
        if gfile.Exists(self.data_source_l.output_base_dir):
            gfile.DeleteRecursively(self.data_source_l.output_base_dir)
        if gfile.Exists(self.raw_data_dir_l):
            gfile.DeleteRecursively(self.raw_data_dir_l)
        # NOTE(review): DBClient('etcd', True) — the True flag presumably
        # selects a mock/in-process etcd backend; confirm against db_client.
        self.kvstore = db_client.DBClient('etcd', True)
        # Clear kvstore entries left over from a previous run.
        self.kvstore.delete_prefix(common.data_source_kvstore_base_dir(self.data_source_l.data_source_meta.name))
        self.manifest_manager = raw_data_manifest_manager.RawDataManifestManager(
            self.kvstore, self.data_source_l)
def generate_follower_data_block(self):
dbm = data_block_manager.DataBlockManager(self.data_source_f, 0)
self.assertEqual(dbm.get_dumped_data_block_count(), 0)
self.assertEqual(dbm.get_lastest_data_block_meta(), None)
leader_index = 0
follower_index = 65536
self.dumped_metas = []
for i in range(5):
builder = DataBlockBuilder(
common.data_source_data_block_dir(self.data_source_f),
self.data_source_f.data_source_meta.name,
0, i, dj_pb.WriterOptions(output_writer='TF_RECORD'), None
)
builder.set_data_block_manager(dbm)
for j in range(1024):
feat = {}
example_id = '{}'.format(i * 1024 + j).encode()
feat['example_id'] = tf.train.Feature(
bytes_list=tf.train.BytesList(value=[example_id]))
event_time = 150000000 + i * 1024 + j
feat['event_time'] = tf.train.Feature(
int64_list=tf.train.Int64List(value=[event_time]))
feat['leader_index'] = tf.train.Feature(
int64_list=tf.train.Int64List(value=[leader_index]))
feat['follower_index'] = tf.train.Feature(
int64_list=tf.train.Int64List(value=[follower_index]))
example = tf.train.Example(features=tf.train.Features(feature=feat))
builder.append_item(TfExampleItem(example.SerializeToString()),
leader_index, follower_index)
leader_index += 3
follower_index += 1
meta = builder.finish_data_block()
self.dumped_metas.append(meta)
self.leader_start_index = 0
self.leader_end_index = leader_index
self.assertEqual(dbm.get_dumped_data_block_count(), 5)
for (idx, meta) in enumerate(self.dumped_metas):
self.assertEqual(dbm.get_data_block_meta_by_index(idx), meta)
def generate_leader_raw_data(self):
dbm = data_block_manager.DataBlockManager(self.data_source_l, 0)
raw_data_dir = os.path.join(self.raw_data_dir_l, common.partition_repr(0))
if gfile.Exists(raw_data_dir):
gfile.DeleteRecursively(raw_data_dir)
gfile.MakeDirs(raw_data_dir)
rdm = raw_data_visitor.RawDataManager(self.kvstore, self.data_source_l, 0)
block_index = 0
builder = DataBlockBuilder(
self.raw_data_dir_l,
self.data_source_l.data_source_meta.name,
0, block_index, dj_pb.WriterOptions(output_writer='TF_RECORD'), None
)
process_index = 0
start_index = 0
for i in range(0, self.leader_end_index + 3):
if (i > 0 and i % 2048 == 0) or (i == self.leader_end_index + 2):
meta = builder.finish_data_block()
if meta is not None:
ofname = common.encode_data_block_fname(
self.data_source_l.data_source_meta.name,
meta
)
fpath = os.path.join(raw_data_dir, ofname)
self.manifest_manager.add_raw_data(
0,
[dj_pb.RawDataMeta(file_path=fpath,
timestamp=timestamp_pb2.Timestamp(seconds=3))],
False)
process_index += 1
start_index += len(meta.example_ids)
block_index += 1
builder = DataBlockBuilder(
self.raw_data_dir_l,
self.data_source_l.data_source_meta.name,
0, block_index, dj_pb.WriterOptions(output_writer='TF_RECORD'), None
)
feat = {}
pt = i + 1 << 30
if i % 3 == 0:
pt = i // 3
example_id = '{}'.format(pt).encode()
feat['example_id'] = tf.train.Feature(
bytes_list=tf.train.BytesList(value=[example_id]))
event_time = 150000000 + pt
feat['event_time'] = tf.train.Feature(
int64_list=tf.train.Int64List(value=[event_time]))
example = tf.train.Example(features=tf.train.Features(feature=feat))
builder.append_item(TfExampleItem(example.SerializeToString()), i, i)
fpaths = [os.path.join(raw_data_dir, f)
for f in gfile.ListDirectory(raw_data_dir)
if not gfile.IsDirectory(os.path.join(raw_data_dir, f))]
for fpath in fpaths:
if not fpath.endswith(common.DataBlockSuffix):
gfile.Remove(fpath)
def test_data_block_dumper(self):
self.generate_follower_data_block()
self.generate_leader_raw_data()
dbd = data_block_dumper.DataBlockDumperManager(
self.kvstore, self.data_source_l, 0,
dj_pb.RawDataOptions(raw_data_iter='TF_RECORD', read_ahead_size=1<<20, read_batch_size=128),
dj_pb.WriterOptions(output_writer='TF_RECORD')
)
self.assertEqual(dbd.get_next_data_block_index(), 0)
for (idx, meta) in enumerate(self.dumped_metas):
success, next_index = dbd.add_synced_data_block_meta(meta)
self.assertTrue(success)
self.assertEqual(next_index, idx + 1)
self.assertTrue(dbd.need_dump())
self.assertEqual(dbd.get_next_data_block_index(), len(self.dumped_metas))
with dbd.make_data_block_dumper() as dumper:
dumper()
dbm_f = data_block_manager.DataBlockManager(self.data_source_f, 0)
dbm_l = data_block_manager.DataBlockManager(self.data_source_l, 0)
self.assertEqual(dbm_f.get_dumped_data_block_count(), len(self.dumped_metas))
self.assertEqual(dbm_f.get_dumped_data_block_count(),
dbm_l.get_dumped_data_block_count())
for (idx, meta) in enumerate(self.dumped_metas):
self.assertEqual(meta.data_block_index, idx)
self.assertEqual(dbm_l.get_data_block_meta_by_index(idx), meta)
self.assertEqual(dbm_f.get_data_block_meta_by_index(idx), meta)
meta_fpth_l = os.path.join(
common.data_source_data_block_dir(self.data_source_l),
common.partition_repr(0),
common.encode_data_block_meta_fname(
self.data_source_l.data_source_meta.name,
0, meta.data_block_index
)
)
mitr = tf.io.tf_record_iterator(meta_fpth_l)
meta_l = text_format.Parse(next(mitr), dj_pb.DataBlockMeta())
self.assertEqual(meta_l, meta)
meta_fpth_f = os.path.join(
common.data_source_data_block_dir(self.data_source_f),
common.partition_repr(0),
common.encode_data_block_meta_fname(
self.data_source_f.data_source_meta.name,
0, meta.data_block_index
)
)
mitr = tf.io.tf_record_iterator(meta_fpth_f)
meta_f = text_format.Parse(next(mitr), dj_pb.DataBlockMeta())
self.assertEqual(meta_f, meta)
data_fpth_l = os.path.join(
common.data_source_data_block_dir(self.data_source_l),
common.partition_repr(0),
common.encode_data_block_fname(
self.data_source_l.data_source_meta.name,
meta_l
)
)
for (iidx, record) in enumerate(tf.io.tf_record_iterator(data_fpth_l)):
example = tf.train.Example()
example.ParseFromString(record)
feat = example.features.feature
self.assertEqual(feat['example_id'].bytes_list.value[0],
meta.example_ids[iidx])
self.assertEqual(len(meta.example_ids), iidx + 1)
data_fpth_f = os.path.join(
common.data_source_data_block_dir(self.data_source_f),
common.partition_repr(0),
common.encode_data_block_fname(
self.data_source_l.data_source_meta.name,
meta_f
)
)
for (iidx, record) in enumerate(tf.io.tf_record_iterator(data_fpth_f)):
example = tf.train.Example()
example.ParseFromString(record)
feat = example.features.feature
self.assertEqual(feat['example_id'].bytes_list.value[0],
meta.example_ids[iidx])
self.assertEqual(len(meta.example_ids), iidx +1)
def tearDown(self):
if gfile.Exists(self.data_source_f.output_base_dir):
gfile.DeleteRecursively(self.data_source_f.output_base_dir)
if gfile.Exists(self.data_source_l.output_base_dir):
gfile.DeleteRecursively(self.data_source_l.output_base_dir)
if gfile.Exists(self.raw_data_dir_l):
gfile.DeleteRecursively(self.raw_data_dir_l)
self.kvstore.delete_prefix(common.data_source_kvstore_base_dir(self.data_source_l.data_source_meta.name))
# Allow running this module's tests directly from the command line.
if __name__ == "__main__":
    unittest.main()
| true | true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.